package eu.dnetlib.iis.core.examples.javamapreduce;

//package eu.dnetlib.iis.core.examples.mapreduce;
//
//import java.io.IOException;
//
//import org.apache.avro.mapred.AvroMapper;
//import org.apache.avro.mapred.Pair;
//import org.apache.hadoop.io.BytesWritable;
//import org.apache.hadoop.io.Text;
//import org.apache.hadoop.mapred.JobConf;
//import org.apache.hadoop.mapreduce.Mapper;
//import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
//
//import eu.dnetlib.iis.io.MapRedUtils;
//import eu.dnetlib.iis.core.examples.protobuf.generated.PersonProto.Person;
//
///**
// * Mapper writing to multiple outputs, splitting data by age.
// * @author mhorst
// *
// */
//public class AvroPersonByAgeSplitter extends AvroMapper<Person, Pair<String, Person>> {
//
//	private String namedOutputPersonEvenAge;
//	private String namedOutputPersonOddAge;
//
//	@SuppressWarnings("rawtypes")
//	private MultipleOutputs mos = null;
//
//	/* (non-Javadoc)
//	 * @see org.apache.hadoop.mapreduce.Mapper#setup(org.apache.hadoop.mapreduce.Mapper.Context)
//	 */
//	@SuppressWarnings({ "rawtypes", "unchecked" })
//	@Override
//	protected void setup(Context context) throws IOException,
//			InterruptedException {
//		this.namedOutputPersonEvenAge = conf.get(
//				"named.output.person.age.even");
//		this.namedOutputPersonOddAge = conf.get(
//				"named.output.person.age.odd");
//		this.mos = new MultipleOutputs(context);
//	}
//
//	/** This is the place you can access map-reduce workflow node parameters */
//	@Override
//	public void configure(JobConf jobConf) {
//		this.namedOutputPersonEvenAge = jobConf.get(
//				"named.output.person.age.even");
//		this.namedOutputPersonOddAge = jobConf.get(
//				"named.output.person.age.odd");
//		this.mos = new MultipleOutputs(jobConf);
//	}
//
//	/* (non-Javadoc)
//	 * @see org.apache.hadoop.mapreduce.Mapper#cleanup(org.apache.hadoop.mapreduce.Mapper.Context)
//	 */
//	@Override
//	public void cleanup(Context context) throws IOException, InterruptedException {
//		mos.close();
//	}
//
//	/* (non-Javadoc)
//	 * @see org.apache.hadoop.mapreduce.Mapper#map(KEYIN, VALUEIN, org.apache.hadoop.mapreduce.Mapper.Context)
//	 */
//	@Override
//	public void map(Text key, BytesWritable value, Context context)
//			throws IOException, InterruptedException {
//		Person person = MapRedUtils.parse(Person.class, key, value);
//		if (person.getAge()%2==0) {
//			MapRedUtils.write(mos, namedOutputPersonEvenAge, person);
//		} else {
//			MapRedUtils.write(mos, namedOutputPersonOddAge, person);
//		}
//	}
//
//}