package eu.dnetlib.data.mapreduce.hbase.statsExport;
2

    
3
import java.io.IOException;
4
import java.nio.charset.Charset;
5
import java.util.Iterator;
6

    
7
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
8
import org.apache.hadoop.io.Text;
9
import org.apache.hadoop.mapreduce.Reducer;
10
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
11
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
12
import org.apache.log4j.Logger;
13

    
14
public class StatsReducer extends Reducer<Text, ImmutableBytesWritable, Text, Text> {
15

    
16
	private Logger log = Logger.getLogger(StatsReducer.class);
17
	private MultipleOutputs MultipleOutputWriter;
18

    
19
	/**
20
	 * Reducer that splits input according to their Type ( datasource, results
21
	 * etc..) and writes each kind to a seperate output
22
	 */
23
	@Override
24
	protected void setup(Context context) throws IOException, InterruptedException {
25

    
26
		MultipleOutputWriter = new MultipleOutputs((TaskInputOutputContext) context);
27
	};
28

    
29
	@Override
30
	protected void reduce(final Text key, final Iterable<ImmutableBytesWritable> values, final Context context) throws IOException, InterruptedException {
31

    
32
		Iterator<ImmutableBytesWritable> it = values.iterator();
33
		while (it.hasNext()) {
34
			String[] split = key.toString().split(",");
35
			String type = split[0];
36
			String id = split[1];
37
			String value = new String(it.next().copyBytes(),Charset.forName("UTF-8"));
38
            log.info(" Reducer Key is" + type  + "Id is " + id);
39
			value = value.trim();
40
			MultipleOutputWriter.write(type, new Text(id.getBytes(Charset.forName("UTF-8"))), new Text(value.getBytes(Charset.forName("UTF-8"))), type.toString());
41

    
42
		}
43

    
44
	}
45

    
46
	@Override
47
	protected void cleanup(Context context) throws IOException, InterruptedException {
48
		log.info("Cleaning up reducer...");
49
		MultipleOutputWriter.close();
50
	}
51
}