package eu.dnetlib.data.mapreduce.hbase.statsExport.mapreduce;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
/**
 * Reducer that splits the input records by their type (datasource, result,
 * etc.) and writes each kind to a separate named output.
 */
public class StatsReducer extends Reducer<Text, ImmutableBytesWritable, Text, Text> {
	private Logger log = Logger.getLogger(StatsReducer.class);
	private MultipleOutputs<Text, Text> multipleOutputWriter;
	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		multipleOutputWriter = new MultipleOutputs<Text, Text>(context);
	}
	@Override
	protected void reduce(final Text key, final Iterable<ImmutableBytesWritable> values, final Context context) throws IOException, InterruptedException {
		// The key has the form "<type>,<id>"; parse it once, since it is
		// constant for the whole reduce call.
		String[] split = key.toString().split(",");
		String type = split[0];
		String id = split[1];

		Iterator<ImmutableBytesWritable> it = values.iterator();
		while (it.hasNext()) {
			String value = new String(it.next().copyBytes(), StandardCharsets.UTF_8).trim();
			// Route the record to the named output matching its type, using
			// the type as the base output path as well.
			multipleOutputWriter.write(type, new Text(id), new Text(value), type);
		}
	}
	@Override
	protected void cleanup(Context context) throws IOException, InterruptedException {
		log.info("Cleaning up reducer...");
		multipleOutputWriter.close();
	}
}
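
/*
 * Driver-side sketch (an assumption, not part of this file): MultipleOutputs
 * only writes to named outputs that were registered on the Job, so the job
 * driver must declare one named output per record type before this reducer
 * runs. The type names below ("datasource", "result") and the job setup are
 * illustrative; the names must match the type prefix carried in the
 * reducer's input keys.
 *
 *   Job job = Job.getInstance(new Configuration(), "statsExport");
 *   job.setReducerClass(StatsReducer.class);
 *   for (String type : new String[] { "datasource", "result" }) {
 *       MultipleOutputs.addNamedOutput(job, type, TextOutputFormat.class,
 *           Text.class, Text.class);
 *   }
 */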