package eu.dnetlib.data.mapreduce.hbase.statsExport;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.log4j.Logger;

/**
 * Reducer that splits its input according to type (datasource, result,
 * etc.) and writes each kind of record to a separate named output.
 */
public class StatsReducer extends Reducer<Text, ImmutableBytesWritable, Text, Text> {

	private static final Logger log = Logger.getLogger(StatsReducer.class);

	private MultipleOutputs<Text, Text> multipleOutputWriter;
	private long counter = 0;

	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		// Reducer.Context already is a TaskInputOutputContext, so no cast is needed.
		multipleOutputWriter = new MultipleOutputs<Text, Text>(context);
	}
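
	/*
	 * Every named output written in reduce() must first be registered on the
	 * job by the driver, otherwise MultipleOutputs.write() rejects the name.
	 * A minimal sketch, assuming the incoming types include "datasource" and
	 * "result" (driver-side code, not part of this class):
	 *
	 *   MultipleOutputs.addNamedOutput(job, "datasource",
	 *       TextOutputFormat.class, Text.class, Text.class);
	 *   MultipleOutputs.addNamedOutput(job, "result",
	 *       TextOutputFormat.class, Text.class, Text.class);
	 */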

	@Override
	protected void reduce(final Text key, final Iterable<ImmutableBytesWritable> values, final Context context) throws IOException, InterruptedException {

		// Keys have the form "<type>,<id>": the type selects the named output
		// and the id becomes the output key, so parse once per key.
		String[] split = key.toString().split(",");
		String type = split[0];
		String id = split[1];
		String delim = context.getConfiguration().get("stats.delim");

		for (ImmutableBytesWritable bytes : values) {
			// Decode explicitly as UTF-8 instead of the platform default charset.
			String value = new String(bytes.copyBytes(), StandardCharsets.UTF_8).trim();
			counter++;
			// The leading tab on the key is kept from the original output layout.
			multipleOutputWriter.write(type, new Text("\t" + id), new Text(delim + value), type);
		}
	}
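
	/*
	 * Worked example, assuming stats.delim is "!": the pair
	 * ("datasource,ds1", "name!url") goes to the "datasource" named output,
	 * which TextOutputFormat renders as the line "\tds1<TAB>!name!url".
	 */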

	@Override
	protected void cleanup(Context context) throws IOException, InterruptedException {
		log.info("******************************* TOTAL RECORDS  " + counter);
		// Closing flushes and commits every named output opened by this reducer.
		multipleOutputWriter.close();
	}
}