package eu.dnetlib.data.mapreduce.hbase.dataimport;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import eu.dnetlib.data.mapreduce.JobParams;

/**
 * Maps <Text, Text> records read from a sequence file to HBase Puts:
 * the record key becomes the row key, and the record value is stored in the
 * column family/qualifier configured via {@link JobParams}.
 */
public class SequenceFileRecordMapper extends Mapper<Text, Text, ImmutableBytesWritable, Put> {

	/** Target HBase column family, read from the job configuration. */
	private String family;

	/** Target HBase column qualifier, read from the job configuration. */
	private String qualifier;

	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		super.setup(context);

		this.family = context.getConfiguration().get(JobParams.HBASE_TARGET_FAMILY).trim();
		this.qualifier = context.getConfiguration().get(JobParams.HBASE_TARGET_QUALIFIER).trim();
	}

	@Override
	protected void map(Text key, Text value, Context context) throws IOException, InterruptedException {

		// Alternative row key scheme, currently unused:
		// byte[] rowKey = DigestUtils.md5(key.toString());
		byte[] rowKey = Bytes.toBytes(key.toString());

		// Store the record body in the configured family:qualifier of the row identified by the key.
		Put put = new Put(rowKey).add(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value.toString()));

		context.write(new ImmutableBytesWritable(rowKey), put);
	}
}
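
For context, a minimal driver sketch showing one way this mapper could be wired into a map-only job that writes the emitted Puts straight to HBase. The class name SequenceFileImportJob, the target table name, the input path argument, and the family/qualifier values are illustrative assumptions and not part of the dnet-mapreduce codebase; the TableMapReduceUtil wiring shown is the common HBase pattern for this kind of import, not necessarily how the project actually submits the job.

package eu.dnetlib.data.mapreduce.hbase.dataimport;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

import eu.dnetlib.data.mapreduce.JobParams;

public class SequenceFileImportJob {

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();

		// Column family/qualifier read by SequenceFileRecordMapper.setup(); placeholder values.
		conf.set(JobParams.HBASE_TARGET_FAMILY, "result");
		conf.set(JobParams.HBASE_TARGET_QUALIFIER, "body");

		Job job = Job.getInstance(conf, "sequence file import");
		job.setJarByClass(SequenceFileImportJob.class);

		// Read <Text, Text> pairs from the input sequence file(s); path is a placeholder argument.
		job.setInputFormatClass(SequenceFileInputFormat.class);
		SequenceFileInputFormat.addInputPath(job, new Path(args[0]));

		job.setMapperClass(SequenceFileRecordMapper.class);

		// Map-only job: the Puts are written directly to the target table ("db_openaire" is a placeholder).
		TableMapReduceUtil.initTableReducerJob("db_openaire", null, job);
		job.setNumReduceTasks(0);

		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}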