package eu.dnetlib.data.mapreduce.hbase.lodExport.utils;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class SingleRecordWriter extends RecordWriter<Text, Text> {
|
11
|
private DataOutputStream out;
|
12
|
|
13
|
public SingleRecordWriter(DataOutputStream stream) {
|
14
|
out = stream;
|
15
|
try {
|
16
|
out.writeBytes("\r\n");
|
17
|
} catch (Exception ex) {
|
18
|
}
|
19
|
}
|
20
|
|
21
|
@Override
|
22
|
public void close(TaskAttemptContext arg0) throws IOException, InterruptedException {
|
23
|
//close our file
|
24
|
out.close();
|
25
|
}
|
26
|
|
27
|
@Override
|
28
|
public void write(Text key, Text value) throws IOException, InterruptedException {
|
29
|
//write out our key
|
30
|
if (key != null) {
|
31
|
out.writeBytes("\n" + key.toString() + "\t");
|
32
|
}
|
33
|
|
34
|
out.writeBytes(String.valueOf(value) + ",");
|
35
|
}
|
36
|
|
37
|
|
38
|
}
|