package eu.dnetlib.data.mapreduce.hbase.lodExport.preprocessing;

import eu.dnetlib.data.mapreduce.hbase.lodExport.utils.configuration.LodConfiguration;
import eu.dnetlib.data.mapreduce.hbase.lodExport.utils.configuration.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.log4j.Logger;

import java.io.IOException;
/**
|
14
|
* Mapper Class that reads HBASE contents and prepares them for the StatsDB
|
15
|
* export
|
16
|
*/
|
17
|
/*
|
18
|
-> Parse LOD dump files
|
19
|
|
20
|
Process lod input files and divide by entity type (both source and target)
|
21
|
Transform to id, array of [ properties] format
|
22
|
Store to HDFS
|
23
|
For -> Multiple outputs and inputs
|
24
|
Multiple inputs: all source and target datasets and their corresponding mappings
|
25
|
M/O: separate output files for each dataset: mark records so that they are written to the correct one
|
26
|
*/
|
27
|
|
28
|
/*
|
29
|
<http://lod.openaire.eu/data/result/doajarticles::89217af00809a91acc15a416e56b3782> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.eurocris.org/ontologies/cerif/1.3#ResultEntity> .
|
30
|
<http://lod.openaire.eu/data/result/doajarticles::89217af00809a91acc15a416e56b3782> <http://www.eurocris.org/ontologies/cerif/1.3#name> "Une nouvelle anomalie a allure h r ditaire chez des agneaux it khouzistans /it" .
|
31
|
*/
|
32
|
public class SourceMapper extends Mapper<LongWritable, Text, Text, Text> {
|
33
|
|
34
|
private Logger log = Logger.getLogger(SourceMapper.class);
|
35
|
private LodConfiguration lodConfiguration;
|
36
|
|
37
|
public static enum SOURCE_COUNTERS {
|
38
|
SOURCE_ENTITIES, TOTAL_ENTITIES
|
39
|
}
|
40
|
|
41
|
|
42
|
@Override
|
43
|
protected void setup(Context context) throws IOException, InterruptedException {
|
44
|
lodConfiguration = new LodConfiguration();
|
45
|
lodConfiguration.load(context.getConfiguration().get(Properties.LOD_SOURCE_MAPPINGS));
|
46
|
log.info("file loaded!" + context.getConfiguration().get(Properties.LOD_SOURCE_MAPPINGS));
|
47
|
}
|
48
|
|
49
|
@Override
|
50
|
protected void map(final LongWritable keyIn, final Text result, final Context context) throws IOException {
|
51
|
|
52
|
try {
|
53
|
context.getCounter(SOURCE_COUNTERS.TOTAL_ENTITIES).increment(1);
|
54
|
//get ID - output source_recordID so we can group by id and get all props of a record
|
55
|
|
56
|
StringBuilder value = new StringBuilder();
|
57
|
String[] inputParts = result.toString().split("\"");
|
58
|
|
59
|
int i = 0;
|
60
|
while (i < inputParts.length) {
|
61
|
inputParts[i] = inputParts[i].replaceAll("\\s", "\t");
|
62
|
i += 2;
|
63
|
}
|
64
|
|
65
|
String output = StringUtils.join(inputParts, "\"");
|
66
|
String[] Fields = output.split("\t");
|
67
|
|
68
|
if (Fields.length >= 2) {
|
69
|
//here addd all fields as array props and append "\t"
|
70
|
// betweeen them so we can write directly to output
|
71
|
//DO NOT enter id- we'll get it from key output
|
72
|
|
73
|
i = 1;
|
74
|
//extract entity type from subject
|
75
|
String[] tmp = Fields[0].split("/");
|
76
|
if (tmp.length >= 5) {
|
77
|
String type = tmp[4];
|
78
|
String subject = Fields[0];
|
79
|
if (lodConfiguration.entityExists(type)) {
|
80
|
while (i < Fields.length - 1) {
|
81
|
String field = Fields[i];
|
82
|
String fieldValue = Fields[i + 1];
|
83
|
if (lodConfiguration.isValidField(field)) {
|
84
|
value.append(subject).append("\t").append(field).append("\t").append(fieldValue).append("\t.\t");
|
85
|
}
|
86
|
i += 2;
|
87
|
}
|
88
|
// write out type,source_ID as key, and rest of props as value
|
89
|
Text key = new Text("OA" + "," + type + "," + subject);
|
90
|
if(value.toString().length()>0){
|
91
|
context.write(key, new Text(value.toString()));
|
92
|
context.getCounter(SOURCE_COUNTERS.SOURCE_ENTITIES).increment(1);
|
93
|
}}
|
94
|
}
|
95
|
}
|
96
|
} catch (Exception e) {
|
97
|
log.error("Error writing entity to M/R output", e);
|
98
|
// throw new InterruptedIOException(e.toString());
|
99
|
}
|
100
|
|
101
|
}
|
102
|
|
103
|
private static String cleanInput(Text result) {
|
104
|
String resulString = result.toString().replace("<", "").replace(">", "");
|
105
|
|
106
|
int ind = resulString.lastIndexOf(".");
|
107
|
if (ind >= 0) {
|
108
|
resulString = resulString.substring(0, ind);
|
109
|
}
|
110
|
|
111
|
return resulString;
|
112
|
}
|
113
|
|
114
|
@Override
|
115
|
protected void cleanup(Context context) throws IOException, InterruptedException {
|
116
|
super.cleanup(context);
|
117
|
}
|
118
|
|
119
|
}
|