package eu.dnetlib.msro.openaireplus.workflows.nodes.claims;

import java.io.IOException;
import java.util.List;

import com.googlecode.sarasvati.Arc;
import com.googlecode.sarasvati.NodeToken;
import eu.dnetlib.data.hadoop.rmi.HadoopService;
import eu.dnetlib.data.mapreduce.util.OafRowKeyDecoder;
import eu.dnetlib.data.proto.FieldTypeProtos.DataInfo;
import eu.dnetlib.data.proto.FieldTypeProtos.Qualifier;
import eu.dnetlib.data.proto.KindProtos.Kind;
import eu.dnetlib.data.proto.OafProtos.Oaf;
import eu.dnetlib.data.proto.OafProtos.OafRel;
import eu.dnetlib.data.proto.RelMetadataProtos.RelMetadata;
import eu.dnetlib.data.proto.RelTypeProtos.RelType;
import eu.dnetlib.data.proto.RelTypeProtos.SubRelType;
import eu.dnetlib.data.proto.ResultProjectProtos.ResultProject;
import eu.dnetlib.data.proto.ResultProjectProtos.ResultProject.Outcome;
import eu.dnetlib.data.proto.ResultResultProtos.ResultResult;
import eu.dnetlib.data.proto.ResultResultProtos.ResultResult.PublicationDataset;
import eu.dnetlib.data.transform.xml.AbstractDNetXsltFunctions;
import eu.dnetlib.enabling.locators.UniqueServiceLocator;
import eu.dnetlib.msro.rmi.MSROException;
import eu.dnetlib.msro.workflows.nodes.SimpleJobNode;
import eu.dnetlib.utils.ontologies.OntologyLoader;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Workflow node that applies the relationships claimed by users (result-project and result-result
 * links) to the HBase information space table.
 * <p>
 * Created by alessia on 23/10/15.
 */
public class ApplyClaimRelsJobNode extends SimpleJobNode {

    private static final Log log = LogFactory.getLog(ApplyClaimRelsJobNode.class);

    private static final String SEPARATOR = "_";

    @Autowired
    private UniqueServiceLocator serviceLocator;

    @Autowired
    private ClaimDatabaseUtils claimDatabaseUtils;

    private String sql;

    private String clusterName;

    private String tableName;

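    /** Reads a SQL query shipped as a classpath resource into a String. */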
    private String fetchSqlAsText(final String path) throws IOException {
        return IOUtils.toString(getClass().getResourceAsStream(path));
    }

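    /**
     * Loads the claimed relationships from the claims database and, for each claim, writes both the
     * direct and the inverse relationship to HBase as Base64-encoded Oaf protobuf values.
     */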
    @Override
    protected String execute(NodeToken token) throws Exception {
        // TODO: use claim.claim_date from the claim db
        long timestamp = System.currentTimeMillis();

        List<Claim> claimRels = this.claimDatabaseUtils.query(sql);
        int totalClaims = 0;
        int totalWrites = 0;

        HadoopService hadoopService = serviceLocator.getService(HadoopService.class);

        for (Claim claim : claimRels) {
            log.debug(claim);
            totalClaims++;
            String sourceId = fullId(getOpenAIREType(claim.getSourceType()), claim.getSource());
            String targetId = fullId(getOpenAIREType(claim.getTargetType()), claim.getTarget());

            // direct relationship: row key = sourceId, column family = semantics, qualifier = targetId
            String value = getValue(sourceId, claim.getSemantics(), targetId, timestamp);
            /*
            public void addHBaseColumn(final String clusterName,
                final String tableName,
                final String rowKey,
                final String columnFamily,
                final String qualifier,
                final String value)
            */
            hadoopService.addHBaseColumn(clusterName, tableName, sourceId, claim.getSemantics(), targetId, value);
            totalWrites++;

            // inverse relationship, resolved via the ontology (e.g. produces <-> isProducedBy)
            String inverseSemantics = OntologyLoader.fetchInverse(claim.getSemantics());
            String inverseValue = getValue(targetId, inverseSemantics, sourceId, timestamp);
            hadoopService.addHBaseColumn(clusterName, tableName, targetId, inverseSemantics, sourceId, inverseValue);
            totalWrites++;
        }

        log.info("totalClaims: " + totalClaims);
        token.getEnv().setAttribute("claimSize", totalClaims);
        log.info("writeOps: " + totalWrites);
        token.getEnv().setAttribute("writeOps", totalWrites);

        return Arc.DEFAULT_ARC;
    }

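    /**
     * Maps the claim entity types onto OpenAIRE entity types: publications and datasets are both
     * "result" entities; any other type is returned unchanged.
     */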
    protected String getOpenAIREType(final String type) {
        switch (type) {
        case "publication":
        case "dataset":
            return "result";
        default:
            return type;
        }
    }

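    /**
     * Serializes a claimed relationship as a Base64-encoded Oaf protobuf. The semantics string must be
     * of the form relType_subRelType_relClass, e.g. {@code resultProject_outcome_produces} (illustrative
     * value; the actual terms come from the claims database and the dnet ontologies).
     */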
    public String getValue(final String sourceId, final String semantics, final String targetId, final long timestamp) throws MSROException {
        log.debug(StringUtils.format("%s -- %s -- %s", sourceId, semantics, targetId));
        String[] relInfo = semantics.split(SEPARATOR);
        if (relInfo.length < 3) {
            throw new MSROException("Semantics " + semantics + " not supported: expected relType_subRelType_relClass");
        }
        Qualifier.Builder semanticsBuilder = Qualifier.newBuilder().setClassid(relInfo[2]).setClassname(relInfo[2]);

        Oaf.Builder builder = Oaf.newBuilder().setKind(Kind.relation).setLastupdatetimestamp(timestamp);
        builder.setDataInfo(DataInfo.newBuilder().setTrust("0.91").setInferred(false)
                .setProvenanceaction(
                        Qualifier.newBuilder()
                                .setClassid("user:claim")
                                .setClassname("user:claim")
                                .setSchemeid("dnet:provenanceActions")
                                .setSchemename("dnet:provenanceActions")
                ));

        final SubRelType subRelType = SubRelType.valueOf(relInfo[1]);
        final OafRel.Builder relBuilder = OafRel.newBuilder()
                .setSubRelType(subRelType)
                .setRelClass(relInfo[2])
                .setRelType(RelType.valueOf(relInfo[0]))
                .setSource(sourceId).setTarget(targetId).setChild(false);

        switch (relInfo[0]) {
        case "resultProject":
            relBuilder.setResultProject(ResultProject.newBuilder()
                    .setOutcome(Outcome.newBuilder().setRelMetadata(
                            RelMetadata.newBuilder().setSemantics(
                                    semanticsBuilder
                                            .setSchemeid("dnet:result_project_relations")
                                            .setSchemename("dnet:result_project_relations")
                                            .build()
                            ))));
            break;
        case "resultResult":
            relBuilder.setResultResult(ResultResult.newBuilder()
                    .setPublicationDataset(PublicationDataset.newBuilder().setRelMetadata(
                            RelMetadata.newBuilder().setSemantics(
                                    semanticsBuilder
                                            .setSchemeid("dnet:result_result_relations")
                                            .setSchemename("dnet:result_result_relations")
                                            .build()
                            ))));
            break;
        default:
            throw new MSROException("Relation type " + relInfo[0] + " not supported");
        }

        builder.setRel(relBuilder);
        return Base64.encodeBase64String(builder.build().toByteArray());
    }

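    /**
     * Builds the typed OpenAIRE identifier for the given entity and normalizes it into an HBase row key.
     */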
    private String fullId(final String type, final String id) {
        final String fullId = AbstractDNetXsltFunctions.oafSimpleId(type, id);
        return OafRowKeyDecoder.decode(fullId).getKey();
    }

    public String getClusterName() {
        return clusterName;
    }

    public void setClusterName(final String clusterName) {
        this.clusterName = clusterName;
    }

    public String getTableName() {
        return tableName;
    }

    public void setTableName(final String tableName) {
        this.tableName = tableName;
    }

    public UniqueServiceLocator getServiceLocator() {
        return serviceLocator;
    }

    public void setServiceLocator(final UniqueServiceLocator serviceLocator) {
        this.serviceLocator = serviceLocator;
    }

    public ClaimDatabaseUtils getClaimDatabaseUtils() {
        return claimDatabaseUtils;
    }

    public void setClaimDatabaseUtils(final ClaimDatabaseUtils claimDatabaseUtils) {
        this.claimDatabaseUtils = claimDatabaseUtils;
    }

    public String getSql() {
        return sql;
    }

    public void setSql(final String sql) {
        this.sql = sql;
    }
}