// Drop records already deleted by inference: they must not be propagated.
if (oaf.getDataInfo().getDeletedbyinference()) {
    return null;
}

// Mutable copy of the incoming Oaf, used to append community contexts/provenance.
final OafProtos.Oaf.Builder builder = OafProtos.Oaf.newBuilder(oaf);

final List<ResultProtos.Result.Context> contextList =
        oaf.getEntity().getResult().getMetadata().getContextList();

// communities contains all the community ids to be added as context for the result.
final Set<String> communities = new HashSet<>();

// 1) communities derived from the result's subjects.
final List<FieldTypeProtos.StructuredProperty> subjectSet =
        oaf.getEntity().getResult().getMetadata().getSubjectList();
communities.addAll(analiseSubjects(subjectSet, conf));

// 2) communities derived from each instance's (collectedfrom, hostedby) datasource pair.
final List<Pair<String, String>> datasourceSet = oaf.getEntity().getResult().getInstanceList()
        .stream()
        .map(i -> new Pair<>(i.getCollectedfrom().getKey(), i.getHostedby().getKey()))
        .collect(Collectors.toList());
communities.addAll(analiseDatasource(datasourceSet, conf));

// TODO: add code for Zenodo Communities

// Index the existing contexts by id, keeping their position so the matching
// sub-builder can be addressed when only a provenance entry must be appended.
final Map<String, Pair<Integer, ResultProtos.Result.Context>> contexts = new HashMap<>();
for (int i = 0; i < contextList.size(); i++) {
    contexts.put(contextList.get(i).getId(), new Pair<>(i, contextList.get(i)));
}

for (final String contextId : communities) {
    final Pair<Integer, ResultProtos.Result.Context> pair = contexts.get(contextId);
    if (pair == null) {
        // Community not yet present on the result: add a brand new context.
        builder.getEntityBuilder().getResultBuilder().getMetadataBuilder()
                .addContext(buildContext(contextId));
        context.getCounter(COUNTER_GROUP, "add context").increment(1);
    } else {
        // Context already present: add the bulk-tagging provenance only if missing.
        final Set<String> provenances = pair.getSnd().getDataInfoList().stream()
                .map(datainfo -> datainfo.getInferenceprovenance())
                .collect(Collectors.toSet());
        if (!provenances.contains(DATA_INFO_TYPE)) {
            builder.getEntityBuilder().getResultBuilder().getMetadataBuilder()
                    .getContextBuilder(pair.getFst())
                    .addDataInfo(buildDataInfo());
            context.getCounter(COUNTER_GROUP, "add provenance").increment(1);
        }
    }
}

return builder.build();
}
|
84 |
84 |
|
85 |
85 |
private Set<String> analiseDatasource(List<Pair<String, String>> datasourceSet, final CommunityConfiguration conf) {
|
Adding counters for testing