Revision 52941
Added by Miriam Baglioni over 5 years ago
ResultTagger.java | ||
---|---|---|
25 | 25 |
|
26 | 26 |
final OafProtos.Oaf.Builder builder = OafProtos.Oaf.newBuilder(oaf); |
27 | 27 |
|
28 |
if(oaf.getDataInfo().getDeletedbyinference()) |
|
29 |
return null; |
|
30 |
|
|
31 |
final List<ResultProtos.Result.Context> contextList = oaf.getEntity().getResult().getMetadata().getContextList(); |
|
32 |
|
|
33 |
//communities contains all the communities to be added as context for the result |
|
34 |
final Set<String> communities = new HashSet<>(); |
|
35 |
List<FieldTypeProtos.StructuredProperty> subjectSet = oaf.getEntity().getResult().getMetadata().getSubjectList(); |
|
36 |
communities.addAll(analiseSubjects(subjectSet, conf)); |
|
37 |
|
|
38 |
List<Pair<String, String>> datasourceSet = oaf.getEntity().getResult().getInstanceList() |
|
39 |
.stream() |
|
40 |
.map(i -> new Pair<>(i.getCollectedfrom().getKey(), i.getHostedby().getKey())) |
|
41 |
.collect(Collectors.toList()); |
|
42 |
|
|
43 |
communities.addAll(analiseDatasource(datasourceSet, conf)); |
|
44 |
|
|
45 |
//TODO: add code for Zenodo Communities |
|
46 |
|
|
47 |
|
|
48 |
final Map<String,Pair<Integer,ResultProtos.Result.Context>> contexts = new HashMap<>(); |
|
49 |
|
|
50 |
for(int i = 0; i<contextList.size(); i++){ |
|
51 |
contexts.put(contextList.get(i).getId(),new Pair<>(i,contextList.get(i))); |
|
52 |
} |
|
53 |
|
|
54 |
for(String contextId:communities){ |
|
55 |
final Pair<Integer,ResultProtos.Result.Context> pair = contexts.get(contextId); |
|
56 |
ResultProtos.Result.Context c; |
|
57 |
if(pair == null) |
|
58 |
c = null; |
|
59 |
else |
|
60 |
c = pair.getSnd(); |
|
61 |
if (c != null) { |
|
62 |
// add new dataInfo if needed |
|
63 |
|
|
64 |
Set<String> set = new HashSet<>(); |
|
65 |
set.addAll(c.getDataInfoList().stream().map(datainfo->datainfo.getInferenceprovenance()).collect(Collectors.toList())); |
|
66 |
if (!set.contains(DATA_INFO_TYPE)) { |
|
67 |
builder.getEntityBuilder().getResultBuilder().getMetadataBuilder().getContextBuilder(pair.getFst()).addDataInfo(buildDataInfo()); |
|
68 |
|
|
69 |
context.getCounter(COUNTER_GROUP, "add provenance").increment(1); |
|
70 |
} |
|
71 |
|
|
72 |
|
|
73 |
} else { |
|
74 |
builder.getEntityBuilder().getResultBuilder().getMetadataBuilder().addContext(buildContext(contextId)); |
|
75 |
|
|
76 |
context.getCounter(COUNTER_GROUP, "add context").increment(1); |
|
77 |
|
|
78 |
} |
|
79 |
} |
|
80 |
|
|
81 |
return builder.build(); |
|
28 |
throw new RuntimeException("MI SPACCO"); |
|
29 |
// if(oaf.getDataInfo().getDeletedbyinference()) |
|
30 |
// return null; |
|
31 |
// |
|
32 |
// final List<ResultProtos.Result.Context> contextList = oaf.getEntity().getResult().getMetadata().getContextList(); |
|
33 |
// |
|
34 |
// //communities contains all the communities to be added as context for the result |
|
35 |
// final Set<String> communities = new HashSet<>(); |
|
36 |
// List<FieldTypeProtos.StructuredProperty> subjectSet = oaf.getEntity().getResult().getMetadata().getSubjectList(); |
|
37 |
// communities.addAll(analiseSubjects(subjectSet, conf)); |
|
38 |
// |
|
39 |
// List<Pair<String, String>> datasourceSet = oaf.getEntity().getResult().getInstanceList() |
|
40 |
// .stream() |
|
41 |
// .map(i -> new Pair<>(i.getCollectedfrom().getKey(), i.getHostedby().getKey())) |
|
42 |
// .collect(Collectors.toList()); |
|
43 |
// |
|
44 |
// communities.addAll(analiseDatasource(datasourceSet, conf)); |
|
45 |
// |
|
46 |
// //TODO: add code for Zenodo Communities |
|
47 |
// |
|
48 |
// |
|
49 |
// final Map<String,Pair<Integer,ResultProtos.Result.Context>> contexts = new HashMap<>(); |
|
50 |
// |
|
51 |
// for(int i = 0; i<contextList.size(); i++){ |
|
52 |
// contexts.put(contextList.get(i).getId(),new Pair<>(i,contextList.get(i))); |
|
53 |
// } |
|
54 |
// |
|
55 |
// for(String contextId:communities){ |
|
56 |
// final Pair<Integer,ResultProtos.Result.Context> pair = contexts.get(contextId); |
|
57 |
// ResultProtos.Result.Context c; |
|
58 |
// if(pair == null) |
|
59 |
// c = null; |
|
60 |
// else |
|
61 |
// c = pair.getSnd(); |
|
62 |
// if (c != null) { |
|
63 |
// // add new dataInfo if needed |
|
64 |
// |
|
65 |
// Set<String> set = new HashSet<>(); |
|
66 |
// set.addAll(c.getDataInfoList().stream().map(datainfo->datainfo.getInferenceprovenance()).collect(Collectors.toList())); |
|
67 |
// if (!set.contains(DATA_INFO_TYPE)) { |
|
68 |
// builder.getEntityBuilder().getResultBuilder().getMetadataBuilder().getContextBuilder(pair.getFst()).addDataInfo(buildDataInfo()); |
|
69 |
// |
|
70 |
// context.getCounter(COUNTER_GROUP, "add provenance").increment(1); |
|
71 |
// } |
|
72 |
// |
|
73 |
// |
|
74 |
// } else { |
|
75 |
// builder.getEntityBuilder().getResultBuilder().getMetadataBuilder().addContext(buildContext(contextId)); |
|
76 |
// |
|
77 |
// context.getCounter(COUNTER_GROUP, "add context").increment(1); |
|
78 |
// |
|
79 |
// } |
|
80 |
// } |
|
81 |
// |
|
82 |
// return builder.build(); |
|
82 | 83 |
} |
83 | 84 |
|
84 | 85 |
private Set<String> analiseDatasource(List<Pair<String, String>> datasourceSet, final CommunityConfiguration conf) { |
Also available in: Unified diff
Throw a runtime exception for testing.