Revision 36893
Added by Alessia Bardi about 9 years ago
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/deploy.info | ||
---|---|---|
1 |
{"type_source": "SVN", "goal": "package -U -T 4C source:jar", "url": "http://svn-public.driver.research-infrastructures.eu/driver/dnet40/modules/dnet-oai-store-service/trunk/", "deploy_repository": "dnet4-snapshots", "version": "4", "mail": "sandro.labruzzo@isti.cnr.it,michele.artini@isti.cnr.it, claudio.atzori@isti.cnr.it, alessia.bardi@isti.cnr.it", "deploy_repository_url": "http://maven.research-infrastructures.eu/nexus/content/repositories/dnet4-snapshots", "name": "dnet-oai-store-service"} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/test/java/eu/dnetlib/data/oai/store/parser/MongoQueryParserTest.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.parser; |
|
2 |
|
|
3 |
import static org.junit.Assert.assertEquals; |
|
4 |
|
|
5 |
import java.util.Date; |
|
6 |
|
|
7 |
import org.apache.commons.logging.Log; |
|
8 |
import org.apache.commons.logging.LogFactory; |
|
9 |
import org.bson.types.ObjectId; |
|
10 |
import org.joda.time.format.DateTimeFormat; |
|
11 |
import org.joda.time.format.DateTimeFormatter; |
|
12 |
import org.joda.time.format.ISODateTimeFormat; |
|
13 |
import org.junit.Test; |
|
14 |
|
|
15 |
import com.google.common.collect.Lists; |
|
16 |
import com.mongodb.BasicDBObject; |
|
17 |
import com.mongodb.DBObject; |
|
18 |
|
|
19 |
public class MongoQueryParserTest { |
|
20 |
|
|
21 |
private final MongoQueryParser mongoParser = new MongoQueryParser(); |
|
22 |
private static final Log log = LogFactory.getLog(MongoQueryParserTest.class); // NOPMD by marko on 11/24/08 5:02 PM |
|
23 |
|
|
24 |
@Test |
|
25 |
public void testParseEq() { |
|
26 |
DBObject expected = new BasicDBObject("set", "CEDIASManuscripts"); |
|
27 |
DBObject o = this.mongoParser.parse("set = \"CEDIASManuscripts\""); |
|
28 |
assertEquals(expected, o); |
|
29 |
|
|
30 |
} |
|
31 |
|
|
32 |
@Test |
|
33 |
public void testParseNeq() { |
|
34 |
DBObject expected = new BasicDBObject("set", new BasicDBObject("$ne", "CEDIASManuscripts")); |
|
35 |
DBObject o = this.mongoParser.parse("set <> \"CEDIASManuscripts\""); |
|
36 |
assertEquals(expected, o); |
|
37 |
} |
|
38 |
|
|
39 |
@Test |
|
40 |
public void testParseAnd() { |
|
41 |
DBObject expected = new BasicDBObject("$and", Lists.newArrayList(new BasicDBObject("set", new BasicDBObject("$ne", "CEDIASManuscripts")), |
|
42 |
new BasicDBObject("pippo", new BasicDBObject("$gt", "x")))); |
|
43 |
DBObject o = this.mongoParser.parse("set <> \"CEDIASManuscripts\" AND pippo > x"); |
|
44 |
log.info(o); |
|
45 |
assertEquals(expected, o); |
|
46 |
} |
|
47 |
|
|
48 |
@Test |
|
49 |
public void testParseOr() { |
|
50 |
DBObject expected = new BasicDBObject("$or", Lists.newArrayList(new BasicDBObject("set", new BasicDBObject("$ne", "CEDIASManuscripts")), |
|
51 |
new BasicDBObject("pippo", new BasicDBObject("$gt", "x")))); |
|
52 |
DBObject o = this.mongoParser.parse("set <> \"CEDIASManuscripts\" OR pippo > x"); |
|
53 |
log.info(o); |
|
54 |
assertEquals(expected, o); |
|
55 |
} |
|
56 |
|
|
57 |
@Test |
|
58 |
public void testParseNot() { |
|
59 |
DBObject expected = new BasicDBObject("$and", Lists.newArrayList(new BasicDBObject("set", "CEDIASManuscripts"), new BasicDBObject("$not", |
|
60 |
new BasicDBObject("pippo", new BasicDBObject("$gt", "x"))))); |
|
61 |
DBObject o = this.mongoParser.parse("set = \"CEDIASManuscripts\" NOT pippo > x"); |
|
62 |
log.info(o); |
|
63 |
assertEquals(expected, o); |
|
64 |
} |
|
65 |
|
|
66 |
@Test |
|
67 |
public void testParseStar() { |
|
68 |
DBObject expected = new BasicDBObject(); |
|
69 |
DBObject o = this.mongoParser.parse("*"); |
|
70 |
DBObject o2 = this.mongoParser.parse("*=*"); |
|
71 |
assertEquals(expected, o); |
|
72 |
assertEquals(expected, o2); |
|
73 |
} |
|
74 |
|
|
75 |
@Test |
|
76 |
public void testParseStarAnd() { |
|
77 |
DBObject expected = new BasicDBObject("$and", Lists.newArrayList(new BasicDBObject(), new BasicDBObject("pippo", new BasicDBObject("$gt", "x")))); |
|
78 |
DBObject o = this.mongoParser.parse("* AND pippo > x"); |
|
79 |
DBObject o2 = this.mongoParser.parse("*=* AND pippo > x"); |
|
80 |
assertEquals(expected, o); |
|
81 |
assertEquals(expected, o2); |
|
82 |
} |
|
83 |
|
|
84 |
@Test |
|
85 |
public void testParseIdQuery() { |
|
86 |
DBObject expected = new BasicDBObject("_id", new BasicDBObject("$gt", new ObjectId("5225e093aabf055637bf2c65"))); |
|
87 |
DBObject o = this.mongoParser.parse("_id > 5225e093aabf055637bf2c65"); |
|
88 |
assertEquals(expected, o); |
|
89 |
} |
|
90 |
|
|
91 |
@Test |
|
92 |
public void testParseUntilDatestamp() { |
|
93 |
Date dateTime = this.parseDate("2014-04-02T00:00:00.000Z"); |
|
94 |
// { "$and" : [ { "oaftype" : { "$ne" : "person"}} , { "datestamp" : { "$lte" : { "$date" : "2014-04-02T00:00:00.000Z"}}}]} |
|
95 |
DBObject expected = new BasicDBObject("$and", Lists.newArrayList(new BasicDBObject("oaftype", new BasicDBObject("$ne", "person")), new BasicDBObject( |
|
96 |
"datestamp", new BasicDBObject("$lte", dateTime)))); |
|
97 |
// System.out.println(expected); |
|
98 |
DBObject o = this.mongoParser.parse("(oaftype <> \"person\") AND datestamp <= 2014-04-02"); |
|
99 |
assertEquals(expected, o); |
|
100 |
// System.out.println(o); |
|
101 |
} |
|
102 |
|
|
103 |
private Date parseDate(final String date) { |
|
104 |
DateTimeFormatter dateNoTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd").withZoneUTC(); |
|
105 |
DateTimeFormatter iso8601NoMsTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ").withZoneUTC(); |
|
106 |
DateTimeFormatter iso8601Formatter = ISODateTimeFormat.dateTime().withZoneUTC(); |
|
107 |
try { |
|
108 |
log.debug("Using default " + iso8601Formatter.getClass()); |
|
109 |
return iso8601Formatter.parseDateTime(date).toDate(); |
|
110 |
} catch (Exception e) { |
|
111 |
try { |
|
112 |
log.debug("Switching to ISO with no millisecond date formatter: yyyy-MM-dd'T'HH:mm:ssZ"); |
|
113 |
return iso8601NoMsTimeFormatter.parseDateTime(date).toDate(); |
|
114 |
} catch (Exception ex) { |
|
115 |
log.debug("Switching to simple date formatter: yyyy-MM-dd"); |
|
116 |
return dateNoTimeFormatter.parseDateTime(date).toDate(); |
|
117 |
} |
|
118 |
} |
|
119 |
} |
|
120 |
|
|
121 |
} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/test/java/eu/dnetlib/data/oai/store/sets/MongoSetCollectionTest.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.sets; |
|
2 |
|
|
3 |
import static org.junit.Assert.assertEquals; |
|
4 |
|
|
5 |
import org.junit.Before; |
|
6 |
import org.junit.Test; |
|
7 |
|
|
8 |
public class MongoSetCollectionTest { |
|
9 |
|
|
10 |
private MongoSetCollection mongoSetCollection; |
|
11 |
private String strangeSet = "Наукові журнали Національного Авіаційного Університету"; |
|
12 |
|
|
13 |
@Before |
|
14 |
public void setup() { |
|
15 |
this.mongoSetCollection = new MongoSetCollection(); |
|
16 |
} |
|
17 |
|
|
18 |
@Test |
|
19 |
public void test() { |
|
20 |
String normalised = this.mongoSetCollection.normalizeSetSpec(strangeSet); |
|
21 |
assertEquals(MongoSetCollection.DEFAULT_SET, normalised); |
|
22 |
} |
|
23 |
|
|
24 |
} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/test/resources/eu/dnetlib/test/profiles/OAIPublisherConfigurationDSResources/OAIPublisherConfigurationDSResourceType/OAIPublisherConfiguration-1.xml | ||
---|---|---|
1 |
<RESOURCE_PROFILE> |
|
2 |
<HEADER> |
|
3 |
<RESOURCE_IDENTIFIER value="OAIPublisherConfiguration" /> |
|
4 |
<RESOURCE_TYPE value="OAIPublisherConfigurationDSResourceType" /> |
|
5 |
<RESOURCE_KIND value="OAIPublisherConfigurationDSResources" /> |
|
6 |
<RESOURCE_URI value="" /> |
|
7 |
<DATE_OF_CREATION value="2001-12-31T12:00:00" /> |
|
8 |
</HEADER> |
|
9 |
<BODY> |
|
10 |
<CONFIGURATION> |
|
11 |
<OAISETS> |
|
12 |
<OAISET enabled="true"> |
|
13 |
<spec>OpenAccess</spec> |
|
14 |
<name>Set of Open Access articles</name> |
|
15 |
<description>Set of records having 'OPEN' license</description> |
|
16 |
<query>(license = "OPEN")</query> |
|
17 |
</OAISET> |
|
18 |
<OAISET enabled="true"> |
|
19 |
<spec>ArticlesInNature</spec> |
|
20 |
<name>Articles published by Nature</name> |
|
21 |
<description>Set of articles published by Nature Publishing Group</description> |
|
22 |
<query>(publisher = "Nature Publishing Group")</query> |
|
23 |
</OAISET> |
|
24 |
<OAISET enabled="true"> |
|
25 |
<spec>publications</spec> |
|
26 |
<name>Publications</name> |
|
27 |
<description>Set of all Publications</description> |
|
28 |
<query>resulttypeid="publication"</query> |
|
29 |
</OAISET> |
|
30 |
</OAISETS> |
|
31 |
<METADATAFORMATS> |
|
32 |
<METADATAFORMAT exportable="true" metadataPrefix="oaf"> |
|
33 |
<NAMESPACE>http://namespace.openaire.eu/oaf</NAMESPACE> |
|
34 |
<SCHEMA>http://www.openaire.eu/schema/0.1/oaf-0.1.xsd</SCHEMA> |
|
35 |
<SOURCE_METADATA_FORMAT interpretation="openaire" layout="index" name="oaf"/> |
|
36 |
<TRANSFORMATION_RULE/> |
|
37 |
<BASE_QUERY>*</BASE_QUERY> |
|
38 |
</METADATAFORMAT> |
|
39 |
<METADATAFORMAT metadataPrefix="oai_dc" exportable="false"> |
|
40 |
<NAMESPACE>http://www.openarchives.org/OAI/2.0/oai_dc/</NAMESPACE> |
|
41 |
<SCHEMA>http://www.openarchives.org/OAI/2.0/oai_dc.xsd</SCHEMA> |
|
42 |
<SOURCE_METADATA_FORMAT interpretation="openaire" layout="index" name="oaf"/> |
|
43 |
<TRANSFORMATION_RULE>oaf2dc_VHJhbnNmb3JtYXRpb25SdWxlRFNSZXNvdXJjZXMvVHJhbnNmb3JtYXRpb25SdWxlRFNSZXNvdXJjZVR5cGU=</TRANSFORMATION_RULE> |
|
44 |
<BASE_QUERY>oaftype="result"</BASE_QUERY> |
|
45 |
</METADATAFORMAT> |
|
46 |
</METADATAFORMATS> |
|
47 |
<INDICES> |
|
48 |
<INDEX name="objIdentifier" repeatable="false"> |
|
49 |
<SOURCE name="oaf" layout="index" interpretation="openaire" path="//*[local-name() ='objIdentifier']"/> |
|
50 |
</INDEX> |
|
51 |
<INDEX name="set" repeatable="true"> |
|
52 |
<SOURCE name="oaf" layout="index" interpretation="openaire" path="//collectedfrom/@name"/> |
|
53 |
</INDEX> |
|
54 |
<INDEX name="publisher" repeatable="true"> |
|
55 |
<SOURCE name="oaf" layout="index" interpretation="openaire" path="//publisher"/> |
|
56 |
</INDEX> |
|
57 |
<INDEX name="license" repeatable="false"> |
|
58 |
<SOURCE name="oaf" layout="index" interpretation="openaire" path="//bestlicense/@classid"/> |
|
59 |
</INDEX> |
|
60 |
<INDEX name="oaftype" repeatable="false"> |
|
61 |
<SOURCE name="oaf" layout="index" interpretation="openaire" path="local-name(//*[local-name()='entity']/*)"/> |
|
62 |
</INDEX> |
|
63 |
<INDEX name="resulttypeid" repeatable="false"> |
|
64 |
<SOURCE name="oaf" layout="index" interpretation="openaire" path="//*[local-name()='entity']/*[local-name()='result']/resulttype/@classid"/> |
|
65 |
</INDEX> |
|
66 |
</INDICES> |
|
67 |
</CONFIGURATION> |
|
68 |
<STATUS> |
|
69 |
<LAST_UPDATE value="2001-12-31T12:00:00" /> |
|
70 |
</STATUS> |
|
71 |
<SECURITY_PARAMETERS>SECURITY_PARAMETERS</SECURITY_PARAMETERS> |
|
72 |
</BODY> |
|
73 |
</RESOURCE_PROFILE> |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/test/resources/log4j.properties | ||
---|---|---|
1 |
org.apache.cxf.Logger=org.apache.cxf.common.logging.Log4jLogger |
|
2 |
|
|
3 |
log4j.rootLogger=WARN, CONSOLE |
|
4 |
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender |
|
5 |
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout |
|
6 |
log4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n |
|
7 |
|
|
8 |
log4j.logger.eu.dnetlib=INFO |
|
9 |
log4j.logger.eu.dnetlib.data=DEBUG |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/mongo/RecordInfoGenerator.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.mongo; |
|
2 |
|
|
3 |
import java.io.StringReader; |
|
4 |
import java.util.Date; |
|
5 |
import java.util.List; |
|
6 |
|
|
7 |
import javax.annotation.Resource; |
|
8 |
|
|
9 |
import org.apache.commons.lang.StringEscapeUtils; |
|
10 |
import org.dom4j.Document; |
|
11 |
import org.dom4j.DocumentException; |
|
12 |
import org.dom4j.io.SAXReader; |
|
13 |
|
|
14 |
import com.google.common.collect.Sets; |
|
15 |
import com.mongodb.DBObject; |
|
16 |
|
|
17 |
import eu.dnetlib.data.information.oai.publisher.OaiPublisherRuntimeException; |
|
18 |
import eu.dnetlib.data.information.oai.publisher.conf.OAIConfigurationReader; |
|
19 |
import eu.dnetlib.data.information.oai.publisher.info.RecordInfo; |
|
20 |
|
|
21 |
/** |
|
22 |
* Helper class to generate a RecordInfo object from a Mongo DBObject. |
|
23 |
* |
|
24 |
* @author alessia |
|
25 |
* |
|
26 |
*/ |
|
27 |
public class RecordInfoGenerator { |
|
28 |
|
|
29 |
@Resource |
|
30 |
private MetadataExtractor metadataExtractor; |
|
31 |
@Resource |
|
32 |
private ProvenanceExtractor provenanceExtractor; |
|
33 |
|
|
34 |
@SuppressWarnings("unchecked") |
|
35 |
public RecordInfo transformDBObject(final DBObject object, final boolean includeBody) { |
|
36 |
if ((object == null) || object.keySet().isEmpty()) return null; |
|
37 |
String id = (String) object.get(OAIConfigurationReader.ID_FIELD); |
|
38 |
// need to escape the identifier, otherwise the XML breaks |
|
39 |
id = StringEscapeUtils.escapeXml(id); |
|
40 |
boolean deleted = (Boolean) object.get("deleted"); |
|
41 |
RecordInfo record = new RecordInfo(); |
|
42 |
record.setIdentifier(id); |
|
43 |
record.setInternalId(object.get("_id").toString()); |
|
44 |
record.setDatestamp((Date) object.get(OAIConfigurationReader.DATESTAMP_FIELD)); |
|
45 |
record.setDeleted(deleted); |
|
46 |
List<String> sets = (List<String>) object.get(OAIConfigurationReader.SET_FIELD); |
|
47 |
if (sets != null) { |
|
48 |
record.setSetspecs(Sets.newHashSet(sets)); |
|
49 |
} |
|
50 |
if (includeBody && !deleted) { |
|
51 |
String body = (String) object.get(OAIConfigurationReader.BODY_FIELD); |
|
52 |
final SAXReader reader = new SAXReader(); |
|
53 |
Document doc; |
|
54 |
try { |
|
55 |
doc = reader.read(new StringReader(body)); |
|
56 |
record.setMetadata(this.metadataExtractor.evaluate(doc)); |
|
57 |
record.setProvenance(this.provenanceExtractor.evaluate(doc)); |
|
58 |
} catch (DocumentException e) { |
|
59 |
throw new OaiPublisherRuntimeException(e); |
|
60 |
} |
|
61 |
} |
|
62 |
return record; |
|
63 |
|
|
64 |
} |
|
65 |
|
|
66 |
public MetadataExtractor getMetadataExtractor() { |
|
67 |
return metadataExtractor; |
|
68 |
} |
|
69 |
|
|
70 |
public void setMetadataExtractor(final MetadataExtractor metadataExtractor) { |
|
71 |
this.metadataExtractor = metadataExtractor; |
|
72 |
} |
|
73 |
|
|
74 |
public ProvenanceExtractor getProvenanceExtractor() { |
|
75 |
return provenanceExtractor; |
|
76 |
} |
|
77 |
|
|
78 |
public void setProvenanceExtractor(final ProvenanceExtractor provenanceExtractor) { |
|
79 |
this.provenanceExtractor = provenanceExtractor; |
|
80 |
} |
|
81 |
} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/mongo/MongoPublisherStore.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.mongo; |
|
2 |
|
|
3 |
import java.util.Collection; |
|
4 |
import java.util.Date; |
|
5 |
import java.util.List; |
|
6 |
import java.util.concurrent.ArrayBlockingQueue; |
|
7 |
import java.util.concurrent.BlockingQueue; |
|
8 |
import java.util.concurrent.TimeUnit; |
|
9 |
|
|
10 |
import org.apache.commons.lang.StringUtils; |
|
11 |
import org.apache.commons.logging.Log; |
|
12 |
import org.apache.commons.logging.LogFactory; |
|
13 |
|
|
14 |
import com.google.common.base.Function; |
|
15 |
import com.google.common.base.Predicate; |
|
16 |
import com.google.common.base.Stopwatch; |
|
17 |
import com.google.common.collect.Iterables; |
|
18 |
import com.google.common.collect.Multimap; |
|
19 |
import com.mongodb.BasicDBObject; |
|
20 |
import com.mongodb.BasicDBObjectBuilder; |
|
21 |
import com.mongodb.DBCollection; |
|
22 |
import com.mongodb.DBCursor; |
|
23 |
import com.mongodb.DBObject; |
|
24 |
import com.mongodb.WriteConcern; |
|
25 |
import com.mongodb.WriteResult; |
|
26 |
|
|
27 |
import eu.dnetlib.data.information.oai.publisher.PublisherField; |
|
28 |
import eu.dnetlib.data.information.oai.publisher.conf.OAIConfigurationReader; |
|
29 |
import eu.dnetlib.data.information.oai.publisher.info.RecordInfo; |
|
30 |
import eu.dnetlib.data.information.oai.publisher.info.SetInfo; |
|
31 |
import eu.dnetlib.data.oai.store.PublisherStore; |
|
32 |
import eu.dnetlib.data.oai.store.RecordChangeDetector; |
|
33 |
import eu.dnetlib.data.oai.store.parser.MongoQueryParser; |
|
34 |
import eu.dnetlib.data.oai.store.parser.PublisherRecordParser; |
|
35 |
import eu.dnetlib.data.oai.store.sets.MongoSetCollection; |
|
36 |
import eu.dnetlib.miscutils.functional.UnaryFunction; |
|
37 |
|
|
38 |
public class MongoPublisherStore implements PublisherStore<MongoCursor> { |
|
39 |
|
|
40 |
private static final Log log = LogFactory.getLog(MongoPublisherStore.class); // NOPMD by marko on 11/24/08 5:02 PM

// Store identity: id of this OAI store plus the metadata format / interpretation / layout it serves.
private String id, metadataFormat, interpretation, layout;
/** Keeps information about the fields to be created in mongo. **/
private List<PublisherField> mongoFields;

// Main record collection and the collection holding records discarded during feeding.
private DBCollection collection;
private DBCollection discardedCollection;

// Converts raw Mongo objects into RecordInfo instances.
private RecordInfoGenerator recordInfoGenerator;
// Extracts the metadata part of a record body (passed to MongoCursor).
private MetadataExtractor metadataExtractor;

// Translates the textual OAI query syntax into Mongo query objects.
private MongoQueryParser queryParser;

// Decides whether an incoming record differs from the stored one.
private RecordChangeDetector recordChangeDetector;

// Manages OAI sets; also used to normalize setSpec values during feeding.
private MongoSetCollection mongoSetCollection;

/**
 * Used to generate the OAI identifiers compliant to the protocol. See
 * http://www.openarchives.org/OAI/openarchivesprotocol.html#UniqueIdentifier.
 */
private String idScheme;
/**
 * Used to generate the OAI identifiers compliant to the protocol. See
 * http://www.openarchives.org/OAI/openarchivesprotocol.html#UniqueIdentifier.
 */
private String idNamespace;

// Presumably forces every fed record to be treated as new — TODO confirm (not used in the visible chunk).
private boolean alwaysNewRecord;
|
70 |
|
|
71 |
@Override |
|
72 |
public RecordInfo getRecord(final String recordId) { |
|
73 |
DBObject query = new BasicDBObject(OAIConfigurationReader.ID_FIELD, recordId); |
|
74 |
DBObject result = this.collection.findOne(query); |
|
75 |
log.debug(result); |
|
76 |
return this.recordInfoGenerator.transformDBObject(result, true); |
|
77 |
} |
|
78 |
|
|
79 |
@Override |
|
80 |
public RecordInfo getRecord(final String recordId, final UnaryFunction<String, String> unaryFunction) { |
|
81 |
RecordInfo result = this.getRecord(recordId); |
|
82 |
if (result != null) { |
|
83 |
String transformedBody = unaryFunction.evaluate(result.getMetadata()); |
|
84 |
result.setMetadata(transformedBody); |
|
85 |
} |
|
86 |
return result; |
|
87 |
} |
|
88 |
|
|
89 |
@Override |
|
90 |
public MongoCursor getRecords(final String queryString, final boolean bodyIncluded, final int limit) { |
|
91 |
DBCursor cursor = loggedFindByQuery(queryString, limit); |
|
92 |
return new MongoCursor(cursor, bodyIncluded, this.recordInfoGenerator, this.metadataExtractor); |
|
93 |
} |
|
94 |
|
|
95 |
@Override |
|
96 |
public MongoCursor getRecords(final String queryString, final UnaryFunction<String, String> unaryFunction, final boolean bodyIncluded, final int limit) { |
|
97 |
DBCursor cursor = loggedFindByQuery(queryString, limit); |
|
98 |
return new MongoCursor(cursor, unaryFunction, bodyIncluded, this.recordInfoGenerator, this.metadataExtractor); |
|
99 |
} |
|
100 |
|
|
101 |
private DBCursor loggedFindByQuery(final String queryString, final int limit) { |
|
102 |
DBObject query = this.queryParser.parse(queryString); |
|
103 |
long start = System.currentTimeMillis(); |
|
104 |
DBCursor cursor = this.collection.find(query).sort(new BasicDBObject("_id", 1)).limit(limit); |
|
105 |
long end = System.currentTimeMillis(); |
|
106 |
log.debug("Query:" + query + "\ntime to get mongo cursor (ms): " + (end - start)); |
|
107 |
return cursor; |
|
108 |
} |
|
109 |
|
|
110 |
@Override
public List<PublisherField> getIndices() {
	// The configured publisher fields double as the set of indexed fields.
	return this.mongoFields;
}
|
114 |
|
|
115 |
/** |
|
116 |
* <p> |
|
117 |
* Ensure indices on the configuration-defined fields and on the system fields DATESTAMP_FIELD and LAST_COLLECTION_DATE_FIELD. |
|
118 |
* <p> |
|
119 |
* <p> |
|
120 |
* Note that by default ID_FIELD, SET_FIELD, DELETED_FIELD, BODY_FIELD, UPDATED_FIELD are not indexed. If you want an index on those, |
|
121 |
* then you have to specify it in the configuration file of the OAI Publisher: <br> |
|
122 |
* <INDEX name="deleted"> |
|
123 |
* </p> |
|
124 |
* |
|
125 |
* {@inheritDoc} |
|
126 |
* |
|
127 |
* @see eu.dnetlib.data.information.oai.publisher.store.PublisherStore#ensureIndices() |
|
128 |
*/ |
|
129 |
@Override |
|
130 |
public void ensureIndices() { |
|
131 |
List<DBObject> indexInfoList = this.collection.getIndexInfo(); |
|
132 |
Stopwatch sw = new Stopwatch(); |
|
133 |
sw.start(); |
|
134 |
// I want to keep the composite indexes that might have been defined manually |
|
135 |
log.debug("Ensuring currently defined composite indexes:"); |
|
136 |
DBObject indexOptions = new BasicDBObject("background", true); |
|
137 |
for (DBObject o : indexInfoList) { |
|
138 |
DBObject fieldIndexed = (DBObject) o.get("key"); |
|
139 |
if (fieldIndexed.keySet().size() > 1) { |
|
140 |
log.debug(o); |
|
141 |
this.collection.createIndex(fieldIndexed, indexOptions); |
|
142 |
} |
|
143 |
} |
|
144 |
// Indexes on single fields. |
|
145 |
for (PublisherField field : this.mongoFields) { |
|
146 |
DBObject mongoIdx = new BasicDBObject(field.getFieldName(), 1); |
|
147 |
log.debug("Creating index : " + mongoIdx); |
|
148 |
this.collection.createIndex(mongoIdx, indexOptions); |
|
149 |
} |
|
150 |
log.debug("Creating index over : " + OAIConfigurationReader.DATESTAMP_FIELD); |
|
151 |
this.collection.createIndex(new BasicDBObject(OAIConfigurationReader.DATESTAMP_FIELD, 1), indexOptions); |
|
152 |
log.debug("Creating index over : " + OAIConfigurationReader.LAST_COLLECTION_DATE_FIELD); |
|
153 |
this.collection.createIndex(new BasicDBObject(OAIConfigurationReader.LAST_COLLECTION_DATE_FIELD, 1), indexOptions); |
|
154 |
sw.stop(); |
|
155 |
log.info("All indexes have been updated in " + sw.elapsed(TimeUnit.MILLISECONDS) + " milliseconds"); |
|
156 |
} |
|
157 |
|
|
158 |
/** |
|
159 |
* Creates a compound index over the specified fields on the given store. |
|
160 |
* <p> |
|
161 |
* The creation is performed on the background |
|
162 |
* </p> |
|
163 |
* |
|
164 |
* @param fieldNames |
|
165 |
* List of fields to be included in the compound index |
|
166 |
* @theStore MongoPublisherStore where to create the index |
|
167 |
*/ |
|
168 |
public void createCompoundIndex(final List<String> fieldNames) { |
|
169 |
if ((fieldNames == null) || fieldNames.isEmpty()) { |
|
170 |
log.fatal("No fields specified for the creation of the compound index"); |
|
171 |
} |
|
172 |
DBObject indexOptions = new BasicDBObject("background", true); |
|
173 |
BasicDBObjectBuilder theIndexBuilder = BasicDBObjectBuilder.start(); |
|
174 |
for (String f : fieldNames) { |
|
175 |
theIndexBuilder.add(f, 1); |
|
176 |
} |
|
177 |
DBObject theIndex = theIndexBuilder.get(); |
|
178 |
log.info("Creating index " + theIndex + " on " + this.getId()); |
|
179 |
this.getCollection().createIndex(theIndex, indexOptions); |
|
180 |
} |
|
181 |
|
|
182 |
private void dropDiscarded(final String source) { |
|
183 |
if (StringUtils.isBlank(source)) { |
|
184 |
log.debug("Dropping discarded records from publisherStore " + id); |
|
185 |
discardedCollection.drop(); |
|
186 |
} else { |
|
187 |
log.debug("Dropping discarded records for source " + source + " from publisherStore " + id); |
|
188 |
discardedCollection.remove(new BasicDBObject(OAIConfigurationReader.SET_FIELD, source)); |
|
189 |
} |
|
190 |
} |
|
191 |
|
|
192 |
@Override |
|
193 |
public int feed(final Iterable<String> records, final String source) { |
|
194 |
final BlockingQueue<Object> queue = new ArrayBlockingQueue<Object>(80); |
|
195 |
final Object sentinel = new Object(); |
|
196 |
this.dropDiscarded(source); |
|
197 |
final Date feedDate = new Date(); |
|
198 |
Thread background = new Thread(new Runnable() { |
|
199 |
|
|
200 |
@Override |
|
201 |
public void run() { |
|
202 |
while (true) { |
|
203 |
try { |
|
204 |
Object record = queue.take(); |
|
205 |
if (record == sentinel) { |
|
206 |
break; |
|
207 |
} |
|
208 |
safeFeedRecord((String) record, source, feedDate); |
|
209 |
} catch (InterruptedException e) { |
|
210 |
log.fatal("got exception in background thread", e); |
|
211 |
throw new IllegalStateException(e); |
|
212 |
} |
|
213 |
} |
|
214 |
} |
|
215 |
}); |
|
216 |
background.start(); |
|
217 |
long startFeed = feedDate.getTime(); |
|
218 |
try { |
|
219 |
log.info("feeding publisherStore " + id); |
|
220 |
for (final String record : records) { |
|
221 |
queue.put(record); |
|
222 |
} |
|
223 |
queue.put(sentinel); |
|
224 |
log.info("finished feeding publisherStore " + id); |
|
225 |
|
|
226 |
background.join(); |
|
227 |
} catch (InterruptedException e) { |
|
228 |
throw new IllegalStateException(e); |
|
229 |
} |
|
230 |
long endFeed = System.currentTimeMillis(); |
|
231 |
log.fatal("OAI STORE " + id + " FEEDING COMPLETED IN " + (endFeed - startFeed) + "ms"); |
|
232 |
this.setDeletedFlags(feedDate, source); |
|
233 |
return this.count(); |
|
234 |
} |
|
235 |
|
|
236 |
/**
 * Launches the thread that flags the records to be considered as 'deleted'.
 * <p>
 * The datestamp of the deleted records must be updated as well, according to the OAI specs available at
 * http://www.openarchives.org/OAI/openarchivesprotocol.html#DeletedRecords: if a repository does keep track of deletions then the
 * datestamp of the deleted record must be the date and time that it was deleted.
 * </p>
 *
 * @param feedDate
 *            the date of the feed round just completed
 * @param source
 *            the source whose records are affected; blank means all sources
 */
private void setDeletedFlags(final Date feedDate, final String source) {
	Thread deletedSetter = new Thread(new Runnable() {

		@Override
		public void run() {
			// Records not touched by this feed round (last collection date older
			// than feedDate) are the ones that disappeared from the source.
			DBObject query = BasicDBObjectBuilder.start(OAIConfigurationReader.DELETED_FIELD, false)
					.append(OAIConfigurationReader.LAST_COLLECTION_DATE_FIELD, new BasicDBObject("$lt", feedDate)).get();
			if (!StringUtils.isBlank(source)) {
				query.put(OAIConfigurationReader.SET_FIELD, source);
			}
			log.debug("Delete flag query: " + query.toString());
			// Per the OAI specs the datestamp of a deleted record is its deletion time.
			DBObject update = new BasicDBObject("$set", BasicDBObjectBuilder.start(OAIConfigurationReader.DELETED_FIELD, true)
					.append(OAIConfigurationReader.DATESTAMP_FIELD, feedDate).append(OAIConfigurationReader.UPDATED_FIELD, true).get());
			log.debug("Updating as: " + update.toString());
			// upsert=false, multi=true: flag every matching record in one round trip.
			WriteResult wr = collection.update(query, update, false, true);
			log.debug("Deleted flags set for source: " + source + " #records = " + wr.getN());
		}
	});

	deletedSetter.start();
	// NOTE(review): the thread is joined immediately, so this effectively runs
	// synchronously on the caller's thread.
	try {
		deletedSetter.join();
	} catch (InterruptedException e) {
		throw new IllegalStateException(e);
	}
}
|
273 |
|
|
274 |
@Override
public void drop() {
	// Drops the whole backing collection, i.e. every record of this store.
	this.collection.drop();
}
|
278 |
|
|
279 |
@Override |
|
280 |
public void drop(final String queryString) { |
|
281 |
DBObject query = this.queryParser.parse(queryString); |
|
282 |
this.collection.remove(query); |
|
283 |
} |
|
284 |
|
|
285 |
@Override
public int count() {
	// NOTE(review): narrowing cast — would overflow for stores holding more
	// than Integer.MAX_VALUE records; the interface dictates an int return.
	return (int) this.collection.count();
}
|
289 |
|
|
290 |
@Override |
|
291 |
public int count(final String queryString) { |
|
292 |
if (StringUtils.isBlank(queryString)) return (int) this.collection.count(); |
|
293 |
DBObject query = this.queryParser.parse(queryString); |
|
294 |
return (int) this.collection.count(query); |
|
295 |
} |
|
296 |
|
|
297 |
/**
 * Scans the whole collection for the distinct values of the set field.
 *
 * @return the distinct set names occurring in the stored records
 */
public List<String> getDistinctSetNamesFromRecords() {
	log.info("Going to ask for all distinct sets in the oaistore " + id + ": this may take a long time...");
	@SuppressWarnings("unchecked")
	List<String> distinctSets = this.collection.distinct(OAIConfigurationReader.SET_FIELD);
	return distinctSets;
}
|
303 |
|
|
304 |
// ***********************************************************************************************// |
|
305 |
// Feed utilities |
|
306 |
// ***********************************************************************************************// |
|
307 |
/**
 * Feeds a single record without ever letting a failure kill the feed: on any
 * error the raw record is stored in the discarded collection instead.
 *
 * @return true if the record was fed as a new record, false otherwise
 */
private boolean safeFeedRecord(final String record, final String source, final Date feedDate) {
	try {
		// empty payloads are silently skipped
		if (!record.isEmpty()) return feedRecord(record, source, feedDate);
	} catch (final Throwable e) {
		// deliberate catch-all: one broken record must not stop the whole feed
		log.error("Got unhandled exception while parsing record", e);
		discardedCollection.insert(new BasicDBObject(OAIConfigurationReader.SET_FIELD, source).append(OAIConfigurationReader.BODY_FIELD, record));
	}
	return false;
}
|
316 |
|
|
317 |
/**
 * Feed the record to the store.
 * <p>
 * The record is parsed into its configured properties; a record without the
 * configured identifier field is discarded. New records are inserted,
 * changed ones updated, unchanged ones only get their last collection date
 * refreshed.
 * </p>
 *
 * @return true if the record is new, false otherwise
 */
private boolean feedRecord(final String record, final String source, final Date feedDate) {
	PublisherRecordParser parser = new PublisherRecordParser(this.mongoFields);
	final Multimap<String, String> recordProperties = parser.parseRecord(record);
	String id = "";
	String oaiID = "";
	if (recordProperties.containsKey(OAIConfigurationReader.ID_FIELD)) {
		// first parsed value of the identifier field becomes the internal id
		id = recordProperties.get(OAIConfigurationReader.ID_FIELD).iterator().next();
		// OAI identifier built from the internal id (see getOAIIdentifier, defined elsewhere)
		oaiID = getOAIIdentifier(id);
		if (isNewRecord(oaiID)) {
			feedNew(oaiID, record, recordProperties, feedDate);
			return true;
		} else {
			if (isChanged(oaiID, record)) {
				updateRecord(oaiID, record, recordProperties, feedDate);
			} else {
				// it is not changed, I only have to update the last collection date
				handleRecord(oaiID, feedDate);
			}
		}
	} else {
		// no identifier: the record cannot be served, park it among the discarded
		log.error("parsed record seems invalid -- no identifier property with name: " + OAIConfigurationReader.ID_FIELD);
		discardedCollection.insert(new BasicDBObject(OAIConfigurationReader.SET_FIELD, source).append(OAIConfigurationReader.BODY_FIELD, record).append(
				OAIConfigurationReader.DATESTAMP_FIELD, feedDate));
	}
	return false;
}
|
348 |
|
|
349 |
/**
 * Builds the Mongo object for a record from its parsed properties: the ID
 * field is replaced with the OAI identifier, set names are normalized,
 * non-repeatable fields are stored as single values, and the raw record
 * body plus a deleted=false flag are attached.
 */
private DBObject createBasicObject(final String oaiID, final String record, final Multimap<String, String> recordProperties) {
	DBObject obj = new BasicDBObject();
	for (final String key : recordProperties.keySet()) {
		if (key.equals(OAIConfigurationReader.ID_FIELD)) {
			obj.put(key, oaiID);
		} else {
			Collection<String> values = recordProperties.get(key);
			if (key.equals(OAIConfigurationReader.SET_FIELD)) {

				// NOTE(review): a *lazy* Iterable view is stored into the object —
				// presumably the driver materializes it on write; verify.
				Iterable<String> setSpecs = Iterables.transform(values, new Function<String, String>() {

					@Override
					public String apply(final String s) {
						return mongoSetCollection.normalizeSetSpec(s);
					}

				});
				obj.put(key, setSpecs);
			} else {
				// let's check if the key is the name of a repeatable field or not
				PublisherField keyField = Iterables.find(this.mongoFields, new Predicate<PublisherField>() {

					@Override
					public boolean apply(final PublisherField field) {
						return field.getFieldName().equals(key);
					}
				}, null);
				if (keyField == null) {
					log.warn("Expected field to index: " + key + " could not be found, but we keep going...");
				}
				if ((keyField != null) && !keyField.isRepeatable()) {
					// non-repeatable field: keep only the first value, if any
					if ((values != null) && !values.isEmpty()) {
						obj.put(key, values.iterator().next());
					}
				} else {
					obj.put(key, values);
				}
			}
		}
	}
	obj.put(OAIConfigurationReader.BODY_FIELD, record);
	obj.put(OAIConfigurationReader.DELETED_FIELD, false);
	return obj;
}
|
393 |
|
|
394 |
	/**
	 * Inserts a brand new record into the OAI store.
	 * <p>
	 * Both the datestamp and the last-collection-date are set to {@code feedDate}, and the record is marked
	 * as not updated. The insert uses {@code WriteConcern.UNACKNOWLEDGED}, so write errors are not reported
	 * back to the caller. The record's sets are upserted as OAI sets as a side effect.
	 *
	 * @param oaiID            OAI identifier assigned to the record
	 * @param record           the record body (XML)
	 * @param recordProperties indexable properties extracted from the record
	 * @param feedDate         date of the current feeding run
	 */
	private void feedNew(final String oaiID, final String record, final Multimap<String, String> recordProperties, final Date feedDate) {
		log.debug("New record received. Assigned oai id: " + oaiID);
		DBObject obj = this.createBasicObject(oaiID, record, recordProperties);
		obj.put(OAIConfigurationReader.LAST_COLLECTION_DATE_FIELD, feedDate);
		obj.put(OAIConfigurationReader.DATESTAMP_FIELD, feedDate);
		obj.put(OAIConfigurationReader.UPDATED_FIELD, false);
		collection.insert(obj, WriteConcern.UNACKNOWLEDGED);
		this.upsertSets(recordProperties.get(OAIConfigurationReader.SET_FIELD));
	}
|
403 |
|
|
404 |
	/**
	 * Replaces a stored record whose body has changed.
	 * <p>
	 * The whole document is rebuilt and written with an upsert (update with {@code upsert=true, multi=false}),
	 * matching on the OAI id; the record is marked as updated and both dates are refreshed to {@code feedDate}.
	 * The record's sets are upserted as OAI sets as a side effect.
	 *
	 * @param oaiID            OAI identifier of the record to replace
	 * @param record           the new record body (XML)
	 * @param recordProperties indexable properties extracted from the new record
	 * @param feedDate         date of the current feeding run
	 */
	private void updateRecord(final String oaiID, final String record, final Multimap<String, String> recordProperties, final Date feedDate) {
		log.debug("updating record " + oaiID);
		DBObject obj = this.createBasicObject(oaiID, record, recordProperties);
		obj.put(OAIConfigurationReader.LAST_COLLECTION_DATE_FIELD, feedDate);
		obj.put(OAIConfigurationReader.DATESTAMP_FIELD, feedDate);
		obj.put(OAIConfigurationReader.UPDATED_FIELD, true);
		DBObject oldObj = new BasicDBObject(OAIConfigurationReader.ID_FIELD, oaiID);
		// Full-document replacement: the new object overwrites the stored one entirely.
		collection.update(oldObj, obj, true, false);
		this.upsertSets(recordProperties.get(OAIConfigurationReader.SET_FIELD));
	}
|
414 |
|
|
415 |
public void upsertSets(final Iterable<String> setNames) { |
|
416 |
// feed the list of sets, if any |
|
417 |
if (setNames != null) { |
|
418 |
for (String setName : setNames) { |
|
419 |
if (StringUtils.isNotBlank(setName)) { |
|
420 |
final SetInfo set = new SetInfo(); |
|
421 |
String setSpec = this.mongoSetCollection.normalizeSetSpec(setName); |
|
422 |
set.setSetSpec(setSpec); |
|
423 |
set.setSetName(setName); |
|
424 |
set.setSetDescription("This set contains metadata records whose provenance is " + setName); |
|
425 |
set.setEnabled(true); |
|
426 |
String query = "(" + OAIConfigurationReader.SET_FIELD + " = \"" + setSpec + "\") "; |
|
427 |
set.setQuery(query); |
|
428 |
this.mongoSetCollection.upsertSet(set, false, getCollection().getDB().getName()); |
|
429 |
} |
|
430 |
} |
|
431 |
} |
|
432 |
} |
|
433 |
|
|
434 |
	/**
	 * Refreshes the last-collection-date of an unchanged record.
	 * <p>
	 * Uses a partial {@code $set} update (with {@code upsert=true, multi=false}) so the stored body and
	 * datestamp are left untouched.
	 *
	 * @param oaiID              OAI identifier of the record
	 * @param lastCollectionDate date of the current feeding run
	 */
	private void handleRecord(final String oaiID, final Date lastCollectionDate) {
		log.debug("handling unchanged record " + oaiID);
		DBObject oldObj = new BasicDBObject(OAIConfigurationReader.ID_FIELD, oaiID);
		DBObject update = new BasicDBObject("$set", new BasicDBObject(OAIConfigurationReader.LAST_COLLECTION_DATE_FIELD, lastCollectionDate));
		collection.update(oldObj, update, true, false);
	}
|
440 |
|
|
441 |
private boolean isNewRecord(final String oaiIdentifier) { |
|
442 |
if (alwaysNewRecord) return true; |
|
443 |
return this.collection.findOne(new BasicDBObject(OAIConfigurationReader.ID_FIELD, oaiIdentifier)) == null; |
|
444 |
} |
|
445 |
|
|
446 |
	/**
	 * Tells whether the newly collected record body differs from the stored one.
	 *
	 * @param oaiID  OAI identifier of the stored record
	 * @param record the newly collected record body
	 * @return true if the configured {@code recordChangeDetector} reports a difference
	 */
	private boolean isChanged(final String oaiID, final String record) {
		RecordInfo oldRecord = getRecord(oaiID);
		// NOTE(review): when no stored record exists this reports "changed" only for a BLANK new body,
		// which looks inverted (a non-blank body with no stored counterpart is reported as unchanged).
		// In practice this branch should be unreachable because isNewRecord() is checked first — confirm.
		if (oldRecord == null) return StringUtils.isBlank(record);
		return this.recordChangeDetector.differs(oldRecord.getMetadata(), record);
	}
|
451 |
|
|
452 |
private String getOAIIdentifier(final String id) { |
|
453 |
return this.idScheme + ":" + this.idNamespace + ":" + id; |
|
454 |
} |
|
455 |
|
|
456 |
// ***********************************************************************************************// |
|
457 |
// Setters / Getters / Basic utilities |
|
458 |
// ***********************************************************************************************// |
|
459 |
|
|
460 |
@Override |
|
461 |
public int hashCode() { |
|
462 |
final int prime = 31; |
|
463 |
int result = 1; |
|
464 |
result = (prime * result) + ((collection == null) ? 0 : collection.hashCode()); |
|
465 |
result = (prime * result) + ((id == null) ? 0 : id.hashCode()); |
|
466 |
result = (prime * result) + ((interpretation == null) ? 0 : interpretation.hashCode()); |
|
467 |
result = (prime * result) + ((layout == null) ? 0 : layout.hashCode()); |
|
468 |
result = (prime * result) + ((metadataFormat == null) ? 0 : metadataFormat.hashCode()); |
|
469 |
return result; |
|
470 |
} |
|
471 |
|
|
472 |
@Override |
|
473 |
public boolean equals(final Object obj) { |
|
474 |
if (this == obj) return true; |
|
475 |
if (obj == null) return false; |
|
476 |
if (!(obj instanceof MongoPublisherStore)) return false; |
|
477 |
MongoPublisherStore other = (MongoPublisherStore) obj; |
|
478 |
if (collection == null) { |
|
479 |
if (other.collection != null) return false; |
|
480 |
} else if (!collection.equals(other.collection)) return false; |
|
481 |
if (id == null) { |
|
482 |
if (other.id != null) return false; |
|
483 |
} else if (!id.equals(other.id)) return false; |
|
484 |
if (interpretation == null) { |
|
485 |
if (other.interpretation != null) return false; |
|
486 |
} else if (!interpretation.equals(other.interpretation)) return false; |
|
487 |
if (layout == null) { |
|
488 |
if (other.layout != null) return false; |
|
489 |
} else if (!layout.equals(other.layout)) return false; |
|
490 |
if (metadataFormat == null) { |
|
491 |
if (other.metadataFormat != null) return false; |
|
492 |
} else if (!metadataFormat.equals(other.metadataFormat)) return false; |
|
493 |
return true; |
|
494 |
} |
|
495 |
|
|
496 |
	/**
	 * No-arg constructor for bean-style instantiation; collaborators are expected to be injected via setters.
	 */
	public MongoPublisherStore() {
		super();
	}
|
499 |
|
|
500 |
	/**
	 * Fully-initializing constructor.
	 * <p>
	 * The discarded-records collection is derived from the main collection: same DB, name prefixed with
	 * {@code "discarded-"}.
	 *
	 * @param id                   store identifier
	 * @param metadataFormat       metadata format served by the store
	 * @param interpretation       metadata interpretation
	 * @param layout               metadata layout
	 * @param collection           mongo collection holding the records
	 * @param mongoFields          declared indexable fields (drives repeatable/non-repeatable handling)
	 * @param queryParser          parser for publisher queries
	 * @param recordInfoGenerator  converter from mongo documents to RecordInfo
	 * @param idScheme             OAI identifier scheme
	 * @param idNamespace          OAI identifier namespace
	 * @param metadataExtractor    extractor of the metadata payload from full records
	 * @param recordChangeDetector detector used to decide whether a re-collected record changed
	 * @param alwaysNewRecord      when true, every fed record is treated as new (no lookup)
	 */
	public MongoPublisherStore(final String id, final String metadataFormat, final String interpretation, final String layout, final DBCollection collection,
			final List<PublisherField> mongoFields, final MongoQueryParser queryParser, final RecordInfoGenerator recordInfoGenerator, final String idScheme,
			final String idNamespace, final MetadataExtractor metadataExtractor, final RecordChangeDetector recordChangeDetector, final boolean alwaysNewRecord) {
		super();
		this.id = id;
		this.metadataFormat = metadataFormat;
		this.interpretation = interpretation;
		this.layout = layout;
		this.collection = collection;
		this.discardedCollection = collection.getDB().getCollection("discarded-" + collection.getName());
		this.mongoFields = mongoFields;
		this.queryParser = queryParser;
		this.recordInfoGenerator = recordInfoGenerator;
		this.idScheme = idScheme;
		this.idNamespace = idNamespace;
		this.recordChangeDetector = recordChangeDetector;
		this.alwaysNewRecord = alwaysNewRecord;
	}
|
518 |
|
|
519 |
	// Plain bean-style accessors: field access only, no additional logic.

	public void setId(final String id) {
		this.id = id;
	}

	public void setMetadataFormat(final String metadataFormat) {
		this.metadataFormat = metadataFormat;
	}

	public void setInterpretation(final String interpretation) {
		this.interpretation = interpretation;
	}

	public void setLayout(final String layout) {
		this.layout = layout;
	}

	public DBCollection getCollection() {
		return collection;
	}

	public void setCollection(final DBCollection collection) {
		this.collection = collection;
	}

	public MongoQueryParser getQueryParser() {
		return queryParser;
	}

	public void setQueryParser(final MongoQueryParser queryParser) {
		this.queryParser = queryParser;
	}

	public DBCollection getDiscardedCollection() {
		return discardedCollection;
	}

	public void setDiscardedCollection(final DBCollection discardedCollection) {
		this.discardedCollection = discardedCollection;
	}

	public String getIdScheme() {
		return idScheme;
	}

	public void setIdScheme(final String idScheme) {
		this.idScheme = idScheme;
	}

	public String getIdNamespace() {
		return idNamespace;
	}

	public void setIdNamespace(final String idNamespace) {
		this.idNamespace = idNamespace;
	}

	public RecordInfoGenerator getRecordInfoGenerator() {
		return recordInfoGenerator;
	}

	public void setRecordInfoGenerator(final RecordInfoGenerator recordInfoGenerator) {
		this.recordInfoGenerator = recordInfoGenerator;
	}

	public MetadataExtractor getMetadataExtractor() {
		return metadataExtractor;
	}

	public void setMetadataExtractor(final MetadataExtractor metadataExtractor) {
		this.metadataExtractor = metadataExtractor;
	}

	public RecordChangeDetector getRecordChangeDetector() {
		return recordChangeDetector;
	}

	public void setRecordChangeDetector(final RecordChangeDetector recordChangeDetector) {
		this.recordChangeDetector = recordChangeDetector;
	}

	@Override
	public String getId() {
		return this.id;
	}

	@Override
	public String getMetadataFormat() {
		return this.metadataFormat;
	}

	@Override
	public String getInterpretation() {
		return this.interpretation;
	}

	@Override
	public String getLayout() {
		return this.layout;
	}

	public MongoSetCollection getMongoSetCollection() {
		return mongoSetCollection;
	}

	public void setMongoSetCollection(final MongoSetCollection mongoSetCollection) {
		this.mongoSetCollection = mongoSetCollection;
	}

	public List<PublisherField> getMongoFields() {
		return mongoFields;
	}

	public void setMongoFields(final List<PublisherField> mongoFields) {
		this.mongoFields = mongoFields;
	}

	public boolean isAlwaysNewRecord() {
		return alwaysNewRecord;
	}

	public void setAlwaysNewRecord(final boolean alwaysNewRecord) {
		this.alwaysNewRecord = alwaysNewRecord;
	}
|
642 |
|
|
643 |
} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/mongo/MetadataExtractor.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.mongo; |
|
2 |
|
|
3 |
import org.dom4j.Document; |
|
4 |
import org.dom4j.Node; |
|
5 |
|
|
6 |
import eu.dnetlib.miscutils.functional.UnaryFunction; |
|
7 |
|
|
8 |
/** |
|
9 |
* Function to skip the header of the record and deliver only its metadata content as XML String. |
|
10 |
*/ |
|
11 |
public class MetadataExtractor implements UnaryFunction<String, Document> { |
|
12 |
|
|
13 |
@Override |
|
14 |
public String evaluate(final Document xmlDoc) { |
|
15 |
Node metadataNode = xmlDoc.selectSingleNode("//*[local-name() = 'metadata']/*"); |
|
16 |
return metadataNode.asXML(); |
|
17 |
} |
|
18 |
} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/mongo/ProvenanceExtractor.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.mongo; |
|
2 |
|
|
3 |
import org.dom4j.Document; |
|
4 |
import org.dom4j.Node; |
|
5 |
|
|
6 |
import eu.dnetlib.miscutils.functional.UnaryFunction; |
|
7 |
|
|
8 |
/** |
|
9 |
* Function to deliver only the about/provenance content of a record as XML String. |
|
10 |
*/ |
|
11 |
public class ProvenanceExtractor implements UnaryFunction<String, Document> { |
|
12 |
|
|
13 |
@Override |
|
14 |
public String evaluate(final Document xmlDoc) { |
|
15 |
Node metadataNode = xmlDoc.selectSingleNode("//*[local-name() = 'about']/*[local-name() = 'provenance']"); |
|
16 |
if (metadataNode != null) return metadataNode.asXML(); |
|
17 |
else return null; |
|
18 |
} |
|
19 |
} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/mongo/MongoCursor.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.mongo; |
|
2 |
|
|
3 |
import java.util.Iterator; |
|
4 |
|
|
5 |
import com.mongodb.DBCursor; |
|
6 |
import com.mongodb.DBObject; |
|
7 |
|
|
8 |
import eu.dnetlib.data.information.oai.publisher.info.RecordInfo; |
|
9 |
import eu.dnetlib.data.oai.store.Cursor; |
|
10 |
import eu.dnetlib.miscutils.functional.UnaryFunction; |
|
11 |
|
|
12 |
/**
 * {@link Cursor} implementation backed by a mongodb {@link DBCursor}.
 * <p>
 * Each DBObject delivered by the underlying cursor is converted to a {@link RecordInfo} by the configured
 * {@link RecordInfoGenerator}; when a post-processing {@code function} is set and the record body is
 * included, the function is applied to the record metadata before delivery.
 */
public class MongoCursor implements Cursor {

	/**
	 * Underlying mongo cursor.
	 */
	private DBCursor dbCursor;
	/**
	 * Function to apply to records before delivering.
	 */
	private UnaryFunction<String, String> function;

	/**
	 * true if the RecordInfo returned by this Cursor must include the record body, false otherwise.
	 */
	private boolean bodyIncluded;

	// Converter from raw DBObjects to RecordInfo instances.
	private RecordInfoGenerator recordInfoGenerator;
	// NOTE(review): metadataExtractor and provenanceExtractor are only held/exposed here, never used by
	// this class's own logic — presumably consumed by callers via the getters; confirm.
	private MetadataExtractor metadataExtractor;
	private ProvenanceExtractor provenanceExtractor;

	/**
	 *
	 * {@inheritDoc}
	 *
	 * @see eu.dnetlib.data.information.oai.publisher.store.Cursor#count()
	 */
	@Override
	public int count() {
		// NOTE(review): DBCursor.count() counts all documents matching the query — it presumably ignores
		// any limit/skip applied to the cursor; confirm against the driver version in use.
		return this.dbCursor.count();
	}

	/**
	 *
	 * {@inheritDoc}
	 *
	 * @see java.lang.Iterable#iterator()
	 */
	@Override
	public Iterator<RecordInfo> iterator() {

		return new Iterator<RecordInfo>() {

			@Override
			public boolean hasNext() {
				return dbCursor.hasNext();
			}

			@Override
			public RecordInfo next() {
				DBObject res = dbCursor.next();
				RecordInfo info = recordInfoGenerator.transformDBObject(res, bodyIncluded);
				// Post-process the metadata only when a function is configured and there is a body to process.
				if ((function != null) && bodyIncluded && (info != null)) {
					info.setMetadata(function.evaluate(info.getMetadata()));
				}
				return info;
			}

			@Override
			public void remove() {
				// Read-only cursor: removal is not supported.
				throw new UnsupportedOperationException();
			}

		};
	}

	/** No-arg constructor for bean-style instantiation. */
	public MongoCursor() {
		super();
	}

	/** Convenience constructor without a post-processing function. */
	public MongoCursor(final DBCursor dbCursor, final boolean bodyIncluded, final RecordInfoGenerator recordInfoGenerator,
			final MetadataExtractor metadataExtractor) {
		this(dbCursor, null, bodyIncluded, recordInfoGenerator, metadataExtractor);
	}

	/**
	 * Full constructor.
	 *
	 * @param dbCursor            underlying mongo cursor
	 * @param function            optional post-processing applied to record metadata (may be null)
	 * @param bodyIncluded        whether delivered RecordInfo must include the record body
	 * @param recordInfoGenerator converter from DBObjects to RecordInfo
	 * @param metadataExtractor   metadata extractor exposed to callers
	 */
	public MongoCursor(final DBCursor dbCursor, final UnaryFunction<String, String> function, final boolean bodyIncluded,
			final RecordInfoGenerator recordInfoGenerator, final MetadataExtractor metadataExtractor) {
		super();
		this.dbCursor = dbCursor;
		this.function = function;
		this.bodyIncluded = bodyIncluded;
		this.recordInfoGenerator = recordInfoGenerator;
		this.metadataExtractor = metadataExtractor;
	}

	// Plain bean-style accessors below: field access only, no additional logic.

	public UnaryFunction<String, String> getFunction() {
		return function;
	}

	public void setFunction(final UnaryFunction<String, String> function) {
		this.function = function;
	}

	public DBCursor getDbCursor() {
		return dbCursor;
	}

	public void setDbCursor(final DBCursor dbCursor) {
		this.dbCursor = dbCursor;
	}

	@Override
	public boolean isBodyIncluded() {
		return this.bodyIncluded;
	}

	@Override
	public void setBodyIncluded(final boolean bodyIncluded) {
		this.bodyIncluded = bodyIncluded;
	}

	public RecordInfoGenerator getRecordInfoGenerator() {
		return recordInfoGenerator;
	}

	public void setRecordInfoGenerator(final RecordInfoGenerator recordInfoGenerator) {
		this.recordInfoGenerator = recordInfoGenerator;
	}

	public MetadataExtractor getMetadataExtractor() {
		return metadataExtractor;
	}

	public void setMetadataExtractor(final MetadataExtractor metadataExtractor) {
		this.metadataExtractor = metadataExtractor;
	}

	public ProvenanceExtractor getProvenanceExtractor() {
		return provenanceExtractor;
	}

	public void setProvenanceExtractor(final ProvenanceExtractor provenanceExtractor) {
		this.provenanceExtractor = provenanceExtractor;
	}

}
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/mongo/MongoPublisherStoreDAO.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.mongo; |
|
2 |
|
|
3 |
import java.util.List; |
|
4 |
|
|
5 |
import javax.annotation.Resource; |
|
6 |
|
|
7 |
import org.apache.commons.logging.Log; |
|
8 |
import org.apache.commons.logging.LogFactory; |
|
9 |
import org.springframework.beans.factory.annotation.Autowired; |
|
10 |
import org.springframework.beans.factory.annotation.Required; |
|
11 |
|
|
12 |
import com.google.common.collect.Lists; |
|
13 |
import com.mongodb.BasicDBObject; |
|
14 |
import com.mongodb.BasicDBObjectBuilder; |
|
15 |
import com.mongodb.DB; |
|
16 |
import com.mongodb.DBCollection; |
|
17 |
import com.mongodb.DBCursor; |
|
18 |
import com.mongodb.DBObject; |
|
19 |
import com.mongodb.Mongo; |
|
20 |
|
|
21 |
import eu.dnetlib.data.information.oai.publisher.OaiPublisherException; |
|
22 |
import eu.dnetlib.data.information.oai.publisher.OaiPublisherRuntimeException; |
|
23 |
import eu.dnetlib.data.information.oai.publisher.conf.OAIConfigurationReader; |
|
24 |
import eu.dnetlib.data.information.oai.publisher.info.MDFInfo; |
|
25 |
import eu.dnetlib.data.oai.store.PublisherStoreDAO; |
|
26 |
import eu.dnetlib.data.oai.store.RecordChangeDetector; |
|
27 |
import eu.dnetlib.data.oai.store.parser.MongoQueryParser; |
|
28 |
import eu.dnetlib.data.oai.store.sets.MongoSetCollection; |
|
29 |
|
|
30 |
/**
 * DAO over the mongo-backed OAI publisher stores.
 * <p>
 * Store metadata (id, format, interpretation, layout) lives in the {@code metadataCollection};
 * each store's records live in a collection named after the store id. Store ids are generated
 * as {@code <format>-<layout>-<interpretation>}.
 */
public class MongoPublisherStoreDAO implements PublisherStoreDAO<MongoPublisherStore, MongoCursor> {

	private static final Log log = LogFactory.getLog(MongoPublisherStoreDAO.class); // NOPMD by marko on 11/24/08 5:02 PM

	@Autowired
	private Mongo publisherMongoServer;

	/** Name of the collection with information about the OAI stores. **/
	private String metadataCollection;

	@Resource
	private RecordInfoGenerator recordInfoGenerator;
	@Resource
	private MetadataExtractor metadataExtractor;
	@Resource
	private MongoQueryParser queryParser;
	@Resource(name = "oaiConfigurationExistReader")
	private OAIConfigurationReader configuration;
	@Resource
	private RecordChangeDetector recordChangeDetector;
	@Resource
	private MongoSetCollection mongoSetCollection;

	// When true, stores are created with the "treat every fed record as new" behaviour.
	private boolean alwaysNewRecord;

	/** Returns the mongo DB handle for the given database name. */
	protected DB getDB(final String dbName) {
		return this.publisherMongoServer.getDB(dbName);
	}

	/**
	 * Lists all publisher stores registered in the metadata collection of the given DB.
	 */
	@Override
	public List<MongoPublisherStore> listPublisherStores(final String dbName) {
		List<MongoPublisherStore> stores = Lists.newArrayList();
		DB db = getDB(dbName);
		DBCursor cursor = db.getCollection(this.metadataCollection).find();
		for (DBObject storeInfo : cursor) {
			stores.add(this.createFromDBObject(storeInfo, db));
		}
		return stores;
	}

	/**
	 * Finds a store by id; returns null when no such store is registered.
	 */
	@Override
	public MongoPublisherStore getStore(final String storeId, final String dbName) {
		// NOTE(review): getDB(dbName) is called twice here; both calls resolve the same DB handle.
		DBObject storeInfo = getDB(dbName).getCollection(this.metadataCollection).findOne(new BasicDBObject("id", storeId));
		return this.createFromDBObject(storeInfo, getDB(dbName));
	}

	/**
	 * Finds a store by metadata format coordinates (format, interpretation, layout).
	 */
	@Override
	public MongoPublisherStore getStore(final String mdfName, final String mdfInterpretation, final String mdfLayout, final String dbName) {
		return this.getStore(this.generateStoreId(mdfName, mdfInterpretation, mdfLayout), dbName);
	}

	/**
	 * Finds the store serving the given target metadata prefix, resolving its source format
	 * coordinates through the OAI configuration.
	 */
	@Override
	public MongoPublisherStore getStoreFor(final String targetMetadataPrefix, final String dbName) {
		MDFInfo info = this.configuration.getMetadataFormatInfo(targetMetadataPrefix);
		return this.getStore(info.getSourceFormatName(), info.getSourceFormatInterpretation(), info.getSourceFormatLayout(), dbName);
	}

	/**
	 * Registers a new store in the metadata collection and returns its DAO wrapper.
	 * Note: the record collection itself is created lazily by mongo on first write.
	 */
	@Override
	public MongoPublisherStore createStore(final String mdfName, final String mdfInterpretation, final String mdfLayout, final String dbName)
			throws OaiPublisherException {
		DB db = getDB(dbName);
		DBObject store = createMetadataEntry(mdfName, mdfInterpretation, mdfLayout);
		DBCollection metadata = db.getCollection(this.metadataCollection);
		metadata.insert(store);
		MongoPublisherStore theStore = this.createFromDBObject(store, db);
		return theStore;

	}

	/**
	 * Drops a store: its record collection, its metadata entry, and the OAI sets.
	 *
	 * @return false when no store with the given id exists, true on successful deletion
	 */
	@Override
	public boolean deleteStore(final String storeId, final String dbName) {
		DB db = getDB(dbName);
		DBCollection metadata = db.getCollection(this.metadataCollection);
		DBObject storeInfo = metadata.findOne(new BasicDBObject("id", storeId));
		if (storeInfo == null) return false;
		else {
			db.getCollection(storeId).drop();
			metadata.remove(storeInfo);
			// TODO: should drop entries related to mdPrefix served by the store we are deleting, not all of them.
			this.mongoSetCollection.dropOAISets(dbName);
			log.debug("Deleted oaistore " + storeId + ", db: " + dbName);
			return true;
		}
	}

	/**
	 * Deletes from a store only the records belonging to the given set, and drops that OAI set.
	 *
	 * @return false when no store with the given id exists, true on successful deletion
	 */
	@Override
	public boolean deleteFromStore(final String storeId, final String dbName, final String set) {
		DB db = getDB(dbName);
		DBCollection metadata = db.getCollection(this.metadataCollection);
		DBObject storeInfo = metadata.findOne(new BasicDBObject("id", storeId));
		if (storeInfo == null) return false;
		else {
			db.getCollection(storeId).remove(new BasicDBObject(OAIConfigurationReader.SET_FIELD, set));
			this.mongoSetCollection.dropSet(dbName, set);
			log.debug("Deleted set " + set + " from oaistore " + storeId + ", db: " + dbName);
			return true;
		}
	}

	/** Set-scoped deletion addressed by metadata format coordinates. */
	@Override
	public boolean deleteFromStore(final String mdfName, final String mdfInterpretation, final String mdfLayout, final String dbName, final String set) {
		return this.deleteFromStore(this.generateStoreId(mdfName, mdfInterpretation, mdfLayout), dbName, set);
	}

	/** Store deletion addressed by metadata format coordinates. */
	@Override
	public boolean deleteStore(final String mdfName, final String mdfInterpretation, final String mdfLayout, final String dbName) {
		return this.deleteStore(this.generateStoreId(mdfName, mdfInterpretation, mdfLayout), dbName);
	}

	/**
	 * Ensures the indices of a single store, asynchronously.
	 * NOTE(review): fire-and-forget raw Thread — failures in ensureIndices() are not observed by the
	 * caller, and there is no executor/shutdown handling; confirm this is intentional.
	 */
	public void ensureIndex(final MongoPublisherStore store) {
		if (store == null) throw new OaiPublisherRuntimeException("Can't ensure index on null store");
		Thread t = new Thread() {

			@Override
			public void run() {
				store.ensureIndices();
			}
		};
		t.start();
	}

	/**
	 * Ensures the indices of every store in the given DB, synchronously (unlike the single-store overload).
	 */
	public void ensureIndex(final String dbName) {
		List<MongoPublisherStore> stores = this.listPublisherStores(dbName);
		for (final MongoPublisherStore s : stores) {
			s.ensureIndices();
		}

	}

	/**
	 * Builds a MongoPublisherStore from its metadata-collection entry; returns null for a null entry.
	 */
	private MongoPublisherStore createFromDBObject(final DBObject storeInfo, final DB db) {
		if (storeInfo == null) return null;
		String storeId = (String) storeInfo.get("id");
		String mdFormat = (String) storeInfo.get("metadataFormat");
		String mdInterpreation = (String) storeInfo.get("interpretation");
		String mdLayout = (String) storeInfo.get("layout");

		MongoPublisherStore store = new MongoPublisherStore(storeId, mdFormat, mdInterpreation, mdLayout, db.getCollection(storeId),
				this.configuration.getFields(mdFormat, mdInterpreation, mdLayout), queryParser, recordInfoGenerator, this.configuration.getIdScheme(),
				this.configuration.getIdNamespace(), this.metadataExtractor, this.recordChangeDetector, alwaysNewRecord);
		store.setMongoSetCollection(mongoSetCollection);
		return store;
	}

	/** Builds the metadata-collection entry describing a store. */
	private DBObject createMetadataEntry(final String mdfName, final String mdfInterpretation, final String mdfLayout) {
		DBObject info = BasicDBObjectBuilder.start("id", generateStoreId(mdfName, mdfInterpretation, mdfLayout)).append("metadataFormat", mdfName)
				.append("interpretation", mdfInterpretation).append("layout", mdfLayout).get();
		return info;

	}

	/** Store id convention: format-layout-interpretation. */
	private String generateStoreId(final String mdfName, final String mdfInterpretation, final String mdfLayout) {
		return mdfName + "-" + mdfLayout + "-" + mdfInterpretation;
	}

	// Plain bean-style accessors below: field access only, no additional logic.

	public String getMetadataCollection() {
		return metadataCollection;
	}

	@Required
	public void setMetadataCollection(final String metadataCollection) {
		this.metadataCollection = metadataCollection;
	}

	public MongoQueryParser getQueryParser() {
		return queryParser;
	}

	public void setQueryParser(final MongoQueryParser queryParser) {
		this.queryParser = queryParser;
	}

	public OAIConfigurationReader getConfiguration() {
		return configuration;
	}

	public void setConfiguration(final OAIConfigurationReader configuration) {
		this.configuration = configuration;
	}

	public RecordInfoGenerator getRecordInfoGenerator() {
		return recordInfoGenerator;
	}

	public void setRecordInfoGenerator(final RecordInfoGenerator recordInfoGenerator) {
		this.recordInfoGenerator = recordInfoGenerator;
	}

	public MetadataExtractor getMetadataExtractor() {
		return metadataExtractor;
	}

	public void setMetadataExtractor(final MetadataExtractor metadataExtractor) {
		this.metadataExtractor = metadataExtractor;
	}

	public RecordChangeDetector getRecordChangeDetector() {
		return recordChangeDetector;
	}

	public void setRecordChangeDetector(final RecordChangeDetector recordChangeDetector) {
		this.recordChangeDetector = recordChangeDetector;
	}

	public MongoSetCollection getMongoSetCollection() {
		return mongoSetCollection;
	}

	public void setMongoSetCollection(final MongoSetCollection mongoSetCollection) {
		this.mongoSetCollection = mongoSetCollection;
	}

	public Mongo getPublisherMongoServer() {
		return publisherMongoServer;
	}

	public void setPublisherMongoServer(final Mongo publisherMongoServer) {
		this.publisherMongoServer = publisherMongoServer;
	}

	public boolean isAlwaysNewRecord() {
		return alwaysNewRecord;
	}

	public void setAlwaysNewRecord(final boolean alwaysNewRecord) {
		this.alwaysNewRecord = alwaysNewRecord;
	}

}
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/actions/DropStoreAction.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.actions; |
|
2 |
|
|
3 |
import javax.annotation.Resource; |
|
4 |
|
|
5 |
import org.apache.commons.lang.StringUtils; |
|
6 |
import org.apache.commons.logging.Log; |
|
7 |
import org.apache.commons.logging.LogFactory; |
|
8 |
|
|
9 |
import eu.dnetlib.data.oai.store.mongo.MongoPublisherStoreDAO; |
|
10 |
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob; |
|
11 |
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler; |
|
12 |
|
|
13 |
public class DropStoreAction extends AbstractOAIStoreAction { |
|
14 |
|
|
15 |
private static final Log log = LogFactory.getLog(DropStoreAction.class); // NOPMD by marko on 11/24/08 5:02 PM |
|
16 |
|
|
17 |
@Resource |
|
18 |
private MongoPublisherStoreDAO mongoPublisherStoreDAO; |
|
19 |
|
|
20 |
@Override |
|
21 |
public void execute(final BlackboardServerHandler handler, final BlackboardJob job) throws Exception { |
|
22 |
String mdformat = job.getParameters().get("format"); |
|
23 |
String layout = job.getParameters().get("layout"); |
|
24 |
String interp = job.getParameters().get("interpretation"); |
|
25 |
String dbName = job.getParameters().get("oai_dbName"); |
|
26 |
String set = job.getParameters().get("set"); |
|
27 |
boolean deleted = false; |
|
28 |
if (StringUtils.isNotBlank(set)) { |
|
29 |
deleted = mongoPublisherStoreDAO.deleteFromStore(mdformat, interp, layout, dbName, set); |
|
30 |
} else { |
|
31 |
deleted = this.mongoPublisherStoreDAO.deleteStore(mdformat, interp, layout, dbName); |
|
32 |
} |
|
33 |
if (deleted) { |
|
34 |
log.info("OAI Deletion succeded on db " + dbName); |
|
35 |
} else { |
|
36 |
log.info("OAI Deletion failed on db " + dbName); |
|
37 |
} |
|
38 |
handler.done(job); |
|
39 |
} |
|
40 |
} |
modules/dnet-oai-store-service/tags/dnet-oai-store-service-4.3.0/src/main/java/eu/dnetlib/data/oai/store/actions/EnsureIndexesAction.java | ||
---|---|---|
1 |
package eu.dnetlib.data.oai.store.actions; |
|
2 |
|
|
3 |
import javax.annotation.Resource; |
Also available in: Unified diff
[maven-release-plugin] copy for tag dnet-oai-store-service-4.3.0