package eu.dnetlib.data.mdstore.modular.mongodb;

import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;

import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.WriteConcern;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.*;
import eu.dnetlib.data.mdstore.MDStoreServiceException;
import eu.dnetlib.data.mdstore.modular.MDFormatDescription;
import eu.dnetlib.data.mdstore.modular.RecordParser;
import eu.dnetlib.data.mdstore.modular.mongodb.utils.IndexFieldRecordParser;
import eu.dnetlib.data.mdstore.modular.mongodb.utils.IndexFieldRecordParserException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import static eu.dnetlib.data.mdstore.modular.MDStoreConstants.*;

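/**
 * Buffers metadata record writes and flushes them to MongoDB in bulk.
 *
 * Valid records are upserted into the target collection once {@code bulkSize} operations have
 * accumulated; records that cannot be parsed, or that lack the mandatory identifier, can optionally
 * be routed to a dedicated "discarded" collection. {@link #flushBulks()} must be called at the end
 * of the feeding process to write out any remaining buffered operations.
 *
 * A minimal usage sketch (the collection handles, parser and format descriptions below are
 * illustrative placeholders, not the actual service wiring):
 *
 * <pre>{@code
 * MongoBulkWritesManager manager = new MongoBulkWritesManager(
 *     mdstoreCollection, discardedCollection, mdFormatDescriptions, 500, recordParser, true);
 * for (String record : records) {
 *     manager.insert(record);
 * }
 * manager.flushBulks();
 * }</pre>
 */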
public class MongoBulkWritesManager {

	private static final Log log = LogFactory.getLog(MongoBulkWritesManager.class);
	private final boolean discardRecords;
	private final boolean indexRecords;
	private final IndexFieldRecordParser indexFieldRecordParser = new IndexFieldRecordParser();
	private final List<MDFormatDescription> mdref;
	private RecordParser recordParser;
	private MongoCollection<DBObject> validCollection;
	private List<WriteModel<DBObject>> validBulkOperationList;
	private BulkWriteOptions writeOptions;
	private MongoCollection<DBObject> discardedCollection;
	private List<WriteModel<DBObject>> discardedBulkOperationList;
	private int bulkSize;

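	/**
	 * @param collection          target collection for valid records
	 * @param discardedCollection collection receiving records that could not be parsed or validated
	 * @param mdref               index field descriptions; when null or empty, index field extraction is skipped
	 * @param bulkSize            number of buffered operations that triggers a bulk write
	 * @param parser              parser extracting the record properties (id, original id, timestamp)
	 * @param discardRecords      when true, invalid records are stored in the discarded collection instead of being dropped silently
	 */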
	public MongoBulkWritesManager(final MongoCollection<DBObject> collection,
			final MongoCollection<DBObject> discardedCollection,
			final List<MDFormatDescription> mdref,
			final int bulkSize,
			final RecordParser parser,
			final boolean discardRecords) {
		this.validCollection = collection.withWriteConcern(WriteConcern.ACKNOWLEDGED);
		this.validBulkOperationList = Lists.newArrayList();

		this.discardedCollection = discardedCollection.withWriteConcern(WriteConcern.ACKNOWLEDGED);
		this.discardedBulkOperationList = Lists.newArrayList();

		this.bulkSize = bulkSize;
		this.recordParser = parser;
		this.discardRecords = discardRecords;
		this.mdref = mdref;

		this.indexRecords = (this.mdref != null && !this.mdref.isEmpty());
		this.writeOptions = new BulkWriteOptions().ordered(false);
	}

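	/**
	 * Parses the given record and buffers an upsert operation for it; buffered operations are written to
	 * MongoDB every {@code bulkSize} records. Records that cannot be parsed, or that lack the mandatory
	 * id property, are routed to the discarded collection when record discarding is enabled.
	 *
	 * @param record the metadata record body
	 * @throws MDStoreServiceException when the configured index fields cannot be extracted from the record
	 */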
	public void insert(final String record) throws MDStoreServiceException {
		Map<String, String> recordProperties = null;
		try {
			recordProperties = recordParser.parseRecord(record);
		} catch (Throwable e) {
			if (discardRecords) {
				log.debug("unhandled exception: " + e.getMessage());
				discardRecord(record);
			}
		}
		// nothing more to do when the record could not be parsed: it has already been discarded (if enabled)
		if (recordProperties == null) {
			return;
		}
		Map<String, List<String>> indexRecordField = null;
		try {
			if (indexRecords) {
				indexRecordField = indexFieldRecordParser.parseRecord(record, mdref);
			}
		} catch (IndexFieldRecordParserException e) {
			// could not index record fields
			throw new MDStoreServiceException("Are you using the correct type of store / index definition for the records in " + validCollection.getNamespace() + " ?", e);
		}

		log.debug("found props: " + recordProperties);
		if (recordProperties.containsKey(ID)) {
			final DBObject obj = buildDBObject(record, recordProperties, indexRecordField);
			if (log.isDebugEnabled()) {
				log.debug("Saving object " + obj);
			}
			validBulkOperationList.add(new ReplaceOneModel<DBObject>(new BasicDBObject(ID, obj.get(ID)), obj, new UpdateOptions().upsert(true)));
			if (((validBulkOperationList.size() % bulkSize) == 0) && !validBulkOperationList.isEmpty()) {
				validCollection.bulkWrite(validBulkOperationList, writeOptions);
				validBulkOperationList.clear();
			}
		} else {
			if (discardRecords) {
				log.debug("parsed record seems invalid");
				discardRecord(record);
			}
		}
	}

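	/**
	 * Buffers the record body for insertion into the discarded collection; the buffer is written out
	 * once it reaches {@code bulkSize} operations.
	 */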
	private void discardRecord(final String record) {
		discardedBulkOperationList.add(new InsertOneModel<DBObject>(new BasicDBObject(BODY, record)));

		if (((discardedBulkOperationList.size() % bulkSize) == 0) && !discardedBulkOperationList.isEmpty()) {
			discardedCollection.bulkWrite(discardedBulkOperationList, writeOptions);
			discardedBulkOperationList.clear();
		}
	}

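	/**
	 * Writes out any buffered operations that have not reached {@code bulkSize} yet. The bulk writes are
	 * issued with the JOURNALED write concern, so the method returns only after the data has been
	 * committed to the on-disk journal; the collections are then switched back to ACKNOWLEDGED.
	 */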
	public void flushBulks() {
		//setting to journaled write concern to be sure that when the write returns everything has been flushed to disk (https://docs.mongodb.org/manual/faq/developers/#when-does-mongodb-write-updates-to-disk)
		//the explicit fsync command can't be run anymore: 'Command failed with error 13: 'fsync may only be run against the admin database.'
		if (!validBulkOperationList.isEmpty()) {
			validCollection = getCollectionWithWriteConcern(validCollection, WriteConcern.JOURNALED);
			validCollection.bulkWrite(validBulkOperationList, writeOptions);
			//clear the buffer to avoid re-submitting the same operations on a subsequent flush or insert
			validBulkOperationList.clear();
		}
		if (!discardedBulkOperationList.isEmpty()) {
			discardedCollection = getCollectionWithWriteConcern(discardedCollection, WriteConcern.JOURNALED);
			discardedCollection.bulkWrite(discardedBulkOperationList, writeOptions);
			discardedBulkOperationList.clear();
		}
		//setting write concern back to ACKNOWLEDGED to avoid executing all future writes in journaled mode
		validCollection = getCollectionWithWriteConcern(validCollection, WriteConcern.ACKNOWLEDGED);
		discardedCollection = getCollectionWithWriteConcern(discardedCollection, WriteConcern.ACKNOWLEDGED);
	}

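	/**
	 * Builds the Mongo document to be stored: the mandatory fields (id, original id, body, timestamp)
	 * plus any additional index fields extracted from the record, excluding keys that would override
	 * the mandatory ones.
	 */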
	protected DBObject buildDBObject(final String record, final Map<String, String> recordProperties, final Map<String, List<String>> indexFieldProperties) {
		final DBObject obj = new BasicDBObject();
		obj.put(ID, recordProperties.get(ID));
		obj.put(ORIGINALID, recordProperties.get(ORIGINALID));
		obj.put(BODY, record);
		obj.put(TIMESTAMP, Long.valueOf(recordProperties.get(TIMESTAMP)));
		if (indexFieldProperties != null)
			obj.putAll(Maps.filterKeys(indexFieldProperties, new Predicate<String>() {
				//ensure we do not override the mandatory fields above with some unexpected value
				@Override
				public boolean apply(@Nullable final String s) {
					return !s.equalsIgnoreCase(ID) && !s.equalsIgnoreCase(ORIGINALID) && !s.equalsIgnoreCase(BODY) && !s.equalsIgnoreCase(TIMESTAMP);
				}
			}));
		return obj;
	}

	private MongoCollection<DBObject> getCollectionWithWriteConcern(MongoCollection<DBObject> collection, WriteConcern writeConcern) {
		return collection.withWriteConcern(writeConcern);
	}

}