package eu.dnetlib.data.mapreduce.hbase.index;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import com.google.common.collect.Lists;
import com.googlecode.protobuf.format.JsonFormat;
import eu.dnetlib.data.mapreduce.JobParams;
import eu.dnetlib.data.mapreduce.util.DedupUtils;
import eu.dnetlib.data.proto.OafProtos.Oaf;
import eu.dnetlib.data.transform.SolrProtoMapper;
import eu.dnetlib.functionality.index.solr.feed.InputDocumentFactory;
import eu.dnetlib.miscutils.datetime.HumanTime;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrInputDocument;
import org.dom4j.DocumentException;
public class DedupIndexFeedMapper extends TableMapper<Text, Text> {
33

    
34
	private static final Log log = LogFactory.getLog(DedupIndexFeedMapper.class); // NOPMD by marko on 11/24/08 5:02 PM
35

    
36
	private CloudSolrClient solrServer;
37

    
38
	private String dsId;
39

    
40
	private String version;
41

    
42
	private int shutdownWaitTime = 10000;
43

    
44
	private int bufferFlushThreshold = 100;
45

    
46
	private List<SolrInputDocument> buffer;
47

    
48
	private int backoffTimeMs = 5000;
49

    
50
	private boolean simulation = false;
51

    
52
	private String entityType = null;
53

    
54
	private String actionset = null;
55

    
56
	private SolrProtoMapper mapper = null;
57

    
58
	private final static int MAX_RETRIES = 10;
59

    
60
	@Override
61
	protected void setup(final Context context) throws IOException, InterruptedException {
62

    
63
		logConfiguration(context.getConfiguration());
64

    
65
		shutdownWaitTime = Integer.parseInt(context.getConfiguration().get(JobParams.INDEX_SHUTDOWN_WAIT));
66
		bufferFlushThreshold = Integer.parseInt(context.getConfiguration().get(JobParams.INDEX_BUFFER_FLUSH_TRESHOLD));
67
		dsId = context.getConfiguration().get(JobParams.INDEX_DSID);
68
		version = InputDocumentFactory.getParsedDateField(context.getConfiguration().get(JobParams.INDEX_FEED_TIME));
69
		buffer = Lists.newArrayList();
70
		simulation = Boolean.parseBoolean(context.getConfiguration().get(JobParams.INDEX_FEED_SIMULATION_MODE));
71
		entityType = context.getConfiguration().get("entityType");
72
		actionset = context.getConfiguration().get("actionset");
73

    
74
		final String fields = context.getConfiguration().get("index.fields");
75

    
76
		log.info("got fields: \n" + fields);
77
		log.info("got dsId: " + dsId);
78
		log.info("got version: " + version);
79
		log.info("simulation: " + simulation);
80
		log.info("entityType: " + entityType);
81
		log.info("actionset: " + actionset);
82
		log.info("buffer size: " + bufferFlushThreshold);
83

    
84
		try {
85
			mapper = new SolrProtoMapper(fields);
86
		} catch (final DocumentException e) {
87
			log.error("unable to parse fields: " + fields);
88
			throw new IllegalArgumentException(e);
89
		}
90

    
91
		final String baseURL = context.getConfiguration().get(JobParams.INDEX_SOLR_URL);
92
		log.info("solr server baseURL: " + baseURL);
93

    
94
		final String collection = context.getConfiguration().get(JobParams.INDEX_SOLR_COLLECTION);
95
		log.info("solr server collection: " + collection);
96

    
97
		while (true) {
98
			try {
99
				log.info("initializing solr server...");
100
				solrServer = new CloudSolrClient.Builder()
101
					.withZkHost(baseURL)
102
					.build();
103

    
104
				solrServer.connect();
105

    
106
				solrServer.setParallelUpdates(true);
107
				solrServer.setDefaultCollection(collection);
108

    
109
				final SolrPingResponse rsp = solrServer.ping();
110

    
111
				if (rsp.getStatus() != 0) throw new SolrServerException("bad init status: " + rsp.getStatus());
112
				else {
113
					break;
114
				}
115

    
116
			} catch (final Throwable e) {
117
				if (solrServer != null) {
118
					solrServer.close();
119
				}
120
				context.getCounter("index init", e.getMessage()).increment(1);
121
				log.info(String.format("failed to init solr client wait %dms", backoffTimeMs));
122
				Thread.sleep(backoffTimeMs);
123
			}
124
		}
125
	}
126

    
127
	@Override
128
	protected void map(final ImmutableBytesWritable key, final Result value, final Context context) throws IOException, InterruptedException {
129

    
130
		SolrInputDocument doc = null;
131

    
132
		final Map<byte[], byte[]> bMap = value.getFamilyMap(Bytes.toBytes(entityType));
133

    
134
		if (MapUtils.isEmpty(bMap) || !bMap.containsKey(DedupUtils.BODY_B)) {
135
			context.getCounter(entityType, "missing body");
136
			return;
137
		}
138

    
139
		final Oaf oaf = Oaf.parseFrom(bMap.get(DedupUtils.BODY_B));
140

    
141
		try {
142
			doc = getDocument(oaf);
143
		} catch (final Throwable e) {
144
			handleError(key, new JsonFormat().printToString(oaf), context, doc, e);
145
			return;
146
		}
147

    
148
		int retries = 0;
149
		while (retries < MAX_RETRIES) {
150
			try {
151
				addDocument(context, doc);
152
				return;
153
			} catch (final Throwable e) {
154
				retries++;
155
				context.getCounter("index feed", "retries").increment(1);
156
				handleError(key, new JsonFormat().printToString(oaf), context, doc, e);
157
				log.info(String.format("failed to feed documents, waiting %dms", backoffTimeMs));
158
				Thread.sleep(backoffTimeMs);
159
			}
160
		}
161
		if (retries >= MAX_RETRIES)
162
			throw new IOException("too many retries: " + retries);
163
	}
164

    
165
	private SolrInputDocument getDocument(final Oaf oaf) throws DocumentException {
166
		final SolrInputDocument document = mapper.map(oaf, version, dsId, actionset);
167
		document.addField("actionset", actionset);
168
		return document;
169
	}
170

    
171
	private void addDocument(final Context context, final SolrInputDocument doc) throws SolrServerException, IOException {
172
		if (!doc.isEmpty()) {
173

    
174
			buffer.add(doc);
175
			if (buffer.size() >= bufferFlushThreshold) {
176
				doAdd(buffer, context);
177
				// Thread.sleep(100);
178
			}
179
		} else {
180
			context.getCounter("index feed", "skipped records").increment(1);
181
		}
182
	}
183

    
184
	private void doAdd(final List<SolrInputDocument> buffer, final Context context) throws SolrServerException, IOException {
185
		if (!simulation) {
186
			final long start = System.currentTimeMillis();
187
			final UpdateResponse rsp = solrServer.add(buffer);
188
			final long stop = System.currentTimeMillis() - start;
189
			log.info("feed time for " + buffer.size() + " records : " + HumanTime.exactly(stop) + "\n");
190

    
191
			final int status = rsp.getStatus();
192
			context.getCounter("index feed", "status code: " + status).increment(buffer.size());
193

    
194
			if (status != 0) throw new SolrServerException("bad status: " + status);
195
		}
196
		buffer.clear();
197
	}
198

    
199
	@Override
200
	protected void cleanup(final Context context) throws IOException, InterruptedException {
201
		super.cleanup(context);
202
		try {
203
			if (!buffer.isEmpty()) {
204
				doAdd(buffer, context);
205
			}
206
			log.info("\nwaiting " + shutdownWaitTime + "ms before shutdown");
207
			Thread.sleep(shutdownWaitTime);
208
			solrServer.close();
209
		} catch (final SolrServerException e) {
210
			System.err.println("couldn't shutdown server " + e.getMessage());
211
		}
212
	}
213

    
214
	private void handleError(final ImmutableBytesWritable key, final String value, final Context context, final SolrInputDocument doc, final Throwable e)
215
			throws IOException, InterruptedException {
216
		context.getCounter("index feed", e.getClass().getName()).increment(1);
217
		context.write(new Text(key.copyBytes()), printRottenRecord(context.getTaskAttemptID().toString(), value, doc));
218
		// e.printStackTrace(System.err);
219
	}
220

    
221
	private Text printRottenRecord(final String taskid, final String value, final SolrInputDocument doc) {
222
		return new Text("\n**********************************\n" + "task: " + taskid + "\n"
223
				+ check("original", value.toString() + check("solrDoc", doc)));
224
	}
225

    
226
	private String check(final String label, final Object value) {
227
		if ((value != null) && !value.toString().isEmpty()) return "\n " + label + ":\n" + value + "\n";
228
		return "\n";
229
	}
230

    
231
	private void logConfiguration(final Configuration conf) {
232
		log.info("job configutation #################");
233
		for (final Entry<String, String> e : conf) {
234
			log.info("'" + e.getKey() + "' : '" + e.getValue() + "'");
235
		}
236
		log.info("end of job configutation #################\n\n");
237
	}
238

    
239
}