package eu.dnetlib.data.hadoop.blackboard;

import java.io.IOException;

import eu.dnetlib.enabling.resultset.client.ResultSetClient;
import eu.dnetlib.rmi.common.ResultSet;
import eu.dnetlib.rmi.data.hadoop.ClusterName;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.springframework.beans.factory.annotation.Autowired;

import eu.dnetlib.data.hadoop.config.ConfigurationEnumerator;
import eu.dnetlib.data.hadoop.hdfs.SequenceFileWriterFactory;
import org.springframework.beans.factory.annotation.Value;
public class SequenceFileFeeder {
|
22
|
|
23
|
private static final Log log = LogFactory.getLog(SequenceFileFeeder.class); // NOPMD by marko on 11/24/08 5:02 PM
|
24
|
|
25
|
@Autowired
|
26
|
protected ConfigurationEnumerator configurationEnumerator;
|
27
|
|
28
|
@Autowired
|
29
|
protected SequenceFileWriterFactory sequenceFileWriterFactory;
|
30
|
|
31
|
@Autowired
|
32
|
private ResultSetClient resultSetClient;
|
33
|
|
34
|
@Value("${services.hadoop.hdfs.writer.bulk.size}")
|
35
|
private int bulkSize;
|
36
|
|
37
|
public int feed(final ResultSet<String> resultSet, final ClusterName clusterName, final String path) throws IOException {
|
38
|
return doWrite(resultSet, clusterName, path);
|
39
|
}
|
40
|
|
41
|
private int doWrite(final ResultSet<String> resultSet, final ClusterName clusterName, final String path) throws IOException {
|
42
|
try(final SequenceFile.Writer writer = sequenceFileWriterFactory.getSequenceFileWriter(Text.class, Text.class, getConf(clusterName), new Path(path))) {
|
43
|
log.debug("Opened sequence file writer: " + writer.toString());
|
44
|
final Text idText = new Text();
|
45
|
final Text bodyText = new Text();
|
46
|
int count = 0;
|
47
|
int nulls = 0;
|
48
|
for (String record : resultSetClient.iter(resultSet, String.class)) {
|
49
|
if (StringUtils.isBlank(record)) {
|
50
|
nulls++;
|
51
|
} else {
|
52
|
idText.set(String.valueOf(count++));
|
53
|
bodyText.set(record);
|
54
|
writer.append(idText, bodyText);
|
55
|
if (count % bulkSize == 0) {
|
56
|
writer.hflush();
|
57
|
writer.hsync();
|
58
|
log.debug(String.format("%s records so far %s", writer.toString(), count));
|
59
|
}
|
60
|
}
|
61
|
}
|
62
|
log.info("written " + count + " records in sequence file: " + path);
|
63
|
if (nulls > 0) {
|
64
|
log.warn("found " + nulls + " records in epr!");
|
65
|
}
|
66
|
return count;
|
67
|
}
|
68
|
}
|
69
|
|
70
|
protected Configuration getConf(final ClusterName clusterName) {
|
71
|
return configurationEnumerator.get(clusterName);
|
72
|
}
|
73
|
|
74
|
}
|