Revision 34286
Added by Claudio Atzori over 9 years ago

added simulation mode
ImportEprHbaseAction.java

@@ -1,17 +1,17 @@
 package eu.dnetlib.data.hadoop.action;
 
-import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.springframework.beans.factory.annotation.Required;
 
 import eu.dnetlib.data.hadoop.config.ClusterName;
+import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;
 import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
 import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler;
 import eu.dnetlib.miscutils.functional.xml.DnetXsltFunctions;
 
-import java.io.IOException;
-
 public class ImportEprHbaseAction extends AbstractHadoopAction {
 
 	private static final Log log = LogFactory.getLog(ImportEprHbaseAction.class); // NOPMD by marko on 11/24/08 5:02 PM
@@ -21,25 +21,26 @@
 	@Override
 	public void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job) throws HadoopServiceException {
 
 		final String epr = DnetXsltFunctions.decodeBase64(job.getParameters().get("input_epr"));
 		final String xsl = DnetXsltFunctions.decodeBase64(job.getParameters().get("xslt"));
 		final ClusterName clusterName = ClusterName.valueOf(job.getParameters().get("cluster"));
 		final String table = job.getParameters().get("table");
+		final boolean simulation = Boolean.valueOf(job.getParameters().get("simulation"));
 
 		log.info("Starting import in table " + table);
 
 		try {
-			Integer count = getHbaseTableFeeder().feed(epr, xsl, clusterName, table);
+			final Integer count = getHbaseTableFeeder().feed(epr, xsl, clusterName, table, simulation);
 
 			if (count != null) {
 				log.info("Import completed successfully");
 				job.getParameters().put("count", String.valueOf(count));
 			}
 
 			handler.done(job);
-		} catch (IOException e) {
+		} catch (final IOException e) {
 			throw new HadoopServiceException("Import failed", e);
 		}
 	}
 
 	public HbaseTableFeeder getHbaseTableFeeder() {
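With this revision, executeAsync() reads a fifth blackboard parameter, "simulation", alongside "input_epr", "xslt", "cluster" and "table". A minimal sketch of how a caller might populate those parameters follows; the helper class is hypothetical, and only the parameter names and the map-style getParameters() accessor come from the diff above.

import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;

/**
 * Hypothetical helper, for illustration only: shows how a caller could populate
 * the parameters that ImportEprHbaseAction.executeAsync() reads. The parameter
 * names are taken from the diff; this class itself is not part of the revision.
 */
public class ImportJobParamsExample {

	public static void setImportParams(final BlackboardJob job,
			final String base64Epr, final String base64Xslt,
			final String cluster, final String table, final boolean simulation) {
		// assumes getParameters() exposes a String-to-String map, as used in the action
		job.getParameters().put("input_epr", base64Epr);   // EPR of the records to import, Base64-encoded
		job.getParameters().put("xslt", base64Xslt);       // mapping XSLT, Base64-encoded
		job.getParameters().put("cluster", cluster);       // must match a ClusterName enum constant
		job.getParameters().put("table", table);           // target HBase table
		job.getParameters().put("simulation", String.valueOf(simulation)); // flag introduced in this revision
	}
}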
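The flag is then passed straight through to HbaseTableFeeder.feed(...), whose body is not part of this revision. The usual shape of such a dry-run switch is that records are still produced and counted but the writes are skipped. The sketch below illustrates that pattern under the assumption that the feeder ultimately issues HBase Puts; it is not the real HbaseTableFeeder.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;

/**
 * Illustrative sketch only: shows how a "simulation" (dry-run) flag typically
 * gates the HBase writes while keeping the record count available to the caller.
 */
public class SimulatedFeedSketch {

	public int feed(final Configuration conf, final String tableName,
			final List<Put> puts, final boolean simulation) throws IOException {
		if (simulation) {
			// dry run: report what would have been written, touch nothing
			return puts.size();
		}
		final HTable table = new HTable(conf, tableName); // HBase 0.9x-era client API
		try {
			table.put(puts);
			return puts.size();
		} finally {
			table.close();
		}
	}
}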