package eu.dnetlib.msro.workflows.nodes.hadoop;

import java.util.Set;

import eu.dnetlib.msro.workflows.graph.Arc;
import eu.dnetlib.msro.workflows.procs.Env;
import eu.dnetlib.rmi.data.hadoop.HadoopService;
import eu.dnetlib.rmi.manager.MSROException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class CreateHBaseTableJobNode extends AbstractHBaseAdminJobNode {
|
14
|
|
15
|
private static final Log log = LogFactory.getLog(CreateHBaseTableJobNode.class); // NOPMD by marko on 11/24/08 5:02 PM
|
16
|
|
17
|
private boolean reuseRegionInfo = false;
|
18
|
|
19
|
@Override
|
20
|
protected String execute(final Env env) throws Exception {
|
21
|
final Set<String> columns = getColumns(env);
|
22
|
|
23
|
log.info("Ensuring table " + getTableName() + " on cluster: '" + getCluster() + "' - columns: " + columns);
|
24
|
|
25
|
final HadoopService hadoopService = getServiceLocator().getService(HadoopService.class);
|
26
|
log.info(String.format("reuse region info: %s", isReuseRegionInfo()));
|
27
|
if (isReuseRegionInfo()) {
|
28
|
final String jsonConf = env.getAttribute(getTableConfigurationParamName(), String.class);
|
29
|
log.debug(String.format("region info: %s", jsonConf));
|
30
|
if (StringUtils.isBlank(jsonConf)) {
|
31
|
throw new MSROException("cannot find HBase table configuration in workflow env");
|
32
|
}
|
33
|
|
34
|
hadoopService.createConfiguredHbaseTable(getCluster(), getTableName(), jsonConf);
|
35
|
} else {
|
36
|
hadoopService.createHbaseTable(getCluster(), getTableName(), columns);
|
37
|
}
|
38
|
return Arc.DEFAULT_ARC;
|
39
|
}
|
40
|
|
41
|
public boolean isReuseRegionInfo() {
|
42
|
return reuseRegionInfo;
|
43
|
}
|
44
|
|
45
|
public void setReuseRegionInfo(final boolean reuseRegionInfo) {
|
46
|
this.reuseRegionInfo = reuseRegionInfo;
|
47
|
}
|
48
|
|
49
|
}