package eu.dnetlib.msro.workflows.hadoop.hbase;

import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.googlecode.sarasvati.Arc;
import com.googlecode.sarasvati.NodeToken;

import eu.dnetlib.data.hadoop.rmi.HadoopService;
import eu.dnetlib.msro.rmi.MSROException;
public class CreateHBaseTableJobNode extends AbstractHBaseAdminJobNode {
|
14
|
|
15
|
private static final Log log = LogFactory.getLog(CreateHBaseTableJobNode.class); // NOPMD by marko on 11/24/08 5:02 PM
|
16
|
|
17
|
private boolean reuseRegionInfo = false;
|
18
|
|
19
|
@Override
|
20
|
protected String execute(final NodeToken token) throws Exception {
|
21
|
final Set<String> columns = getColumns(token);
|
22
|
final String tableName = tableName(token);
|
23
|
final String cluster = cluster(token);
|
24
|
|
25
|
log.info("Ensuring table " + tableName + " on cluster: '" + cluster + "' - columns: " + columns);
|
26
|
|
27
|
if (isReuseRegionInfo()) {
|
28
|
String jsonConf = token.getEnv().getAttribute(getTableConfigurationParamName());
|
29
|
if (StringUtils.isBlank(jsonConf)) {
|
30
|
throw new MSROException("cannot find HBase table configuration in workflow env");
|
31
|
}
|
32
|
|
33
|
getServiceLocator().getService(HadoopService.class).createConfiguredHbaseTable(cluster, tableName, jsonConf);
|
34
|
} else {
|
35
|
getServiceLocator().getService(HadoopService.class).createHbaseTable(cluster, tableName, columns);
|
36
|
}
|
37
|
return Arc.DEFAULT_ARC;
|
38
|
}
|
39
|
|
40
|
public boolean isReuseRegionInfo() {
|
41
|
return reuseRegionInfo;
|
42
|
}
|
43
|
|
44
|
public void setReuseRegionInfo(final boolean reuseRegionInfo) {
|
45
|
this.reuseRegionInfo = reuseRegionInfo;
|
46
|
}
|
47
|
}
|