package eu.dnetlib.msro.workflows.hadoop.hbase;

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.googlecode.sarasvati.Arc;
import com.googlecode.sarasvati.NodeToken;

import eu.dnetlib.data.hadoop.rmi.HadoopService;

public class GetHBaseTableDescriptionJobNode extends AbstractHBaseAdminJobNode {
|
14 |
|
|
|
15 |
|
|
private static final Log log = LogFactory.getLog(GetHBaseTableDescriptionJobNode.class); // NOPMD by marko on 11/24/08 5:02 PM
|
16 |
|
|
|
17 |
|
|
@Override
|
18 |
|
|
protected String execute(final NodeToken token) throws Exception {
|
19 |
|
|
final String tableName = tableName(token);
|
20 |
|
|
final String cluster = cluster(token);
|
21 |
|
|
|
22 |
|
|
log.info("getting table description: '" + tableName + "' on cluster: '" + cluster + "'");
|
23 |
|
|
|
24 |
|
|
final List<String> columns = getServiceLocator().getService(HadoopService.class).describeHbaseTable(cluster, tableName);
|
25 |
|
|
log.debug(String.format("table '%s': " + columns, tableName));
|
26 |
|
|
token.getEnv().setAttribute(getTableColumnsParamName(), asCSV(columns));
|
27 |
|
|
|
28 |
|
|
return Arc.DEFAULT_ARC;
|
29 |
|
|
}
|
30 |
|
|
|
31 |
|
|
}
|