package eu.dnetlib.data.mapreduce.hbase.dataexport;

import java.util.Properties;

import org.apache.hadoop.hbase.mapreduce.CopyTable;
import org.apache.hadoop.mapreduce.Job;

import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

import eu.dnetlib.data.mapreduce.hbase.AbstractHBaseMapReduceJob;

/**
 * HBaseCopyTableJob is a simple wrapper over org.apache.hadoop.hbase.mapreduce.CopyTable that makes it invokable from a
 * D-Net workflow, passing the required parameters.
 *
 * Copies the given table to the remote HBase instance.
 *
 * @author claudio
 */
public class HBaseCopyTableJob extends AbstractHBaseMapReduceJob {

	@Override
	protected Job setJobDetails(Job job, Properties p) throws Exception {

		// Address of the target cluster, in the form expected by CopyTable's --peer.adr switch.
		String remoteCluster = "--peer.adr=" + p.getProperty("peername");
		// Name of the table to copy.
		String tablename = p.getProperty("tablename");

		String[] args = Iterables.toArray(Lists.newArrayList(remoteCluster, tablename), String.class);

		// Delegate to CopyTable, which parses the args and configures the actual MapReduce job.
		return CopyTable.createSubmittableJob(job.getConfiguration(), args);
	}

}
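
/*
 * Minimal usage sketch (illustrative, not part of the class above). Assuming the
 * D-Net workflow supplies the "peername" and "tablename" properties, the job
 * builds the CopyTable argument list as follows; the property values shown here
 * are hypothetical.
 *
 *   Properties p = new Properties();
 *   p.setProperty("peername", "remote-zk-host:2181:/hbase"); // hypothetical quorum:port:znode peer address
 *   p.setProperty("tablename", "db_openaire");               // hypothetical table name
 *
 *   // Resulting args handed to CopyTable.createSubmittableJob:
 *   // { "--peer.adr=remote-zk-host:2181:/hbase", "db_openaire" }
 */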