package eu.dnetlib.data.mapreduce;

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

/**
 * Generic MapReduce job launcher.
 *
 * <p>Reads a Java {@link Properties} file whose path is given as the first
 * command-line argument, copies every property into a Hadoop
 * {@link Configuration}, and submits a {@link Job} built from that
 * configuration. The job name is taken from the {@code job.name} property
 * (falling back to {@code "unknown"}). The JVM exit code reflects job
 * success: 0 on success, 1 on failure.
 */
public class JobDriver {

	private static final Log log = LogFactory.getLog(JobDriver.class);

	/**
	 * Entry point.
	 *
	 * @param args {@code args[0]} must be the path of the properties file
	 *             describing the job configuration
	 * @throws Exception if the properties file cannot be read or the job
	 *                   submission fails
	 */
	public static void main(final String[] args) throws Exception {

		// Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 1) {
			System.err.println("Usage: JobDriver <job.properties path>");
			System.exit(1);
		}

		final Configuration conf = new Configuration();
		final String path = args[0];

		final Properties p = new Properties();
		// try-with-resources: the original leaked the FileInputStream.
		try (final InputStream in = new FileInputStream(path)) {
			p.load(in);
		}

		log.info(String.format("loaded %s properties from %s", p.size(), path));

		// Propagate every property into the Hadoop job configuration.
		for (final String name : p.stringPropertyNames()) {
			conf.set(name, p.getProperty(name));
		}

		final Job job = Job.getInstance(conf, conf.get("job.name", "unknown"));

		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
|