package eu.dnetlib.data.hadoop.action;

import java.io.StringReader;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import javax.annotation.Resource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.springframework.beans.factory.annotation.Autowired;

import com.google.common.collect.Sets;

import eu.dnetlib.data.hadoop.config.ClusterName;
import eu.dnetlib.data.hadoop.config.ConfigurationEnumerator;
import eu.dnetlib.data.hadoop.rmi.HadoopBlackboardActions;
import eu.dnetlib.data.hadoop.rmi.HadoopJobType;
import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;
import eu.dnetlib.data.hadoop.utils.JobProfile;
import eu.dnetlib.data.hadoop.utils.ScanFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpDocumentNotFoundException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.enabling.locators.UniqueServiceLocator;
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerAction;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler;
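
/**
 * Base class for the blackboard actions handled by the hadoop service: each action is
 * executed asynchronously, and job configuration profiles are read from the IS lookup
 * service and parsed into {@link JobProfile} instances.
 */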
public abstract class AbstractHadoopAction implements BlackboardServerAction<HadoopBlackboardActions> {

	private static final Log log = LogFactory.getLog(AbstractHadoopAction.class); // NOPMD by marko on 11/24/08 5:02 PM

	/**
	 * Special I/O HDFS property names for which we support relative paths.
	 */
	public final static Set<String> HDFS_SPECIAL_PROPERTIES = Sets.newHashSet("mapred.input.dir", "mapred.output.dir");

	@Resource
	protected UniqueServiceLocator serviceLocator;

	@Autowired
	protected ConfigurationEnumerator configurationEnumerator;
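
	// Cached thread pool used to run actions without blocking the blackboard caller.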
	private final Executor executor = Executors.newCachedThreadPool();
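
	/**
	 * Performs the action-specific work. Invoked on a worker thread by
	 * {@link #execute(BlackboardServerHandler, BlackboardJob)}; any exception thrown
	 * here is reported to the handler as a job failure.
	 */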
	protected abstract void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job) throws HadoopServiceException;
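
	/**
	 * Marks the job as ongoing, then runs {@link #executeAsync(BlackboardServerHandler, BlackboardJob)}
	 * asynchronously, notifying the handler when the job fails.
	 */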
	@Override
	public void execute(final BlackboardServerHandler handler, final BlackboardJob job) {
		executor.execute(new Runnable() {

			@Override
			public void run() {
				try {
					handler.ongoing(job);
					executeAsync(handler, job);
				} catch (Throwable e) {
					// log the full stack trace, which also carries the cause chain
					log.error("error executing hadoop action: " + e, e);
					handler.failed(job, e);
				}
			}
		});
	}
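
	/**
	 * Looks up the HadoopJobConfigurationDSResourceType profile registered under the
	 * given job name and parses it into a {@link JobProfile}.
	 */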
	protected JobProfile loadISJobConfiguration(final String jobName, final Map<String, String> bbParams) throws HadoopServiceException {

		log.info("reading job configuration profile: " + jobName);

		try {
			String profile = serviceLocator.getService(ISLookUpService.class).getResourceProfileByQuery(
					"/RESOURCE_PROFILE[.//RESOURCE_TYPE/@value = 'HadoopJobConfigurationDSResourceType' and .//HADOOP_JOB/@name='" + jobName + "']");
			return parseJobProfile(profile, bbParams);
		} catch (ISLookUpDocumentNotFoundException e) {
			throw new HadoopServiceException("cannot find job profile: " + jobName, e);
		} catch (ISLookUpException e) {
			throw new HadoopServiceException("unable to read job profile: " + jobName, e);
		}
	}
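
	/**
	 * Parses the XML profile, reading the job description, name, type, static
	 * configuration, required interface parameters and, when present, the SCAN section.
	 */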
	private JobProfile parseJobProfile(final String profile, final Map<String, String> bbParams) throws HadoopServiceException {
		JobProfile jobProfile = new JobProfile();
		try {
			Document doc = new SAXReader().read(new StringReader(profile));

			log.debug("setting job description");
			jobProfile.setDescription(doc.valueOf("//DESCRIPTION"));

			log.debug("setting job name");
			jobProfile.setName(doc.valueOf("//HADOOP_JOB/@name"));

			log.debug("setting job type");
			jobProfile.setJobType(HadoopJobType.valueOf(doc.valueOf("//HADOOP_JOB/@type")));

			log.debug("setting job static configuration");
			for (Object o : doc.selectNodes("//STATIC_CONFIGURATION/PROPERTY")) {
				Node node = (Node) o;
				jobProfile.getJobDefinition().put(node.valueOf("./@key"), node.valueOf("./@value"));
			}

			log.debug("setting job required parameters");
			for (Object o : doc.selectNodes("//JOB_INTERFACE/PARAM[./@required = 'true']")) {
				Node node = (Node) o;
				jobProfile.getRequiredParams().add(node.valueOf("./@name"));
			}

			if (doc.selectSingleNode("//SCAN") != null) {
				jobProfile.setScanProperties(ScanFactory.parseScanProperties(doc, bbParams));
			}

		} catch (DocumentException e) {
			throw new HadoopServiceException("cannot parse job profile", e);
		}

		if (jobProfile.isEmpty()) { throw new HadoopServiceException("job configuration is empty"); }

		return jobProfile;
	}
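
	/**
	 * Returns the Hadoop {@link Configuration} associated with the given cluster name.
	 */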
	protected Configuration getConf(final ClusterName clusterName) {
		return configurationEnumerator.get(clusterName);
	}
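
	/**
	 * Tells whether the entry is one of the special I/O properties holding a non-empty,
	 * relative (non hdfs://) path, i.e. a value that still needs to be made absolute.
	 */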
	protected boolean checkHdfsProperty(final Entry<String, String> e) {
		return HDFS_SPECIAL_PROPERTIES.contains(e.getKey()) && !e.getValue().isEmpty() && !e.getValue().startsWith("hdfs://");
	}

	public UniqueServiceLocator getServiceLocator() {
		return serviceLocator;
	}
}