package eu.dnetlib.data.hadoop.action;

import java.io.StringReader;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import javax.annotation.Resource;

import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.springframework.beans.factory.annotation.Autowired;

import com.google.common.collect.Sets;

import eu.dnetlib.data.hadoop.config.ClusterName;
import eu.dnetlib.data.hadoop.config.ConfigurationEnumerator;
import eu.dnetlib.data.hadoop.rmi.HadoopBlackboardActions;
import eu.dnetlib.data.hadoop.rmi.HadoopJobType;
import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;
import eu.dnetlib.data.hadoop.utils.JobProfile;
import eu.dnetlib.data.hadoop.utils.ScanFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpDocumentNotFoundException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.enabling.locators.UniqueServiceLocator;
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerAction;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler;

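/**
 * Base class for blackboard actions executed against a Hadoop cluster: the
 * blackboard entry point {@link #execute} dispatches the actual work,
 * {@link #executeAsync}, to a shared thread pool and reports the ongoing /
 * failed state transitions to the {@link BlackboardServerHandler}.
 *
 * A minimal sketch of a concrete action (class name and body are illustrative,
 * not part of this module; it assumes the handler exposes a done(job) callback
 * alongside the ongoing/failed ones used below):
 *
 * <pre>
 * public class PingAction extends AbstractHadoopAction {
 *
 * 	&#64;Override
 * 	protected void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job)
 * 			throws HadoopServiceException {
 * 		// perform the cluster interaction, then signal completion
 * 		handler.done(job);
 * 	}
 * }
 * </pre>
 */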
public abstract class AbstractHadoopAction implements BlackboardServerAction<HadoopBlackboardActions> {

	private static final Log log = LogFactory.getLog(AbstractHadoopAction.class); // NOPMD by marko on 11/24/08 5:02 PM

	/**
	 * Special I/O hdfs property names for which we support relative paths.
	 */
	public final static Set<String> HDFS_SPECIAL_PROPERTIES = Sets.newHashSet("mapred.input.dir", "mapred.output.dir");

	@Resource
	protected UniqueServiceLocator serviceLocator;

	@Autowired
	protected ConfigurationEnumerator configurationEnumerator;

	private final Executor executor = Executors.newCachedThreadPool();

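	/**
	 * Performs the action's actual work; invoked on a worker thread by
	 * {@link #execute}, which reports any Throwable raised here to the
	 * blackboard as a job failure.
	 */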
	protected abstract void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job) throws HadoopServiceException;
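
	/**
	 * Marks the job as ongoing, then runs {@link #executeAsync} on the shared
	 * executor so that blackboard dispatching is not blocked by long-running
	 * Hadoop calls; failures are logged and reported back on the handler.
	 */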
	@Override
	public void execute(final BlackboardServerHandler handler, final BlackboardJob job) {
		executor.execute(() -> {
			try {
				handler.ongoing(job);
				executeAsync(handler, job);
			} catch (final Throwable e) {
				log.error(ExceptionUtils.getFullStackTrace(e));
				handler.failed(job, e);
			}
		});
	}
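
	/**
	 * Fetches the HadoopJobConfigurationDSResourceType profile registered in the
	 * Information Service under the given job name and parses it into a
	 * {@link JobProfile}.
	 */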
	protected JobProfile loadISJobConfiguration(final String jobName, final Map<String, String> bbParams) throws HadoopServiceException {

		log.info("reading job configuration profile: " + jobName);

		try {
			final String profile = serviceLocator.getService(ISLookUpService.class).getResourceProfileByQuery(
					"/RESOURCE_PROFILE[.//RESOURCE_TYPE/@value = 'HadoopJobConfigurationDSResourceType' and .//HADOOP_JOB/@name='" + jobName + "']");
			return parseJobProfile(profile, bbParams);
		} catch (final ISLookUpDocumentNotFoundException e) {
			throw new HadoopServiceException("cannot find job profile: " + jobName, e);
		} catch (final ISLookUpException e) {
			throw new HadoopServiceException("unable to read job profile: " + jobName, e);
		}
	}
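
	/**
	 * Maps a job configuration profile onto a {@link JobProfile} bean. The XML
	 * shape expected by the XPath expressions below looks roughly like this
	 * (wrapper elements and attribute values are illustrative):
	 *
	 * <pre>
	 * &lt;HADOOP_JOB name="exampleJob" type="mapreduce"&gt;
	 * 	&lt;DESCRIPTION&gt;...&lt;/DESCRIPTION&gt;
	 * 	&lt;STATIC_CONFIGURATION&gt;
	 * 		&lt;PROPERTY key="mapred.input.dir" value="/user/dnet/input" /&gt;
	 * 	&lt;/STATIC_CONFIGURATION&gt;
	 * 	&lt;JOB_INTERFACE&gt;
	 * 		&lt;PARAM name="cluster" required="true" /&gt;
	 * 	&lt;/JOB_INTERFACE&gt;
	 * 	&lt;SCAN&gt;...&lt;/SCAN&gt;  &lt;!-- optional --&gt;
	 * &lt;/HADOOP_JOB&gt;
	 * </pre>
	 */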
	private JobProfile parseJobProfile(final String profile, final Map<String, String> bbParams) throws HadoopServiceException {
		final JobProfile jobProfile = new JobProfile();
		try {
			final Document doc = new SAXReader().read(new StringReader(profile));

			log.debug("setting job description");
			jobProfile.setDescription(doc.valueOf("//DESCRIPTION"));

			log.debug("setting job name");
			jobProfile.setName(doc.valueOf("//HADOOP_JOB/@name"));

			log.debug("setting job type");
			jobProfile.setJobType(HadoopJobType.valueOf(doc.valueOf("//HADOOP_JOB/@type")));

			log.debug("setting job static configuration");
			for (final Object o : doc.selectNodes("//STATIC_CONFIGURATION/PROPERTY")) {
				final Node node = (Node) o;
				jobProfile.getJobDefinition().put(node.valueOf("./@key"), node.valueOf("./@value"));
			}

			log.debug("setting job required parameters");
			for (final Object o : doc.selectNodes("//JOB_INTERFACE/PARAM[./@required = 'true']")) {
				final Node node = (Node) o;
				jobProfile.getRequiredParams().add(node.valueOf("./@name"));
			}

			if (doc.selectSingleNode("//SCAN") != null) {
				jobProfile.setScanProperties(ScanFactory.parseScanProperties(doc, bbParams));
			}

		} catch (final DocumentException e) {
			throw new HadoopServiceException("cannot parse job profile", e);
		}

		if (jobProfile.isEmpty()) throw new HadoopServiceException("job configuration is empty");

		return jobProfile;
	}
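
	/**
	 * @return the Hadoop {@link Configuration} associated to the given cluster name.
	 */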
	protected Configuration getConf(final ClusterName clusterName) {
		return configurationEnumerator.get(clusterName);
	}
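
	/**
	 * Tells whether the given entry is one of the special I/O properties
	 * ({@link #HDFS_SPECIAL_PROPERTIES}) holding a non-empty relative path,
	 * i.e. a value not yet qualified with the hdfs:// scheme.
	 */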
	protected boolean checkHdfsProperty(final Entry<String, String> e) {
		return HDFS_SPECIAL_PROPERTIES.contains(e.getKey()) && !e.getValue().isEmpty() && !e.getValue().startsWith("hdfs://");
	}

	public UniqueServiceLocator getServiceLocator() {
		return serviceLocator;
	}

}