package eu.dnetlib.data.hadoop.blackboard;

import java.io.StringReader;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import javax.annotation.Resource;

import com.google.common.collect.Sets;

import eu.dnetlib.data.hadoop.config.ConfigurationEnumerator;
import eu.dnetlib.data.hadoop.utils.JobProfile;
import eu.dnetlib.data.hadoop.utils.ScanFactory;
import eu.dnetlib.enabling.locators.UniqueServiceLocator;
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerAction;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler;
import eu.dnetlib.rmi.data.hadoop.ClusterName;
import eu.dnetlib.rmi.data.hadoop.HadoopBlackboardActions;
import eu.dnetlib.rmi.data.hadoop.HadoopJobType;
import eu.dnetlib.rmi.data.hadoop.HadoopServiceException;
import eu.dnetlib.rmi.enabling.ISLookUpDocumentNotFoundException;
import eu.dnetlib.rmi.enabling.ISLookUpException;
import eu.dnetlib.rmi.enabling.ISLookUpService;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.springframework.beans.factory.annotation.Autowired;
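
/**
 * Base class for Hadoop blackboard actions: runs each action asynchronously and
 * loads Hadoop job configuration profiles from the IS lookup service.
 */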
public abstract class AbstractHadoopAction implements BlackboardServerAction<HadoopBlackboardActions> {

	private static final Log log = LogFactory.getLog(AbstractHadoopAction.class); // NOPMD by marko on 11/24/08 5:02 PM

	/**
	 * Special I/O HDFS property names for which we support relative paths.
	 */
	public static final Set<String> HDFS_SPECIAL_PROPERTIES = Sets.newHashSet("mapred.input.dir", "mapred.output.dir");

	@Resource
	protected UniqueServiceLocator serviceLocator;

	@Autowired
	protected ConfigurationEnumerator configurationEnumerator;

	@Autowired
	private ScanFactory scanFactory;

	/** Runs each action asynchronously, so the blackboard caller is never blocked. */
	private final Executor executor = Executors.newCachedThreadPool();

	/**
	 * Performs the actual action logic; implemented by subclasses and invoked
	 * asynchronously by {@link #execute(BlackboardServerHandler, BlackboardJob)}.
	 */
	protected abstract void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job) throws HadoopServiceException;

	@Override
	public void execute(final BlackboardServerHandler handler, final BlackboardJob job) {
		executor.execute(() -> {
			try {
				handler.ongoing(job);
				executeAsync(handler, job);
			} catch (final Throwable e) {
				log.error(ExceptionUtils.getStackTrace(e));
				handler.failed(job, e);
			}
		});
	}
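
	// A minimal sketch (hypothetical, for illustration only) of how a concrete
	// subclass plugs into the template method above; the class name, the
	// "job.name" parameter, and the getParameters() accessor are assumptions:
	//
	//   public class SubmitHadoopJobAction extends AbstractHadoopAction {
	//
	//       @Override
	//       protected void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job)
	//               throws HadoopServiceException {
	//           // resolve the job profile registered in the IS, then run it
	//           final JobProfile profile = loadISJobConfiguration(job.getParameters().get("job.name"), job.getParameters());
	//           // ... submit the job to the cluster, then signal completion on the handler ...
	//       }
	//   }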

	/**
	 * Loads the job configuration from the HadoopJobConfigurationDSResourceType profile registered in the IS.
	 */
	protected JobProfile loadISJobConfiguration(final String jobName, final Map<String, String> bbParams) throws HadoopServiceException {

		log.info("reading job configuration profile: " + jobName);

		try {
			final String profile = serviceLocator.getService(ISLookUpService.class).getResourceProfileByQuery(
					"/RESOURCE_PROFILE[.//RESOURCE_TYPE/@value = 'HadoopJobConfigurationDSResourceType' and .//HADOOP_JOB/@name='" + jobName + "']");
			return parseJobProfile(profile, bbParams);
		} catch (final ISLookUpDocumentNotFoundException e) {
			throw new HadoopServiceException("cannot find job profile: " + jobName, e);
		} catch (final ISLookUpException e) {
			throw new HadoopServiceException("unable to read job profile: " + jobName, e);
		}
	}

	private JobProfile parseJobProfile(final String profile, final Map<String, String> bbParams) throws HadoopServiceException {
		final JobProfile jobProfile = new JobProfile();
		try {
			final Document doc = new SAXReader().read(new StringReader(profile));

			log.debug("setting job description");
			jobProfile.setDescription(doc.valueOf("//DESCRIPTION"));

			log.debug("setting job name");
			jobProfile.setName(doc.valueOf("//HADOOP_JOB/@name"));

			log.debug("setting job type");
			jobProfile.setJobType(HadoopJobType.valueOf(doc.valueOf("//HADOOP_JOB/@type")));

			log.debug("setting job static configuration");
			for (final Object o : doc.selectNodes("//STATIC_CONFIGURATION/PROPERTY")) {
				final Node node = (Node) o;
				jobProfile.getJobDefinition().put(node.valueOf("./@key"), node.valueOf("./@value"));
			}

			log.debug("setting job required parameters");
			for (final Object o : doc.selectNodes("//JOB_INTERFACE/PARAM[./@required = 'true']")) {
				final Node node = (Node) o;
				jobProfile.getRequiredParams().add(node.valueOf("./@name"));
			}

			if (doc.selectSingleNode("//SCAN") != null) {
				jobProfile.setScanProperties(scanFactory.parseScanProperties(doc, bbParams));
			}

		} catch (final DocumentException e) {
			throw new HadoopServiceException("cannot parse job profile", e);
		}

		if (jobProfile.isEmpty()) throw new HadoopServiceException("job configuration is empty");

		return jobProfile;
	}
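
	// For reference, the XPath expressions above imply a job profile shaped roughly
	// like the sketch below (reconstructed from this method alone; the attribute
	// names match the code, but the surrounding schema may differ in practice):
	//
	//   <RESOURCE_PROFILE>
	//     <HADOOP_JOB name="..." type="...">
	//       <DESCRIPTION>...</DESCRIPTION>
	//       <STATIC_CONFIGURATION>
	//         <PROPERTY key="mapred.input.dir" value="..."/>
	//       </STATIC_CONFIGURATION>
	//       <JOB_INTERFACE>
	//         <PARAM name="..." required="true"/>
	//       </JOB_INTERFACE>
	//       <SCAN>...</SCAN>  <!-- optional; delegated to ScanFactory -->
	//     </HADOOP_JOB>
	//   </RESOURCE_PROFILE>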

	protected Configuration getConf(final ClusterName clusterName) {
		return configurationEnumerator.get(clusterName);
	}

	/**
	 * Tells whether the given entry is a special HDFS I/O property holding a non-empty
	 * relative path, i.e. one not already qualified with the hdfs:// scheme.
	 */
	protected boolean checkHdfsProperty(final Entry<String, String> e) {
		return HDFS_SPECIAL_PROPERTIES.contains(e.getKey()) && !e.getValue().isEmpty() && !e.getValue().startsWith("hdfs://");
	}
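
	// Illustrative examples (not from the original source), following directly from
	// the boolean expression above; checkHdfsProperty returns:
	//   true  for "mapred.input.dir"   -> "/user/dnet/input"      (special key, relative path)
	//   false for "mapred.input.dir"   -> "hdfs://nn:8020/input"  (already fully qualified)
	//   false for "mapred.output.dir"  -> ""                      (empty value)
	//   false for "mapreduce.job.name" -> "myJob"                 (not a special I/O key)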

	public UniqueServiceLocator getServiceLocator() {
		return serviceLocator;
	}

}