package eu.dnetlib.data.hadoop.blackboard;

import java.io.StringReader;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import javax.annotation.Resource;

import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.springframework.beans.factory.annotation.Autowired;

import com.google.common.collect.Sets;

import eu.dnetlib.data.hadoop.config.ClusterName;
import eu.dnetlib.data.hadoop.config.ConfigurationEnumerator;
import eu.dnetlib.data.hadoop.utils.JobProfile;
import eu.dnetlib.data.hadoop.utils.ScanFactory;
import eu.dnetlib.enabling.locators.UniqueServiceLocator;
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerAction;
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler;
import eu.dnetlib.rmi.data.hadoop.HadoopBlackboardActions;
import eu.dnetlib.rmi.data.hadoop.HadoopJobType;
import eu.dnetlib.rmi.data.hadoop.HadoopServiceException;
import eu.dnetlib.rmi.enabling.ISLookUpDocumentNotFoundException;
import eu.dnetlib.rmi.enabling.ISLookUpException;
import eu.dnetlib.rmi.enabling.ISLookUpService;

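/**
 * Base class for blackboard actions targeting the Hadoop infrastructure. Each action is
 * executed asynchronously on a cached thread pool, with progress and failures reported
 * back to the {@link BlackboardServerHandler}; helpers are provided to load and parse
 * HADOOP_JOB configuration profiles from the IS lookup service.
 */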
public abstract class AbstractHadoopAction implements BlackboardServerAction<HadoopBlackboardActions> {
	private static final Log log = LogFactory.getLog(AbstractHadoopAction.class); // NOPMD by marko on 11/24/08 5:02 PM

	/**
	 * Special I/O HDFS property names for which we support relative paths.
	 */
	public static final Set<String> HDFS_SPECIAL_PROPERTIES = Sets.newHashSet("mapred.input.dir", "mapred.output.dir");

	@Resource
	protected UniqueServiceLocator serviceLocator;

	@Autowired
	protected ConfigurationEnumerator configurationEnumerator;

	private final Executor executor = Executors.newCachedThreadPool();
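	/**
	 * Carries out the action-specific logic; called on a pooled worker thread by
	 * {@link #execute(BlackboardServerHandler, BlackboardJob)}.
	 */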
	protected abstract void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job) throws HadoopServiceException;
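	/**
	 * Marks the job as ongoing, then runs {@link #executeAsync(BlackboardServerHandler, BlackboardJob)}
	 * asynchronously; any {@link Throwable} is logged and reported back as a job failure.
	 */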
	@Override
	public void execute(final BlackboardServerHandler handler, final BlackboardJob job) {
		executor.execute(new Runnable() {

			@Override
			public void run() {
				try {
					handler.ongoing(job);
					executeAsync(handler, job);
				} catch (final Throwable e) {
					log.error(ExceptionUtils.getFullStackTrace(e));
					handler.failed(job, e);
				}
			}
		});
	}
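	/**
	 * Loads the HADOOP_JOB configuration profile registered under the given name in the
	 * information service. Note that {@code jobName} is spliced directly into the XQuery.
	 *
	 * @param jobName
	 *            the HADOOP_JOB profile name
	 * @param bbParams
	 *            blackboard parameters, forwarded to the scan property parsing
	 * @return the parsed {@link JobProfile}
	 * @throws HadoopServiceException
	 *             when the profile cannot be found or read
	 */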
	protected JobProfile loadISJobConfiguration(final String jobName, final Map<String, String> bbParams) throws HadoopServiceException {

		log.info("reading job configuration profile: " + jobName);

		try {
			final String profile = serviceLocator.getService(ISLookUpService.class).getResourceProfileByQuery(
					"/RESOURCE_PROFILE[.//RESOURCE_TYPE/@value = 'HadoopJobConfigurationDSResourceType' and .//HADOOP_JOB/@name='" + jobName + "']");
			return parseJobProfile(profile, bbParams);
		} catch (final ISLookUpDocumentNotFoundException e) {
			throw new HadoopServiceException("cannot find job profile: " + jobName, e);
		} catch (final ISLookUpException e) {
			throw new HadoopServiceException("unable to read job profile: " + jobName, e);
		}
	}
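	/**
	 * Parses an XML job configuration profile into a {@link JobProfile}. A sketch of the
	 * profile shape, as inferred from the XPath expressions used below (element nesting may
	 * differ in the actual resource profiles):
	 *
	 * <pre>{@code
	 * <HADOOP_JOB name="..." type="...">
	 *   <DESCRIPTION>...</DESCRIPTION>
	 *   <STATIC_CONFIGURATION>
	 *     <PROPERTY key="..." value="..."/>
	 *   </STATIC_CONFIGURATION>
	 *   <JOB_INTERFACE>
	 *     <PARAM name="..." required="true"/>
	 *   </JOB_INTERFACE>
	 *   <SCAN>...</SCAN>  <!-- optional -->
	 * </HADOOP_JOB>
	 * }</pre>
	 */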
	private JobProfile parseJobProfile(final String profile, final Map<String, String> bbParams) throws HadoopServiceException {
		final JobProfile jobProfile = new JobProfile();
		try {
			final Document doc = new SAXReader().read(new StringReader(profile));

			log.debug("setting job description");
			jobProfile.setDescription(doc.valueOf("//DESCRIPTION"));

			log.debug("setting job name");
			jobProfile.setName(doc.valueOf("//HADOOP_JOB/@name"));

			log.debug("setting job type");
			jobProfile.setJobType(HadoopJobType.valueOf(doc.valueOf("//HADOOP_JOB/@type")));

			log.debug("setting job static configuration");
			for (final Object o : doc.selectNodes("//STATIC_CONFIGURATION/PROPERTY")) {
				final Node node = (Node) o;
				jobProfile.getJobDefinition().put(node.valueOf("./@key"), node.valueOf("./@value"));
			}

			log.debug("setting job required parameters");
			for (final Object o : doc.selectNodes("//JOB_INTERFACE/PARAM[./@required = 'true']")) {
				final Node node = (Node) o;
				jobProfile.getRequiredParams().add(node.valueOf("./@name"));
			}

			if (doc.selectSingleNode("//SCAN") != null) {
				jobProfile.setScanProperties(ScanFactory.parseScanProperties(doc, bbParams));
			}

		} catch (final DocumentException e) {
			throw new HadoopServiceException("cannot parse job profile", e);
		}

		if (jobProfile.isEmpty()) throw new HadoopServiceException("job configuration is empty");

		return jobProfile;
	}
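	/**
	 * Returns the Hadoop {@link Configuration} associated to the given cluster.
	 */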
	protected Configuration getConf(final ClusterName clusterName) {
		return configurationEnumerator.get(clusterName);
	}
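	/**
	 * Tells whether the given property is one of the special I/O path properties, is
	 * non-empty, and holds a relative path (i.e. one not already starting with "hdfs://").
	 */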
	protected boolean checkHdfsProperty(final Entry<String, String> e) {
		return HDFS_SPECIAL_PROPERTIES.contains(e.getKey()) && !e.getValue().isEmpty() && !e.getValue().startsWith("hdfs://");
	}
	public UniqueServiceLocator getServiceLocator() {
		return serviceLocator;
	}
}