package eu.dnetlib.data.hadoop.oozie;

import java.io.IOException;
import java.util.*;

import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.WorkflowJob.Status;

import eu.dnetlib.data.hadoop.action.JobCompletion;
import eu.dnetlib.data.hadoop.action.JobMonitor;
import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;

import static java.lang.String.format;

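/**
 * Monitors the execution of an Oozie workflow job: it polls the job status until completion and
 * notifies the registered {@link JobCompletion} callback with the job outcome.
 */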
public class OozieJobMonitor extends JobMonitor {

	private static final Log log = LogFactory.getLog(JobMonitor.class); // NOPMD by marko on 11/24/08 5:02 PM

	private final OozieClient oozieClient;

	private final String jobId;

	public static final String ACTION_TYPE_SUBWORKFLOW = "sub-workflow";

	private Set<String> workflowActions = Sets.newHashSet();

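	/**
	 * @deprecated use {@link #OozieJobMonitor(OozieClient, String, JobCompletion, Set)} to also declare the
	 *             workflow actions whose output should be reported to the callback.
	 */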
	@Deprecated
	public OozieJobMonitor(final OozieClient oozieClient, String jobId, final JobCompletion callback) {
		super(callback);
		this.oozieClient = oozieClient;
		this.jobId = jobId;
	}

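	/**
	 * Builds a monitor which, upon successful job completion, reports the output of the given workflow actions
	 * to the callback.
	 */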
	public OozieJobMonitor(final OozieClient oozieClient, String jobId, final JobCompletion callback, final Set<String> workflowActions) {
		super(callback);
		this.oozieClient = oozieClient;
		this.jobId = jobId;
		this.workflowActions = workflowActions;
	}

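	/**
	 * Polls the Oozie server until the job leaves the PREP/RUNNING states, then notifies the callback:
	 * on success with the report collected from the monitored workflow actions, on failure with the job log.
	 */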
	@Override
	public void run() {
		try {
			log.info("waiting for oozie job completion: " + getHadoopId());

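			// poll the job status, recording the time of the last observed state change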
			Status status = Status.PREP;
			while (status.equals(Status.PREP) || status.equals(Status.RUNNING)) {
				Thread.sleep(monitorSleepTimeSecs * 1000);

				try {
					final Status currentStatus = doGetStatus();

					if (!currentStatus.equals(status)) {
						status = currentStatus;
						lastActivity = new Date();
					}
				} catch (Throwable e) {
					log.warn(format("error polling status for job %s", jobId), e);
				}
			}

			// the job reached a terminal state: report success (with any collected action output) or failure
			log.debug(format("job %s finished with status: %s", jobId, status));
			if (Status.SUCCEEDED.equals(status)) {
				// TODO set some content to return to the blackboard msg.

				log.info(format("looking for oozie job(%s) output values: %s", getHadoopId(), workflowActions));
				final Properties report = getReport(getOozieClient(), getHadoopId(), workflowActions);
				if (report != null) {
					final Map<String, String> map = Maps.newHashMap();
					report.forEach((k, v) -> map.put(k.toString(), v.toString()));
					log.info("found oozie job report, size: " + map.size());
					getCallback().done(map);
				} else {
					log.warn("cannot find oozie job report!");
					getCallback().done(new HashMap<>());
				}
			} else {
				// TODO retrieve some failure information from the oozie client.
				String msg = format("hadoop job: %s failed with status: %s, oozie log:\n %s\n", getHadoopId(), getStatus(), getOozieClient().getJobLog(getHadoopId()));
				getCallback().failed(msg, new HadoopServiceException(msg));
			}
		} catch (Throwable e) {
			getCallback().failed(getHadoopId(), e);
		}
	}

	/**
	 * Provides the report entries found for the given Oozie job identifier, recursing into sub-workflow actions
	 * when needed. Returns null when no report is found.
	 */
	private static Properties getReport(final OozieClient oozieClient, final String oozieJobId, final Set<String> workflowActions) throws OozieClientException, IOException {
		WorkflowJob oozieJob = oozieClient.getJobInfo(oozieJobId);
		for (WorkflowAction currentAction : oozieJob.getActions()) {
			log.info(String.format("looking for workflow actions to report, current: '%s'", currentAction.getName()));
			if (workflowActions.contains(currentAction.getName())) {
				log.info(String.format("found workflow action %s", currentAction.getName()));
				if (ACTION_TYPE_SUBWORKFLOW.equals(currentAction.getType())) {
					// sub-workflow action: recurse into the child workflow identified by the external id
					log.info(String.format("looking for sub-workflow actions external id: %s", currentAction.getExternalId()));
					Properties subworkflowProperties = getReport(oozieClient, currentAction.getExternalId(), workflowActions);
					if (subworkflowProperties != null) {
						return subworkflowProperties;
					}
				} else if (StringUtils.isNotBlank(currentAction.getData())) {
					// regular action: parse the captured action data as java.util.Properties
					Properties properties = new Properties();
					properties.load(IOUtils.toInputStream(currentAction.getData()));
					log.info(String.format("found workflow action(%s) properties size %s", currentAction.getName(), properties.values().size()));
					return properties;
				}
			} else {
				log.info(String.format("cannot find workflow action(%s) properties", currentAction.getName()));
			}
		}
		return null;
	}

	@Override
	public String getHadoopId() {
		return jobId;
	}

	public OozieClient getOozieClient() {
		return oozieClient;
	}

	@Override
	public String getStatus() {
		try {
			return doGetStatus().toString();
		} catch (OozieClientException e) {
			log.error("error accessing job status", e);
			return "UNKNOWN";
		}
	}

	private Status doGetStatus() throws OozieClientException {
		return getOozieClient().getJobInfo(getHadoopId()).getStatus();
	}

	@Override
	public Date getLastActivity() {
		return lastActivity;
	}

	@Override
	public Date getStartTime() throws HadoopServiceException {
		try {
			return getOozieClient().getJobInfo(getHadoopId()).getStartTime();
		} catch (OozieClientException e) {
			throw new HadoopServiceException("unable to read job start time", e);
		}
	}

	@Override
	public String getTrackerUrl() {
		return getOozieClient().getOozieUrl();
	}

	@Override
	public void kill() {
		try {
			getOozieClient().kill(getHadoopId());
		} catch (OozieClientException e) {
			log.error("unable to kill job: " + getHadoopId(), e);
		}
	}

}