Project

General

Profile

« Previous | Next » 

Revision 39575

Added new RMI methods: deleteHdfsPath, createHdfsDirectory
Added blackboard action: DELETE_HDFS_PATH

View differences:

DeleteHdfsPathAction.java
1 1
package eu.dnetlib.data.hadoop.action;
2 2

  
3
import java.io.IOException;
4
import java.net.URI;
5

  
6
import org.apache.commons.lang.StringUtils;
7
import org.apache.commons.logging.Log;
8
import org.apache.commons.logging.LogFactory;
9
import org.apache.hadoop.conf.Configuration;
10
import org.apache.hadoop.fs.FileSystem;
11
import org.apache.hadoop.fs.Path;
12

  
3
import eu.dnetlib.data.hadoop.HadoopServiceCore;
13 4
import eu.dnetlib.data.hadoop.config.ClusterName;
14 5
import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;
15 6
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
16 7
import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler;
8
import org.apache.commons.logging.Log;
9
import org.apache.commons.logging.LogFactory;
10
import org.springframework.beans.factory.annotation.Autowired;
17 11

  
18 12
/**
19 13
 * Delete a path on hdfs.
20 14
 *
21
 * Params - cluster: cluster name, path: the path to delete recursively
15
 * Params - cluster: cluster name, path: the path to deleteFromHBase recursively
22 16
 *
23 17
 * @author claudio
24 18
 *
......
30 24
	 */
31 25
	private static final Log log = LogFactory.getLog(DeleteHdfsPathAction.class); // NOPMD by marko on 11/24/08 5:02 PM
32 26

  
27
	/**
28
	 * The hadoop service core.
29
	 */
30
	@Autowired
31
	private HadoopServiceCore hadoopServiceCore;
32

  
33 33
	@Override
34 34
	protected void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job) throws HadoopServiceException {
35
		try {
36
			final ClusterName clusterName = ClusterName.valueOf(job.getParameters().get("cluster"));
37
			log.info("delete request on cluster: " + clusterName.toString());
38
			delete(job.getParameters().get("path"), configurationEnumerator.get(clusterName));
35
		final ClusterName clusterName = ClusterName.valueOf(job.getParameters().get("cluster"));
36
		log.info("deleteFromHBase request on cluster: " + clusterName.toString());
39 37

  
40
			handler.done(job);
41
		} catch (final IOException e) {
42
			throw new HadoopServiceException(e);
43
		}
44
	}
38
		final String path = job.getParameters().get("path");
39
		hadoopServiceCore.deleteFromHdfs(clusterName, path);
45 40

  
46
	private void delete(final String path, final Configuration conf) throws IOException {
47
		if (StringUtils.isBlank(path))
48
			throw new IOException("Cannot delete an empty HDFS path.");
49
		final FileSystem hdfs = FileSystem.get(conf);
50
		final Path absolutePath = new Path(URI.create(conf.get("fs.defaultFS") + path));
51

  
52
		if (hdfs.exists(absolutePath)) {
53
			log.debug("delete path: " + absolutePath.toString());
54
			hdfs.delete(absolutePath, true);
55
			log.info("deleted path: " + absolutePath.toString());
56
		} else {
57
			log.warn("cannot delete unexisting path: " + absolutePath.toString());
58
		}
41
		handler.done(job);
59 42
	}
60 43

  
61 44
}

Also available in: Unified diff