Revision 33106

cleanup, more logging

View differences:

ImportEprHdfsAction.java

@@ -1,17 +1,18 @@
 package eu.dnetlib.data.hadoop.action;
 
-import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;
+import java.io.IOException;
+
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.springframework.beans.factory.annotation.Required;
 
 import eu.dnetlib.data.hadoop.config.ClusterName;
+import eu.dnetlib.data.hadoop.rmi.HadoopServiceException;
 import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
 import eu.dnetlib.enabling.tools.blackboard.BlackboardServerHandler;
 import eu.dnetlib.miscutils.functional.xml.DnetXsltFunctions;
 
-import java.io.IOException;
-
 public class ImportEprHdfsAction extends AbstractHadoopAction {
 
 	private static final Log log = LogFactory.getLog(ImportEprHdfsAction.class); // NOPMD by marko on 11/24/08 5:02 PM
@@ -21,21 +22,20 @@
 	@Override
 	public void executeAsync(final BlackboardServerHandler handler, final BlackboardJob job) throws HadoopServiceException {
 
-        final String epr = DnetXsltFunctions.decodeBase64(job.getParameters().get("input_epr"));
-        final ClusterName clusterName = ClusterName.valueOf(job.getParameters().get("cluster"));
-        final String path = job.getParameters().get("path");
+		final String epr = DnetXsltFunctions.decodeBase64(job.getParameters().get("input_epr"));
+		final ClusterName clusterName = ClusterName.valueOf(job.getParameters().get("cluster"));
+		final String path = job.getParameters().get("path");
 
-        log.info("Starting import in hdfs sequence file: " + path);
-
-        try {
+		log.info(String.format("Starting import in hdfs sequence file '%s', cluster '%s', epr '%s...", path, clusterName.toString(),
+				StringUtils.substring(epr, 0, 20)));
+		try {
 			Integer count = getSequenceFileFeeder().feed(epr, clusterName, path);
+			if (count != null) {
+				log.info("Import completed successfully");
+				job.getParameters().put("count", String.valueOf(count));
+			}
 
-            if (count != null) {
-                log.info("Import completed successfully");
-                job.getParameters().put("count", String.valueOf(count));
-            }
-
-            handler.done(job);
+			handler.done(job);
 		} catch (IOException e) {
 			throw new HadoopServiceException("Import failed", e);
 		}
@@ -46,7 +46,7 @@
 	}
 
 	@Required
-	public void setSequenceFileFeeder(SequenceFileFeeder sequenceFileFeeder) {
+	public void setSequenceFileFeeder(final SequenceFileFeeder sequenceFileFeeder) {
 		this.sequenceFileFeeder = sequenceFileFeeder;
 	}
 
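The new org.apache.commons.lang.StringUtils import exists to truncate the base64-encoded EPR to its first 20 characters in the revised log statement. A minimal, self-contained sketch of that formatting behavior, assuming commons-lang 2.x on the classpath; the class name and sample values are hypothetical, for illustration only:

import org.apache.commons.lang.StringUtils;

public class LogFormatSketch {

	public static void main(final String[] args) {
		final String path = "/tmp/import/records.seq";       // hypothetical HDFS path
		final String clusterName = "DM";                     // hypothetical cluster name
		final String epr = "PD94bWwgdmVyc2lvbj0iMS4wIj8+";   // hypothetical base64 EPR

		// Same pattern as the revised log.info(...) call. Unlike String.substring,
		// StringUtils.substring is null-safe and tolerates inputs shorter than 20
		// characters, so the log line cannot throw even if 'input_epr' is missing.
		System.out.println(String.format("Starting import in hdfs sequence file '%s', cluster '%s', epr '%s...",
				path, clusterName, StringUtils.substring(epr, 0, 20)));
	}
}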