package eu.dnetlib.data.mapreduce.hbase.statsExport.utils;

import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

public class HdfsWriter {
|
16
|
|
17
|
|
18
|
public static void write(String data, String filename) throws Exception {
|
19
|
|
20
|
Configuration conf = new Configuration();
|
21
|
|
22
|
FileSystem hdfs = FileSystem.get(conf);
|
23
|
|
24
|
try {
|
25
|
|
26
|
|
27
|
Path exportPath = new Path(hdfs.getUri() + filename);
|
28
|
BufferedWriter br = new BufferedWriter(new OutputStreamWriter(hdfs.create(exportPath, false)));
|
29
|
// TO append data to a file, use fs.append(Path f)
|
30
|
|
31
|
br.write(data);
|
32
|
br.close();
|
33
|
|
34
|
} catch (Exception e) {
|
35
|
|
36
|
|
37
|
throw new Exception("Error while writing file ", e);
|
38
|
}
|
39
|
|
40
|
}
|
41
|
|
42
|
|
43
|
}
|