package eu.dnetlib.data.hadoop.hbase;

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.util.Bytes;

import eu.dnetlib.data.transform.Column;
import eu.dnetlib.data.transform.Row;
/**
|
15
|
* The Class HBaseDeleteFeeder performs a batch of Delete operations.
|
16
|
*/
|
17
|
public class HBaseDeleteFeeder extends HbaseTableFeeder {
|
18
|
|
19
|
/**
|
20
|
* Logger.
|
21
|
*/
|
22
|
private static final Log log = LogFactory.getLog(HBaseDeleteFeeder.class); // NOPMD by marko on 11/24/08 5:02 PM
|
23
|
|
24
|
/*
|
25
|
* (non-Javadoc)
|
26
|
*
|
27
|
* @see eu.dnetlib.data.hadoop.hbase.HbaseTableFeeder#addOperation(java.util.List, eu.dnetlib.data.transform.Row)
|
28
|
*/
|
29
|
@Override
|
30
|
protected void addOperation(final List<Mutation> buffer, final Row row) {
|
31
|
final Delete delete = new Delete(Bytes.toBytes(row.getKey()));
|
32
|
delete.setWriteToWAL(true);
|
33
|
|
34
|
for (final Column<String, byte[]> col : row) {
|
35
|
log.debug(String.format("deleting K: '%s' CF:'%s' Q:'%s'", row.getKey(), row.getColumnFamily(), col.getName()));
|
36
|
delete.deleteColumns(Bytes.toBytes(row.getColumnFamily()), Bytes.toBytes(col.getName()));
|
37
|
}
|
38
|
|
39
|
buffer.add(delete);
|
40
|
}
|
41
|
|
42
|
}
|