Revision 57443 — added by Claudio Atzori over 4 years ago
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/DedupMarkDeletedEntityMapper.java | ||
---|---|---|
14 | 14 |
import eu.dnetlib.pace.config.DedupConfig; |
15 | 15 |
import org.apache.commons.logging.Log; |
16 | 16 |
import org.apache.commons.logging.LogFactory; |
17 |
import org.apache.hadoop.hbase.client.Durability; |
|
17 | 18 |
import org.apache.hadoop.hbase.client.Put; |
18 | 19 |
import org.apache.hadoop.hbase.client.Result; |
19 | 20 |
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
... | ... | |
68 | 69 |
oafRoot.getDataInfoBuilder().setDeletedbyinference(true).setInferred(true).setInferenceprovenance(dedupConf.getWf().getConfigurationId()); |
69 | 70 |
final byte[] family = Bytes.toBytes(type); |
70 | 71 |
final Put put = new Put(row).add(family, DedupUtils.BODY_B, oafRoot.build().toByteArray()); |
71 |
put.setWriteToWAL(JobParams.WRITE_TO_WAL);
|
|
72 |
put.setDurability(Durability.USE_DEFAULT);
|
|
72 | 73 |
context.write(new ImmutableBytesWritable(row), put); |
73 | 74 |
context.getCounter(type, "bodies marked deleted").increment(1); |
74 | 75 |
} |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/cc/ConnectedComponentsReducer.java | ||
---|---|---|
66 | 66 |
|
67 | 67 |
private void emitDedupRel(final Context context, final byte[] cf, final byte[] from, final byte[] to, final byte[] value) { |
68 | 68 |
final Put put = new Put(from).add(cf, to, value); |
69 |
put.setDurability(Durability.SKIP_WAL);
|
|
69 |
put.setDurability(Durability.USE_DEFAULT);
|
|
70 | 70 |
try { |
71 | 71 |
context.write(new ImmutableBytesWritable(from), put); |
72 | 72 |
} catch (IOException | InterruptedException e) { |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/DedupGrouperMapper.java | ||
---|---|---|
5 | 5 |
import java.util.List; |
6 | 6 |
|
7 | 7 |
import org.apache.hadoop.hbase.KeyValue; |
8 |
import org.apache.hadoop.hbase.client.Durability; |
|
8 | 9 |
import org.apache.hadoop.hbase.client.Put; |
9 | 10 |
import org.apache.hadoop.hbase.client.Result; |
10 | 11 |
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
... | ... | |
52 | 53 |
if (!Arrays.equals(nq, jq)) { |
53 | 54 |
|
54 | 55 |
final Put put = new Put(nq).add(family, jq, cellValue); |
55 |
put.setWriteToWAL(JobParams.WRITE_TO_WAL);
|
|
56 |
put.setDurability(Durability.USE_DEFAULT);
|
|
56 | 57 |
rowKey.set(nq); |
57 | 58 |
context.write(rowKey, put); |
58 | 59 |
|
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/SimpleDedupPersonReducer.java | ||
---|---|---|
19 | 19 |
import eu.dnetlib.data.transform.xml.AbstractDNetXsltFunctions; |
20 | 20 |
import eu.dnetlib.pace.config.DedupConfig; |
21 | 21 |
import org.apache.commons.lang.StringUtils; |
22 |
import org.apache.hadoop.hbase.client.Durability; |
|
22 | 23 |
import org.apache.hadoop.hbase.client.Put; |
23 | 24 |
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
24 | 25 |
import org.apache.hadoop.hbase.mapreduce.TableReducer; |
... | ... | |
122 | 123 |
InterruptedException { |
123 | 124 |
|
124 | 125 |
final Put put = new Put(OafRowKeyDecoder.decode(rowkey).getKey().getBytes()); |
125 |
put.setWriteToWAL(JobParams.WRITE_TO_WAL);
|
|
126 |
put.setDurability(Durability.USE_DEFAULT);
|
|
126 | 127 |
put.add(Bytes.toBytes(family), qualifier, value); |
127 | 128 |
|
128 | 129 |
context.write(new ImmutableBytesWritable(rowkey), put); |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/DedupBuildRootsReducer.java | ||
---|---|---|
118 | 118 |
private void emit(final String msg, final Context context, final byte[] rowkey, final String family, final String qualifier, final byte[] value, final String label) { |
119 | 119 |
|
120 | 120 |
final Put put = new Put(rowkey).add(Bytes.toBytes(family), Bytes.toBytes(qualifier), value); |
121 |
put.setDurability(Durability.SKIP_WAL);
|
|
121 |
put.setDurability(Durability.USE_DEFAULT);
|
|
122 | 122 |
|
123 | 123 |
try { |
124 | 124 |
context.write(new ImmutableBytesWritable(rowkey), put); |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/FindDedupCandidatePersonsReducer.java | ||
---|---|---|
8 | 8 |
import java.util.Set; |
9 | 9 |
|
10 | 10 |
import eu.dnetlib.data.mapreduce.JobParams; |
11 |
import org.apache.hadoop.hbase.client.Durability; |
|
11 | 12 |
import org.apache.hadoop.hbase.client.Put; |
12 | 13 |
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
13 | 14 |
import org.apache.hadoop.hbase.mapreduce.TableReducer; |
... | ... | |
105 | 106 |
for (final String s : resultIds.get(personId)) { |
106 | 107 |
final byte[] id = Bytes.toBytes(s); |
107 | 108 |
final Put put = new Put(id).add(cf, col, val); |
108 |
put.setWriteToWAL(JobParams.WRITE_TO_WAL);
|
|
109 |
put.setDurability(Durability.USE_DEFAULT);
|
|
109 | 110 |
context.write(new ImmutableBytesWritable(id), put); |
110 | 111 |
} |
111 | 112 |
context.getCounter(getClass().getSimpleName(), "N. Put. (results)").increment(resultIds.get(personId).size()); |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/DedupDeleteRelMapper.java | ||
---|---|---|
8 | 8 |
import eu.dnetlib.data.proto.TypeProtos.Type; |
9 | 9 |
import eu.dnetlib.pace.config.DedupConfig; |
10 | 10 |
import org.apache.hadoop.hbase.client.Delete; |
11 |
import org.apache.hadoop.hbase.client.Durability; |
|
11 | 12 |
import org.apache.hadoop.hbase.client.Result; |
12 | 13 |
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
13 | 14 |
import org.apache.hadoop.hbase.mapreduce.TableMapper; |
... | ... | |
47 | 48 |
|
48 | 49 |
final byte[] row = rowkey.copyBytes(); |
49 | 50 |
final Delete delete = new Delete(row); |
50 |
delete.setWriteToWAL(JobParams.WRITE_TO_WAL);
|
|
51 |
delete.setDurability(Durability.USE_DEFAULT);
|
|
51 | 52 |
|
52 | 53 |
delete.deleteFamily(cf); |
53 | 54 |
|
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dedup/DedupFindRootsMapper.java | ||
---|---|---|
14 | 14 |
import eu.dnetlib.data.proto.TypeProtos.Type; |
15 | 15 |
import eu.dnetlib.data.transform.xml.AbstractDNetXsltFunctions; |
16 | 16 |
import eu.dnetlib.pace.config.DedupConfig; |
17 |
import org.apache.hadoop.hbase.client.Durability; |
|
17 | 18 |
import org.apache.hadoop.hbase.client.Put; |
18 | 19 |
import org.apache.hadoop.hbase.client.Result; |
19 | 20 |
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
... | ... | |
83 | 84 |
oafRoot.getDataInfoBuilder().setDeletedbyinference(true).setInferred(true).setInferenceprovenance(dedupConf.getWf().getConfigurationId()); |
84 | 85 |
final byte[] family = Bytes.toBytes(dedupConf.getWf().getEntityType()); |
85 | 86 |
final Put put = new Put(row).add(family, DedupUtils.BODY_B, oafRoot.build().toByteArray()); |
86 |
put.setWriteToWAL(JobParams.WRITE_TO_WAL);
|
|
87 |
put.setDurability(Durability.USE_DEFAULT);
|
|
87 | 88 |
context.write(new ImmutableBytesWritable(row), put); |
88 | 89 |
context.getCounter(dedupConf.getWf().getEntityType(), "bodies marked deleted").increment(1); |
89 | 90 |
} |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/actions2/PromoteActionSetFromHDFSMapper.java | ||
---|---|---|
4 | 4 |
|
5 | 5 |
import eu.dnetlib.actionmanager.actions.AtomicAction; |
6 | 6 |
import eu.dnetlib.data.mapreduce.JobParams; |
7 |
import org.apache.hadoop.hbase.client.Durability; |
|
7 | 8 |
import org.apache.hadoop.hbase.client.Put; |
8 | 9 |
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
9 | 10 |
import org.apache.hadoop.hbase.util.Bytes; |
... | ... | |
31 | 32 |
final byte[] keyOutBytes = Bytes.toBytes(aa.getTargetRowKey()); |
32 | 33 |
keyOut.set(keyOutBytes); |
33 | 34 |
final Put put = new Put(keyOutBytes); |
34 |
put.setWriteToWAL(JobParams.WRITE_TO_WAL);
|
|
35 |
put.setDurability(Durability.USE_DEFAULT);
|
|
35 | 36 |
put.add(Bytes.toBytes(aa.getTargetColumnFamily()), Bytes.toBytes(aa.getTargetColumn()), aa.getTargetValue()); |
36 | 37 |
|
37 | 38 |
context.write(keyOut, put); |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/hbase/dataimport/ImportRecordsMapper.java | ||
---|---|---|
60 | 60 |
|
61 | 61 |
final byte[] rowKey = Bytes.toBytes(row.getKey()); |
62 | 62 |
final Put put = new Put(rowKey); |
63 |
put.setDurability(Durability.SKIP_WAL);
|
|
63 |
put.setDurability(Durability.USE_DEFAULT);
|
|
64 | 64 |
|
65 | 65 |
for (final Column<String, byte[]> col : row) { |
66 | 66 |
final byte[] family = Bytes.toBytes(row.getColumnFamily()); |
modules/dnet-mapreduce-jobs/trunk/src/main/java/eu/dnetlib/data/mapreduce/util/OafHbaseUtils.java | ||
---|---|---|
182 | 182 |
|
183 | 183 |
private static Put getPut(final String rowkey) { |
184 | 184 |
final Put put = new Put(Bytes.toBytes(rowkey)); |
185 |
put.setDurability(Durability.SKIP_WAL);
|
|
185 |
put.setDurability(Durability.USE_DEFAULT);
|
|
186 | 186 |
return put; |
187 | 187 |
} |
188 | 188 |
|
Also available in: Unified diff
put operations: WAL sync (switched Put/Delete durability from SKIP_WAL / setWriteToWAL(...) to Durability.USE_DEFAULT)