/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.omid.transaction;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Table;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

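/**
 * HBase-specific {@link AbstractTransaction} keyed by {@link HBaseCellId}.
 * <p>
 * The transaction tracks the cells it has written (regular and conflict-free write sets);
 * {@link #cleanup()} removes those tentative cells on rollback, and {@link #flushTables()}
 * flushes the client-side buffers of every written table. Instances are typically created
 * by the transaction manager rather than directly by client code.
 */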
public class HBaseTransaction extends AbstractTransaction<HBaseCellId> {

    private static final Logger LOG = LoggerFactory.getLogger(HBaseTransaction.class);
    static final int MAX_DELETE_BATCH_SIZE = 1000;

    public HBaseTransaction(long transactionId, long epoch, Set<HBaseCellId> writeSet,
                            Set<HBaseCellId> conflictFreeWriteSet, AbstractTransactionManager tm, boolean isLowLatency) {
        super(transactionId, epoch, writeSet, conflictFreeWriteSet, tm, isLowLatency);
    }

    public HBaseTransaction(long transactionId, long epoch, Set<HBaseCellId> writeSet,
                            Set<HBaseCellId> conflictFreeWriteSet, AbstractTransactionManager tm,
                            long readTimestamp, long writeTimestamp, boolean isLowLatency) {
        super(transactionId, epoch, writeSet, conflictFreeWriteSet, tm, readTimestamp, writeTimestamp, isLowLatency);
    }

    public HBaseTransaction(long transactionId, long readTimestamp, VisibilityLevel visibilityLevel, long epoch,
                            Set<HBaseCellId> writeSet, Set<HBaseCellId> conflictFreeWriteSet,
                            AbstractTransactionManager tm, boolean isLowLatency) {
        super(transactionId, readTimestamp, visibilityLevel, epoch, writeSet, conflictFreeWriteSet, tm, isLowLatency);
    }

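    /**
     * Sends the buffered mutations for a single table to HBase in one batch call.
     */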
    private void flushMutations(Table table, List<Mutation> mutations) throws IOException, InterruptedException {
        table.batch(mutations, new Object[mutations.size()]);
    }

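    /**
     * Buffers a {@link Delete} for the tentative version of the given cell and flushes the
     * per-table batch once it exceeds {@link #MAX_DELETE_BATCH_SIZE}.
     */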
    private void deleteCell(HBaseCellId cell, Map<Table, List<Mutation>> mutations) throws IOException, InterruptedException {
        Delete delete = new Delete(cell.getRow());
        delete.addColumn(cell.getFamily(), cell.getQualifier(), cell.getTimestamp());

        Table table = cell.getTable().getHTable();
        List<Mutation> tableMutations = mutations.get(table);
        if (tableMutations == null) {
            ArrayList<Mutation> newList = new ArrayList<>();
            newList.add(delete);
            mutations.put(table, newList);
        } else {
            tableMutations.add(delete);
            if (tableMutations.size() > MAX_DELETE_BATCH_SIZE) {
                flushMutations(table, tableMutations);
                mutations.remove(table);
            }
        }
    }

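    /**
     * Best-effort rollback: deletes the tentative cells of both the write set and the
     * conflict-free write set. Any failure is logged and otherwise ignored.
     */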
    @Override
    public void cleanup() {
        Map<Table, List<Mutation>> mutations = new HashMap<>();

        try {
            for (final HBaseCellId cell : getWriteSet()) {
                deleteCell(cell, mutations);
            }

            for (final HBaseCellId cell : getConflictFreeWriteSet()) {
                deleteCell(cell, mutations);
            }

            for (Map.Entry<Table, List<Mutation>> entry : mutations.entrySet()) {
                flushMutations(entry.getKey(), entry.getValue());
            }
        } catch (InterruptedException | IOException e) {
            LOG.warn("Failed cleanup for Tx {}. This issue has been ignored", getTransactionId(), e);
        }
    }

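    /**
     * Flushes the pending client-side writes of every table written by this transaction.
     */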
    public void flushTables() throws IOException {
        for (TTable writtenTable : getWrittenTables()) {
            writtenTable.flushCommits();
        }
    }

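    /**
     * Returns the distinct set of tables referenced by the write set and the
     * conflict-free write set.
     */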
    private Set<TTable> getWrittenTables() {
        Set<TTable> tables = new HashSet<>();
        for (HBaseCellId cell : getWriteSet()) {
            tables.add(cell.getTable());
        }
        for (HBaseCellId cell : getConflictFreeWriteSet()) {
            tables.add(cell.getTable());
        }
        return tables;
    }

}