
Example 11 with PutBuilder

Use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

From the class HBaseMessageTable, method persist:

@Override
protected void persist(Iterator<RawMessageTableEntry> entries) throws IOException {
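    // Build one Put per entry; all puts are applied below as a single batched mutation.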
    List<Put> batchPuts = new ArrayList<>();
    while (entries.hasNext()) {
        RawMessageTableEntry entry = entries.next();
        PutBuilder putBuilder = tableUtil.buildPut(rowKeyDistributor.getDistributedKey(entry.getKey().getRowKey()));
        if (entry.getTxPtr() != null) {
            putBuilder.add(columnFamily, TX_COL, entry.getTxPtr());
        }
        if (entry.getPayload() != null) {
            putBuilder.add(columnFamily, PAYLOAD_COL, entry.getPayload());
        }
        batchPuts.add(putBuilder.build());
    }
    try {
        if (!batchPuts.isEmpty()) {
            mutator.mutate(batchPuts);
            mutator.flush();
        }
    } catch (IOException e) {
        throw exceptionHandler.handle(e);
    }
}
Also used: PutBuilder (io.cdap.cdap.data2.util.hbase.PutBuilder), ArrayList (java.util.ArrayList), RawMessageTableEntry (io.cdap.cdap.messaging.store.RawMessageTableEntry), IOException (java.io.IOException), Put (org.apache.hadoop.hbase.client.Put)
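
Note that persist never writes a raw message key directly: each key passes through rowKeyDistributor.getDistributedKey, which salts it so that monotonically increasing message keys spread across regions. As a rough illustration of the idea, here is a minimal, hypothetical salting distributor; it is a simplified stand-in, not CDAP's actual implementation:

import java.util.Arrays;

// Hypothetical stand-in for a row-key distributor: prefixes a one-byte
// salt derived from the key's hash so that sequential keys land in
// different regions of the key space.
public final class SaltedKeyDistributor {

    private final int buckets;

    public SaltedKeyDistributor(int buckets) {
        this.buckets = buckets;
    }

    // Prepends a salt byte in [0, buckets) computed from the key hash.
    public byte[] getDistributedKey(byte[] originalKey) {
        byte salt = (byte) Math.floorMod(Arrays.hashCode(originalKey), buckets);
        byte[] salted = new byte[originalKey.length + 1];
        salted[0] = salt;
        System.arraycopy(originalKey, 0, salted, 1, originalKey.length);
        return salted;
    }

    // Strips the salt byte to recover the original key, e.g. after a scan.
    public byte[] getOriginalKey(byte[] saltedKey) {
        return Arrays.copyOfRange(saltedKey, 1, saltedKey.length);
    }
}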

Example 12 with PutBuilder

Use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

From the class HBaseMessageTable, method rollback:

@Override
public void rollback(RollbackRequest rollbackRequest) throws IOException {
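    // Scan the rollback key range and overwrite TX_COL on every affected row with the rollback write pointer.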
    Scan scan = tableUtil.buildScan().setStartRow(rollbackRequest.getStartRow()).setStopRow(rollbackRequest.getStopRow()).setCaching(scanCacheRows).build();
    List<Put> batchPuts = new ArrayList<>();
    try (ResultScanner scanner = DistributedScanner.create(table, scan, rowKeyDistributor, scanExecutor)) {
        for (Result result : scanner) {
            // No need to turn the key back to the original row key because we want to put with the actual row key
            PutBuilder putBuilder = tableUtil.buildPut(result.getRow());
            putBuilder.add(columnFamily, TX_COL, rollbackRequest.getTxWritePointer());
            batchPuts.add(putBuilder.build());
        }
    }
    try {
        if (!batchPuts.isEmpty()) {
            mutator.mutate(batchPuts);
            mutator.flush();
        }
    } catch (IOException e) {
        throw exceptionHandler.handle(e);
    }
}
Also used: PutBuilder (io.cdap.cdap.data2.util.hbase.PutBuilder), ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), ArrayList (java.util.ArrayList), Scan (org.apache.hadoop.hbase.client.Scan), IOException (java.io.IOException), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result)
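
DistributedScanner is the read-side counterpart of that salting: one logical scan fans out into one HBase scan per salt bucket, and the per-bucket results are combined, which is why the code above can reuse result.getRow() directly as the already-salted row key for the Put. The sketch below shows only the fan-out idea under that assumption; the real DistributedScanner additionally merge-sorts the bucket scanners on an executor:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

// Hypothetical fan-out scan over a salted key space: one scan per bucket.
final class FanOutScan {

    static List<Result> scanAllBuckets(Table table, byte[] startRow, byte[] stopRow, int buckets) throws IOException {
        List<Result> results = new ArrayList<>();
        for (int bucket = 0; bucket < buckets; bucket++) {
            // Prefix the logical start/stop rows with this bucket's salt byte.
            Scan scan = new Scan();
            scan.setStartRow(salt(bucket, startRow));
            scan.setStopRow(salt(bucket, stopRow));
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    results.add(result);
                }
            }
        }
        return results;
    }

    private static byte[] salt(int bucket, byte[] key) {
        byte[] salted = new byte[key.length + 1];
        salted[0] = (byte) bucket;
        System.arraycopy(key, 0, salted, 1, key.length);
        return salted;
    }
}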

Example 13 with PutBuilder

Use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

From the class HBaseMetricsTable, method put:

@Override
public void put(SortedMap<byte[], ? extends SortedMap<byte[], Long>> updates) {
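    // Convert each row's column-to-value map into a single Put; all puts are flushed as one batch.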
    List<Put> puts = Lists.newArrayList();
    for (Map.Entry<byte[], ? extends SortedMap<byte[], Long>> row : updates.entrySet()) {
        byte[] distributedKey = createDistributedRowKey(row.getKey());
        PutBuilder put = tableUtil.buildPut(distributedKey);
        for (Map.Entry<byte[], Long> column : row.getValue().entrySet()) {
            put.add(columnFamily, column.getKey(), Bytes.toBytes(column.getValue()));
        }
        puts.add(put.build());
    }
    try {
        mutator.mutate(puts);
        mutator.flush();
    } catch (IOException e) {
        throw new DataSetException("Put failed on table " + tableId, e);
    }
}
Also used: PutBuilder (io.cdap.cdap.data2.util.hbase.PutBuilder), DataSetException (io.cdap.cdap.api.dataset.DataSetException), IOException (java.io.IOException), Map (java.util.Map), NavigableMap (java.util.NavigableMap), SortedMap (java.util.SortedMap), Put (org.apache.hadoop.hbase.client.Put)
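
A caller-side note on this signature: the maps are keyed by byte[], so they must be built with a byte-array comparator, since a plain TreeMap cannot compare arrays. A short usage sketch follows; the row and column names are illustrative, not taken from CDAP:

import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.hadoop.hbase.util.Bytes;

public class MetricsPutExample {
    public static void main(String[] args) {
        // Bytes.BYTES_COMPARATOR orders byte[] lexicographically; without it,
        // TreeMap would throw ClassCastException on byte[] keys.
        SortedMap<byte[], SortedMap<byte[], Long>> updates = new TreeMap<>(Bytes.BYTES_COMPARATOR);

        SortedMap<byte[], Long> columns = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        columns.put(Bytes.toBytes("count"), 42L);   // illustrative column and value
        columns.put(Bytes.toBytes("sum"), 1000L);

        updates.put(Bytes.toBytes("metric.row.1"), columns);  // illustrative row key
        // metricsTable.put(updates);  // then hand the map to HBaseMetricsTable.put
    }
}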

Example 14 with PutBuilder

Use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by cdapio.

From the class HBaseMetricsTable, method put. The code and its imports are identical to Example 13 above; only the hosting organization of the fork differs (cdapio vs. caskdata).

Example 15 with PutBuilder

Use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by cdapio.

From the class HBaseTable, method persist:

@Override
protected void persist(NavigableMap<byte[], NavigableMap<byte[], Update>> updates) throws Exception {
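    // Translate in-memory updates into HBase mutations: plain Puts, increment-carrying Puts,
    // or native Increments, depending on transaction mode and readless-increment support.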
    if (updates.isEmpty()) {
        return;
    }
    byte[] txId = tx == null ? null : Bytes.toBytes(tx.getTransactionId());
    byte[] txWritePointer = tx == null ? null : Bytes.toBytes(tx.getWritePointer());
    List<Mutation> mutations = new ArrayList<>();
    List<Increment> increments = new ArrayList<>();
    for (Map.Entry<byte[], NavigableMap<byte[], Update>> row : updates.entrySet()) {
        // create these only when they are needed
        PutBuilder put = null;
        PutBuilder incrementPut = null;
        IncrementBuilder increment = null;
        for (Map.Entry<byte[], Update> column : row.getValue().entrySet()) {
            // we need to support both tx and non-tx modes
            if (tx != null) {
                // TODO: hijacking timestamp... bad
                Update val = column.getValue();
                if (val instanceof IncrementValue) {
                    if (safeReadlessIncrements) {
                        increment = getIncrement(increment, row.getKey(), txId, txWritePointer);
                        increment.add(columnFamily, column.getKey(), tx.getWritePointer(), ((IncrementValue) val).getValue());
                    } else {
                        incrementPut = getPutForIncrement(incrementPut, row.getKey(), txId);
                        incrementPut.add(columnFamily, column.getKey(), tx.getWritePointer(), Bytes.toBytes(((IncrementValue) val).getValue()));
                    }
                } else if (val instanceof PutValue) {
                    put = getPut(put, row.getKey(), txId);
                    put.add(columnFamily, column.getKey(), tx.getWritePointer(), wrapDeleteIfNeeded(((PutValue) val).getValue()));
                }
            } else {
                Update val = column.getValue();
                if (val instanceof IncrementValue) {
                    incrementPut = getPutForIncrement(incrementPut, row.getKey(), txId);
                    incrementPut.add(columnFamily, column.getKey(), Bytes.toBytes(((IncrementValue) val).getValue()));
                } else if (val instanceof PutValue) {
                    put = getPut(put, row.getKey(), txId);
                    put.add(columnFamily, column.getKey(), ((PutValue) val).getValue());
                }
            }
        }
        if (incrementPut != null) {
            mutations.add(incrementPut.build());
        }
        if (increment != null) {
            increments.add(increment.build());
        }
        if (put != null) {
            mutations.add(put.build());
        }
    }
    if (!hbaseFlush(mutations) && increments.isEmpty()) {
        LOG.info("No writes to persist!");
    }
    if (!increments.isEmpty()) {
        table.batch(increments, new Object[increments.size()]);
    }
}
Also used: NavigableMap (java.util.NavigableMap), ArrayList (java.util.ArrayList), Update (io.cdap.cdap.data2.dataset2.lib.table.Update), IncrementValue (io.cdap.cdap.data2.dataset2.lib.table.IncrementValue), PutValue (io.cdap.cdap.data2.dataset2.lib.table.PutValue), PutBuilder (io.cdap.cdap.data2.util.hbase.PutBuilder), IncrementBuilder (io.cdap.cdap.data2.util.hbase.IncrementBuilder), Increment (org.apache.hadoop.hbase.client.Increment), Mutation (org.apache.hadoop.hbase.client.Mutation), Map (java.util.Map)
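
Two details are worth calling out. First, the safeReadlessIncrements branch chooses how an increment is shipped: as a native HBase Increment, batched separately at the end, or folded into an ordinary Put. Second, the getPut, getPutForIncrement, and getIncrement helpers create a builder lazily, only once a row actually needs one, so a row containing only increments never allocates a plain Put. Below is a hedged sketch of that lazy-builder pattern; the real CDAP helpers also attach transaction metadata to the operation, which is omitted here:

// Hypothetical sketch of the lazy-builder helper used by persist():
// create the builder on first use, then reuse it for later columns of
// the same row. Assumes tableUtil.buildPut(byte[]) as in the examples;
// the txId handling in the real helper is CDAP-internal.
private PutBuilder getPut(PutBuilder existing, byte[] rowKey, byte[] txId) {
    if (existing != null) {
        return existing;
    }
    // First put-style value for this row: allocate the builder now.
    // (The real implementation presumably also records txId on the Put,
    // e.g. as an operation attribute for server-side coprocessors.)
    return tableUtil.buildPut(rowKey);
}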

Aggregations

IOException (java.io.IOException): 15
PutBuilder (io.cdap.cdap.data2.util.hbase.PutBuilder): 12
Put (org.apache.hadoop.hbase.client.Put): 12
ArrayList (java.util.ArrayList): 9
Map (java.util.Map): 9
NavigableMap (java.util.NavigableMap): 9
PutBuilder (co.cask.cdap.data2.util.hbase.PutBuilder): 6
SortedMap (java.util.SortedMap): 6
Result (org.apache.hadoop.hbase.client.Result): 6
DataSetException (io.cdap.cdap.api.dataset.DataSetException): 4
TreeMap (java.util.TreeMap): 3
Get (org.apache.hadoop.hbase.client.Get): 3
Mutation (org.apache.hadoop.hbase.client.Mutation): 3
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 3
Scan (org.apache.hadoop.hbase.client.Scan): 3
DataSetException (co.cask.cdap.api.dataset.DataSetException): 2
TopicAlreadyExistsException (io.cdap.cdap.api.messaging.TopicAlreadyExistsException): 2
IncrementValue (io.cdap.cdap.data2.dataset2.lib.table.IncrementValue): 2
PutValue (io.cdap.cdap.data2.dataset2.lib.table.PutValue): 2
Update (io.cdap.cdap.data2.dataset2.lib.table.Update): 2