Search in sources:

Example 1 with PutBuilder

use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

In class HBaseMetadataTable, method createTopic:

@Override
public void createTopic(TopicMetadata topicMetadata) throws TopicAlreadyExistsException, IOException {
    // Creates (or re-creates) the metadata row for a topic using an HBase
    // compare-and-swap (checkAndPut) retry loop so that concurrent creators
    // cannot silently overwrite each other.
    TopicId topicId = topicMetadata.getTopicId();
    byte[] rowKey = MessagingUtils.toMetadataRowKey(topicId);
    PutBuilder putBuilder = tableUtil.buildPut(rowKey);
    Get get = tableUtil.buildGet(rowKey).addFamily(columnFamily).build();
    // NOTE(review): putBuilder is created once and reused across retry
    // iterations, so a failed CAS adds another cell for the same column to
    // the same Put — presumably the latest add wins on write, but worth
    // confirming against PutBuilder/Put semantics.
    try {
        boolean completed = false;
        // Loop until a checkAndPut succeeds; a failed CAS means another
        // writer changed the row between our read and our write.
        while (!completed) {
            Result result = hTable.get(get);
            byte[] value = result.getValue(columnFamily, COL);
            if (value == null) {
                // No metadata row yet: write the initial properties with the
                // default generation, expecting the cell to still be absent
                // (checkAndPut with expected value == null).
                TreeMap<String, String> properties = new TreeMap<>(topicMetadata.getProperties());
                properties.put(TopicMetadata.GENERATION_KEY, MessagingUtils.Constants.DEFAULT_GENERATION);
                putBuilder.add(columnFamily, COL, Bytes.toBytes(GSON.toJson(properties, MAP_TYPE)));
                completed = hTable.checkAndPut(rowKey, columnFamily, COL, null, putBuilder.build());
            } else {
                // Row exists: if the topic is live, creation must fail.
                Map<String, String> properties = GSON.fromJson(Bytes.toString(value), MAP_TYPE);
                TopicMetadata metadata = new TopicMetadata(topicId, properties);
                if (metadata.exists()) {
                    throw new TopicAlreadyExistsException(topicId.getNamespace(), topicId.getTopic());
                }
                // A non-existing (deleted) topic stores a non-positive
                // generation; re-creation bumps it to the next positive
                // generation: -g -> g + 1.
                int newGenerationId = (metadata.getGeneration() * -1) + 1;
                TreeMap<String, String> newProperties = new TreeMap<>(properties);
                newProperties.put(TopicMetadata.GENERATION_KEY, Integer.toString(newGenerationId));
                putBuilder.add(columnFamily, COL, Bytes.toBytes(GSON.toJson(newProperties, MAP_TYPE)));
                // CAS against the exact bytes read above; retries on mismatch.
                completed = hTable.checkAndPut(rowKey, columnFamily, COL, value, putBuilder.build());
            }
        }
    } catch (IOException e) {
        // Translate low-level HBase IO failures via the shared handler.
        throw exceptionHandler.handle(e);
    }
}
Also used : IOException(java.io.IOException) TreeMap(java.util.TreeMap) TopicAlreadyExistsException(co.cask.cdap.api.messaging.TopicAlreadyExistsException) Result(org.apache.hadoop.hbase.client.Result) TopicMetadata(co.cask.cdap.messaging.TopicMetadata) PutBuilder(co.cask.cdap.data2.util.hbase.PutBuilder) Get(org.apache.hadoop.hbase.client.Get) TopicId(co.cask.cdap.proto.id.TopicId)

Example 2 with PutBuilder

use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

In class HBaseMessageTable, method rollback:

@Override
public void rollback(byte[] startKey, byte[] stopKey, byte[] txWritePtr) throws IOException {
    // Overwrite the transaction column of every row in [startKey, stopKey)
    // with the supplied write pointer, marking those entries rolled back.
    Scan scan = tableUtil.buildScan()
        .setStartRow(startKey)
        .setStopRow(stopKey)
        .setCaching(scanCacheRows)
        .build();
    List<Put> rollbackPuts = new ArrayList<>();
    try (ResultScanner scanner = DistributedScanner.create(hTable, scan, rowKeyDistributor, scanExecutor)) {
        for (Result row : scanner) {
            // Deliberately keep the distributed (salted) row key returned by
            // the scanner: the Put must target the physical row as stored.
            PutBuilder builder = tableUtil.buildPut(row.getRow());
            builder.add(columnFamily, TX_COL, txWritePtr);
            rollbackPuts.add(builder.build());
        }
    }
    if (rollbackPuts.isEmpty()) {
        // Nothing matched the scan range; no write needed.
        return;
    }
    try {
        hTable.put(rollbackPuts);
        if (!hTable.isAutoFlush()) {
            hTable.flushCommits();
        }
    } catch (IOException e) {
        throw exceptionHandler.handle(e);
    }
}
Also used : PutBuilder(co.cask.cdap.data2.util.hbase.PutBuilder) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) IOException(java.io.IOException) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result)

Example 3 with PutBuilder

use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

In class HBaseMetricsTable, method put:

@Override
public void put(SortedMap<byte[], ? extends SortedMap<byte[], Long>> updates) {
    // Convert each (row -> column -> long value) mapping into an HBase Put,
    // then write the whole batch in a single round trip and flush.
    List<Put> batch = Lists.newArrayList();
    for (Map.Entry<byte[], ? extends SortedMap<byte[], Long>> rowEntry : updates.entrySet()) {
        byte[] saltedKey = createDistributedRowKey(rowEntry.getKey());
        PutBuilder builder = tableUtil.buildPut(saltedKey);
        for (Map.Entry<byte[], Long> col : rowEntry.getValue().entrySet()) {
            // Long values are stored as their 8-byte big-endian encoding.
            builder.add(columnFamily, col.getKey(), Bytes.toBytes(col.getValue()));
        }
        batch.add(builder.build());
    }
    try {
        hTable.put(batch);
        hTable.flushCommits();
    } catch (IOException e) {
        throw new DataSetException("Put failed on table " + tableId, e);
    }
}
Also used : PutBuilder(co.cask.cdap.data2.util.hbase.PutBuilder) DataSetException(co.cask.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Map(java.util.Map) NavigableMap(java.util.NavigableMap) SortedMap(java.util.SortedMap) Put(org.apache.hadoop.hbase.client.Put)

Example 4 with PutBuilder

use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

In class HBaseMetricsTable, method putBytes:

@Override
public void putBytes(SortedMap<byte[], ? extends SortedMap<byte[], byte[]>> updates) {
    // Same shape as put(), but values are raw byte arrays and are written
    // verbatim without any encoding step.
    List<Put> batch = Lists.newArrayList();
    for (Map.Entry<byte[], ? extends SortedMap<byte[], byte[]>> rowEntry : updates.entrySet()) {
        byte[] saltedKey = createDistributedRowKey(rowEntry.getKey());
        PutBuilder builder = tableUtil.buildPut(saltedKey);
        for (Map.Entry<byte[], byte[]> col : rowEntry.getValue().entrySet()) {
            builder.add(columnFamily, col.getKey(), col.getValue());
        }
        batch.add(builder.build());
    }
    try {
        hTable.put(batch);
        hTable.flushCommits();
    } catch (IOException e) {
        throw new DataSetException("Put failed on table " + tableId, e);
    }
}
Also used : PutBuilder(co.cask.cdap.data2.util.hbase.PutBuilder) DataSetException(co.cask.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Map(java.util.Map) NavigableMap(java.util.NavigableMap) SortedMap(java.util.SortedMap) Put(org.apache.hadoop.hbase.client.Put)

Example 5 with PutBuilder

use of io.cdap.cdap.data2.util.hbase.PutBuilder in project cdap by caskdata.

In class HBaseTable, method persist:

@Override
protected void persist(NavigableMap<byte[], NavigableMap<byte[], Update>> updates) throws Exception {
    // Flushes buffered updates to HBase, building per-row mutations. Each row
    // may yield up to three mutations: a regular Put, a Put for readless-style
    // increments, and a native Increment — kept separate because they have
    // different attributes/semantics.
    if (updates.isEmpty()) {
        return;
    }
    // tx may be null: this table supports both transactional and
    // non-transactional writes.
    byte[] txId = tx == null ? null : Bytes.toBytes(tx.getTransactionId());
    byte[] txWritePointer = tx == null ? null : Bytes.toBytes(tx.getWritePointer());
    List<Mutation> mutations = new ArrayList<>();
    for (Map.Entry<byte[], NavigableMap<byte[], Update>> row : updates.entrySet()) {
        // create these only when they are needed
        PutBuilder put = null;
        PutBuilder incrementPut = null;
        IncrementBuilder increment = null;
        for (Map.Entry<byte[], Update> column : row.getValue().entrySet()) {
            // we want support tx and non-tx modes
            if (tx != null) {
                // TODO: hijacking timestamp... bad
                // In tx mode the cell timestamp is set to the tx write
                // pointer so reads can filter by version.
                Update val = column.getValue();
                if (val instanceof IncrementValue) {
                    if (safeReadlessIncrements) {
                        // Native HBase Increment path (coprocessor-safe).
                        increment = getIncrement(increment, row.getKey(), txId, txWritePointer);
                        increment.add(columnFamily, column.getKey(), tx.getWritePointer(), ((IncrementValue) val).getValue());
                    } else {
                        // Fall back to writing the increment delta as a Put.
                        incrementPut = getPutForIncrement(incrementPut, row.getKey(), txId);
                        incrementPut.add(columnFamily, column.getKey(), tx.getWritePointer(), Bytes.toBytes(((IncrementValue) val).getValue()));
                    }
                } else if (val instanceof PutValue) {
                    put = getPut(put, row.getKey(), txId);
                    // wrapDeleteIfNeeded: deletes are represented as special
                    // put values under transactions.
                    put.add(columnFamily, column.getKey(), tx.getWritePointer(), wrapDeleteIfNeeded(((PutValue) val).getValue()));
                }
            } else {
                // Non-tx mode: no explicit timestamps or write pointers.
                Update val = column.getValue();
                if (val instanceof IncrementValue) {
                    incrementPut = getPutForIncrement(incrementPut, row.getKey(), txId);
                    incrementPut.add(columnFamily, column.getKey(), Bytes.toBytes(((IncrementValue) val).getValue()));
                } else if (val instanceof PutValue) {
                    put = getPut(put, row.getKey(), txId);
                    put.add(columnFamily, column.getKey(), ((PutValue) val).getValue());
                }
            }
        }
        // Only emit builders that actually accumulated cells.
        if (incrementPut != null) {
            mutations.add(incrementPut.build());
        }
        if (increment != null) {
            mutations.add(increment.build());
        }
        if (put != null) {
            mutations.add(put.build());
        }
    }
    if (!hbaseFlush(mutations)) {
        LOG.info("No writes to persist!");
    }
}
Also used : NavigableMap(java.util.NavigableMap) ArrayList(java.util.ArrayList) Update(co.cask.cdap.data2.dataset2.lib.table.Update) IncrementValue(co.cask.cdap.data2.dataset2.lib.table.IncrementValue) PutValue(co.cask.cdap.data2.dataset2.lib.table.PutValue) PutBuilder(co.cask.cdap.data2.util.hbase.PutBuilder) IncrementBuilder(co.cask.cdap.data2.util.hbase.IncrementBuilder) Mutation(org.apache.hadoop.hbase.client.Mutation) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) NavigableMap(java.util.NavigableMap)

Aggregations

IOException (java.io.IOException)10 Put (org.apache.hadoop.hbase.client.Put)8 PutBuilder (co.cask.cdap.data2.util.hbase.PutBuilder)6 PutBuilder (io.cdap.cdap.data2.util.hbase.PutBuilder)6 ArrayList (java.util.ArrayList)6 Map (java.util.Map)6 NavigableMap (java.util.NavigableMap)6 SortedMap (java.util.SortedMap)4 Result (org.apache.hadoop.hbase.client.Result)4 DataSetException (co.cask.cdap.api.dataset.DataSetException)2 DataSetException (io.cdap.cdap.api.dataset.DataSetException)2 TreeMap (java.util.TreeMap)2 Get (org.apache.hadoop.hbase.client.Get)2 Mutation (org.apache.hadoop.hbase.client.Mutation)2 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)2 Scan (org.apache.hadoop.hbase.client.Scan)2 TopicAlreadyExistsException (co.cask.cdap.api.messaging.TopicAlreadyExistsException)1 IncrementValue (co.cask.cdap.data2.dataset2.lib.table.IncrementValue)1 PutValue (co.cask.cdap.data2.dataset2.lib.table.PutValue)1 Update (co.cask.cdap.data2.dataset2.lib.table.Update)1