Example 6 with Increment

use of org.apache.hadoop.hbase.client.Increment in project pinpoint by naver.

the class SizeLimitedBulkIncrementerTest method multipleTablesConcurrent.

@Test
public void multipleTablesConcurrent() throws Exception {
    // Given
    final int numTables = 50;
    final int numRowIds = 100;
    final int numColumnIds = 20;
    final int maxCallCount = 200;
    List<TestDataSet> testDataSets = BulkIncrementerTestClazz.createRandomTestDataSetList(numTables, numRowIds, numColumnIds, maxCallCount);
    List<TableName> tableNames = new ArrayList<>(numTables);
    for (int i = 0; i < numTables; i++) {
        tableNames.add(TableName.valueOf(i + ""));
    }
    final int maxNumTestDatas = numTables * numRowIds * numColumnIds * maxCallCount;
    List<TestData> testDatas = new ArrayList<>(maxNumTestDatas);
    for (TestDataSet testDataSet : testDataSets) {
        testDatas.addAll(testDataSet.getTestDatas());
    }
    Collections.shuffle(testDatas);
    // When
    final int numIncrementers = 16;
    List<List<TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch incrementorLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);
    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, incrementorLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();
    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, incrementorLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    flusherLatch.await(30L, TimeUnit.SECONDS);
    // Then
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    long actualTotalCount = 0;
    for (TestDataSet testDataSet : testDataSets) {
        TableName expectedTableName = testDataSet.getTableName();
        RowKey expectedRowKey = testDataSet.getRowKey();
        ColumnName expectedColumnName = testDataSet.getColumnName();
        Map<TableName, Map<ByteBuffer, Map<ByteBuffer, Long>>> resultMap = verifier.getResultMap();
        Map<ByteBuffer, Map<ByteBuffer, Long>> rows = resultMap.get(expectedTableName);
        if (rows == null) {
            continue;
        }
        Map<ByteBuffer, Long> keyValues = rows.get(ByteBuffer.wrap(expectedRowKey.getRowKey()));
        if (keyValues == null) {
            continue;
        }
        Long actualCount = keyValues.get(ByteBuffer.wrap(expectedColumnName.getColumnName()));
        if (actualCount == null) {
            continue;
        }
        actualTotalCount += actualCount;
    }
    Assert.assertTrue(actualTotalCount > bulkLimitSize);
}
Also used : TestData(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData) ArrayList(java.util.ArrayList) TestDataSet(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet) FutureTask(java.util.concurrent.FutureTask) Increment(org.apache.hadoop.hbase.client.Increment) TestVerifier(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier) ArrayList(java.util.ArrayList) List(java.util.List) Incrementer(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer) CountDownLatch(java.util.concurrent.CountDownLatch) ByteBuffer(java.nio.ByteBuffer) TableName(org.apache.hadoop.hbase.TableName) Flusher(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher) Map(java.util.Map) Test(org.junit.Test)
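
For orientation, the core client API that all of these examples build on is small. A minimal, self-contained sketch, assuming an open Connection and a hypothetical "counters" table with column family cf:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

static long incrementHits(Connection connection) throws IOException {
    try (Table table = connection.getTable(TableName.valueOf("counters"))) {
        // Atomically add 1 to the cell at (row1, cf:hits).
        Increment increment = new Increment(Bytes.toBytes("row1"));
        increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("hits"), 1L);
        Result result = table.increment(increment);
        // The server returns the post-increment value.
        return Bytes.toLong(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("hits")));
    }
}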

Example 7 with Increment

use of org.apache.hadoop.hbase.client.Increment in project pinpoint by naver.

the class DefaultBulkWriter method flushLink.

@Override
public void flushLink() {
    // update statistics by rowkey and column for now. need to update it by rowkey later.
    Map<TableName, List<Increment>> incrementMap = bulkIncrementer.getIncrements(rowKeyDistributorByHashPrefix);
    for (Map.Entry<TableName, List<Increment>> entry : incrementMap.entrySet()) {
        TableName tableName = entry.getKey();
        List<Increment> increments = entry.getValue();
        if (logger.isDebugEnabled()) {
            logger.debug("flush {} to [{}] Increment:{}", this.getClass().getSimpleName(), tableName, increments.size());
        }
        hbaseTemplate.increment(tableName, increments);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Increment(org.apache.hadoop.hbase.client.Increment) List(java.util.List) Map(java.util.Map)
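
hbaseTemplate is Pinpoint's wrapper around the HBase client. With the plain client API, the same per-table flush could be sketched as below; the flush helper and its Connection parameter are hypothetical, not part of the Pinpoint code:

import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Table;

static void flush(Connection connection, Map<TableName, List<Increment>> incrementMap) throws IOException {
    for (Map.Entry<TableName, List<Increment>> entry : incrementMap.entrySet()) {
        try (Table table = connection.getTable(entry.getKey())) {
            // Send all increments for this table in one batched round trip.
            Object[] results = new Object[entry.getValue().size()];
            table.batch(entry.getValue(), results);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IOException("Interrupted while flushing increments", e);
        }
    }
}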

Example 8 with Increment

use of org.apache.hadoop.hbase.client.Increment in project pinpoint by naver.

the class RowKeyMerge method createBulkIncrement.

public Map<TableName, List<Increment>> createBulkIncrement(Map<RowInfo, Long> data, RowKeyDistributorByHashPrefix rowKeyDistributorByHashPrefix) {
    if (data.isEmpty()) {
        return Collections.emptyMap();
    }
    final Map<TableName, List<Increment>> tableIncrementMap = new HashMap<>();
    final Map<TableName, Map<RowKey, List<ColumnName>>> tableRowKeyMap = mergeRowKeys(data);
    for (Map.Entry<TableName, Map<RowKey, List<ColumnName>>> tableRowKeys : tableRowKeyMap.entrySet()) {
        final TableName tableName = tableRowKeys.getKey();
        final List<Increment> incrementList = new ArrayList<>();
        for (Map.Entry<RowKey, List<ColumnName>> rowKeyEntry : tableRowKeys.getValue().entrySet()) {
            Increment increment = createIncrement(rowKeyEntry, rowKeyDistributorByHashPrefix);
            incrementList.add(increment);
        }
        tableIncrementMap.put(tableName, incrementList);
    }
    return tableIncrementMap;
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) TableName(org.apache.hadoop.hbase.TableName) Increment(org.apache.hadoop.hbase.client.Increment) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map)
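
createIncrement itself is not shown above. A plausible sketch of such a helper, assuming the getRowKey()/getColumnName() accessors seen in Example 6, a getCallCount() accessor on ColumnName, and a FAMILY constant (the latter two are guesses at Pinpoint internals rather than confirmed API):

private Increment createIncrement(Map.Entry<RowKey, List<ColumnName>> rowKeyEntry,
        RowKeyDistributorByHashPrefix rowKeyDistributorByHashPrefix) {
    RowKey rowKey = rowKeyEntry.getKey();
    // Prefix the row key so writes spread across regions (HBaseWD-style salting).
    byte[] distributedKey = rowKeyDistributorByHashPrefix.getDistributedKey(rowKey.getRowKey());
    Increment increment = new Increment(distributedKey);
    for (ColumnName columnName : rowKeyEntry.getValue()) {
        // One counter cell per merged column, incremented by its accumulated count.
        increment.addColumn(FAMILY, columnName.getColumnName(), columnName.getCallCount());
    }
    return increment;
}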

Example 9 with Increment

use of org.apache.hadoop.hbase.client.Increment in project hbase by apache.

the class RowResource method increment.

/**
 * Validates the input request parameters, parses columns from CellSetModel,
 * and invokes Increment on HTable.
 *
 * @param model instance of CellSetModel
 * @return Response 200 OK, 304 Not modified, 400 Bad request
 */
Response increment(final CellSetModel model) {
    Table table = null;
    Increment increment = null;
    try {
        table = servlet.getTable(tableResource.getName());
        if (model.getRows().size() != 1) {
            servlet.getMetrics().incrementFailedIncrementRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Number of rows specified is not 1." + CRLF).build();
        }
        RowModel rowModel = model.getRows().get(0);
        byte[] key = rowModel.getKey();
        if (key == null) {
            key = rowspec.getRow();
        }
        if (key == null) {
            servlet.getMetrics().incrementFailedIncrementRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Row key found to be null." + CRLF).build();
        }
        increment = new Increment(key);
        increment.setReturnResults(returnResult);
        int i = 0;
        for (CellModel cell : rowModel.getCells()) {
            byte[] col = cell.getColumn();
            if (col == null) {
                try {
                    col = rowspec.getColumns()[i++];
                } catch (ArrayIndexOutOfBoundsException e) {
                    col = null;
                }
            }
            if (col == null) {
                servlet.getMetrics().incrementFailedIncrementRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF).build();
            }
            byte[][] parts = CellUtil.parseColumn(col);
            if (parts.length != 2) {
                servlet.getMetrics().incrementFailedIncrementRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Column incorrectly specified." + CRLF).build();
            }
            increment.addColumn(parts[0], parts[1], Long.parseLong(Bytes.toStringBinary(cell.getValue())));
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("INCREMENT " + increment.toString());
        }
        Result result = table.increment(increment);
        if (returnResult) {
            if (result.isEmpty()) {
                servlet.getMetrics().incrementFailedIncrementRequests(1);
                return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT).entity("Increment return empty." + CRLF).build();
            }
            CellSetModel rModel = new CellSetModel();
            RowModel rRowModel = new RowModel(result.getRow());
            for (Cell cell : result.listCells()) {
                rRowModel.addCell(new CellModel(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), cell.getTimestamp(), CellUtil.cloneValue(cell)));
            }
            rModel.addRow(rRowModel);
            servlet.getMetrics().incrementSucessfulIncrementRequests(1);
            return Response.ok(rModel).build();
        }
        ResponseBuilder response = Response.ok();
        servlet.getMetrics().incrementSucessfulIncrementRequests(1);
        return response.build();
    } catch (Exception e) {
        servlet.getMetrics().incrementFailedIncrementRequests(1);
        return processException(e);
    } finally {
        if (table != null) {
            try {
                table.close();
            } catch (IOException ioe) {
                LOG.debug("Exception received while closing the table " + table.getName(), ioe);
            }
        }
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) CellSetModel(org.apache.hadoop.hbase.rest.model.CellSetModel) IOException(java.io.IOException) IOException(java.io.IOException) Result(org.apache.hadoop.hbase.client.Result) Increment(org.apache.hadoop.hbase.client.Increment) RowModel(org.apache.hadoop.hbase.rest.model.RowModel) CellModel(org.apache.hadoop.hbase.rest.model.CellModel) ResponseBuilder(org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder) Cell(org.apache.hadoop.hbase.Cell)
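
From the client side, the request body for this endpoint is a CellSetModel containing exactly one row. Since the handler parses the cell value with Long.parseLong(Bytes.toStringBinary(...)), the increment amount travels as the string form of the number. A minimal sketch of building such a request (row and column names hypothetical):

import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;

CellSetModel model = new CellSetModel();
RowModel row = new RowModel(Bytes.toBytes("row1"));
// Column is "family:qualifier"; the handler splits it with CellUtil.parseColumn.
// The amount is the decimal string "1", matching the server-side Long.parseLong.
row.addCell(new CellModel(Bytes.toBytes("cf:hits"), Bytes.toBytes("1")));
model.addRow(row);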

Example 10 with Increment

use of org.apache.hadoop.hbase.client.Increment in project hbase by apache.

the class TestProtobufUtil method testIncrementNoTimestamp.

/**
 * Older clients may not send along a timestamp in the MutationProto. Check that we
 * default correctly.
 */
@Test
public void testIncrementNoTimestamp() throws IOException {
    MutationProto mutation = getIncrementMutation(null);
    Increment increment = ProtobufUtil.toIncrement(mutation, null);
    assertEquals(HConstants.LATEST_TIMESTAMP, increment.getTimestamp());
    increment.getFamilyCellMap().values().forEach(cells -> cells.forEach(cell -> assertEquals(HConstants.LATEST_TIMESTAMP, cell.getTimestamp())));
}
Also used : Any(org.apache.hbase.thirdparty.com.google.protobuf.Any) Increment(org.apache.hadoop.hbase.client.Increment) TimeRange(org.apache.hadoop.hbase.io.TimeRange) Column(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) ByteBuffer(java.nio.ByteBuffer) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) HConstants(org.apache.hadoop.hbase.HConstants) Delete(org.apache.hadoop.hbase.client.Delete) CellComparatorImpl(org.apache.hadoop.hbase.CellComparatorImpl) Tag(org.apache.hadoop.hbase.Tag) DeleteType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) LockServiceProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos) ClassRule(org.junit.ClassRule) ByteBufferKeyValue(org.apache.hadoop.hbase.ByteBufferKeyValue) ExtendedCellBuilderFactory(org.apache.hadoop.hbase.ExtendedCellBuilderFactory) Cell(org.apache.hadoop.hbase.Cell) KeyValue(org.apache.hadoop.hbase.KeyValue) Bytes(org.apache.hadoop.hbase.util.Bytes) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) Append(org.apache.hadoop.hbase.client.Append) ExtendedCellBuilder(org.apache.hadoop.hbase.ExtendedCellBuilder) CellBuilderType(org.apache.hadoop.hbase.CellBuilderType) Assert.assertNotNull(org.junit.Assert.assertNotNull) Put(org.apache.hadoop.hbase.client.Put) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) ProcedureProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos) Get(org.apache.hadoop.hbase.client.Get) Assert.assertTrue(org.junit.Assert.assertTrue) HBaseClassTestRule(org.apache.hadoop.hbase.HBaseClassTestRule) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) CellProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos) Lists(org.apache.hbase.thirdparty.com.google.common.collect.Lists) List(java.util.List) BytesValue(org.apache.hbase.thirdparty.com.google.protobuf.BytesValue) PrivateCellUtil(org.apache.hadoop.hbase.PrivateCellUtil) SmallTests(org.apache.hadoop.hbase.testclassification.SmallTests) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) Collections(java.util.Collections) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) Assert.assertEquals(org.junit.Assert.assertEquals) Increment(org.apache.hadoop.hbase.client.Increment) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) Test(org.junit.Test)
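
getIncrementMutation is a private helper of the test and is not shown. Building an INCREMENT MutationProto without a timestamp presumably looks something like the sketch below; the field choices are inferred from the shaded protobuf definitions, not copied from the test:

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

MutationProto mutation = MutationProto.newBuilder()
    .setRow(ByteString.copyFrom(Bytes.toBytes("row")))
    .setMutateType(MutationType.INCREMENT)
    .addColumnValue(ColumnValue.newBuilder()
        .setFamily(ByteString.copyFrom(Bytes.toBytes("cf")))
        .addQualifierValue(QualifierValue.newBuilder()
            // Deliberately no setTimestamp(...): this is the "older client" case.
            .setQualifier(ByteString.copyFrom(Bytes.toBytes("q")))
            .setValue(ByteString.copyFrom(Bytes.toBytes(1L)))))
    .build();
// toIncrement should then fall back to HConstants.LATEST_TIMESTAMP.
Increment increment = ProtobufUtil.toIncrement(mutation, null);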

Aggregations

Increment (org.apache.hadoop.hbase.client.Increment): 81
Test (org.junit.Test): 42
Put (org.apache.hadoop.hbase.client.Put): 31
Append (org.apache.hadoop.hbase.client.Append): 25
Result (org.apache.hadoop.hbase.client.Result): 25
Delete (org.apache.hadoop.hbase.client.Delete): 21
Get (org.apache.hadoop.hbase.client.Get): 19
IOException (java.io.IOException): 16
TableName (org.apache.hadoop.hbase.TableName): 15
Table (org.apache.hadoop.hbase.client.Table): 15
ArrayList (java.util.ArrayList): 14
Cell (org.apache.hadoop.hbase.Cell): 11
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 11
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 9
Mutation (org.apache.hadoop.hbase.client.Mutation): 9
RowMutations (org.apache.hadoop.hbase.client.RowMutations): 9
List (java.util.List): 8
Map (java.util.Map): 8
Scan (org.apache.hadoop.hbase.client.Scan): 7
KeyValue (org.apache.hadoop.hbase.KeyValue): 5