Search in sources :

Example 1 with Incrementer

Use of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer in the project pinpoint by Naver.

From the class SizeLimitedBulkIncrementerTest, method multipleTablesConcurrent:

@Test
public void multipleTablesConcurrent() throws Exception {
    // Given: a randomized data set spread across many tables, rows, and columns.
    final int numTables = 50;
    final int numRowIds = 100;
    final int numColumnIds = 20;
    final int maxCallCount = 200;
    List<TestDataSet> testDataSets = BulkIncrementerTestClazz.createRandomTestDataSetList(numTables, numRowIds, numColumnIds, maxCallCount);
    // Upper bound on the total number of test data items; presize to avoid resizing.
    final int maxNumTestDatas = numTables * numRowIds * numColumnIds * maxCallCount;
    List<TestData> testDatas = new ArrayList<>(maxNumTestDatas);
    for (TestDataSet testDataSet : testDataSets) {
        testDatas.addAll(testDataSet.getTestDatas());
    }
    // Shuffle so increments from different data sets interleave across worker threads.
    Collections.shuffle(testDatas);
    // When: partition the data and run concurrent incrementer threads against a
    // single Flusher thread coordinated via latches.
    final int numIncrementers = 16;
    List<List<TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch incrementorLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);
    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, incrementorLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();
    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, incrementorLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    // Fail fast if the flusher does not finish; previously the boolean result was
    // silently ignored and a timeout went undetected until the get() below.
    Assert.assertTrue("Flusher did not complete within 30 seconds", flusherLatch.await(30L, TimeUnit.SECONDS));
    // Then: sum up every count that actually made it into the flushed increments.
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    // The result map does not change while we read it; fetch it once outside the loop.
    Map<TableName, Map<ByteBuffer, Map<ByteBuffer, Long>>> resultMap = verifier.getResultMap();
    long actualTotalCount = 0;
    for (TestDataSet testDataSet : testDataSets) {
        TableName expectedTableName = testDataSet.getTableName();
        RowKey expectedRowKey = testDataSet.getRowKey();
        ColumnName expectedColumnName = testDataSet.getColumnName();
        Map<ByteBuffer, Map<ByteBuffer, Long>> rows = resultMap.get(expectedTableName);
        if (rows == null) {
            // Entire table was dropped by the size limit; nothing to add.
            continue;
        }
        Map<ByteBuffer, Long> keyValues = rows.get(ByteBuffer.wrap(expectedRowKey.getRowKey()));
        if (keyValues == null) {
            continue;
        }
        Long actualCount = keyValues.get(ByteBuffer.wrap(expectedColumnName.getColumnName()));
        if (actualCount == null) {
            continue;
        }
        actualTotalCount += actualCount;
    }
    // The size-limited incrementer may drop data, but it must retain at least
    // bulkLimitSize counts in total.
    Assert.assertTrue(actualTotalCount > bulkLimitSize);
}
Also used : TestData(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData) ArrayList(java.util.ArrayList) TestDataSet(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet) FutureTask(java.util.concurrent.FutureTask) Increment(org.apache.hadoop.hbase.client.Increment) TestVerifier(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier) ArrayList(java.util.ArrayList) List(java.util.List) Incrementer(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer) CountDownLatch(java.util.concurrent.CountDownLatch) ByteBuffer(java.nio.ByteBuffer) TableName(org.apache.hadoop.hbase.TableName) Flusher(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher) Map(java.util.Map) Test(org.junit.Test)

Example 2 with Incrementer

Use of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer in the project pinpoint by Naver.

From the class SizeLimitedBulkIncrementerTest, method singleTableConcurrent:

@Test
public void singleTableConcurrent() throws Exception {
    // Given: two data sets targeting the same table and row, different columns,
    // with call counts straddling one million to exercise high-volume increments.
    TableName tableA = TableName.valueOf("A");
    TestDataSet testDataSetA_0_0 = new TestDataSet(tableA, 0, 0, 1000000);
    TestDataSet testDataSetA_0_1 = new TestDataSet(tableA, 0, 1, 1000001);
    List<TestData> testDatas = new ArrayList<>();
    testDatas.addAll(testDataSetA_0_0.getTestDatas());
    testDatas.addAll(testDataSetA_0_1.getTestDatas());
    // Shuffle so the two data sets interleave across worker threads.
    Collections.shuffle(testDatas);
    // When: partition the data and run concurrent incrementer threads against a
    // single Flusher thread coordinated via latches.
    final int numIncrementers = 16;
    List<List<BulkIncrementerTestClazz.TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch completeLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);
    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, completeLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();
    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, completeLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    // Fail fast if the flusher does not finish; previously the boolean result was
    // silently ignored and a timeout went undetected until the get() below.
    Assert.assertTrue("Flusher did not complete within 30 seconds", flusherLatch.await(30L, TimeUnit.SECONDS));
    // Then: every increment from both data sets must appear in the flushed result.
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    verifier.verify(testDataSetA_0_0);
    verifier.verify(testDataSetA_0_1);
}
Also used : TestData(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData) Incrementer(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer) ArrayList(java.util.ArrayList) CountDownLatch(java.util.concurrent.CountDownLatch) TestDataSet(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet) TableName(org.apache.hadoop.hbase.TableName) FutureTask(java.util.concurrent.FutureTask) Increment(org.apache.hadoop.hbase.client.Increment) Flusher(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher) TestVerifier(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) Test(org.junit.Test)

Example 3 with Incrementer

Use of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer in the project pinpoint by Naver.

From the class DefaultBulkUpdaterTest, method singleTableConcurrent:

@Test
public void singleTableConcurrent() throws Exception {
    // Given: two data sets targeting the same table and row, different columns,
    // with call counts straddling one million to exercise high-volume increments.
    TableName tableA = TableName.valueOf("A");
    TestDataSet testDataSetA_0_0 = new TestDataSet(tableA, 0, 0, 1000000);
    TestDataSet testDataSetA_0_1 = new TestDataSet(tableA, 0, 1, 1000001);
    List<TestData> testDatas = new ArrayList<>();
    testDatas.addAll(testDataSetA_0_0.getTestDatas());
    testDatas.addAll(testDataSetA_0_1.getTestDatas());
    // Shuffle so the two data sets interleave across worker threads.
    Collections.shuffle(testDatas);
    // When: partition the data and run concurrent incrementer threads against a
    // single Flusher thread coordinated via latches.
    final int numIncrementers = 16;
    List<List<BulkIncrementerTestClazz.TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch completeLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);
    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, completeLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();
    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, completeLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    // Fail fast if the flusher does not finish; previously the boolean result was
    // silently ignored and a timeout went undetected until the get() below.
    Assert.assertTrue("Flusher did not complete within 30 seconds", flusherLatch.await(30L, TimeUnit.SECONDS));
    // Then: every increment from both data sets must appear in the flushed result.
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    verifier.verify(testDataSetA_0_0);
    verifier.verify(testDataSetA_0_1);
}
Also used : TestData(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData) Incrementer(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer) ArrayList(java.util.ArrayList) CountDownLatch(java.util.concurrent.CountDownLatch) TestDataSet(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet) TableName(org.apache.hadoop.hbase.TableName) FutureTask(java.util.concurrent.FutureTask) Increment(org.apache.hadoop.hbase.client.Increment) Flusher(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher) TestVerifier(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) Test(org.junit.Test)

Example 4 with Incrementer

Use of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer in the project pinpoint by Naver.

From the class DefaultBulkUpdaterTest, method multipleTablesConcurrent:

@Test
public void multipleTablesConcurrent() throws Exception {
    // Given: a randomized data set spread across many tables, rows, and columns.
    final int numTables = 50;
    final int numRowIds = 100;
    final int numColumnIds = 20;
    final int maxCallCount = 200;
    List<TestDataSet> testDataSets = BulkIncrementerTestClazz.createRandomTestDataSetList(numTables, numRowIds, numColumnIds, maxCallCount);
    // Upper bound on the total number of test data items; presize to avoid resizing.
    final int maxNumTestDatas = numTables * numRowIds * numColumnIds * maxCallCount;
    List<TestData> testDatas = new ArrayList<>(maxNumTestDatas);
    for (TestDataSet testDataSet : testDataSets) {
        testDatas.addAll(testDataSet.getTestDatas());
    }
    // Shuffle so increments from different data sets interleave across worker threads.
    Collections.shuffle(testDatas);
    // When: partition the data and run concurrent incrementer threads against a
    // single Flusher thread coordinated via latches.
    final int numIncrementers = 16;
    List<List<TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch incrementorLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);
    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, incrementorLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();
    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, incrementorLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    // Fail fast if the flusher does not finish; previously the boolean result was
    // silently ignored and a timeout went undetected until the get() below.
    Assert.assertTrue("Flusher did not complete within 30 seconds", flusherLatch.await(30L, TimeUnit.SECONDS));
    // Then: the default (unlimited) updater must have preserved every data set.
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    for (TestDataSet testDataSet : testDataSets) {
        verifier.verify(testDataSet);
    }
}
Also used : TestData(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData) Incrementer(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer) ArrayList(java.util.ArrayList) CountDownLatch(java.util.concurrent.CountDownLatch) TestDataSet(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet) TableName(org.apache.hadoop.hbase.TableName) FutureTask(java.util.concurrent.FutureTask) Increment(org.apache.hadoop.hbase.client.Increment) Flusher(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher) TestVerifier(com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) Test(org.junit.Test)

Aggregations

Flusher (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher)4 Incrementer (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer)4 TestData (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData)4 TestDataSet (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet)4 TestVerifier (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier)4 ArrayList (java.util.ArrayList)4 List (java.util.List)4 Map (java.util.Map)4 CountDownLatch (java.util.concurrent.CountDownLatch)4 FutureTask (java.util.concurrent.FutureTask)4 TableName (org.apache.hadoop.hbase.TableName)4 Increment (org.apache.hadoop.hbase.client.Increment)4 Test (org.junit.Test)4 ByteBuffer (java.nio.ByteBuffer)1