
Example 6 with TestVerifier

Use of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier in project pinpoint by naver.

In the class DefaultBulkUpdaterTest, the method singleTableConcurrent:

@Test
public void singleTableConcurrent() throws Exception {
    // Given
    TableName tableA = TableName.valueOf("A");
    TestDataSet testDataSetA_0_0 = new TestDataSet(tableA, 0, 0, 1000000);
    TestDataSet testDataSetA_0_1 = new TestDataSet(tableA, 0, 1, 1000001);
    List<TestData> testDatas = new ArrayList<>();
    testDatas.addAll(testDataSetA_0_0.getTestDatas());
    testDatas.addAll(testDataSetA_0_1.getTestDatas());
    Collections.shuffle(testDatas);
    // When
    final int numIncrementers = 16;
    List<List<TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch completeLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);
    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, completeLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();
    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, completeLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    flusherLatch.await(30L, TimeUnit.SECONDS);
    // Then
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    verifier.verify(testDataSetA_0_0);
    verifier.verify(testDataSetA_0_1);
}
Also used: TestData (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData), TestDataSet (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet), TestVerifier (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier), Incrementer (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer), Flusher (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher), TableName (org.apache.hadoop.hbase.TableName), Increment (org.apache.hadoop.hbase.client.Increment), ArrayList (java.util.ArrayList), List (java.util.List), Map (java.util.Map), CountDownLatch (java.util.concurrent.CountDownLatch), FutureTask (java.util.concurrent.FutureTask), Test (org.junit.Test)
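
The Incrementer worker referenced above is defined in BulkIncrementerTestClazz and its body is not shown on this page. As a rough sketch (field types and structure are assumed for illustration, not the project's actual source), each worker could simply drain its partition of the shuffled test data into the shared BulkIncrementer and then count down the completion latch, exactly like the single-threaded loop in the multipleTables example below:

static class Incrementer implements Runnable {

    // field types are assumed for illustration; the real class lives in BulkIncrementerTestClazz
    private final BulkIncrementer bulkIncrementer;
    private final CountDownLatch completeLatch;
    private final List<TestData> testDatas;

    Incrementer(BulkIncrementer bulkIncrementer, CountDownLatch completeLatch, List<TestData> testDatas) {
        this.bulkIncrementer = bulkIncrementer;
        this.completeLatch = completeLatch;
        this.testDatas = testDatas;
    }

    @Override
    public void run() {
        for (TestData testData : testDatas) {
            // same call the multipleTables test issues directly on the test thread
            bulkIncrementer.increment(testData.getTableName(), testData.getRowKey(), testData.getColumnName());
        }
        // signal that this partition has been fully submitted
        completeLatch.countDown();
    }
}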

Example 7 with TestVerifier

Use of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier in project pinpoint by naver.

In the class DefaultBulkUpdaterTest, the method multipleTables:

@Test
public void multipleTables() {
    // Given
    TableName tableA = TableName.valueOf("a", "A");
    TableName tableB = TableName.valueOf("b", "A");
    TestDataSet testDataSetA_0_0 = new TestDataSet(tableA, 0, 0, 100);
    TestDataSet testDataSetA_0_1 = new TestDataSet(tableA, 0, 1, 200);
    TestDataSet testDataSetA_1_0 = new TestDataSet(tableA, 1, 0, 300);
    TestDataSet testDataSetA_1_1 = new TestDataSet(tableA, 1, 1, 400);
    TestDataSet testDataSetB_0_0 = new TestDataSet(tableB, 0, 0, 500);
    TestDataSet testDataSetB_0_1 = new TestDataSet(tableB, 0, 1, 600);
    TestDataSet testDataSetB_1_0 = new TestDataSet(tableB, 1, 0, 700);
    TestDataSet testDataSetB_1_1 = new TestDataSet(tableB, 1, 1, 800);
    List<TestData> testDatas = new ArrayList<>();
    testDatas.addAll(testDataSetA_0_0.getTestDatas());
    testDatas.addAll(testDataSetA_0_1.getTestDatas());
    testDatas.addAll(testDataSetA_1_0.getTestDatas());
    testDatas.addAll(testDataSetA_1_1.getTestDatas());
    testDatas.addAll(testDataSetB_0_0.getTestDatas());
    testDatas.addAll(testDataSetB_0_1.getTestDatas());
    testDatas.addAll(testDataSetB_1_0.getTestDatas());
    testDatas.addAll(testDataSetB_1_1.getTestDatas());
    Collections.shuffle(testDatas);
    // When
    for (TestData testData : testDatas) {
        bulkIncrementer.increment(testData.getTableName(), testData.getRowKey(), testData.getColumnName());
    }
    // Then
    Map<TableName, List<Increment>> incrementMap = bulkIncrementer.getIncrements(rowKeyDistributor);
    TestVerifier verifier = new TestVerifier(incrementMap);
    verifier.verify(testDataSetA_0_0);
    verifier.verify(testDataSetA_0_1);
    verifier.verify(testDataSetA_1_0);
    verifier.verify(testDataSetA_1_1);
    verifier.verify(testDataSetB_0_0);
    verifier.verify(testDataSetB_0_1);
    verifier.verify(testDataSetB_1_0);
    verifier.verify(testDataSetB_1_1);
}
Also used: TestData (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData), TestDataSet (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet), TestVerifier (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier), TableName (org.apache.hadoop.hbase.TableName), ArrayList (java.util.ArrayList), List (java.util.List), Test (org.junit.Test)
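
TestVerifier encapsulates the assertions, so the test body only calls verify(testDataSet) once per data set. To illustrate what such a check amounts to, here is a hypothetical helper (sumDeltas is an invented name, not part of BulkIncrementerTestClazz; it additionally needs java.util.NavigableMap and java.util.Collections) that sums every counter delta flushed for one table. The four data sets for tableA above issue 100 + 200 + 300 + 400 = 1000 increments in total, so the summed deltas for that table should also be 1000 no matter how the row keys were distributed:

// hypothetical helper for illustration only; the real assertions live in TestVerifier
private static long sumDeltas(Map<TableName, List<Increment>> incrementMap, TableName tableName) {
    long total = 0;
    for (Increment increment : incrementMap.getOrDefault(tableName, Collections.emptyList())) {
        // Increment.getFamilyMapOfLongs() exposes family -> (qualifier -> delta)
        for (NavigableMap<byte[], Long> qualifierToDelta : increment.getFamilyMapOfLongs().values()) {
            for (long delta : qualifierToDelta.values()) {
                total += delta;
            }
        }
    }
    return total;
}

// e.g. assertEquals(100 + 200 + 300 + 400, sumDeltas(incrementMap, tableA));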

Example 8 with TestVerifier

Use of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier in project pinpoint by naver.

In the class DefaultBulkUpdaterTest, the method multipleTablesConcurrent:

@Test
public void multipleTablesConcurrent() throws Exception {
    // Given
    final int numTables = 50;
    final int numRowIds = 100;
    final int numColumnIds = 20;
    final int maxCallCount = 200;
    List<TestDataSet> testDataSets = BulkIncrementerTestClazz.createRandomTestDataSetList(numTables, numRowIds, numColumnIds, maxCallCount);
    List<TableName> tableNames = new ArrayList<>(numTables);
    for (int i = 0; i < numTables; i++) {
        tableNames.add(TableName.valueOf(i + ""));
    }
    final int maxNumTestDatas = numTables * numRowIds * numColumnIds * maxCallCount;
    List<TestData> testDatas = new ArrayList<>(maxNumTestDatas);
    for (TestDataSet testDataSet : testDataSets) {
        testDatas.addAll(testDataSet.getTestDatas());
    }
    Collections.shuffle(testDatas);
    // When
    final int numIncrementers = 16;
    List<List<TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch incrementorLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);
    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, incrementorLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();
    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, incrementorLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    flusherLatch.await(30L, TimeUnit.SECONDS);
    // Then
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    for (TestDataSet testDataSet : testDataSets) {
        verifier.verify(testDataSet);
    }
}
Also used: TestData (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData), TestDataSet (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet), TestVerifier (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier), Incrementer (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer), Flusher (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher), TableName (org.apache.hadoop.hbase.TableName), Increment (org.apache.hadoop.hbase.client.Increment), ArrayList (java.util.ArrayList), List (java.util.List), Map (java.util.Map), CountDownLatch (java.util.concurrent.CountDownLatch), FutureTask (java.util.concurrent.FutureTask), Test (org.junit.Test)
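
The Flusher used by both concurrent tests is also part of BulkIncrementerTestClazz and is not reproduced on this page. A simplified, hypothetical version consistent with how the tests drive it (wrapped in a FutureTask, released by the incrementer latch, and releasing flusherLatch once it has produced the increment map) could look like the sketch below; the RowKeyDistributorByHashPrefix type for rowKeyDistributor is an assumption, and the real implementation may well flush repeatedly while the Incrementer threads are still running:

static class Flusher implements Callable<Map<TableName, List<Increment>>> {

    // field and distributor types are assumed for illustration
    private final BulkIncrementer bulkIncrementer;
    private final RowKeyDistributorByHashPrefix rowKeyDistributor;
    private final CountDownLatch incrementerLatch;
    private final CountDownLatch flusherLatch;

    Flusher(BulkIncrementer bulkIncrementer, RowKeyDistributorByHashPrefix rowKeyDistributor,
            CountDownLatch incrementerLatch, CountDownLatch flusherLatch) {
        this.bulkIncrementer = bulkIncrementer;
        this.rowKeyDistributor = rowKeyDistributor;
        this.incrementerLatch = incrementerLatch;
        this.flusherLatch = flusherLatch;
    }

    @Override
    public Map<TableName, List<Increment>> call() throws Exception {
        // wait until every Incrementer thread has submitted its partition
        incrementerLatch.await(30L, TimeUnit.SECONDS);
        // drain everything accumulated so far into per-table HBase Increment mutations
        Map<TableName, List<Increment>> increments = bulkIncrementer.getIncrements(rowKeyDistributor);
        // let the test thread proceed to flushTask.get(...) and the assertions
        flusherLatch.countDown();
        return increments;
    }
}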

Aggregations

TestData (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestData): 8 usages
TestDataSet (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet): 8 usages
TestVerifier (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestVerifier): 8 usages
ArrayList (java.util.ArrayList): 8 usages
List (java.util.List): 8 usages
TableName (org.apache.hadoop.hbase.TableName): 8 usages
Test (org.junit.Test): 8 usages
Flusher (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Flusher): 4 usages
Incrementer (com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.Incrementer): 4 usages
Map (java.util.Map): 4 usages
CountDownLatch (java.util.concurrent.CountDownLatch): 4 usages
FutureTask (java.util.concurrent.FutureTask): 4 usages
Increment (org.apache.hadoop.hbase.client.Increment): 4 usages
ByteBuffer (java.nio.ByteBuffer): 1 usage