Usage example of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet in the pinpoint project by naver.
From class DefaultBulkUpdaterTest, method singleTableConcurrent:
@Test
public void singleTableConcurrent() throws Exception {
    // Given: two data sets targeting the same table/row, different columns,
    // with large call counts to provoke contention between incrementer threads.
    TableName tableA = TableName.valueOf("A");
    TestDataSet testDataSetA_0_0 = new TestDataSet(tableA, 0, 0, 1000000);
    TestDataSet testDataSetA_0_1 = new TestDataSet(tableA, 0, 1, 1000001);

    List<TestData> testDatas = new ArrayList<>();
    testDatas.addAll(testDataSetA_0_0.getTestDatas());
    testDatas.addAll(testDataSetA_0_1.getTestDatas());
    // Shuffle so partitions interleave both data sets.
    Collections.shuffle(testDatas);

    // When: partition the data across concurrent incrementer threads while a
    // flusher thread drains the bulk incrementer until all incrementers finish.
    final int numIncrementers = 16;
    // Use the short TestData alias consistently (was fully-qualified
    // BulkIncrementerTestClazz.TestData, unlike every sibling usage).
    List<List<TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch completeLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);

    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, completeLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();

    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, completeLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    // Fail fast on timeout: await() returns false when the latch did not reach
    // zero, and silently continuing would produce a confusing downstream failure.
    if (!flusherLatch.await(30L, TimeUnit.SECONDS)) {
        throw new AssertionError("Flusher did not complete within 30 seconds");
    }

    // Then: every recorded increment must match the expected per-column counts.
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    verifier.verify(testDataSetA_0_0);
    verifier.verify(testDataSetA_0_1);
}
Usage example of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet in the pinpoint project by naver.
From class DefaultBulkUpdaterTest, method multipleTables:
@Test
public void multipleTables() {
    // Given: every (table, row, column) combination for two tables, each with
    // a distinct expected call count.
    TableName tableA = TableName.valueOf("a", "A");
    TableName tableB = TableName.valueOf("b", "A");
    List<TestDataSet> dataSets = new ArrayList<>();
    dataSets.add(new TestDataSet(tableA, 0, 0, 100));
    dataSets.add(new TestDataSet(tableA, 0, 1, 200));
    dataSets.add(new TestDataSet(tableA, 1, 0, 300));
    dataSets.add(new TestDataSet(tableA, 1, 1, 400));
    dataSets.add(new TestDataSet(tableB, 0, 0, 500));
    dataSets.add(new TestDataSet(tableB, 0, 1, 600));
    dataSets.add(new TestDataSet(tableB, 1, 0, 700));
    dataSets.add(new TestDataSet(tableB, 1, 1, 800));

    // Flatten all data sets and shuffle so increments arrive in random order.
    List<TestData> shuffledTestDatas = new ArrayList<>();
    for (TestDataSet dataSet : dataSets) {
        shuffledTestDatas.addAll(dataSet.getTestDatas());
    }
    Collections.shuffle(shuffledTestDatas);

    // When: feed every increment into the bulk incrementer.
    for (TestData testData : shuffledTestDatas) {
        bulkIncrementer.increment(testData.getTableName(), testData.getRowKey(), testData.getColumnName());
    }

    // Then: the drained increments must match each data set's expected counts.
    Map<TableName, List<Increment>> incrementMap = bulkIncrementer.getIncrements(rowKeyDistributor);
    TestVerifier verifier = new TestVerifier(incrementMap);
    for (TestDataSet dataSet : dataSets) {
        verifier.verify(dataSet);
    }
}
Usage example of com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementerTestClazz.TestDataSet in the pinpoint project by naver.
From class DefaultBulkUpdaterTest, method multipleTablesConcurrent:
@Test
public void multipleTablesConcurrent() throws Exception {
    // Given: randomized data sets spread over many tables/rows/columns.
    final int numTables = 50;
    final int numRowIds = 100;
    final int numColumnIds = 20;
    final int maxCallCount = 200;
    List<TestDataSet> testDataSets = BulkIncrementerTestClazz.createRandomTestDataSetList(numTables, numRowIds, numColumnIds, maxCallCount);
    List<TableName> tableNames = new ArrayList<>(numTables);
    for (int i = 0; i < numTables; i++) {
        tableNames.add(TableName.valueOf(i + ""));
    }

    // Presize to the upper bound of all generated test data.
    final int maxNumTestDatas = numTables * numRowIds * numColumnIds * maxCallCount;
    List<TestData> testDatas = new ArrayList<>(maxNumTestDatas);
    for (TestDataSet testDataSet : testDataSets) {
        testDatas.addAll(testDataSet.getTestDatas());
    }
    Collections.shuffle(testDatas);

    // When: partition the data across concurrent incrementer threads while a
    // flusher thread drains the bulk incrementer until all incrementers finish.
    final int numIncrementers = 16;
    List<List<TestData>> testDataPartitions = ListUtils.partition(testDatas, testDatas.size() / (numIncrementers - 1));
    final CountDownLatch incrementorLatch = new CountDownLatch(testDataPartitions.size());
    final CountDownLatch flusherLatch = new CountDownLatch(1);

    FutureTask<Map<TableName, List<Increment>>> flushTask = new FutureTask<>(new Flusher(bulkIncrementer, rowKeyDistributor, incrementorLatch, flusherLatch));
    new Thread(flushTask, "Flusher").start();

    int counter = 0;
    for (List<TestData> testDataPartition : testDataPartitions) {
        Incrementer incrementer = new Incrementer(bulkIncrementer, incrementorLatch, testDataPartition);
        new Thread(incrementer, "Incrementer-" + counter++).start();
    }
    // Fail fast on timeout: await() returns false when the latch did not reach
    // zero, and silently continuing would produce a confusing downstream failure.
    if (!flusherLatch.await(30L, TimeUnit.SECONDS)) {
        throw new AssertionError("Flusher did not complete within 30 seconds");
    }

    // Then: every recorded increment must match the expected per-column counts.
    Map<TableName, List<Increment>> incrementMap = flushTask.get(5L, TimeUnit.SECONDS);
    TestVerifier verifier = new TestVerifier(incrementMap);
    for (TestDataSet testDataSet : testDataSets) {
        verifier.verify(testDataSet);
    }
}
Aggregations