Example usage of org.apache.druid.data.input.Row from the druid-io/druid project:
class IncrementalIndexTest, method testConcurrentAdd.
@Test
public void testConcurrentAdd() throws Exception {
final IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES);
final int threadCount = 10;
final int elementsPerThread = 200;
final int dimensionCount = 5;
ExecutorService executor = Executors.newFixedThreadPool(threadCount);
final long timestamp = System.currentTimeMillis();
final CountDownLatch latch = new CountDownLatch(threadCount);
try {
// Each worker ingests the same elementsPerThread rows; with rollup enabled the
// index collapses identical rows, without rollup every add is kept.
for (int j = 0; j < threadCount; j++) {
executor.submit(new Runnable() {
@Override
public void run() {
try {
for (int i = 0; i < elementsPerThread; i++) {
index.add(getRow(timestamp + i, i, dimensionCount));
}
} catch (Exception e) {
// NOTE(review): failure here is only printed, not propagated; the size
// assertions below will catch a short index, but consider collecting the
// first failure and asserting on it — TODO confirm intended behavior.
e.printStackTrace();
} finally {
// Count down in finally so the latch is released even if a worker throws
// an Error, preventing the await below from timing out silently.
latch.countDown();
}
}
});
}
Assert.assertTrue(latch.await(60, TimeUnit.SECONDS));
} finally {
// The original leaked the pool's threads; always shut the executor down.
executor.shutdownNow();
}
boolean isRollup = index.isRollup();
Assert.assertEquals(dimensionCount, index.getDimensionNames().size());
// With rollup, the threadCount duplicates of each row collapse into one.
Assert.assertEquals(elementsPerThread * (isRollup ? 1 : threadCount), index.size());
Iterator<Row> iterator = index.iterator();
int curr = 0;
while (iterator.hasNext()) {
Row row = iterator.next();
// Rollup: one row per timestamp. No rollup: threadCount consecutive rows share a timestamp.
Assert.assertEquals(timestamp + (isRollup ? curr : curr / threadCount), row.getTimestampFromEpoch());
Assert.assertEquals(isRollup ? threadCount : 1, row.getMetric("count").intValue());
curr++;
}
Assert.assertEquals(elementsPerThread * (isRollup ? 1 : threadCount), curr);
}
Example usage of org.apache.druid.data.input.Row from the druid-io/druid project:
class IncrementalIndexTest, method testFilteredAggregators.
@Test
public void testFilteredAggregators() throws Exception {
final long timestamp = System.currentTimeMillis();
// Plain count plus four filtered counts: a selector and a bound filter on dim2,
// a selector on the multi-valued dim3, and a selector applied to the metric column met1.
final AggregatorFactory[] factories = new AggregatorFactory[] {
new CountAggregatorFactory("count"),
new FilteredAggregatorFactory(
new CountAggregatorFactory("count_selector_filtered"),
new SelectorDimFilter("dim2", "2", null)
),
new FilteredAggregatorFactory(
new CountAggregatorFactory("count_bound_filtered"),
new BoundDimFilter("dim2", "2", "3", false, true, null, null, StringComparators.NUMERIC)
),
new FilteredAggregatorFactory(
new CountAggregatorFactory("count_multivaldim_filtered"),
new SelectorDimFilter("dim3", "b", null)
),
new FilteredAggregatorFactory(
new CountAggregatorFactory("count_numeric_filtered"),
new SelectorDimFilter("met1", "11", null)
)
};
final IncrementalIndex index = indexCreator.createIndex((Object) factories);

index.add(new MapBasedInputRow(
timestamp,
Arrays.asList("dim1", "dim2", "dim3"),
ImmutableMap.of("dim1", "1", "dim2", "2", "dim3", Lists.newArrayList("b", "a"), "met1", 10)
));
index.add(new MapBasedInputRow(
timestamp,
Arrays.asList("dim1", "dim2", "dim3"),
ImmutableMap.of("dim1", "3", "dim2", "4", "dim3", Lists.newArrayList("c", "d"), "met1", 11)
));

Assert.assertEquals(Arrays.asList("dim1", "dim2", "dim3"), index.getDimensionNames());
Assert.assertEquals(
Arrays.asList("count", "count_selector_filtered", "count_bound_filtered", "count_multivaldim_filtered", "count_numeric_filtered"),
index.getMetricNames()
);
Assert.assertEquals(2, index.size());

final Iterator<Row> rowIterator = index.iterator();

// First row: dim2="2" matches the selector and bound filters; dim3 contains "b".
Row first = rowIterator.next();
Assert.assertEquals(timestamp, first.getTimestampFromEpoch());
Assert.assertEquals(Collections.singletonList("1"), first.getDimension("dim1"));
Assert.assertEquals(Collections.singletonList("2"), first.getDimension("dim2"));
Assert.assertEquals(Arrays.asList("a", "b"), first.getDimension("dim3"));
Assert.assertEquals(1L, first.getMetric("count"));
Assert.assertEquals(1L, first.getMetric("count_selector_filtered"));
Assert.assertEquals(1L, first.getMetric("count_bound_filtered"));
Assert.assertEquals(1L, first.getMetric("count_multivaldim_filtered"));
Assert.assertEquals(0L, first.getMetric("count_numeric_filtered"));

// Second row: only the met1=11 filter matches.
Row second = rowIterator.next();
Assert.assertEquals(timestamp, second.getTimestampFromEpoch());
Assert.assertEquals(Collections.singletonList("3"), second.getDimension("dim1"));
Assert.assertEquals(Collections.singletonList("4"), second.getDimension("dim2"));
Assert.assertEquals(Arrays.asList("c", "d"), second.getDimension("dim3"));
Assert.assertEquals(1L, second.getMetric("count"));
Assert.assertEquals(0L, second.getMetric("count_selector_filtered"));
Assert.assertEquals(0L, second.getMetric("count_bound_filtered"));
Assert.assertEquals(0L, second.getMetric("count_multivaldim_filtered"));
Assert.assertEquals(1L, second.getMetric("count_numeric_filtered"));
}
Example usage of org.apache.druid.data.input.Row from the druid-io/druid project:
class IncrementalIndexTest, method testCaseSensitivity.
@Test
public void testCaseSensitivity() throws Exception {
final long timestamp = System.currentTimeMillis();
final IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES);
populateIndex(timestamp, index);

// populateIndex is expected to produce exactly two rows over dim1/dim2.
Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensionNames());
Assert.assertEquals(2, index.size());

final Iterator<Row> rowIterator = index.iterator();

Row first = rowIterator.next();
Assert.assertEquals(timestamp, first.getTimestampFromEpoch());
Assert.assertEquals(Collections.singletonList("1"), first.getDimension("dim1"));
Assert.assertEquals(Collections.singletonList("2"), first.getDimension("dim2"));

Row second = rowIterator.next();
Assert.assertEquals(timestamp, second.getTimestampFromEpoch());
Assert.assertEquals(Collections.singletonList("3"), second.getDimension("dim1"));
Assert.assertEquals(Collections.singletonList("4"), second.getDimension("dim2"));
}
Example usage of org.apache.druid.data.input.Row from the druid-io/druid project:
class SqlInputSourceTest, method testMultipleSplits.
@Test
public void testMultipleSplits() throws Exception {
derbyConnector = derbyConnectorRule.getConnector();
SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
testUtils.createAndUpdateTable(TABLE_NAME_2, 10);
final File tempDir = createFirehoseTmpDir("testMultipleSplit");
SqlInputSource sqlInputSource = new SqlInputSource(SQLLIST2, true, testUtils.getDerbyFirehoseConnector(), mapper);
InputSourceReader sqlReader = sqlInputSource.fixedFormatReader(INPUT_ROW_SCHEMA, tempDir);
final List<Row> rows = new ArrayList<>();
// try-with-resources: the original never closed the CloseableIterator,
// leaking whatever resources the reader holds (e.g. the JDBC result set).
try (CloseableIterator<InputRow> resultIterator = sqlReader.read()) {
while (resultIterator.hasNext()) {
rows.add(resultIterator.next());
}
}
assertResult(rows, SQLLIST2);
testUtils.dropTable(TABLE_NAME_1);
testUtils.dropTable(TABLE_NAME_2);
}
Example usage of org.apache.druid.data.input.Row from the druid-io/druid project:
class SqlInputSourceTest, method testSingleSplit.
@Test
public void testSingleSplit() throws Exception {
derbyConnector = derbyConnectorRule.getConnector();
SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
final File tempDir = createFirehoseTmpDir("testSingleSplit");
SqlInputSource sqlInputSource = new SqlInputSource(SQLLIST1, true, testUtils.getDerbyFirehoseConnector(), mapper);
InputSourceReader sqlReader = sqlInputSource.fixedFormatReader(INPUT_ROW_SCHEMA, tempDir);
final List<Row> rows = new ArrayList<>();
// try-with-resources: the original never closed the CloseableIterator,
// leaking whatever resources the reader holds (e.g. the JDBC result set).
try (CloseableIterator<InputRow> resultIterator = sqlReader.read()) {
while (resultIterator.hasNext()) {
rows.add(resultIterator.next());
}
}
assertResult(rows, SQLLIST1);
testUtils.dropTable(TABLE_NAME_1);
}
Aggregations