Example 46 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

In class IncrementalIndexTest, method testConcurrentAdd.

@Test
public void testConcurrentAdd() throws Exception {
    final IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES);
    final int threadCount = 10;
    final int elementsPerThread = 200;
    final int dimensionCount = 5;
    ExecutorService executor = Executors.newFixedThreadPool(threadCount);
    final long timestamp = System.currentTimeMillis();
    final CountDownLatch latch = new CountDownLatch(threadCount);
    for (int j = 0; j < threadCount; j++) {
        executor.submit(new Runnable() {

            @Override
            public void run() {
                try {
                    for (int i = 0; i < elementsPerThread; i++) {
                        index.add(getRow(timestamp + i, i, dimensionCount));
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
                latch.countDown();
            }
        });
    }
    Assert.assertTrue(latch.await(60, TimeUnit.SECONDS));
    boolean isRollup = index.isRollup();
    Assert.assertEquals(dimensionCount, index.getDimensionNames().size());
    // With rollup enabled, the identical rows written by all threads collapse into
    // one row per distinct timestamp; without rollup, every add() is its own row.
    Assert.assertEquals(elementsPerThread * (isRollup ? 1 : threadCount), index.size());
    Iterator<Row> iterator = index.iterator();
    int curr = 0;
    while (iterator.hasNext()) {
        Row row = iterator.next();
        Assert.assertEquals(timestamp + (isRollup ? curr : curr / threadCount), row.getTimestampFromEpoch());
        Assert.assertEquals(isRollup ? threadCount : 1, row.getMetric("count").intValue());
        curr++;
    }
    Assert.assertEquals(elementsPerThread * (isRollup ? 1 : threadCount), curr);
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), ListeningExecutorService (com.google.common.util.concurrent.ListeningExecutorService), ExecutorService (java.util.concurrent.ExecutorService), Row (org.apache.druid.data.input.Row), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), CountDownLatch (java.util.concurrent.CountDownLatch), IndexSizeExceededException (org.apache.druid.segment.incremental.IndexSizeExceededException), JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException), ExecutionException (java.util.concurrent.ExecutionException), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
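
The getRow helper called inside the worker loop is defined elsewhere in the test class. A minimal sketch, assuming each row carries dimensionCount synthetic dimensions whose names and values encode the row id (the Dim_ naming and the values are illustrative assumptions, not necessarily the project's exact helper):

private static MapBasedInputRow getRow(long timestamp, int rowID, int dimensionCount) {
    // One synthetic dimension per slot. Inputs are identical across threads for a
    // given rowID, so rows written by different threads can roll up into one row.
    List<String> dimensionList = new ArrayList<>(dimensionCount);
    ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
    for (int i = 0; i < dimensionCount; i++) {
        String dimName = "Dim_" + i;
        dimensionList.add(dimName);
        builder.put(dimName, dimName + rowID);
    }
    return new MapBasedInputRow(timestamp, dimensionList, builder.build());
}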

Example 47 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

In class IncrementalIndexTest, method testFilteredAggregators.

@Test
public void testFilteredAggregators() throws Exception {
    long timestamp = System.currentTimeMillis();
    IncrementalIndex index = indexCreator.createIndex((Object) new AggregatorFactory[] {
        new CountAggregatorFactory("count"),
        new FilteredAggregatorFactory(
            new CountAggregatorFactory("count_selector_filtered"),
            new SelectorDimFilter("dim2", "2", null)
        ),
        new FilteredAggregatorFactory(
            new CountAggregatorFactory("count_bound_filtered"),
            new BoundDimFilter("dim2", "2", "3", false, true, null, null, StringComparators.NUMERIC)
        ),
        new FilteredAggregatorFactory(
            new CountAggregatorFactory("count_multivaldim_filtered"),
            new SelectorDimFilter("dim3", "b", null)
        ),
        new FilteredAggregatorFactory(
            new CountAggregatorFactory("count_numeric_filtered"),
            new SelectorDimFilter("met1", "11", null)
        )
    });
    index.add(new MapBasedInputRow(
        timestamp,
        Arrays.asList("dim1", "dim2", "dim3"),
        ImmutableMap.of("dim1", "1", "dim2", "2", "dim3", Lists.newArrayList("b", "a"), "met1", 10)
    ));
    index.add(new MapBasedInputRow(
        timestamp,
        Arrays.asList("dim1", "dim2", "dim3"),
        ImmutableMap.of("dim1", "3", "dim2", "4", "dim3", Lists.newArrayList("c", "d"), "met1", 11)
    ));
    Assert.assertEquals(Arrays.asList("dim1", "dim2", "dim3"), index.getDimensionNames());
    Assert.assertEquals(Arrays.asList("count", "count_selector_filtered", "count_bound_filtered", "count_multivaldim_filtered", "count_numeric_filtered"), index.getMetricNames());
    Assert.assertEquals(2, index.size());
    final Iterator<Row> rows = index.iterator();
    Row row = rows.next();
    Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
    Assert.assertEquals(Collections.singletonList("1"), row.getDimension("dim1"));
    Assert.assertEquals(Collections.singletonList("2"), row.getDimension("dim2"));
    Assert.assertEquals(Arrays.asList("a", "b"), row.getDimension("dim3"));
    Assert.assertEquals(1L, row.getMetric("count"));
    Assert.assertEquals(1L, row.getMetric("count_selector_filtered"));
    Assert.assertEquals(1L, row.getMetric("count_bound_filtered"));
    Assert.assertEquals(1L, row.getMetric("count_multivaldim_filtered"));
    Assert.assertEquals(0L, row.getMetric("count_numeric_filtered"));
    row = rows.next();
    Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
    Assert.assertEquals(Collections.singletonList("3"), row.getDimension("dim1"));
    Assert.assertEquals(Collections.singletonList("4"), row.getDimension("dim2"));
    Assert.assertEquals(Arrays.asList("c", "d"), row.getDimension("dim3"));
    Assert.assertEquals(1L, row.getMetric("count"));
    Assert.assertEquals(0L, row.getMetric("count_selector_filtered"));
    Assert.assertEquals(0L, row.getMetric("count_bound_filtered"));
    Assert.assertEquals(0L, row.getMetric("count_multivaldim_filtered"));
    Assert.assertEquals(1L, row.getMetric("count_numeric_filtered"));
}
Also used: FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory), BoundDimFilter (org.apache.druid.query.filter.BoundDimFilter), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter), Row (org.apache.druid.data.input.Row), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
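
The first input row (dim2 = "2", dim3 contains "b", met1 = 10) trips the selector, bound, and multi-value filters; the second (dim2 = "4", met1 = 11) trips only the filter on met1, which matches the assertions above. The BoundDimFilter arguments are easy to misread, so here is the same factory again with each argument named; a sketch for readability only, no new behavior:

// Counts only rows where 2 <= dim2 < 3 under numeric ordering.
AggregatorFactory countBoundFiltered = new FilteredAggregatorFactory(
    new CountAggregatorFactory("count_bound_filtered"),
    new BoundDimFilter(
        "dim2",                    // dimension to test
        "2",                       // lower bound
        "3",                       // upper bound
        false,                     // lowerStrict: false keeps the lower bound inclusive
        true,                      // upperStrict: true makes the upper bound exclusive
        null,                      // alphaNumeric (legacy flag, unused here)
        null,                      // extractionFn
        StringComparators.NUMERIC  // ordering used for the comparison
    )
);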

Example 48 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

In class IncrementalIndexTest, method testCaseSensitivity.

@Test
public void testCaseSensitivity() throws Exception {
    long timestamp = System.currentTimeMillis();
    IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES);
    populateIndex(timestamp, index);
    Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensionNames());
    Assert.assertEquals(2, index.size());
    final Iterator<Row> rows = index.iterator();
    Row row = rows.next();
    Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
    Assert.assertEquals(Collections.singletonList("1"), row.getDimension("dim1"));
    Assert.assertEquals(Collections.singletonList("2"), row.getDimension("dim2"));
    row = rows.next();
    Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
    Assert.assertEquals(Collections.singletonList("3"), row.getDimension("dim1"));
    Assert.assertEquals(Collections.singletonList("4"), row.getDimension("dim2"));
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), Row (org.apache.druid.data.input.Row), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
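
populateIndex is defined elsewhere in the test class and is not reproduced on this page. A minimal sketch that satisfies the assertions above (two rows at the same timestamp); this is an assumption, since whatever dimension-name case-mixing the real helper exercises is not visible here:

private static void populateIndex(long timestamp, IncrementalIndex index) throws IndexSizeExceededException {
    // Row 1: dim1 = "1", dim2 = "2"; row 2: dim1 = "3", dim2 = "4".
    index.add(new MapBasedInputRow(
        timestamp,
        Arrays.asList("dim1", "dim2"),
        ImmutableMap.of("dim1", "1", "dim2", "2")
    ));
    index.add(new MapBasedInputRow(
        timestamp,
        Arrays.asList("dim1", "dim2"),
        ImmutableMap.of("dim1", "3", "dim2", "4")
    ));
}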

Example 49 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

In class SqlInputSourceTest, method testMultipleSplits.

@Test
public void testMultipleSplits() throws Exception {
    derbyConnector = derbyConnectorRule.getConnector();
    SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
    testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    testUtils.createAndUpdateTable(TABLE_NAME_2, 10);
    final File tempDir = createFirehoseTmpDir("testMultipleSplit");
    SqlInputSource sqlInputSource = new SqlInputSource(SQLLIST2, true, testUtils.getDerbyFirehoseConnector(), mapper);
    InputSourceReader sqlReader = sqlInputSource.fixedFormatReader(INPUT_ROW_SCHEMA, tempDir);
    CloseableIterator<InputRow> resultIterator = sqlReader.read();
    final List<Row> rows = new ArrayList<>();
    while (resultIterator.hasNext()) {
        rows.add(resultIterator.next());
    }
    assertResult(rows, SQLLIST2);
    testUtils.dropTable(TABLE_NAME_1);
    testUtils.dropTable(TABLE_NAME_2);
}
Also used: InputSourceReader (org.apache.druid.data.input.InputSourceReader), InputRow (org.apache.druid.data.input.InputRow), ArrayList (java.util.ArrayList), Row (org.apache.druid.data.input.Row), File (java.io.File), Test (org.junit.Test)
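
The CloseableIterator returned by sqlReader.read() holds the underlying database resources, and the test never closes it explicitly. Outside a test, the drain loop would normally sit in try-with-resources; a minimal sketch of the same loop with explicit cleanup (an adaptation, not the project's code):

final List<Row> rows = new ArrayList<>();
try (CloseableIterator<InputRow> it = sqlReader.read()) {
    // close() releases the statement/connection once the iterator is drained.
    while (it.hasNext()) {
        rows.add(it.next());
    }
}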

Example 50 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

In class SqlInputSourceTest, method testSingleSplit.

@Test
public void testSingleSplit() throws Exception {
    derbyConnector = derbyConnectorRule.getConnector();
    SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
    testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    final File tempDir = createFirehoseTmpDir("testSingleSplit");
    SqlInputSource sqlInputSource = new SqlInputSource(SQLLIST1, true, testUtils.getDerbyFirehoseConnector(), mapper);
    InputSourceReader sqlReader = sqlInputSource.fixedFormatReader(INPUT_ROW_SCHEMA, tempDir);
    CloseableIterator<InputRow> resultIterator = sqlReader.read();
    final List<Row> rows = new ArrayList<>();
    while (resultIterator.hasNext()) {
        rows.add(resultIterator.next());
    }
    assertResult(rows, SQLLIST1);
    testUtils.dropTable(TABLE_NAME_1);
}
Also used: InputSourceReader (org.apache.druid.data.input.InputSourceReader), InputRow (org.apache.druid.data.input.InputRow), ArrayList (java.util.ArrayList), Row (org.apache.druid.data.input.Row), File (java.io.File), Test (org.junit.Test)
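
SQLLIST1 and SQLLIST2 are fixtures declared elsewhere in the test class: presumably one SQL statement for the single-split test and two for the multiple-split test, one per table. A hypothetical sketch; the statement text and column names are assumptions:

// Hypothetical fixtures: the input source treats each statement as one split.
private static final List<String> SQLLIST1 = ImmutableList.of(
    "SELECT timestamp, a, b FROM " + TABLE_NAME_1
);
private static final List<String> SQLLIST2 = ImmutableList.of(
    "SELECT timestamp, a, b FROM " + TABLE_NAME_1,
    "SELECT timestamp, a, b FROM " + TABLE_NAME_2
);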

Aggregations

Row (org.apache.druid.data.input.Row): 54 usages
Test (org.junit.Test): 44 usages
ArrayList (java.util.ArrayList): 32 usages
MapBasedRow (org.apache.druid.data.input.MapBasedRow): 21 usages
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 21 usages
File (java.io.File): 18 usages
Firehose (org.apache.druid.data.input.Firehose): 15 usages
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 15 usages
HashMap (java.util.HashMap): 13 usages
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 13 usages
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 12 usages
DimensionSpec (org.apache.druid.query.dimension.DimensionSpec): 11 usages
List (java.util.List): 10 usages
LongMeanAveragerFactory (org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory): 9 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 8 usages
InputRow (org.apache.druid.data.input.InputRow): 8 usages
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 7 usages
IOException (java.io.IOException): 6 usages
GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig): 6 usages
Function (com.google.common.base.Function): 5 usages