Example 1 with LegacySegmentSpec

Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

From the class GroupByQueryRunnerFactoryTest, method testMergeRunnersEnsureGroupMerging:

@Test
public void testMergeRunnersEnsureGroupMerging() {
    GroupByQuery query = GroupByQuery
            .builder()
            .setDataSource("xx")
            .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
            .setGranularity(Granularities.ALL)
            .setDimensions(new DefaultDimensionSpec("tags", "tags"))
            .setAggregatorSpecs(new CountAggregatorFactory("count"))
            .build();
    // Nest two mergeResults calls around the same toolchest; the test checks that
    // group merging still happens when results are merged across runners.
    QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner() {

        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
            return factory.getToolchest().mergeResults(new QueryRunner() {

                @Override
                public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
                    final Query query = queryPlus.getQuery();
                    try {
                        return new MergeSequence(
                                query.getResultOrdering(),
                                // two runners over the same segment, merged into a single sequence
                                Sequences.simple(Arrays.asList(
                                        factory.createRunner(createSegment()).run(queryPlus, responseContext),
                                        factory.createRunner(createSegment()).run(queryPlus, responseContext)
                                ))
                        );
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            }).run(queryPlus, responseContext);
        }
    });
    Sequence<ResultRow> result = mergedRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
    List<ResultRow> expectedResults = Arrays.asList(
            GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
            GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t2", "count", 4L)
    );
    TestHelper.assertExpectedObjects(expectedResults, result.toList(), "");
}
Also used: Query (org.apache.druid.query.Query) Sequence (org.apache.druid.java.util.common.guava.Sequence) MergeSequence (org.apache.druid.java.util.common.guava.MergeSequence) LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec) DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec) QueryRunner (org.apache.druid.query.QueryRunner) IOException (java.io.IOException) CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory) ResponseContext (org.apache.druid.query.context.ResponseContext) QueryPlus (org.apache.druid.query.QueryPlus) Test (org.junit.Test)
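
The "1970/3000" string handed to setQuerySegmentSpec is all LegacySegmentSpec needs: its constructor accepts a plain "start/end" interval string. A minimal sketch of the equivalence, assuming (as in current druid sources) that LegacySegmentSpec extends MultipleIntervalSegmentSpec; MultipleIntervalSegmentSpec and Collections do not appear in the example above and are brought in here for illustration:

// Sketch: the string form is shorthand for an explicit one-interval spec.
QuerySegmentSpec fromString = new LegacySegmentSpec("1970/3000");
QuerySegmentSpec explicit = new MultipleIntervalSegmentSpec(
        // Intervals.of parses an ISO-8601 "start/end" string into an Interval
        Collections.singletonList(Intervals.of("1970/3000"))
);
// Both specs resolve to the same interval list at query time.

Uses: QuerySegmentSpec (org.apache.druid.query.spec.QuerySegmentSpec) MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec) Intervals (org.apache.druid.java.util.common.Intervals) Collections (java.util.Collections)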

Example 2 with LegacySegmentSpec

Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

From the class ScanQuerySpecTest, method testSerialization:

@Test
public void testSerialization() throws Exception {
    // Older serialized form: no virtualColumns, resultFormat, batchSize, descending, or granularity fields.
    String legacy = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
            + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
            + "\"filter\":null,"
            + "\"columns\":[\"market\",\"quality\",\"index\"],"
            + "\"limit\":3,"
            + "\"context\":null}";
    // Current serialized form, as produced by JSON_MAPPER below.
    String current = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
            + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
            + "\"virtualColumns\":[],"
            + "\"resultFormat\":\"list\","
            + "\"batchSize\":20480,"
            + "\"limit\":3,"
            + "\"filter\":null,"
            + "\"columns\":[\"market\",\"quality\",\"index\"],"
            + "\"context\":null,"
            + "\"descending\":false,"
            + "\"granularity\":{\"type\":\"all\"}}";
    ScanQuery query = new ScanQuery(
            new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
            new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
            VirtualColumns.EMPTY, ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
            0, 0, 3, ScanQuery.Order.NONE, null, null,
            Arrays.asList("market", "quality", "index"), null, null
    );
    String actual = JSON_MAPPER.writeValueAsString(query);
    Assert.assertEquals(current, actual);
    Assert.assertEquals(query, JSON_MAPPER.readValue(actual, ScanQuery.class));
    Assert.assertEquals(query, JSON_MAPPER.readValue(legacy, ScanQuery.class));
}
Also used: TableDataSource (org.apache.druid.query.TableDataSource) LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec) Test (org.junit.Test)
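
The explicit "type":"LegacySegmentSpec" tag in both JSON strings above is rarely written by hand. A short sketch of the more common bare-array form, under the assumption (matching current druid sources) that LegacySegmentSpec is the Jackson default implementation for a query's "intervals" field; the bare JSON string here is illustrative and not part of the original test:

// Sketch: a bare interval array (and a bare datasource name) need no type tags;
// the spec should come back as a LegacySegmentSpec.
String bare = "{\"queryType\":\"scan\",\"dataSource\":\"testing\","
        + "\"intervals\":[\"2011-01-12/2011-01-14\"],"
        + "\"columns\":[\"market\",\"quality\",\"index\"],\"limit\":3}";
ScanQuery parsed = JSON_MAPPER.readValue(bare, ScanQuery.class);
Assert.assertEquals(LegacySegmentSpec.class, parsed.getQuerySegmentSpec().getClass());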

Example 3 with LegacySegmentSpec

Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

From the class ScanQuerySpecTest, method testSerializationLegacyString:

@Test
public void testSerializationLegacyString() throws Exception {
    ScanQuery query = new ScanQuery(
            new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
            new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
            VirtualColumns.EMPTY, ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
            0, 1, 3, ScanQuery.Order.NONE, null, null,
            Arrays.asList("market", "quality", "index"), null, null
    );
    final String serialized = JSON_MAPPER.writeValueAsString(query);
    final ScanQuery deserialized = (ScanQuery) JSON_MAPPER.readValue(serialized, Query.class);
    Assert.assertEquals(query, deserialized);
}
Also used: TableDataSource (org.apache.druid.query.TableDataSource) Query (org.apache.druid.query.Query) LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec) Test (org.junit.Test)
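
Deserializing against the Query interface works because the serialized form carries the "queryType" discriminator that Jackson's polymorphic handling keys on. The same step with the subtype check made explicit, as a minimal sketch:

// Sketch: "queryType":"scan" in the serialized JSON selects the ScanQuery
// subtype when reading against the polymorphic Query interface.
Query<?> generic = JSON_MAPPER.readValue(serialized, Query.class);
Assert.assertTrue(generic instanceof ScanQuery);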

Example 4 with LegacySegmentSpec

Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

From the class ScanQuerySpecTest, method testSerializationWithOrderBy:

@Test
public void testSerializationWithOrderBy() throws Exception {
    String originalJson = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
            + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
            + "\"virtualColumns\":[],"
            + "\"resultFormat\":\"list\","
            + "\"batchSize\":20480,"
            + "\"limit\":3,"
            + "\"orderBy\":[{\"columnName\":\"quality\",\"order\":\"ascending\"}],"
            + "\"filter\":null,"
            + "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
            + "\"context\":null,"
            + "\"descending\":false,"
            + "\"granularity\":{\"type\":\"all\"}}";
    ScanQuery expectedQuery = new ScanQuery(
            new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
            new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
            VirtualColumns.EMPTY, ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
            0, 0, 3, null,
            Collections.singletonList(new ScanQuery.OrderBy("quality", ScanQuery.Order.ASCENDING)),
            null, Arrays.asList("market", "quality", "index", "__time"), null, null
    );
    String serializedJson = JSON_MAPPER.writeValueAsString(expectedQuery);
    Assert.assertEquals(originalJson, serializedJson);
    Assert.assertEquals(expectedQuery, JSON_MAPPER.readValue(originalJson, ScanQuery.class));
    Assert.assertEquals(ScanQuery.Order.NONE, expectedQuery.getTimeOrder());
    Assert.assertEquals(Collections.singletonList(new ScanQuery.OrderBy("quality", ScanQuery.Order.ASCENDING)), expectedQuery.getOrderBys());
}
Also used: TableDataSource (org.apache.druid.query.TableDataSource) LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec) Test (org.junit.Test)
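
The last two assertions capture the split between the two ordering APIs: getOrderBys() carries ordering on arbitrary columns, while getTimeOrder() reflects only an ordering on __time. A sketch of the __time case, built on the assumption (consistent with the Order.NONE assertion above) that getTimeOrder() is derived from the orderBy list; this variant is an illustration, not part of the original test:

// Sketch: an orderBy on the __time column should surface through getTimeOrder().
ScanQuery timeOrdered = new ScanQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
        VirtualColumns.EMPTY, ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
        0, 0, 3, null,
        Collections.singletonList(new ScanQuery.OrderBy("__time", ScanQuery.Order.ASCENDING)),
        null, Arrays.asList("market", "quality", "index", "__time"), null, null
);
Assert.assertEquals(ScanQuery.Order.ASCENDING, timeOrdered.getTimeOrder());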

Example 5 with LegacySegmentSpec

Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

From the class DoubleStorageTest, method testSelectValues:

@Test
public void testSelectValues() {
    QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
            SCAN_QUERY_RUNNER_FACTORY,
            SEGMENT_ID,
            new QueryableIndexSegment(index, SEGMENT_ID),
            null
    );
    ScanQuery query = newTestQuery().intervals(new LegacySegmentSpec(INTERVAL)).virtualColumns().build();
    Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
    ScanResultValue expectedScanResult = new ScanResultValue(
            SEGMENT_ID.toString(),
            ImmutableList.of(TIME_COLUMN, DIM_NAME, DIM_FLOAT_NAME),
            getStreamOfEvents().collect(Collectors.toList())
    );
    List<ScanResultValue> expectedResults = Collections.singletonList(expectedScanResult);
    ScanQueryRunnerTest.verify(expectedResults, results);
}
Also used: QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment) ScanResultValue (org.apache.druid.query.scan.ScanResultValue) ScanQuery (org.apache.druid.query.scan.ScanQuery) LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec) ScanQueryRunnerTest (org.apache.druid.query.scan.ScanQueryRunnerTest) Test (org.junit.Test)
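
newTestQuery() is a helper local to DoubleStorageTest and is not shown here. A rough equivalent using the Druids builder that ships with druid; the "testDatasource" name stands in for whatever default the helper applies and is an assumption:

// Sketch: approximately what newTestQuery().intervals(...).build() produces.
ScanQuery equivalent = Druids.newScanQueryBuilder()
        .dataSource("testDatasource")
        .intervals(new LegacySegmentSpec(INTERVAL))
        .columns(Collections.emptyList())
        .build();

Uses: Druids (org.apache.druid.query.Druids) Collections (java.util.Collections)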

Aggregations

LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec): 28 uses
Test (org.junit.Test): 27 uses
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 21 uses
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 21 uses
QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment): 21 uses
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 20 uses
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 20 uses
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 20 uses
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 20 uses
ResultRow (org.apache.druid.query.groupby.ResultRow): 18 uses
ExpressionVirtualColumn (org.apache.druid.segment.virtual.ExpressionVirtualColumn): 15 uses
TableDataSource (org.apache.druid.query.TableDataSource): 6 uses
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 3 uses
Query (org.apache.druid.query.Query): 2 uses
QueryRunner (org.apache.druid.query.QueryRunner): 2 uses
InDimFilter (org.apache.druid.query.filter.InDimFilter): 2 uses
SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery): 2 uses
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1 use
IOException (java.io.IOException): 1 use
DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper): 1 use