Example 1 with LegacySegmentSpec

Use of io.druid.query.spec.LegacySegmentSpec in the project druid by druid-io.

From the class ScanQuerySpecTest, method testSerializationLegacyString:

@Test
public void testSerializationLegacyString() throws Exception {
    String legacy = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"filter\":null,"
        + "\"columns\":[\"market\",\"quality\",\"index\"],"
        + "\"limit\":3,"
        + "\"context\":null}";
    String current = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"resultFormat\":\"list\","
        + "\"batchSize\":20480,"
        + "\"limit\":3,"
        + "\"filter\":null,"
        + "\"columns\":[\"market\",\"quality\",\"index\"],"
        + "\"context\":null,"
        + "\"descending\":false}";
    ScanQuery query = new ScanQuery(
        new TableDataSource(QueryRunnerTestHelper.dataSource),
        new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")),
        null,
        0,
        3,
        null,
        Arrays.<String>asList("market", "quality", "index"),
        null
    );
    String actual = jsonMapper.writeValueAsString(query);
    Assert.assertEquals(current, actual);
    Assert.assertEquals(query, jsonMapper.readValue(actual, ScanQuery.class));
    Assert.assertEquals(query, jsonMapper.readValue(legacy, ScanQuery.class));
}
Also used: TableDataSource(io.druid.query.TableDataSource) LegacySegmentSpec(io.druid.query.spec.LegacySegmentSpec) Interval(org.joda.time.Interval) Test(org.junit.Test)
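
For context, a minimal sketch of the segment spec on its own. Judging from the examples on this page, LegacySegmentSpec appears to accept either a Joda-Time Interval (Examples 1 and 5) or an ISO-8601 interval string (Examples 2-4). The druidMapper below is a hypothetical Jackson ObjectMapper configured for Druid's query types, not part of the original test:

// Sketch only: both constructions describe the same two-day interval.
LegacySegmentSpec fromInterval = new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14"));
LegacySegmentSpec fromString = new LegacySegmentSpec("2011-01-12/2011-01-14");

// Assumption: a Druid-aware mapper would serialize either spec into the
// {"type":"LegacySegmentSpec","intervals":[...]} shape seen in the JSON strings above.
String specJson = druidMapper.writeValueAsString(fromInterval);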

Example 2 with LegacySegmentSpec

Use of io.druid.query.spec.LegacySegmentSpec in the project druid by druid-io.

From the class MultiValuedDimensionTest, method testGroupByNoFilter:

@Test
public void testGroupByNoFilter() throws Exception {
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource("xx")
        .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
        .setGranularity(Granularities.ALL)
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("tags", "tags")))
        .setAggregatorSpecs(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
        .build();
    Sequence<Row> result = helper.runQueryOnSegmentsObjs(
        ImmutableList.<Segment>of(
            new QueryableIndexSegment("sid1", queryableIndex),
            new IncrementalIndexSegment(incrementalIndex, "sid2")
        ),
        query
    );
    List<Row> expectedResults = Arrays.asList(
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", null, "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t2", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t3", "count", 4L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t4", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t5", "count", 4L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t6", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t7", "count", 2L)
    );
    TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Row>()), "");
}
Also used: QueryableIndexSegment(io.druid.segment.QueryableIndexSegment) GroupByQuery(io.druid.query.groupby.GroupByQuery) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) IncrementalIndexSegment(io.druid.segment.IncrementalIndexSegment) ArrayList(java.util.ArrayList) Row(io.druid.data.input.Row) LegacySegmentSpec(io.druid.query.spec.LegacySegmentSpec) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest) Test(org.junit.Test)
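
A note on the expected rows above: "tags" is a multi-valued dimension, so each input row contributes one group per tag value, and rows without any tag value appear to fall into the null group, which is why the counts add up to more than the number of input rows. A minimal sketch of materializing the lazy result for an ad-hoc check, assuming org.junit.Assert is available and reusing the Sequences utility from the test:

// Sketch: pull the lazy Sequence<Row> into a List and check the group count.
// Eight groups are expected: the null group plus t1 through t7, as listed above.
List<Row> rows = Sequences.toList(result, new ArrayList<Row>());
Assert.assertEquals(8, rows.size());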

Example 3 with LegacySegmentSpec

Use of io.druid.query.spec.LegacySegmentSpec in the project druid by druid-io.

From the class MultiValuedDimensionTest, method testGroupByWithDimFilter:

@Test
public void testGroupByWithDimFilter() throws Exception {
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource("xx")
        .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
        .setGranularity(Granularities.ALL)
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("tags", "tags")))
        .setAggregatorSpecs(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
        .setDimFilter(new SelectorDimFilter("tags", "t3", null))
        .build();
    Sequence<Row> result = helper.runQueryOnSegmentsObjs(
        ImmutableList.<Segment>of(
            new QueryableIndexSegment("sid1", queryableIndex),
            new IncrementalIndexSegment(incrementalIndex, "sid2")
        ),
        query
    );
    List<Row> expectedResults = Arrays.asList(
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t2", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t3", "count", 4L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t4", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t5", "count", 2L)
    );
    TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Row>()), "");
}
Also used: QueryableIndexSegment(io.druid.segment.QueryableIndexSegment) ListFilteredDimensionSpec(io.druid.query.dimension.ListFilteredDimensionSpec) DimensionSpec(io.druid.query.dimension.DimensionSpec) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) RegexFilteredDimensionSpec(io.druid.query.dimension.RegexFilteredDimensionSpec) IncrementalIndexSegment(io.druid.segment.IncrementalIndexSegment) ArrayList(java.util.ArrayList) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) LegacySegmentSpec(io.druid.query.spec.LegacySegmentSpec) GroupByQuery(io.druid.query.groupby.GroupByQuery) SelectorDimFilter(io.druid.query.filter.SelectorDimFilter) Row(io.druid.data.input.Row) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest) Test(org.junit.Test)
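
The import list above also mentions ListFilteredDimensionSpec and RegexFilteredDimensionSpec, which other tests in MultiValuedDimensionTest use to filter the values of the dimension itself rather than filtering rows. A hedged sketch of the same builder with a regex-filtered dimension; the (delegate, pattern) constructor is an assumption and should be checked against io.druid.query.dimension.RegexFilteredDimensionSpec:

// Sketch only: wrap the "tags" dimension so that only values matching "t3" survive grouping.
GroupByQuery regexQuery = GroupByQuery.builder()
    .setDataSource("xx")
    .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
    .setGranularity(Granularities.ALL)
    .setDimensions(Lists.<DimensionSpec>newArrayList(
        new RegexFilteredDimensionSpec(new DefaultDimensionSpec("tags", "tags"), "t3")))  // assumed (delegate, pattern) constructor
    .setAggregatorSpecs(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
    .build();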

Example 4 with LegacySegmentSpec

Use of io.druid.query.spec.LegacySegmentSpec in the project druid by druid-io.

From the class GroupByQueryRunnerFactoryTest, method testMergeRunnersEnsureGroupMerging:

@Test
public void testMergeRunnersEnsureGroupMerging() throws Exception {
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource("xx")
        .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
        .setGranularity(Granularities.ALL)
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("tags", "tags")))
        .setAggregatorSpecs(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
        .build();
    final QueryRunnerFactory factory = GroupByQueryRunnerTest.makeQueryRunnerFactory(new GroupByQueryConfig());
    QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return factory.getToolchest().mergeResults(new QueryRunner() {

                @Override
                public Sequence run(Query query, Map responseContext) {
                    try {
                    return new MergeSequence(
                        query.getResultOrdering(),
                        Sequences.simple(Arrays.asList(
                            factory.createRunner(createSegment()).run(query, responseContext),
                            factory.createRunner(createSegment()).run(query, responseContext))));
                    } catch (Exception e) {
                        Throwables.propagate(e);
                        return null;
                    }
                }
            }).run(query, responseContext);
        }
    });
    Sequence<Row> result = mergedRunner.run(query, Maps.newHashMap());
    List<Row> expectedResults = Arrays.asList(
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t2", "count", 4L)
    );
    TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Row>()), "");
}
Also used: Query(io.druid.query.Query) ArrayList(java.util.ArrayList) MergeSequence(io.druid.java.util.common.guava.MergeSequence) Sequence(io.druid.java.util.common.guava.Sequence) LegacySegmentSpec(io.druid.query.spec.LegacySegmentSpec) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) QueryRunner(io.druid.query.QueryRunner) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) QueryRunnerFactory(io.druid.query.QueryRunnerFactory) Row(io.druid.data.input.Row) Map(java.util.Map) Test(org.junit.Test)
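
The nested mergeResults calls above deliberately exercise double merging. For comparison, a minimal sketch of the simpler single-level shape, using only calls that already appear in the test; createSegment() is a helper defined elsewhere in GroupByQueryRunnerFactoryTest:

// Sketch: merge the results of two per-segment runners once, without the inner mergeResults.
QueryRunner singleLevel = factory.getToolchest().mergeResults(new QueryRunner() {
    @Override
    public Sequence run(Query query, Map responseContext) {
        try {
            return new MergeSequence(
                query.getResultOrdering(),
                Sequences.simple(Arrays.asList(
                    factory.createRunner(createSegment()).run(query, responseContext),
                    factory.createRunner(createSegment()).run(query, responseContext))));
        } catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }
});
Sequence<Row> merged = singleLevel.run(query, Maps.newHashMap());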

Example 5 with LegacySegmentSpec

Use of io.druid.query.spec.LegacySegmentSpec in the project druid by druid-io.

From the class SelectQuerySpecTest, method testSerializationLegacyString:

@Test
public void testSerializationLegacyString() throws Exception {
    String legacy = "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"descending\":true,"
        + "\"filter\":null,"
        + "\"granularity\":{\"type\":\"all\"},"
        + "\"dimensions\":[\"market\",\"quality\"],"
        + "\"metrics\":[\"index\"],"
        + "\"virtualColumns\":null,"
        + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3},"
        + "\"context\":null}";
    String current = "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"descending\":true,"
        + "\"filter\":null,"
        + "\"granularity\":{\"type\":\"all\"},"
        + "\"dimensions\":"
        + "[{\"type\":\"default\",\"dimension\":\"market\",\"outputName\":\"market\",\"outputType\":\"STRING\"},"
        + "{\"type\":\"default\",\"dimension\":\"quality\",\"outputName\":\"quality\",\"outputType\":\"STRING\"}],"
        + "\"metrics\":[\"index\"],"
        + "\"virtualColumns\":[],"
        + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3,\"fromNext\":true},"
        + "\"context\":null}";
    SelectQuery query = new SelectQuery(
        new TableDataSource(QueryRunnerTestHelper.dataSource),
        new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")),
        true,
        null,
        QueryRunnerTestHelper.allGran,
        DefaultDimensionSpec.toSpec(Arrays.<String>asList("market", "quality")),
        Arrays.<String>asList("index"),
        null,
        new PagingSpec(null, 3, null),
        null
    );
    String actual = objectMapper.writeValueAsString(query);
    Assert.assertEquals(current, actual);
    Assert.assertEquals(query, objectMapper.readValue(actual, SelectQuery.class));
    Assert.assertEquals(query, objectMapper.readValue(legacy, SelectQuery.class));
}
Also used: TableDataSource(io.druid.query.TableDataSource) LegacySegmentSpec(io.druid.query.spec.LegacySegmentSpec) Interval(org.joda.time.Interval) Test(org.junit.Test)
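
One detail worth noting in this example: the query is built with the three-argument PagingSpec(null, 3, null), yet the serialized "current" JSON contains "fromNext":true while the legacy JSON omits the field, so the final assertion relies on the missing field deserializing to the same default. A minimal sketch of checking that round trip on the spec alone, assuming objectMapper is the same Druid-configured mapper used in the test and that PagingSpec defines value equality (as the query equality checks above imply):

// Sketch: round-trip the paging spec by itself; the test above implies a missing
// "fromNext" deserializes to the same spec that serializes with "fromNext":true.
PagingSpec spec = new PagingSpec(null, 3, null);
String specJson = objectMapper.writeValueAsString(spec);
PagingSpec roundTripped = objectMapper.readValue(specJson, PagingSpec.class);
Assert.assertEquals(spec, roundTripped);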

Aggregations

LegacySegmentSpec (io.druid.query.spec.LegacySegmentSpec): 7
Test (org.junit.Test): 7
Row (io.druid.data.input.Row): 4
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 4
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 4
ArrayList (java.util.ArrayList): 4
TableDataSource (io.druid.query.TableDataSource): 3
GroupByQuery (io.druid.query.groupby.GroupByQuery): 3
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest): 3
IncrementalIndexSegment (io.druid.segment.IncrementalIndexSegment): 3
QueryableIndexSegment (io.druid.segment.QueryableIndexSegment): 3
Interval (org.joda.time.Interval): 3
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 2
DimensionSpec (io.druid.query.dimension.DimensionSpec): 2
ListFilteredDimensionSpec (io.druid.query.dimension.ListFilteredDimensionSpec): 2
RegexFilteredDimensionSpec (io.druid.query.dimension.RegexFilteredDimensionSpec): 2
SelectorDimFilter (io.druid.query.filter.SelectorDimFilter): 2
MergeSequence (io.druid.java.util.common.guava.MergeSequence): 1
Sequence (io.druid.java.util.common.guava.Sequence): 1
Query (io.druid.query.Query): 1