Example 16 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

From the class UnionQueryRunnerTest, method testUnionQueryRunner.

@Test
public void testUnionQueryRunner() {
    QueryRunner baseRunner = new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            // verify that table datasource is passed to baseQueryRunner
            Assert.assertTrue(query.getDataSource() instanceof TableDataSource);
            String dsName = Iterables.getOnlyElement(query.getDataSource().getNames());
            if (dsName.equals("ds1")) {
                responseContext.put("ds1", "ds1");
                return Sequences.simple(Arrays.asList(1, 2, 3));
            } else if (dsName.equals("ds2")) {
                responseContext.put("ds2", "ds2");
                return Sequences.simple(Arrays.asList(4, 5, 6));
            } else {
                throw new AssertionError("Unexpected DataSource");
            }
        }
    };
    UnionQueryRunner runner = new UnionQueryRunner(baseRunner);
    // Make a dummy query with Union datasource
    Query q = Druids.newTimeseriesQueryBuilder()
        .dataSource(new UnionDataSource(Arrays.asList(
            new TableDataSource("ds1"),
            new TableDataSource("ds2")
        )))
        .intervals("2014-01-01T00:00:00Z/2015-01-01T00:00:00Z")
        .aggregators(QueryRunnerTestHelper.commonAggregators)
        .build();
    Map<String, Object> responseContext = Maps.newHashMap();
    Sequence result = runner.run(q, responseContext);
    List res = Sequences.toList(result, Lists.newArrayList());
    Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6), res);
    // verify response context
    Assert.assertEquals(2, responseContext.size());
    Assert.assertEquals("ds1", responseContext.get("ds1"));
    Assert.assertEquals("ds2", responseContext.get("ds2"));
}
Also used: List (java.util.List), Sequence (io.druid.java.util.common.guava.Sequence), Map (java.util.Map), Test (org.junit.Test)
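The test above leans on two Sequence building blocks that recur throughout these examples. As a minimal standalone sketch (not part of the test suite, using only classes already referenced above):

import com.google.common.collect.Lists;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;

import java.util.Arrays;
import java.util.List;

public class SequenceSketch {
    public static void main(String[] args) {
        // Sequences.simple wraps an Iterable lazily; nothing is iterated
        // until the sequence is consumed.
        Sequence<Integer> seq = Sequences.simple(Arrays.asList(1, 2, 3));
        // Sequences.toList accumulates the sequence into the supplied list,
        // which is how the tests here materialize results for assertions.
        List<Integer> out = Sequences.toList(seq, Lists.<Integer>newArrayList());
        System.out.println(out); // prints [1, 2, 3]
    }
}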

Example 17 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

From the class HyperUniquesAggregationTest, method testIngestAndQuery.

@Test
public void testIngestAndQuery() throws Exception {
    AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(Lists.newArrayList(new AggregatorsModule()), config, tempFolder);
    String metricSpec = "[{"
        + "\"type\": \"hyperUnique\","
        + "\"name\": \"index_hll\","
        + "\"fieldName\": \"market\""
        + "}]";
    String parseSpec = "{"
        + "\"type\" : \"string\","
        + "\"parseSpec\" : {"
        + "    \"format\" : \"tsv\","
        + "    \"timestampSpec\" : {"
        + "        \"column\" : \"timestamp\","
        + "        \"format\" : \"auto\""
        + "},"
        + "    \"dimensionsSpec\" : {"
        + "        \"dimensions\": [],"
        + "        \"dimensionExclusions\" : [],"
        + "        \"spatialDimensions\" : []"
        + "    },"
        + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
        + "  }"
        + "}";
    String query = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"postAggregations\": ["
        + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";
    Sequence seq = helper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
        parseSpec,
        metricSpec,
        0,
        Granularities.NONE,
        50000,
        query
    );
    MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
    Assert.assertEquals(3.0, row.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(3.0, row.getFloatMetric("index_unique_count"), 0.1);
}
Also used: AggregatorsModule (io.druid.jackson.AggregatorsModule), MapBasedRow (io.druid.data.input.MapBasedRow), AggregationTestHelper (io.druid.query.aggregation.AggregationTestHelper), Sequence (io.druid.java.util.common.guava.Sequence), File (java.io.File), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
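For readability, here is the groupBy query that the concatenated string above assembles, pretty-printed with identical content:

{
  "queryType": "groupBy",
  "dataSource": "test_datasource",
  "granularity": "ALL",
  "dimensions": [],
  "aggregations": [
    { "type": "hyperUnique", "name": "index_hll", "fieldName": "index_hll" }
  ],
  "postAggregations": [
    { "type": "hyperUniqueCardinality", "name": "index_unique_count", "fieldName": "index_hll" }
  ],
  "intervals": [ "1970/2050" ]
}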

Example 18 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

From the class HyperUniquesAggregationTest, method testIngestAndQueryPrecomputedHll.

@Test
public void testIngestAndQueryPrecomputedHll() throws Exception {
    AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(Lists.newArrayList(new AggregatorsModule()), config, tempFolder);
    String metricSpec = "[{"
        + "\"type\": \"hyperUnique\","
        + "\"name\": \"index_hll\","
        + "\"fieldName\": \"preComputedHll\","
        + "\"isInputHyperUnique\": true"
        + "}]";
    String parseSpec = "{"
        + "\"type\" : \"string\","
        + "\"parseSpec\" : {"
        + "    \"format\" : \"tsv\","
        + "    \"timestampSpec\" : {"
        + "        \"column\" : \"timestamp\","
        + "        \"format\" : \"auto\""
        + "},"
        + "    \"dimensionsSpec\" : {"
        + "        \"dimensions\": [],"
        + "        \"dimensionExclusions\" : [],"
        + "        \"spatialDimensions\" : []"
        + "    },"
        + "    \"columns\": [\"timestamp\", \"market\", \"preComputedHll\"]"
        + "  }"
        + "}";
    String query = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"postAggregations\": ["
        + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";
    Sequence seq = helper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.hll.sample.tsv").getFile()),
        parseSpec,
        metricSpec,
        0,
        Granularities.DAY,
        50000,
        query
    );
    MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
    Assert.assertEquals(4.0, row.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(4.0, row.getFloatMetric("index_unique_count"), 0.1);
}
Also used: AggregatorsModule (io.druid.jackson.AggregatorsModule), MapBasedRow (io.druid.data.input.MapBasedRow), AggregationTestHelper (io.druid.query.aggregation.AggregationTestHelper), Sequence (io.druid.java.util.common.guava.Sequence), File (java.io.File), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
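The substantive differences from Example 17 are the input file and the metric spec: here the preComputedHll column already holds pre-built HLL sketches, and isInputHyperUnique tells the aggregator to merge them rather than hash raw values. Pretty-printed, the metric spec assembled above reads:

[{
  "type": "hyperUnique",
  "name": "index_hll",
  "fieldName": "preComputedHll",
  "isInputHyperUnique": true
}]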

Example 19 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

From the class GroupByQueryRunnerFactoryTest, method testMergeRunnersEnsureGroupMerging.

@Test
public void testMergeRunnersEnsureGroupMerging() throws Exception {
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource("xx")
        .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
        .setGranularity(Granularities.ALL)
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("tags", "tags")))
        .setAggregatorSpecs(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
        .build();
    final QueryRunnerFactory factory = GroupByQueryRunnerTest.makeQueryRunnerFactory(new GroupByQueryConfig());
    QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return factory.getToolchest().mergeResults(new QueryRunner() {

                @Override
                public Sequence run(Query query, Map responseContext) {
                    try {
                        return new MergeSequence(
                            query.getResultOrdering(),
                            Sequences.simple(Arrays.asList(
                                factory.createRunner(createSegment()).run(query, responseContext),
                                factory.createRunner(createSegment()).run(query, responseContext)
                            ))
                        );
                    } catch (Exception e) {
                        // Throwables.propagate always throws, so rethrowing its result
                        // directly avoids an unreachable "return null" fallthrough.
                        throw Throwables.propagate(e);
                    }
                }
            }).run(query, responseContext);
        }
    });
    Sequence<Row> result = mergedRunner.run(query, Maps.newHashMap());
    List<Row> expectedResults = Arrays.asList(
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t2", "count", 4L)
    );
    TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Row>()), "");
}
Also used: Query (io.druid.query.Query), ArrayList (java.util.ArrayList), MergeSequence (io.druid.java.util.common.guava.MergeSequence), Sequence (io.druid.java.util.common.guava.Sequence), LegacySegmentSpec (io.druid.query.spec.LegacySegmentSpec), DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), QueryRunner (io.druid.query.QueryRunner), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), QueryRunnerFactory (io.druid.query.QueryRunnerFactory), Row (io.druid.data.input.Row), Map (java.util.Map), Test (org.junit.Test)
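To isolate what MergeSequence contributes in the nested runner above, a minimal standalone sketch (not from the test suite; it assumes the same two-argument constructor used above, with Guava's natural ordering standing in for query.getResultOrdering()):

import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import io.druid.java.util.common.guava.MergeSequence;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;

import java.util.Arrays;

public class MergeSequenceSketch {
    public static void main(String[] args) {
        // Two inner sequences, each already sorted, standing in for the
        // per-segment results produced by factory.createRunner(...).run(...).
        Sequence<Integer> a = Sequences.simple(Arrays.asList(1, 3, 5));
        Sequence<Integer> b = Sequences.simple(Arrays.asList(2, 4, 6));
        // MergeSequence performs an ordered merge over a sequence of
        // sequences, preserving the supplied ordering across all inputs.
        Sequence<Integer> merged = new MergeSequence<Integer>(
            Ordering.<Integer>natural(),
            Sequences.simple(Arrays.<Sequence<Integer>>asList(a, b))
        );
        System.out.println(Sequences.toList(merged, Lists.<Integer>newArrayList()));
        // prints [1, 2, 3, 4, 5, 6]
    }
}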

Example 20 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

From the class DefaultLimitSpecTest, method testBuildWithExplicitOrder.

@Test
public void testBuildWithExplicitOrder() {
    DefaultLimitSpec limitSpec = new DefaultLimitSpec(
        ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.ASCENDING)),
        2
    );
    Function<Sequence<Row>, Sequence<Row>> limitFn = limitSpec.build(
        ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
        ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
        ImmutableList.<PostAggregator>of(new ConstantPostAggregator("k3", 1L))
    );
    Assert.assertEquals(
        ImmutableList.of(testRowsList.get(0), testRowsList.get(1)),
        Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
    );
    // if there is an aggregator with the same name, it is used to build the ordering
    limitFn = limitSpec.build(
        ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
        ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k1", "k1")),
        ImmutableList.<PostAggregator>of(new ConstantPostAggregator("k3", 1L))
    );
    Assert.assertEquals(
        ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
        Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
    );
    // if there is a post-aggregator with the same name, it is used to build the ordering
    limitFn = limitSpec.build(
        ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
        ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
        ImmutableList.<PostAggregator>of(new ArithmeticPostAggregator(
            "k1",
            "+",
            ImmutableList.<PostAggregator>of(new ConstantPostAggregator("x", 1), new ConstantPostAggregator("y", 1))
        ))
    );
    Assert.assertEquals(
        (List) ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
        (List) Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
    );
    // an ExpressionPostAggregator with the same name produces the same result
    limitFn = limitSpec.build(
        ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
        ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
        ImmutableList.<PostAggregator>of(new ExpressionPostAggregator("k1", "1 + 1"))
    );
    Assert.assertEquals(
        (List) ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
        (List) Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
    );
}
Also used: DimensionSpec (io.druid.query.dimension.DimensionSpec), DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), PostAggregator (io.druid.query.aggregation.PostAggregator), ArithmeticPostAggregator (io.druid.query.aggregation.post.ArithmeticPostAggregator), ExpressionPostAggregator (io.druid.query.aggregation.post.ExpressionPostAggregator), ConstantPostAggregator (io.druid.query.aggregation.post.ConstantPostAggregator), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory), ArrayList (java.util.ArrayList), Sequence (io.druid.java.util.common.guava.Sequence), Row (io.druid.data.input.Row), MapBasedRow (io.druid.data.input.MapBasedRow), Test (org.junit.Test)
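The assertions above all hinge on how build() resolves the ordering column "k1" by name: a dimension wins first, then an aggregator, then a post-aggregator. As a condensed standalone sketch of that pattern (not from the test suite; the import paths and the MapBasedRow fixture are assumptions based on the classes referenced above):

import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import io.druid.data.input.MapBasedRow;
import io.druid.data.input.Row;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.groupby.orderby.DefaultLimitSpec;
import io.druid.query.groupby.orderby.OrderByColumnSpec;
import org.joda.time.DateTime;

import java.util.HashMap;
import java.util.Map;

public class LimitSpecSketch {
    public static void main(String[] args) {
        // Three rows keyed by the dimension "k1", deliberately out of order.
        Sequence<Row> rows = Sequences.simple(ImmutableList.<Row>of(row("c"), row("a"), row("b")));
        // Sort ascending by "k1" and keep the first two rows.
        DefaultLimitSpec limitSpec = new DefaultLimitSpec(
            ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.ASCENDING)),
            2
        );
        // "k1" resolves against the dimension list here, so the rows sort by
        // the dimension's string value and the output is the "a" and "b" rows.
        Function<Sequence<Row>, Sequence<Row>> limitFn = limitSpec.build(
            ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
            ImmutableList.<AggregatorFactory>of(),
            ImmutableList.<PostAggregator>of()
        );
        System.out.println(Sequences.toList(limitFn.apply(rows), Lists.<Row>newArrayList()));
    }

    private static Row row(String k1) {
        Map<String, Object> event = new HashMap<String, Object>();
        event.put("k1", k1);
        return new MapBasedRow(new DateTime("1970-01-01"), event);
    }
}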

Aggregations

Sequence (io.druid.java.util.common.guava.Sequence): 56 uses
Test (org.junit.Test): 35 uses
Interval (org.joda.time.Interval): 26 uses
DateTime (org.joda.time.DateTime): 16 uses
List (java.util.List): 15 uses
Query (io.druid.query.Query): 14 uses
Map (java.util.Map): 14 uses
QueryRunner (io.druid.query.QueryRunner): 13 uses
Result (io.druid.query.Result): 12 uses
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest): 10 uses
MergeSequence (io.druid.java.util.common.guava.MergeSequence): 9 uses
TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue): 9 uses
Row (io.druid.data.input.Row): 8 uses
ImmutableMap (com.google.common.collect.ImmutableMap): 7 uses
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 7 uses
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 7 uses
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 7 uses
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec): 7 uses
ArrayList (java.util.ArrayList): 7 uses
MapMaker (com.google.common.collect.MapMaker): 6 uses