Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class UnionQueryRunnerTest, the method testUnionQueryRunner:
@Test
public void testUnionQueryRunner()
{
  QueryRunner baseRunner = new QueryRunner()
  {
    @Override
    public Sequence run(Query query, Map responseContext)
    {
      // Verify that a concrete table datasource, not the union, reaches the base runner.
      Assert.assertTrue(query.getDataSource() instanceof TableDataSource);
      String dsName = Iterables.getOnlyElement(query.getDataSource().getNames());
      if (dsName.equals("ds1")) {
        responseContext.put("ds1", "ds1");
        return Sequences.simple(Arrays.asList(1, 2, 3));
      } else if (dsName.equals("ds2")) {
        responseContext.put("ds2", "ds2");
        return Sequences.simple(Arrays.asList(4, 5, 6));
      } else {
        throw new AssertionError("Unexpected DataSource");
      }
    }
  };
  UnionQueryRunner runner = new UnionQueryRunner(baseRunner);

  // Make a dummy timeseries query with a union datasource over ds1 and ds2.
  Query q = Druids.newTimeseriesQueryBuilder()
                  .dataSource(new UnionDataSource(Arrays.asList(new TableDataSource("ds1"), new TableDataSource("ds2"))))
                  .intervals("2014-01-01T00:00:00Z/2015-01-01T00:00:00Z")
                  .aggregators(QueryRunnerTestHelper.commonAggregators)
                  .build();

  Map<String, Object> responseContext = Maps.newHashMap();
  Sequence result = runner.run(q, responseContext);
  List res = Sequences.toList(result, Lists.newArrayList());

  // The per-table results are concatenated in datasource order.
  Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6), res);

  // Verify that the response context accumulated entries from both sub-queries.
  Assert.assertEquals(2, responseContext.size());
  Assert.assertEquals("ds1", responseContext.get("ds1"));
  Assert.assertEquals("ds2", responseContext.get("ds2"));
}
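The behavior this test pins down amounts to: rewrite the query once per table in the UnionDataSource, delegate each rewritten query to the base runner, and concatenate the per-table sequences. Below is a minimal sketch of that idea, not the production UnionQueryRunner; it reuses the test's baseRunner and assumes Query.withDataSource(...) for the rewrite and UnionDataSource.getDataSources() for enumerating the tables.

// Minimal sketch of the union-splitting idea; not the actual UnionQueryRunner.
QueryRunner sketchedUnionRunner = new QueryRunner()
{
  @Override
  public Sequence run(Query query, Map responseContext)
  {
    List<Sequence> perTableResults = new ArrayList<>();
    for (DataSource table : ((UnionDataSource) query.getDataSource()).getDataSources()) {
      // Re-issue the query once per underlying table; each sub-run may also
      // write into the shared responseContext, as the test asserts.
      perTableResults.add(baseRunner.run(query.withDataSource(table), responseContext));
    }
    // Concatenate in datasource order, matching the asserted [1, 2, 3, 4, 5, 6] result.
    return Sequences.concat(perTableResults);
  }
};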
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class HyperUniquesAggregationTest, the method testIngestAndQuery:
@Test
public void testIngestAndQuery() throws Exception
{
  AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
      Lists.newArrayList(new AggregatorsModule()),
      config,
      tempFolder
  );

  // Ingest-time metric: build a hyperUnique sketch over the raw "market" column.
  String metricSpec = "[{"
                      + "\"type\": \"hyperUnique\","
                      + "\"name\": \"index_hll\","
                      + "\"fieldName\": \"market\""
                      + "}]";

  String parseSpec = "{"
                     + "\"type\" : \"string\","
                     + "\"parseSpec\" : {"
                     + " \"format\" : \"tsv\","
                     + " \"timestampSpec\" : {"
                     + " \"column\" : \"timestamp\","
                     + " \"format\" : \"auto\""
                     + "},"
                     + " \"dimensionsSpec\" : {"
                     + " \"dimensions\": [],"
                     + " \"dimensionExclusions\" : [],"
                     + " \"spatialDimensions\" : []"
                     + " },"
                     + " \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
                     + " }"
                     + "}";

  // Query-time: re-aggregate the sketches and expose the estimated cardinality
  // via a hyperUniqueCardinality post-aggregator.
  String query = "{"
                 + "\"queryType\": \"groupBy\","
                 + "\"dataSource\": \"test_datasource\","
                 + "\"granularity\": \"ALL\","
                 + "\"dimensions\": [],"
                 + "\"aggregations\": ["
                 + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
                 + "],"
                 + "\"postAggregations\": ["
                 + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
                 + "],"
                 + "\"intervals\": [ \"1970/2050\" ]"
                 + "}";

  Sequence seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
      parseSpec,
      metricSpec,
      0,
      Granularities.NONE,
      50000,
      query
  );

  MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
  // HLL estimates are approximate, hence the 0.1 tolerance.
  Assert.assertEquals(3.0, row.getFloatMetric("index_hll"), 0.1);
  Assert.assertEquals(3.0, row.getFloatMetric("index_unique_count"), 0.1);
}
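Sequences.toList used above is a convenience built on the core Sequence contract: a Sequence is consumed by pushing an Accumulator through it rather than by pulling an Iterator, so the producer keeps control over resource cleanup. A minimal sketch of direct accumulation, using only Sequences.simple and the Accumulator interface from io.druid.java.util.common.guava:

// Summing a sequence by direct accumulation instead of materializing a list.
Sequence<Integer> numbers = Sequences.simple(Arrays.asList(1, 2, 3));
Integer sum = numbers.accumulate(
    0,
    new Accumulator<Integer, Integer>()
    {
      @Override
      public Integer accumulate(Integer accumulated, Integer in)
      {
        return accumulated + in;  // yields 6
      }
    }
);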
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class HyperUniquesAggregationTest, the method testIngestAndQueryPrecomputedHll:
@Test
public void testIngestAndQueryPrecomputedHll() throws Exception
{
  AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
      Lists.newArrayList(new AggregatorsModule()),
      config,
      tempFolder
  );

  // Ingest-time metric: the input column already holds serialized HLL sketches,
  // so isInputHyperUnique tells the aggregator to fold them instead of hashing raw values.
  String metricSpec = "[{"
                      + "\"type\": \"hyperUnique\","
                      + "\"name\": \"index_hll\","
                      + "\"fieldName\": \"preComputedHll\","
                      + "\"isInputHyperUnique\": true"
                      + "}]";

  String parseSpec = "{"
                     + "\"type\" : \"string\","
                     + "\"parseSpec\" : {"
                     + " \"format\" : \"tsv\","
                     + " \"timestampSpec\" : {"
                     + " \"column\" : \"timestamp\","
                     + " \"format\" : \"auto\""
                     + "},"
                     + " \"dimensionsSpec\" : {"
                     + " \"dimensions\": [],"
                     + " \"dimensionExclusions\" : [],"
                     + " \"spatialDimensions\" : []"
                     + " },"
                     + " \"columns\": [\"timestamp\", \"market\", \"preComputedHll\"]"
                     + " }"
                     + "}";

  String query = "{"
                 + "\"queryType\": \"groupBy\","
                 + "\"dataSource\": \"test_datasource\","
                 + "\"granularity\": \"ALL\","
                 + "\"dimensions\": [],"
                 + "\"aggregations\": ["
                 + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
                 + "],"
                 + "\"postAggregations\": ["
                 + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
                 + "],"
                 + "\"intervals\": [ \"1970/2050\" ]"
                 + "}";

  Sequence seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("druid.hll.sample.tsv").getFile()),
      parseSpec,
      metricSpec,
      0,
      Granularities.DAY,
      50000,
      query
  );

  MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
  // HLL estimates are approximate, hence the 0.1 tolerance.
  Assert.assertEquals(4.0, row.getFloatMetric("index_hll"), 0.1);
  Assert.assertEquals(4.0, row.getFloatMetric("index_unique_count"), 0.1);
}
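For context, a hypothetical sketch of how such a preComputedHll TSV column could be produced. HyperLogLogCollector.makeLatestCollector(), add(byte[]), and toByteArray() are the collector's real API, while the choice of Guava's murmur3_128 hash and Base64 encoding here is an assumption about the fixture format, not confirmed by the test:

// Hypothetical producer for a "preComputedHll" column: hash raw values into a
// collector, then Base64-encode the serialized sketch for the TSV file.
HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
for (String value : Arrays.asList("spot", "total_market", "upfront")) {
  collector.add(Hashing.murmur3_128().hashString(value, StandardCharsets.UTF_8).asBytes());
}
String preComputedHll = BaseEncoding.base64().encode(collector.toByteArray());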
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class GroupByQueryRunnerFactoryTest, the method testMergeRunnersEnsureGroupMerging:
@Test
public void testMergeRunnersEnsureGroupMerging() throws Exception
{
  GroupByQuery query = GroupByQuery
      .builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("tags", "tags")))
      .setAggregatorSpecs(Arrays.asList(new AggregatorFactory[]{new CountAggregatorFactory("count")}))
      .build();

  final QueryRunnerFactory factory = GroupByQueryRunnerTest.makeQueryRunnerFactory(new GroupByQueryConfig());

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          // Nest a second mergeResults to ensure grouping survives repeated merging.
          return factory.getToolchest().mergeResults(
              new QueryRunner()
              {
                @Override
                public Sequence run(Query query, Map responseContext)
                {
                  try {
                    // Run the same query over two identical segments and merge the
                    // two result sequences by the query's result ordering.
                    return new MergeSequence(
                        query.getResultOrdering(),
                        Sequences.simple(
                            Arrays.asList(
                                factory.createRunner(createSegment()).run(query, responseContext),
                                factory.createRunner(createSegment()).run(query, responseContext)
                            )
                        )
                    );
                  } catch (Exception e) {
                    throw Throwables.propagate(e);
                  }
                }
              }
          ).run(query, responseContext);
        }
      }
  );

  Sequence<Row> result = mergedRunner.run(query, Maps.newHashMap());

  // Each segment contributes the same rows, so the merged counts are doubled.
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t2", "count", 4L)
  );
  TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Row>()), "");
}
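MergeSequence, used in the innermost runner above, performs an ordered k-way merge: each input sequence must already be sorted by the supplied ordering, and the output interleaves them into one sorted sequence. A minimal standalone sketch with integers and Guava's natural ordering, using the same constructor shape as the test:

// Two already-sorted inputs merged into one sorted output: [1, 2, 3, 4, 5, 6].
Sequence<Integer> merged = new MergeSequence<>(
    Ordering.<Integer>natural(),
    Sequences.simple(
        Arrays.asList(
            Sequences.simple(Arrays.asList(1, 3, 5)),
            Sequences.simple(Arrays.asList(2, 4, 6))
        )
    )
);
List<Integer> out = Sequences.toList(merged, new ArrayList<Integer>());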
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class DefaultLimitSpecTest, the method testBuildWithExplicitOrder:
@Test
public void testBuildWithExplicitOrder()
{
  DefaultLimitSpec limitSpec = new DefaultLimitSpec(
      ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.ASCENDING)),
      2
  );

  // "k1" matches a dimension, so the ordering is built from the dimension column.
  Function<Sequence<Row>, Sequence<Row>> limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
      ImmutableList.<PostAggregator>of(new ConstantPostAggregator("k3", 1L))
  );
  Assert.assertEquals(
      ImmutableList.of(testRowsList.get(0), testRowsList.get(1)),
      Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );

  // If there is an aggregator with the same name, that aggregator is used to build the ordering.
  limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k1", "k1")),
      ImmutableList.<PostAggregator>of(new ConstantPostAggregator("k3", 1L))
  );
  Assert.assertEquals(
      ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
      Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );

  // If there is a post-aggregator with the same name, that post-aggregator is used instead.
  limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
      ImmutableList.<PostAggregator>of(
          new ArithmeticPostAggregator(
              "k1",
              "+",
              ImmutableList.<PostAggregator>of(new ConstantPostAggregator("x", 1), new ConstantPostAggregator("y", 1))
          )
      )
  );
  Assert.assertEquals(
      (List) ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
      (List) Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );

  // An ExpressionPostAggregator with the same name produces the same result.
  limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
      ImmutableList.<PostAggregator>of(new ExpressionPostAggregator("k1", "1 + 1"))
  );
  Assert.assertEquals(
      (List) ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
      (List) Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );
}
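The precedence this test pins down: when the OrderByColumnSpec column name matches a dimension, an aggregator, and/or a post-aggregator, the post-aggregator wins over the aggregator, which wins over the dimension. A minimal standalone sketch of applying such a limit function to in-memory rows; the MapBasedRow fixtures are hypothetical stand-ins for the test's testRowsList, and descending lexicographic order on a string dimension is assumed:

// Hypothetical rows; with DESCENDING order on "k1" and limit 1, "b" survives.
DefaultLimitSpec spec = new DefaultLimitSpec(
    ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.DESCENDING)),
    1
);
Sequence<Row> rows = Sequences.simple(Arrays.<Row>asList(
    new MapBasedRow(new DateTime("2014-01-01"), ImmutableMap.<String, Object>of("k1", "a")),
    new MapBasedRow(new DateTime("2014-01-01"), ImmutableMap.<String, Object>of("k1", "b"))
));
Function<Sequence<Row>, Sequence<Row>> fn = spec.build(
    ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
    ImmutableList.<AggregatorFactory>of(),
    ImmutableList.<PostAggregator>of()
);
List<Row> top = Sequences.toList(fn.apply(rows), new ArrayList<Row>());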