Use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class DataSchemaTest, method testEmptyDatasource.
@Test
public void testEmptyDatasource()
{
  Map<String, Object> parser = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              DimensionsSpec.builder()
                            .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")))
                            .setDimensionExclusions(ImmutableList.of("dimC"))
                            .build(),
              null,
              null,
              null
          ),
          null
      ),
      JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
  );

  expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
  expectedException.expectMessage("dataSource cannot be null or empty. Please provide a dataSource.");

  DataSchema schema = new DataSchema(
      "",
      parser,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2")
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
      null,
      jsonMapper
  );
}
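The expectedException calls above rely on JUnit 4's ExpectedException rule. The field declaration is not part of this excerpt, but given the usage, the test class presumably declares it along these lines (a minimal sketch of the assumed setup):

import org.junit.Rule;
import org.junit.rules.ExpectedException;

// Declared once on the test class; a test that calls expect()/expectMessage()
// passes only if its remaining statements throw a matching exception.
@Rule
public ExpectedException expectedException = ExpectedException.none();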
Use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class DataSchemaTest, method testExplicitInclude.
@Test
public void testExplicitInclude()
{
  Map<String, Object> parser = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              DimensionsSpec.builder()
                            .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")))
                            .setDimensionExclusions(ImmutableList.of("dimC"))
                            .build(),
              null,
              null,
              null
          ),
          null
      ),
      JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
  );

  DataSchema schema = new DataSchema(
      IdUtilsTest.VALID_ID_CHARS,
      parser,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2")
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
      null,
      jsonMapper
  );

  Assert.assertEquals(
      ImmutableSet.of("__time", "dimC", "col1", "metric1", "metric2"),
      schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions()
  );
}
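The exclusion set asserted above combines the configured exclusion (dimC) with __time, the aggregator output names, and any aggregator input columns that are not explicit dimensions: col1 is excluded, while col2 survives because it is listed as a dimension. A minimal sketch of the AggregatorFactory accessors involved in that bookkeeping:

DoubleSumAggregatorFactory metric1 = new DoubleSumAggregatorFactory("metric1", "col1");

metric1.getName();         // "metric1" -- the output column, excluded as a dimension
metric1.requiredFields();  // ["col1"]  -- the input column it reads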
Use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class DataSchemaTest, method testTransformSpec.
@Test
public void testTransformSpec()
{
  Map<String, Object> parserMap = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2"))),
              null,
              null,
              null
          ),
          null
      ),
      JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
  );

  DataSchema schema = new DataSchema(
      IdUtilsTest.VALID_ID_CHARS,
      parserMap,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2")
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
      new TransformSpec(
          new SelectorDimFilter("dimA", "foo", null),
          ImmutableList.of(new ExpressionTransform("expr", "concat(dimA,dimA)", TestExprMacroTable.INSTANCE))
      ),
      jsonMapper
  );

  // Test hack that produces a StringInputRowParser.
  final StringInputRowParser parser = (StringInputRowParser) schema.getParser();

  final InputRow row1bb = parser.parseBatch(
      ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}".getBytes(StandardCharsets.UTF_8))
  ).get(0);
  Assert.assertEquals(DateTimes.of("2000-01-01"), row1bb.getTimestamp());
  Assert.assertEquals("foo", row1bb.getRaw("dimA"));
  Assert.assertEquals("foofoo", row1bb.getRaw("expr"));

  final InputRow row1string = parser.parse("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}");
  Assert.assertEquals(DateTimes.of("2000-01-01"), row1string.getTimestamp());
  Assert.assertEquals("foo", row1string.getRaw("dimA"));
  Assert.assertEquals("foofoo", row1string.getRaw("expr"));

  final InputRow row2 = parser.parseBatch(
      ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"x\"}".getBytes(StandardCharsets.UTF_8))
  ).get(0);
  Assert.assertNull(row2);
}
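The TransformSpec here does two things, and the assertions confirm both: the SelectorDimFilter keeps only rows where dimA equals "foo" (which is why row2 parses to null), and the ExpressionTransform adds a derived expr column. The same spec in isolation, with the behavior spelled out:

TransformSpec transformSpec = new TransformSpec(
    // Rows failing this filter are dropped at ingestion time; parsing them yields null.
    new SelectorDimFilter("dimA", "foo", null),
    // Adds a computed "expr" column, e.g. dimA = "foo" produces expr = "foofoo".
    ImmutableList.of(new ExpressionTransform("expr", "concat(dimA,dimA)", TestExprMacroTable.INSTANCE))
);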
Use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class DataSchemaTest, method testDuplicateAggregators.
@Test
public void testDuplicateAggregators()
{
  Map<String, Object> parser = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              DimensionsSpec.builder()
                            .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time")))
                            .setDimensionExclusions(ImmutableList.of("dimC"))
                            .build(),
              null,
              null,
              null
          ),
          null
      ),
      JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
  );

  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage(
      "Cannot specify a column more than once: [metric1] seen in metricsSpec list (2 occurrences); "
      + "[metric3] seen in metricsSpec list (2 occurrences)"
  );

  DataSchema schema = new DataSchema(
      IdUtilsTest.VALID_ID_CHARS,
      parser,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2"),
          new DoubleSumAggregatorFactory("metric1", "col3"),
          new DoubleSumAggregatorFactory("metric3", "col4"),
          new DoubleSumAggregatorFactory("metric3", "col5")
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
      null,
      jsonMapper
  );
}
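Note that the conflict is on the aggregator output name, not the input column: metric1 appears twice (over col1 and col3) and metric3 twice (over col4 and col5), while reading different input columns is fine. A minimal sketch of how such duplicates can be detected, assuming an AggregatorFactory[] named aggregators like the one above (an illustration only, not Druid's actual validation code):

import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;

// Group the factories by output name; any name occurring more than once is a duplicate.
Map<String, Long> occurrences = Arrays.stream(aggregators)
    .collect(Collectors.groupingBy(AggregatorFactory::getName, Collectors.counting()));
occurrences.values().removeIf(count -> count <= 1);  // e.g. {metric1=2, metric3=2}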
Use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class CalciteJoinQueryTest, method testJoinWithLimitBeforeJoining.
@Test
public void testJoinWithLimitBeforeJoining() throws Exception
{
  // Cannot vectorize JOIN operator.
  cannotVectorize();

  testQuery(
      "SELECT t1.dim2, AVG(t1.m2) FROM (SELECT * FROM foo LIMIT 10) AS t1 INNER JOIN foo AS t2 ON t1.m1 = t2.m1 GROUP BY t1.dim2",
      ImmutableList.of(
          GroupByQuery.builder()
                      .setDataSource(
                          join(
                              new QueryDataSource(
                                  newScanQueryBuilder()
                                      .dataSource(CalciteTests.DATASOURCE1)
                                      .intervals(querySegmentSpec(Filtration.eternity()))
                                      .columns("dim2", "m1", "m2")
                                      .context(QUERY_CONTEXT_DEFAULT)
                                      .limit(10)
                                      .build()
                              ),
                              new QueryDataSource(
                                  newScanQueryBuilder()
                                      .dataSource(CalciteTests.DATASOURCE1)
                                      .intervals(querySegmentSpec(Filtration.eternity()))
                                      .columns(ImmutableList.of("m1"))
                                      .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
                                      .context(QUERY_CONTEXT_DEFAULT)
                                      .build()
                              ),
                              "j0.",
                              equalsCondition(
                                  DruidExpression.ofColumn(ColumnType.FLOAT, "m1"),
                                  DruidExpression.ofColumn(ColumnType.FLOAT, "j0.m1")
                              ),
                              JoinType.INNER
                          )
                      )
                      .setInterval(querySegmentSpec(Filtration.eternity()))
                      .setDimensions(new DefaultDimensionSpec("dim2", "d0", ColumnType.STRING))
                      .setGranularity(Granularities.ALL)
                      .setAggregatorSpecs(
                          useDefault
                          ? aggregators(
                              new DoubleSumAggregatorFactory("a0:sum", "m2"),
                              new CountAggregatorFactory("a0:count")
                          )
                          : aggregators(
                              new DoubleSumAggregatorFactory("a0:sum", "m2"),
                              new FilteredAggregatorFactory(
                                  new CountAggregatorFactory("a0:count"),
                                  not(selector("m2", null, null))
                              )
                          )
                      )
                      .setPostAggregatorSpecs(
                          ImmutableList.of(
                              new ArithmeticPostAggregator(
                                  "a0",
                                  "quotient",
                                  ImmutableList.of(
                                      new FieldAccessPostAggregator(null, "a0:sum"),
                                      new FieldAccessPostAggregator(null, "a0:count")
                                  )
                              )
                          )
                      )
                      .setContext(QUERY_CONTEXT_DEFAULT)
                      .build()
      ),
      NullHandling.sqlCompatible()
      ? ImmutableList.of(
          new Object[]{null, 4.0},
          new Object[]{"", 3.0},
          new Object[]{"a", 2.5},
          new Object[]{"abc", 5.0}
      )
      : ImmutableList.of(
          new Object[]{"", 3.6666666666666665},
          new Object[]{"a", 2.5},
          new Object[]{"abc", 5.0}
      )
  );
}
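As the expected query shows, AVG(t1.m2) is planned as two native aggregators plus a post-aggregator: a DoubleSumAggregatorFactory for the running sum, a CountAggregatorFactory for the row count (filtered to non-null m2 values in SQL-compatible null handling), and an ArithmeticPostAggregator that divides the two at query time. The same decomposition in isolation:

// sum(m2) and count(m2), combined into the average only after aggregation.
new DoubleSumAggregatorFactory("a0:sum", "m2");
new CountAggregatorFactory("a0:count");
new ArithmeticPostAggregator(
    "a0",
    "quotient",
    ImmutableList.of(
        new FieldAccessPostAggregator(null, "a0:sum"),
        new FieldAccessPostAggregator(null, "a0:count")
    )
);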