Use of org.apache.druid.query.aggregation.FloatSumAggregatorFactory in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggAndExpressionFilteredAgg.
@Test
public void testTimeSeriesWithFilteredAggAndExpressionFilteredAgg() {
// can't vectorize when expression-based aggregators are present
cannotVectorize();
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.ALL_GRAN)
    .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .aggregators(
        Lists.newArrayList(
            Iterables.concat(
                aggregatorFactoryList,
                ImmutableList.of(
                    new FilteredAggregatorFactory(
                        new CountAggregatorFactory("filteredAgg"),
                        new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null)
                    ),
                    new LongSumAggregatorFactory("altLongCount", null, "if (market == 'spot', 1, 0)", TestExprMacroTable.INSTANCE),
                    new DoubleSumAggregatorFactory("altDoubleCount", null, "if (market == 'spot', 1, 0)", TestExprMacroTable.INSTANCE),
                    new FloatSumAggregatorFactory("altFloatCount", null, "if (market == 'spot', 1, 0)", TestExprMacroTable.INSTANCE)
                )
            )
        )
    )
    .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
    .descending(descending)
    .context(makeContext())
    .build();
Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query)).toList();
List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-04-01"),
        new TimeseriesResultValue(
            ImmutableMap.<String, Object>builder()
                .put("filteredAgg", 18L)
                .put("addRowsIndexConstant", 12486.361190795898d)
                .put("index", 12459.361190795898d)
                .put("uniques", 9.019833517963864d)
                .put("rows", 26L)
                .put("altLongCount", 18L)
                .put("altDoubleCount", 18.0)
                .put("altFloatCount", 18.0f)
                .build()
        )
    )
);
assertExpectedResults(expectedResults, actualResults);
}
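For reference, the two FloatSumAggregatorFactory constructor forms exercised across these examples are the field-based form and the expression-based form (fieldName null, expression evaluated per row). A minimal sketch, using only constructor signatures that appear in the examples on this page ("indexFloat" is a column name borrowed from the GroupBy examples below):

import org.apache.druid.query.aggregation.FloatSumAggregatorFactory;
import org.apache.druid.query.expression.TestExprMacroTable;

// Field-based: sums a float column directly.
FloatSumAggregatorFactory byField = new FloatSumAggregatorFactory("floatSum", "indexFloat");

// Expression-based: fieldName is null; the expression is evaluated row by row.
FloatSumAggregatorFactory byExpression = new FloatSumAggregatorFactory(
    "altFloatCount",
    null,
    "if (market == 'spot', 1, 0)",
    TestExprMacroTable.INSTANCE
);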
Use of org.apache.druid.query.aggregation.FloatSumAggregatorFactory in project druid by druid-io.
From the class QueryableIndexColumnCapabilitiesTest, method setup.
@BeforeClass
public static void setup() throws IOException {
MapInputRowParser parser = new MapInputRowParser(
    new TimeAndDimsParseSpec(
        new TimestampSpec("time", "auto", null),
        new DimensionsSpec(
            ImmutableList.<DimensionSchema>builder()
                .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1", "d2")))
                .add(new DoubleDimensionSchema("d3"))
                .add(new FloatDimensionSchema("d4"))
                .add(new LongDimensionSchema("d5"))
                .build()
        )
    )
);
AggregatorFactory[] metricsSpecs = new AggregatorFactory[]{
    new CountAggregatorFactory("cnt"),
    new DoubleSumAggregatorFactory("m1", "d3"),
    new FloatSumAggregatorFactory("m2", "d4"),
    new LongSumAggregatorFactory("m3", "d5"),
    new HyperUniquesAggregatorFactory("m4", "d1")
};
List<InputRow> rows = new ArrayList<>();
Map<String, Object> event = ImmutableMap.<String, Object>builder()
    .put("time", DateTimes.nowUtc().getMillis())
    .put("d1", "some string")
    .put("d2", ImmutableList.of("some", "list"))
    .put("d3", 1.234)
    .put("d4", 1.234f)
    .put("d5", 10L)
    .build();
rows.add(Iterables.getOnlyElement(parser.parseBatch(event)));
IndexBuilder builder = IndexBuilder.create()
    .rows(rows)
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(metricsSpecs)
            .withDimensionsSpec(parser)
            .withRollup(false)
            .build()
    )
    .tmpDir(temporaryFolder.newFolder());
INC_INDEX = builder.buildIncrementalIndex();
MMAP_INDEX = builder.buildMMappedIndex();
List<InputRow> rowsWithNulls = new ArrayList<>();
rowsWithNulls.add(Iterables.getOnlyElement(parser.parseBatch(event)));
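// Note: the null-bearing event below is assembled with a plain HashMap rather than
// ImmutableMap.builder() as above, because Guava's ImmutableMap rejects null values.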
Map<String, Object> eventWithNulls = new HashMap<>();
eventWithNulls.put("time", DateTimes.nowUtc().getMillis());
eventWithNulls.put("d1", null);
eventWithNulls.put("d2", ImmutableList.of());
eventWithNulls.put("d3", null);
eventWithNulls.put("d4", null);
eventWithNulls.put("d5", null);
rowsWithNulls.add(Iterables.getOnlyElement(parser.parseBatch(eventWithNulls)));
IndexBuilder builderWithNulls = IndexBuilder.create()
    .rows(rowsWithNulls)
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(metricsSpecs)
            .withDimensionsSpec(parser)
            .withRollup(false)
            .build()
    )
    .tmpDir(temporaryFolder.newFolder());
INC_INDEX_WITH_NULLS = builderWithNulls.buildIncrementalIndex();
MMAP_INDEX_WITH_NULLS = builderWithNulls.buildMMappedIndex();
}
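The setup produces both incremental and mmapped variants of the index, with and without null values, so column capabilities can be compared across all four. A hedged sketch of the kind of assertion this enables; the accessor chain (getColumnHolder/getCapabilities/getType) is an assumption about the Druid column API, not code from the original test:

// Hypothetical check, not part of the original setup:
ColumnCapabilities caps = MMAP_INDEX.getColumnHolder("m2").getCapabilities();
Assert.assertEquals(ValueType.FLOAT, caps.getType()); // m2 is the FloatSumAggregatorFactory metric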
Use of org.apache.druid.query.aggregation.FloatSumAggregatorFactory in project druid by druid-io.
From the class SchemaEvolutionTest, method testNumericEvolutionFiltering.
@Test
@Parameters(method = "doVectorize")
public void testNumericEvolutionFiltering(boolean doVectorize) {
final TimeseriesQueryRunnerFactory factory = QueryRunnerTestHelper.newTimeseriesQueryRunnerFactory();
// "c1" changes from string(1) -> long(2) -> float(3) -> nonexistent(4)
// test behavior of filtering
final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals("1000/3000")
    .filters(new BoundDimFilter("c1", "9", "11", false, false, null, null, StringComparators.NUMERIC))
    .aggregators(
        ImmutableList.of(
            new LongSumAggregatorFactory("a", "c1"),
            new DoubleSumAggregatorFactory("b", "c1"),
            new FloatSumAggregatorFactory("d", "c1"),
            new LongMinAggregatorFactory("e", "c1"),
            new CountAggregatorFactory("c")
        )
    )
    .context(ImmutableMap.of(QueryContexts.VECTORIZE_KEY, doVectorize))
    .build();
// Only string(1) -- which we can filter but not aggregate
Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 19L, "b", 19.1, "c", 2L, "d", 19.1f, "e", 9L)), runQuery(query, factory, ImmutableList.of(index1)));
// Only long(2) -- which we can filter and aggregate
Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 19L, "b", 19.0, "c", 2L, "d", 19.0f, "e", 9L)), runQuery(query, factory, ImmutableList.of(index2)));
// Only float(3) -- which we can't filter, but can aggregate
Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 19L, "b", 19.1, "c", 2L, "d", 19.1f, "e", 9L)), runQuery(query, factory, ImmutableList.of(index3)));
// Only nonexistent(4)
Assert.assertEquals(
    timeseriesResult(
        TestHelper.createExpectedMap(
            "a", NullHandling.defaultLongValue(),
            "b", NullHandling.defaultDoubleValue(),
            "c", 0L,
            "d", NullHandling.defaultFloatValue(),
            "e", NullHandling.sqlCompatible() ? null : Long.MAX_VALUE
        )
    ),
    runQuery(query, factory, ImmutableList.of(index4))
);
// string(1) + long(2) + float(3) + nonexistent(4)
Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 57L, "b", 57.2, "c", 6L, "d", 57.20000076293945, "e", 9L)), runQuery(query, factory, ImmutableList.of(index1, index2, index3, index4)));
}
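The nonexistent(4) expectations above depend on Druid's null-handling mode. A short sketch of what those NullHandling defaults resolve to, assuming Druid's documented behavior (null in SQL-compatible mode, zero-like defaults in legacy replace-with-default mode):

import org.apache.druid.common.config.NullHandling;

Long   a = NullHandling.defaultLongValue();    // null in SQL-compatible mode, 0L in legacy mode
Double b = NullHandling.defaultDoubleValue();  // null, or 0.0 in legacy mode
Float  d = NullHandling.defaultFloatValue();   // null, or 0.0f in legacy mode
// LongMin over zero rows: null in SQL-compatible mode, otherwise its identity value.
Long   e = NullHandling.sqlCompatible() ? null : Long.MAX_VALUE;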
Use of org.apache.druid.query.aggregation.FloatSumAggregatorFactory in project druid by druid-io.
From the class GroupByQueryQueryToolChestTest, method testResultLevelCacheKeyWithSubTotalsSpec.
@Test
public void testResultLevelCacheKeyWithSubTotalsSpec() {
final GroupByQuery query1 = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(Lists.newArrayList(
        new DefaultDimensionSpec("quality", "alias"),
        new DefaultDimensionSpec("market", "market")
    ))
    .setAggregatorSpecs(Arrays.asList(
        QueryRunnerTestHelper.ROWS_COUNT,
        new LongSumAggregatorFactory("idx", "index"),
        new FloatSumAggregatorFactory("idxFloat", "indexFloat"),
        new DoubleSumAggregatorFactory("idxDouble", "index")
    ))
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .setSubtotalsSpec(ImmutableList.of(
        ImmutableList.of("alias"),
        ImmutableList.of("market"),
        ImmutableList.of()
    ))
    .build();
final GroupByQuery query2 = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(Lists.newArrayList(
        new DefaultDimensionSpec("quality", "alias"),
        new DefaultDimensionSpec("market", "market")
    ))
    .setAggregatorSpecs(Arrays.asList(
        QueryRunnerTestHelper.ROWS_COUNT,
        new LongSumAggregatorFactory("idx", "index"),
        new FloatSumAggregatorFactory("idxFloat", "indexFloat"),
        new DoubleSumAggregatorFactory("idxDouble", "index")
    ))
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .setSubtotalsSpec(ImmutableList.of(
        ImmutableList.of("alias"),
        ImmutableList.of()
    ))
    .build();
final CacheStrategy<ResultRow, Object, GroupByQuery> strategy1 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query1);
final CacheStrategy<ResultRow, Object, GroupByQuery> strategy2 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query2);
Assert.assertTrue(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
Assert.assertFalse(Arrays.equals(strategy1.computeResultLevelCacheKey(query1), strategy2.computeResultLevelCacheKey(query2)));
}
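query1 and query2 differ only in their subtotalsSpec. Subtotals are applied while per-segment results are merged, so the spec feeds into the result-level cache key but not the segment-level one; that is the contract the assertion pair checks. The same checks in a slightly more direct form (plain JUnit, no new APIs):

// Same segment-level key: per-segment work is identical for both queries.
Assert.assertArrayEquals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2));
// Different result-level key: the merged output depends on the subtotals spec.
Assert.assertFalse(Arrays.equals(
    strategy1.computeResultLevelCacheKey(query1),
    strategy2.computeResultLevelCacheKey(query2)
));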
Use of org.apache.druid.query.aggregation.FloatSumAggregatorFactory in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testGroupByWithSubtotalsSpecWithRenamedDimensionAndFilter.
// https://github.com/apache/druid/issues/7820
@Test
public void testGroupByWithSubtotalsSpecWithRenamedDimensionAndFilter() {
if (!config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2)) {
return;
}
GroupByQuery query = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setVirtualColumns(new ExpressionVirtualColumn("alias", "quality", ColumnType.STRING, TestExprMacroTable.INSTANCE))
    .setDimensions(Lists.newArrayList(
        new DefaultDimensionSpec("quality", "quality"),
        new DefaultDimensionSpec("market", "market"),
        new DefaultDimensionSpec("alias", "alias_renamed")
    ))
    .setAggregatorSpecs(Arrays.asList(
        QueryRunnerTestHelper.ROWS_COUNT,
        new LongSumAggregatorFactory("idx", "index"),
        new FloatSumAggregatorFactory("idxFloat", "indexFloat"),
        new DoubleSumAggregatorFactory("idxDouble", "index")
    ))
    .setDimFilter(new SelectorDimFilter("alias", "automotive", null))
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .setSubtotalsSpec(ImmutableList.of(
        ImmutableList.of("alias_renamed"),
        ImmutableList.of()
    ))
    .build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(query, "2011-04-01", "alias_renamed", "automotive", "rows", 1L, "idx", 135L, "idxFloat", 135.88510131835938f, "idxDouble", 135.88510131835938d),
    makeRow(query, "2011-04-02", "alias_renamed", "automotive", "rows", 1L, "idx", 147L, "idxFloat", 147.42593f, "idxDouble", 147.42593d),
    makeRow(query, "2011-04-01T00:00:00.000Z", "rows", 1L, "idx", 135L, "idxFloat", 135.88510131835938f, "idxDouble", 135.88510131835938d),
    makeRow(query, "2011-04-02T00:00:00.000Z", "rows", 1L, "idx", 147L, "idxFloat", 147.42593f, "idxDouble", 147.42593d)
);
Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "subtotal");
}
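The regression scenario from apache/druid#7820 hinges on a rename chain; restated with the identifiers from the query above:

// 1) The virtual column re-exposes "quality" under the name "alias".
new ExpressionVirtualColumn("alias", "quality", ColumnType.STRING, TestExprMacroTable.INSTANCE);
// 2) The filter targets the virtual column's name.
new SelectorDimFilter("alias", "automotive", null);
// 3) The dimension spec renames it again; subtotalsSpec must use this output name.
new DefaultDimensionSpec("alias", "alias_renamed");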