Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
Class ArrayOfDoublesSketchToNumEntriesPostAggregatorTest, method testResultArraySignature:
@Test
public void testResultArraySignature()
{
  final TimeseriesQuery query = Druids
      .newTimeseriesQueryBuilder()
      .dataSource("dummy")
      .intervals("2000/3000")
      .granularity(Granularities.HOUR)
      .aggregators(new CountAggregatorFactory("count"))
      .postAggregators(
          new ArrayOfDoublesSketchToNumEntriesPostAggregator("a", new ConstantPostAggregator("", 1))
      )
      .build();

  Assert.assertEquals(
      RowSignature.builder()
                  .addTimeColumn()
                  .add("count", ColumnType.LONG)
                  .add("a", ColumnType.LONG)
                  .build(),
      new TimeseriesQueryQueryToolChest().resultArraySignature(query)
  );
}
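For context, ConstantPostAggregator always returns its configured value and ignores the per-row aggregator map, which is what makes it a convenient stand-in input field throughout these tests. A minimal sketch of that behavior (the empty map argument is just illustrative):

// A constant post-aggregator ignores the combined aggregator values
// and always yields its configured constant.
ConstantPostAggregator constant = new ConstantPostAggregator("ten", 10);
Object value = constant.compute(Collections.emptyMap()); // returns 10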
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
Class ArrayOfDoublesSketchToNumEntriesPostAggregatorTest, method testSerde:
@Test
public void testSerde() throws JsonProcessingException
{
  final PostAggregator there =
      new ArrayOfDoublesSketchToNumEntriesPostAggregator("a", new ConstantPostAggregator("", 0));
  DefaultObjectMapper mapper = new DefaultObjectMapper();
  ArrayOfDoublesSketchToNumEntriesPostAggregator andBackAgain = mapper.readValue(
      mapper.writeValueAsString(there),
      ArrayOfDoublesSketchToNumEntriesPostAggregator.class
  );

  Assert.assertEquals(there, andBackAgain);
  Assert.assertArrayEquals(there.getCacheKey(), andBackAgain.getCacheKey());
}
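The round trip above exercises the Jackson bindings on both post-aggregator classes. Serializing the same object should produce JSON of roughly the following shape; the type names match the Druid DataSketches extension docs, but treat the exact output as illustrative:

DefaultObjectMapper mapper = new DefaultObjectMapper();
String json = mapper.writeValueAsString(
    new ArrayOfDoublesSketchToNumEntriesPostAggregator("a", new ConstantPostAggregator("", 0))
);
// Roughly (key order may vary):
// {"type":"arrayOfDoublesSketchToNumEntries","name":"a","field":{"type":"constant","name":"","value":0}}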
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
Class ArrayOfDoublesSketchToNumEntriesPostAggregatorTest, method testToString:
@Test
public void testToString()
{
  PostAggregator postAgg =
      new ArrayOfDoublesSketchToNumEntriesPostAggregator("a", new ConstantPostAggregator("", 0));

  Assert.assertEquals(
      "ArrayOfDoublesSketchToNumEntriesPostAggregator{name='a', field=ConstantPostAggregator{name='', constantValue=0}}",
      postAgg.toString()
  );
}
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
Class GroupByQueryQueryToolChestTest, method testMultiColumnCacheStrategy:
@Test
public void testMultiColumnCacheStrategy() throws Exception
{
  final GroupByQuery query1 = GroupByQuery
      .builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(ImmutableList.of(
          new DefaultDimensionSpec("test", "test", ColumnType.STRING),
          new DefaultDimensionSpec("v0", "v0", ColumnType.STRING)
      ))
      .setVirtualColumns(
          new ExpressionVirtualColumn("v0", "concat('foo', test)", ColumnType.STRING, TestExprMacroTable.INSTANCE)
      )
      .setAggregatorSpecs(Arrays.asList(
          QueryRunnerTestHelper.ROWS_COUNT,
          getComplexAggregatorFactoryForValueType(ValueType.STRING)
      ))
      .setPostAggregatorSpecs(ImmutableList.of(new ConstantPostAggregator("post", 10)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();

  CacheStrategy<ResultRow, Object, GroupByQuery> strategy =
      new GroupByQueryQueryToolChest(null, null).getCacheStrategy(query1);

  // test timestamps that result in integer size millis
  final ResultRow result1 = ResultRow.of(
      123L, "val1", "fooval1", 1,
      getIntermediateComplexValue(ValueType.STRING, "val1")
  );
  Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result1);
  ObjectMapper objectMapper = TestHelper.makeJsonMapper();
  Object fromCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedValue),
      strategy.getCacheObjectClazz()
  );
  ResultRow fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);

  Assert.assertEquals(result1, fromCacheResult);
}
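Both cache-strategy tests repeat the same serialize-then-deserialize steps inline. A hypothetical helper capturing the segment-level round trip might look like this; the method name roundTripSegmentCache and its placement are assumptions, not part of the Druid test class:

// Hypothetical helper: push a result row through the segment-level cache
// representation (JSON bytes) and pull it back out.
private static <T, CacheType, QueryType extends Query<T>> T roundTripSegmentCache(
    CacheStrategy<T, CacheType, QueryType> strategy,
    ObjectMapper mapper,
    T row
) throws IOException
{
  CacheType prepared = strategy.prepareForSegmentLevelCache().apply(row);
  CacheType fromBytes = mapper.readValue(mapper.writeValueAsBytes(prepared), strategy.getCacheObjectClazz());
  return strategy.pullFromSegmentLevelCache().apply(fromBytes);
}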
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
Class GroupByQueryQueryToolChestTest, method doTestCacheStrategy:
private void doTestCacheStrategy(final ColumnType valueType, final Object dimValue) throws IOException
{
  final GroupByQuery query1 = GroupByQuery
      .builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(Collections.singletonList(new DefaultDimensionSpec("test", "test", valueType)))
      .setAggregatorSpecs(Arrays.asList(
          QueryRunnerTestHelper.ROWS_COUNT,
          getComplexAggregatorFactoryForValueType(valueType.getType())
      ))
      .setPostAggregatorSpecs(ImmutableList.of(new ConstantPostAggregator("post", 10)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();

  CacheStrategy<ResultRow, Object, GroupByQuery> strategy =
      new GroupByQueryQueryToolChest(null, null).getCacheStrategy(query1);

  // test timestamps that result in integer size millis
  final ResultRow result1 = ResultRow.of(123L, dimValue, 1, getIntermediateComplexValue(valueType.getType(), dimValue));
  Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result1);
  ObjectMapper objectMapper = TestHelper.makeJsonMapper();
  Object fromCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedValue),
      strategy.getCacheObjectClazz()
  );
  ResultRow fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
  Assert.assertEquals(result1, fromCacheResult);

  // test the result-level cache, which also round-trips the constant post-aggregator value
  final ResultRow result2 = ResultRow.of(123L, dimValue, 1, dimValue, 10);

  // Please see the comments on aggregator serde and type handling in CacheStrategy.fetchAggregatorsFromCache()
  final ResultRow typeAdjustedResult2;
  if (valueType.is(ValueType.FLOAT)) {
    typeAdjustedResult2 = ResultRow.of(123L, dimValue, 1, 2.1d, 10);
  } else if (valueType.is(ValueType.LONG)) {
    typeAdjustedResult2 = ResultRow.of(123L, dimValue, 1, 2, 10);
  } else {
    typeAdjustedResult2 = result2;
  }

  Object preparedResultCacheValue = strategy.prepareForCache(true).apply(result2);
  Object fromResultCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedResultCacheValue),
      strategy.getCacheObjectClazz()
  );
  ResultRow fromResultCacheResult = strategy.pullFromCache(true).apply(fromResultCacheValue);
  Assert.assertEquals(typeAdjustedResult2, fromResultCacheResult);
}
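This private helper is driven once per value type by a @Test method in the same class. A sketch of such call sites, with the float and long inputs inferred from the typeAdjustedResult2 branches above (exact values and the method name testCacheStrategy are illustrative):

@Test
public void testCacheStrategy() throws Exception
{
  // One round trip per value type; FLOAT and LONG line up with the
  // typeAdjustedResult2 branches in doTestCacheStrategy.
  doTestCacheStrategy(ColumnType.STRING, "val1");
  doTestCacheStrategy(ColumnType.FLOAT, 2.1f);
  doTestCacheStrategy(ColumnType.LONG, 2L);
}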