Use of io.druid.query.topn.DimensionAndMetricValueExtractor in project druid by druid-io.
In class SketchAggregationTestWithSimpleData, method testSimpleDataIngestAndTopNQuery:
@Test
public void testSimpleDataIngestAndTopNQuery() throws Exception {
  AggregationTestHelper topNQueryAggregationTestHelper = AggregationTestHelper.createTopNQueryAggregationTestHelper(
      sm.getJacksonModules(),
      tempFolder
  );
  Sequence seq = topNQueryAggregationTestHelper.runQueryOnSegments(
      ImmutableList.of(s1, s2),
      readFileFromClasspathAsString("topn_query.json")
  );
  Result<TopNResultValue> result = (Result<TopNResultValue>) Iterables.getOnlyElement(
      Sequences.toList(seq, Lists.newArrayList())
  );
  Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp());
  DimensionAndMetricValueExtractor value = Iterables.getOnlyElement(result.getValue().getValue());
  Assert.assertEquals(38.0, value.getDoubleMetric("sketch_count"), 0.01);
  Assert.assertEquals(38.0, value.getDoubleMetric("sketchEstimatePostAgg"), 0.01);
  Assert.assertEquals(38.0, value.getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
  Assert.assertEquals(38.0, value.getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, value.getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, value.getDoubleMetric("non_existing_col_validation"), 0.01);
  Assert.assertEquals("product_3", value.getDimensionValue("product"));
}
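For reference, DimensionAndMetricValueExtractor is a map-backed value object keyed by the output names of the query's dimension, aggregators, and post-aggregators, which is why the assertions above read everything through getDoubleMetric and getDimensionValue. A minimal standalone sketch of those accessors, assuming the map-backed constructor; the class name ExtractorAccessExample and the map contents are illustrative, not taken from the test data:

  import com.google.common.collect.ImmutableMap;
  import io.druid.query.topn.DimensionAndMetricValueExtractor;

  public class ExtractorAccessExample {
    public static void main(String[] args) {
      // Illustrative row: keys are dimension/metric output names, values are the computed values.
      DimensionAndMetricValueExtractor row = new DimensionAndMetricValueExtractor(
          ImmutableMap.<String, Object>of(
              "product", "product_3",
              "sketch_count", 38.0
          )
      );
      System.out.println(row.getDoubleMetric("sketch_count"));   // 38.0
      System.out.println(row.getDimensionValue("product"));      // product_3
    }
  }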
Use of io.druid.query.topn.DimensionAndMetricValueExtractor in project druid by druid-io.
In class SchemalessTestSimpleTest, method testFullOnTopN:
// @Test TODO: Handling of null values is inconsistent right now, need to make it all consistent and re-enable test
// TODO: Complain to Eric when you see this. It shouldn't be like this...
public void testFullOnTopN() {
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .dimension(marketDimension)
      .metric(indexMetric)
      .threshold(3)
      .intervals(fullOnInterval)
      .aggregators(Lists.<AggregatorFactory>newArrayList(Iterables.concat(
          commonAggregators,
          Lists.newArrayList(
              new DoubleMaxAggregatorFactory("maxIndex", "index"),
              new DoubleMinAggregatorFactory("minIndex", "index")
          )
      )))
      .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
      .build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(Arrays.<DimensionAndMetricValueExtractor>asList(
              new DimensionAndMetricValueExtractor(ImmutableMap.<String, Object>builder()
                  .put("market", "spot").put("rows", 4L).put("index", 400.0D)
                  .put("addRowsIndexConstant", 405.0D).put("uniques", 1.0002442201269182D)
                  .put("maxIndex", 100.0).put("minIndex", 100.0).build()),
              new DimensionAndMetricValueExtractor(ImmutableMap.<String, Object>builder()
                  .put("market", "").put("rows", 2L).put("index", 200.0D)
                  .put("addRowsIndexConstant", 203.0D).put("uniques", 0.0)
                  .put("maxIndex", 100.0D).put("minIndex", 100.0D).build()),
              new DimensionAndMetricValueExtractor(ImmutableMap.<String, Object>builder()
                  .put("market", "total_market").put("rows", 2L).put("index", 200.0D)
                  .put("addRowsIndexConstant", 203.0D).put("uniques", 1.0002442201269182D)
                  .put("maxIndex", 100.0D).put("minIndex", 100.0D).build())
          ))
      )
  );
  QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment);
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
}
Use of io.druid.query.topn.DimensionAndMetricValueExtractor in project druid by druid-io.
In class QueryMaker, method executeTopN:
private Sequence<Object[]> executeTopN(final DruidQueryBuilder queryBuilder, final TopNQuery query) {
  final List<RelDataTypeField> fieldList = queryBuilder.getRowType().getFieldList();
  Hook.QUERY_PLAN.run(query);
  return Sequences.concat(
      Sequences.map(
          query.run(walker, Maps.<String, Object>newHashMap()),
          new Function<Result<TopNResultValue>, Sequence<Object[]>>() {
            @Override
            public Sequence<Object[]> apply(final Result<TopNResultValue> result) {
              final List<DimensionAndMetricValueExtractor> rows = result.getValue().getValue();
              final List<Object[]> retVals = new ArrayList<>(rows.size());
              for (DimensionAndMetricValueExtractor row : rows) {
                final Object[] retVal = new Object[fieldList.size()];
                for (final RelDataTypeField field : fieldList) {
                  final String outputName = queryBuilder.getRowOrder().get(field.getIndex());
                  retVal[field.getIndex()] = coerce(row.getMetric(outputName), field.getType().getSqlTypeName());
                }
                retVals.add(retVal);
              }
              return Sequences.simple(retVals);
            }
          }
      )
  );
}
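The essential step here is laying each map-backed TopN row out as a positional Object[] in the projection's column order; the SQL-type coercion is done by QueryMaker's private coerce method, which is not shown above. A simplified sketch of just that mapping, with the coercion omitted; the class and helper names and the use of a plain String column list are illustrative, not part of QueryMaker:

  import java.util.ArrayList;
  import java.util.List;

  import io.druid.query.topn.DimensionAndMetricValueExtractor;

  public class TopNRowMappingSketch {
    // Illustrative helper: lay each TopN row out as an Object[] following the given
    // output-column order. The real code additionally coerces each value to the SQL
    // type of the corresponding Calcite field.
    public static List<Object[]> toRows(
        List<DimensionAndMetricValueExtractor> topNRows,
        List<String> rowOrder
    ) {
      final List<Object[]> out = new ArrayList<>(topNRows.size());
      for (DimensionAndMetricValueExtractor row : topNRows) {
        final Object[] retVal = new Object[rowOrder.size()];
        for (int i = 0; i < rowOrder.size(); i++) {
          // As in executeTopN above, getMetric returns the raw map value for both the
          // dimension output name and the metric output names.
          retVal[i] = row.getMetric(rowOrder.get(i));
        }
        out.add(retVal);
      }
      return out;
    }
  }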