Usage of org.apache.druid.query.aggregation.datasketches.hll.HllSketchBuildAggregatorFactory in the druid-io/druid project: class HllSketchSqlAggregatorTest, method testtHllSketchPostAggsPostSort.
// Verifies that HLL sketch post-aggregators referenced from an outer query survive an
// inner ORDER BY: the outer HLL_SKETCH_ESTIMATE / HLL_SKETCH_TO_STRING calls must plan
// as the "s*" post-aggregators on top of the inner query's sketch column "p0".
// NOTE(review): the method name has a doubled 't' ("testt..."); JUnit discovers tests
// via @Test so this is cosmetic, but consider renaming for consistency.
@Test
public void testtHllSketchPostAggsPostSort() throws Exception {
// Exact human-readable summary that HLL_SKETCH_TO_STRING is expected to render for the
// two-distinct-value dim2 sketch (runtime-compared string: do not reformat).
final String sketchSummary = "### HLL SKETCH SUMMARY: \n" + " Log Config K : 12\n" + " Hll Target : HLL_4\n" + " Current Mode : LIST\n" + " Memory : false\n" + " LB : 2.0\n" + " Estimate : 2.000000004967054\n" + " UB : 2.000099863468538\n" + " OutOfOrder Flag: false\n" + " Coupon Count : 2\n";
// Inner query: build an HLL sketch of dim2 and sort by its estimate.
final String sql = "SELECT DS_HLL(dim2) as y FROM druid.foo ORDER BY HLL_SKETCH_ESTIMATE(DS_HLL(dim2)) DESC LIMIT 10";
// Expected native plan: a single ALL-granularity timeseries query (one output row, so the
// ORDER BY ... LIMIT needs no limit spec), with the outer estimate/to-string calls planned
// as post-aggregators "s1"/"s3" over field access "p0".
testQuery(StringUtils.format("SELECT HLL_SKETCH_ESTIMATE(y), HLL_SKETCH_TO_STRING(y) from (%s)", sql), ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(Granularities.ALL).aggregators(ImmutableList.of(new HllSketchBuildAggregatorFactory("a0", "dim2", null, null, true))).postAggregators(ImmutableList.of(new FieldAccessPostAggregator("p0", "a0"), new HllSketchToEstimatePostAggregator("p2", new FieldAccessPostAggregator("p1", "a0"), false), new HllSketchToEstimatePostAggregator("s1", new FieldAccessPostAggregator("s0", "p0"), false), new HllSketchToStringPostAggregator("s3", new FieldAccessPostAggregator("s2", "p0")))).context(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { 2.000000004967054d, sketchSummary }));
}
Usage of org.apache.druid.query.aggregation.datasketches.hll.HllSketchBuildAggregatorFactory in the druid-io/druid project: class HllSketchBaseSqlAggregator, method toDruidAggregation.
/**
 * Plans a SQL HLL-sketch aggregation call into a native Druid {@link Aggregation}.
 *
 * <p>Arguments: column (required), lgK (optional literal), tgtHllType (optional literal).
 * Returns {@code null} whenever the call cannot be planned (non-expressible column, or a
 * non-literal lgK / tgtHllType argument). Throws {@link ISE} if the column's SQL type has
 * no Druid equivalent.
 */
@Nullable
@Override
public Aggregation toDruidAggregation(PlannerContext plannerContext, RowSignature rowSignature, VirtualColumnRegistry virtualColumnRegistry, RexBuilder rexBuilder, String name, AggregateCall aggregateCall, Project project, List<Aggregation> existingAggregations, boolean finalizeAggregations) {
  final List<Integer> arguments = aggregateCall.getArgList();

  // Resolve the first argument directly instead of via
  // Aggregations.getArgumentsForSimpleAggregator, which would prevent direct column
  // access for string columns.
  final RexNode columnRexNode = Expressions.fromFieldAccess(rowSignature, project, arguments.get(0));
  final DruidExpression columnArg = Expressions.toDruidExpression(plannerContext, rowSignature, columnRexNode);
  if (columnArg == null) {
    return null;
  }

  // Optional second argument: lgK. Must be a literal in order to plan.
  int logK = HllSketchAggregatorFactory.DEFAULT_LG_K;
  if (arguments.size() >= 2) {
    final RexNode logKarg = Expressions.fromFieldAccess(rowSignature, project, arguments.get(1));
    if (!logKarg.isA(SqlKind.LITERAL)) {
      return null;
    }
    logK = ((Number) RexLiteral.value(logKarg)).intValue();
  }

  // Optional third argument: target HLL type. Must be a literal in order to plan.
  String tgtHllType = HllSketchAggregatorFactory.DEFAULT_TGT_HLL_TYPE.name();
  if (arguments.size() >= 3) {
    final RexNode tgtHllTypeArg = Expressions.fromFieldAccess(rowSignature, project, arguments.get(2));
    if (!tgtHllTypeArg.isA(SqlKind.LITERAL)) {
      return null;
    }
    tgtHllType = RexLiteral.stringValue(tgtHllTypeArg);
  }

  // When finalizing, the raw sketch gets an internal prefixed name and the caller-visible
  // name is attached by the finalizing step in toAggregation().
  final String aggregatorName = finalizeAggregations ? Calcites.makePrefixedName(name, "a") : name;

  // A directly-accessed COMPLEX column already holds sketches: merge them. Anything else
  // (raw values or expressions) must be built into a sketch.
  final boolean directComplexColumn = columnArg.isDirectColumnAccess()
      && rowSignature.getColumnType(columnArg.getDirectColumn())
                     .map(type -> type.is(ValueType.COMPLEX))
                     .orElse(false);

  final AggregatorFactory aggregatorFactory;
  if (directComplexColumn) {
    aggregatorFactory = new HllSketchMergeAggregatorFactory(aggregatorName, columnArg.getDirectColumn(), logK, tgtHllType, ROUND);
  } else {
    final RelDataType dataType = columnRexNode.getType();
    final ColumnType inputType = Calcites.getColumnTypeForRelDataType(dataType);
    if (inputType == null) {
      throw new ISE("Cannot translate sqlTypeName[%s] to Druid type for field[%s]", dataType.getSqlTypeName(), aggregatorName);
    }

    // Direct column access keeps the column as-is; expressions are materialized as a
    // virtual column first.
    final DimensionSpec dimensionSpec;
    if (columnArg.isDirectColumnAccess()) {
      dimensionSpec = columnArg.getSimpleExtraction().toDimensionSpec(null, inputType);
    } else {
      final String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(columnArg, dataType);
      dimensionSpec = new DefaultDimensionSpec(virtualColumnName, null, inputType);
    }
    aggregatorFactory = new HllSketchBuildAggregatorFactory(aggregatorName, dimensionSpec.getDimension(), logK, tgtHllType, ROUND);
  }

  return toAggregation(name, finalizeAggregations, aggregatorFactory);
}
Usage of org.apache.druid.query.aggregation.datasketches.hll.HllSketchBuildAggregatorFactory in the druid-io/druid project: class HllSketchSqlAggregatorTest, method testHllSketchPostAggs.
// Exercises the full set of HLL sketch post-aggregator SQL functions in one SELECT:
// HLL_SKETCH_ESTIMATE (plain, in arithmetic, over an expression, with rounding),
// HLL_SKETCH_ESTIMATE_WITH_ERROR_BOUNDS (explicit and default stddev),
// HLL_SKETCH_TO_STRING (plain and wrapped in UPPER), plus raw DS_HLL outputs, and
// checks both the planned native timeseries query and the finalized row values.
@Test
public void testHllSketchPostAggs() throws Exception {
// Expected HLL_SKETCH_TO_STRING rendering for the dim2 sketch (runtime-compared string).
final String sketchSummary = "### HLL SKETCH SUMMARY: \n" + " Log Config K : 12\n" + " Hll Target : HLL_4\n" + " Current Mode : LIST\n" + " Memory : false\n" + " LB : 2.0\n" + " Estimate : 2.000000004967054\n" + " UB : 2.000099863468538\n" + " OutOfOrder Flag: false\n" + " Coupon Count : 2\n";
// Same summary after the SQL-level UPPER(...) is applied to it.
final String otherSketchSummary = "### HLL SKETCH SUMMARY: \n" + " LOG CONFIG K : 12\n" + " HLL TARGET : HLL_4\n" + " CURRENT MODE : LIST\n" + " MEMORY : FALSE\n" + " LB : 2.0\n" + " ESTIMATE : 2.000000004967054\n" + " UB : 2.000099863468538\n" + " OUTOFORDER FLAG: FALSE\n" + " COUPON COUNT : 2\n";
// Expected plan notes: CONCAT and POWER/ABS inputs become virtual columns v0/v1; each
// distinct DS_HLL argument gets its own build aggregator a0..a4; the arithmetic/UPPER/ABS
// wrappers become expression post-aggs over the sketch-estimate post-aggs. The string
// results ("\"AgEH...\"") are the expected JSON-serialized base64 sketches.
testQuery("SELECT\n" + " DS_HLL(dim2),\n" + " DS_HLL(m1),\n" + " HLL_SKETCH_ESTIMATE(DS_HLL(dim2)),\n" + " HLL_SKETCH_ESTIMATE(DS_HLL(dim2)) + 1,\n" + " HLL_SKETCH_ESTIMATE(DS_HLL(CONCAT(dim2, 'hello'))),\n" + " ABS(HLL_SKETCH_ESTIMATE(DS_HLL(dim2))),\n" + " HLL_SKETCH_ESTIMATE_WITH_ERROR_BOUNDS(DS_HLL(dim2), 2),\n" + " HLL_SKETCH_ESTIMATE_WITH_ERROR_BOUNDS(DS_HLL(dim2)),\n" + " DS_HLL(POWER(ABS(m1 + 100), 2)),\n" + " APPROX_COUNT_DISTINCT_DS_HLL(dim2),\n" + " HLL_SKETCH_TO_STRING(DS_HLL(dim2)),\n" + " UPPER(HLL_SKETCH_TO_STRING(DS_HLL(dim2))),\n" + " HLL_SKETCH_ESTIMATE(DS_HLL(dim2), true)\n" + "FROM druid.foo", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(Granularities.ALL).virtualColumns(new ExpressionVirtualColumn("v0", "concat(\"dim2\",'hello')", ColumnType.STRING, TestExprMacroTable.INSTANCE), new ExpressionVirtualColumn("v1", "pow(abs((\"m1\" + 100)),2)", ColumnType.DOUBLE, TestExprMacroTable.INSTANCE)).aggregators(ImmutableList.of(new HllSketchBuildAggregatorFactory("a0", "dim2", null, null, true), new HllSketchBuildAggregatorFactory("a1", "m1", null, null, true), new HllSketchBuildAggregatorFactory("a2", "v0", null, null, true), new HllSketchBuildAggregatorFactory("a3", "v1", null, null, true), new HllSketchBuildAggregatorFactory("a4", "dim2", null, null, true))).postAggregators(ImmutableList.of(new FieldAccessPostAggregator("p0", "a0"), new FieldAccessPostAggregator("p1", "a1"), new HllSketchToEstimatePostAggregator("p3", new FieldAccessPostAggregator("p2", "a0"), false), new HllSketchToEstimatePostAggregator("p5", new FieldAccessPostAggregator("p4", "a0"), false), new ExpressionPostAggregator("p6", "(\"p5\" + 1)", null, TestExprMacroTable.INSTANCE), new HllSketchToEstimatePostAggregator("p8", new FieldAccessPostAggregator("p7", "a2"), false), new HllSketchToEstimatePostAggregator("p10", new 
FieldAccessPostAggregator("p9", "a0"), false), new ExpressionPostAggregator("p11", "abs(\"p10\")", null, TestExprMacroTable.INSTANCE), new HllSketchToEstimateWithBoundsPostAggregator("p13", new FieldAccessPostAggregator("p12", "a0"), 2), new HllSketchToEstimateWithBoundsPostAggregator("p15", new FieldAccessPostAggregator("p14", "a0"), 1), new FieldAccessPostAggregator("p16", "a3"), new HllSketchToStringPostAggregator("p18", new FieldAccessPostAggregator("p17", "a0")), new HllSketchToStringPostAggregator("p20", new FieldAccessPostAggregator("p19", "a0")), new ExpressionPostAggregator("p21", "upper(\"p20\")", null, TestExprMacroTable.INSTANCE), new HllSketchToEstimatePostAggregator("p23", new FieldAccessPostAggregator("p22", "a0"), true))).context(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { "\"AgEHDAMIAgDhUv8P63iABQ==\"", "\"AgEHDAMIBgALpZ0PjpTfBY5ElQo+C7UE4jA+DKfcYQQ=\"", 2.000000004967054d, 3.000000004967054d, 3.000000014901161d, 2.000000004967054d, "[2.000000004967054,2.0,2.0001997319422404]", "[2.000000004967054,2.0,2.000099863468538]", "\"AgEHDAMIBgC1EYgH1mlHBwsKPwu5SK8MIiUxB7iZVwU=\"", 2L, sketchSummary, otherSketchSummary, 2.0 }));
}
Usage of org.apache.druid.query.aggregation.datasketches.hll.HllSketchBuildAggregatorFactory in the druid-io/druid project: class HllSketchSqlAggregatorTest, method testApproxCountDistinctHllSketch.
// Verifies that the various approximate-count-distinct SQL spellings all plan to HLL
// sketch aggregators: DS_HLL-specific functions (plain, filtered, with explicit
// lgK/tgtHllType), the generic APPROX_COUNT_DISTINCT, and COUNT(DISTINCT ...), over both
// raw columns/expressions and a pre-built sketch column.
@Test
public void testApproxCountDistinctHllSketch() throws Exception {
// Can't vectorize due to SUBSTRING expression.
cannotVectorize();
// Each trailing comment below describes the SQL fragment on its own line.
final String sql = "SELECT\n" + " SUM(cnt),\n" + // plain long sum (baseline column)
" APPROX_COUNT_DISTINCT_DS_HLL(dim2),\n" + // DS_HLL approx distinct on a string column
" APPROX_COUNT_DISTINCT_DS_HLL(dim2) FILTER(WHERE dim2 <> ''),\n" + // same, but filtered
" APPROX_COUNT_DISTINCT(SUBSTRING(dim2, 1, 1)),\n" + // generic A.C.D. on an expression (plans via virtual column v0, not an extractionFn)
" COUNT(DISTINCT SUBSTRING(dim2, 1, 1) || 'x'),\n" + // COUNT DISTINCT on an expression; planned as an HLL build aggregator (a4)
" APPROX_COUNT_DISTINCT_DS_HLL(hllsketch_dim1, 21, 'HLL_8'),\n" + // on a native HllSketch column, explicit lgK and tgtHllType -> merge aggregator
" APPROX_COUNT_DISTINCT_DS_HLL(hllsketch_dim1)\n" + "FROM druid.foo";
final List<Object[]> expectedResults;
// The two null-handling modes disagree only in the COUNT(DISTINCT ...) column (2 vs 1):
// default-value mode folds nulls into '' before the || 'x' concat, yielding an extra value.
if (NullHandling.replaceWithDefault()) {
expectedResults = ImmutableList.of(new Object[] { 6L, 2L, 2L, 1L, 2L, 5L, 5L });
} else {
expectedResults = ImmutableList.of(new Object[] { 6L, 2L, 2L, 1L, 1L, 5L, 5L });
}
// Expected plan: one timeseries query; the SUBSTRING expressions become virtual columns
// v0/v1; raw-value inputs plan as HllSketchBuildAggregatorFactory, the pre-built sketch
// column as HllSketchMergeAggregatorFactory (a5 carrying the explicit lgK=21 / HLL_8).
testQuery(sql, ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(Granularities.ALL).virtualColumns(new ExpressionVirtualColumn("v0", "substring(\"dim2\", 0, 1)", ColumnType.STRING, TestExprMacroTable.INSTANCE), new ExpressionVirtualColumn("v1", "concat(substring(\"dim2\", 0, 1),'x')", ColumnType.STRING, TestExprMacroTable.INSTANCE)).aggregators(ImmutableList.of(new LongSumAggregatorFactory("a0", "cnt"), new HllSketchBuildAggregatorFactory("a1", "dim2", null, null, ROUND), new FilteredAggregatorFactory(new HllSketchBuildAggregatorFactory("a2", "dim2", null, null, ROUND), BaseCalciteQueryTest.not(BaseCalciteQueryTest.selector("dim2", "", null))), new HllSketchBuildAggregatorFactory("a3", "v0", null, null, ROUND), new HllSketchBuildAggregatorFactory("a4", "v1", null, null, ROUND), new HllSketchMergeAggregatorFactory("a5", "hllsketch_dim1", 21, "HLL_8", ROUND), new HllSketchMergeAggregatorFactory("a6", "hllsketch_dim1", null, null, ROUND))).context(QUERY_CONTEXT_DEFAULT).build()), expectedResults);
}
Usage of org.apache.druid.query.aggregation.datasketches.hll.HllSketchBuildAggregatorFactory in the druid-io/druid project: class HllSketchSqlAggregatorTest, method testAvgDailyCountDistinctHllSketch.
// Verifies AVG over a per-day APPROX_COUNT_DISTINCT_DS_HLL subquery: the inner query
// plans as a day-granularity timeseries with a finalizing sketch aggregator, and the
// outer AVG plans as a groupBy over that inline result (sum/count + quotient post-agg).
@Test
public void testAvgDailyCountDistinctHllSketch() throws Exception {
// Can't vectorize due to outer query, which runs on an inline datasource.
cannotVectorize();
// cnt is constant per row, so each day's distinct count is 1 and so is the average.
final List<Object[]> expectedResults = ImmutableList.of(new Object[] { 1L });
// Inner: timeseries at DAY granularity, sketch "a0:a" finalized to a count via
// FinalizingFieldAccessPostAggregator("a0"). Outer: groupBy computing AVG as
// _a0:sum / _a0:count; in SQL-compatible null handling the count is filtered to
// non-null "a0" rows, hence the two aggregator-spec variants.
testQuery("SELECT\n" + " AVG(u)\n" + "FROM (" + " SELECT FLOOR(__time TO DAY), APPROX_COUNT_DISTINCT_DS_HLL(cnt) AS u\n" + " FROM druid.foo\n" + " GROUP BY 1\n" + ")", ImmutableList.of(GroupByQuery.builder().setDataSource(new QueryDataSource(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(new PeriodGranularity(Period.days(1), null, DateTimeZone.UTC)).aggregators(Collections.singletonList(new HllSketchBuildAggregatorFactory("a0:a", "cnt", null, null, ROUND))).postAggregators(ImmutableList.of(new FinalizingFieldAccessPostAggregator("a0", "a0:a"))).context(QUERY_CONTEXT_DEFAULT).build().withOverriddenContext(BaseCalciteQueryTest.getTimeseriesContextWithFloorTime(ImmutableMap.of(TimeseriesQuery.SKIP_EMPTY_BUCKETS, true, BaseQuery.SQL_QUERY_ID, "dummy"), "d0")))).setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).setGranularity(Granularities.ALL).setAggregatorSpecs(NullHandling.replaceWithDefault() ? Arrays.asList(new LongSumAggregatorFactory("_a0:sum", "a0"), new CountAggregatorFactory("_a0:count")) : Arrays.asList(new LongSumAggregatorFactory("_a0:sum", "a0"), new FilteredAggregatorFactory(new CountAggregatorFactory("_a0:count"), BaseCalciteQueryTest.not(BaseCalciteQueryTest.selector("a0", null, null))))).setPostAggregatorSpecs(ImmutableList.of(new ArithmeticPostAggregator("_a0", "quotient", ImmutableList.of(new FieldAccessPostAggregator(null, "_a0:sum"), new FieldAccessPostAggregator(null, "_a0:count"))))).setContext(QUERY_CONTEXT_DEFAULT).build()), expectedResults);
}
Aggregations