Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
The class SearchBenchmark, method basicC:
private static SearchQueryBuilder basicC(final GeneratorSchemaInfo basicSchema)
{
  final QuerySegmentSpec intervalSpec =
      new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

  final List<String> dimUniformFilterVals = new ArrayList<>();
  final int resultNum = (int) (100000 * 0.1);
  final int step = 100000 / resultNum;
  for (int i = 1; i < 100001 && dimUniformFilterVals.size() < resultNum; i += step) {
    dimUniformFilterVals.add(String.valueOf(i));
  }

  final String dimName = "dimUniform";
  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, IdentityExtractionFn.getInstance()));
  dimFilters.add(new SelectorDimFilter(dimName, "3", StrlenExtractionFn.instance()));
  dimFilters.add(new BoundDimFilter(dimName, "100", "10000", true, true, true, new DimExtractionFn()
  {
    @Override
    public byte[] getCacheKey()
    {
      return new byte[]{0xF};
    }

    @Override
    public String apply(String value)
    {
      return String.valueOf(Long.parseLong(value) + 1);
    }

    @Override
    public boolean preservesOrdering()
    {
      return false;
    }

    @Override
    public ExtractionType getExtractionType()
    {
      return ExtractionType.ONE_TO_ONE;
    }
  }, null));
  dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, new LowerExtractionFn(null)));
  dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, new UpperExtractionFn(null)));
  dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, new SubstringDimExtractionFn(1, 3)));

  return Druids.newSearchQueryBuilder()
               .dataSource("blah")
               .granularity(Granularities.ALL)
               .intervals(intervalSpec)
               .query("")
               .dimensions(Collections.singletonList("dimUniform"))
               .filters(new AndDimFilter(dimFilters));
}
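As a quick orientation for the class these examples revolve around: MultipleIntervalSegmentSpec wraps an ordered list of Joda-Time Intervals and reports them back through QuerySegmentSpec#getIntervals(); query builders then accept it via intervals(...) or setInterval(...), as in the snippets on this page. A minimal standalone sketch (separate from the benchmark above; the class name and the interval literal are only illustrative):

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;

import java.util.Collections;

public class MultipleIntervalSegmentSpecSketch
{
  public static void main(String[] args)
  {
    // Wrap a single interval; multi-interval specs just take a longer list.
    final QuerySegmentSpec spec = new MultipleIntervalSegmentSpec(
        Collections.singletonList(Intervals.of("2000-01-01/2001-01-01"))
    );
    // The spec simply hands back the intervals it was constructed with.
    System.out.println(spec.getIntervals());
  }
}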
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
The class DoublesSketchSqlAggregatorTest, method testQuantileOnInnerQuery:
@Test
public void testQuantileOnInnerQuery() throws Exception
{
  final List<Object[]> expectedResults;
  if (NullHandling.replaceWithDefault()) {
    expectedResults = ImmutableList.of(new Object[] { 7.0, 11.0 });
  } else {
    expectedResults = ImmutableList.of(new Object[] { 5.25, 8.0 });
  }
  testQuery(
      "SELECT AVG(x), APPROX_QUANTILE_DS(x, 0.98)\n"
      + "FROM (SELECT dim2, SUM(m1) AS x FROM foo GROUP BY dim2)",
      Collections.singletonList(
          GroupByQuery.builder()
              .setDataSource(new QueryDataSource(
                  GroupByQuery.builder()
                      .setDataSource(CalciteTests.DATASOURCE1)
                      .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
                      .setGranularity(Granularities.ALL)
                      .setDimensions(new DefaultDimensionSpec("dim2", "d0"))
                      .setAggregatorSpecs(ImmutableList.of(new DoubleSumAggregatorFactory("a0", "m1")))
                      .setContext(QUERY_CONTEXT_DEFAULT)
                      .build()
              ))
              .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
              .setGranularity(Granularities.ALL)
              .setAggregatorSpecs(
                  new DoubleSumAggregatorFactory("_a0:sum", "a0"),
                  new CountAggregatorFactory("_a0:count"),
                  new DoublesSketchAggregatorFactory("_a1:agg", "a0", null)
              )
              .setPostAggregatorSpecs(ImmutableList.of(
                  new ArithmeticPostAggregator(
                      "_a0",
                      "quotient",
                      ImmutableList.of(
                          new FieldAccessPostAggregator(null, "_a0:sum"),
                          new FieldAccessPostAggregator(null, "_a0:count")
                      )
                  ),
                  new DoublesSketchToQuantilePostAggregator("_a1", makeFieldAccessPostAgg("_a1:agg"), 0.98f)
              ))
              .setContext(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      expectedResults
  );
}
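Both the inner and the outer GroupByQuery above scope the scan with the same spec: a MultipleIntervalSegmentSpec over the single "eternity" interval, which is what the SQL planner produces when the query carries no __time filter. A hypothetical helper (the class and method names below are not part of the test class) makes that repeated pattern explicit:

import com.google.common.collect.ImmutableList;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.sql.calcite.filtration.Filtration;

public class EternitySpecSketch
{
  // Filtration.eternity() is the planner's "all of time" interval.
  static QuerySegmentSpec eternitySpec()
  {
    return new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()));
  }
}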
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
The class DoublesSketchSqlAggregatorTest, method testQuantileOnInnerQuantileQuery:
@Test
public void testQuantileOnInnerQuantileQuery() throws Exception
{
  ImmutableList.Builder<Object[]> builder = ImmutableList.builder();
  builder.add(new Object[] { "", 1.0 });
  builder.add(new Object[] { "1", 4.0 });
  builder.add(new Object[] { "10.1", 2.0 });
  builder.add(new Object[] { "2", 3.0 });
  builder.add(new Object[] { "abc", 6.0 });
  builder.add(new Object[] { "def", 5.0 });
  final List<Object[]> expectedResults = builder.build();
  testQuery(
      "SELECT dim1, APPROX_QUANTILE_DS(x, 0.5)\n"
      + "FROM (SELECT dim1, dim2, APPROX_QUANTILE_DS(m1, 0.5) AS x FROM foo GROUP BY dim1, dim2) GROUP BY dim1",
      Collections.singletonList(
          GroupByQuery.builder()
              .setDataSource(new QueryDataSource(
                  GroupByQuery.builder()
                      .setDataSource(CalciteTests.DATASOURCE1)
                      .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
                      .setGranularity(Granularities.ALL)
                      .setDimensions(
                          new DefaultDimensionSpec("dim1", "d0"),
                          new DefaultDimensionSpec("dim2", "d1")
                      )
                      .setAggregatorSpecs(ImmutableList.of(new DoublesSketchAggregatorFactory("a0:agg", "m1", 128)))
                      .setPostAggregatorSpecs(ImmutableList.of(
                          new DoublesSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.5f)
                      ))
                      .setContext(QUERY_CONTEXT_DEFAULT)
                      .build()
              ))
              .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
              .setGranularity(Granularities.ALL)
              .setDimensions(new DefaultDimensionSpec("d0", "_d0", ColumnType.STRING))
              .setAggregatorSpecs(new DoublesSketchAggregatorFactory("_a0:agg", "a0", 128))
              .setPostAggregatorSpecs(ImmutableList.of(
                  new DoublesSketchToQuantilePostAggregator("_a0", makeFieldAccessPostAgg("_a0:agg"), 0.5f)
              ))
              .setContext(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      expectedResults
  );
}
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
The class DoublesSketchSqlAggregatorTest, method testFailWithSmallMaxStreamLength:
@Test
public void testFailWithSmallMaxStreamLength() throws Exception
{
  final Map<String, Object> context = new HashMap<>(QUERY_CONTEXT_DEFAULT);
  context.put(DoublesSketchApproxQuantileSqlAggregator.CTX_APPROX_QUANTILE_DS_MAX_STREAM_LENGTH, 1);
  testQueryThrows(
      "SELECT\n"
      + "APPROX_QUANTILE_DS(m1, 0.01),\n"
      + "APPROX_QUANTILE_DS(cnt, 0.5)\n"
      + "FROM foo",
      context,
      Collections.singletonList(
          Druids.newTimeseriesQueryBuilder()
              .dataSource(CalciteTests.DATASOURCE1)
              .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
              .granularity(Granularities.ALL)
              .aggregators(ImmutableList.of(
                  new DoublesSketchAggregatorFactory("a0:agg", "m1", null, 1L),
                  new DoublesSketchAggregatorFactory("a1:agg", "cnt", null, 1L)
              ))
              .postAggregators(
                  new DoublesSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.01f),
                  new DoublesSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a1:agg"), 0.50f)
              )
              .context(context)
              .build()
      ),
      expectedException -> {
        expectedException.expect(IllegalStateException.class);
        expectedException.expectMessage("NullPointerException was thrown while updating Doubles sketch");
      }
  );
}
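The notable detail in the expected query is the trailing 1L in each aggregator factory: the CTX_APPROX_QUANTILE_DS_MAX_STREAM_LENGTH context entry caps the sketch's maxStreamLength at 1, which is what this test expects to make the sketch update fail. A standalone sketch of just that factory configuration, assuming the same four-argument constructor used above (the class name and the println exist only to make the snippet self-contained):

import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchAggregatorFactory;

public class SmallMaxStreamLengthSketch
{
  public static void main(String[] args)
  {
    // Same arguments as in the expected query: name, fieldName,
    // k (null = default sketch size), and a maxStreamLength of 1.
    final DoublesSketchAggregatorFactory factory =
        new DoublesSketchAggregatorFactory("a0:agg", "m1", null, 1L);
    System.out.println(factory);
  }
}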
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
The class HllSketchSqlAggregatorTest, method testHllSketchPostAggs:
@Test
public void testHllSketchPostAggs() throws Exception
{
  final String sketchSummary = "### HLL SKETCH SUMMARY: \n"
      + " Log Config K : 12\n"
      + " Hll Target : HLL_4\n"
      + " Current Mode : LIST\n"
      + " Memory : false\n"
      + " LB : 2.0\n"
      + " Estimate : 2.000000004967054\n"
      + " UB : 2.000099863468538\n"
      + " OutOfOrder Flag: false\n"
      + " Coupon Count : 2\n";
  final String otherSketchSummary = "### HLL SKETCH SUMMARY: \n"
      + " LOG CONFIG K : 12\n"
      + " HLL TARGET : HLL_4\n"
      + " CURRENT MODE : LIST\n"
      + " MEMORY : FALSE\n"
      + " LB : 2.0\n"
      + " ESTIMATE : 2.000000004967054\n"
      + " UB : 2.000099863468538\n"
      + " OUTOFORDER FLAG: FALSE\n"
      + " COUPON COUNT : 2\n";
  testQuery(
      "SELECT\n"
      + " DS_HLL(dim2),\n"
      + " DS_HLL(m1),\n"
      + " HLL_SKETCH_ESTIMATE(DS_HLL(dim2)),\n"
      + " HLL_SKETCH_ESTIMATE(DS_HLL(dim2)) + 1,\n"
      + " HLL_SKETCH_ESTIMATE(DS_HLL(CONCAT(dim2, 'hello'))),\n"
      + " ABS(HLL_SKETCH_ESTIMATE(DS_HLL(dim2))),\n"
      + " HLL_SKETCH_ESTIMATE_WITH_ERROR_BOUNDS(DS_HLL(dim2), 2),\n"
      + " HLL_SKETCH_ESTIMATE_WITH_ERROR_BOUNDS(DS_HLL(dim2)),\n"
      + " DS_HLL(POWER(ABS(m1 + 100), 2)),\n"
      + " APPROX_COUNT_DISTINCT_DS_HLL(dim2),\n"
      + " HLL_SKETCH_TO_STRING(DS_HLL(dim2)),\n"
      + " UPPER(HLL_SKETCH_TO_STRING(DS_HLL(dim2))),\n"
      + " HLL_SKETCH_ESTIMATE(DS_HLL(dim2), true)\n"
      + "FROM druid.foo",
      ImmutableList.of(
          Druids.newTimeseriesQueryBuilder()
              .dataSource(CalciteTests.DATASOURCE1)
              .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
              .granularity(Granularities.ALL)
              .virtualColumns(
                  new ExpressionVirtualColumn("v0", "concat(\"dim2\",'hello')", ColumnType.STRING, TestExprMacroTable.INSTANCE),
                  new ExpressionVirtualColumn("v1", "pow(abs((\"m1\" + 100)),2)", ColumnType.DOUBLE, TestExprMacroTable.INSTANCE)
              )
              .aggregators(ImmutableList.of(
                  new HllSketchBuildAggregatorFactory("a0", "dim2", null, null, true),
                  new HllSketchBuildAggregatorFactory("a1", "m1", null, null, true),
                  new HllSketchBuildAggregatorFactory("a2", "v0", null, null, true),
                  new HllSketchBuildAggregatorFactory("a3", "v1", null, null, true),
                  new HllSketchBuildAggregatorFactory("a4", "dim2", null, null, true)
              ))
              .postAggregators(ImmutableList.of(
                  new FieldAccessPostAggregator("p0", "a0"),
                  new FieldAccessPostAggregator("p1", "a1"),
                  new HllSketchToEstimatePostAggregator("p3", new FieldAccessPostAggregator("p2", "a0"), false),
                  new HllSketchToEstimatePostAggregator("p5", new FieldAccessPostAggregator("p4", "a0"), false),
                  new ExpressionPostAggregator("p6", "(\"p5\" + 1)", null, TestExprMacroTable.INSTANCE),
                  new HllSketchToEstimatePostAggregator("p8", new FieldAccessPostAggregator("p7", "a2"), false),
                  new HllSketchToEstimatePostAggregator("p10", new FieldAccessPostAggregator("p9", "a0"), false),
                  new ExpressionPostAggregator("p11", "abs(\"p10\")", null, TestExprMacroTable.INSTANCE),
                  new HllSketchToEstimateWithBoundsPostAggregator("p13", new FieldAccessPostAggregator("p12", "a0"), 2),
                  new HllSketchToEstimateWithBoundsPostAggregator("p15", new FieldAccessPostAggregator("p14", "a0"), 1),
                  new FieldAccessPostAggregator("p16", "a3"),
                  new HllSketchToStringPostAggregator("p18", new FieldAccessPostAggregator("p17", "a0")),
                  new HllSketchToStringPostAggregator("p20", new FieldAccessPostAggregator("p19", "a0")),
                  new ExpressionPostAggregator("p21", "upper(\"p20\")", null, TestExprMacroTable.INSTANCE),
                  new HllSketchToEstimatePostAggregator("p23", new FieldAccessPostAggregator("p22", "a0"), true)
              ))
              .context(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      ImmutableList.of(
          new Object[]{
              "\"AgEHDAMIAgDhUv8P63iABQ==\"",
              "\"AgEHDAMIBgALpZ0PjpTfBY5ElQo+C7UE4jA+DKfcYQQ=\"",
              2.000000004967054d,
              3.000000004967054d,
              3.000000014901161d,
              2.000000004967054d,
              "[2.000000004967054,2.0,2.0001997319422404]",
              "[2.000000004967054,2.0,2.000099863468538]",
              "\"AgEHDAMIBgC1EYgH1mlHBwsKPwu5SK8MIiUxB7iZVwU=\"",
              2L,
              sketchSummary,
              otherSketchSummary,
              2.0
          }
      )
  );
}
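For orientation, the "### HLL SKETCH SUMMARY" text checked above is what this test expects HLL_SKETCH_TO_STRING to return (with UPPER(...) producing the otherSketchSummary variant), and it is presumably just the human-readable rendering of a DataSketches HLL sketch. A standalone sketch, assuming only the Apache DataSketches library on the classpath (the class name and the two sample values are illustrative):

import org.apache.datasketches.hll.HllSketch;

public class HllSummarySketch
{
  public static void main(String[] args)
  {
    // lgConfigK = 12 matches the "Log Config K : 12" line in the expected summary.
    final HllSketch sketch = new HllSketch(12);
    sketch.update("a");
    sketch.update("abc");
    // Two distinct values yield an estimate very close to 2.0, as in the
    // expected results above; printing the sketch renders a summary block in
    // the same "### HLL SKETCH SUMMARY" format.
    System.out.println(sketch.getEstimate());
    System.out.println(sketch);
  }
}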