use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
the class CalciteQueryTest method testOrderByEarliestLong.
@Test
public void testOrderByEarliestLong() throws Exception {
  // Cannot vectorize EARLIEST aggregator.
  skipVectorize();
  List<Object[]> expected;
  if (NullHandling.replaceWithDefault()) {
    expected = ImmutableList.of(
        new Object[]{"1", 0L}, new Object[]{"2", 0L}, new Object[]{"abc", 0L},
        new Object[]{"def", 0L}, new Object[]{"", 7L}, new Object[]{"10.1", 325323L}
    );
  } else {
    expected = ImmutableList.of(
        new Object[]{"1", null}, new Object[]{"abc", null}, new Object[]{"def", null},
        new Object[]{"2", 0L}, new Object[]{"", 7L}, new Object[]{"10.1", 325323L}
    );
  }
  testQuery(
      "SELECT dim1, EARLIEST(l1) FROM druid.numfoo GROUP BY 1 ORDER BY 2 LIMIT 10",
      ImmutableList.of(
          new TopNQueryBuilder()
              .dataSource(CalciteTests.DATASOURCE3)
              .intervals(querySegmentSpec(Filtration.eternity()))
              .granularity(Granularities.ALL)
              .dimension(new DefaultDimensionSpec("dim1", "_d0"))
              .aggregators(aggregators(new LongFirstAggregatorFactory("a0", "l1", null)))
              .metric(new InvertedTopNMetricSpec(new NumericTopNMetricSpec("a0")))
              .threshold(10)
              .context(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      expected
  );
}
use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
the class CalciteQueryTest method testTimeseriesUsingTimeFloorWithTimeShift.
@Test
public void testTimeseriesUsingTimeFloorWithTimeShift() throws Exception {
  // Cannot vectorize due to virtual columns.
  cannotVectorize();
  testQuery(
      "SELECT SUM(cnt), gran FROM (\n"
      + " SELECT TIME_FLOOR(TIME_SHIFT(__time, 'P1D', -1), 'P1M') AS gran,\n"
      + " cnt FROM druid.foo\n"
      + ") AS x\n"
      + "GROUP BY gran\n"
      + "ORDER BY gran",
      ImmutableList.of(
          GroupByQuery.builder()
              .setDataSource(CalciteTests.DATASOURCE1)
              .setInterval(querySegmentSpec(Filtration.eternity()))
              .setGranularity(Granularities.ALL)
              .setVirtualColumns(expressionVirtualColumn(
                  "v0",
                  "timestamp_floor(timestamp_shift(\"__time\",'P1D',-1,'UTC'),'P1M',null,'UTC')",
                  ColumnType.LONG
              ))
              .setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.LONG)))
              .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt")))
              .setLimitSpec(new DefaultLimitSpec(
                  ImmutableList.of(new OrderByColumnSpec("d0", OrderByColumnSpec.Direction.ASCENDING, StringComparators.NUMERIC)),
                  Integer.MAX_VALUE
              ))
              .setContext(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      ImmutableList.of(
          new Object[]{1L, timestamp("1999-12-01")},
          new Object[]{2L, timestamp("2000-01-01")},
          new Object[]{1L, timestamp("2000-12-01")},
          new Object[]{2L, timestamp("2001-01-01")}
      )
  );
}
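The example above groups on a long-typed virtual column, so the DefaultDimensionSpec carries an explicit output type. Below is a minimal sketch, not taken from the project sources, of the two constructor forms used throughout these tests; it assumes the druid-processing classes are on the classpath.

import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.segment.column.ColumnType;

public class DefaultDimensionSpecSketch {
  public static void main(String[] args) {
    // Untyped form: group on "dim1" and expose it as output column "d0"
    // (the output type defaults to STRING).
    DefaultDimensionSpec stringDim = new DefaultDimensionSpec("dim1", "d0");

    // Typed form: group on virtual column "v0" and declare the output type as LONG,
    // matching the timestamp_floor expression in the test above.
    DefaultDimensionSpec longDim = new DefaultDimensionSpec("v0", "d0", ColumnType.LONG);

    System.out.println(stringDim.getOutputName() + " / " + stringDim.getOutputType());
    System.out.println(longDim.getOutputName() + " / " + longDim.getOutputType());
  }
}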
use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
the class CalciteSelectQueryTest method testSelectDistinctWithStrlenFilter.
@Test
public void testSelectDistinctWithStrlenFilter() throws Exception {
  // Cannot vectorize due to usage of expressions.
  cannotVectorize();
  testQuery(
      "SELECT distinct dim1 FROM druid.foo "
      + "WHERE CHARACTER_LENGTH(dim1) = 3 OR CAST(CHARACTER_LENGTH(dim1) AS varchar) = 3",
      ImmutableList.of(
          GroupByQuery.builder()
              .setDataSource(CalciteTests.DATASOURCE1)
              .setInterval(querySegmentSpec(Filtration.eternity()))
              .setGranularity(Granularities.ALL)
              .setVirtualColumns(
                  expressionVirtualColumn("v0", "strlen(\"dim1\")", ColumnType.LONG),
                  // The two layers of CASTs here are unusual, they should really be collapsed into one
                  expressionVirtualColumn("v1", "CAST(CAST(strlen(\"dim1\"), 'STRING'), 'LONG')", ColumnType.LONG)
              )
              .setDimensions(dimensions(new DefaultDimensionSpec("dim1", "d0")))
              .setDimFilter(or(selector("v0", "3", null), selector("v1", "3", null)))
              .setContext(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      ImmutableList.of(new Object[]{"abc"}, new Object[]{"def"})
  );
}
use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
the class CalciteQueryTest method testGroupByLimitPushdownExtraction.
@Test
public void testGroupByLimitPushdownExtraction() throws Exception {
  cannotVectorize();
  testQuery(
      "SELECT dim4, substring(dim5, 1, 1), count(*) FROM druid.numfoo WHERE dim4 = 'a' GROUP BY 1,2 LIMIT 2",
      ImmutableList.of(
          GroupByQuery.builder()
              .setDataSource(CalciteTests.DATASOURCE3)
              .setInterval(querySegmentSpec(Filtration.eternity()))
              .setGranularity(Granularities.ALL)
              .setDimensions(dimensions(
                  new DefaultDimensionSpec("v0", "_d0"),
                  new ExtractionDimensionSpec("dim5", "_d1", new SubstringDimExtractionFn(0, 1))
              ))
              .setVirtualColumns(expressionVirtualColumn("v0", "'a'", ColumnType.STRING))
              .setDimFilter(selector("dim4", "a", null))
              .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0")))
              .setLimitSpec(new DefaultLimitSpec(ImmutableList.of(), 2))
              .setContext(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      ImmutableList.of(new Object[]{"a", "a", 2L}, new Object[]{"a", "b", 1L})
  );
}
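The testGroupByLimitPushdownExtraction example mixes a plain DefaultDimensionSpec with an ExtractionDimensionSpec. As a rough sketch, not taken from the project, the difference is that DefaultDimensionSpec passes the column value through unchanged, while ExtractionDimensionSpec applies an extraction function, here SubstringDimExtractionFn, before grouping.

import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.query.dimension.ExtractionDimensionSpec;
import org.apache.druid.query.extraction.SubstringDimExtractionFn;

public class DimensionSpecComparisonSketch {
  public static void main(String[] args) {
    // Pass-through: output column "_d0" is the raw value of "v0".
    DimensionSpec plain = new DefaultDimensionSpec("v0", "_d0");

    // Extraction: output column "_d1" is the first character of "dim5",
    // computed by SubstringDimExtractionFn(index = 0, length = 1) before grouping.
    DimensionSpec firstChar =
        new ExtractionDimensionSpec("dim5", "_d1", new SubstringDimExtractionFn(0, 1));

    System.out.println(plain.getExtractionFn());      // null: no transformation applied
    System.out.println(firstChar.getExtractionFn());  // the substring extraction fn
  }
}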
use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
the class CalciteQueryTest method testOrderByLatestFloat.
@Test
public void testOrderByLatestFloat() throws Exception {
  // Cannot vectorize LATEST aggregator.
  skipVectorize();
  List<Object[]> expected;
  if (NullHandling.replaceWithDefault()) {
    expected = ImmutableList.of(
        new Object[]{"1", 0.0f}, new Object[]{"2", 0.0f}, new Object[]{"abc", 0.0f},
        new Object[]{"def", 0.0f}, new Object[]{"10.1", 0.1f}, new Object[]{"", 1.0f}
    );
  } else {
    expected = ImmutableList.of(
        new Object[]{"1", null}, new Object[]{"abc", null}, new Object[]{"def", null},
        new Object[]{"2", 0.0f}, new Object[]{"10.1", 0.1f}, new Object[]{"", 1.0f}
    );
  }
  testQuery(
      "SELECT dim1, LATEST(f1) FROM druid.numfoo GROUP BY 1 ORDER BY 2 LIMIT 10",
      ImmutableList.of(
          new TopNQueryBuilder()
              .dataSource(CalciteTests.DATASOURCE3)
              .intervals(querySegmentSpec(Filtration.eternity()))
              .granularity(Granularities.ALL)
              .dimension(new DefaultDimensionSpec("dim1", "_d0"))
              .aggregators(aggregators(new FloatLastAggregatorFactory("a0", "f1", null)))
              .metric(new InvertedTopNMetricSpec(new NumericTopNMetricSpec("a0")))
              .threshold(10)
              .context(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      expected
  );
}