Use of org.apache.druid.query.QueryToolChest in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeseriesWithTimestampResultFieldContextForArrayResponse:
@Test
public void testTimeseriesWithTimestampResultFieldContextForArrayResponse() {
  Granularity gran = Granularities.DAY;
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(gran)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(
          QueryRunnerTestHelper.ROWS_COUNT,
          QueryRunnerTestHelper.INDEX_DOUBLE_SUM,
          QueryRunnerTestHelper.QUALITY_UNIQUES
      )
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .descending(descending)
      .context(
          makeContext(
              ImmutableMap.of(
                  TimeseriesQuery.CTX_TIMESTAMP_RESULT_FIELD, TIMESTAMP_RESULT_FIELD_NAME,
                  TimeseriesQuery.SKIP_EMPTY_BUCKETS, true
              )
          )
      )
      .build();
  Assert.assertEquals(TIMESTAMP_RESULT_FIELD_NAME, query.getTimestampResultField());

  QueryToolChest<Result<TimeseriesResultValue>, TimeseriesQuery> toolChest = new TimeseriesQueryQueryToolChest();

  // The array signature names the columns positionally: __time, the extra
  // timestamp result field, the aggregators, then the post-aggregator.
  RowSignature rowSignature = toolChest.resultArraySignature(query);
  Assert.assertNotNull(rowSignature);
  List<String> columnNames = rowSignature.getColumnNames();
  Assert.assertNotNull(columnNames);
  Assert.assertEquals(6, columnNames.size());
  Assert.assertEquals("__time", columnNames.get(0));
  Assert.assertEquals(TIMESTAMP_RESULT_FIELD_NAME, columnNames.get(1));
  Assert.assertEquals("rows", columnNames.get(2));
  Assert.assertEquals("index", columnNames.get(3));
  Assert.assertEquals("uniques", columnNames.get(4));
  Assert.assertEquals("addRowsIndexConstant", columnNames.get(5));

  Sequence<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query));
  Sequence<Object[]> resultsAsArrays = toolChest.resultsAsArrays(query, results);
  Assert.assertNotNull(resultsAsArrays);

  final String[] expectedIndex = descending
      ? QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES_DESC
      : QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES;
  final String[] expectedIndexToUse = Arrays.stream(expectedIndex)
      .filter(eachIndex -> !"0.0".equals(eachIndex))
      .toArray(String[]::new);
  final Long expectedLast = descending
      ? QueryRunnerTestHelper.EARLIEST.getMillis()
      : QueryRunnerTestHelper.LAST.getMillis();

  int count = 0;
  Object[] lastResult = null;
  for (Object[] result : resultsAsArrays.toList()) {
    Long current = (Long) result[0];
    Assert.assertFalse(
        StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast),
        descending ? current < expectedLast : current > expectedLast
    );
    // The timestamp result field mirrors __time.
    Assert.assertEquals((Long) result[1], current, 0);
    Assert.assertEquals(
        QueryRunnerTestHelper.SKIPPED_DAY.getMillis() == current ? (Long) 0L : (Long) 13L,
        result[2]
    );
    if (QueryRunnerTestHelper.SKIPPED_DAY.getMillis() != current) {
      Assert.assertEquals(
          Doubles.tryParse(expectedIndexToUse[count]).doubleValue(),
          (Double) result[3],
          (Double) result[3] * 1e-6
      );
      Assert.assertEquals((Double) result[4], 9.0d, 0.02);
      Assert.assertEquals(
          new Double(expectedIndexToUse[count]) + 13L + 1L,
          (Double) result[5],
          (Double) result[5] * 1e-6
      );
    } else {
      if (NullHandling.replaceWithDefault()) {
        Assert.assertEquals(0.0D, (Double) result[3], (Double) result[3] * 1e-6);
        Assert.assertEquals(0.0D, (Double) result[4], 0.02);
        Assert.assertEquals(
            new Double(expectedIndexToUse[count]) + 1L,
            (Double) result[5],
            (Double) result[5] * 1e-6
        );
      } else {
        Assert.assertNull(result[3]);
        Assert.assertEquals((Double) result[4], 0.0, 0.02);
        Assert.assertNull(result[5]);
      }
    }
    lastResult = result;
    ++count;
  }
  Assert.assertEquals(expectedLast, lastResult[0]);
}
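The assertions above lean on the positional contract between resultArraySignature and resultsAsArrays: the signature's column names line up index-for-index with each Object[] row. As a minimal sketch of that contract, the hypothetical helper below (not part of the Druid test suite; class and method names are illustrative) rebuilds name-to-value maps from the two outputs.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.segment.column.RowSignature;

public class ArrayResultRows {
  // Pair each positional row from resultsAsArrays with the column names from
  // resultArraySignature. Assumes both were produced for the same query, so
  // the arity and ordering match.
  public static List<Map<String, Object>> toMaps(RowSignature signature, Sequence<Object[]> rows) {
    final List<String> names = signature.getColumnNames();
    final List<Map<String, Object>> mapped = new ArrayList<>();
    for (Object[] row : rows.toList()) {
      final Map<String, Object> entry = new LinkedHashMap<>(names.size());
      for (int i = 0; i < names.size(); i++) {
        entry.put(names.get(i), row[i]);
      }
      mapped.add(entry);
    }
    return mapped;
  }
}

For the query above, each resulting map would carry the keys __time, the timestamp result field, rows, index, uniques, and addRowsIndexConstant, in that order.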
Use of org.apache.druid.query.QueryToolChest in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithTimestampSpecMerge:
@Test
public void testSegmentMetadataQueryWithTimestampSpecMerge() {
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
      ),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      new TimestampSpec("ds", "auto", null),
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              Execs.directExecutor(),
              Lists.newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .intervals("2013/2014")
      .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
      .analysisTypes(SegmentMetadataQuery.AnalysisType.TIMESTAMPSPEC)
      .merge(true)
      .build();
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(QueryPlus.wrap(query)),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
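The equality assertion covers the whole merged object; if only the timestamp spec matters, the merged analysis can also be inspected directly. A hedged sketch, assuming the myRunner and query built above and that SegmentAnalysis exposes a getTimestampSpec() accessor mirroring its constructor argument:

// Materialize the merged sequence and read back the timestamp spec produced by
// the TIMESTAMPSPEC analysis. The cast is needed because myRunner is a raw type.
SegmentAnalysis merged = (SegmentAnalysis) myRunner.run(QueryPlus.wrap(query)).toList().get(0);
TimestampSpec timestampSpec = merged.getTimestampSpec();
Assert.assertEquals("ds", timestampSpec.getTimestampColumn());
Assert.assertEquals("auto", timestampSpec.getTimestampFormat());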
Use of org.apache.druid.query.QueryToolChest in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithQueryGranularityMerge:
@Test
public void testSegmentMetadataQueryWithQueryGranularityMerge() {
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
      ),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      Granularities.NONE,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              Execs.directExecutor(),
              Lists.newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .intervals("2013/2014")
      .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
      .analysisTypes(SegmentMetadataQuery.AnalysisType.QUERYGRANULARITY)
      .merge(true)
      .build();
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(QueryPlus.wrap(query)),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
Use of org.apache.druid.query.QueryToolChest in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithAggregatorsMerge:
@Test
public void testSegmentMetadataQueryWithAggregatorsMerge() {
  final Map<String, AggregatorFactory> expectedAggregators = new HashMap<>();
  for (AggregatorFactory agg : TestIndex.METRIC_AGGS) {
    expectedAggregators.put(agg.getName(), agg.getCombiningFactory());
  }
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
      ),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      expectedAggregators,
      null,
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              Execs.directExecutor(),
              Lists.newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .intervals("2013/2014")
      .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
      .analysisTypes(SegmentMetadataQuery.AnalysisType.AGGREGATORS)
      .merge(true)
      .build();
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(QueryPlus.wrap(query)),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
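Note that the expectation map is built from getCombiningFactory() rather than the raw factories: merged segment metadata reports each metric in its combining form, which is what a broker would use to merge partial aggregates across segments. A hedged sketch of reading that back directly, assuming the stack above and a getAggregators() accessor on SegmentAnalysis:

// Compare the merged aggregator map against the combining factories computed above.
SegmentAnalysis merged = (SegmentAnalysis) myRunner.run(QueryPlus.wrap(query)).toList().get(0);
Map<String, AggregatorFactory> mergedAggs = merged.getAggregators();
Assert.assertEquals(expectedAggregators, mergedAggs);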
Use of org.apache.druid.query.QueryToolChest in project druid by druid-io.
From the class SegmentMetadataQueryTest, helper method testSegmentMetadataQueryWithDefaultAnalysisMerge:
private void testSegmentMetadataQueryWithDefaultAnalysisMerge(String column, ColumnAnalysis analysis) {
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
      ImmutableList.of(expectedSegmentAnalysis1.getIntervals().get(0)),
      ImmutableMap.of(
          "__time",
          new ColumnAnalysis(ColumnType.LONG, ValueType.LONG.toString(), false, false, 12090 * 2, null, null, null, null),
          "index",
          new ColumnAnalysis(ColumnType.DOUBLE, ValueType.DOUBLE.toString(), false, false, 9672 * 2, null, null, null, null),
          column,
          analysis
      ),
      expectedSegmentAnalysis1.getSize() + expectedSegmentAnalysis2.getSize(),
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              Execs.directExecutor(),
              Lists.newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  Query query = testQuery.withColumns(new ListColumnIncluderator(Arrays.asList("__time", "index", column)));
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(QueryPlus.wrap(query)),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
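The helper is parameterized on the column under test so that multiple dimension tests can share the same merge plumbing. A hypothetical invocation (the column name and ColumnAnalysis values are illustrative, not copied from the actual callers in the test suite):

// Illustrative call: merge-analyze the "placement" string column alongside
// __time and index, reusing the ColumnAnalysis shape from the tests above.
testSegmentMetadataQueryWithDefaultAnalysisMerge(
    "placement",
    new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
);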