Use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
In class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithRollupMerge:
@Test
public void testSegmentMetadataQueryWithRollupMerge() {
  // Expected merged analysis: rollup flags that disagree between the two segments merge to null.
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null),
          "placementish",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), true, false, 0, 0, null, null, null)),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      rollup1 != rollup2 ? null : rollup1);
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  // Compose a runner that decorates both segment runners, merges their results, and finalizes them.
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              Execs.directExecutor(),
              Lists.newArrayList(toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2)))),
      toolChest);
  SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .intervals("2013/2014")
      .toInclude(new ListColumnIncluderator(Arrays.asList("placement", "placementish")))
      .analysisTypes(SegmentMetadataQuery.AnalysisType.ROLLUP)
      .merge(true)
      .build();
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(QueryPlus.wrap(query)),
      "failed SegmentMetadata merging query");
  exec.shutdownNow();
}
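The nine positional ColumnAnalysis arguments above are hard to read at a glance. The annotated call below is a reading aid only; the parameter names in the comments are inferred from how the values are used across these tests, not copied from the Druid source, so treat them as assumptions.

// Reading aid, not Druid API documentation: comments name each positional argument as these tests use it.
ColumnAnalysis placementAnalysis = new ColumnAnalysis(
    ColumnType.STRING,             // type signature (assumed name): resolved column type
    ValueType.STRING.toString(),   // type name as a string
    false,                         // hasMultipleValues
    false,                         // hasNulls
    0,                             // size in bytes (zero here because size analysis is not requested)
    0,                             // cardinality (zero here because cardinality analysis is not requested)
    null,                          // minValue
    null,                          // maxValue
    null);                         // errorMessage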
Use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
In class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithDefaultAnalysisMerge3:
@Test
public void testSegmentMetadataQueryWithDefaultAnalysisMerge3() {
  // Per-segment column sizes are non-zero only when bitmap indexes are included in the size analysis.
  int size1 = 0;
  int size2 = 0;
  if (bitmaps) {
    size1 = mmap1 ? 9765 : 9660;
    size2 = mmap2 ? 9765 : 9660;
  }
  ColumnAnalysis analysis = new ColumnAnalysis(
      ColumnType.STRING,
      ValueType.STRING.toString(),
      false,
      false,
      size1 + size2,
      9,
      "automotive",
      "travel",
      null);
  testSegmentMetadataQueryWithDefaultAnalysisMerge("quality", analysis);
}
Use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
In class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithNoAnalysisTypesMerge:
@Test
public void testSegmentMetadataQueryWithNoAnalysisTypesMerge() {
  // With no analysis types requested, the merged analysis carries only basic column information.
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      null);
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              Execs.directExecutor(),
              Lists.newArrayList(toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2)))),
      toolChest);
  SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .intervals("2013/2014")
      .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
      .analysisTypes()
      .merge(true)
      .build();
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(QueryPlus.wrap(query)),
      "failed SegmentMetadata merging query");
  exec.shutdownNow();
}
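The merged SegmentAnalysis objects in these tests are likewise built from nine positional arguments. The annotated call below mirrors the one above, reusing the placementAnalysis value from the earlier sketch; the argument names in the comments are assumptions inferred from the test values, not verified against the SegmentAnalysis source.

// Reading aid only; argument names in comments are assumptions.
SegmentAnalysis merged = new SegmentAnalysis(
    differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),            // segment id
    null,                                                                           // intervals (not analyzed here)
    ImmutableMap.of("placement", placementAnalysis),                                // per-column ColumnAnalysis map
    0,                                                                              // total segment size
    expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),  // merged row count
    null,                                                                           // aggregators
    null,                                                                           // timestamp spec
    null,                                                                           // query granularity
    null);                                                                          // rollup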
Use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
In class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithComplexColumnMerge:
@Test
public void testSegmentMetadataQueryWithComplexColumnMerge() {
  // Cardinality is computed for the string column but stays null for the hyperUnique complex column.
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 1, null, null, null),
          "quality_uniques",
          new ColumnAnalysis(ColumnType.ofComplex("hyperUnique"), "hyperUnique", false, true, 0, null, null, null, null)),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      null);
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              Execs.directExecutor(),
              Lists.newArrayList(toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2)))),
      toolChest);
  SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .intervals("2013/2014")
      .toInclude(new ListColumnIncluderator(Arrays.asList("placement", "quality_uniques")))
      .analysisTypes(SegmentMetadataQuery.AnalysisType.CARDINALITY)
      .merge(true)
      .build();
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(QueryPlus.wrap(query)),
      "failed SegmentMetadata merging query");
  exec.shutdownNow();
}
Use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
In class DruidSchemaTest, method testSegmentMetadataFallbackType:
@Test
public void testSegmentMetadataFallbackType() {
  // No ColumnType signatures are provided, so the row signature must be derived from the type-name strings.
  RowSignature signature = DruidSchema.analysisToRowSignature(
      new SegmentAnalysis(
          "id",
          ImmutableList.of(Intervals.utc(1L, 2L)),
          ImmutableMap.of(
              "a",
              new ColumnAnalysis(null, ColumnType.STRING.asTypeString(), false, true, 1234, 26, "a", "z", null),
              "count",
              new ColumnAnalysis(null, ColumnType.LONG.asTypeString(), false, true, 1234, 26, "a", "z", null)),
          1234,
          100,
          null,
          null,
          null,
          null));
  Assert.assertEquals(
      RowSignature.builder().add("a", ColumnType.STRING).add("count", ColumnType.LONG).build(),
      signature);
}
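This test exercises the fallback path in which a ColumnAnalysis carries no ColumnType signature, only a type-name string. Below is a minimal sketch of that fallback, assuming the getters are getTypeSignature()/getType() and that ColumnType.fromString parses the type string; those exact names are assumptions, and this is not the actual DruidSchema.analysisToRowSignature implementation.

// Sketch only (assumed getter and parser names, not the real DruidSchema code):
// use the typed signature when present, otherwise fall back to parsing the type string.
static RowSignature toRowSignature(Map<String, ColumnAnalysis> columns) {
  RowSignature.Builder builder = RowSignature.builder();
  columns.forEach((name, analysis) -> {
    ColumnType type = analysis.getTypeSignature() != null
        ? analysis.getTypeSignature()
        : ColumnType.fromString(analysis.getType());  // e.g. "STRING" -> ColumnType.STRING
    builder.add(name, type);
  });
  return builder.build();
}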