Usage example of io.druid.query.metadata.metadata.ListColumnIncluderator from the druid project (druid-io).
Source: class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithNoAnalysisTypesMerge.
@Test
public void testSegmentMetadataQueryWithNoAnalysisTypesMerge() {
  // Expected merged result: only the "placement" column survives the include filter,
  // row counts are summed across the two segments, and every analysis-type-specific
  // field is null because analysisTypes() is empty.
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : "testSegment",
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 0, null, null, null)
      ),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  // NOTE: the merge runs on MoreExecutors.sameThreadExecutor(); the previous
  // newCachedThreadPool() here was created and shut down without ever being used,
  // so it has been removed.
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(
          Druids.newSegmentMetadataQueryBuilder()
                .dataSource("testing")
                .intervals("2013/2014")
                .toInclude(new ListColumnIncluderator(Arrays.asList("placement")))
                .analysisTypes()
                .merge(true)
                .build(),
          Maps.newHashMap()
      ),
      "failed SegmentMetadata merging query"
  );
}
Usage example of io.druid.query.metadata.metadata.ListColumnIncluderator from the druid project (druid-io).
Source: class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithRollupMerge.
@Test
public void testSegmentMetadataQueryWithRollupMerge() {
  // Expected merged result for a ROLLUP-only analysis: the rollup flag is preserved
  // when both segments agree (rollup1 == rollup2) and becomes null when they conflict.
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : "testSegment",
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 0, null, null, null),
          "placementish",
          new ColumnAnalysis(ValueType.STRING.toString(), true, 0, 0, null, null, null)
      ),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      rollup1 != rollup2 ? null : rollup1
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  // NOTE: the merge runs on MoreExecutors.sameThreadExecutor(); the previous
  // newCachedThreadPool() here was created and shut down without ever being used,
  // so it has been removed.
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(
          Druids.newSegmentMetadataQueryBuilder()
                .dataSource("testing")
                .intervals("2013/2014")
                .toInclude(new ListColumnIncluderator(Arrays.asList("placement", "placementish")))
                .analysisTypes(SegmentMetadataQuery.AnalysisType.ROLLUP)
                .merge(true)
                .build(),
          Maps.newHashMap()
      ),
      "failed SegmentMetadata merging query"
  );
}
Usage example of io.druid.query.metadata.metadata.ListColumnIncluderator from the druid project (druid-io).
Source: class SegmentMetadataQueryTest, method testDefaultIntervalAndFiltering.
@Test
public void testDefaultIntervalAndFiltering() throws Exception {
  SegmentMetadataQuery testQuery = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .toInclude(new ListColumnIncluderator(Arrays.asList("placement")))
      .merge(true)
      .build();
  Interval expectedInterval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT);
  /* No interval specified, should use default interval */
  Assert.assertTrue(testQuery.isUsingDefaultInterval());
  // JUnit convention: expected value first, actual second (the original had them
  // reversed, which produces misleading failure messages).
  Assert.assertEquals(expectedInterval, testQuery.getIntervals().get(0));
  Assert.assertEquals(1, testQuery.getIntervals().size());
  List<LogicalSegment> testSegments = Arrays.asList(
      logicalSegment("2012-01-01/P1D"),
      logicalSegment("2012-01-01T01/PT1H"),
      logicalSegment("2013-01-05/P1D"),
      logicalSegment("2013-05-20/P1D"),
      logicalSegment("2014-01-05/P1D"),
      logicalSegment("2014-02-05/P1D"),
      logicalSegment("2015-01-19T01/PT1H"),
      logicalSegment("2015-01-20T02/PT1H")
  );
  /* Test default period filter */
  List<LogicalSegment> filteredSegments =
      new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig())
          .filterSegments(testQuery, testSegments);
  List<LogicalSegment> expectedSegments = Arrays.asList(
      logicalSegment("2015-01-19T01/PT1H"),
      logicalSegment("2015-01-20T02/PT1H")
  );
  Assert.assertEquals(2, filteredSegments.size());
  for (int i = 0; i < filteredSegments.size(); i++) {
    Assert.assertEquals(expectedSegments.get(i).getInterval(), filteredSegments.get(i).getInterval());
  }
  /* Test 2 year period filtering */
  SegmentMetadataQueryConfig twoYearPeriodCfg = new SegmentMetadataQueryConfig("P2Y");
  List<LogicalSegment> filteredSegments2 =
      new SegmentMetadataQueryQueryToolChest(twoYearPeriodCfg)
          .filterSegments(testQuery, testSegments);
  List<LogicalSegment> expectedSegments2 = Arrays.asList(
      logicalSegment("2013-05-20/P1D"),
      logicalSegment("2014-01-05/P1D"),
      logicalSegment("2014-02-05/P1D"),
      logicalSegment("2015-01-19T01/PT1H"),
      logicalSegment("2015-01-20T02/PT1H")
  );
  Assert.assertEquals(5, filteredSegments2.size());
  for (int i = 0; i < filteredSegments2.size(); i++) {
    Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval());
  }
}

/**
 * Creates a {@link LogicalSegment} covering the given ISO-8601 interval string.
 * Replaces the eight copy-pasted anonymous-class literals the test previously used.
 */
private static LogicalSegment logicalSegment(final String interval) {
  return new LogicalSegment() {
    @Override
    public Interval getInterval() {
      return new Interval(interval);
    }
  };
}
Usage example of io.druid.query.metadata.metadata.ListColumnIncluderator from the druid project (druid-io).
Source: class SegmentMetadataQueryTest, method testCacheKeyWithListColumnIncluderator.
@Test
public void testCacheKeyWithListColumnIncluderator() {
  // Two column lists whose elements concatenate to the same characters
  // ("foo" vs "fo" + "o"); the cache key must still tell them apart.
  final SegmentMetadataQuery queryWithOneColumn = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .toInclude(new ListColumnIncluderator(Arrays.asList("foo")))
      .build();
  final SegmentMetadataQuery queryWithTwoColumns = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .toInclude(new ListColumnIncluderator(Arrays.asList("fo", "o")))
      .build();

  final byte[] keyForOneColumn = new SegmentMetadataQueryQueryToolChest(null)
      .getCacheStrategy(queryWithOneColumn)
      .computeCacheKey(queryWithOneColumn);
  final byte[] keyForTwoColumns = new SegmentMetadataQueryQueryToolChest(null)
      .getCacheStrategy(queryWithTwoColumns)
      .computeCacheKey(queryWithTwoColumns);

  Assert.assertFalse(Arrays.equals(keyForOneColumn, keyForTwoColumns));
}
Usage example of io.druid.query.metadata.metadata.ListColumnIncluderator from the druid project (druid-io).
Source: class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithQueryGranularityMerge.
@Test
public void testSegmentMetadataQueryWithQueryGranularityMerge() {
  // Expected merged result for a QUERYGRANULARITY-only analysis: the merged
  // granularity is Granularities.NONE; all other analysis-type fields are null.
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : "testSegment",
      null,
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 0, null, null, null)
      ),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      Granularities.NONE,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  // NOTE: the merge runs on MoreExecutors.sameThreadExecutor(); the previous
  // newCachedThreadPool() here was created and shut down without ever being used,
  // so it has been removed.
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(
          Druids.newSegmentMetadataQueryBuilder()
                .dataSource("testing")
                .intervals("2013/2014")
                .toInclude(new ListColumnIncluderator(Arrays.asList("placement")))
                .analysisTypes(SegmentMetadataQuery.AnalysisType.QUERYGRANULARITY)
                .merge(true)
                .build(),
          Maps.newHashMap()
      ),
      "failed SegmentMetadata merging query"
  );
}
Aggregations