use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.
the class SegmentMetadataQueryTest method testDefaultIntervalAndFiltering.
@Test
public void testDefaultIntervalAndFiltering() {
SegmentMetadataQuery testQuery = Druids.newSegmentMetadataQueryBuilder()
    .dataSource("testing")
    .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
    .merge(true)
    .build();
/* No interval specified, should use default interval */
Assert.assertTrue(testQuery.isUsingDefaultInterval());
Assert.assertEquals(Intervals.ETERNITY, testQuery.getIntervals().get(0));
Assert.assertEquals(1, testQuery.getIntervals().size());
List<LogicalSegment> testSegments = Arrays.asList(new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2012-01-01/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2012-01-01T01/PT1H");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2013-01-05/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2013-05-20/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2014-01-05/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2014-02-05/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2015-01-19T01/PT1H");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2015-01-20T02/PT1H");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
});
/* Test default period filter */
List<LogicalSegment> filteredSegments = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig())
    .filterSegments(testQuery, testSegments);
List<LogicalSegment> expectedSegments = Arrays.asList(new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2015-01-19T01/PT1H");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2015-01-20T02/PT1H");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
});
Assert.assertEquals(2, filteredSegments.size());
for (int i = 0; i < filteredSegments.size(); i++) {
Assert.assertEquals(expectedSegments.get(i).getInterval(), filteredSegments.get(i).getInterval());
}
/* Test 2 year period filtering */
SegmentMetadataQueryConfig twoYearPeriodCfg = new SegmentMetadataQueryConfig("P2Y");
List<LogicalSegment> filteredSegments2 = new SegmentMetadataQueryQueryToolChest(twoYearPeriodCfg)
    .filterSegments(testQuery, testSegments);
List<LogicalSegment> expectedSegments2 = Arrays.asList(new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2013-05-20/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2014-01-05/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2014-02-05/P1D");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2015-01-19T01/PT1H");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
}, new LogicalSegment() {
@Override
public Interval getInterval() {
return Intervals.of("2015-01-20T02/PT1H");
}
@Override
public Interval getTrueInterval() {
return getInterval();
}
});
Assert.assertEquals(5, filteredSegments2.size());
for (int i = 0; i < filteredSegments2.size(); i++) {
Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval());
}
}
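For contrast, here is a minimal sketch (not part of the Druid test above) of the same builder with an explicit interval supplied, in which case isUsingDefaultInterval() should return false and the default-history filtering exercised above should not apply. The interval value is illustrative.
SegmentMetadataQuery explicitIntervalQuery = Druids.newSegmentMetadataQueryBuilder()
    .dataSource("testing")
    // Illustrative interval; supplying any explicit interval opts out of the default-interval behavior.
    .intervals(Collections.singletonList(Intervals.of("2013-01-01/2014-01-01")))
    .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
    .merge(true)
    .build();
Assert.assertFalse(explicitIntervalQuery.isUsingDefaultInterval());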
use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.
the class SegmentMetadataQueryTest method testSegmentMetadataQueryWithDefaultAnalysisMerge.
private void testSegmentMetadataQueryWithDefaultAnalysisMerge(String column, ColumnAnalysis analysis) {
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
    differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
    ImmutableList.of(expectedSegmentAnalysis1.getIntervals().get(0)),
    ImmutableMap.of(
        "__time", new ColumnAnalysis(ColumnType.LONG, ValueType.LONG.toString(), false, false, 12090 * 2, null, null, null, null),
        "index", new ColumnAnalysis(ColumnType.DOUBLE, ValueType.DOUBLE.toString(), false, false, 9672 * 2, null, null, null, null),
        column, analysis
    ),
    expectedSegmentAnalysis1.getSize() + expectedSegmentAnalysis2.getSize(),
    expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
    null,
    null,
    null,
    null
);
QueryToolChest toolChest = FACTORY.getToolchest();
ExecutorService exec = Executors.newCachedThreadPool();
QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
    toolChest.mergeResults(
        FACTORY.mergeRunners(
            Execs.directExecutor(),
            Lists.newArrayList(toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2))
        )
    ),
    toolChest
);
Query query = testQuery.withColumns(new ListColumnIncluderator(Arrays.asList("__time", "index", column)));
TestHelper.assertExpectedObjects(ImmutableList.of(mergedSegmentAnalysis), myRunner.run(QueryPlus.wrap(query)), "failed SegmentMetadata merging query");
exec.shutdownNow();
}
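A hypothetical caller for this private helper (the actual @Test methods that invoke it are not shown on this page): the ColumnAnalysis follows the same nine-argument constructor used above, and the size, cardinality, and min/max values are placeholders rather than the real fixture numbers.
@Test
public void testDefaultAnalysisMergeOnPlacementColumn() {
    // Placeholder expectation for the "placement" column; real values depend on the test segments.
    testSegmentMetadataQueryWithDefaultAnalysisMerge(
        "placement",
        new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 10881 * 2, 1, "preferred", "preferred", null)
    );
}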
use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.
the class SegmentMetadataUnionQueryTest method testSegmentMetadataUnionQuery.
@Test
public void testSegmentMetadataUnionQuery() {
SegmentAnalysis expected = new SegmentAnalysis(
    QueryRunnerTestHelper.SEGMENT_ID.toString(),
    Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
    ImmutableMap.of(
        "placement",
        new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, mmap ? 43524 : 43056, 1, "preferred", "preferred", null)
    ),
    mmap ? 800544 : 803324,
    4836,
    null,
    null,
    null,
    null
);
SegmentMetadataQuery query = new Druids.SegmentMetadataQueryBuilder()
    .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE)
    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
    .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
    .analysisTypes(
        SegmentMetadataQuery.AnalysisType.CARDINALITY,
        SegmentMetadataQuery.AnalysisType.SIZE,
        SegmentMetadataQuery.AnalysisType.INTERVAL,
        SegmentMetadataQuery.AnalysisType.MINMAX
    )
    .build();
List result = runner.run(QueryPlus.wrap(query)).toList();
TestHelper.assertExpectedObjects(ImmutableList.of(expected), result, "failed SegmentMetadata union query");
}
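As a follow-up sketch (not in the original test), one could pull the per-column analysis back out of the result; getColumns() on SegmentAnalysis and getCardinality() on ColumnAnalysis are assumed accessors here.
// Assumed accessors; cardinality 1 matches the expected fixture above.
SegmentAnalysis actual = (SegmentAnalysis) result.get(0);
Assert.assertEquals(Integer.valueOf(1), actual.getColumns().get("placement").getCardinality());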
use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.
the class DumpSegment method runMetadata.
private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException {
final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class))
    .copy()
    .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
// Constructor arguments (assumed order): data source, segment spec, columns to include,
// merge, context, analysis types, usingDefaultInterval, lenientAggregatorMerge.
final SegmentMetadataQuery query = new SegmentMetadataQuery(
    new TableDataSource("dataSource"),
    new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)),
    new ListColumnIncluderator(getColumnsToInclude(index)),
    false,
    null,
    EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class),
    false,
    false
);
withOutputStream(new Function<OutputStream, Object>() {
@Override
public Object apply(final OutputStream out) {
evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), new Function<SegmentAnalysis, Object>() {
@Override
public Object apply(SegmentAnalysis analysis) {
try {
objectMapper.writeValue(out, analysis);
} catch (IOException e) {
throw new RuntimeException(e);
}
return null;
}
}));
return null;
}
});
}
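For comparison with the test snippets above, here is a rough builder-based sketch of an equivalent query (illustrative only; it assumes the Druids builder overloads used elsewhere on this page and omits the explicit usingDefaultInterval/lenientAggregatorMerge flags of the raw constructor).
// Sketch: roughly the same query expressed through the builder API instead of the raw constructor.
SegmentMetadataQuery builderQuery = Druids.newSegmentMetadataQueryBuilder()
    .dataSource("dataSource")
    .intervals(new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)))
    .toInclude(new ListColumnIncluderator(getColumnsToInclude(index)))
    .analysisTypes(SegmentMetadataQuery.AnalysisType.values())
    .merge(false)
    .build();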
use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.
the class DoubleStorageTest method testMetaDataAnalysis.
@Test
public void testMetaDataAnalysis() {
QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(METADATA_QR_FACTORY, SEGMENT_ID, new QueryableIndexSegment(index, SEGMENT_ID), null);
SegmentMetadataQuery segmentMetadataQuery = Druids.newSegmentMetadataQueryBuilder()
    .dataSource("testing")
    .intervals(ImmutableList.of(INTERVAL))
    .toInclude(new ListColumnIncluderator(Arrays.asList(TIME_COLUMN, DIM_NAME, DIM_FLOAT_NAME)))
    .analysisTypes(
        SegmentMetadataQuery.AnalysisType.CARDINALITY,
        SegmentMetadataQuery.AnalysisType.SIZE,
        SegmentMetadataQuery.AnalysisType.INTERVAL,
        SegmentMetadataQuery.AnalysisType.MINMAX
    )
    .merge(true)
    .build();
List<SegmentAnalysis> results = runner.run(QueryPlus.wrap(segmentMetadataQuery)).toList();
Assert.assertEquals(Collections.singletonList(expectedSegmentAnalysis), results);
}
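The expectedSegmentAnalysis fixture is defined elsewhere in DoubleStorageTest; purely to illustrate its shape, a per-column entry for the double column would follow the same nine-argument ColumnAnalysis pattern used in the snippets above (every numeric value below is a placeholder, not the real fixture).
// Illustrative placeholder only; the actual size and cardinality come from the DoubleStorageTest fixture.
ColumnAnalysis placeholderDoubleAnalysis =
    new ColumnAnalysis(ColumnType.DOUBLE, ValueType.DOUBLE.toString(), false, false, 80000, null, null, null, null);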