use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
the class SegmentAnalyzerTest method testAnalyzingSegmentWithNonExistentAggregator.
/**
 * Verifies that analyzing a segment created with an unknown/invalid aggregator
 * (which can happen if an aggregator was removed in a later version) does not fail,
 * and that the analysis of the affected complex column is reported as an error.
 *
 * @throws IOException
 */
@Test
public void testAnalyzingSegmentWithNonExistentAggregator() throws IOException {
    final URL resource = SegmentAnalyzerTest.class.getClassLoader().getResource("druid.sample.numeric.tsv");
    CharSource source = Resources.asByteSource(resource).asCharSource(StandardCharsets.UTF_8);
    String invalid_aggregator = "invalid_aggregator";
    AggregatorFactory[] metrics = new AggregatorFactory[] {
        new DoubleSumAggregatorFactory(TestIndex.DOUBLE_METRICS[0], "index"),
        new HyperUniquesAggregatorFactory("quality_uniques", "quality"),
        new InvalidAggregatorFactory(invalid_aggregator, "quality")
    };
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
        .withTimestampSpec(new TimestampSpec("ds", "auto", null))
        .withDimensionsSpec(TestIndex.DIMENSIONS_SPEC)
        .withMetrics(metrics)
        .withRollup(true)
        .build();
    final IncrementalIndex retVal = new OnheapIncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(10000)
        .build();
    IncrementalIndex incrementalIndex = TestIndex.loadIncrementalIndex(retVal, source);
    // Analyze the in-memory segment.
    {
        SegmentAnalyzer analyzer = new SegmentAnalyzer(EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
        IncrementalIndexSegment segment = new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("ds"));
        Map<String, ColumnAnalysis> analyses = analyzer.analyze(segment);
        ColumnAnalysis columnAnalysis = analyses.get(invalid_aggregator);
        Assert.assertFalse(columnAnalysis.isError());
        Assert.assertEquals("invalid_complex_column_type", columnAnalysis.getType());
        Assert.assertEquals(ColumnType.ofComplex("invalid_complex_column_type"), columnAnalysis.getTypeSignature());
    }
    // Persist the index.
    final File segmentFile = TestIndex.INDEX_MERGER.persist(
        incrementalIndex,
        temporaryFolder.newFolder(),
        TestIndex.INDEX_SPEC,
        null
    );
    // Unload the complex serde, then analyze the persisted segment.
    ComplexMetrics.unregisterSerde(InvalidAggregatorFactory.TYPE);
    {
        SegmentAnalyzer analyzer = new SegmentAnalyzer(EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
        QueryableIndexSegment segment = new QueryableIndexSegment(TestIndex.INDEX_IO.loadIndex(segmentFile), SegmentId.dummy("ds"));
        Map<String, ColumnAnalysis> analyses = analyzer.analyze(segment);
        ColumnAnalysis invalidColumnAnalysis = analyses.get(invalid_aggregator);
        Assert.assertTrue(invalidColumnAnalysis.isError());
        Assert.assertEquals("error:unknown_complex_invalid_complex_column_type", invalidColumnAnalysis.getErrorMessage());
        // Also run a segment metadata query to verify that it doesn't break.
        final List<SegmentAnalysis> results = getSegmentAnalysises(segment, EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
        for (SegmentAnalysis result : results) {
            Assert.assertTrue(result.getColumns().get(invalid_aggregator).isError());
        }
    }
}
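The takeaway from this test generalizes: when a complex column's serde is missing, the analyzer signals the problem through ColumnAnalysis.isError() and getErrorMessage() instead of throwing. As a minimal sketch, a caller could surface all such columns like this; the collectErrorColumns helper is hypothetical (not part of the test class) and assumes java.util.Map and java.util.HashMap are imported:

// Hypothetical helper: collect every column whose analysis failed, e.g. because
// its complex serde is unknown on this node. Uses only ColumnAnalysis methods
// exercised above: isError() and getErrorMessage().
private static Map<String, String> collectErrorColumns(Map<String, ColumnAnalysis> analyses) {
    final Map<String, String> errors = new HashMap<>();
    for (Map.Entry<String, ColumnAnalysis> entry : analyses.entrySet()) {
        if (entry.getValue().isError()) {
            errors.put(entry.getKey(), entry.getValue().getErrorMessage());
        }
    }
    return errors;
}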
use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
the class SegmentAnalyzerTest method testIncrementalWorksHelper.
private void testIncrementalWorksHelper(EnumSet<SegmentMetadataQuery.AnalysisType> analyses) {
    final List<SegmentAnalysis> results = getSegmentAnalysises(
        new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), SegmentId.dummy("ds")),
        analyses
    );
    Assert.assertEquals(1, results.size());
    final SegmentAnalysis analysis = results.get(0);
    Assert.assertEquals(SegmentId.dummy("ds").toString(), analysis.getId());
    final Map<String, ColumnAnalysis> columns = analysis.getColumns();
    Assert.assertEquals(TestIndex.COLUMNS.length + 3, columns.size());
    for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) {
        final String dimension = schema.getName();
        final ColumnAnalysis columnAnalysis = columns.get(dimension);
        final boolean isString = schema.getColumnType().is(ValueType.STRING);
        Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType());
        Assert.assertEquals(dimension, 0, columnAnalysis.getSize());
        if (isString) {
            if (analyses == null) {
                Assert.assertTrue(dimension, columnAnalysis.getCardinality() > 0);
            } else {
                Assert.assertEquals(dimension, 0, columnAnalysis.getCardinality().longValue());
            }
        } else {
            Assert.assertNull(dimension, columnAnalysis.getCardinality());
        }
    }
    for (String metric : TestIndex.DOUBLE_METRICS) {
        final ColumnAnalysis columnAnalysis = columns.get(metric);
        Assert.assertEquals(metric, ValueType.DOUBLE.name(), columnAnalysis.getType());
        Assert.assertEquals(metric, 0, columnAnalysis.getSize());
        Assert.assertNull(metric, columnAnalysis.getCardinality());
    }
    for (String metric : TestIndex.FLOAT_METRICS) {
        final ColumnAnalysis columnAnalysis = columns.get(metric);
        Assert.assertEquals(metric, ValueType.FLOAT.name(), columnAnalysis.getType());
        Assert.assertEquals(metric, 0, columnAnalysis.getSize());
        Assert.assertNull(metric, columnAnalysis.getCardinality());
    }
}
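Note the branch on analyses == null: a null set requests the analyzer's default (full) analysis, so string dimensions get a real, positive cardinality, while any explicit set that omits CARDINALITY leaves the value at 0. The callers are not shown on this page; they presumably look something like the following sketch (test names hypothetical):

@Test
public void testIncrementalWorksWithDefaultAnalysis() {
    // null means "default analysis": cardinality is computed for string dimensions.
    testIncrementalWorksHelper(null);
}

@Test
public void testIncrementalWorksWithSizeOnly() {
    // An explicit set without CARDINALITY: cardinality comes back as 0.
    testIncrementalWorksHelper(EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
}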
use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
the class SegmentMetadataQueryTest method testSegmentMetadataQueryWithDefaultAnalysisMerge.
@Test
public void testSegmentMetadataQueryWithDefaultAnalysisMerge() {
    int size1 = 0;
    int size2 = 0;
    if (bitmaps) {
        size1 = mmap1 ? 10881 : 10764;
        size2 = mmap2 ? 10881 : 10764;
    }
    ColumnAnalysis analysis = new ColumnAnalysis(
        ColumnType.STRING,
        ValueType.STRING.toString(),
        false,
        false,
        size1 + size2,
        1,
        "preferred",
        "preferred",
        null
    );
    testSegmentMetadataQueryWithDefaultAnalysisMerge("placement", analysis);
}
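The nine positional arguments of the ColumnAnalysis constructor are easy to misread, so here is the same construction with each slot labeled. The labels are inferred from the getters these tests call (getTypeSignature, getType, getSize, getCardinality, getErrorMessage), not quoted from the class itself, so treat them as descriptive only:

ColumnAnalysis analysis = new ColumnAnalysis(
    ColumnType.STRING,           // type signature, cf. getTypeSignature()
    ValueType.STRING.toString(), // type name, cf. getType()
    false,                       // hasMultipleValues
    false,                       // hasNulls
    size1 + size2,               // size, cf. getSize(); the merge sums the per-segment sizes
    1,                           // cardinality, cf. getCardinality()
    "preferred",                 // minValue
    "preferred",                 // maxValue
    null                         // error message, cf. getErrorMessage()
);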
use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
the class SegmentMetadataQueryTest method testSegmentMetadataQueryWithTimestampSpecMerge.
@Test
public void testSegmentMetadataQueryWithTimestampSpecMerge() {
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
        differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
        null,
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
        ),
        0,
        expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
        null,
        new TimestampSpec("ds", "auto", null),
        null,
        null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            FACTORY.mergeRunners(
                Execs.directExecutor(),
                Lists.newArrayList(
                    toolChest.preMergeQueryDecoration(runner1),
                    toolChest.preMergeQueryDecoration(runner2)
                )
            )
        ),
        toolChest
    );
    SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("testing")
        .intervals("2013/2014")
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .analysisTypes(SegmentMetadataQuery.AnalysisType.TIMESTAMPSPEC)
        .merge(true)
        .build();
    TestHelper.assertExpectedObjects(
        ImmutableList.of(mergedSegmentAnalysis),
        myRunner.run(QueryPlus.wrap(query)),
        "failed SegmentMetadata merging query"
    );
    exec.shutdownNow();
}
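The SegmentAnalysis constructor is likewise purely positional. Here is the construction above once more with each slot labeled; the labels are inferred from how this test and the following one populate the slots (the TimestampSpec lands in the slot that TIMESTAMPSPEC analysis reports on, Granularities.NONE in the one QUERYGRANULARITY reports on), so treat them as descriptive rather than authoritative:

SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
    differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), // segment id
    null,                                                                // intervals (not analyzed here)
    ImmutableMap.of(                                                     // per-column analyses
        "placement",
        new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
    ),
    0,                                                                   // total size
    expectedSegmentAnalysis1.getNumRows()
        + expectedSegmentAnalysis2.getNumRows(),                         // numRows: summed across the merged segments
    null,                                                                // aggregators
    new TimestampSpec("ds", "auto", null),                               // timestampSpec (asserted by this test)
    null,                                                                // queryGranularity (Granularities.NONE in the next test)
    null                                                                 // rollup
);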
use of org.apache.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.
the class SegmentMetadataQueryTest method testSegmentMetadataQueryWithQueryGranularityMerge.
@Test
public void testSegmentMetadataQueryWithQueryGranularityMerge() {
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
        differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
        null,
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
        ),
        0,
        expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
        null,
        null,
        Granularities.NONE,
        null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            FACTORY.mergeRunners(
                Execs.directExecutor(),
                Lists.newArrayList(
                    toolChest.preMergeQueryDecoration(runner1),
                    toolChest.preMergeQueryDecoration(runner2)
                )
            )
        ),
        toolChest
    );
    SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("testing")
        .intervals("2013/2014")
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .analysisTypes(SegmentMetadataQuery.AnalysisType.QUERYGRANULARITY)
        .merge(true)
        .build();
    TestHelper.assertExpectedObjects(
        ImmutableList.of(mergedSegmentAnalysis),
        myRunner.run(QueryPlus.wrap(query)),
        "failed SegmentMetadata merging query"
    );
    exec.shutdownNow();
}
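An equivalent direct check of the merged result, instead of going through TestHelper, might look like the sketch below. It assumes the runner's result sequence supports toList() and that SegmentAnalysis exposes getQueryGranularity(), as elsewhere in Druid's tests:

// Hedged sketch: merge(true) collapses the two per-segment results into one
// SegmentAnalysis, which should carry the shared query granularity.
List<SegmentAnalysis> merged = myRunner.run(QueryPlus.wrap(query)).toList();
Assert.assertEquals(1, merged.size());
Assert.assertEquals(Granularities.NONE, merged.get(0).getQueryGranularity());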