Example 1 with SegmentAnalysis

Use of org.apache.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.

From class SegmentAnalyzerTest, method testAnalyzingSegmentWithNonExistentAggregator.

/**
 * Verifies that if a segment was created using an unknown/invalid aggregator
 * (which can happen if an aggregator was removed in a later version),
 * analyzing the segment does not fail, and the analysis of the complex column
 * is reported as an error.
 *
 * @throws IOException
 */
@Test
public void testAnalyzingSegmentWithNonExistentAggregator() throws IOException {
    final URL resource = SegmentAnalyzerTest.class.getClassLoader().getResource("druid.sample.numeric.tsv");
    CharSource source = Resources.asByteSource(resource).asCharSource(StandardCharsets.UTF_8);
    String invalid_aggregator = "invalid_aggregator";
    AggregatorFactory[] metrics = new AggregatorFactory[] {
        new DoubleSumAggregatorFactory(TestIndex.DOUBLE_METRICS[0], "index"),
        new HyperUniquesAggregatorFactory("quality_uniques", "quality"),
        new InvalidAggregatorFactory(invalid_aggregator, "quality")
    };
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
        .withTimestampSpec(new TimestampSpec("ds", "auto", null))
        .withDimensionsSpec(TestIndex.DIMENSIONS_SPEC)
        .withMetrics(metrics)
        .withRollup(true)
        .build();
    final IncrementalIndex retVal = new OnheapIncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(10000)
        .build();
    IncrementalIndex incrementalIndex = TestIndex.loadIncrementalIndex(retVal, source);
    // Analyze the in-memory segment.
    {
        SegmentAnalyzer analyzer = new SegmentAnalyzer(EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
        IncrementalIndexSegment segment = new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("ds"));
        Map<String, ColumnAnalysis> analyses = analyzer.analyze(segment);
        ColumnAnalysis columnAnalysis = analyses.get(invalid_aggregator);
        Assert.assertFalse(columnAnalysis.isError());
        Assert.assertEquals("invalid_complex_column_type", columnAnalysis.getType());
        Assert.assertEquals(ColumnType.ofComplex("invalid_complex_column_type"), columnAnalysis.getTypeSignature());
    }
    // Persist the index.
    final File segmentFile = TestIndex.INDEX_MERGER.persist(
        incrementalIndex,
        temporaryFolder.newFolder(),
        TestIndex.INDEX_SPEC,
        null
    );
    // Unload the complex serde, then analyze the persisted segment.
    ComplexMetrics.unregisterSerde(InvalidAggregatorFactory.TYPE);
    {
        SegmentAnalyzer analyzer = new SegmentAnalyzer(EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
        QueryableIndexSegment segment = new QueryableIndexSegment(
            TestIndex.INDEX_IO.loadIndex(segmentFile),
            SegmentId.dummy("ds")
        );
        Map<String, ColumnAnalysis> analyses = analyzer.analyze(segment);
        ColumnAnalysis invalidColumnAnalysis = analyses.get(invalid_aggregator);
        Assert.assertTrue(invalidColumnAnalysis.isError());
        Assert.assertEquals("error:unknown_complex_invalid_complex_column_type", invalidColumnAnalysis.getErrorMessage());
        // Also run a segment metadata query to verify it doesn't break
        final List<SegmentAnalysis> results = getSegmentAnalysises(segment, EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
        for (SegmentAnalysis result : results) {
            Assert.assertTrue(result.getColumns().get(invalid_aggregator).isError());
        }
    }
}
Also used: QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment), CharSource (com.google.common.io.CharSource), DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment), ColumnBuilder (org.apache.druid.segment.column.ColumnBuilder), HyperUniquesAggregatorFactory (org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), URL (java.net.URL), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis), List (java.util.List), SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis), Map (java.util.Map), File (java.io.File), IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
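
The test above calls a getSegmentAnalysises helper whose body is not part of this excerpt. Below is a minimal sketch of what such a helper could look like, assuming it simply runs a SegmentMetadataQuery against a single segment via the factory's runner; the FACTORY field, the datasource name, and the interval are assumptions, and only builder/runner calls that already appear on this page are used.

private List<SegmentAnalysis> getSegmentAnalysises(Segment segment, EnumSet<SegmentMetadataQuery.AnalysisType> analyses) {
    // Build a metadata query over the test segment; datasource and interval are illustrative.
    Druids.SegmentMetadataQueryBuilder builder = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("ds")
        .intervals("2011/2012")
        .merge(false);
    if (analyses != null) {
        builder.analysisTypes(analyses.toArray(new SegmentMetadataQuery.AnalysisType[0]));
    }
    // FACTORY is assumed to be the SegmentMetadataQueryRunnerFactory used by this test class.
    QueryRunner<SegmentAnalysis> runner = FACTORY.createRunner(segment);
    return runner.run(QueryPlus.wrap(builder.build())).toList();
}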

Example 2 with SegmentAnalysis

Use of org.apache.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.

From class SegmentAnalyzerTest, method testIncrementalWorksHelper.

private void testIncrementalWorksHelper(EnumSet<SegmentMetadataQuery.AnalysisType> analyses) {
    final List<SegmentAnalysis> results = getSegmentAnalysises(
        new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), SegmentId.dummy("ds")),
        analyses
    );
    Assert.assertEquals(1, results.size());
    final SegmentAnalysis analysis = results.get(0);
    Assert.assertEquals(SegmentId.dummy("ds").toString(), analysis.getId());
    final Map<String, ColumnAnalysis> columns = analysis.getColumns();
    Assert.assertEquals(TestIndex.COLUMNS.length + 3, columns.size());
    for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) {
        final String dimension = schema.getName();
        final ColumnAnalysis columnAnalysis = columns.get(dimension);
        final boolean isString = schema.getColumnType().is(ValueType.STRING);
        Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType());
        Assert.assertEquals(dimension, 0, columnAnalysis.getSize());
        if (isString) {
            if (analyses == null) {
                Assert.assertTrue(dimension, columnAnalysis.getCardinality() > 0);
            } else {
                Assert.assertEquals(dimension, 0, columnAnalysis.getCardinality().longValue());
            }
        } else {
            Assert.assertNull(dimension, columnAnalysis.getCardinality());
        }
    }
    for (String metric : TestIndex.DOUBLE_METRICS) {
        final ColumnAnalysis columnAnalysis = columns.get(metric);
        Assert.assertEquals(metric, ValueType.DOUBLE.name(), columnAnalysis.getType());
        Assert.assertEquals(metric, 0, columnAnalysis.getSize());
        Assert.assertNull(metric, columnAnalysis.getCardinality());
    }
    for (String metric : TestIndex.FLOAT_METRICS) {
        final ColumnAnalysis columnAnalysis = columns.get(metric);
        Assert.assertEquals(metric, ValueType.FLOAT.name(), columnAnalysis.getType());
        Assert.assertEquals(metric, 0, columnAnalysis.getSize());
        Assert.assertNull(metric, columnAnalysis.getCardinality());
    }
}
Also used: IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment), ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis), SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis), DimensionSchema (org.apache.druid.data.input.impl.DimensionSchema)
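
A hypothetical caller of this helper (a sketch; the actual @Test methods are not part of this excerpt) would exercise both the default analysis types and an explicit set, matching the two branches of the cardinality assertions above.

@Test
public void testIncrementalWorks() {
    // null exercises the default analysis types (cardinality expected to be computed),
    // while an explicit EnumSet that omits CARDINALITY leaves it at zero.
    testIncrementalWorksHelper(null);
    testIncrementalWorksHelper(EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
}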

Example 3 with SegmentAnalysis

Use of org.apache.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.

From class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithTimestampSpecMerge.

@Test
public void testSegmentMetadataQueryWithTimestampSpecMerge() {
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
        differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
        null,
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
        ),
        0,
        expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
        null,
        new TimestampSpec("ds", "auto", null),
        null,
        null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            FACTORY.mergeRunners(
                Execs.directExecutor(),
                Lists.newArrayList(
                    toolChest.preMergeQueryDecoration(runner1),
                    toolChest.preMergeQueryDecoration(runner2)
                )
            )
        ),
        toolChest
    );
    SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("testing")
        .intervals("2013/2014")
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .analysisTypes(SegmentMetadataQuery.AnalysisType.TIMESTAMPSPEC)
        .merge(true)
        .build();
    TestHelper.assertExpectedObjects(
        ImmutableList.of(mergedSegmentAnalysis),
        myRunner.run(QueryPlus.wrap(query)),
        "failed SegmentMetadata merging query"
    );
    exec.shutdownNow();
}
Also used: FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery), ListColumnIncluderator (org.apache.druid.query.metadata.metadata.ListColumnIncluderator), ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), ExecutorService (java.util.concurrent.ExecutorService), SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis), QueryToolChest (org.apache.druid.query.QueryToolChest), QueryRunner (org.apache.druid.query.QueryRunner), Test (org.junit.Test)

Example 4 with SegmentAnalysis

Use of org.apache.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.

From class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithQueryGranularityMerge.

@Test
public void testSegmentMetadataQueryWithQueryGranularityMerge() {
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
        differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
        null,
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
        ),
        0,
        expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
        null,
        null,
        Granularities.NONE,
        null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            FACTORY.mergeRunners(
                Execs.directExecutor(),
                Lists.newArrayList(
                    toolChest.preMergeQueryDecoration(runner1),
                    toolChest.preMergeQueryDecoration(runner2)
                )
            )
        ),
        toolChest
    );
    SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("testing")
        .intervals("2013/2014")
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .analysisTypes(SegmentMetadataQuery.AnalysisType.QUERYGRANULARITY)
        .merge(true)
        .build();
    TestHelper.assertExpectedObjects(
        ImmutableList.of(mergedSegmentAnalysis),
        myRunner.run(QueryPlus.wrap(query)),
        "failed SegmentMetadata merging query"
    );
    exec.shutdownNow();
}
Also used: FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery), ListColumnIncluderator (org.apache.druid.query.metadata.metadata.ListColumnIncluderator), ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis), ExecutorService (java.util.concurrent.ExecutorService), SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis), QueryToolChest (org.apache.druid.query.QueryToolChest), QueryRunner (org.apache.druid.query.QueryRunner), Test (org.junit.Test)

Example 5 with SegmentAnalysis

Use of org.apache.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.

From class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithAggregatorsMerge.

@Test
public void testSegmentMetadataQueryWithAggregatorsMerge() {
    final Map<String, AggregatorFactory> expectedAggregators = new HashMap<>();
    for (AggregatorFactory agg : TestIndex.METRIC_AGGS) {
        expectedAggregators.put(agg.getName(), agg.getCombiningFactory());
    }
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
        differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
        null,
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, 0, 0, null, null, null)
        ),
        0,
        expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
        expectedAggregators,
        null,
        null,
        null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            FACTORY.mergeRunners(
                Execs.directExecutor(),
                Lists.newArrayList(
                    toolChest.preMergeQueryDecoration(runner1),
                    toolChest.preMergeQueryDecoration(runner2)
                )
            )
        ),
        toolChest
    );
    SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("testing")
        .intervals("2013/2014")
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .analysisTypes(SegmentMetadataQuery.AnalysisType.AGGREGATORS)
        .merge(true)
        .build();
    TestHelper.assertExpectedObjects(
        ImmutableList.of(mergedSegmentAnalysis),
        myRunner.run(QueryPlus.wrap(query)),
        "failed SegmentMetadata merging query"
    );
    exec.shutdownNow();
}
Also used: FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), HashMap (java.util.HashMap), SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery), ListColumnIncluderator (org.apache.druid.query.metadata.metadata.ListColumnIncluderator), ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis), ExecutorService (java.util.concurrent.ExecutorService), SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis), QueryToolChest (org.apache.druid.query.QueryToolChest), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), QueryRunner (org.apache.druid.query.QueryRunner), Test (org.junit.Test)
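
The three merge tests above differ only in the expected SegmentAnalysis and the requested analysis type; the runner scaffolding is repeated verbatim. A possible refactoring sketch that extracts the shared part (the helper name is illustrative, not from the source):

private void assertMergedAnalysis(SegmentAnalysis expected, SegmentMetadataQuery query) {
    QueryToolChest toolChest = FACTORY.getToolchest();
    // Decorate and merge the two per-segment runners exactly as the tests above do.
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            FACTORY.mergeRunners(
                Execs.directExecutor(),
                Lists.newArrayList(
                    toolChest.preMergeQueryDecoration(runner1),
                    toolChest.preMergeQueryDecoration(runner2)
                )
            )
        ),
        toolChest
    );
    TestHelper.assertExpectedObjects(
        ImmutableList.of(expected),
        myRunner.run(QueryPlus.wrap(query)),
        "failed SegmentMetadata merging query"
    );
}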

Aggregations

SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis): 30 usages
ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis): 20 usages
Test (org.junit.Test): 18 usages
SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery): 16 usages
QueryRunner (org.apache.druid.query.QueryRunner): 11 usages
ListColumnIncluderator (org.apache.druid.query.metadata.metadata.ListColumnIncluderator): 11 usages
ExecutorService (java.util.concurrent.ExecutorService): 9 usages
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 8 usages
QueryToolChest (org.apache.druid.query.QueryToolChest): 8 usages
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 5 usages
IOException (java.io.IOException): 4 usages
Map (java.util.Map): 4 usages
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 4 usages
HashMap (java.util.HashMap): 3 usages
TableDataSource (org.apache.druid.query.TableDataSource): 3 usages
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 3 usages
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 3 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2 usages
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 2 usages
List (java.util.List): 2 usages