Example 6 with ListColumnIncluderator

Use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.

From class SegmentMetadataQueryTest, method testDefaultIntervalAndFiltering.

@Test
public void testDefaultIntervalAndFiltering() {
    SegmentMetadataQuery testQuery = Druids
        .newSegmentMetadataQueryBuilder()
        .dataSource("testing")
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .merge(true)
        .build();
    /* No interval specified, should use default interval */
    Assert.assertTrue(testQuery.isUsingDefaultInterval());
    Assert.assertEquals(Intervals.ETERNITY, testQuery.getIntervals().get(0));
    Assert.assertEquals(testQuery.getIntervals().size(), 1);
    List<LogicalSegment> testSegments = Arrays.asList(new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2012-01-01/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2012-01-01T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2013-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2013-05-20/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-02-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    });
    /* Test default period filter */
    List<LogicalSegment> filteredSegments = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig())
        .filterSegments(testQuery, testSegments);
    List<LogicalSegment> expectedSegments = Arrays.asList(new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    });
    Assert.assertEquals(filteredSegments.size(), 2);
    for (int i = 0; i < filteredSegments.size(); i++) {
        Assert.assertEquals(expectedSegments.get(i).getInterval(), filteredSegments.get(i).getInterval());
    }
    /* Test 2 year period filtering */
    SegmentMetadataQueryConfig twoYearPeriodCfg = new SegmentMetadataQueryConfig("P2Y");
    List<LogicalSegment> filteredSegments2 = new SegmentMetadataQueryQueryToolChest(twoYearPeriodCfg)
        .filterSegments(testQuery, testSegments);
    List<LogicalSegment> expectedSegments2 = Arrays.asList(new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2013-05-20/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-02-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    });
    Assert.assertEquals(filteredSegments2.size(), 5);
    for (int i = 0; i < filteredSegments2.size(); i++) {
        Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval());
    }
}
Also used : LogicalSegment(org.apache.druid.timeline.LogicalSegment) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) Interval(org.joda.time.Interval) Test(org.junit.Test)
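
The eight anonymous LogicalSegment instances above differ only in their interval string. As a side note (not part of the Druid test), the repetition could be factored out with a small helper; a sketch, assuming only getInterval() and getTrueInterval() matter to the interval-based filtering exercised here:

// Hypothetical helper: a LogicalSegment whose true interval equals its interval,
// which is all that filterSegments() looks at in this test.
private static LogicalSegment segment(final String interval) {
    return new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of(interval);
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    };
}

// Usage: the same segment list as in the test, without the boilerplate.
List<LogicalSegment> testSegments = Arrays.asList(
    segment("2012-01-01/P1D"),
    segment("2012-01-01T01/PT1H"),
    segment("2013-01-05/P1D"),
    segment("2013-05-20/P1D"),
    segment("2014-01-05/P1D"),
    segment("2014-02-05/P1D"),
    segment("2015-01-19T01/PT1H"),
    segment("2015-01-20T02/PT1H")
);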

Example 7 with ListColumnIncluderator

Use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.

From class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithDefaultAnalysisMerge.

private void testSegmentMetadataQueryWithDefaultAnalysisMerge(String column, ColumnAnalysis analysis) {
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
        differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
        ImmutableList.of(expectedSegmentAnalysis1.getIntervals().get(0)),
        ImmutableMap.of(
            "__time",
            new ColumnAnalysis(ColumnType.LONG, ValueType.LONG.toString(), false, false, 12090 * 2, null, null, null, null),
            "index",
            new ColumnAnalysis(ColumnType.DOUBLE, ValueType.DOUBLE.toString(), false, false, 9672 * 2, null, null, null, null),
            column,
            analysis
        ),
        expectedSegmentAnalysis1.getSize() + expectedSegmentAnalysis2.getSize(),
        expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
        null,
        null,
        null,
        null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
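    // Note: exec is not passed to the merge below, which runs on Execs.directExecutor();
    // it is only shut down at the end of the method.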
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            FACTORY.mergeRunners(
                Execs.directExecutor(),
                Lists.newArrayList(
                    toolChest.preMergeQueryDecoration(runner1),
                    toolChest.preMergeQueryDecoration(runner2)
                )
            )
        ),
        toolChest
    );
    Query query = testQuery.withColumns(new ListColumnIncluderator(Arrays.asList("__time", "index", column)));
    TestHelper.assertExpectedObjects(ImmutableList.of(mergedSegmentAnalysis), myRunner.run(QueryPlus.wrap(query)), "failed SegmentMetadata merging query");
    exec.shutdownNow();
}
Also used : FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) Query(org.apache.druid.query.Query) ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) ExecutorService(java.util.concurrent.ExecutorService) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) QueryToolChest(org.apache.druid.query.QueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner)

Example 8 with ListColumnIncluderator

Use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.

From class SegmentMetadataUnionQueryTest, method testSegmentMetadataUnionQuery.

@Test
public void testSegmentMetadataUnionQuery() {
    SegmentAnalysis expected = new SegmentAnalysis(
        QueryRunnerTestHelper.SEGMENT_ID.toString(),
        Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, mmap ? 43524 : 43056, 1, "preferred", "preferred", null)
        ),
        mmap ? 800544 : 803324,
        4836,
        null,
        null,
        null,
        null
    );
    SegmentMetadataQuery query = new Druids.SegmentMetadataQueryBuilder()
        .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE)
        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .analysisTypes(
            SegmentMetadataQuery.AnalysisType.CARDINALITY,
            SegmentMetadataQuery.AnalysisType.SIZE,
            SegmentMetadataQuery.AnalysisType.INTERVAL,
            SegmentMetadataQuery.AnalysisType.MINMAX
        )
        .build();
    List result = runner.run(QueryPlus.wrap(query)).toList();
    TestHelper.assertExpectedObjects(ImmutableList.of(expected), result, "failed SegmentMetadata union query");
}
Also used : SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) Druids(org.apache.druid.query.Druids) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 9 with ListColumnIncluderator

Use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.

From class DumpSegment, method runMetadata.

private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException {
    final ObjectMapper objectMapper = injector
        .getInstance(Key.get(ObjectMapper.class, Json.class))
        .copy()
        .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    final SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("dataSource"),
        new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)),
        new ListColumnIncluderator(getColumnsToInclude(index)),
        false,
        null,
        EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class),
        false,
        false
    );
    withOutputStream(new Function<OutputStream, Object>() {

        @Override
        public Object apply(final OutputStream out) {
            evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), new Function<SegmentAnalysis, Object>() {

                @Override
                public Object apply(SegmentAnalysis analysis) {
                    try {
                        objectMapper.writeValue(out, analysis);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                    return null;
                }
            }));
            return null;
        }
    });
}
Also used : ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) Json(org.apache.druid.guice.annotations.Json) IOException(java.io.IOException) TableDataSource(org.apache.druid.query.TableDataSource) SpecificSegmentSpec(org.apache.druid.query.spec.SpecificSegmentSpec) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
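
As an aside (not how the project writes it), the two anonymous Function classes in runMetadata could be collapsed into lambdas on Java 8+; a sketch, keeping the helper methods withOutputStream, executeQuery, and evaluateSequenceForSideEffects exactly as they appear above:

withOutputStream(out -> {
    // For each SegmentAnalysis produced by the query, serialize it to the output stream.
    evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), analysis -> {
        try {
            objectMapper.writeValue(out, analysis);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return null;
    }));
    return null;
});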

Example 10 with ListColumnIncluderator

Use of org.apache.druid.query.metadata.metadata.ListColumnIncluderator in project druid by druid-io.

From class DoubleStorageTest, method testMetaDataAnalysis.

@Test
public void testMetaDataAnalysis() {
    QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
        METADATA_QR_FACTORY,
        SEGMENT_ID,
        new QueryableIndexSegment(index, SEGMENT_ID),
        null
    );
    SegmentMetadataQuery segmentMetadataQuery = Druids
        .newSegmentMetadataQueryBuilder()
        .dataSource("testing")
        .intervals(ImmutableList.of(INTERVAL))
        .toInclude(new ListColumnIncluderator(Arrays.asList(TIME_COLUMN, DIM_NAME, DIM_FLOAT_NAME)))
        .analysisTypes(
            SegmentMetadataQuery.AnalysisType.CARDINALITY,
            SegmentMetadataQuery.AnalysisType.SIZE,
            SegmentMetadataQuery.AnalysisType.INTERVAL,
            SegmentMetadataQuery.AnalysisType.MINMAX
        )
        .merge(true)
        .build();
    List<SegmentAnalysis> results = runner.run(QueryPlus.wrap(segmentMetadataQuery)).toList();
    Assert.assertEquals(Collections.singletonList(expectedSegmentAnalysis), results);
}
Also used : QueryableIndexSegment(org.apache.druid.segment.QueryableIndexSegment) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) ScanQueryRunnerTest(org.apache.druid.query.scan.ScanQueryRunnerTest) Test(org.junit.Test)

Aggregations

ListColumnIncluderator (org.apache.druid.query.metadata.metadata.ListColumnIncluderator): 14
SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery): 14
Test (org.junit.Test): 12
SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis): 11
ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis): 9
ExecutorService (java.util.concurrent.ExecutorService): 8
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 8
QueryRunner (org.apache.druid.query.QueryRunner): 8
QueryToolChest (org.apache.druid.query.QueryToolChest): 8
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
ImmutableList (com.google.common.collect.ImmutableList): 1
FileOutputStream (java.io.FileOutputStream): 1
IOException (java.io.IOException): 1
OutputStream (java.io.OutputStream): 1
HashMap (java.util.HashMap): 1
List (java.util.List): 1
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 1
Json (org.apache.druid.guice.annotations.Json): 1
Druids (org.apache.druid.query.Druids): 1
Query (org.apache.druid.query.Query): 1