
Example 6 with SegmentMetadataQuery

Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.

In the class SegmentMetadataQueryTest, the method testSerdeWithDefaultInterval.

@Test
public void testSerdeWithDefaultInterval() throws Exception {
    String queryStr = "{\n"
            + "  \"queryType\":\"segmentMetadata\",\n"
            + "  \"dataSource\":\"test_ds\"\n"
            + "}";
    Query query = MAPPER.readValue(queryStr, Query.class);
    Assert.assertTrue(query instanceof SegmentMetadataQuery);
    Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getTableNames()));
    Assert.assertEquals(Intervals.ETERNITY, query.getIntervals().get(0));
    Assert.assertTrue(((SegmentMetadataQuery) query).isUsingDefaultInterval());
    // test serialize and deserialize
    Assert.assertEquals(query, MAPPER.readValue(MAPPER.writeValueAsString(query), Query.class));
    // test copy
    Assert.assertEquals(query, Druids.SegmentMetadataQueryBuilder.copy((SegmentMetadataQuery) query).build());
}
Also used: SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) Query(org.apache.druid.query.Query) Test(org.junit.Test)
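For comparison, the same default-interval behaviour can be exercised without a JSON string by going through the Druids builder shown in Example 8. This is a minimal sketch, assuming the test's MAPPER (an ObjectMapper configured with Druid's query modules) is in scope; the test method name is hypothetical.

@Test
public void testBuilderDefaultIntervalSketch() throws Exception {
    // Build the query programmatically; no intervals are supplied, so the
    // default (eternity) interval should be used, as asserted in the test above.
    SegmentMetadataQuery built = Druids.newSegmentMetadataQueryBuilder()
            .dataSource("test_ds")
            .build();
    Assert.assertTrue(built.isUsingDefaultInterval());
    Assert.assertEquals(Intervals.ETERNITY, built.getIntervals().get(0));
    // Round trip through JSON, assuming the test's MAPPER is available.
    Assert.assertEquals(built, MAPPER.readValue(MAPPER.writeValueAsString(built), Query.class));
}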

Example 7 with SegmentMetadataQuery

Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.

In the class SegmentMetadataQueryTest, the method testSerde.

@Test
public void testSerde() throws Exception {
    String queryStr = "{\n"
            + "  \"queryType\":\"segmentMetadata\",\n"
            + "  \"dataSource\":\"test_ds\",\n"
            + "  \"intervals\":[\"2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z\"],\n"
            + "  \"analysisTypes\":[\"cardinality\",\"size\"]\n"
            + "}";
    EnumSet<SegmentMetadataQuery.AnalysisType> expectedAnalysisTypes =
            EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE);
    Query query = MAPPER.readValue(queryStr, Query.class);
    Assert.assertTrue(query instanceof SegmentMetadataQuery);
    Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getTableNames()));
    Assert.assertEquals(Intervals.of("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"), query.getIntervals().get(0));
    Assert.assertEquals(expectedAnalysisTypes, ((SegmentMetadataQuery) query).getAnalysisTypes());
    // test serialize and deserialize
    Assert.assertEquals(query, MAPPER.readValue(MAPPER.writeValueAsString(query), Query.class));
}
Also used: SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) Query(org.apache.druid.query.Query) Test(org.junit.Test)
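The analysisTypes array can likewise be selected through the builder, as Example 9 does, rather than through JSON. A minimal sketch under the same assumptions as the previous sketch (the test method name is hypothetical):

@Test
public void testBuilderAnalysisTypesSketch() {
    // Select CARDINALITY and SIZE through the builder, mirroring the JSON above.
    SegmentMetadataQuery built = Druids.newSegmentMetadataQueryBuilder()
            .dataSource("test_ds")
            .analysisTypes(SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE)
            .build();
    Assert.assertEquals(
            EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE),
            built.getAnalysisTypes());
}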

Example 8 with SegmentMetadataQuery

Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.

In the class SegmentMetadataQueryTest, the method testDefaultIntervalAndFiltering.

@Test
public void testDefaultIntervalAndFiltering() {
    SegmentMetadataQuery testQuery = Druids.newSegmentMetadataQueryBuilder()
            .dataSource("testing")
            .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
            .merge(true)
            .build();
    /* No interval specified, should use default interval */
    Assert.assertTrue(testQuery.isUsingDefaultInterval());
    Assert.assertEquals(Intervals.ETERNITY, testQuery.getIntervals().get(0));
    Assert.assertEquals(testQuery.getIntervals().size(), 1);
    List<LogicalSegment> testSegments = Arrays.asList(new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2012-01-01/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2012-01-01T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2013-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2013-05-20/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-02-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    });
    /* Test default period filter */
    List<LogicalSegment> filteredSegments = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).filterSegments(testQuery, testSegments);
    List<LogicalSegment> expectedSegments = Arrays.asList(new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    });
    Assert.assertEquals(filteredSegments.size(), 2);
    for (int i = 0; i < filteredSegments.size(); i++) {
        Assert.assertEquals(expectedSegments.get(i).getInterval(), filteredSegments.get(i).getInterval());
    }
    /* Test 2 year period filtering */
    SegmentMetadataQueryConfig twoYearPeriodCfg = new SegmentMetadataQueryConfig("P2Y");
    List<LogicalSegment> filteredSegments2 = new SegmentMetadataQueryQueryToolChest(twoYearPeriodCfg).filterSegments(testQuery, testSegments);
    List<LogicalSegment> expectedSegments2 = Arrays.asList(new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2013-05-20/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2014-02-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    }, new LogicalSegment() {

        @Override
        public Interval getInterval() {
            return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
            return getInterval();
        }
    });
    Assert.assertEquals(filteredSegments2.size(), 5);
    for (int i = 0; i < filteredSegments2.size(); i++) {
        Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval());
    }
}
Also used: LogicalSegment(org.apache.druid.timeline.LogicalSegment) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) Interval(org.joda.time.Interval) Test(org.junit.Test)
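The same pattern extends to other history periods: SegmentMetadataQueryConfig takes an ISO-8601 period string, and the tool chest filters segments against it. A minimal sketch reusing testQuery and testSegments from the test above; the one-month window is an illustrative, hypothetical choice.

    // Hypothetical one-month default history period; only the period string differs
    // from the P2Y case above.
    SegmentMetadataQueryConfig oneMonthCfg = new SegmentMetadataQueryConfig("P1M");
    List<LogicalSegment> filteredSegments3 =
            new SegmentMetadataQueryQueryToolChest(oneMonthCfg).filterSegments(testQuery, testSegments);
    // With this data set one would expect only the two 2015-01 hourly segments to
    // fall inside the one-month window, i.e. the same result as the default period.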

Example 9 with SegmentMetadataQuery

Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.

In the class SegmentMetadataUnionQueryTest, the method testSegmentMetadataUnionQuery.

@Test
public void testSegmentMetadataUnionQuery() {
    SegmentAnalysis expected = new SegmentAnalysis(
            QueryRunnerTestHelper.SEGMENT_ID.toString(),
            Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
            ImmutableMap.of("placement", new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.toString(), false, false, mmap ? 43524 : 43056, 1, "preferred", "preferred", null)),
            mmap ? 800544 : 803324, 4836, null, null, null, null);
    SegmentMetadataQuery query = new Druids.SegmentMetadataQueryBuilder()
            .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE)
            .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
            .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
            .analysisTypes(SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE, SegmentMetadataQuery.AnalysisType.INTERVAL, SegmentMetadataQuery.AnalysisType.MINMAX)
            .build();
    List result = runner.run(QueryPlus.wrap(query)).toList();
    TestHelper.assertExpectedObjects(ImmutableList.of(expected), result, "failed SegmentMetadata union query");
}
Also used: SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) Druids(org.apache.druid.query.Druids) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
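The same union query can be narrowed by requesting fewer analysis types. A minimal sketch reusing the test's runner and constants; it assumes that analyses which are not requested are simply left unpopulated in the returned SegmentAnalysis.

    // Sketch: the same union query limited to CARDINALITY and SIZE analyses.
    SegmentMetadataQuery cardinalityAndSizeOnly = new Druids.SegmentMetadataQueryBuilder()
            .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE)
            .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
            .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
            .analysisTypes(SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE)
            .build();
    List results = runner.run(QueryPlus.wrap(cardinalityAndSizeOnly)).toList();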

Example 10 with SegmentMetadataQuery

Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.

In the class DumpSegment, the method runMetadata.

private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException {
    final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class))
            .copy()
            .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    final SegmentMetadataQuery query = new SegmentMetadataQuery(
            new TableDataSource("dataSource"),
            new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)),
            new ListColumnIncluderator(getColumnsToInclude(index)),
            false, null, EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class), false, false);
    withOutputStream(new Function<OutputStream, Object>() {

        @Override
        public Object apply(final OutputStream out) {
            evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), new Function<SegmentAnalysis, Object>() {

                @Override
                public Object apply(SegmentAnalysis analysis) {
                    try {
                        objectMapper.writeValue(out, analysis);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                    return null;
                }
            }));
            return null;
        }
    });
}
Also used: ListColumnIncluderator(org.apache.druid.query.metadata.metadata.ListColumnIncluderator) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) Json(org.apache.druid.guice.annotations.Json) IOException(java.io.IOException) TableDataSource(org.apache.druid.query.TableDataSource) SpecificSegmentSpec(org.apache.druid.query.spec.SpecificSegmentSpec) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
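Since Function is a functional interface, the same callback can be written with lambdas. A sketch of the equivalent body, assuming the withOutputStream, executeQuery, and evaluateSequenceForSideEffects helpers keep the signatures shown above:

    withOutputStream(out -> {
        // Walk the result sequence purely for its side effect of writing each
        // SegmentAnalysis to the output stream as JSON.
        evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), analysis -> {
            try {
                objectMapper.writeValue(out, analysis);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            return null;
        }));
        return null;
    });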

Aggregations

SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery): 23
Test (org.junit.Test): 16
SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis): 15
ListColumnIncluderator (org.apache.druid.query.metadata.metadata.ListColumnIncluderator): 13
ColumnAnalysis (org.apache.druid.query.metadata.metadata.ColumnAnalysis): 12
QueryRunner (org.apache.druid.query.QueryRunner): 11
QueryToolChest (org.apache.druid.query.QueryToolChest): 8
ExecutorService (java.util.concurrent.ExecutorService): 7
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 7
TableDataSource (org.apache.druid.query.TableDataSource): 5
HashMap (java.util.HashMap): 3
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 3
QueryPlus (org.apache.druid.query.QueryPlus): 3
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 3
ResponseContext (org.apache.druid.query.context.ResponseContext): 3
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 2
ImmutableList (com.google.common.collect.ImmutableList): 2
Ordering (com.google.common.collect.Ordering): 2
IOException (java.io.IOException): 2