Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testSerdeWithDefaultInterval.
@Test
public void testSerdeWithDefaultInterval() throws Exception
{
  String queryStr = "{\n"
                    + " \"queryType\":\"segmentMetadata\",\n"
                    + " \"dataSource\":\"test_ds\"\n"
                    + "}";
  Query query = MAPPER.readValue(queryStr, Query.class);
  Assert.assertTrue(query instanceof SegmentMetadataQuery);
  Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getTableNames()));
  Assert.assertEquals(Intervals.ETERNITY, query.getIntervals().get(0));
  Assert.assertTrue(((SegmentMetadataQuery) query).isUsingDefaultInterval());

  // test serialize and deserialize
  Assert.assertEquals(query, MAPPER.readValue(MAPPER.writeValueAsString(query), Query.class));

  // test copy
  Assert.assertEquals(query, Druids.SegmentMetadataQueryBuilder.copy((SegmentMetadataQuery) query).build());
}
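For comparison, the same default-interval query can be built programmatically instead of from JSON. The following is a minimal sketch, not taken from the Druid source; DefaultObjectMapper is an assumption standing in for the test's MAPPER.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.Druids;
import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery;

public class SegmentMetadataQuerySketch
{
  public static void main(String[] args) throws Exception
  {
    // Assumption: DefaultObjectMapper stands in for the Druid-configured MAPPER above.
    ObjectMapper mapper = new DefaultObjectMapper();
    // No intervals supplied, so the query falls back to the default (eternity)
    // interval, matching the behavior asserted in the test above.
    SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("test_ds")
        .build();
    System.out.println(query.isUsingDefaultInterval()); // true
    System.out.println(mapper.writeValueAsString(query));
  }
}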
Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testSerde.
@Test
public void testSerde() throws Exception
{
  String queryStr = "{\n"
                    + " \"queryType\":\"segmentMetadata\",\n"
                    + " \"dataSource\":\"test_ds\",\n"
                    + " \"intervals\":[\"2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z\"],\n"
                    + " \"analysisTypes\":[\"cardinality\",\"size\"]\n"
                    + "}";
  EnumSet<SegmentMetadataQuery.AnalysisType> expectedAnalysisTypes = EnumSet.of(
      SegmentMetadataQuery.AnalysisType.CARDINALITY,
      SegmentMetadataQuery.AnalysisType.SIZE
  );
  Query query = MAPPER.readValue(queryStr, Query.class);
  Assert.assertTrue(query instanceof SegmentMetadataQuery);
  Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getTableNames()));
  Assert.assertEquals(
      Intervals.of("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"),
      query.getIntervals().get(0)
  );
  Assert.assertEquals(expectedAnalysisTypes, ((SegmentMetadataQuery) query).getAnalysisTypes());

  // test serialize and deserialize
  Assert.assertEquals(query, MAPPER.readValue(MAPPER.writeValueAsString(query), Query.class));
}
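The builder equivalent of this JSON looks roughly as follows. A sketch under assumptions: it uses the intervals(QuerySegmentSpec) and analysisTypes(...) overloads seen in the union test further down, with MultipleIntervalSegmentSpec carrying the interval list.

// Sketch of the builder equivalent of the JSON above; assumes the
// intervals(QuerySegmentSpec) and analysisTypes(...) overloads used below.
SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
    .dataSource("test_ds")
    .intervals(new MultipleIntervalSegmentSpec(
        Collections.singletonList(Intervals.of("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"))))
    .analysisTypes(
        SegmentMetadataQuery.AnalysisType.CARDINALITY,
        SegmentMetadataQuery.AnalysisType.SIZE
    )
    .build();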
Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testDefaultIntervalAndFiltering.
@Test
public void testDefaultIntervalAndFiltering()
{
  SegmentMetadataQuery testQuery = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
      .merge(true)
      .build();

  /* No interval specified, should use default interval */
  Assert.assertTrue(testQuery.isUsingDefaultInterval());
  Assert.assertEquals(Intervals.ETERNITY, testQuery.getIntervals().get(0));
  Assert.assertEquals(1, testQuery.getIntervals().size());

  List<LogicalSegment> testSegments = Arrays.asList(
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2012-01-01/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2012-01-01T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2013-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2013-05-20/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2014-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2014-02-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      }
  );

  /* Test default period filter */
  List<LogicalSegment> filteredSegments =
      new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).filterSegments(testQuery, testSegments);
  List<LogicalSegment> expectedSegments = Arrays.asList(
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      }
  );
  Assert.assertEquals(2, filteredSegments.size());
  for (int i = 0; i < filteredSegments.size(); i++) {
    Assert.assertEquals(expectedSegments.get(i).getInterval(), filteredSegments.get(i).getInterval());
  }

  /* Test 2 year period filtering */
  SegmentMetadataQueryConfig twoYearPeriodCfg = new SegmentMetadataQueryConfig("P2Y");
  List<LogicalSegment> filteredSegments2 =
      new SegmentMetadataQueryQueryToolChest(twoYearPeriodCfg).filterSegments(testQuery, testSegments);
  List<LogicalSegment> expectedSegments2 = Arrays.asList(
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2013-05-20/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2014-01-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2014-02-05/P1D");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2015-01-19T01/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      },
      new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of("2015-01-20T02/PT1H");
        }

        @Override
        public Interval getTrueInterval() {
          return getInterval();
        }
      }
  );
  Assert.assertEquals(5, filteredSegments2.size());
  for (int i = 0; i < filteredSegments2.size(); i++) {
    Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval());
  }
}
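The anonymous LogicalSegment fixtures above differ only in the interval they return. A hypothetical factory method, not present in the Druid source, would collapse the boilerplate:

// Hypothetical helper (not in the Druid source): a LogicalSegment whose true
// interval equals its plain interval, as in every fixture in this test.
private static LogicalSegment segment(String interval)
{
  return new LogicalSegment() {
    @Override
    public Interval getInterval() {
      return Intervals.of(interval);
    }

    @Override
    public Interval getTrueInterval() {
      return getInterval();
    }
  };
}

// The eight-segment fixture then reduces to:
List<LogicalSegment> testSegments = Arrays.asList(
    segment("2012-01-01/P1D"),
    segment("2012-01-01T01/PT1H"),
    segment("2013-01-05/P1D"),
    segment("2013-05-20/P1D"),
    segment("2014-01-05/P1D"),
    segment("2014-02-05/P1D"),
    segment("2015-01-19T01/PT1H"),
    segment("2015-01-20T02/PT1H")
);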
Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.
From the class SegmentMetadataUnionQueryTest, method testSegmentMetadataUnionQuery.
@Test
public void testSegmentMetadataUnionQuery()
{
  SegmentAnalysis expected = new SegmentAnalysis(
      QueryRunnerTestHelper.SEGMENT_ID.toString(),
      Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(
              ColumnType.STRING,
              ValueType.STRING.toString(),
              false,                  // hasMultipleValues
              false,                  // hasNulls
              mmap ? 43524 : 43056,   // size
              1,                      // cardinality
              "preferred",            // minValue
              "preferred",            // maxValue
              null                    // errorMessage
          )
      ),
      mmap ? 800544 : 803324,         // size
      4836,                           // numRows
      null,                           // aggregators
      null,                           // timestampSpec
      null,                           // queryGranularity
      null                            // rollup
  );
  SegmentMetadataQuery query = new Druids.SegmentMetadataQueryBuilder()
      .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
      .analysisTypes(
          SegmentMetadataQuery.AnalysisType.CARDINALITY,
          SegmentMetadataQuery.AnalysisType.SIZE,
          SegmentMetadataQuery.AnalysisType.INTERVAL,
          SegmentMetadataQuery.AnalysisType.MINMAX
      )
      .build();
  List result = runner.run(QueryPlus.wrap(query)).toList();
  TestHelper.assertExpectedObjects(ImmutableList.of(expected), result, "failed SegmentMetadata union query");
}
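The union referenced through QueryRunnerTestHelper.UNION_DATA_SOURCE can also be constructed directly. A sketch under assumptions: the table names are placeholders, and a dataSource(DataSource) builder overload is assumed alongside the dataSource(String) overload used elsewhere on this page.

// Sketch only: a union over two hypothetical tables; "ds1" and "ds2" are placeholders.
DataSource union = new UnionDataSource(ImmutableList.of(
    new TableDataSource("ds1"),
    new TableDataSource("ds2")
));
SegmentMetadataQuery unionQuery = new Druids.SegmentMetadataQueryBuilder()
    .dataSource(union)
    .analysisTypes(SegmentMetadataQuery.AnalysisType.CARDINALITY)
    .build();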
Use of org.apache.druid.query.metadata.metadata.SegmentMetadataQuery in project druid by druid-io.
From the class DumpSegment, method runMetadata.
private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException
{
  final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class))
                                            .copy()
                                            .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
  final SegmentMetadataQuery query = new SegmentMetadataQuery(
      new TableDataSource("dataSource"),
      new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)),
      new ListColumnIncluderator(getColumnsToInclude(index)),
      false,                                                    // merge
      null,                                                     // context
      EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class),
      false,                                                    // usingDefaultInterval
      false                                                     // lenientAggregatorMerge
  );
  withOutputStream(
      new Function<OutputStream, Object>()
      {
        @Override
        public Object apply(final OutputStream out)
        {
          // Write each SegmentAnalysis to the output stream as JSON, draining
          // the sequence purely for its side effects.
          evaluateSequenceForSideEffects(
              Sequences.map(
                  executeQuery(injector, index, query),
                  new Function<SegmentAnalysis, Object>()
                  {
                    @Override
                    public Object apply(SegmentAnalysis analysis)
                    {
                      try {
                        objectMapper.writeValue(out, analysis);
                      }
                      catch (IOException e) {
                        throw new RuntimeException(e);
                      }
                      return null;
                    }
                  }
              )
          );
          return null;
        }
      }
  );
}
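The two anonymous Function classes above predate lambdas; written with lambdas the same flow reads more directly. A sketch, assuming withOutputStream, executeQuery, and evaluateSequenceForSideEffects keep the signatures used above:

// Sketch only: the same side-effecting pipeline with lambdas.
withOutputStream(out -> {
  // Map each SegmentAnalysis to a JSON write, then drain the sequence.
  evaluateSequenceForSideEffects(
      Sequences.map(executeQuery(injector, index, query), analysis -> {
        try {
          objectMapper.writeValue(out, analysis);
        }
        catch (IOException e) {
          throw new RuntimeException(e);
        }
        return null;
      })
  );
  return null;
});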