Example usage of org.apache.druid.timeline.LogicalSegment in the druid-io/druid project: class SegmentMetadataQueryTest, method testDefaultIntervalAndFiltering.
@Test
public void testDefaultIntervalAndFiltering() {
  // Build a segmentMetadata query with no explicit interval; it must fall back to the default.
  SegmentMetadataQuery testQuery = Druids.newSegmentMetadataQueryBuilder()
      .dataSource("testing")
      .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
      .merge(true)
      .build();

  /* No interval specified, should use default interval */
  Assert.assertTrue(testQuery.isUsingDefaultInterval());
  Assert.assertEquals(Intervals.ETERNITY, testQuery.getIntervals().get(0));
  // Fixed argument order: JUnit's assertEquals takes (expected, actual).
  Assert.assertEquals(1, testQuery.getIntervals().size());

  List<LogicalSegment> testSegments = Arrays.asList(
      logicalSegment("2012-01-01/P1D"),
      logicalSegment("2012-01-01T01/PT1H"),
      logicalSegment("2013-01-05/P1D"),
      logicalSegment("2013-05-20/P1D"),
      logicalSegment("2014-01-05/P1D"),
      logicalSegment("2014-02-05/P1D"),
      logicalSegment("2015-01-19T01/PT1H"),
      logicalSegment("2015-01-20T02/PT1H")
  );

  /* Test default period filter */
  List<LogicalSegment> filteredSegments =
      new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig())
          .filterSegments(testQuery, testSegments);
  List<LogicalSegment> expectedSegments = Arrays.asList(
      logicalSegment("2015-01-19T01/PT1H"),
      logicalSegment("2015-01-20T02/PT1H")
  );
  Assert.assertEquals(2, filteredSegments.size());
  for (int i = 0; i < filteredSegments.size(); i++) {
    Assert.assertEquals(expectedSegments.get(i).getInterval(), filteredSegments.get(i).getInterval());
  }

  /* Test 2 year period filtering */
  SegmentMetadataQueryConfig twoYearPeriodCfg = new SegmentMetadataQueryConfig("P2Y");
  List<LogicalSegment> filteredSegments2 =
      new SegmentMetadataQueryQueryToolChest(twoYearPeriodCfg)
          .filterSegments(testQuery, testSegments);
  List<LogicalSegment> expectedSegments2 = Arrays.asList(
      logicalSegment("2013-05-20/P1D"),
      logicalSegment("2014-01-05/P1D"),
      logicalSegment("2014-02-05/P1D"),
      logicalSegment("2015-01-19T01/PT1H"),
      logicalSegment("2015-01-20T02/PT1H")
  );
  Assert.assertEquals(5, filteredSegments2.size());
  for (int i = 0; i < filteredSegments2.size(); i++) {
    Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval());
  }
}

/**
 * Creates a {@link LogicalSegment} whose interval and true interval are both
 * the given ISO-8601 interval string. Replaces the repeated anonymous classes.
 */
private static LogicalSegment logicalSegment(String interval) {
  final Interval parsed = Intervals.of(interval);
  return new LogicalSegment() {
    @Override
    public Interval getInterval() {
      return parsed;
    }

    @Override
    public Interval getTrueInterval() {
      return parsed;
    }
  };
}
Example usage of org.apache.druid.timeline.LogicalSegment in the druid-io/druid project: class DataSourceMetadataQueryTest, method testFilterSegments.
@Test
public void testFilterSegments() {
  GenericQueryMetricsFactory queryMetricsFactory = DefaultGenericQueryMetricsFactory.instance();
  DataSourceQueryQueryToolChest toolChest = new DataSourceQueryQueryToolChest(queryMetricsFactory);

  // Mix of day-grain and hour-grain segments; the tool chest keeps only the most recent ones.
  List<LogicalSegment> segments = toolChest.filterSegments(null, Arrays.asList(
      logicalSegment("2012-01-01/P1D"),
      logicalSegment("2012-01-01T01/PT1H"),
      logicalSegment("2013-01-01/P1D"),
      logicalSegment("2013-01-01T01/PT1H"),
      logicalSegment("2013-01-01T02/PT1H")));

  // Fixed argument order: JUnit's assertEquals takes (expected, actual).
  Assert.assertEquals(2, segments.size());

  // should only have the latest segments.
  List<LogicalSegment> expected = Arrays.asList(
      logicalSegment("2013-01-01/P1D"),
      logicalSegment("2013-01-01T02/PT1H"));
  for (int i = 0; i < segments.size(); i++) {
    Assert.assertEquals(expected.get(i).getInterval(), segments.get(i).getInterval());
  }
}

/**
 * Creates a {@link LogicalSegment} whose interval and true interval are both
 * the given ISO-8601 interval string. Replaces the repeated anonymous classes.
 */
private static LogicalSegment logicalSegment(String interval) {
  final Interval parsed = Intervals.of(interval);
  return new LogicalSegment() {
    @Override
    public Interval getInterval() {
      return parsed;
    }

    @Override
    public Interval getTrueInterval() {
      return parsed;
    }
  };
}
Example usage of org.apache.druid.timeline.LogicalSegment in the druid-io/druid project: class SegmentMetadataQueryQueryToolChest, method filterSegments.
/**
 * Trims the segment list to the configured default-history window when the query
 * supplied no explicit intervals. Queries with explicit intervals, and lists of
 * zero or one segment, are returned unchanged.
 */
@Override
public <T extends LogicalSegment> List<T> filterSegments(SegmentMetadataQuery query, List<T> segments) {
  // Explicit intervals, or nothing worth trimming: pass the list through untouched.
  if (!query.isUsingDefaultInterval() || segments.size() <= 1) {
    return segments;
  }
  // Anchor the history window at the end of the newest (last) segment.
  final DateTime newestEnd = segments.get(segments.size() - 1).getInterval().getEnd();
  final Interval window = new Interval(config.getDefaultHistory(), newestEnd);
  // Keep only segments that overlap the window.
  return Lists.newArrayList(Iterables.filter(segments, segment -> segment.getInterval().overlaps(window)));
}
Example usage of org.apache.druid.timeline.LogicalSegment in the druid-io/druid project: class DataSourceMetadataQueryTest, method testFilterOverlappingSegments.
@Test
public void testFilterOverlappingSegments() {
  final GenericQueryMetricsFactory queryMetricsFactory = DefaultGenericQueryMetricsFactory.instance();
  final DataSourceQueryQueryToolChest toolChest = new DataSourceQueryQueryToolChest(queryMetricsFactory);

  // Some segments report a true interval wider than their visible interval
  // (e.g. a 2017/2018 segment split across 2017/2017-08-01 and 2017-08-02/2018).
  final List<LogicalSegment> segments = toolChest.filterSegments(null, ImmutableList.of(
      logicalSegment("2015/2016-08-01", "2015/2016-08-01"),
      logicalSegment("2016-08-01/2017", "2016-08-01/2017"),
      logicalSegment("2017/2017-08-01", "2017/2018"),
      logicalSegment("2017-08-01/2017-08-02", "2017-08-01/2017-08-02"),
      logicalSegment("2017-08-02/2018", "2017/2018")));

  // Only the pieces overlapping the latest true interval should survive.
  final List<LogicalSegment> expected = ImmutableList.of(
      logicalSegment("2017/2017-08-01", "2017/2018"),
      logicalSegment("2017-08-01/2017-08-02", "2017-08-01/2017-08-02"),
      logicalSegment("2017-08-02/2018", "2017/2018"));

  Assert.assertEquals(expected.size(), segments.size());
  for (int i = 0; i < expected.size(); i++) {
    Assert.assertEquals(expected.get(i).getInterval(), segments.get(i).getInterval());
    Assert.assertEquals(expected.get(i).getTrueInterval(), segments.get(i).getTrueInterval());
  }
}

/**
 * Creates a {@link LogicalSegment} with the given visible interval and (possibly wider)
 * true interval. Replaces the repeated anonymous classes.
 */
private static LogicalSegment logicalSegment(String interval, String trueInterval) {
  final Interval parsedInterval = Intervals.of(interval);
  final Interval parsedTrueInterval = Intervals.of(trueInterval);
  return new LogicalSegment() {
    @Override
    public Interval getInterval() {
      return parsedInterval;
    }

    @Override
    public Interval getTrueInterval() {
      return parsedTrueInterval;
    }
  };
}
Example usage of org.apache.druid.timeline.LogicalSegment in the druid-io/druid project: class SegmentMetadataQueryQueryToolChestTest, method testFilterSegments.
@Test
public void testFilterSegments() {
  final SegmentMetadataQueryConfig config = new SegmentMetadataQueryConfig();
  final SegmentMetadataQueryQueryToolChest toolChest = new SegmentMetadataQueryQueryToolChest(config);

  // A query using the default (unspecified) interval.
  final SegmentMetadataQuery query =
      Druids.newSegmentMetadataQueryBuilder().dataSource("foo").merge(true).build();

  // Four segments spanning more than a week; the oldest falls outside the default history.
  final List<LogicalSegment> segments = ImmutableList
      .of("2000-01-01/P1D", "2000-01-04/P1D", "2000-01-09/P1D", "2000-01-09/P1D")
      .stream()
      .map(interval -> new LogicalSegment() {
        @Override
        public Interval getInterval() {
          return Intervals.of(interval);
        }

        @Override
        public Interval getTrueInterval() {
          return Intervals.of(interval);
        }
      })
      .collect(Collectors.toList());

  final List<LogicalSegment> filteredSegments = toolChest.filterSegments(query, segments);

  // Default history window is one week, so only the last three segments remain.
  Assert.assertEquals(Period.weeks(1), config.getDefaultHistory());
  final List<Interval> actualIntervals =
      filteredSegments.stream().map(LogicalSegment::getInterval).collect(Collectors.toList());
  Assert.assertEquals(
      ImmutableList.of(
          Intervals.of("2000-01-04/P1D"),
          Intervals.of("2000-01-09/P1D"),
          Intervals.of("2000-01-09/P1D")),
      actualIntervals);
}
Aggregations