Use of org.apache.druid.indexer.path.UsedSegmentsRetriever in project druid by druid-io.
The example below is the testRunUpdateSegmentListIfDatasourcePathSpecIsUsed helper method from the HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest class.
private HadoopDruidIndexerConfig testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
    PathSpec datasourcePathSpec,
    @Nullable Interval jobInterval
) throws Exception
{
  // Minimal ingestion spec whose ioConfig carries the datasource path spec under test.
  HadoopIngestionSpec spec = new HadoopIngestionSpec(
      new DataSchema(
          "foo",
          null,
          new AggregatorFactory[0],
          new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(Intervals.of("2010-01-01/P1D"))),
          null,
          jsonMapper
      ),
      new HadoopIOConfig(jsonMapper.convertValue(datasourcePathSpec, Map.class), null, null),
      null
  );

  // Round-trip through JSON so the path spec is exercised via (de)serialization.
  spec = jsonMapper.readValue(jsonMapper.writeValueAsString(spec), HadoopIngestionSpec.class);

  // Mock the retriever: each datasource returns its known used segments for the job interval
  // clipped to that datasource's interval (or null when no job interval is given).
  UsedSegmentsRetriever segmentsRetriever = EasyMock.createMock(UsedSegmentsRetriever.class);
  EasyMock.expect(segmentsRetriever.retrieveUsedSegmentsForIntervals(
      TEST_DATA_SOURCE,
      Collections.singletonList(jobInterval != null ? jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL) : null),
      Segments.ONLY_VISIBLE
  )).andReturn(ImmutableList.of(SEGMENT));
  EasyMock.expect(segmentsRetriever.retrieveUsedSegmentsForIntervals(
      TEST_DATA_SOURCE2,
      Collections.singletonList(jobInterval != null ? jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL2) : null),
      Segments.ONLY_VISIBLE
  )).andReturn(ImmutableList.of(SEGMENT2));
  EasyMock.replay(segmentsRetriever);

  // Resolve the segment list for the datasource path spec using the mocked retriever,
  // then round-trip the updated spec into a HadoopDruidIndexerConfig.
  HadoopIngestionSpec.updateSegmentListIfDatasourcePathSpecIsUsed(spec, jsonMapper, segmentsRetriever);
  return HadoopDruidIndexerConfig.fromString(jsonMapper.writeValueAsString(spec));
}
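The EasyMock expectations above also imply the shape of the retriever contract. The following is a minimal sketch of an equivalent interface, inferred only from how it is called in the test; the sketch name UsedSegmentsRetrieverSketch, the Collection return type, and the omitted exception signature are assumptions, so consult the Druid source for the real UsedSegmentsRetriever definition.

import java.util.Collection;
import java.util.List;
import org.apache.druid.indexing.overlord.Segments;
import org.apache.druid.timeline.DataSegment;
import org.joda.time.Interval;

// Sketch of the contract exercised by the mock above: given a datasource name, a list of
// intervals, and a visibility filter, return the used segments covering those intervals.
// This is an illustrative paraphrase, not a copy of Druid's UsedSegmentsRetriever.
public interface UsedSegmentsRetrieverSketch
{
  Collection<DataSegment> retrieveUsedSegmentsForIntervals(
      String dataSource,
      List<Interval> intervals,
      Segments visibility  // e.g. Segments.ONLY_VISIBLE, as in the test
  );
}

Mocking this contract lets the test drive updateSegmentListIfDatasourcePathSpecIsUsed without a real metadata store: each datasource simply returns its canned segment list for the requested interval.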