use of org.apache.druid.segment.indexing.granularity.UniformGranularitySpec in project druid by druid-io.
the class IndexTaskTest method testOldSegmentNotDropWhenDropFlagTrueSinceIngestionIntervalDoesNotContainsOldSegment.
@Test
public void testOldSegmentNotDropWhenDropFlagTrueSinceIngestionIntervalDoesNotContainsOldSegment() throws Exception
{
  File tmpDir = temporaryFolder.newFolder();
  File tmpFile = File.createTempFile("druid", "index", tmpDir);
  try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
    writer.write("2014-01-01T00:00:10Z,a,1\n");
    writer.write("2014-01-01T01:00:20Z,b,1\n");
    writer.write("2014-01-01T02:00:30Z,c,1\n");
  }
  IndexTask indexTask = new IndexTask(
      null,
      null,
      createDefaultIngestionSpec(
          jsonMapper, tmpDir,
          new UniformGranularitySpec(
              Granularities.YEAR,
              Granularities.MINUTE,
              Collections.singletonList(Intervals.of("2014-01-01/2014-01-02"))
          ),
          null, createTuningConfigWithMaxRowsPerSegment(10, true), false, false
      ),
      null
  );
  // Ingest data with YEAR segment granularity
  List<DataSegment> segments = runTask(indexTask).rhs;
  Assert.assertEquals(1, segments.size());
  Set<DataSegment> usedSegmentsBeforeOverwrite = Sets.newHashSet(
      getSegmentsMetadataManager().iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(DATASOURCE, Intervals.ETERNITY, true).get()
  );
  Assert.assertEquals(1, usedSegmentsBeforeOverwrite.size());
  for (DataSegment segment : usedSegmentsBeforeOverwrite) {
    Assert.assertTrue(Granularities.YEAR.isAligned(segment.getInterval()));
  }
  indexTask = new IndexTask(
      null,
      null,
      createDefaultIngestionSpec(
          jsonMapper, tmpDir,
          new UniformGranularitySpec(
              Granularities.MINUTE,
              Granularities.MINUTE,
              Collections.singletonList(Intervals.of("2014-01-01/2014-01-02"))
          ),
          null, createTuningConfigWithMaxRowsPerSegment(10, true), false, true
      ),
      null
  );
  // Ingest data with overwrite and MINUTE segment granularity
  segments = runTask(indexTask).rhs;
  Assert.assertEquals(3, segments.size());
  Set<DataSegment> usedSegmentsBeforeAfterOverwrite = Sets.newHashSet(
      getSegmentsMetadataManager().iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(DATASOURCE, Intervals.ETERNITY, true).get()
  );
  Assert.assertEquals(4, usedSegmentsBeforeAfterOverwrite.size());
  int yearSegmentFound = 0;
  int minuteSegmentFound = 0;
  // The used segments after the overwrite should contain 1 old segment with YEAR segmentGranularity
  // (from the first ingestion) and 3 new segments with MINUTE segmentGranularity (from the second ingestion).
  for (DataSegment segment : usedSegmentsBeforeAfterOverwrite) {
    if (usedSegmentsBeforeOverwrite.contains(segment)) {
      Assert.assertTrue(Granularities.YEAR.isAligned(segment.getInterval()));
      yearSegmentFound++;
    } else {
      Assert.assertTrue(Granularities.MINUTE.isAligned(segment.getInterval()));
      minuteSegmentFound++;
    }
  }
  Assert.assertEquals(1, yearSegmentFound);
  Assert.assertEquals(3, minuteSegmentFound);
}
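Why the drop flag does not remove the old segment here: the overwrite only covers the ingestion interval 2014-01-01/2014-01-02, while the segment from the first run spans a whole year. A minimal standalone sketch of that interval reasoning (the class name and main wrapper are illustrative, not part of IndexTaskTest):

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.joda.time.Interval;

// Illustrative sketch, not Druid test code: the first ingestion produced a YEAR-aligned segment,
// but the second ingestion only covers a single day, so the old segment lies outside the
// overwritten interval and is not dropped even though the drop flag is true.
public class DropFlagIntervalSketch
{
  public static void main(String[] args)
  {
    Interval yearSegment = Intervals.of("2014-01-01/2015-01-01");      // interval of the YEAR segment
    Interval secondIngestion = Intervals.of("2014-01-01/2014-01-02");  // interval of the overwrite run
    System.out.println(Granularities.YEAR.isAligned(yearSegment));     // true: a full-year segment
    System.out.println(secondIngestion.contains(yearSegment));         // false: old segment not covered, so it stays used
  }
}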
use of org.apache.druid.segment.indexing.granularity.UniformGranularitySpec in project druid by druid-io.
the class IndexTaskTest method testPerfectRollup.
@Test
public void testPerfectRollup() throws Exception
{
  File tmpDir = temporaryFolder.newFolder();
  File tmpFile = File.createTempFile("druid", "index", tmpDir);
  populateRollupTestData(tmpFile);
  IndexTask indexTask = new IndexTask(
      null,
      null,
      createDefaultIngestionSpec(
          jsonMapper, tmpDir,
          new UniformGranularitySpec(Granularities.DAY, Granularities.DAY, true, null),
          null, createTuningConfig(3, 2, null, 2L, null, true, true), false, false
      ),
      null
  );
  final List<DataSegment> segments = runTask(indexTask).rhs;
  Assert.assertEquals(3, segments.size());
  for (int i = 0; i < 3; i++) {
    final DataSegment segment = segments.get(i);
    final Interval expectedInterval = Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z");
    Assert.assertEquals(DATASOURCE, segment.getDataSource());
    Assert.assertEquals(expectedInterval, segment.getInterval());
    Assert.assertEquals(HashBasedNumberedShardSpec.class, segment.getShardSpec().getClass());
    Assert.assertEquals(i, segment.getShardSpec().getPartitionNum());
  }
}
use of org.apache.druid.segment.indexing.granularity.UniformGranularitySpec in project druid by druid-io.
the class IndexTaskTest method testBestEffortRollup.
@Test
public void testBestEffortRollup() throws Exception
{
  File tmpDir = temporaryFolder.newFolder();
  File tmpFile = File.createTempFile("druid", "index", tmpDir);
  populateRollupTestData(tmpFile);
  IndexTask indexTask = new IndexTask(
      null,
      null,
      createDefaultIngestionSpec(
          jsonMapper, tmpDir,
          new UniformGranularitySpec(Granularities.DAY, Granularities.DAY, true, null),
          null, createTuningConfig(3, 2, null, 2L, null, false, true), false, false
      ),
      null
  );
  final List<DataSegment> segments = runTask(indexTask).rhs;
  Assert.assertEquals(5, segments.size());
  final Interval expectedInterval = Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z");
  for (int i = 0; i < 5; i++) {
    final DataSegment segment = segments.get(i);
    Assert.assertEquals(DATASOURCE, segment.getDataSource());
    Assert.assertEquals(expectedInterval, segment.getInterval());
    Assert.assertEquals(NumberedShardSpec.class, segment.getShardSpec().getClass());
    Assert.assertEquals(i, segment.getShardSpec().getPartitionNum());
  }
}
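The two rollup tests build the same DAY/DAY granularity spec; what differs is the tuning-config flag that forces guaranteed (perfect) rollup, which switches partitioning from dynamic NumberedShardSpec partitions (5 here) to a fixed set of HashBasedNumberedShardSpec partitions (3 here). A simplified, hypothetical sketch of the two partition-assignment styles (the helper names and hashing scheme are illustrative, not Druid's implementation):

import java.util.List;

// Hypothetical sketch, not Druid code: contrasts hash-based vs. dynamic partition assignment.
public class RollupPartitioningSketch
{
  // Perfect rollup (hash-based): the target partition is a pure function of the row's
  // dimension values, so identical rows always land in the same segment and roll up fully.
  static int hashPartition(List<String> dimensionValues, int numPartitions)
  {
    return Math.floorMod(dimensionValues.hashCode(), numPartitions);
  }

  // Best-effort rollup (dynamic, numbered): rows are appended in arrival order and a new
  // partition is opened whenever the current one fills up, so identical rows can end up
  // in different segments and only roll up within a segment.
  static int dynamicPartition(int rowIndex, int maxRowsPerSegment)
  {
    return rowIndex / maxRowsPerSegment;
  }

  public static void main(String[] args)
  {
    System.out.println(hashPartition(List.of("a", "1"), 3));  // same row -> same partition every time
    System.out.println(dynamicPartition(4, 2));               // 5th row -> partition 2
  }
}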
use of org.apache.druid.segment.indexing.granularity.UniformGranularitySpec in project druid by druid-io.
the class IndexTaskTest method testWaitForSegmentAvailabilityInvalidWaitTimeout.
@Test
public void testWaitForSegmentAvailabilityInvalidWaitTimeout() throws IOException
{
  final File tmpDir = temporaryFolder.newFolder();
  TaskToolbox mockToolbox = EasyMock.createMock(TaskToolbox.class);
  List<DataSegment> segmentsToWaitFor = new ArrayList<>();
  segmentsToWaitFor.add(EasyMock.createMock(DataSegment.class));
  IndexTask indexTask = new IndexTask(
      null,
      null,
      createDefaultIngestionSpec(
          jsonMapper, tmpDir,
          new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
          null, createTuningConfigWithMaxRowsPerSegment(2, true), false, false
      ),
      null
  );
  EasyMock.replay(mockToolbox);
  Assert.assertFalse(indexTask.waitForSegmentAvailability(mockToolbox, segmentsToWaitFor, -1));
  EasyMock.verify(mockToolbox);
}
use of org.apache.druid.segment.indexing.granularity.UniformGranularitySpec in project druid by druid-io.
the class IndexTaskTest method testWaitForSegmentAvailabilityNoSegments.
@Test
public void testWaitForSegmentAvailabilityNoSegments() throws IOException
{
  final File tmpDir = temporaryFolder.newFolder();
  TaskToolbox mockToolbox = EasyMock.createMock(TaskToolbox.class);
  List<DataSegment> segmentsToWaitFor = new ArrayList<>();
  IndexTask indexTask = new IndexTask(
      null,
      null,
      createDefaultIngestionSpec(
          jsonMapper, tmpDir,
          new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
          null, createTuningConfigWithMaxRowsPerSegment(2, true), false, false
      ),
      null
  );
  EasyMock.replay(mockToolbox);
  Assert.assertTrue(indexTask.waitForSegmentAvailability(mockToolbox, segmentsToWaitFor, 1000));
  EasyMock.verify(mockToolbox);
}
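Together, these two tests pin down the edge cases of waitForSegmentAvailability: a negative timeout fails immediately without touching the coordinator (hence the mock toolbox with no expectations), and an empty segment list succeeds immediately. A hypothetical helper mirroring just that contract (illustrative only, not the actual IndexTask implementation):

import java.util.List;
import org.apache.druid.timeline.DataSegment;

// Hypothetical contract sketch, not the real IndexTask.waitForSegmentAvailability:
// it reproduces only the two edge cases asserted above.
public class SegmentAvailabilitySketch
{
  static boolean waitForSegmentAvailabilitySketch(List<DataSegment> segments, long timeoutMillis)
  {
    if (timeoutMillis < 0) {
      return false;  // invalid wait timeout -> report unavailable immediately (the -1 case)
    }
    if (segments.isEmpty()) {
      return true;   // nothing to wait for -> trivially available (the empty-list case)
    }
    // The real task would poll segment availability until the timeout elapses;
    // that behavior is outside the scope of this sketch.
    throw new UnsupportedOperationException("polling not sketched");
  }
}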