Use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
From the class CompactSegmentsTest, method testDetermineSegmentGranularityFromSegmentsToCompact.
@Test
public void testDetermineSegmentGranularityFromSegmentsToCompact()
{
  String dataSourceName = DATA_SOURCE_PREFIX + 1;
  List<DataSegment> segments = new ArrayList<>();
  segments.add(new DataSegment(dataSourceName, Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00"), "1", null, ImmutableList.of(), ImmutableList.of(), shardSpecFactory.apply(0, 2), 0, 10L));
  segments.add(new DataSegment(dataSourceName, Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00"), "1", null, ImmutableList.of(), ImmutableList.of(), shardSpecFactory.apply(1, 2), 0, 10L));
  dataSources = DataSourcesSnapshot.fromUsedSegments(segments, ImmutableMap.of()).getUsedSegmentsTimelinesPerDataSource();
  final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
  final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
  final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
  // The config leaves segmentGranularity unset, so it must be inferred from the segments to compact.
  compactionConfigs.add(new DataSourceCompactionConfig(
      dataSourceName, 0, 500L, null,
      new Period("PT0H"), // smaller than segment interval
      new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
      null, null, null, null, null, null
  ));
  doCompactSegments(compactSegments, compactionConfigs);
  ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
  ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
  Mockito.verify(mockIndexingServiceClient).compactSegments(
      ArgumentMatchers.anyString(),
      segmentsCaptor.capture(),
      ArgumentMatchers.anyInt(),
      ArgumentMatchers.any(),
      granularitySpecArgumentCaptor.capture(),
      ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
  );
  Assert.assertEquals(2, segmentsCaptor.getValue().size());
  ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
  Assert.assertNotNull(actual);
  // Both segments cover the same day, so DAY is inferred as the segment granularity.
  ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null);
  Assert.assertEquals(expected, actual);
}
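The inferred DAY value follows from the segments themselves: both segments cover exactly one calendar day, so their shared interval lines up with a single DAY bucket. Below is a minimal, self-contained sketch of that relationship; it uses only the standard Druid and Joda-Time classes, not the test fixture above, and it illustrates why the inference lands on DAY rather than showing the coordinator's actual inference code.

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.joda.time.Interval;

public class GranularityInferenceSketch
{
  public static void main(String[] args)
  {
    // The interval shared by both segments in the test above.
    Interval segmentInterval = Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00");
    // A DAY bucket starting at the interval's start covers the interval exactly,
    // which is why auto compaction can infer DAY when the compaction config
    // leaves segmentGranularity unset.
    Interval dayBucket = Granularities.DAY.bucket(segmentInterval.getStart());
    System.out.println(dayBucket.equals(segmentInterval)); // true
  }
}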
Use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
From the class CompactSegmentsTest, method testMakeStatsForDataSourceWithCompactedIntervalBetweenNonCompactedIntervals.
@Test
public void testMakeStatsForDataSourceWithCompactedIntervalBetweenNonCompactedIntervals()
{
  // Only test and validate one datasource for simplicity.
  // This datasource has three intervals already compacted (3 intervals, 120 bytes, 12 segments).
  String dataSourceName = DATA_SOURCE_PREFIX + 1;
  List<DataSegment> segments = new ArrayList<>();
  for (int j : new int[]{0, 1, 2, 3, 7, 8}) {
    for (int k = 0; k < PARTITION_PER_TIME_INTERVAL; k++) {
      DataSegment beforeNoon = createSegment(dataSourceName, j, true, k);
      DataSegment afterNoon = createSegment(dataSourceName, j, false, k);
      if (j == 3) {
        // Make both intervals on this day compacted (two compacted intervals back to back).
        beforeNoon = beforeNoon.withLastCompactionState(new CompactionState(partitionsSpec, null, null, null, ImmutableMap.of(), ImmutableMap.of()));
        afterNoon = afterNoon.withLastCompactionState(new CompactionState(partitionsSpec, null, null, null, ImmutableMap.of(), ImmutableMap.of()));
      }
      if (j == 1) {
        // Make only one interval on this day compacted.
        afterNoon = afterNoon.withLastCompactionState(new CompactionState(partitionsSpec, null, null, null, ImmutableMap.of(), ImmutableMap.of()));
      }
      segments.add(beforeNoon);
      segments.add(afterNoon);
    }
  }
  dataSources = DataSourcesSnapshot.fromUsedSegments(segments, ImmutableMap.of()).getUsedSegmentsTimelinesPerDataSource();
  final TestDruidLeaderClient leaderClient = new TestDruidLeaderClient(JSON_MAPPER);
  leaderClient.start();
  final HttpIndexingServiceClient indexingServiceClient = new HttpIndexingServiceClient(JSON_MAPPER, leaderClient);
  final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, indexingServiceClient);
  // Before any compaction, we do not have any snapshot of compactions.
  Map<String, AutoCompactionSnapshot> autoCompactionSnapshots = compactSegments.getAutoCompactionSnapshot();
  Assert.assertEquals(0, autoCompactionSnapshots.size());
  // 3 intervals, 120 bytes, 12 segments already compacted before the first run.
  for (int compactionRunCount = 0; compactionRunCount < 8; compactionRunCount++) {
    // Do a cycle of auto compaction, which creates one compaction task.
    final CoordinatorStats stats = doCompactSegments(compactSegments);
    Assert.assertEquals(1, stats.getGlobalStat(CompactSegments.COMPACTION_TASK_COUNT));
    verifySnapshot(
        compactSegments,
        AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
        dataSourceName,
        TOTAL_BYTE_PER_DATASOURCE - 120 - 40 * (compactionRunCount + 1),
        120 + 40 * (compactionRunCount + 1),
        0,
        TOTAL_INTERVAL_PER_DATASOURCE - 3 - (compactionRunCount + 1),
        3 + (compactionRunCount + 1),
        0,
        TOTAL_SEGMENT_PER_DATASOURCE - 12 - 4 * (compactionRunCount + 1),
        // Each previous auto compaction run resulted in 2 compacted segments (4 segments compacted into 2).
        12 + 4 + 2 * compactionRunCount,
        0
    );
  }
  // Verify that the stats do not change (and remain correct) when auto compaction runs and everything is already fully compacted.
  final CoordinatorStats stats = doCompactSegments(compactSegments);
  Assert.assertEquals(0, stats.getGlobalStat(CompactSegments.COMPACTION_TASK_COUNT));
  verifySnapshot(
      compactSegments,
      AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
      dataSourceName,
      0,
      TOTAL_BYTE_PER_DATASOURCE,
      0,
      0,
      TOTAL_INTERVAL_PER_DATASOURCE,
      0,
      0,
      // The 32 segments needing compaction have now been compacted into 16 (4 segments become 2 each run).
      12 + 16,
      0
  );
}
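The arithmetic behind the loop's assertions is easier to follow written out. Going by the test's own comments, each interval holds 4 segments of 10 bytes, 3 intervals (120 bytes, 12 segments) are compacted before the first run, and each run compacts one more interval's 4 segments into 2. A small sketch of that bookkeeping follows; the constants are assumptions read off the comments above, not values pulled from the test fixture.

public class SnapshotArithmeticSketch
{
  // Assumed from the test's comments: 4 segments of 10 bytes per interval,
  // with 3 intervals (120 bytes, 12 segments) compacted before the first run.
  static final int SEGMENTS_PER_INTERVAL = 4;
  static final long BYTES_PER_SEGMENT = 10L;
  static final long PRE_COMPACTED_BYTES = 120L;
  static final int PRE_COMPACTED_SEGMENTS = 12;

  // Bytes counted as compacted after 0-based run N: the pre-compacted 120 bytes
  // plus one 40-byte interval per completed run. Matches 120 + 40 * (N + 1).
  static long bytesCompacted(int run)
  {
    return PRE_COMPACTED_BYTES + SEGMENTS_PER_INTERVAL * BYTES_PER_SEGMENT * (run + 1);
  }

  // Compacted segment count after run N: the run that just finished contributes
  // its 4 input segments; each earlier run's 4 inputs have since been rewritten
  // as 2 compacted segments. Matches 12 + 4 + 2 * N.
  static int segmentsCompacted(int run)
  {
    return PRE_COMPACTED_SEGMENTS + SEGMENTS_PER_INTERVAL + 2 * run;
  }

  public static void main(String[] args)
  {
    for (int run = 0; run < 8; run++) {
      System.out.printf("run %d: %d bytes, %d segments%n", run, bytesCompacted(run), segmentsCompacted(run));
    }
  }
}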
Use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
From the class CompactSegmentsTest, method testCompactWithoutDimensionSpec.
@Test
public void testCompactWithoutDimensionSpec()
{
  final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
  final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
  final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
  final String dataSource = DATA_SOURCE_PREFIX + 0;
  compactionConfigs.add(new DataSourceCompactionConfig(
      dataSource, 0, 500L, null,
      new Period("PT0H"), // smaller than segment interval
      new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
      null, null, null, null, null, null
  ));
  doCompactSegments(compactSegments, compactionConfigs);
  ArgumentCaptor<ClientCompactionTaskDimensionsSpec> dimensionsSpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskDimensionsSpec.class);
  Mockito.verify(mockIndexingServiceClient).compactSegments(
      ArgumentMatchers.anyString(), ArgumentMatchers.any(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(), ArgumentMatchers.any(),
      dimensionsSpecArgumentCaptor.capture(),
      ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
  );
  // With no dimensionsSpec configured, the compaction task is submitted with a null dimensions spec.
  ClientCompactionTaskDimensionsSpec actual = dimensionsSpecArgumentCaptor.getValue();
  Assert.assertNull(actual);
}
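Assert.assertNull is meaningful here only because Mockito.verify has already confirmed that compactSegments was invoked; the captor then reports the null that was actually passed, so "called with null" is distinguishable from "never called". A minimal standalone sketch of that distinction follows; the Service interface is hypothetical, standing in for HttpIndexingServiceClient, and only well-known Mockito calls are used.

import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;

public class CaptorNullSketch
{
  // Hypothetical interface standing in for HttpIndexingServiceClient.
  interface Service
  {
    void submit(String payload);
  }

  public static void main(String[] args)
  {
    Service mock = Mockito.mock(Service.class);
    mock.submit(null); // the code under test passes null explicitly

    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
    // verify() fails if submit was never called, so a captured null proves
    // the method was invoked with null rather than not invoked at all.
    Mockito.verify(mock).submit(captor.capture());
    System.out.println(captor.getValue() == null); // true
  }
}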
Use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
From the class CompactSegmentsTest, method testRunMultipleCompactionTaskSlotsWithUseAutoScaleSlotsOverMaxSlot.
@Test
public void testRunMultipleCompactionTaskSlotsWithUseAutoScaleSlotsOverMaxSlot()
{
  int maxCompactionSlot = 3;
  // The configured maximum must be below the auto-scale capacity for this test to exercise the cap.
  Assert.assertTrue(maxCompactionSlot < MAXIMUM_CAPACITY_WITH_AUTO_SCALE);
  final TestDruidLeaderClient leaderClient = new TestDruidLeaderClient(JSON_MAPPER);
  leaderClient.start();
  final HttpIndexingServiceClient indexingServiceClient = new HttpIndexingServiceClient(JSON_MAPPER, leaderClient);
  final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, indexingServiceClient);
  final CoordinatorStats stats = doCompactSegments(compactSegments, createCompactionConfigs(), maxCompactionSlot, true);
  // Even with auto-scale slots enabled, the slot counts are capped at the configured maximum.
  Assert.assertEquals(maxCompactionSlot, stats.getGlobalStat(CompactSegments.AVAILABLE_COMPACTION_TASK_SLOT));
  Assert.assertEquals(maxCompactionSlot, stats.getGlobalStat(CompactSegments.MAX_COMPACTION_TASK_SLOT));
  Assert.assertEquals(maxCompactionSlot, stats.getGlobalStat(CompactSegments.COMPACTION_TASK_COUNT));
}
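The expectation the three assertions encode is a simple clamp: even when auto scaling could provide MAXIMUM_CAPACITY_WITH_AUTO_SCALE workers, the configured maximum wins. A one-method sketch of that relationship follows; the names are illustrative, not the coordinator's actual fields or methods.

public class SlotClampSketch
{
  // Illustrative: the usable number of compaction task slots is the smaller of
  // the configured maximum and whatever capacity auto scaling could reach.
  static int availableCompactionSlots(int configuredMax, int autoScaleCapacity)
  {
    return Math.min(configuredMax, autoScaleCapacity);
  }

  public static void main(String[] args)
  {
    // Mirrors the test: a configured max of 3 against a larger auto-scale capacity.
    System.out.println(availableCompactionSlots(3, 10)); // 3
  }
}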
Use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
From the class CompactSegmentsTest, method testDetermineSegmentGranularityFromSegmentGranularityInCompactionConfig.
@Test
public void testDetermineSegmentGranularityFromSegmentGranularityInCompactionConfig()
{
  String dataSourceName = DATA_SOURCE_PREFIX + 1;
  List<DataSegment> segments = new ArrayList<>();
  segments.add(new DataSegment(dataSourceName, Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00"), "1", null, ImmutableList.of(), ImmutableList.of(), shardSpecFactory.apply(0, 2), 0, 10L));
  segments.add(new DataSegment(dataSourceName, Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00"), "1", null, ImmutableList.of(), ImmutableList.of(), shardSpecFactory.apply(1, 2), 0, 10L));
  dataSources = DataSourcesSnapshot.fromUsedSegments(segments, ImmutableMap.of()).getUsedSegmentsTimelinesPerDataSource();
  final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
  final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
  final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
  // Unlike the inference test above, this config sets segmentGranularity to YEAR explicitly.
  compactionConfigs.add(new DataSourceCompactionConfig(
      dataSourceName, 0, 500L, null,
      new Period("PT0H"), // smaller than segment interval
      new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
      new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
      null, null, null, null, null
  ));
  doCompactSegments(compactSegments, compactionConfigs);
  ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
  ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
  Mockito.verify(mockIndexingServiceClient).compactSegments(
      ArgumentMatchers.anyString(),
      segmentsCaptor.capture(),
      ArgumentMatchers.anyInt(),
      ArgumentMatchers.any(),
      granularitySpecArgumentCaptor.capture(),
      ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
  );
  Assert.assertEquals(2, segmentsCaptor.getValue().size());
  ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
  Assert.assertNotNull(actual);
  // The configured YEAR granularity takes precedence over the DAY granularity of the segments.
  ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
  Assert.assertEquals(expected, actual);
}
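Contrasted with testDetermineSegmentGranularityFromSegmentsToCompact above, this pins down a precedence rule: a segmentGranularity set in the compaction config overrides whatever would be inferred from the segments. A compact sketch of that precedence follows; chooseSegmentGranularity is a hypothetical helper written for illustration, not a method from the Druid codebase.

import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;

public class GranularityPrecedenceSketch
{
  // Hypothetical helper mirroring the behavior the two tests pin down:
  // prefer the configured granularity, fall back to one inferred from segments.
  static Granularity chooseSegmentGranularity(Granularity configured, Granularity inferredFromSegments)
  {
    return configured != null ? configured : inferredFromSegments;
  }

  public static void main(String[] args)
  {
    // With YEAR configured (this test), YEAR wins over the inferred DAY.
    System.out.println(chooseSegmentGranularity(Granularities.YEAR, Granularities.DAY) == Granularities.YEAR); // true
    // With nothing configured (the earlier test), the inferred DAY is used.
    System.out.println(chooseSegmentGranularity(null, Granularities.DAY) == Granularities.DAY); // true
  }
}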