use of org.apache.druid.server.coordinator.DataSourceCompactionConfig in project druid by druid-io.
In the class NewestSegmentFirstIteratorTest, the method testFindPartitionsSpecFromConfigWithDeprecatedMaxRowsPerSegmentAndMaxTotalRowsReturnGivenValues:
@Test
public void testFindPartitionsSpecFromConfigWithDeprecatedMaxRowsPerSegmentAndMaxTotalRowsReturnGivenValues() {
// maxRowsPerSegment is given only through the deprecated top-level field (100) and maxTotalRows
// through the tuning config (1000L); both should survive into the resulting partitions spec.
final DataSourceCompactionConfig config = new DataSourceCompactionConfig(
    "datasource", null, null, 100, null,
    new UserCompactionTaskQueryTuningConfig(null, null, 1000L, null, null, null, null, null, null, null, null, null, null, null, null, null, null),
    null, null, null, null, null, null
);
Assert.assertEquals(
    new DynamicPartitionsSpec(100, 1000L),
    NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(config.getTuningConfig(), config.getMaxRowsPerSegment()))
);
}
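A minimal sketch of the fallback this test exercises, shown in isolation. The class and the helper resolveDynamicPartitionsSpec below are hypothetical, not Druid APIs; only DynamicPartitionsSpec (assumed to live in the usual org.apache.druid.indexer.partitions package) comes from the example above, and the values mirror the expected new DynamicPartitionsSpec(100, 1000L).

import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;

public class DeprecatedMaxRowsFallbackSketch {
  // Hypothetical helper: with no explicit partitionsSpec in the tuning config, fall back to a
  // DynamicPartitionsSpec built from the deprecated top-level maxRowsPerSegment and the
  // tuning config's maxTotalRows.
  static DynamicPartitionsSpec resolveDynamicPartitionsSpec(Integer deprecatedMaxRowsPerSegment, Long maxTotalRows) {
    return new DynamicPartitionsSpec(deprecatedMaxRowsPerSegment, maxTotalRows);
  }

  public static void main(String[] args) {
    // Mirrors the test's inputs: maxRowsPerSegment = 100 (deprecated field), maxTotalRows = 1000L.
    DynamicPartitionsSpec spec = resolveDynamicPartitionsSpec(100, 1000L);
    System.out.println(spec); // expected to equal new DynamicPartitionsSpec(100, 1000L)
  }
}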
use of org.apache.druid.server.coordinator.DataSourceCompactionConfig in project druid by druid-io.
In the class NewestSegmentFirstIteratorTest, the method testFindPartitionsSpecFromConfigWithDeprecatedMaxRowsPerSegmentAndPartitionsSpecIgnoreDeprecatedOne:
@Test
public void testFindPartitionsSpecFromConfigWithDeprecatedMaxRowsPerSegmentAndPartitionsSpecIgnoreDeprecatedOne() {
// The tuning config carries an explicit DynamicPartitionsSpec, so the deprecated top-level
// maxRowsPerSegment (100) should be ignored.
final DataSourceCompactionConfig config = new DataSourceCompactionConfig(
    "datasource", null, null, 100, null,
    new UserCompactionTaskQueryTuningConfig(null, null, null, null, new DynamicPartitionsSpec(null, null), null, null, null, null, null, null, null, null, null, null, null, null),
    null, null, null, null, null, null
);
Assert.assertEquals(
    new DynamicPartitionsSpec(null, Long.MAX_VALUE),
    NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(config.getTuningConfig(), config.getMaxRowsPerSegment()))
);
}
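A hedged sketch of the precedence checked above. normalizePartitionsSpec is a hypothetical helper, not the actual NewestSegmentFirstIterator logic; it only restates what the assertion implies: an explicit partitionsSpec in the tuning config wins over the deprecated top-level maxRowsPerSegment, and an absent maxTotalRows surfaces as Long.MAX_VALUE.

import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;

public class PartitionsSpecPrecedenceSketch {
  // Hypothetical helper restating the precedence the test asserts.
  static DynamicPartitionsSpec normalizePartitionsSpec(
      Integer explicitMaxRowsPerSegment,   // from the tuning config's partitionsSpec (null in the test)
      Long explicitMaxTotalRows,           // from the tuning config's partitionsSpec (null in the test)
      boolean hasExplicitPartitionsSpec,   // true when the tuning config carries a partitionsSpec
      Integer deprecatedMaxRowsPerSegment  // deprecated top-level value (100 in the test)
  ) {
    if (hasExplicitPartitionsSpec) {
      // The explicit spec wins and a missing maxTotalRows is treated as "no limit".
      return new DynamicPartitionsSpec(
          explicitMaxRowsPerSegment,
          explicitMaxTotalRows == null ? Long.MAX_VALUE : explicitMaxTotalRows
      );
    }
    return new DynamicPartitionsSpec(deprecatedMaxRowsPerSegment, null);
  }

  public static void main(String[] args) {
    // Mirrors the test: DynamicPartitionsSpec(null, null) in the tuning config, deprecated value 100.
    System.out.println(normalizePartitionsSpec(null, null, true, 100));
  }
}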
use of org.apache.druid.server.coordinator.DataSourceCompactionConfig in project druid by druid-io.
In the class CoordinatorCompactionConfigsResourceTest, the method testAddOrUpdateCompactionConfigWithExistingConfig:
@Test
public void testAddOrUpdateCompactionConfigWithExistingConfig() {
final ArgumentCaptor<byte[]> oldConfigCaptor = ArgumentCaptor.forClass(byte[].class);
final ArgumentCaptor<CoordinatorCompactionConfig> newConfigCaptor = ArgumentCaptor.forClass(CoordinatorCompactionConfig.class);
Mockito.when(mockJacksonConfigManager.set(ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY), oldConfigCaptor.capture(), newConfigCaptor.capture(), ArgumentMatchers.any())).thenReturn(ConfigManager.SetResult.ok());
final DataSourceCompactionConfig newConfig = new DataSourceCompactionConfig(
    "dataSource", null, 500L, null, new Period(3600), null,
    new UserCompactionTaskGranularityConfig(Granularities.HOUR, null, true),
    null, null, null, null,
    ImmutableMap.of("key", "val")
);
String author = "maytas";
String comment = "hello";
Response result = coordinatorCompactionConfigsResource.addOrUpdateCompactionConfig(newConfig, author, comment, mockHttpServletRequest);
Assert.assertEquals(Response.Status.OK.getStatusCode(), result.getStatus());
Assert.assertNotNull(oldConfigCaptor.getValue());
Assert.assertEquals(oldConfigCaptor.getValue(), OLD_CONFIG_IN_BYTES);
Assert.assertNotNull(newConfigCaptor.getValue());
Assert.assertEquals(2, newConfigCaptor.getValue().getCompactionConfigs().size());
Assert.assertEquals(OLD_CONFIG, newConfigCaptor.getValue().getCompactionConfigs().get(0));
Assert.assertEquals(newConfig, newConfigCaptor.getValue().getCompactionConfigs().get(1));
}
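The capture-then-assert pattern used by this test, reduced to a self-contained sketch. ConfigStore and its set(...) method are hypothetical stand-ins for JacksonConfigManager, not Druid APIs; the Mockito and JUnit calls (ArgumentCaptor.forClass, capture(), getValue(), Assert.assertEquals) are the same ones the test relies on.

import org.junit.Assert;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;

public class ArgumentCaptorSketchTest {
  // Hypothetical collaborator standing in for JacksonConfigManager.
  interface ConfigStore {
    boolean set(String key, byte[] oldValue, String newValue);
  }

  @Test
  public void capturesArgumentsPassedToTheMock() {
    ConfigStore store = Mockito.mock(ConfigStore.class);
    ArgumentCaptor<String> newValueCaptor = ArgumentCaptor.forClass(String.class);
    Mockito.when(store.set(ArgumentMatchers.eq("compaction"), ArgumentMatchers.any(), newValueCaptor.capture()))
           .thenReturn(true);

    // Code under test would normally make this call; here we call the mock directly.
    store.set("compaction", new byte[0], "updated-config");

    // The captor records what was actually passed to the mock.
    Assert.assertEquals("updated-config", newValueCaptor.getValue());
  }
}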
use of org.apache.druid.server.coordinator.DataSourceCompactionConfig in project druid by druid-io.
In the class CompactSegmentsTest, the method testMakeStats:
@Test
public void testMakeStats() {
final TestDruidLeaderClient leaderClient = new TestDruidLeaderClient(JSON_MAPPER);
leaderClient.start();
final HttpIndexingServiceClient indexingServiceClient = new HttpIndexingServiceClient(JSON_MAPPER, leaderClient);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, indexingServiceClient);
// Before any compaction, we do not have any snapshot of compactions
Map<String, AutoCompactionSnapshot> autoCompactionSnapshots = compactSegments.getAutoCompactionSnapshot();
Assert.assertEquals(0, autoCompactionSnapshots.size());
for (int compactionRunCount = 0; compactionRunCount < 11; compactionRunCount++) {
doCompactionAndAssertCompactSegmentStatistics(compactSegments, compactionRunCount);
}
// Test that stats do not change (and remain correct) when auto compaction runs after everything is fully compacted
final CoordinatorStats stats = doCompactSegments(compactSegments);
Assert.assertEquals(0, stats.getGlobalStat(CompactSegments.COMPACTION_TASK_COUNT));
for (int i = 0; i < 3; i++) {
verifySnapshot(compactSegments, AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING, DATA_SOURCE_PREFIX + i, 0, TOTAL_BYTE_PER_DATASOURCE, 0, 0, TOTAL_INTERVAL_PER_DATASOURCE, 0, 0, TOTAL_SEGMENT_PER_DATASOURCE / 2, 0);
}
// Run auto compaction with auto compaction disabled for one datasource.
// The snapshot should not contain the datasource with auto compaction disabled.
List<DataSourceCompactionConfig> removedOneConfig = createCompactionConfigs();
removedOneConfig.remove(0);
doCompactSegments(compactSegments, removedOneConfig);
for (int i = 1; i < 3; i++) {
verifySnapshot(compactSegments, AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING, DATA_SOURCE_PREFIX + i, 0, TOTAL_BYTE_PER_DATASOURCE, 0, 0, TOTAL_INTERVAL_PER_DATASOURCE, 0, 0, TOTAL_SEGMENT_PER_DATASOURCE / 2, 0);
}
// Run auto compaction without any dataSource in the compaction config
// Snapshot should be empty
doCompactSegments(compactSegments, new ArrayList<>());
Assert.assertEquals(0, stats.getGlobalStat(CompactSegments.COMPACTION_TASK_COUNT));
Assert.assertTrue(compactSegments.getAutoCompactionSnapshot().isEmpty());
assertLastSegmentNotCompacted(compactSegments);
}
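A hedged sketch of the snapshot bookkeeping asserted above. The datasource names and the plain HashMap below are placeholders; the real map comes from compactSegments.getAutoCompactionSnapshot() and holds AutoCompactionSnapshot values whose accessors are not shown in the test. The sketch only illustrates the keying rule the assertions rely on: one entry per datasource with auto compaction enabled, none otherwise.

import java.util.HashMap;
import java.util.Map;

public class SnapshotBookkeepingSketch {
  public static void main(String[] args) {
    // Placeholder for compactSegments.getAutoCompactionSnapshot(): one entry per configured datasource.
    Map<String, Object> snapshots = new HashMap<>();
    snapshots.put("ds_0", new Object());
    snapshots.put("ds_1", new Object());
    snapshots.put("ds_2", new Object());

    // Removing one datasource's compaction config should drop its snapshot entry.
    snapshots.remove("ds_0");
    System.out.println(snapshots.containsKey("ds_0")); // false

    // With no datasources configured, the snapshot map should be empty.
    snapshots.clear();
    System.out.println(snapshots.isEmpty()); // true
  }
}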
use of org.apache.druid.server.coordinator.DataSourceCompactionConfig in project druid by druid-io.
In the class CompactSegmentsTest, the method testCompactWithGranularitySpecConflictWithActiveCompactionTask:
@Test
public void testCompactWithGranularitySpecConflictWithActiveCompactionTask() {
final String dataSource = DATA_SOURCE_PREFIX + 0;
final String conflictTaskId = "taskIdDummy";
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
TaskStatusPlus runningConflictCompactionTask = new TaskStatusPlus(conflictTaskId, "groupId", "compact", DateTimes.EPOCH, DateTimes.EPOCH, TaskState.RUNNING, RunnerTaskState.RUNNING, -1L, TaskLocation.unknown(), dataSource, null);
TaskPayloadResponse runningConflictCompactionTaskPayload = new TaskPayloadResponse(
    conflictTaskId,
    new ClientCompactionTaskQuery(
        conflictTaskId,
        dataSource,
        new ClientCompactionIOConfig(new ClientCompactionIntervalSpec(Intervals.of("2000/2099"), "testSha256OfSortedSegmentIds"), null),
        null,
        new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null),
        null, null, null, null
    )
);
Mockito.when(mockIndexingServiceClient.getActiveTasks()).thenReturn(ImmutableList.of(runningConflictCompactionTask));
Mockito.when(mockIndexingServiceClient.getTaskPayload(ArgumentMatchers.eq(conflictTaskId))).thenReturn(runningConflictCompactionTaskPayload);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSource,
    0,
    500L,
    null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
    new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
    null, null, null, null, null
));
doCompactSegments(compactSegments, compactionConfigs);
// Verify that the conflicting task was canceled
Mockito.verify(mockIndexingServiceClient).cancelTask(conflictTaskId);
// The active conflicting task covers the interval 2000/2099.
// Make sure that we do not skip the conflicting task's interval,
// since we cancel the task and will have to compact that interval with the new segmentGranularity.
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(ArgumentMatchers.anyString(), segmentsCaptor.capture(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(), granularitySpecArgumentCaptor.capture(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any());
// All segments are compacted at the same time since we changed the segment granularity to YEAR and all segments
// are within the same year
Assert.assertEquals(datasourceToSegments.get(dataSource).size(), segmentsCaptor.getValue().size());
ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
Assert.assertEquals(expected, actual);
}
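A hedged sketch of the conflict handling this test verifies. shouldCancelRunningCompaction is a hypothetical helper, not the CompactSegments implementation; it only captures the rule the assertions imply: when a running compaction task's segmentGranularity (DAY) differs from the granularity in the current compaction config (YEAR), the running task is canceled so its interval can be recompacted with the new granularity.

import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;

public class GranularityConflictSketch {
  // Hypothetical helper restating the cancellation rule the test asserts.
  static boolean shouldCancelRunningCompaction(Granularity runningTaskGranularity, Granularity configuredGranularity) {
    // With no granularity on either side there is nothing to conflict with in this sketch.
    if (configuredGranularity == null || runningTaskGranularity == null) {
      return false;
    }
    return !runningTaskGranularity.equals(configuredGranularity);
  }

  public static void main(String[] args) {
    // Mirrors the test: running task uses DAY, the config now asks for YEAR, so the task should be canceled.
    System.out.println(shouldCancelRunningCompaction(Granularities.DAY, Granularities.YEAR)); // true
  }
}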