use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
the class CompactSegmentsTest method testCompactWithGranularitySpecConflictWithActiveCompactionTask.
@Test
public void testCompactWithGranularitySpecConflictWithActiveCompactionTask() {
final String dataSource = DATA_SOURCE_PREFIX + 0;
final String conflictTaskId = "taskIdDummy";
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
TaskStatusPlus runningConflictCompactionTask = new TaskStatusPlus(
    conflictTaskId, "groupId", "compact", DateTimes.EPOCH, DateTimes.EPOCH,
    TaskState.RUNNING, RunnerTaskState.RUNNING, -1L, TaskLocation.unknown(), dataSource, null
);
TaskPayloadResponse runningConflictCompactionTaskPayload = new TaskPayloadResponse(
    conflictTaskId,
    new ClientCompactionTaskQuery(
        conflictTaskId, dataSource,
        new ClientCompactionIOConfig(
            new ClientCompactionIntervalSpec(Intervals.of("2000/2099"), "testSha256OfSortedSegmentIds"), null
        ),
        null,
        new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null),
        null, null, null, null
    )
);
Mockito.when(mockIndexingServiceClient.getActiveTasks()).thenReturn(ImmutableList.of(runningConflictCompactionTask));
Mockito.when(mockIndexingServiceClient.getTaskPayload(ArgumentMatchers.eq(conflictTaskId))).thenReturn(runningConflictCompactionTaskPayload);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSource, 0, 500L, null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(
        null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null
    ),
    new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
    null, null, null, null, null
));
doCompactSegments(compactSegments, compactionConfigs);
// Verify that the conflicting task was canceled
Mockito.verify(mockIndexingServiceClient).cancelTask(conflictTaskId);
// The active conflicting task covers the interval 2000/2099.
// Make sure we do not skip that interval: since the conflicting task is canceled,
// its intervals must be compacted again with the new segmentGranularity.
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(), segmentsCaptor.capture(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(), ArgumentMatchers.any(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
);
// All segments are compacted at the same time since we changed the segment granularity to YEAR
// and all segments fall within the same year
Assert.assertEquals(datasourceToSegments.get(dataSource).size(), segmentsCaptor.getValue().size());
ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
Assert.assertEquals(expected, actual);
}
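The List<DataSegment> captor above is created with the raw-typed ArgumentCaptor.forClass(List.class), which compiles with an unchecked-conversion warning. Mockito's annotation support avoids that; the sketch below shows the @Captor/@Mock alternative under a MockitoRule. The class and field names here are illustrative and not part of CompactSegmentsTest.
import java.util.List;

import org.apache.druid.client.indexing.HttpIndexingServiceClient;
import org.apache.druid.timeline.DataSegment;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;

public class CompactSegmentsCaptorSketch
{
  // Initializes the @Mock and @Captor fields before each test method.
  @Rule
  public MockitoRule mockitoRule = MockitoJUnit.rule();

  @Mock
  private HttpIndexingServiceClient indexingServiceClient;

  // A typed captor, so no raw ArgumentCaptor.forClass(List.class) is needed.
  @Captor
  private ArgumentCaptor<List<DataSegment>> segmentsCaptor;

  @Test
  public void capturesSubmittedSegments()
  {
    // Exercise code that calls indexingServiceClient.compactSegments(...),
    // then verify with segmentsCaptor.capture() exactly as in the tests above.
  }
}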
use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
the class CompactSegmentsTest method testCompactWithGranularitySpec.
@Test
public void testCompactWithGranularitySpec() {
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
final String dataSource = DATA_SOURCE_PREFIX + 0;
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSource, 0, 500L, null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(
        null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null
    ),
    new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
    null, null, null, null, null
));
doCompactSegments(compactSegments, compactionConfigs);
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(), segmentsCaptor.capture(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(), ArgumentMatchers.any(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
);
// All segments are compacted at the same time since we changed the segment granularity to YEAR
// and all segments fall within the same year
Assert.assertEquals(datasourceToSegments.get(dataSource).size(), segmentsCaptor.getValue().size());
ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
Assert.assertEquals(expected, actual);
}
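The verification above assumes compactSegments is called exactly once per run. A sketch of the multi-call variant, reusing the mock and captors that are in scope in the test above; Mockito.atLeastOnce() and ArgumentCaptor.getAllValues() are standard Mockito, and the final assertion is only illustrative.
// Capture every submitted segment list when compactSegments may be invoked
// more than once in a single coordinator run.
Mockito.verify(mockIndexingServiceClient, Mockito.atLeastOnce()).compactSegments(
    ArgumentMatchers.anyString(), segmentsCaptor.capture(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(), ArgumentMatchers.any(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
);
List<List<DataSegment>> allSubmittedSegments = segmentsCaptor.getAllValues();
List<ClientCompactionTaskGranularitySpec> allGranularitySpecs = granularitySpecArgumentCaptor.getAllValues();
// One granularity spec is captured per submission.
Assert.assertEquals(allSubmittedSegments.size(), allGranularitySpecs.size());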
use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
the class CompactSegmentsTest method testCompactWithoutGranularitySpec.
@Test
public void testCompactWithoutGranularitySpec() {
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
final String dataSource = DATA_SOURCE_PREFIX + 0;
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSource, 0, 500L, null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(
        null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null
    ),
    null, null, null, null, null, null
));
doCompactSegments(compactSegments, compactionConfigs);
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(), segmentsCaptor.capture(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(), ArgumentMatchers.any(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
);
// Only as many segments as the original PARTITION_PER_TIME_INTERVAL are compacted, since the segment granularity is unchanged
Assert.assertEquals(PARTITION_PER_TIME_INTERVAL, segmentsCaptor.getValue().size());
Assert.assertNull(granularitySpecArgumentCaptor.getValue().getSegmentGranularity());
Assert.assertNull(granularitySpecArgumentCaptor.getValue().getQueryGranularity());
Assert.assertNull(granularitySpecArgumentCaptor.getValue().isRollup());
}
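The opposite outcome, that a run submits no compaction task at all (for example when every interval is skipped), can be asserted with Mockito.never() against the same mocked client; a minimal sketch:
// Assert that the duty submitted no compaction task during the run.
Mockito.verify(mockIndexingServiceClient, Mockito.never()).compactSegments(
    ArgumentMatchers.anyString(), ArgumentMatchers.any(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
);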
use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
the class CompactSegmentsTest method testRunParallelCompactionMultipleCompactionTaskSlots.
@Test
public void testRunParallelCompactionMultipleCompactionTaskSlots() {
final TestDruidLeaderClient leaderClient = new TestDruidLeaderClient(JSON_MAPPER);
leaderClient.start();
final HttpIndexingServiceClient indexingServiceClient = new HttpIndexingServiceClient(JSON_MAPPER, leaderClient);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, indexingServiceClient);
final CoordinatorStats stats = doCompactSegments(compactSegments, createCompactionConfigs(2), 4);
Assert.assertEquals(4, stats.getGlobalStat(CompactSegments.AVAILABLE_COMPACTION_TASK_SLOT));
Assert.assertEquals(4, stats.getGlobalStat(CompactSegments.MAX_COMPACTION_TASK_SLOT));
Assert.assertEquals(2, stats.getGlobalStat(CompactSegments.COMPACTION_TASK_COUNT));
}
use of org.apache.druid.client.indexing.HttpIndexingServiceClient in project druid by druid-io.
the class CompactSegmentsTest method testCompactWithDimensionSpec.
@Test
public void testCompactWithDimensionSpec() {
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
final String dataSource = DATA_SOURCE_PREFIX + 0;
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSource, 0, 500L, null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(
        null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null
    ),
    null,
    new UserCompactionTaskDimensionsConfig(DimensionsSpec.getDefaultSchemas(ImmutableList.of("bar", "foo"))),
    null, null, null, null
));
doCompactSegments(compactSegments, compactionConfigs);
ArgumentCaptor<ClientCompactionTaskDimensionsSpec> dimensionsSpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskDimensionsSpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(), ArgumentMatchers.any(), ArgumentMatchers.anyInt(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), dimensionsSpecArgumentCaptor.capture(), ArgumentMatchers.any(),
    ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()
);
ClientCompactionTaskDimensionsSpec actual = dimensionsSpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
Assert.assertEquals(DimensionsSpec.getDefaultSchemas(ImmutableList.of("bar", "foo")), actual.getDimensions());
}
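The assertion above compares full DimensionSchema objects. When only the dimension names and their order matter, a looser check on the captured spec is possible; a sketch that continues from the test above and assumes java.util.stream.Collectors and org.apache.druid.data.input.impl.DimensionSchema are imported.
// Compare only the dimension names and their order, ignoring per-dimension
// settings such as multi-value handling or bitmap indexing.
List<String> capturedDimensionNames = actual.getDimensions()
                                            .stream()
                                            .map(DimensionSchema::getName)
                                            .collect(Collectors.toList());
Assert.assertEquals(ImmutableList.of("bar", "foo"), capturedDimensionNames);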