Use of org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig in project druid by druid-io.
The class NewestSegmentFirstPolicyTest, method testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentTimezone.
@Test
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentTimezone() {
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {
});
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(
    ClientCompactionTaskQueryTuningConfig.from(null, null)
);
// Create segments that were compacted (CompactionState != null) and have segmentGranularity=DAY
final VersionedIntervalTimeline<String, DataSegment> timeline = createTimeline(
    new SegmentGenerateSpec(
        Intervals.of("2017-10-02T00:00:00/2017-10-03T00:00:00"),
        new Period("P1D"),
        null,
        new CompactionState(partitionsSpec, null, null, null, indexSpec, null)
    )
);
// Duration of new segmentGranularity is the same as before (P1D),
// but we changed the timezone from UTC to Bangkok in the auto compaction spec
final CompactionSegmentIterator iterator = policy.reset(
    ImmutableMap.of(
        DATA_SOURCE,
        createCompactionConfig(
            130000,
            new Period("P0D"),
            new UserCompactionTaskGranularityConfig(
                new PeriodGranularity(new Period("P1D"), null, DateTimeZone.forTimeZone(TimeZone.getTimeZone("Asia/Bangkok"))),
                null,
                null
            )
        )
    ),
    ImmutableMap.of(DATA_SOURCE, timeline),
    Collections.emptyMap()
);
// We should get all segments in timeline back since skip offset is P0D.
Assert.assertTrue(iterator.hasNext());
List<DataSegment> expectedSegmentsToCompact = new ArrayList<>(
    timeline.findNonOvershadowedObjectsInInterval(Intervals.of("2017-10-01T00:00:00/2017-10-03T00:00:00"), Partitions.ONLY_COMPLETE)
);
Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));
// No more
Assert.assertFalse(iterator.hasNext());
}
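The behavior above hinges on granularity equality being time-zone aware. Below is a minimal standalone sketch (the class name is made up for illustration; it is not part of the test) showing that a DAY granularity anchored to Asia/Bangkok has the same P1D duration as the default UTC DAY granularity, but is assumed to compare as unequal, which is why the already-compacted DAY segments are picked up again.
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;
import java.util.TimeZone;

public class TimezoneGranularitySketch {
    public static void main(String[] args) {
        // DAY granularity anchored to Asia/Bangkok, as in the auto compaction spec above.
        Granularity dayInBangkok = new PeriodGranularity(
            new Period("P1D"),
            null,
            DateTimeZone.forTimeZone(TimeZone.getTimeZone("Asia/Bangkok"))
        );
        // Granularities.DAY is the default UTC day granularity. Same duration, different
        // time zone, so the two are assumed not to be equal, which is what makes the
        // previously compacted DAY segments eligible for compaction again.
        System.out.println(dayInBangkok.equals(Granularities.DAY)); // expected: false
    }
}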
Use of org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig in project druid by druid-io.
The class CoordinatorCompactionConfigsResourceTest, method testAddOrUpdateCompactionConfigWithExistingConfig.
@Test
public void testAddOrUpdateCompactionConfigWithExistingConfig() {
final ArgumentCaptor<byte[]> oldConfigCaptor = ArgumentCaptor.forClass(byte[].class);
final ArgumentCaptor<CoordinatorCompactionConfig> newConfigCaptor = ArgumentCaptor.forClass(CoordinatorCompactionConfig.class);
Mockito.when(
    mockJacksonConfigManager.set(
        ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY),
        oldConfigCaptor.capture(),
        newConfigCaptor.capture(),
        ArgumentMatchers.any()
    )
).thenReturn(ConfigManager.SetResult.ok());
final DataSourceCompactionConfig newConfig = new DataSourceCompactionConfig(
    "dataSource",
    null,
    500L,
    null,
    new Period(3600),
    null,
    new UserCompactionTaskGranularityConfig(Granularities.HOUR, null, true),
    null,
    null,
    null,
    null,
    ImmutableMap.of("key", "val")
);
String author = "maytas";
String comment = "hello";
Response result = coordinatorCompactionConfigsResource.addOrUpdateCompactionConfig(newConfig, author, comment, mockHttpServletRequest);
Assert.assertEquals(Response.Status.OK.getStatusCode(), result.getStatus());
Assert.assertNotNull(oldConfigCaptor.getValue());
Assert.assertEquals(oldConfigCaptor.getValue(), OLD_CONFIG_IN_BYTES);
Assert.assertNotNull(newConfigCaptor.getValue());
Assert.assertEquals(2, newConfigCaptor.getValue().getCompactionConfigs().size());
Assert.assertEquals(OLD_CONFIG, newConfigCaptor.getValue().getCompactionConfigs().get(0));
Assert.assertEquals(newConfig, newConfigCaptor.getValue().getCompactionConfigs().get(1));
}
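For reference, a hypothetical standalone snippet that builds the same HOUR/rollup granularity config as newConfig above and reads it back. The class name and the accessor names are assumptions based on the usual Druid naming conventions, not taken from the test.
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig;

public class GranularityConfigSketch {
    public static void main(String[] args) {
        // Same spec as newConfig above: hourly segment granularity, query granularity
        // left unset, rollup enabled.
        UserCompactionTaskGranularityConfig granularityConfig =
            new UserCompactionTaskGranularityConfig(Granularities.HOUR, null, true);
        // Accessor names below are assumed; check UserCompactionTaskGranularityConfig
        // for the exact getters.
        System.out.println(granularityConfig.getSegmentGranularity()); // hourly segment granularity
        System.out.println(granularityConfig.getQueryGranularity());   // null (left unset)
        System.out.println(granularityConfig.isRollup());              // true
    }
}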
Use of org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig in project druid by druid-io.
The class CompactSegmentsTest, method testCompactWithGranularitySpecConflictWithActiveCompactionTask.
@Test
public void testCompactWithGranularitySpecConflictWithActiveCompactionTask() {
final String dataSource = DATA_SOURCE_PREFIX + 0;
final String conflictTaskId = "taskIdDummy";
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
TaskStatusPlus runningConflictCompactionTask = new TaskStatusPlus(
    conflictTaskId,
    "groupId",
    "compact",
    DateTimes.EPOCH,
    DateTimes.EPOCH,
    TaskState.RUNNING,
    RunnerTaskState.RUNNING,
    -1L,
    TaskLocation.unknown(),
    dataSource,
    null
);
TaskPayloadResponse runningConflictCompactionTaskPayload = new TaskPayloadResponse(
    conflictTaskId,
    new ClientCompactionTaskQuery(
        conflictTaskId,
        dataSource,
        new ClientCompactionIOConfig(
            new ClientCompactionIntervalSpec(Intervals.of("2000/2099"), "testSha256OfSortedSegmentIds"),
            null
        ),
        null,
        new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null),
        null,
        null,
        null,
        null
    )
);
Mockito.when(mockIndexingServiceClient.getActiveTasks()).thenReturn(ImmutableList.of(runningConflictCompactionTask));
Mockito.when(mockIndexingServiceClient.getTaskPayload(ArgumentMatchers.eq(conflictTaskId))).thenReturn(runningConflictCompactionTaskPayload);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSource,
    0,
    500L,
    null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
    new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
    null,
    null,
    null,
    null,
    null
));
doCompactSegments(compactSegments, compactionConfigs);
// Verify that conflict task was canceled
Mockito.verify(mockIndexingServiceClient).cancelTask(conflictTaskId);
// The active conflicting task covers the interval 2000/2099.
// Make sure that we do not skip the conflicting task's interval,
// since we cancel that task and will have to compact its interval with the new segmentGranularity.
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(),
    segmentsCaptor.capture(),
    ArgumentMatchers.anyInt(),
    ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any()
);
// All segments are compacted at the same time since we changed the segment granularity to YEAR
// and all segments are within the same year.
Assert.assertEquals(datasourceToSegments.get(dataSource).size(), segmentsCaptor.getValue().size());
ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
Assert.assertEquals(expected, actual);
}
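The conflict detection above boils down to comparing the granularity spec in the running task's payload with the one derived from the new config. A minimal hedged sketch of that comparison (the class name is made up; it is not part of the test):
import org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec;
import org.apache.druid.java.util.common.granularity.Granularities;

public class GranularityConflictSketch {
    public static void main(String[] args) {
        // Granularity spec of the already-running compaction task (from its task payload above).
        ClientCompactionTaskGranularitySpec fromRunningTask =
            new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null);
        // Granularity spec derived from the new auto compaction config.
        ClientCompactionTaskGranularitySpec fromNewConfig =
            new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
        // The specs differ, so the duty is expected to cancel the running task and
        // re-submit compaction for its interval with the new YEAR granularity.
        System.out.println(fromRunningTask.equals(fromNewConfig)); // expected: false
    }
}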
Use of org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig in project druid by druid-io.
The class CompactSegmentsTest, method testCompactWithGranularitySpec.
@Test
public void testCompactWithGranularitySpec() {
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
final String dataSource = DATA_SOURCE_PREFIX + 0;
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSource,
    0,
    500L,
    null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
    new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
    null,
    null,
    null,
    null,
    null
));
doCompactSegments(compactSegments, compactionConfigs);
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(),
    segmentsCaptor.capture(),
    ArgumentMatchers.anyInt(),
    ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any()
);
// All segments are compacted at the same time since we changed the segment granularity to YEAR
// and all segments are within the same year.
Assert.assertEquals(datasourceToSegments.get(dataSource).size(), segmentsCaptor.getValue().size());
ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
Assert.assertEquals(expected, actual);
}
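Why a single compaction covers everything: with segmentGranularity set to YEAR, all of the test's segment intervals fall into the same yearly bucket. A small illustrative sketch follows; the timestamps are illustrative rather than the test's actual segment intervals, and the class name is made up.
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.granularity.Granularities;

public class YearBucketSketch {
    public static void main(String[] args) {
        // Two timestamps from the same year map to the same YEAR bucket, so segments
        // covering them are grouped into a single compaction task.
        System.out.println(Granularities.YEAR.bucket(DateTimes.of("2017-01-01")));
        System.out.println(Granularities.YEAR.bucket(DateTimes.of("2017-03-15")));
        // Both are expected to print 2017-01-01T00:00:00.000Z/2018-01-01T00:00:00.000Z
        // (assuming the default UTC time zone).
    }
}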
Use of org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig in project druid by druid-io.
The class CompactSegmentsTest, method testDetermineSegmentGranularityFromSegmentGranularityInCompactionConfig.
@Test
public void testDetermineSegmentGranularityFromSegmentGranularityInCompactionConfig() {
String dataSourceName = DATA_SOURCE_PREFIX + 1;
List<DataSegment> segments = new ArrayList<>();
segments.add(new DataSegment(
    dataSourceName, Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00"), "1", null,
    ImmutableList.of(), ImmutableList.of(), shardSpecFactory.apply(0, 2), 0, 10L
));
segments.add(new DataSegment(
    dataSourceName, Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00"), "1", null,
    ImmutableList.of(), ImmutableList.of(), shardSpecFactory.apply(1, 2), 0, 10L
));
dataSources = DataSourcesSnapshot.fromUsedSegments(segments, ImmutableMap.of()).getUsedSegmentsTimelinesPerDataSource();
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
compactionConfigs.add(new DataSourceCompactionConfig(
    dataSourceName,
    0,
    500L,
    null,
    new Period("PT0H"), // smaller than segment interval
    new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
    new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
    null,
    null,
    null,
    null,
    null
));
doCompactSegments(compactSegments, compactionConfigs);
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(),
    segmentsCaptor.capture(),
    ArgumentMatchers.anyInt(),
    ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any()
);
Assert.assertEquals(2, segmentsCaptor.getValue().size());
ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
Assert.assertEquals(expected, actual);
}
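The DataSegment constructor used above takes many positional arguments, which the flat call sites make hard to read. Below is a hedged standalone sketch of an equivalent shard with the arguments labeled; NumberedShardSpec is an assumption about what shardSpecFactory.apply(0, 2) produces, the data source name is made up, and the class name is illustrative only.
import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NumberedShardSpec;

public class DataSegmentSketch {
    public static void main(String[] args) {
        // Roughly equivalent to the first segment added above; the labels in the
        // comments reflect the usual DataSegment constructor order and are assumptions.
        DataSegment shard0 = new DataSegment(
            "exampleDataSource",                                     // dataSource (made up)
            Intervals.of("2017-01-01T00:00:00/2017-01-02T00:00:00"), // interval
            "1",                                                     // version
            null,                                                    // loadSpec
            ImmutableList.of(),                                      // dimensions
            ImmutableList.of(),                                      // metrics
            new NumberedShardSpec(0, 2),                             // shard 0 of 2
            0,                                                       // binary version
            10L                                                      // size in bytes
        );
        System.out.println(shard0.getId());
    }
}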