Use of org.apache.druid.client.indexing.ClientCompactionIOConfig in project druid by druid-io.
From the class ClientCompactionTaskQuerySerdeTest, method testCompactionTaskToClientCompactionTaskQuery.
@Test
public void testCompactionTaskToClientCompactionTaskQuery() throws IOException {
final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
final CompactionTask.Builder builder = new CompactionTask.Builder("datasource", new SegmentCacheManagerFactory(mapper), new RetryPolicyFactory(new RetryPolicyConfig()));
final CompactionTask task = builder
    .inputSpec(new CompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"), true)
    .tuningConfig(new ParallelIndexTuningConfig(
        null, null, null, 40000, 2000L, null, null, null,
        new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
        new DynamicPartitionsSpec(100, 30000L),
        new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
        new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
        2, null, null, 1000L,
        TmpFileSegmentWriteOutMediumFactory.instance(),
        null, 100, 5, 1000L,
        new Duration(3000L),
        7, 1000, 100,
        null, null, null, null, null, null
    ))
    .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true))
    .dimensionsSpec(
        DimensionsSpec.builder()
            .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim")))
            .setDimensionExclusions(ImmutableList.of("__time", "val"))
            .build()
    )
    .metricsSpec(new AggregatorFactory[] {new CountAggregatorFactory("cnt")})
    .transformSpec(new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)))
    .build();
final ClientCompactionTaskQuery expected = new ClientCompactionTaskQuery(
    task.getId(),
    "datasource",
    new ClientCompactionIOConfig(
        new ClientCompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"),
        true
    ),
    new ClientCompactionTaskQueryTuningConfig(
        100, 40000, 2000L, 30000L,
        new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
        new DynamicPartitionsSpec(100, 30000L),
        new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
        new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
        2, 1000L,
        TmpFileSegmentWriteOutMediumFactory.instance(),
        100, 5, 1000L,
        new Duration(3000L),
        7, 1000, 100
    ),
    new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true),
    new ClientCompactionTaskDimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
    new AggregatorFactory[] {new CountAggregatorFactory("cnt")},
    new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)),
    new HashMap<>()
);
final byte[] json = mapper.writeValueAsBytes(task);
final ClientCompactionTaskQuery actual = (ClientCompactionTaskQuery) mapper.readValue(json, ClientTaskQuery.class);
Assert.assertEquals(expected, actual);
}
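For context, here is a hedged sketch of what a helper like setupInjectablesInObjectMapper could look like. This is an assumption, not the actual Druid implementation: the idea is to register Jackson InjectableValues so that injected dependencies of CompactionTask (such as the SegmentCacheManagerFactory and RetryPolicyFactory passed to the builder above) can be resolved when the task is deserialized.

// Hypothetical sketch (assumed, not the actual Druid helper): register
// Jackson InjectableValues so injected dependencies of CompactionTask
// resolve during deserialization.
private static ObjectMapper setupInjectablesInObjectMapper(ObjectMapper mapper)
{
    mapper.setInjectableValues(
        new InjectableValues.Std()
            .addValue(SegmentCacheManagerFactory.class, new SegmentCacheManagerFactory(mapper))
            .addValue(RetryPolicyFactory.class, new RetryPolicyFactory(new RetryPolicyConfig()))
    );
    return mapper;
}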
Use of org.apache.druid.client.indexing.ClientCompactionIOConfig in project druid by druid-io.
From the class ClientCompactionTaskQuerySerdeTest, method testClientCompactionTaskQueryToCompactionTask.
@Test
public void testClientCompactionTaskQueryToCompactionTask() throws IOException {
final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
final ClientCompactionTaskQuery query = new ClientCompactionTaskQuery(
    "id",
    "datasource",
    new ClientCompactionIOConfig(
        new ClientCompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"),
        true
    ),
    new ClientCompactionTaskQueryTuningConfig(
        null, 40000, 2000L, null,
        new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
        new DynamicPartitionsSpec(100, 30000L),
        new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
        new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
        2, 1000L,
        TmpFileSegmentWriteOutMediumFactory.instance(),
        100, 5, 1000L,
        new Duration(3000L),
        7, 1000, 100
    ),
    new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true),
    new ClientCompactionTaskDimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
    new AggregatorFactory[] {new CountAggregatorFactory("cnt")},
    new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)),
    ImmutableMap.of("key", "value")
);
final byte[] json = mapper.writeValueAsBytes(query);
final CompactionTask task = (CompactionTask) mapper.readValue(json, Task.class);
Assert.assertEquals(query.getId(), task.getId());
Assert.assertEquals(query.getDataSource(), task.getDataSource());
Assert.assertTrue(task.getIoConfig().getInputSpec() instanceof CompactionIntervalSpec);
Assert.assertEquals(query.getIoConfig().getInputSpec().getInterval(), ((CompactionIntervalSpec) task.getIoConfig().getInputSpec()).getInterval());
Assert.assertEquals(query.getIoConfig().getInputSpec().getSha256OfSortedSegmentIds(), ((CompactionIntervalSpec) task.getIoConfig().getInputSpec()).getSha256OfSortedSegmentIds());
Assert.assertEquals(query.getTuningConfig().getMaxRowsInMemory().intValue(), task.getTuningConfig().getMaxRowsInMemory());
Assert.assertEquals(query.getTuningConfig().getMaxBytesInMemory().longValue(), task.getTuningConfig().getMaxBytesInMemory());
Assert.assertEquals(query.getTuningConfig().getSplitHintSpec(), task.getTuningConfig().getSplitHintSpec());
Assert.assertEquals(query.getTuningConfig().getPartitionsSpec(), task.getTuningConfig().getPartitionsSpec());
Assert.assertEquals(query.getTuningConfig().getIndexSpec(), task.getTuningConfig().getIndexSpec());
Assert.assertEquals(query.getTuningConfig().getIndexSpecForIntermediatePersists(), task.getTuningConfig().getIndexSpecForIntermediatePersists());
Assert.assertEquals(query.getTuningConfig().getPushTimeout().longValue(), task.getTuningConfig().getPushTimeout());
Assert.assertEquals(query.getTuningConfig().getSegmentWriteOutMediumFactory(), task.getTuningConfig().getSegmentWriteOutMediumFactory());
Assert.assertEquals(query.getTuningConfig().getMaxNumConcurrentSubTasks().intValue(), task.getTuningConfig().getMaxNumConcurrentSubTasks());
Assert.assertEquals(query.getTuningConfig().getMaxRetry().intValue(), task.getTuningConfig().getMaxRetry());
Assert.assertEquals(query.getTuningConfig().getTaskStatusCheckPeriodMs().longValue(), task.getTuningConfig().getTaskStatusCheckPeriodMs());
Assert.assertEquals(query.getTuningConfig().getChatHandlerTimeout(), task.getTuningConfig().getChatHandlerTimeout());
Assert.assertEquals(query.getTuningConfig().getMaxNumSegmentsToMerge().intValue(), task.getTuningConfig().getMaxNumSegmentsToMerge());
Assert.assertEquals(query.getTuningConfig().getTotalNumMergeTasks().intValue(), task.getTuningConfig().getTotalNumMergeTasks());
Assert.assertEquals(query.getGranularitySpec(), task.getGranularitySpec());
Assert.assertEquals(query.getGranularitySpec().getQueryGranularity(), task.getGranularitySpec().getQueryGranularity());
Assert.assertEquals(query.getGranularitySpec().getSegmentGranularity(), task.getGranularitySpec().getSegmentGranularity());
Assert.assertEquals(query.getGranularitySpec().isRollup(), task.getGranularitySpec().isRollup());
Assert.assertEquals(query.getIoConfig().isDropExisting(), task.getIoConfig().isDropExisting());
Assert.assertEquals(query.getContext(), task.getContext());
Assert.assertEquals(query.getDimensionsSpec().getDimensions(), task.getDimensionsSpec().getDimensions());
Assert.assertEquals(query.getTransformSpec().getFilter(), task.getTransformSpec().getFilter());
Assert.assertArrayEquals(query.getMetricsSpec(), task.getMetricsSpec());
}
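Both serde tests follow the same round-trip pattern: serialize with the configured mapper, deserialize as the polymorphic base type (ClientTaskQuery or Task), then assert on the result. A hypothetical helper capturing that pattern, not part of the Druid test:

// Hypothetical round-trip helper; deserializing via baseType exercises the
// polymorphic type information Jackson wrote into the JSON.
private static <T> T roundTrip(ObjectMapper mapper, Object value, Class<T> baseType) throws IOException
{
    final byte[] json = mapper.writeValueAsBytes(value);
    return mapper.readValue(json, baseType);
}

With it, the first test's check would reduce to Assert.assertEquals(expected, roundTrip(mapper, task, ClientTaskQuery.class)).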
Use of org.apache.druid.client.indexing.ClientCompactionIOConfig in project druid by druid-io.
From the class CompactSegmentsTest, method testCompactWithGranularitySpecConflictWithActiveCompactionTask.
@Test
public void testCompactWithGranularitySpecConflictWithActiveCompactionTask() {
final String dataSource = DATA_SOURCE_PREFIX + 0;
final String conflictTaskId = "taskIdDummy";
final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
TaskStatusPlus runningConflictCompactionTask = new TaskStatusPlus(
    conflictTaskId,
    "groupId",
    "compact",
    DateTimes.EPOCH,
    DateTimes.EPOCH,
    TaskState.RUNNING,
    RunnerTaskState.RUNNING,
    -1L,
    TaskLocation.unknown(),
    dataSource,
    null
);
TaskPayloadResponse runningConflictCompactionTaskPayload = new TaskPayloadResponse(
    conflictTaskId,
    new ClientCompactionTaskQuery(
        conflictTaskId,
        dataSource,
        new ClientCompactionIOConfig(
            new ClientCompactionIntervalSpec(Intervals.of("2000/2099"), "testSha256OfSortedSegmentIds"),
            null
        ),
        null,
        new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null),
        null, null, null, null
    )
);
Mockito.when(mockIndexingServiceClient.getActiveTasks()).thenReturn(ImmutableList.of(runningConflictCompactionTask));
Mockito.when(mockIndexingServiceClient.getTaskPayload(ArgumentMatchers.eq(conflictTaskId))).thenReturn(runningConflictCompactionTaskPayload);
final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
compactionConfigs.add(
    new DataSourceCompactionConfig(
        dataSource,
        0,
        500L,
        null,
        new Period("PT0H"), // smaller than segment interval
        new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
        new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
        null, null, null, null, null
    )
);
doCompactSegments(compactSegments, compactionConfigs);
// Verify that the conflicting task was canceled
Mockito.verify(mockIndexingServiceClient).cancelTask(conflictTaskId);
// The active conflicting task covers the interval 2000/2099.
// Make sure that we do not skip the conflicting task's interval:
// we cancel that task and must compact its intervals with the new segmentGranularity.
ArgumentCaptor<List<DataSegment>> segmentsCaptor = ArgumentCaptor.forClass(List.class);
ArgumentCaptor<ClientCompactionTaskGranularitySpec> granularitySpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskGranularitySpec.class);
Mockito.verify(mockIndexingServiceClient).compactSegments(
    ArgumentMatchers.anyString(),
    segmentsCaptor.capture(),
    ArgumentMatchers.anyInt(),
    ArgumentMatchers.any(),
    granularitySpecArgumentCaptor.capture(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any(),
    ArgumentMatchers.any()
);
// All segments are compacted at the same time since we changed the segment granularity to YEAR
// and all segments are within the same year
Assert.assertEquals(datasourceToSegments.get(dataSource).size(), segmentsCaptor.getValue().size());
ClientCompactionTaskGranularitySpec actual = granularitySpecArgumentCaptor.getValue();
Assert.assertNotNull(actual);
ClientCompactionTaskGranularitySpec expected = new ClientCompactionTaskGranularitySpec(Granularities.YEAR, null, null);
Assert.assertEquals(expected, actual);
}
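The verification above relies on Mockito's ArgumentCaptor. A minimal, self-contained sketch of the capture-then-assert idiom, using a hypothetical task id with the same cancelTask call seen in the test:

// Capture the argument passed to a mocked client, then assert on it.
HttpIndexingServiceClient client = Mockito.mock(HttpIndexingServiceClient.class);
client.cancelTask("task-1"); // "task-1" is a hypothetical id

ArgumentCaptor<String> idCaptor = ArgumentCaptor.forClass(String.class);
Mockito.verify(client).cancelTask(idCaptor.capture());
Assert.assertEquals("task-1", idCaptor.getValue());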