
Example 6 with ClientCompactionTaskTransformSpec

Use of org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec in project druid by druid-io.

In the class CompactionTaskTest, the method testCreateCompactionTaskWithTransformSpec:

@Test
public void testCreateCompactionTaskWithTransformSpec() {
    ClientCompactionTaskTransformSpec transformSpec = new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null));
    final Builder builder = new Builder(DATA_SOURCE, segmentCacheManagerFactory, RETRY_POLICY_FACTORY);
    builder.inputSpec(new CompactionIntervalSpec(COMPACTION_INTERVAL, SegmentUtils.hashIds(SEGMENTS)));
    builder.tuningConfig(createTuningConfig());
    builder.transformSpec(transformSpec);
    final CompactionTask taskCreatedWithTransformSpec = builder.build();
    Assert.assertEquals(transformSpec, taskCreatedWithTransformSpec.getTransformSpec());
}
Also used : SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) Builder(org.apache.druid.indexing.common.task.CompactionTask.Builder) ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) Test(org.junit.Test)
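
The spec carries nothing but a filter, so it can be read back with getFilter() and flattened to a map for storage alongside a segment's compaction state. A minimal sketch of those two accessors (both appear in the other examples on this page); the mapper here is just a plain DefaultObjectMapper:

ClientCompactionTaskTransformSpec spec = new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null));
// the selector filter passed to the constructor
DimFilter filter = spec.getFilter();
// map form, as used when persisting a CompactionState
Map<String, Object> asMap = spec.asMap(new DefaultObjectMapper());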

Example 7 with ClientCompactionTaskTransformSpec

Use of org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec in project druid by druid-io.

In the class ClientCompactionTaskQuerySerdeTest, the method testClientCompactionTaskQueryToCompactionTask:

@Test
public void testClientCompactionTaskQueryToCompactionTask() throws IOException {
    final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
    final ClientCompactionTaskQuery query = new ClientCompactionTaskQuery(
        "id",
        "datasource",
        // ioConfig
        new ClientCompactionIOConfig(
            new ClientCompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"),
            true
        ),
        // tuningConfig
        new ClientCompactionTaskQueryTuningConfig(
            null,
            40000,
            2000L,
            null,
            new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
            new DynamicPartitionsSpec(100, 30000L),
            new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
            new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
            2,
            1000L,
            TmpFileSegmentWriteOutMediumFactory.instance(),
            100,
            5,
            1000L,
            new Duration(3000L),
            7,
            1000,
            100
        ),
        // granularitySpec
        new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true),
        // dimensionsSpec
        new ClientCompactionTaskDimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
        // metricsSpec
        new AggregatorFactory[] { new CountAggregatorFactory("cnt") },
        // transformSpec
        new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)),
        // context
        ImmutableMap.of("key", "value")
    );
    final byte[] json = mapper.writeValueAsBytes(query);
    final CompactionTask task = (CompactionTask) mapper.readValue(json, Task.class);
    Assert.assertEquals(query.getId(), task.getId());
    Assert.assertEquals(query.getDataSource(), task.getDataSource());
    Assert.assertTrue(task.getIoConfig().getInputSpec() instanceof CompactionIntervalSpec);
    Assert.assertEquals(query.getIoConfig().getInputSpec().getInterval(), ((CompactionIntervalSpec) task.getIoConfig().getInputSpec()).getInterval());
    Assert.assertEquals(query.getIoConfig().getInputSpec().getSha256OfSortedSegmentIds(), ((CompactionIntervalSpec) task.getIoConfig().getInputSpec()).getSha256OfSortedSegmentIds());
    Assert.assertEquals(query.getTuningConfig().getMaxRowsInMemory().intValue(), task.getTuningConfig().getMaxRowsInMemory());
    Assert.assertEquals(query.getTuningConfig().getMaxBytesInMemory().longValue(), task.getTuningConfig().getMaxBytesInMemory());
    Assert.assertEquals(query.getTuningConfig().getSplitHintSpec(), task.getTuningConfig().getSplitHintSpec());
    Assert.assertEquals(query.getTuningConfig().getPartitionsSpec(), task.getTuningConfig().getPartitionsSpec());
    Assert.assertEquals(query.getTuningConfig().getIndexSpec(), task.getTuningConfig().getIndexSpec());
    Assert.assertEquals(query.getTuningConfig().getIndexSpecForIntermediatePersists(), task.getTuningConfig().getIndexSpecForIntermediatePersists());
    Assert.assertEquals(query.getTuningConfig().getPushTimeout().longValue(), task.getTuningConfig().getPushTimeout());
    Assert.assertEquals(query.getTuningConfig().getSegmentWriteOutMediumFactory(), task.getTuningConfig().getSegmentWriteOutMediumFactory());
    Assert.assertEquals(query.getTuningConfig().getMaxNumConcurrentSubTasks().intValue(), task.getTuningConfig().getMaxNumConcurrentSubTasks());
    Assert.assertEquals(query.getTuningConfig().getMaxRetry().intValue(), task.getTuningConfig().getMaxRetry());
    Assert.assertEquals(query.getTuningConfig().getTaskStatusCheckPeriodMs().longValue(), task.getTuningConfig().getTaskStatusCheckPeriodMs());
    Assert.assertEquals(query.getTuningConfig().getChatHandlerTimeout(), task.getTuningConfig().getChatHandlerTimeout());
    Assert.assertEquals(query.getTuningConfig().getMaxNumSegmentsToMerge().intValue(), task.getTuningConfig().getMaxNumSegmentsToMerge());
    Assert.assertEquals(query.getTuningConfig().getTotalNumMergeTasks().intValue(), task.getTuningConfig().getTotalNumMergeTasks());
    Assert.assertEquals(query.getGranularitySpec(), task.getGranularitySpec());
    Assert.assertEquals(query.getGranularitySpec().getQueryGranularity(), task.getGranularitySpec().getQueryGranularity());
    Assert.assertEquals(query.getGranularitySpec().getSegmentGranularity(), task.getGranularitySpec().getSegmentGranularity());
    Assert.assertEquals(query.getGranularitySpec().isRollup(), task.getGranularitySpec().isRollup());
    Assert.assertEquals(query.getIoConfig().isDropExisting(), task.getIoConfig().isDropExisting());
    Assert.assertEquals(query.getContext(), task.getContext());
    Assert.assertEquals(query.getDimensionsSpec().getDimensions(), task.getDimensionsSpec().getDimensions());
    Assert.assertEquals(query.getTransformSpec().getFilter(), task.getTransformSpec().getFilter());
    Assert.assertArrayEquals(query.getMetricsSpec(), task.getMetricsSpec());
}
Also used : IndexSpec(org.apache.druid.segment.IndexSpec) ClientCompactionIOConfig(org.apache.druid.client.indexing.ClientCompactionIOConfig) ClientCompactionIntervalSpec(org.apache.druid.client.indexing.ClientCompactionIntervalSpec) Duration(org.joda.time.Duration) ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) ClientCompactionTaskGranularitySpec(org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec) ClientCompactionTaskQueryTuningConfig(org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig) ClientCompactionIntervalSpec(org.apache.druid.client.indexing.ClientCompactionIntervalSpec) SegmentsSplitHintSpec(org.apache.druid.data.input.SegmentsSplitHintSpec) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) DefaultBitmapSerdeFactory(org.apache.druid.segment.data.BitmapSerde.DefaultBitmapSerdeFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) ClientCompactionTaskQuery(org.apache.druid.client.indexing.ClientCompactionTaskQuery) ClientCompactionTaskDimensionsSpec(org.apache.druid.client.indexing.ClientCompactionTaskDimensionsSpec) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Test(org.junit.Test)
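
The round trip above relies on the injectables registered by setupInjectablesInObjectMapper because it deserializes a full Task. For the transform spec on its own, a plain mapper is likely sufficient since the filter types are resolved through Jackson annotations; a minimal sketch under that assumption (not part of the original test):

// inside a test method that declares throws IOException
ObjectMapper mapper = new DefaultObjectMapper();
ClientCompactionTaskTransformSpec spec = new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null));
String json = mapper.writeValueAsString(spec);
ClientCompactionTaskTransformSpec roundTripped = mapper.readValue(json, ClientCompactionTaskTransformSpec.class);
Assert.assertEquals(spec, roundTripped);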

Example 8 with ClientCompactionTaskTransformSpec

Use of org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec in project druid by druid-io.

In the class AbstractBatchIndexTask, the method compactionStateAnnotateFunction:

public static Function<Set<DataSegment>, Set<DataSegment>> compactionStateAnnotateFunction(boolean storeCompactionState, TaskToolbox toolbox, IngestionSpec ingestionSpec) {
    if (storeCompactionState) {
        TuningConfig tuningConfig = ingestionSpec.getTuningConfig();
        GranularitySpec granularitySpec = ingestionSpec.getDataSchema().getGranularitySpec();
        // We do not need to store dimensionExclusions and spatialDimensions since auto compaction does not support them
        DimensionsSpec dimensionsSpec = ingestionSpec.getDataSchema().getDimensionsSpec() == null ? null : new DimensionsSpec(ingestionSpec.getDataSchema().getDimensionsSpec().getDimensions());
        // We only need to store the filter since that is the only field auto compaction supports
        Map<String, Object> transformSpec =
            ingestionSpec.getDataSchema().getTransformSpec() == null || TransformSpec.NONE.equals(ingestionSpec.getDataSchema().getTransformSpec())
                ? null
                : new ClientCompactionTaskTransformSpec(ingestionSpec.getDataSchema().getTransformSpec().getFilter()).asMap(toolbox.getJsonMapper());
        List<Object> metricsSpec = ingestionSpec.getDataSchema().getAggregators() == null ? null : toolbox.getJsonMapper().convertValue(ingestionSpec.getDataSchema().getAggregators(), new TypeReference<List<Object>>() {
        });
        final CompactionState compactionState = new CompactionState(tuningConfig.getPartitionsSpec(), dimensionsSpec, metricsSpec, transformSpec, tuningConfig.getIndexSpec().asMap(toolbox.getJsonMapper()), granularitySpec.asMap(toolbox.getJsonMapper()));
        return segments -> segments.stream().map(s -> s.withLastCompactionState(compactionState)).collect(Collectors.toSet());
    } else {
        return Function.identity();
    }
}
Also used : Function(java.util.function.Function) Set(java.util.Set) Map(java.util.Map) List(java.util.List) Collectors(java.util.stream.Collectors) TypeReference(com.fasterxml.jackson.core.type.TypeReference) TaskToolbox(org.apache.druid.indexing.common.TaskToolbox) IngestionSpec(org.apache.druid.segment.indexing.IngestionSpec) TuningConfig(org.apache.druid.segment.indexing.TuningConfig) GranularitySpec(org.apache.druid.segment.indexing.granularity.GranularitySpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) TransformSpec(org.apache.druid.segment.transform.TransformSpec) ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) CompactionState(org.apache.druid.timeline.CompactionState) DataSegment(org.apache.druid.timeline.DataSegment)
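
The call site of the returned function is not shown here; a sketch of how it might be applied, where toolbox, ingestionSpec, and pushedSegments are placeholders rather than values from the source:

// annotate pushed segments with the compaction state derived from the ingestion spec
Function<Set<DataSegment>, Set<DataSegment>> annotate =
    AbstractBatchIndexTask.compactionStateAnnotateFunction(true, toolbox, ingestionSpec);
Set<DataSegment> annotated = annotate.apply(pushedSegments);
// each returned segment now carries the CompactionState set via withLastCompactionState(...)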

Example 9 with ClientCompactionTaskTransformSpec

Use of org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec in project druid by druid-io.

In the class CompactionTaskParallelRunTest, the method testRunCompactionWithFilterShouldStoreInState:

@Test
public void testRunCompactionWithFilterShouldStoreInState() throws Exception {
    runIndexTask(null, true);
    final Builder builder = new Builder(DATA_SOURCE, getSegmentCacheManagerFactory(), RETRY_POLICY_FACTORY);
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(AbstractParallelIndexSupervisorTaskTest.DEFAULT_TUNING_CONFIG_FOR_PARALLEL_INDEXING)
        .transformSpec(new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim", "a", null)))
        .build();
    final Set<DataSegment> compactedSegments = runTask(compactionTask);
    Assert.assertEquals(3, compactedSegments.size());
    for (DataSegment segment : compactedSegments) {
        Assert.assertSame(lockGranularity == LockGranularity.TIME_CHUNK ? NumberedShardSpec.class : NumberedOverwriteShardSpec.class, segment.getShardSpec().getClass());
        Map<String, String> expectedLongSumMetric = new HashMap<>();
        expectedLongSumMetric.put("type", "longSum");
        expectedLongSumMetric.put("name", "val");
        expectedLongSumMetric.put("fieldName", "val");
        expectedLongSumMetric.put("expression", null);
        CompactionState expectedState = new CompactionState(
            new DynamicPartitionsSpec(null, Long.MAX_VALUE),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
            ImmutableList.of(expectedLongSumMetric),
            getObjectMapper().readValue(getObjectMapper().writeValueAsString(compactionTask.getTransformSpec()), Map.class),
            compactionTask.getTuningConfig().getIndexSpec().asMap(getObjectMapper()),
            getObjectMapper().readValue(
                getObjectMapper().writeValueAsString(
                    new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, true, ImmutableList.of(segment.getInterval()))
                ),
                Map.class
            )
        );
        Assert.assertEquals(expectedState, segment.getLastCompactionState());
    }
}
Also used : HashMap(java.util.HashMap) Builder(org.apache.druid.indexing.common.task.CompactionTask.Builder) ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) DataSegment(org.apache.druid.timeline.DataSegment) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) NumberedOverwriteShardSpec(org.apache.druid.timeline.partition.NumberedOverwriteShardSpec) CompactionState(org.apache.druid.timeline.CompactionState) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) HashBasedNumberedShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedShardSpec) AbstractParallelIndexSupervisorTaskTest(org.apache.druid.indexing.common.task.batch.parallel.AbstractParallelIndexSupervisorTaskTest) Test(org.junit.Test)
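
The stored state can also be read back from a compacted segment and compared against the client-side spec; a short sketch, assuming the segment was produced by the compaction task above:

CompactionState state = segment.getLastCompactionState();
ClientCompactionTaskTransformSpec stored =
    getObjectMapper().convertValue(state.getTransformSpec(), ClientCompactionTaskTransformSpec.class);
Assert.assertEquals(new SelectorDimFilter("dim", "a", null), stored.getFilter());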

Example 10 with ClientCompactionTaskTransformSpec

Use of org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec in project druid by druid-io.

In the class NewestSegmentFirstIterator, the method needsCompaction:

private boolean needsCompaction(DataSourceCompactionConfig config, SegmentsToCompact candidates) {
    Preconditions.checkState(!candidates.isEmpty(), "Empty candidates");
    final ClientCompactionTaskQueryTuningConfig tuningConfig = ClientCompactionTaskQueryTuningConfig.from(config.getTuningConfig(), config.getMaxRowsPerSegment());
    final PartitionsSpec partitionsSpecFromConfig = findPartitionsSpecFromConfig(tuningConfig);
    final CompactionState lastCompactionState = candidates.segments.get(0).getLastCompactionState();
    if (lastCompactionState == null) {
        log.info("Candidate segment[%s] is not compacted yet. Needs compaction.", candidates.segments.get(0).getId());
        return true;
    }
    final boolean allCandidatesHaveSameLastCompactionState = candidates.segments.stream().allMatch(segment -> lastCompactionState.equals(segment.getLastCompactionState()));
    if (!allCandidatesHaveSameLastCompactionState) {
        log.info("[%s] Candidate segments were compacted with different partitions spec. Needs compaction.", candidates.segments.size());
        log.debugSegments(candidates.segments, "Candidate segments compacted with different partition spec");
        return true;
    }
    final PartitionsSpec segmentPartitionsSpec = lastCompactionState.getPartitionsSpec();
    final IndexSpec segmentIndexSpec = objectMapper.convertValue(lastCompactionState.getIndexSpec(), IndexSpec.class);
    final IndexSpec configuredIndexSpec;
    if (tuningConfig.getIndexSpec() == null) {
        configuredIndexSpec = new IndexSpec();
    } else {
        configuredIndexSpec = tuningConfig.getIndexSpec();
    }
    if (!Objects.equals(partitionsSpecFromConfig, segmentPartitionsSpec)) {
        log.info("Configured partitionsSpec[%s] is differenet from " + "the partitionsSpec[%s] of segments. Needs compaction.", partitionsSpecFromConfig, segmentPartitionsSpec);
        return true;
    }
    // segmentIndexSpec cannot be null.
    if (!segmentIndexSpec.equals(configuredIndexSpec)) {
        log.info("Configured indexSpec[%s] is different from the one[%s] of segments. Needs compaction", configuredIndexSpec, segmentIndexSpec);
        return true;
    }
    if (config.getGranularitySpec() != null) {
        final ClientCompactionTaskGranularitySpec existingGranularitySpec = lastCompactionState.getGranularitySpec() != null ? objectMapper.convertValue(lastCompactionState.getGranularitySpec(), ClientCompactionTaskGranularitySpec.class) : null;
        // Checks for segmentGranularity
        if (config.getGranularitySpec().getSegmentGranularity() != null) {
            final Granularity existingSegmentGranularity = existingGranularitySpec != null ? existingGranularitySpec.getSegmentGranularity() : null;
            if (existingSegmentGranularity == null) {
                // Candidate segments were all compacted without segment granularity set.
                // We need to check if all segments have the same segment granularity as the configured segment granularity.
                boolean needsCompaction = candidates.segments.stream().anyMatch(segment -> !config.getGranularitySpec().getSegmentGranularity().isAligned(segment.getInterval()));
                if (needsCompaction) {
                    log.info("Segments were previously compacted but without segmentGranularity in auto compaction." + " Configured segmentGranularity[%s] is different from granularity implied by segment intervals. Needs compaction", config.getGranularitySpec().getSegmentGranularity());
                    return true;
                }
            } else if (!config.getGranularitySpec().getSegmentGranularity().equals(existingSegmentGranularity)) {
                log.info("Configured segmentGranularity[%s] is different from the segmentGranularity[%s] of segments. Needs compaction", config.getGranularitySpec().getSegmentGranularity(), existingSegmentGranularity);
                return true;
            }
        }
        // Checks for rollup
        if (config.getGranularitySpec().isRollup() != null) {
            final Boolean existingRollup = existingGranularitySpec != null ? existingGranularitySpec.isRollup() : null;
            if (existingRollup == null || !config.getGranularitySpec().isRollup().equals(existingRollup)) {
                log.info("Configured rollup[%s] is different from the rollup[%s] of segments. Needs compaction", config.getGranularitySpec().isRollup(), existingRollup);
                return true;
            }
        }
        // Checks for queryGranularity
        if (config.getGranularitySpec().getQueryGranularity() != null) {
            final Granularity existingQueryGranularity = existingGranularitySpec != null ? existingGranularitySpec.getQueryGranularity() : null;
            if (!config.getGranularitySpec().getQueryGranularity().equals(existingQueryGranularity)) {
                log.info("Configured queryGranularity[%s] is different from the queryGranularity[%s] of segments. Needs compaction", config.getGranularitySpec().getQueryGranularity(), existingQueryGranularity);
                return true;
            }
        }
    }
    if (config.getDimensionsSpec() != null) {
        final DimensionsSpec existingDimensionsSpec = lastCompactionState.getDimensionsSpec();
        // Checks for list of dimensions
        if (config.getDimensionsSpec().getDimensions() != null) {
            final List<DimensionSchema> existingDimensions = existingDimensionsSpec != null ? existingDimensionsSpec.getDimensions() : null;
            if (!config.getDimensionsSpec().getDimensions().equals(existingDimensions)) {
                log.info("Configured dimensionsSpec is different from the dimensionsSpec of segments. Needs compaction");
                return true;
            }
        }
    }
    if (config.getTransformSpec() != null) {
        final ClientCompactionTaskTransformSpec existingTransformSpec = lastCompactionState.getTransformSpec() != null ? objectMapper.convertValue(lastCompactionState.getTransformSpec(), ClientCompactionTaskTransformSpec.class) : null;
        // Checks for filters
        if (config.getTransformSpec().getFilter() != null) {
            final DimFilter existingFilters = existingTransformSpec != null ? existingTransformSpec.getFilter() : null;
            if (!config.getTransformSpec().getFilter().equals(existingFilters)) {
                log.info("Configured filter[%s] is different from the filter[%s] of segments. Needs compaction", config.getTransformSpec().getFilter(), existingFilters);
                return true;
            }
        }
    }
    if (ArrayUtils.isNotEmpty(config.getMetricsSpec())) {
        final AggregatorFactory[] existingMetricsSpec = lastCompactionState.getMetricsSpec() == null || lastCompactionState.getMetricsSpec().isEmpty() ? null : objectMapper.convertValue(lastCompactionState.getMetricsSpec(), AggregatorFactory[].class);
        if (existingMetricsSpec == null || !Arrays.deepEquals(config.getMetricsSpec(), existingMetricsSpec)) {
            log.info("Configured metricsSpec[%s] is different from the metricsSpec[%s] of segments. Needs compaction", Arrays.toString(config.getMetricsSpec()), Arrays.toString(existingMetricsSpec));
            return true;
        }
    }
    return false;
}
Also used : IndexSpec(org.apache.druid.segment.IndexSpec) ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) ClientCompactionTaskGranularitySpec(org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec) Granularity(org.apache.druid.java.util.common.granularity.Granularity) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema) ClientCompactionTaskQueryTuningConfig(org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) PartitionsSpec(org.apache.druid.indexer.partitions.PartitionsSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) CompactionState(org.apache.druid.timeline.CompactionState) DimFilter(org.apache.druid.query.filter.DimFilter)
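
The transform-spec branch above boils down to one comparison between the configured filter and the filter recovered from the stored state; a sketch of that comparison in isolation, with config, lastCompactionState, and objectMapper as placeholders:

ClientCompactionTaskTransformSpec existingTransformSpec =
    lastCompactionState.getTransformSpec() == null
        ? null
        : objectMapper.convertValue(lastCompactionState.getTransformSpec(), ClientCompactionTaskTransformSpec.class);
DimFilter existingFilter = existingTransformSpec == null ? null : existingTransformSpec.getFilter();
// a differing configured filter means the candidates need compaction again
boolean needsCompaction = !config.getTransformSpec().getFilter().equals(existingFilter);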

Aggregations

ClientCompactionTaskTransformSpec (org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec): 11 usages
Test (org.junit.Test): 7 usages
ClientCompactionTaskGranularitySpec (org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec): 6 usages
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 6 usages
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 5 usages
DynamicPartitionsSpec (org.apache.druid.indexer.partitions.DynamicPartitionsSpec): 5 usages
ArrayList (java.util.ArrayList): 4 usages
Granularity (org.apache.druid.java.util.common.granularity.Granularity): 4 usages
IndexSpec (org.apache.druid.segment.IndexSpec): 4 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 3 usages
Map (java.util.Map): 3 usages
ClientCompactionTaskDimensionsSpec (org.apache.druid.client.indexing.ClientCompactionTaskDimensionsSpec): 3 usages
ClientCompactionTaskQueryTuningConfig (org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig): 3 usages
CompactionState (org.apache.druid.timeline.CompactionState): 3 usages
DataSegment (org.apache.druid.timeline.DataSegment): 3 usages
ImmutableList (com.google.common.collect.ImmutableList): 2 usages
HashMap (java.util.HashMap): 2 usages
List (java.util.List): 2 usages
HttpIndexingServiceClient (org.apache.druid.client.indexing.HttpIndexingServiceClient): 2 usages
Builder (org.apache.druid.indexing.common.task.CompactionTask.Builder): 2 usages