
Example 76 with IndexSpec

Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.

From the class KafkaIndexTaskTuningConfigTest, the method testSerdeWithDefaults:

@Test
public void testSerdeWithDefaults() throws Exception {
    String jsonStr = "{\"type\": \"kafka\"}";
    KafkaIndexTaskTuningConfig config = (KafkaIndexTaskTuningConfig) mapper.readValue(mapper.writeValueAsString(mapper.readValue(jsonStr, TuningConfig.class)), TuningConfig.class);
    Assert.assertNotNull(config.getBasePersistDirectory());
    Assert.assertEquals(new OnheapIncrementalIndex.Spec(), config.getAppendableIndexSpec());
    Assert.assertEquals(1000000, config.getMaxRowsInMemory());
    Assert.assertEquals(5_000_000, config.getMaxRowsPerSegment().intValue());
    Assert.assertNull(config.getMaxTotalRows());
    Assert.assertEquals(new Period("PT10M"), config.getIntermediatePersistPeriod());
    Assert.assertEquals(0, config.getMaxPendingPersists());
    Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
    Assert.assertEquals(new IndexSpec(), config.getIndexSpecForIntermediatePersists());
    Assert.assertEquals(false, config.isReportParseExceptions());
    Assert.assertEquals(0, config.getHandoffConditionTimeout());
}
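
A note on the pattern: the double round trip above (read the JSON, write it back out, read it again) verifies that a config built entirely from defaults survives serialization losslessly. A minimal generic sketch of that pattern, with a hypothetical helper name (roundTrip) and assuming a Jackson ObjectMapper registered with the relevant TuningConfig subtypes:

// Hypothetical helper illustrating the read-write-read pattern used above.
// Requires com.fasterxml.jackson.databind.ObjectMapper and java.io.IOException.
static <T> T roundTrip(ObjectMapper mapper, String json, Class<T> clazz) throws IOException {
    T first = mapper.readValue(json, clazz);                 // JSON -> object
    String reserialized = mapper.writeValueAsString(first);  // object -> JSON
    return mapper.readValue(reserialized, clazz);            // JSON -> object again
}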

Example 77 with IndexSpec

Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.

From the class KafkaIndexTaskTuningConfigTest, the method testSerdeWithNonDefaults:

@Test
public void testSerdeWithNonDefaults() throws Exception {
    String jsonStr = "{\n"
                     + "  \"type\": \"kafka\",\n"
                     + "  \"basePersistDirectory\": \"/tmp/xxx\",\n"
                     + "  \"maxRowsInMemory\": 100,\n"
                     + "  \"maxRowsPerSegment\": 100,\n"
                     + "  \"maxTotalRows\": 1000,\n"
                     + "  \"intermediatePersistPeriod\": \"PT1H\",\n"
                     + "  \"maxPendingPersists\": 100,\n"
                     + "  \"reportParseExceptions\": true,\n"
                     + "  \"handoffConditionTimeout\": 100,\n"
                     + "  \"indexSpec\": { \"metricCompression\" : \"NONE\" },\n"
                     + "  \"indexSpecForIntermediatePersists\": { \"dimensionCompression\" : \"uncompressed\" },\n"
                     + "  \"appendableIndexSpec\": { \"type\" : \"onheap\" }\n"
                     + "}";
    KafkaIndexTaskTuningConfig config = (KafkaIndexTaskTuningConfig) mapper.readValue(mapper.writeValueAsString(mapper.readValue(jsonStr, TuningConfig.class)), TuningConfig.class);
    Assert.assertEquals(new File("/tmp/xxx"), config.getBasePersistDirectory());
    Assert.assertEquals(new OnheapIncrementalIndex.Spec(), config.getAppendableIndexSpec());
    Assert.assertEquals(100, config.getMaxRowsInMemory());
    Assert.assertEquals(100, config.getMaxRowsPerSegment().intValue());
    Assert.assertNotEquals(null, config.getMaxTotalRows());
    Assert.assertEquals(1000, config.getMaxTotalRows().longValue());
    Assert.assertEquals(new Period("PT1H"), config.getIntermediatePersistPeriod());
    Assert.assertEquals(100, config.getMaxPendingPersists());
    Assert.assertEquals(true, config.isReportParseExceptions());
    Assert.assertEquals(100, config.getHandoffConditionTimeout());
    Assert.assertEquals(new IndexSpec(null, null, CompressionStrategy.NONE, null), config.getIndexSpec());
    Assert.assertEquals(new IndexSpec(null, CompressionStrategy.UNCOMPRESSED, null, null), config.getIndexSpecForIntermediatePersists());
}
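
The last two assertions imply the constructor layout: this IndexSpec constructor takes (bitmapSerdeFactory, dimensionCompression, metricCompression, longEncoding), and a null argument keeps that component's default. A short restatement of how the two JSON fields map onto constructor arguments (variable names here are illustrative only):

// "indexSpec": { "metricCompression": "NONE" } corresponds to the third argument.
IndexSpec metricCompressionNone = new IndexSpec(null, null, CompressionStrategy.NONE, null);
// "indexSpecForIntermediatePersists": { "dimensionCompression": "uncompressed" } corresponds to the second.
IndexSpec dimensionCompressionUncompressed = new IndexSpec(null, CompressionStrategy.UNCOMPRESSED, null, null);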

Example 78 with IndexSpec

Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.

From the class HadoopTuningConfigTest, the method testSerde:

@Test
public void testSerde() throws Exception {
    HadoopTuningConfig expected = new HadoopTuningConfig("/tmp/workingpath", "version", null, null, null, null, null, 100, null, true, true, true, true, null, true, true, null, null, true, true, null, null, null, null, null);
    HadoopTuningConfig actual = jsonReadWriteRead(JSON_MAPPER.writeValueAsString(expected), HadoopTuningConfig.class);
    Assert.assertEquals("/tmp/workingpath", actual.getWorkingPath());
    Assert.assertEquals("version", actual.getVersion());
    Assert.assertEquals(new OnheapIncrementalIndex.Spec(), actual.getAppendableIndexSpec());
    Assert.assertNotNull(actual.getPartitionsSpec());
    Assert.assertEquals(ImmutableMap.<Long, List<HadoopyShardSpec>>of(), actual.getShardSpecs());
    Assert.assertEquals(new IndexSpec(), actual.getIndexSpec());
    Assert.assertEquals(new IndexSpec(), actual.getIndexSpecForIntermediatePersists());
    Assert.assertEquals(100, actual.getMaxRowsInMemory());
    Assert.assertEquals(true, actual.isLeaveIntermediate());
    Assert.assertEquals(true, actual.isCleanupOnFailure());
    Assert.assertEquals(true, actual.isOverwriteFiles());
    Assert.assertEquals(true, actual.isIgnoreInvalidRows());
    Assert.assertEquals(ImmutableMap.<String, String>of(), actual.getJobProperties());
    Assert.assertEquals(true, actual.isCombineText());
    Assert.assertEquals(true, actual.getUseCombiner());
    Assert.assertEquals(0, actual.getNumBackgroundPersistThreads());
    Assert.assertEquals(true, actual.isForceExtendableShardSpecs());
    Assert.assertEquals(true, actual.isUseExplicitVersion());
    Assert.assertEquals(0, actual.getAwaitSegmentAvailabilityTimeoutMillis());
}
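
jsonReadWriteRead is a private helper defined elsewhere in HadoopTuningConfigTest and is not shown on this page. A plausible sketch of it, assuming it simply round-trips through the test's static JSON_MAPPER:

// Assumed implementation of the helper used above, not the verbatim source.
private static <T> T jsonReadWriteRead(String s, Class<T> clazz) {
    try {
        // read JSON -> object, write object -> bytes, read bytes -> object,
        // so both directions of serde are exercised before asserting.
        return JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsBytes(JSON_MAPPER.readValue(s, clazz)), clazz);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}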

Example 79 with IndexSpec

Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.

From the class BaseFilterTest, the method makeConstructors:

public static Collection<Object[]> makeConstructors() {
    final List<Object[]> constructors = new ArrayList<>();
    final Map<String, BitmapSerdeFactory> bitmapSerdeFactories = ImmutableMap.of("concise", new ConciseBitmapSerdeFactory(), "roaring", new RoaringBitmapSerdeFactory(true));
    final Map<String, SegmentWriteOutMediumFactory> segmentWriteOutMediumFactories = ImmutableMap.of("tmpFile segment write-out medium", TmpFileSegmentWriteOutMediumFactory.instance(), "off-heap memory segment write-out medium", OffHeapMemorySegmentWriteOutMediumFactory.instance());
    final Map<String, Function<IndexBuilder, Pair<StorageAdapter, Closeable>>> finishers = ImmutableMap.<String, Function<IndexBuilder, Pair<StorageAdapter, Closeable>>>builder().put("incremental", input -> {
        final IncrementalIndex index = input.buildIncrementalIndex();
        return Pair.of(new IncrementalIndexStorageAdapter(index), index);
    }).put("mmapped", input -> {
        final QueryableIndex index = input.buildMMappedIndex();
        return Pair.of(new QueryableIndexStorageAdapter(index), index);
    }).put("mmappedMerged", input -> {
        final QueryableIndex index = input.buildMMappedMergedIndex();
        return Pair.of(new QueryableIndexStorageAdapter(index), index);
    }).put("rowBasedWithoutTypeSignature", input -> Pair.of(input.buildRowBasedSegmentWithoutTypeSignature().asStorageAdapter(), () -> {
    })).put("rowBasedWithTypeSignature", input -> Pair.of(input.buildRowBasedSegmentWithTypeSignature().asStorageAdapter(), () -> {
    })).build();
    for (Map.Entry<String, BitmapSerdeFactory> bitmapSerdeFactoryEntry : bitmapSerdeFactories.entrySet()) {
        for (Map.Entry<String, SegmentWriteOutMediumFactory> segmentWriteOutMediumFactoryEntry : segmentWriteOutMediumFactories.entrySet()) {
            for (Map.Entry<String, Function<IndexBuilder, Pair<StorageAdapter, Closeable>>> finisherEntry : finishers.entrySet()) {
                for (boolean cnf : ImmutableList.of(false, true)) {
                    for (boolean optimize : ImmutableList.of(false, true)) {
                        final String testName = StringUtils.format("bitmaps[%s], indexMerger[%s], finisher[%s], cnf[%s], optimize[%s]", bitmapSerdeFactoryEntry.getKey(), segmentWriteOutMediumFactoryEntry.getKey(), finisherEntry.getKey(), cnf, optimize);
                        final IndexBuilder indexBuilder = IndexBuilder.create().schema(DEFAULT_INDEX_SCHEMA).indexSpec(new IndexSpec(bitmapSerdeFactoryEntry.getValue(), null, null, null)).segmentWriteOutMediumFactory(segmentWriteOutMediumFactoryEntry.getValue());
                        constructors.add(new Object[] { testName, indexBuilder, finisherEntry.getValue(), cnf, optimize });
                    }
                }
            }
        }
    }
    return constructors;
}
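
Note the Object[] layout built above: {testName, indexBuilder, finisher, cnf, optimize}. Each row becomes one constructor invocation of a concrete test subclass under JUnit's Parameterized runner (org.junit.runners.Parameterized is among this class's imports). A minimal sketch of how a subclass would typically expose the feeder, assuming its constructor matches that row layout:

@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() {
    // Each Object[] returned here is matched positionally against the subclass
    // constructor: (testName, indexBuilder, finisher, cnf, optimize);
    // name = "{0}" labels each run after testName.
    return makeConstructors();
}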

Example 80 with IndexSpec

Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.

From the class NewestSegmentFirstIterator, the method needsCompaction:

private boolean needsCompaction(DataSourceCompactionConfig config, SegmentsToCompact candidates) {
    Preconditions.checkState(!candidates.isEmpty(), "Empty candidates");
    final ClientCompactionTaskQueryTuningConfig tuningConfig = ClientCompactionTaskQueryTuningConfig.from(config.getTuningConfig(), config.getMaxRowsPerSegment());
    final PartitionsSpec partitionsSpecFromConfig = findPartitionsSpecFromConfig(tuningConfig);
    final CompactionState lastCompactionState = candidates.segments.get(0).getLastCompactionState();
    if (lastCompactionState == null) {
        log.info("Candidate segment[%s] is not compacted yet. Needs compaction.", candidates.segments.get(0).getId());
        return true;
    }
    final boolean allCandidatesHaveSameLastCompactionState = candidates.segments.stream().allMatch(segment -> lastCompactionState.equals(segment.getLastCompactionState()));
    if (!allCandidatesHaveSameLastCompactionState) {
        log.info("[%s] Candidate segments were compacted with different partitions spec. Needs compaction.", candidates.segments.size());
        log.debugSegments(candidates.segments, "Candidate segments compacted with different partiton spec");
        return true;
    }
    final PartitionsSpec segmentPartitionsSpec = lastCompactionState.getPartitionsSpec();
    final IndexSpec segmentIndexSpec = objectMapper.convertValue(lastCompactionState.getIndexSpec(), IndexSpec.class);
    final IndexSpec configuredIndexSpec;
    if (tuningConfig.getIndexSpec() == null) {
        configuredIndexSpec = new IndexSpec();
    } else {
        configuredIndexSpec = tuningConfig.getIndexSpec();
    }
    if (!Objects.equals(partitionsSpecFromConfig, segmentPartitionsSpec)) {
        log.info("Configured partitionsSpec[%s] is differenet from " + "the partitionsSpec[%s] of segments. Needs compaction.", partitionsSpecFromConfig, segmentPartitionsSpec);
        return true;
    }
    // segmentIndexSpec cannot be null.
    if (!segmentIndexSpec.equals(configuredIndexSpec)) {
        log.info("Configured indexSpec[%s] is different from the one[%s] of segments. Needs compaction", configuredIndexSpec, segmentIndexSpec);
        return true;
    }
    if (config.getGranularitySpec() != null) {
        final ClientCompactionTaskGranularitySpec existingGranularitySpec = lastCompactionState.getGranularitySpec() != null ? objectMapper.convertValue(lastCompactionState.getGranularitySpec(), ClientCompactionTaskGranularitySpec.class) : null;
        // Checks for segmentGranularity
        if (config.getGranularitySpec().getSegmentGranularity() != null) {
            final Granularity existingSegmentGranularity = existingGranularitySpec != null ? existingGranularitySpec.getSegmentGranularity() : null;
            if (existingSegmentGranularity == null) {
                // Candidate segments were all compacted without segment granularity set.
                // We need to check if all segments have the same segment granularity as the configured segment granularity.
                boolean needsCompaction = candidates.segments.stream().anyMatch(segment -> !config.getGranularitySpec().getSegmentGranularity().isAligned(segment.getInterval()));
                if (needsCompaction) {
                    log.info("Segments were previously compacted but without segmentGranularity in auto compaction." + " Configured segmentGranularity[%s] is different from granularity implied by segment intervals. Needs compaction", config.getGranularitySpec().getSegmentGranularity());
                    return true;
                }
            } else if (!config.getGranularitySpec().getSegmentGranularity().equals(existingSegmentGranularity)) {
                log.info("Configured segmentGranularity[%s] is different from the segmentGranularity[%s] of segments. Needs compaction", config.getGranularitySpec().getSegmentGranularity(), existingSegmentGranularity);
                return true;
            }
        }
        // Checks for rollup
        if (config.getGranularitySpec().isRollup() != null) {
            final Boolean existingRollup = existingGranularitySpec != null ? existingGranularitySpec.isRollup() : null;
            if (existingRollup == null || !config.getGranularitySpec().isRollup().equals(existingRollup)) {
                log.info("Configured rollup[%s] is different from the rollup[%s] of segments. Needs compaction", config.getGranularitySpec().isRollup(), existingRollup);
                return true;
            }
        }
        // Checks for queryGranularity
        if (config.getGranularitySpec().getQueryGranularity() != null) {
            final Granularity existingQueryGranularity = existingGranularitySpec != null ? existingGranularitySpec.getQueryGranularity() : null;
            if (!config.getGranularitySpec().getQueryGranularity().equals(existingQueryGranularity)) {
                log.info("Configured queryGranularity[%s] is different from the queryGranularity[%s] of segments. Needs compaction", config.getGranularitySpec().getQueryGranularity(), existingQueryGranularity);
                return true;
            }
        }
    }
    if (config.getDimensionsSpec() != null) {
        final DimensionsSpec existingDimensionsSpec = lastCompactionState.getDimensionsSpec();
        // Checks for list of dimensions
        if (config.getDimensionsSpec().getDimensions() != null) {
            final List<DimensionSchema> existingDimensions = existingDimensionsSpec != null ? existingDimensionsSpec.getDimensions() : null;
            if (!config.getDimensionsSpec().getDimensions().equals(existingDimensions)) {
                log.info("Configured dimensionsSpec is different from the dimensionsSpec of segments. Needs compaction");
                return true;
            }
        }
    }
    if (config.getTransformSpec() != null) {
        final ClientCompactionTaskTransformSpec existingTransformSpec = lastCompactionState.getTransformSpec() != null ? objectMapper.convertValue(lastCompactionState.getTransformSpec(), ClientCompactionTaskTransformSpec.class) : null;
        // Checks for filters
        if (config.getTransformSpec().getFilter() != null) {
            final DimFilter existingFilters = existingTransformSpec != null ? existingTransformSpec.getFilter() : null;
            if (!config.getTransformSpec().getFilter().equals(existingFilters)) {
                log.info("Configured filter[%s] is different from the filter[%s] of segments. Needs compaction", config.getTransformSpec().getFilter(), existingFilters);
                return true;
            }
        }
    }
    if (ArrayUtils.isNotEmpty(config.getMetricsSpec())) {
        final AggregatorFactory[] existingMetricsSpec = lastCompactionState.getMetricsSpec() == null || lastCompactionState.getMetricsSpec().isEmpty() ? null : objectMapper.convertValue(lastCompactionState.getMetricsSpec(), AggregatorFactory[].class);
        if (existingMetricsSpec == null || !Arrays.deepEquals(config.getMetricsSpec(), existingMetricsSpec)) {
            log.info("Configured metricsSpec[%s] is different from the metricsSpec[%s] of segments. Needs compaction", Arrays.toString(config.getMetricsSpec()), Arrays.toString(existingMetricsSpec));
            return true;
        }
    }
    return false;
}
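
Every check in needsCompaction follows the same shape: normalize the configured value, normalize the value recorded in the segments' last CompactionState, and return true on the first mismatch, treating a null configured value as "no preference". A hypothetical helper (not part of the class) that captures the recurring guard:

// Sketch only: a null configured value expresses no preference, while a
// non-null value that differs from what the segments recorded forces recompaction.
// @Nullable is javax.annotation.Nullable.
private static <T> boolean differsFromExisting(@Nullable T configured, @Nullable T existing) {
    return configured != null && !configured.equals(existing);
}

For example, the queryGranularity check above reduces to a single call: differsFromExisting(config.getGranularitySpec().getQueryGranularity(), existingQueryGranularity).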

Aggregations

Classes most often used alongside IndexSpec across the project, with usage counts:

IndexSpec (org.apache.druid.segment.IndexSpec): 89
Test (org.junit.Test): 59
File (java.io.File): 29
Period (org.joda.time.Period): 28
RoaringBitmapSerdeFactory (org.apache.druid.segment.data.RoaringBitmapSerdeFactory): 25
OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex): 24
ArrayList (java.util.ArrayList): 21
Map (java.util.Map): 20
DataSegment (org.apache.druid.timeline.DataSegment): 20
DynamicPartitionsSpec (org.apache.druid.indexer.partitions.DynamicPartitionsSpec): 19
IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 19
CompactionState (org.apache.druid.timeline.CompactionState): 16
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 15
ImmutableMap (com.google.common.collect.ImmutableMap): 14
PartitionsSpec (org.apache.druid.indexer.partitions.PartitionsSpec): 14
QueryableIndex (org.apache.druid.segment.QueryableIndex): 14
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 13
InputRow (org.apache.druid.data.input.InputRow): 13
Duration (org.joda.time.Duration): 13
DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper): 10