
Example 16 with DynamicPartitionsSpec

Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.

From the class AbstractLocalInputSourceParallelIndexTest, method doIndexTest.

public void doIndexTest(InputFormatDetails inputFormatDetails, @Nonnull Map<String, Object> extraInputFormatMap, Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair) throws Exception {
    final String indexDatasource = "wikipedia_index_test_" + UUID.randomUUID();
    Map<String, Object> inputFormatMap = new ImmutableMap.Builder<String, Object>().putAll(extraInputFormatMap).put("type", inputFormatDetails.getInputFormatType()).build();
    try (final Closeable ignored1 = unloader(indexDatasource + config.getExtraDatasourceNameSuffix())) {
        final Function<String, String> sqlInputSourcePropsTransform = spec -> {
            try {
                spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
                spec = StringUtils.replace(spec, "%%INPUT_SOURCE_FILTER%%", "*" + inputFormatDetails.getFileExtension());
                spec = StringUtils.replace(spec, "%%INPUT_SOURCE_BASE_DIR%%", "/resources/data/batch_index" + inputFormatDetails.getFolderSuffix());
                spec = StringUtils.replace(spec, "%%INPUT_FORMAT%%", jsonMapper.writeValueAsString(inputFormatMap));
                spec = StringUtils.replace(spec, "%%APPEND_TO_EXISTING%%", jsonMapper.writeValueAsString(false));
                spec = StringUtils.replace(spec, "%%DROP_EXISTING%%", jsonMapper.writeValueAsString(false));
                spec = StringUtils.replace(spec, "%%FORCE_GUARANTEED_ROLLUP%%", jsonMapper.writeValueAsString(false));
                return spec;
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        };
        doIndexTest(indexDatasource, INDEX_TASK, sqlInputSourcePropsTransform, INDEX_QUERIES_RESOURCE, false, true, true, segmentAvailabilityConfirmationPair);
    }
}
Also used: ImmutableMap(com.google.common.collect.ImmutableMap) Closeable(java.io.Closeable) Map(java.util.Map) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) StringUtils(org.apache.druid.java.util.common.StringUtils) UUID(java.util.UUID) Nonnull(javax.annotation.Nonnull) Function(java.util.function.Function) Pair(org.apache.druid.java.util.common.Pair)
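
For context, the %%PARTITIONS_SPEC%% placeholder above is filled with the JSON serialization of a default DynamicPartitionsSpec. A minimal standalone sketch of that serialization (assuming jackson-databind and the Druid indexing artifacts are on the classpath; the exact default values depend on the Druid version):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;

public class DynamicPartitionsSpecJsonDemo {
    public static void main(String[] args) throws Exception {
        // Null arguments keep Druid's defaults for maxRowsPerSegment and
        // maxTotalRows; the spec serializes with "type": "dynamic".
        DynamicPartitionsSpec spec = new DynamicPartitionsSpec(null, null);
        // Prints something like {"type":"dynamic","maxRowsPerSegment":5000000,"maxTotalRows":null}
        System.out.println(new ObjectMapper().writeValueAsString(spec));
    }
}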

Example 17 with DynamicPartitionsSpec

Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.

From the class AbstractHdfsInputSourceParallelIndexTest, method doTest.

void doTest(Pair<String, List> hdfsInputSource, InputFormatDetails inputFormatDetails, Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair) throws Exception {
    final String indexDatasource = "wikipedia_index_test_" + UUID.randomUUID();
    try (final Closeable ignored1 = unloader(indexDatasource + config.getExtraDatasourceNameSuffix())) {
        final Function<String, String> hdfsPropsTransform = spec -> {
            try {
                spec = StringUtils.replace(spec, "%%INPUT_SOURCE_TYPE%%", "hdfs");
                spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
                spec = StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_KEY%%", hdfsInputSource.lhs);
                spec = StringUtils.replace(spec, "%%INPUT_FORMAT_TYPE%%", inputFormatDetails.getInputFormatType());
                spec = StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_VALUE%%", jsonMapper.writeValueAsString(hdfsInputSource.rhs));
                spec = StringUtils.replace(spec, "%%FOLDER_SUFFIX%%", inputFormatDetails.getFolderSuffix());
                spec = StringUtils.replace(spec, "%%FILE_EXTENSION%%", inputFormatDetails.getFileExtension());
                return spec;
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        };
        doIndexTest(indexDatasource, INDEX_TASK, hdfsPropsTransform, INDEX_QUERIES_RESOURCE, false, true, true, segmentAvailabilityConfirmationPair);
    }
}
Also used: List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) DataProvider(org.testng.annotations.DataProvider) Closeable(java.io.Closeable) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) StringUtils(org.apache.druid.java.util.common.StringUtils) UUID(java.util.UUID) Function(java.util.function.Function) Pair(org.apache.druid.java.util.common.Pair)
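
The hdfsInputSource pair parameterizes which inputSource property is set: its lhs becomes the property key and its rhs the JSON-encoded value. A hypothetical example of what a data provider might supply (the host name and file path below are illustrative, not taken from the test above):

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.apache.druid.java.util.common.Pair;

public class HdfsInputSourceParamDemo {
    public static void main(String[] args) {
        // With the transform above, this pair yields an inputSource such as:
        // "inputSource": {"type": "hdfs", "paths": ["hdfs://..."]}
        Pair<String, List<String>> hdfsInputSource = new Pair<>(
            "paths",
            ImmutableList.of("hdfs://namenode:9000/batch_index/json/wikipedia_index_data1.json")
        );
        System.out.println(hdfsInputSource.lhs + " -> " + hdfsInputSource.rhs);
    }
}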

Example 18 with DynamicPartitionsSpec

Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.

From the class ITAutoCompactionLockContentionTest, method verifyCompactedIntervals.

/**
 * Verifies that the given intervals have been compacted.
 */
private void verifyCompactedIntervals(Interval... compactedIntervals) {
    List<DataSegment> segments = coordinator.getFullSegmentsMetadata(fullDatasourceName);
    List<DataSegment> observedCompactedSegments = new ArrayList<>();
    Set<Interval> observedCompactedIntervals = new HashSet<>();
    for (DataSegment segment : segments) {
        if (segment.getLastCompactionState() != null) {
            observedCompactedSegments.add(segment);
            observedCompactedIntervals.add(segment.getInterval());
        }
    }
    Set<Interval> expectedCompactedIntervals = new HashSet<>(Arrays.asList(compactedIntervals));
    Assert.assertEquals(observedCompactedIntervals, expectedCompactedIntervals);
    DynamicPartitionsSpec expectedPartitionSpec = new DynamicPartitionsSpec(Specs.MAX_ROWS_PER_SEGMENT, Long.MAX_VALUE);
    for (DataSegment compactedSegment : observedCompactedSegments) {
        Assert.assertNotNull(compactedSegment.getLastCompactionState());
        Assert.assertEquals(compactedSegment.getLastCompactionState().getPartitionsSpec(), expectedPartitionSpec);
    }
}
Also used: DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) ArrayList(java.util.ArrayList) DataSegment(org.apache.druid.timeline.DataSegment) Interval(org.joda.time.Interval) HashSet(java.util.HashSet)
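
Note that the expected spec pins maxTotalRows to Long.MAX_VALUE rather than leaving it null, matching what compaction writes into the segment's lastCompactionState; DynamicPartitionsSpec equality compares the stored field values, so the two forms are not interchangeable. A minimal sketch of that distinction (the row limit 5000 is an arbitrary stand-in for Specs.MAX_ROWS_PER_SEGMENT, whose value is defined elsewhere):

import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;

public class CompactionStateEqualityDemo {
    public static void main(String[] args) {
        DynamicPartitionsSpec fromCompactionState = new DynamicPartitionsSpec(5000, Long.MAX_VALUE);
        DynamicPartitionsSpec withNullLimit = new DynamicPartitionsSpec(5000, null);
        // Prints false: a null maxTotalRows is not equal to an explicit Long.MAX_VALUE.
        System.out.println(fromCompactionState.equals(withNullLimit));
    }
}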

Example 19 with DynamicPartitionsSpec

Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.

From the class ITAutoCompactionUpgradeTest, method testUpgradeAutoCompactionConfigurationWhenConfigurationFromOlderVersionAlreadyExist.

@Test
public void testUpgradeAutoCompactionConfigurationWhenConfigurationFromOlderVersionAlreadyExist() throws Exception {
    // Verify that a compaction config already exists. This config was inserted manually into the database using a SQL script.
    // This auto-compaction configuration payload is from Druid 0.21.0.
    CoordinatorCompactionConfig coordinatorCompactionConfig = compactionResource.getCoordinatorCompactionConfigs();
    DataSourceCompactionConfig foundDataSourceCompactionConfig = null;
    for (DataSourceCompactionConfig dataSourceCompactionConfig : coordinatorCompactionConfig.getCompactionConfigs()) {
        if (dataSourceCompactionConfig.getDataSource().equals(UPGRADE_DATASOURCE_NAME)) {
            foundDataSourceCompactionConfig = dataSourceCompactionConfig;
        }
    }
    Assert.assertNotNull(foundDataSourceCompactionConfig);
    // Now submit a new auto compaction configuration
    PartitionsSpec newPartitionsSpec = new DynamicPartitionsSpec(4000, null);
    Period newSkipOffset = Period.seconds(0);
    DataSourceCompactionConfig compactionConfig = new DataSourceCompactionConfig(
        UPGRADE_DATASOURCE_NAME,
        null,
        null,
        null,
        newSkipOffset,
        new UserCompactionTaskQueryTuningConfig(null, null, null, new MaxSizeSplitHintSpec(null, 1), newPartitionsSpec, null, null, null, null, null, 1, null, null, null, null, null, 1),
        new UserCompactionTaskGranularityConfig(Granularities.YEAR, null, null),
        null,
        null,
        null,
        new UserCompactionTaskIOConfig(true),
        null
    );
    compactionResource.submitCompactionConfig(compactionConfig);
    // Wait for compaction config to persist
    Thread.sleep(2000);
    // Verify that compaction was successfully updated
    coordinatorCompactionConfig = compactionResource.getCoordinatorCompactionConfigs();
    foundDataSourceCompactionConfig = null;
    for (DataSourceCompactionConfig dataSourceCompactionConfig : coordinatorCompactionConfig.getCompactionConfigs()) {
        if (dataSourceCompactionConfig.getDataSource().equals(UPGRADE_DATASOURCE_NAME)) {
            foundDataSourceCompactionConfig = dataSourceCompactionConfig;
        }
    }
    Assert.assertNotNull(foundDataSourceCompactionConfig);
    Assert.assertNotNull(foundDataSourceCompactionConfig.getTuningConfig());
    Assert.assertEquals(foundDataSourceCompactionConfig.getTuningConfig().getPartitionsSpec(), newPartitionsSpec);
    Assert.assertEquals(foundDataSourceCompactionConfig.getSkipOffsetFromLatest(), newSkipOffset);
}
Also used: CoordinatorCompactionConfig(org.apache.druid.server.coordinator.CoordinatorCompactionConfig) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) DataSourceCompactionConfig(org.apache.druid.server.coordinator.DataSourceCompactionConfig) PartitionsSpec(org.apache.druid.indexer.partitions.PartitionsSpec) UserCompactionTaskIOConfig(org.apache.druid.server.coordinator.UserCompactionTaskIOConfig) Period(org.joda.time.Period) UserCompactionTaskQueryTuningConfig(org.apache.druid.server.coordinator.UserCompactionTaskQueryTuningConfig) UserCompactionTaskGranularityConfig(org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig) MaxSizeSplitHintSpec(org.apache.druid.data.input.MaxSizeSplitHintSpec) Test(org.testng.annotations.Test) AbstractIndexerTest(org.apache.druid.tests.indexer.AbstractIndexerTest)
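
The datasource lookup loop appears twice in this test; a hypothetical helper could factor it out, using only the accessors shown above (the class and method names below are illustrative, not part of the Druid test suite):

import javax.annotation.Nullable;
import org.apache.druid.server.coordinator.CoordinatorCompactionConfig;
import org.apache.druid.server.coordinator.DataSourceCompactionConfig;

public final class CompactionConfigLookup {
    // Returns the config for the given datasource, or null when absent,
    // mirroring the loop-and-assertNotNull pattern used in the test above.
    @Nullable
    public static DataSourceCompactionConfig findConfig(CoordinatorCompactionConfig coordinatorConfig, String dataSource) {
        for (DataSourceCompactionConfig config : coordinatorConfig.getCompactionConfigs()) {
            if (config.getDataSource().equals(dataSource)) {
                return config;
            }
        }
        return null;
    }
}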

Example 20 with DynamicPartitionsSpec

Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.

From the class ITAppendBatchIndexTest, method submitIngestionTaskAndVerify.

private void submitIngestionTaskAndVerify(String indexDatasource, PartitionsSpec partitionsSpec, boolean appendToExisting, Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair) throws Exception {
    InputFormatDetails inputFormatDetails = InputFormatDetails.JSON;
    Map<String, Object> inputFormatMap = new ImmutableMap.Builder<String, Object>().put("type", inputFormatDetails.getInputFormatType()).build();
    final Function<String, String> sqlInputSourcePropsTransform = spec -> {
        try {
            spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(partitionsSpec));
            spec = StringUtils.replace(spec, "%%INPUT_SOURCE_FILTER%%", "*" + inputFormatDetails.getFileExtension());
            spec = StringUtils.replace(spec, "%%INPUT_SOURCE_BASE_DIR%%", "/resources/data/batch_index" + inputFormatDetails.getFolderSuffix());
            spec = StringUtils.replace(spec, "%%INPUT_FORMAT%%", jsonMapper.writeValueAsString(inputFormatMap));
            spec = StringUtils.replace(spec, "%%APPEND_TO_EXISTING%%", jsonMapper.writeValueAsString(appendToExisting));
            spec = StringUtils.replace(spec, "%%DROP_EXISTING%%", jsonMapper.writeValueAsString(false));
            if (partitionsSpec instanceof DynamicPartitionsSpec) {
                spec = StringUtils.replace(spec, "%%FORCE_GUARANTEED_ROLLUP%%", jsonMapper.writeValueAsString(false));
            } else if (partitionsSpec instanceof HashedPartitionsSpec || partitionsSpec instanceof SingleDimensionPartitionsSpec) {
                spec = StringUtils.replace(spec, "%%FORCE_GUARANTEED_ROLLUP%%", jsonMapper.writeValueAsString(true));
            }
            return spec;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    };
    doIndexTest(indexDatasource, INDEX_TASK, sqlInputSourcePropsTransform, null, false, false, true, segmentAvailabilityConfirmationPair);
}
Also used: Logger(org.apache.druid.java.util.common.logger.Logger) DataProvider(org.testng.annotations.DataProvider) ImmutableMap(com.google.common.collect.ImmutableMap) StringUtils(org.apache.druid.java.util.common.StringUtils) DruidTestModuleFactory(org.apache.druid.testing.guice.DruidTestModuleFactory) HashedPartitionsSpec(org.apache.druid.indexer.partitions.HashedPartitionsSpec) Test(org.testng.annotations.Test) UUID(java.util.UUID) Function(java.util.function.Function) Guice(org.testng.annotations.Guice) Pair(org.apache.druid.java.util.common.Pair) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) SingleDimensionPartitionsSpec(org.apache.druid.indexer.partitions.SingleDimensionPartitionsSpec) TestNGGroup(org.apache.druid.tests.TestNGGroup) Closeable(java.io.Closeable) Map(java.util.Map) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) PartitionsSpec(org.apache.druid.indexer.partitions.PartitionsSpec)
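
The %%FORCE_GUARANTEED_ROLLUP%% branching above encodes a general rule: dynamic partitioning performs best-effort rollup, while hashed and single-dimension (range) partitioning require perfect rollup and therefore forceGuaranteedRollup=true. A hypothetical helper capturing that rule (the class name is illustrative):

import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.indexer.partitions.PartitionsSpec;
import org.apache.druid.indexer.partitions.SingleDimensionPartitionsSpec;

public final class RollupFlags {
    // Dynamic partitioning cannot guarantee rollup; hashed and
    // single-dimension partitioning only work with guaranteed rollup.
    public static boolean forceGuaranteedRollup(PartitionsSpec partitionsSpec) {
        if (partitionsSpec instanceof DynamicPartitionsSpec) {
            return false;
        }
        if (partitionsSpec instanceof HashedPartitionsSpec
            || partitionsSpec instanceof SingleDimensionPartitionsSpec) {
            return true;
        }
        throw new IllegalArgumentException("Unsupported partitionsSpec: " + partitionsSpec);
    }
}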

Aggregations

DynamicPartitionsSpec (org.apache.druid.indexer.partitions.DynamicPartitionsSpec): 52 usages
Test (org.junit.Test): 34 usages
IndexSpec (org.apache.druid.segment.IndexSpec): 19 usages
List (java.util.List): 15 usages
Map (java.util.Map): 15 usages
ImmutableList (com.google.common.collect.ImmutableList): 13 usages
StringUtils (org.apache.druid.java.util.common.StringUtils): 13 usages
DataSegment (org.apache.druid.timeline.DataSegment): 13 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 12 usages
HashMap (java.util.HashMap): 11 usages
Function (java.util.function.Function): 11 usages
Pair (org.apache.druid.java.util.common.Pair): 11 usages
Closeable (java.io.Closeable): 10 usages
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 10 usages
RoaringBitmapSerdeFactory (org.apache.druid.segment.data.RoaringBitmapSerdeFactory): 10 usages
Duration (org.joda.time.Duration): 10 usages
Interval (org.joda.time.Interval): 10 usages
ArrayList (java.util.ArrayList): 9 usages
UUID (java.util.UUID): 9 usages
UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec): 9 usages