Usage example of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in the druid-io/druid project, taken from the class CompactionTuningConfigTest, method testSerdeWithNullAwaitSegmentAvailabilityTimeoutMillis:
@Test
public void testSerdeWithNullAwaitSegmentAvailabilityTimeoutMillis() {
    // Passing null for awaitSegmentAvailabilityTimeoutMillis (the final constructor
    // argument): the getter is expected to report 0 in that case.
    final CompactionTask.CompactionTuningConfig config = new CompactionTask.CompactionTuningConfig(
        null,
        null,
        null,
        10,
        1000L,
        null,
        null,
        null,
        null,
        new DynamicPartitionsSpec(100, 100L),
        new IndexSpec(
            new RoaringBitmapSerdeFactory(true),
            CompressionStrategy.UNCOMPRESSED,
            CompressionStrategy.LZF,
            LongEncodingStrategy.LONGS
        ),
        new IndexSpec(),
        1,
        false,
        true,
        10000L,
        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
        null,
        250,
        100,
        20L,
        new Duration(3600),
        128,
        null,
        null,
        false,
        null,
        null,
        null,
        null
    );

    Assert.assertEquals(0L, config.getAwaitSegmentAvailabilityTimeoutMillis());
}
Usage example of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in the druid-io/druid project, taken from the class CompactionTaskTest, method testSerdeWithOldTuningConfigSuccessfullyDeserializeToNewOne:
@Test
public void testSerdeWithOldTuningConfigSuccessfullyDeserializeToNewOne() throws IOException {
    // A legacy compaction task still carrying an IndexTuningConfig. Round-tripping it
    // through JSON should yield a CompactionTask equal to one built with the converted
    // tuning config (CompactionTask.getTuningConfig).
    final OldCompactionTaskWithAnyTuningConfigType legacyTask = new OldCompactionTaskWithAnyTuningConfigType(
        null,
        null,
        DATA_SOURCE,
        null,
        SEGMENTS,
        null,
        null,
        null,
        null,
        null,
        new IndexTuningConfig(
            null, // null to compute maxRowsPerSegment automatically
            null,
            null,
            500000,
            1000000L,
            null,
            null,
            null,
            null,
            null,
            null,
            new IndexSpec(
                new RoaringBitmapSerdeFactory(true),
                CompressionStrategy.LZ4,
                CompressionStrategy.LZF,
                LongEncodingStrategy.LONGS
            ),
            null,
            null,
            true,
            false,
            5000L,
            null,
            null,
            null,
            null,
            null,
            null,
            null
        ),
        null,
        toolbox.getJsonMapper(),
        AuthTestUtils.TEST_AUTHORIZER_MAPPER,
        toolbox.getChatHandlerProvider(),
        toolbox.getRowIngestionMetersFactory(),
        COORDINATOR_CLIENT,
        segmentCacheManagerFactory,
        RETRY_POLICY_FACTORY,
        toolbox.getAppenderatorsManager()
    );

    // The task we expect deserialization to produce: same segments, tuning config
    // translated to the new CompactionTuningConfig shape.
    final Builder taskBuilder = new Builder(DATA_SOURCE, segmentCacheManagerFactory, RETRY_POLICY_FACTORY);
    final CompactionTask expected = taskBuilder
        .segments(SEGMENTS)
        .tuningConfig(CompactionTask.getTuningConfig(legacyTask.getTuningConfig()))
        .build();

    // Register the legacy class under the "compact" type name so it serializes with
    // the same discriminator the new CompactionTask deserializer reads.
    final ObjectMapper serdeMapper = new DefaultObjectMapper((DefaultObjectMapper) OBJECT_MAPPER);
    serdeMapper.registerSubtypes(new NamedType(OldCompactionTaskWithAnyTuningConfigType.class, "compact"));

    final byte[] serialized = serdeMapper.writeValueAsBytes(legacyTask);
    final CompactionTask deserialized = serdeMapper.readValue(serialized, CompactionTask.class);

    assertEquals(expected, deserialized);
}
Usage example of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in the druid-io/druid project, taken from the class CompactionTaskTest, method testCreateIngestionSchemaWithMaxTotalRows:
@Test
public void testCreateIngestionSchemaWithMaxTotalRows() throws IOException, SegmentLoadingException {
    // Tuning config with maxTotalRows set (the 1000000L after the first run of nulls).
    final CompactionTask.CompactionTuningConfig config = new CompactionTask.CompactionTuningConfig(
        null,
        null,
        null,
        500000,
        1000000L,
        null,
        1000000L,
        null,
        null,
        null,
        new IndexSpec(
            new RoaringBitmapSerdeFactory(true),
            CompressionStrategy.LZ4,
            CompressionStrategy.LZF,
            LongEncodingStrategy.LONGS
        ),
        null,
        null,
        false,
        false,
        5000L,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null
    );

    // Generate one ingestion spec per compacted time chunk for the configured interval.
    final List<ParallelIndexIngestionSpec> specs = CompactionTask.createIngestionSchema(
        toolbox,
        LockGranularity.TIME_CHUNK,
        new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
        new PartitionConfigurationManager(config),
        null,
        null,
        null,
        null,
        COORDINATOR_CLIENT,
        segmentCacheManagerFactory,
        RETRY_POLICY_FACTORY,
        IOConfig.DEFAULT_DROP_EXISTING
    );
    final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration();

    // Order the specs by their input interval before comparing against the expected
    // per-interval schemas.
    specs.sort(
        (a, b) -> Comparators.intervalsByStartThenEnd().compare(
            a.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            b.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );

    Assert.assertEquals(6, specs.size());
    assertIngestionSchema(
        specs,
        expectedDimensionsSpec,
        AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
        SEGMENT_INTERVALS,
        config,
        Granularities.MONTH,
        Granularities.NONE,
        IOConfig.DEFAULT_DROP_EXISTING
    );
}
Usage example of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in the druid-io/druid project, taken from the class CompactionTuningConfigTest, method testSerdeWithNonZeroAwaitSegmentAvailabilityTimeoutMillis:
@Test
public void testSerdeWithNonZeroAwaitSegmentAvailabilityTimeoutMillis() {
    // A non-zero awaitSegmentAvailabilityTimeoutMillis (5L, the final constructor
    // argument) must be rejected with an IllegalArgumentException.
    expectedException.expect(IllegalArgumentException.class);
    // NOTE(review): "Compcation" is misspelled on purpose — expectMessage has to match
    // the exact text thrown by the production code; verify against CompactionTuningConfig
    // before "fixing" it here.
    expectedException.expectMessage("awaitSegmentAvailabilityTimeoutMillis is not supported for Compcation Task");

    final CompactionTask.CompactionTuningConfig rejected = new CompactionTask.CompactionTuningConfig(
        null,
        null,
        null,
        10,
        1000L,
        null,
        null,
        null,
        null,
        new DynamicPartitionsSpec(100, 100L),
        new IndexSpec(
            new RoaringBitmapSerdeFactory(true),
            CompressionStrategy.UNCOMPRESSED,
            CompressionStrategy.LZF,
            LongEncodingStrategy.LONGS
        ),
        new IndexSpec(),
        1,
        false,
        true,
        10000L,
        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
        null,
        250,
        100,
        20L,
        new Duration(3600),
        128,
        null,
        null,
        false,
        null,
        null,
        null,
        5L
    );
}
Usage example of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in the druid-io/druid project, taken from the class CompactionTuningConfigTest, method testSerdeWithZeroAwaitSegmentAvailabilityTimeoutMillis:
@Test
public void testSerdeWithZeroAwaitSegmentAvailabilityTimeoutMillis() {
    // An explicit 0L for awaitSegmentAvailabilityTimeoutMillis (the final constructor
    // argument) is accepted, and the getter reports it back unchanged.
    final CompactionTask.CompactionTuningConfig config = new CompactionTask.CompactionTuningConfig(
        null,
        null,
        null,
        10,
        1000L,
        null,
        null,
        null,
        null,
        new DynamicPartitionsSpec(100, 100L),
        new IndexSpec(
            new RoaringBitmapSerdeFactory(true),
            CompressionStrategy.UNCOMPRESSED,
            CompressionStrategy.LZF,
            LongEncodingStrategy.LONGS
        ),
        new IndexSpec(),
        1,
        false,
        true,
        10000L,
        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
        null,
        250,
        100,
        20L,
        new Duration(3600),
        128,
        null,
        null,
        false,
        null,
        null,
        null,
        0L
    );

    Assert.assertEquals(0L, config.getAwaitSegmentAvailabilityTimeoutMillis());
}
Aggregations