Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class IndexTaskSerdeTest, method testBestEffortRollupWithHashedPartitionsSpec.
@Test
public void testBestEffortRollupWithHashedPartitionsSpec() {
  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage("DynamicPartitionsSpec must be used for best-effort rollup");
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, null, null, 100, 2000L, null, null, null, null, null,
      new HashedPartitionsSpec(null, 10, ImmutableList.of("dim1", "dim2")),
      new IndexSpec(new RoaringBitmapSerdeFactory(false), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      null, null, false, null, null, 100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, null, null
  );
}
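Since the IndexSpec is easy to lose inside the long positional constructor call above, here is the same construction isolated as a fragment, with the usual meaning of each argument noted. The per-argument readings, and in particular treating the boolean as the compressRunOnSerialization flag, are assumptions for illustration rather than something this test states.

  // The IndexSpec from the test above, isolated (fragment, not a full test).
  final IndexSpec indexSpec = new IndexSpec(
      new RoaringBitmapSerdeFactory(false), // roaring bitmap indexes; false assumed to disable compressRunOnSerialization
      CompressionStrategy.LZ4,              // dimension column compression
      CompressionStrategy.LZF,              // metric column compression
      LongEncodingStrategy.LONGS            // long column encoding
  );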
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class CompactionTaskTest, method testCreateIngestionSchemaWithTargetPartitionSize.
@Test
public void testCreateIngestionSchemaWithTargetPartitionSize() throws IOException, SegmentLoadingException {
  final CompactionTask.CompactionTuningConfig tuningConfig = new CompactionTask.CompactionTuningConfig(
      100000, null, null, 500000, 1000000L, null, null, null, null, null,
      new IndexSpec(new RoaringBitmapSerdeFactory(true), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      null, null, true, false, null, null, null, 10,
      null, null, null, null, null, null, null, null, null, null, null
  );
  final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      LockGranularity.TIME_CHUNK,
      new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
      new PartitionConfigurationManager(tuningConfig),
      null, null, null, null,
      COORDINATOR_CLIENT,
      segmentCacheManagerFactory,
      RETRY_POLICY_FACTORY,
      IOConfig.DEFAULT_DROP_EXISTING
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration();
  ingestionSpecs.sort((s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
      s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
      s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
  ));
  Assert.assertEquals(6, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
      SEGMENT_INTERVALS,
      tuningConfig,
      Granularities.MONTH,
      Granularities.NONE,
      IOConfig.DEFAULT_DROP_EXISTING
  );
}
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class CompactionTaskTest, method testGetTuningConfigWithIndexTuningConfig.
@Test
public void testGetTuningConfigWithIndexTuningConfig() {
  IndexTuningConfig indexTuningConfig = new IndexTuningConfig(
      null, // null to compute maxRowsPerSegment automatically
      null, null, 500000, 1000000L, null, null, null, null, null, null,
      new IndexSpec(new RoaringBitmapSerdeFactory(true), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      null, null, true, false, null, null, null, null, null, null, null, null
  );
  CompactionTask.CompactionTuningConfig compactionTuningConfig = new CompactionTask.CompactionTuningConfig(
      null, null, null, 500000, 1000000L, null, null, null, null, null,
      new IndexSpec(new RoaringBitmapSerdeFactory(true), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      null, null, true, false,
      null, null, null, null, null, null, null, null, null, null, null, null, null, null, null
  );
  Assert.assertEquals(compactionTuningConfig, CompactionTask.getTuningConfig(indexTuningConfig));
}
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class IndexTaskSerdeTest, method testSerdeTuningConfigWithDeprecatedDynamicPartitionsSpec.
@Test
public void testSerdeTuningConfigWithDeprecatedDynamicPartitionsSpec() throws IOException {
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, 1000, null, 100, 2000L, null, 3000L, null, null, null, null,
      new IndexSpec(new RoaringBitmapSerdeFactory(false), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      null, null, false, null, null, 100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, null, 1L
  );
  assertSerdeTuningConfig(tuningConfig);
}
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class ParallelIndexTuningConfigTest, method testSerdeWithMaxNumSubTasks.
@Test
public void testSerdeWithMaxNumSubTasks() throws IOException {
  final int maxNumSubTasks = 250;
  final ParallelIndexTuningConfig tuningConfig = new ParallelIndexTuningConfig(
      null, null, null, 10, 1000L, null, null, null, null,
      new DynamicPartitionsSpec(100, 100L),
      new IndexSpec(new RoaringBitmapSerdeFactory(true), CompressionStrategy.UNCOMPRESSED, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      new IndexSpec(),
      1, false, true, 10000L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      maxNumSubTasks,
      null, 100, 20L, new Duration(3600), 128,
      null, null, false, null, null, null, null, null
  );
  final byte[] json = mapper.writeValueAsBytes(tuningConfig);
  final ParallelIndexTuningConfig fromJson = (ParallelIndexTuningConfig) mapper.readValue(json, TuningConfig.class);
  Assert.assertEquals(fromJson, tuningConfig);
}
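The serde-oriented tests above exercise whole tuning configs; the sketch below narrows that down to the IndexSpec itself, as a minimal illustration rather than Druid's own test code. It assumes a plain org.apache.druid.jackson.DefaultObjectMapper can round-trip IndexSpec through its Jackson annotations, that the boolean passed to RoaringBitmapSerdeFactory is the compressRunOnSerialization flag, and the class name IndexSpecRoundTripSketch is invented for illustration.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.CompressionFactory.LongEncodingStrategy;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;

public class IndexSpecRoundTripSketch {
  public static void main(String[] args) throws Exception {
    // Build an IndexSpec around roaring bitmaps, as the tests above do.
    // Assumption: true turns on compressRunOnSerialization for the bitmaps.
    final IndexSpec indexSpec = new IndexSpec(
        new RoaringBitmapSerdeFactory(true),
        CompressionStrategy.LZ4,
        CompressionStrategy.LZF,
        LongEncodingStrategy.LONGS
    );

    // Mirror the serde tests: serialize to JSON, read it back, compare.
    final ObjectMapper mapper = new DefaultObjectMapper();
    final String json = mapper.writeValueAsString(indexSpec);
    final IndexSpec fromJson = mapper.readValue(json, IndexSpec.class);

    System.out.println(json);
    System.out.println("round-trip equal: " + indexSpec.equals(fromJson));
  }
}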