Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
From the class IndexTaskSerdeTest, method testSerdeTuningConfigWithDynamicPartitionsSpec.
@Test
public void testSerdeTuningConfigWithDynamicPartitionsSpec() throws IOException {
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, null, null,
      100,    // maxRowsInMemory
      2000L,  // maxBytesInMemory
      null, null, null, null, null,
      new DynamicPartitionsSpec(1000, 2000L),
      new IndexSpec(
          new RoaringBitmapSerdeFactory(false),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      null, null,
      false,  // forceGuaranteedRollup
      null, null,
      100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, 1234, 0L
  );
  assertSerdeTuningConfig(tuningConfig);
}
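The assertSerdeTuningConfig helper is not included in these snippets. As a rough sketch only (the actual Druid test helper and its ObjectMapper setup may differ), a round-trip assertion of this shape would look like:

private static void assertSerdeTuningConfig(IndexTuningConfig tuningConfig) throws IOException {
  // Assumed mapper; the real test may obtain a specially configured one.
  final ObjectMapper mapper = new DefaultObjectMapper();
  final byte[] json = mapper.writeValueAsBytes(tuningConfig);
  // Read back through the TuningConfig base type so Jackson's polymorphic type handling is exercised.
  final IndexTuningConfig fromJson = (IndexTuningConfig) mapper.readValue(json, TuningConfig.class);
  Assert.assertEquals(tuningConfig, fromJson);
}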
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
From the class IndexTaskSerdeTest, method testSerdeTuningConfigWithDeprecatedHashedPartitionsSpec.
@Test
public void testSerdeTuningConfigWithDeprecatedHashedPartitionsSpec() throws IOException {
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, null, null,
      100,    // maxRowsInMemory
      2000L,  // maxBytesInMemory
      null, null, null,
      10,                                // numShards (deprecated)
      ImmutableList.of("dim1", "dim2"),  // partitionDimensions (deprecated)
      null,
      new IndexSpec(
          new RoaringBitmapSerdeFactory(false),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      null, null,
      false,  // forceGuaranteedRollup
      null, null,
      100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, 1234, null
  );
  assertSerdeTuningConfig(tuningConfig);
}
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
From the class IndexTaskSerdeTest, method testForceGuaranteedRollupWithDynamicPartitionsSpec.
@Test
public void testForceGuaranteedRollupWithDynamicPartitionsSpec() {
  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage("DynamicPartitionsSpec cannot be used for perfect rollup");
  // Constructing the config itself throws: forceGuaranteedRollup (true below) is
  // incompatible with a DynamicPartitionsSpec.
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, null, null,
      100, 2000L,
      null, null, null, null, null,
      new DynamicPartitionsSpec(1000, 2000L),
      new IndexSpec(
          new RoaringBitmapSerdeFactory(false),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      null, null,
      true,  // forceGuaranteedRollup
      null, null,
      100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, null, null
  );
}
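The expectedException field used here and in the next test is a standard JUnit 4 rule declared on the test class, along the lines of:

@Rule
public ExpectedException expectedException = ExpectedException.none();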
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
From the class IndexTaskSerdeTest, method testBestEffortRollupWithHashedPartitionsSpec.
@Test
public void testBestEffortRollupWithHashedPartitionsSpec() {
  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage("DynamicPartitionsSpec must be used for best-effort rollup");
  // Constructing the config itself throws: best-effort rollup (forceGuaranteedRollup
  // false below) requires a DynamicPartitionsSpec, not a HashedPartitionsSpec.
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, null, null,
      100, 2000L,
      null, null, null, null, null,
      new HashedPartitionsSpec(null, 10, ImmutableList.of("dim1", "dim2")),
      new IndexSpec(
          new RoaringBitmapSerdeFactory(false),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      null, null,
      false,  // forceGuaranteedRollup
      null, null,
      100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, null, null
  );
}
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
From the class ClientCompactionTaskQuerySerdeTest, method testCompactionTaskToClientCompactionTaskQuery.
@Test
public void testCompactionTaskToClientCompactionTaskQuery() throws IOException {
  final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
  final CompactionTask.Builder builder = new CompactionTask.Builder(
      "datasource",
      new SegmentCacheManagerFactory(mapper),
      new RetryPolicyFactory(new RetryPolicyConfig())
  );
  final CompactionTask task = builder
      .inputSpec(new CompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"), true)
      .tuningConfig(new ParallelIndexTuningConfig(
          null, null, null, 40000, 2000L, null, null, null,
          new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
          new DynamicPartitionsSpec(100, 30000L),
          new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
          new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
          2, null, null, 1000L,
          TmpFileSegmentWriteOutMediumFactory.instance(),
          null, 100, 5, 1000L,
          new Duration(3000L),
          7, 1000, 100,
          null, null, null, null, null, null
      ))
      .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true))
      .dimensionsSpec(
          DimensionsSpec.builder()
              .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim")))
              .setDimensionExclusions(ImmutableList.of("__time", "val"))
              .build()
      )
      .metricsSpec(new AggregatorFactory[] {new CountAggregatorFactory("cnt")})
      .transformSpec(new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)))
      .build();
  final ClientCompactionTaskQuery expected = new ClientCompactionTaskQuery(
      task.getId(),
      "datasource",
      new ClientCompactionIOConfig(
          new ClientCompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"),
          true
      ),
      new ClientCompactionTaskQueryTuningConfig(
          100, 40000, 2000L, 30000L,
          new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
          new DynamicPartitionsSpec(100, 30000L),
          new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
          new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
          2, 1000L,
          TmpFileSegmentWriteOutMediumFactory.instance(),
          100, 5, 1000L,
          new Duration(3000L),
          7, 1000, 100
      ),
      new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true),
      new ClientCompactionTaskDimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
      new AggregatorFactory[] {new CountAggregatorFactory("cnt")},
      new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)),
      new HashMap<>()
  );
  // Serialize the task, then read it back as a ClientTaskQuery to verify the mapping.
  final byte[] json = mapper.writeValueAsBytes(task);
  final ClientCompactionTaskQuery actual = (ClientCompactionTaskQuery) mapper.readValue(json, ClientTaskQuery.class);
  Assert.assertEquals(expected, actual);
}
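The tests above exercise IndexSpec only through larger configs. For illustration, an IndexSpec can also be round-tripped on its own; this minimal sketch assumes a DefaultObjectMapper is sufficient for the Jackson annotations involved:

final ObjectMapper mapper = new DefaultObjectMapper();
final IndexSpec indexSpec = new IndexSpec(
    new RoaringBitmapSerdeFactory(false),  // bitmap index encoding
    CompressionStrategy.LZ4,               // dimension compression
    CompressionStrategy.LZF,               // metric compression
    LongEncodingStrategy.LONGS             // long column encoding
);
final IndexSpec fromJson = mapper.readValue(mapper.writeValueAsBytes(indexSpec), IndexSpec.class);
Assert.assertEquals(indexSpec, fromJson);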