Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.
From class CompactionTaskRunTest, method testCompactionWithFilterInTransformSpec:
@Test
public void testCompactionWithFilterInTransformSpec() throws Exception {
  runIndexTask();
  final Builder builder = new Builder(DATA_SOURCE, segmentCacheManagerFactory, RETRY_POLICY_FACTORY);
  // day segmentGranularity
  final CompactionTask compactionTask = builder
      .interval(Intervals.of("2014-01-01/2014-01-02"))
      .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null))
      .transformSpec(new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim", "a", null)))
      .build();
  Pair<TaskStatus, List<DataSegment>> resultPair = runTask(compactionTask);
  Assert.assertTrue(resultPair.lhs.isSuccess());
  List<DataSegment> segments = resultPair.rhs;
  Assert.assertEquals(1, segments.size());
  Assert.assertEquals(Intervals.of("2014-01-01/2014-01-02"), segments.get(0).getInterval());
  Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(0).getShardSpec());
  ObjectMapper mapper = new DefaultObjectMapper();
  Map<String, String> expectedLongSumMetric = new HashMap<>();
  expectedLongSumMetric.put("type", "longSum");
  expectedLongSumMetric.put("name", "val");
  expectedLongSumMetric.put("fieldName", "val");
  expectedLongSumMetric.put("expression", null);
  CompactionState expectedCompactionState = new CompactionState(
      new DynamicPartitionsSpec(5000000, Long.MAX_VALUE),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
      ImmutableList.of(expectedLongSumMetric),
      getObjectMapper().readValue(getObjectMapper().writeValueAsString(compactionTask.getTransformSpec()), Map.class),
      mapper.readValue(mapper.writeValueAsString(new IndexSpec()), Map.class),
      mapper.readValue(
          mapper.writeValueAsString(
              new UniformGranularitySpec(
                  Granularities.DAY,
                  Granularities.MINUTE,
                  true,
                  ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))
              )
          ),
          Map.class
      )
  );
  Assert.assertEquals(expectedCompactionState, segments.get(0).getLastCompactionState());
}
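Note that the task above never sets a partitions spec, yet the expected compaction state records new DynamicPartitionsSpec(5000000, Long.MAX_VALUE). A minimal sketch of where those values come from, assuming null constructor arguments fall back to the class defaults (the assertion above suggests compaction also normalizes an unset maxTotalRows to Long.MAX_VALUE when recording state):

// Sketch, not code from the test: both arguments are nullable.
DynamicPartitionsSpec spec = new DynamicPartitionsSpec(null, null);
Integer maxRowsPerSegment = spec.getMaxRowsPerSegment(); // 5,000,000 by default
Long maxTotalRows = spec.getMaxTotalRows();              // null until explicitly set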
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.
From class IndexTaskSerdeTest, method testSerdeTuningConfigWithDynamicPartitionsSpec:
@Test
public void testSerdeTuningConfigWithDynamicPartitionsSpec() throws IOException {
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, null, null, 100, 2000L, null, null, null, null, null,
      new DynamicPartitionsSpec(1000, 2000L),
      new IndexSpec(new RoaringBitmapSerdeFactory(false), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      null, null, false, null, null, 100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, 1234, 0L
  );
  assertSerdeTuningConfig(tuningConfig);
}
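The assertSerdeTuningConfig helper is not shown on this page. A plausible sketch of what such a round-trip helper would do, with hypothetical method body (the real one lives in IndexTaskSerdeTest):

// Hypothetical sketch: serialize, deserialize through the polymorphic
// TuningConfig base type, and verify nothing was lost in the round trip.
private static void assertSerdeTuningConfig(IndexTuningConfig tuningConfig) throws IOException {
  final ObjectMapper mapper = new DefaultObjectMapper();
  final String json = mapper.writeValueAsString(tuningConfig);
  final IndexTuningConfig fromJson = (IndexTuningConfig) mapper.readValue(json, TuningConfig.class);
  Assert.assertEquals(tuningConfig, fromJson);
}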
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.
From class IndexTaskSerdeTest, method testForceGuaranteedRollupWithDynamicPartitionsSpec:
@Test
public void testForceGuaranteedRollupWithDynamicPartitionsSpec() {
  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage("DynamicPartitionsSpec cannot be used for perfect rollup");
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null, null, null, 100, 2000L, null, null, null, null, null,
      new DynamicPartitionsSpec(1000, 2000L),
      new IndexSpec(new RoaringBitmapSerdeFactory(false), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
      null, null, true, null, null, 100L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      true, 10, 100, null, null
  );
}
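The only difference from the previous test is the forceGuaranteedRollup flag (the lone true in the middle of the argument list, where the previous test passed false), so the exception is thrown at construction time. A hedged sketch of the invariant being exercised, not the exact code inside IndexTuningConfig:

// Sketch of the guard: dynamic partitioning appends segments as rows arrive,
// so perfect (guaranteed) rollup, which needs all rows per partition known
// up front, is impossible with it.
if (forceGuaranteedRollup && partitionsSpec instanceof DynamicPartitionsSpec) {
  throw new IllegalArgumentException("DynamicPartitionsSpec cannot be used for perfect rollup");
}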
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.
From class ClientCompactionTaskQuerySerdeTest, method testCompactionTaskToClientCompactionTaskQuery:
@Test
public void testCompactionTaskToClientCompactionTaskQuery() throws IOException {
  final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
  final CompactionTask.Builder builder = new CompactionTask.Builder(
      "datasource",
      new SegmentCacheManagerFactory(mapper),
      new RetryPolicyFactory(new RetryPolicyConfig())
  );
  final CompactionTask task = builder
      .inputSpec(new CompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"), true)
      .tuningConfig(
          new ParallelIndexTuningConfig(
              null, null, null, 40000, 2000L, null, null, null,
              new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
              new DynamicPartitionsSpec(100, 30000L),
              new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
              new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
              2, null, null, 1000L,
              TmpFileSegmentWriteOutMediumFactory.instance(),
              null, 100, 5, 1000L, new Duration(3000L), 7, 1000, 100,
              null, null, null, null, null, null
          )
      )
      .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true))
      .dimensionsSpec(
          DimensionsSpec.builder()
                        .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim")))
                        .setDimensionExclusions(ImmutableList.of("__time", "val"))
                        .build()
      )
      .metricsSpec(new AggregatorFactory[] {new CountAggregatorFactory("cnt")})
      .transformSpec(new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)))
      .build();
  final ClientCompactionTaskQuery expected = new ClientCompactionTaskQuery(
      task.getId(),
      "datasource",
      new ClientCompactionIOConfig(
          new ClientCompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"),
          true
      ),
      new ClientCompactionTaskQueryTuningConfig(
          100, 40000, 2000L, 30000L,
          new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
          new DynamicPartitionsSpec(100, 30000L),
          new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
          new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
          2, 1000L,
          TmpFileSegmentWriteOutMediumFactory.instance(),
          100, 5, 1000L, new Duration(3000L), 7, 1000, 100
      ),
      new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true),
      new ClientCompactionTaskDimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
      new AggregatorFactory[] {new CountAggregatorFactory("cnt")},
      new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)),
      new HashMap<>()
  );
  final byte[] json = mapper.writeValueAsBytes(task);
  final ClientCompactionTaskQuery actual = (ClientCompactionTaskQuery) mapper.readValue(json, ClientTaskQuery.class);
  Assert.assertEquals(expected, actual);
}
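On the wire, the DynamicPartitionsSpec(100, 30000L) shared by both tuning configs becomes a small tagged JSON object. A minimal sketch of reading one back through the polymorphic PartitionsSpec base type, reusing the mapper set up above and assuming the conventional "dynamic" type tag:

final String partitionsJson = "{\"type\":\"dynamic\",\"maxRowsPerSegment\":100,\"maxTotalRows\":30000}";
final PartitionsSpec partitionsSpec = mapper.readValue(partitionsJson, PartitionsSpec.class);
// Should equal the spec constructed in the test above.
Assert.assertEquals(new DynamicPartitionsSpec(100, 30000L), partitionsSpec);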
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io.
From class TaskSerdeTest, method testIndexTaskSerde:
@Test
public void testIndexTaskSerde() throws Exception {
  final IndexTask task = new IndexTask(
      null,
      null,
      new IndexIngestionSpec(
          new DataSchema(
              "foo",
              new TimestampSpec(null, null, null),
              DimensionsSpec.EMPTY,
              new AggregatorFactory[] {new DoubleSumAggregatorFactory("met", "met")},
              new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(Intervals.of("2010-01-01/P2D"))),
              null
          ),
          new IndexIOConfig(null, new LocalInputSource(new File("lol"), "rofl"), new NoopInputFormat(), true, false),
          new IndexTuningConfig(
              null, null, null, 10, null, null, null, 9999, null, null,
              new DynamicPartitionsSpec(10000, null),
              indexSpec,
              null, 3, false, null, null, null, null, null, null, null, null, 1L
          )
      ),
      null
  );
final String json = jsonMapper.writeValueAsString(task);
// Just want to run the clock a bit to make sure the task id doesn't change
Thread.sleep(100);
final IndexTask task2 = (IndexTask) jsonMapper.readValue(json, Task.class);
Assert.assertEquals("foo", task.getDataSource());
Assert.assertEquals(task.getId(), task2.getId());
Assert.assertEquals(task.getGroupId(), task2.getGroupId());
Assert.assertEquals(task.getDataSource(), task2.getDataSource());
IndexTask.IndexIOConfig taskIoConfig = task.getIngestionSchema().getIOConfig();
IndexTask.IndexIOConfig task2IoConfig = task2.getIngestionSchema().getIOConfig();
Assert.assertTrue(taskIoConfig.getInputSource() instanceof LocalInputSource);
Assert.assertTrue(task2IoConfig.getInputSource() instanceof LocalInputSource);
Assert.assertEquals(taskIoConfig.isAppendToExisting(), task2IoConfig.isAppendToExisting());
IndexTask.IndexTuningConfig taskTuningConfig = task.getIngestionSchema().getTuningConfig();
IndexTask.IndexTuningConfig task2TuningConfig = task2.getIngestionSchema().getTuningConfig();
Assert.assertEquals(taskTuningConfig.getBasePersistDirectory(), task2TuningConfig.getBasePersistDirectory());
Assert.assertEquals(taskTuningConfig.getIndexSpec(), task2TuningConfig.getIndexSpec());
Assert.assertEquals(taskTuningConfig.getIntermediatePersistPeriod(), task2TuningConfig.getIntermediatePersistPeriod());
Assert.assertEquals(taskTuningConfig.getMaxPendingPersists(), task2TuningConfig.getMaxPendingPersists());
Assert.assertEquals(taskTuningConfig.getMaxRowsInMemory(), task2TuningConfig.getMaxRowsInMemory());
Assert.assertEquals(taskTuningConfig.getNumShards(), task2TuningConfig.getNumShards());
Assert.assertEquals(taskTuningConfig.getMaxRowsPerSegment(), task2TuningConfig.getMaxRowsPerSegment());
Assert.assertEquals(taskTuningConfig.isReportParseExceptions(), task2TuningConfig.isReportParseExceptions());
Assert.assertEquals(taskTuningConfig.getAwaitSegmentAvailabilityTimeoutMillis(), task2TuningConfig.getAwaitSegmentAvailabilityTimeoutMillis());
}
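The tuning config is compared field by field above. A natural extra assertion, assuming IndexTuningConfig exposes a getPartitionsSpec() accessor (hypothetical here), would verify that the DynamicPartitionsSpec(10000, null) itself survives the round trip:

// Hedged sketch: equals() on DynamicPartitionsSpec compares
// maxRowsPerSegment and maxTotalRows.
Assert.assertEquals(taskTuningConfig.getPartitionsSpec(), task2TuningConfig.getPartitionsSpec());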