Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
In the class IngestSegmentFirehoseFactoryTest, method constructorFeeder:
@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() throws IOException {
  final IndexSpec indexSpec = new IndexSpec();
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(JodaUtils.MIN_INSTANT)
      .withDimensionsSpec(ROW_PARSER)
      .withMetrics(
          new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME),
          new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)
      )
      .build();
  final IncrementalIndex index = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(schema)
      .setMaxRowCount(MAX_ROWS * MAX_SHARD_NUMBER)
      .build();
  for (Integer i = 0; i < MAX_ROWS; ++i) {
    index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0));
  }
  FileUtils.mkdirp(PERSIST_DIR);
  INDEX_MERGER_V9.persist(index, PERSIST_DIR, indexSpec, null);
  final CoordinatorClient cc = new CoordinatorClient(null, null) {
    @Override
    public Collection<DataSegment> fetchUsedSegmentsInDataSourceForIntervals(String dataSource, List<Interval> intervals) {
      return ImmutableSet.copyOf(SEGMENT_SET);
    }
  };
  SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
  EasyMock.replay(notifierFactory);
  final SegmentCacheManagerFactory slf = new SegmentCacheManagerFactory(MAPPER);
  final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig());
  Collection<Object[]> values = new ArrayList<>();
  for (InputRowParser parser : Arrays.<InputRowParser>asList(
      ROW_PARSER,
      new MapInputRowParser(
          new JSONParseSpec(
              new TimestampSpec(TIME_COLUMN, "auto", null),
              DimensionsSpec.builder()
                            .setDimensionExclusions(ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME))
                            .build(),
              null,
              null,
              null
          )
      )
  )) {
    for (List<String> dim_names : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
      for (List<String> metric_names : Arrays.<List<String>>asList(
          null,
          ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME)
      )) {
        for (Boolean wrapInCombining : Arrays.asList(false, true)) {
          final IngestSegmentFirehoseFactory isfFactory = new IngestSegmentFirehoseFactory(
              TASK.getDataSource(),
              Intervals.ETERNITY,
              null,
              new SelectorDimFilter(DIM_NAME, DIM_VALUE, null),
              dim_names,
              metric_names,
              null,
              INDEX_IO,
              cc,
              slf,
              retryPolicyFactory
          );
          final FirehoseFactory factory = wrapInCombining
              ? new CombiningFirehoseFactory(ImmutableList.of(isfFactory))
              : isfFactory;
          values.add(new Object[]{
              StringUtils.format(
                  "DimNames[%s]MetricNames[%s]ParserDimNames[%s]WrapInCombining[%s]",
                  dim_names == null ? "null" : "dims",
                  metric_names == null ? "null" : "metrics",
                  parser == ROW_PARSER ? "dims" : "null",
                  wrapInCombining
              ),
              factory,
              parser
          });
        }
      }
    }
  }
  return values;
}
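The factory above persists the incremental index with a default IndexSpec, i.e. default bitmap, compression, and long-encoding settings. For comparison, a non-default spec can be built explicitly. The sketch below is illustrative only, not part of the test, and assumes the four-argument IndexSpec constructor (bitmap serde factory, dimension compression, metric compression, long encoding) present in this era of Druid; check the exact version before relying on it.

// Hedged sketch: a customized IndexSpec (assumed constructor signature).
final IndexSpec customSpec = new IndexSpec(
    new RoaringBitmapSerdeFactory(true),            // roaring bitmap indexes, compress runs on serialization
    CompressionStrategy.LZ4,                        // dimension column compression
    CompressionStrategy.LZ4,                        // metric column compression
    CompressionFactory.LongEncodingStrategy.AUTO    // long column encoding
);
// It could then replace the default spec in the persist call above:
// INDEX_MERGER_V9.persist(index, PERSIST_DIR, customSpec, null);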
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
In the class KafkaIndexTaskTuningConfigTest, method testSerdeWithModifiedTuningConfigAddedField:
@Test
public void testSerdeWithModifiedTuningConfigAddedField() throws IOException {
  KafkaIndexTaskTuningConfig base = new KafkaIndexTaskTuningConfig(
      null, 1, null, null, 2, 10L, new Period("PT3S"), new File("/tmp/xxx"), 4,
      new IndexSpec(), new IndexSpec(), true, 5L, null, null, null, true, 42, 42
  );
  String serialized = mapper.writeValueAsString(base);
  TestModifiedKafkaIndexTaskTuningConfig deserialized =
      mapper.readValue(serialized, TestModifiedKafkaIndexTaskTuningConfig.class);
  Assert.assertEquals(null, deserialized.getExtra());
  Assert.assertEquals(base.getAppendableIndexSpec(), deserialized.getAppendableIndexSpec());
  Assert.assertEquals(base.getMaxRowsInMemory(), deserialized.getMaxRowsInMemory());
  Assert.assertEquals(base.getMaxBytesInMemory(), deserialized.getMaxBytesInMemory());
  Assert.assertEquals(base.getMaxRowsPerSegment(), deserialized.getMaxRowsPerSegment());
  Assert.assertEquals(base.getMaxTotalRows(), deserialized.getMaxTotalRows());
  Assert.assertEquals(base.getIntermediatePersistPeriod(), deserialized.getIntermediatePersistPeriod());
  Assert.assertEquals(base.getBasePersistDirectory(), deserialized.getBasePersistDirectory());
  Assert.assertEquals(base.getMaxPendingPersists(), deserialized.getMaxPendingPersists());
  Assert.assertEquals(base.getIndexSpec(), deserialized.getIndexSpec());
  Assert.assertEquals(base.isReportParseExceptions(), deserialized.isReportParseExceptions());
  Assert.assertEquals(base.getHandoffConditionTimeout(), deserialized.getHandoffConditionTimeout());
  Assert.assertEquals(base.isResetOffsetAutomatically(), deserialized.isResetOffsetAutomatically());
  Assert.assertEquals(base.getSegmentWriteOutMediumFactory(), deserialized.getSegmentWriteOutMediumFactory());
  Assert.assertEquals(base.getIntermediateHandoffPeriod(), deserialized.getIntermediateHandoffPeriod());
  Assert.assertEquals(base.isLogParseExceptions(), deserialized.isLogParseExceptions());
  Assert.assertEquals(base.getMaxParseExceptions(), deserialized.getMaxParseExceptions());
  Assert.assertEquals(base.getMaxSavedParseExceptions(), deserialized.getMaxSavedParseExceptions());
}
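This test exercises a common Jackson compatibility pattern: JSON produced from the current tuning config is read back into a class that declares one additional field, and the property missing from the JSON simply deserializes to its default. Below is a minimal, self-contained sketch of the same idea; the ConfigV1/ConfigV2 classes are hypothetical names for illustration only and are not part of Druid.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class AddedFieldSerdeSketch {
  static class ConfigV1 {
    @JsonProperty public int maxRowsInMemory = 1;
  }

  static class ConfigV2 {
    @JsonProperty public int maxRowsInMemory = 1;
    @JsonProperty public String extra;            // field only the "newer" class knows about
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(new ConfigV1());    // {"maxRowsInMemory":1}
    ConfigV2 upgraded = mapper.readValue(json, ConfigV2.class);
    // The property absent from the old JSON keeps its default (null),
    // mirroring Assert.assertEquals(null, deserialized.getExtra()) in the test above.
    System.out.println(upgraded.extra);                         // prints "null"
  }
}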
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
In the class KafkaIndexTaskTuningConfigTest, method testConvert:
@Test
public void testConvert() {
  KafkaSupervisorTuningConfig original = new KafkaSupervisorTuningConfig(
      null, 1, null, null, 2, 10L, new Period("PT3S"), new File("/tmp/xxx"), 4,
      new IndexSpec(), new IndexSpec(), true, 5L, null, null, null, null, null,
      null, null, null, null, null, null, null
  );
  KafkaIndexTaskTuningConfig copy = (KafkaIndexTaskTuningConfig) original.convertToTaskTuningConfig();
  Assert.assertEquals(original.getAppendableIndexSpec(), copy.getAppendableIndexSpec());
  Assert.assertEquals(1, copy.getMaxRowsInMemory());
  Assert.assertEquals(2, copy.getMaxRowsPerSegment().intValue());
  Assert.assertNotEquals(null, copy.getMaxTotalRows());
  Assert.assertEquals(10L, copy.getMaxTotalRows().longValue());
  Assert.assertEquals(new Period("PT3S"), copy.getIntermediatePersistPeriod());
  Assert.assertEquals(new File("/tmp/xxx"), copy.getBasePersistDirectory());
  Assert.assertEquals(4, copy.getMaxPendingPersists());
  Assert.assertEquals(new IndexSpec(), copy.getIndexSpec());
  Assert.assertEquals(true, copy.isReportParseExceptions());
  Assert.assertEquals(5L, copy.getHandoffConditionTimeout());
}
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
In the class KafkaIndexTaskTuningConfigTest, method testSerdeWithModifiedTuningConfigRemovedField:
@Test
public void testSerdeWithModifiedTuningConfigRemovedField() throws IOException {
  TestModifiedKafkaIndexTaskTuningConfig base = new TestModifiedKafkaIndexTaskTuningConfig(
      null, 1, null, null, 2, 10L, new Period("PT3S"), new File("/tmp/xxx"), 4,
      new IndexSpec(), new IndexSpec(), true, 5L, null, null, null, true, 42, 42,
      "extra string"
  );
  String serialized = mapper.writeValueAsString(base);
  KafkaIndexTaskTuningConfig deserialized = mapper.readValue(serialized, KafkaIndexTaskTuningConfig.class);
  Assert.assertEquals(base.getAppendableIndexSpec(), deserialized.getAppendableIndexSpec());
  Assert.assertEquals(base.getMaxRowsInMemory(), deserialized.getMaxRowsInMemory());
  Assert.assertEquals(base.getMaxBytesInMemory(), deserialized.getMaxBytesInMemory());
  Assert.assertEquals(base.getMaxRowsPerSegment(), deserialized.getMaxRowsPerSegment());
  Assert.assertEquals(base.getMaxTotalRows(), deserialized.getMaxTotalRows());
  Assert.assertEquals(base.getIntermediatePersistPeriod(), deserialized.getIntermediatePersistPeriod());
  Assert.assertEquals(base.getBasePersistDirectory(), deserialized.getBasePersistDirectory());
  Assert.assertEquals(base.getMaxPendingPersists(), deserialized.getMaxPendingPersists());
  Assert.assertEquals(base.getIndexSpec(), deserialized.getIndexSpec());
  Assert.assertEquals(base.isReportParseExceptions(), deserialized.isReportParseExceptions());
  Assert.assertEquals(base.getHandoffConditionTimeout(), deserialized.getHandoffConditionTimeout());
  Assert.assertEquals(base.isResetOffsetAutomatically(), deserialized.isResetOffsetAutomatically());
  Assert.assertEquals(base.getSegmentWriteOutMediumFactory(), deserialized.getSegmentWriteOutMediumFactory());
  Assert.assertEquals(base.getIntermediateHandoffPeriod(), deserialized.getIntermediateHandoffPeriod());
  Assert.assertEquals(base.isLogParseExceptions(), deserialized.isLogParseExceptions());
  Assert.assertEquals(base.getMaxParseExceptions(), deserialized.getMaxParseExceptions());
  Assert.assertEquals(base.getMaxSavedParseExceptions(), deserialized.getMaxSavedParseExceptions());
}
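The removed-field direction runs the round trip the other way: the extended config writes an extra property that plain KafkaIndexTaskTuningConfig does not declare, so deserialization only succeeds if unknown properties are tolerated (for example via @JsonIgnoreProperties(ignoreUnknown = true) or by disabling FAIL_ON_UNKNOWN_PROPERTIES, which the mapper used in these tests presumably does). A hedged, stand-alone sketch of that direction, reusing the same hypothetical ConfigV1/ConfigV2 classes as above:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RemovedFieldSerdeSketch {
  static class ConfigV1 {
    @JsonProperty public int maxRowsInMemory = 1;
  }

  static class ConfigV2 {
    @JsonProperty public int maxRowsInMemory = 2;
    @JsonProperty public String extra = "extra string";
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper()
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    String json = mapper.writeValueAsString(new ConfigV2());    // {"maxRowsInMemory":2,"extra":"extra string"}
    ConfigV1 downgraded = mapper.readValue(json, ConfigV1.class);
    // The unknown "extra" property is dropped silently; the shared fields survive,
    // which is what the base-vs-deserialized assertions in the test above verify.
    System.out.println(downgraded.maxRowsInMemory);             // prints "2"
  }
}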
Use of org.apache.druid.segment.IndexSpec in project druid by druid-io.
In the class KinesisIndexTaskTuningConfigTest, method testSerdeWithModifiedTuningConfigAddedField:
@Test
public void testSerdeWithModifiedTuningConfigAddedField() throws IOException {
  KinesisIndexTaskTuningConfig base = new KinesisIndexTaskTuningConfig(
      null, 1, 3L, null, 2, 100L, new Period("PT3S"), new File("/tmp/xxx"), 4,
      new IndexSpec(), new IndexSpec(), true, 5L, true, false, 1000, 1000, 500,
      null, 42, null, false, 500, 500, 6000, new Period("P3D")
  );
  String serialized = mapper.writeValueAsString(base);
  TestModifiedKinesisIndexTaskTuningConfig deserialized =
      mapper.readValue(serialized, TestModifiedKinesisIndexTaskTuningConfig.class);
  Assert.assertEquals(null, deserialized.getExtra());
  Assert.assertEquals(base.getAppendableIndexSpec(), deserialized.getAppendableIndexSpec());
  Assert.assertEquals(base.getMaxRowsInMemory(), deserialized.getMaxRowsInMemory());
  Assert.assertEquals(base.getMaxBytesInMemory(), deserialized.getMaxBytesInMemory());
  Assert.assertEquals(base.getMaxRowsPerSegment(), deserialized.getMaxRowsPerSegment());
  Assert.assertEquals(base.getMaxTotalRows(), deserialized.getMaxTotalRows());
  Assert.assertEquals(base.getIntermediatePersistPeriod(), deserialized.getIntermediatePersistPeriod());
  Assert.assertEquals(base.getBasePersistDirectory(), deserialized.getBasePersistDirectory());
  Assert.assertEquals(base.getMaxPendingPersists(), deserialized.getMaxPendingPersists());
  Assert.assertEquals(base.getIndexSpec(), deserialized.getIndexSpec());
  Assert.assertEquals(base.isReportParseExceptions(), deserialized.isReportParseExceptions());
  Assert.assertEquals(base.getHandoffConditionTimeout(), deserialized.getHandoffConditionTimeout());
  Assert.assertEquals(base.isResetOffsetAutomatically(), deserialized.isResetOffsetAutomatically());
  Assert.assertEquals(base.getSegmentWriteOutMediumFactory(), deserialized.getSegmentWriteOutMediumFactory());
  Assert.assertEquals(base.getIntermediateHandoffPeriod(), deserialized.getIntermediateHandoffPeriod());
  Assert.assertEquals(base.isLogParseExceptions(), deserialized.isLogParseExceptions());
  Assert.assertEquals(base.getMaxParseExceptions(), deserialized.getMaxParseExceptions());
  Assert.assertEquals(base.getMaxSavedParseExceptions(), deserialized.getMaxSavedParseExceptions());
  Assert.assertEquals(base.getRecordBufferFullWait(), deserialized.getRecordBufferFullWait());
  Assert.assertEquals(base.getRecordBufferOfferTimeout(), deserialized.getRecordBufferOfferTimeout());
  Assert.assertEquals(base.getRecordBufferSize(), deserialized.getRecordBufferSize());
  Assert.assertEquals(base.getMaxRecordsPerPoll(), deserialized.getMaxRecordsPerPoll());
}