Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class DruidSegmentReaderTest, method testReaderWithInclusiveColumnsFilterNoTimestamp:
@Test
public void testReaderWithInclusiveColumnsFilterNoTimestamp() throws IOException
{
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec("__time", "millis", DateTimes.of("1971")),
      new DimensionsSpec(
          ImmutableList.of(
              StringDimensionSchema.create("s"),
              new DoubleDimensionSchema("d")
          )
      ),
      ColumnsFilter.inclusionBased(ImmutableSet.of("s", "d")),
      null,
      temporaryFolder.newFolder()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(
              DateTimes.of("1971"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder().put("s", "foo").put("d", 1.23d).build()
          ),
          new MapBasedInputRow(
              DateTimes.of("1971"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder().put("s", "bar").put("d", 4.56d).build()
          )
      ),
      readRows(reader)
  );
}
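The pattern every example on this page shares is a DimensionsSpec that declares a double-typed column. As a minimal sketch lifted out of the test above (the column names "s" and "d" come from the source), DoubleDimensionSchema takes only the column name:

  // Declaring a double-typed dimension alongside a string dimension, as the
  // reader tests on this page do for the "d" column.
  DimensionsSpec dimensionsSpec = new DimensionsSpec(
      ImmutableList.of(
          StringDimensionSchema.create("s"),
          new DoubleDimensionSchema("d")
      )
  );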
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class DruidSegmentReaderTest, method testReaderWithInclusiveColumnsFilter:
@Test
public void testReaderWithInclusiveColumnsFilter() throws IOException
{
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec("__time", "millis", DateTimes.of("1971")),
      new DimensionsSpec(
          ImmutableList.of(
              StringDimensionSchema.create("s"),
              new DoubleDimensionSchema("d")
          )
      ),
      ColumnsFilter.inclusionBased(ImmutableSet.of("__time", "s", "d")),
      null,
      temporaryFolder.newFolder()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(
              DateTimes.of("2000"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                          .put("__time", DateTimes.of("2000T").getMillis())
                          .put("s", "foo")
                          .put("d", 1.23d)
                          .build()
          ),
          new MapBasedInputRow(
              DateTimes.of("2000T01"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                          .put("__time", DateTimes.of("2000T01").getMillis())
                          .put("s", "bar")
                          .put("d", 4.56d)
                          .build()
          )
      ),
      readRows(reader)
  );
}
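The only difference between this test and the previous one is whether "__time" appears in the inclusion set. A short sketch of the two filters side by side, taken directly from the constructor calls above; the behavioral reading is inferred from the two tests' assertions:

  // With "__time" excluded, the segment's timestamp column is never read and
  // the TimestampSpec falls back to its missing-value default,
  // DateTimes.of("1971"); with "__time" included, each row keeps its stored
  // timestamp (2000 and 2000T01 in the assertions above).
  ColumnsFilter withoutTime = ColumnsFilter.inclusionBased(ImmutableSet.of("s", "d"));
  ColumnsFilter withTime = ColumnsFilter.inclusionBased(ImmutableSet.of("__time", "s", "d"));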
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class CompactionTaskTest, method testSegmentGranularityAndNullQueryGranularity:
@Test
public void testSegmentGranularityAndNullQueryGranularity() throws IOException, SegmentLoadingException
{
  final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      LockGranularity.TIME_CHUNK,
      new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
      new PartitionConfigurationManager(TUNING_CONFIG),
      null,
      null,
      null,
      new ClientCompactionTaskGranularitySpec(new PeriodGranularity(Period.months(3), null, null), null, null),
      COORDINATOR_CLIENT,
      segmentCacheManagerFactory,
      RETRY_POLICY_FACTORY,
      IOConfig.DEFAULT_DROP_EXISTING
  );
  final List<DimensionsSpec> expectedDimensionsSpec = ImmutableList.of(
      new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double")))
  );
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );
  Assert.assertEquals(1, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
      Collections.singletonList(COMPACTION_INTERVAL),
      new PeriodGranularity(Period.months(3), null, null),
      Granularities.NONE,
      IOConfig.DEFAULT_DROP_EXISTING
  );
}
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class CompactionTaskTest, method testQueryGranularityAndSegmentGranularityNonNull:
@Test
public void testQueryGranularityAndSegmentGranularityNonNull() throws IOException, SegmentLoadingException
{
  final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      LockGranularity.TIME_CHUNK,
      new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
      new PartitionConfigurationManager(TUNING_CONFIG),
      null,
      null,
      null,
      new ClientCompactionTaskGranularitySpec(
          new PeriodGranularity(Period.months(3), null, null),
          new PeriodGranularity(Period.months(3), null, null),
          null
      ),
      COORDINATOR_CLIENT,
      segmentCacheManagerFactory,
      RETRY_POLICY_FACTORY,
      IOConfig.DEFAULT_DROP_EXISTING
  );
  final List<DimensionsSpec> expectedDimensionsSpec = ImmutableList.of(
      new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double")))
  );
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );
  Assert.assertEquals(1, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
      Collections.singletonList(COMPACTION_INTERVAL),
      new PeriodGranularity(Period.months(3), null, null),
      new PeriodGranularity(Period.months(3), null, null),
      IOConfig.DEFAULT_DROP_EXISTING
  );
}
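The two compaction tests vary only the ClientCompactionTaskGranularitySpec argument. A sketch of its three-argument form as used above (segment granularity, query granularity, rollup); the null-handling reading is inferred from the tests' assertions, not from the class's documentation:

  // Segment granularity set, query granularity left null: the first test
  // expects the resulting schema to report Granularities.NONE.
  ClientCompactionTaskGranularitySpec segmentOnly = new ClientCompactionTaskGranularitySpec(
      new PeriodGranularity(Period.months(3), null, null), // segmentGranularity
      null,                                                // queryGranularity
      null                                                 // rollup
  );
  // Both granularities set: the second test expects both to carry through to
  // the ingestion schema unchanged.
  ClientCompactionTaskGranularitySpec both = new ClientCompactionTaskGranularitySpec(
      new PeriodGranularity(Period.months(3), null, null),
      new PeriodGranularity(Period.months(3), null, null),
      null
  );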
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class CompactionTaskTest, method testCreateDimensionSchema:
@Test
public void testCreateDimensionSchema()
{
  final String dimensionName = "dim";
  DimensionHandlerUtils.registerDimensionHandlerProvider(
      ExtensionDimensionHandler.TYPE_NAME,
      d -> new ExtensionDimensionHandler(d)
  );
  DimensionSchema stringSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleSingleValueStringColumnCapabilities()
                            .setHasBitmapIndexes(true)
                            .setDictionaryEncoded(true)
                            .setDictionaryValuesUnique(true)
                            .setDictionaryValuesSorted(true),
      DimensionSchema.MultiValueHandling.SORTED_SET
  );
  Assert.assertTrue(stringSchema instanceof StringDimensionSchema);
  DimensionSchema floatSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.FLOAT),
      null
  );
  Assert.assertTrue(floatSchema instanceof FloatDimensionSchema);
  DimensionSchema doubleSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.DOUBLE),
      null
  );
  Assert.assertTrue(doubleSchema instanceof DoubleDimensionSchema);
  DimensionSchema longSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.LONG),
      null
  );
  Assert.assertTrue(longSchema instanceof LongDimensionSchema);
  DimensionSchema extensionSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.ofComplex(ExtensionDimensionHandler.TYPE_NAME)),
      null
  );
  Assert.assertTrue(extensionSchema instanceof ExtensionDimensionSchema);
}
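A condensed view of the mapping this last test exercises: the ColumnType carried by a column's capabilities decides which DimensionSchema subclass createDimensionSchema returns, with complex types delegated to whatever DimensionHandlerProvider has been registered for them. This is a sketch restating the assertions above, not the method's implementation:

  // ColumnType.DOUBLE -> DoubleDimensionSchema; FLOAT, LONG, and STRING map
  // to their respective schema classes the same way.
  DimensionSchema schema = CompactionTask.createDimensionSchema(
      "dim",
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.DOUBLE),
      null
  );
  Assert.assertTrue(schema instanceof DoubleDimensionSchema);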