Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.
From class InputRowSerdeTest, method testSerde:
@Test
public void testSerde() {
  // Prepare the mocks & set close() call count expectation to 1
  final Aggregator mockedAggregator = EasyMock.createMock(DoubleSumAggregator.class);
  EasyMock.expect(mockedAggregator.isNull()).andReturn(false).times(1);
  EasyMock.expect(mockedAggregator.getDouble()).andReturn(0d).times(1);
  mockedAggregator.aggregate();
  EasyMock.expectLastCall().times(1);
  mockedAggregator.close();
  EasyMock.expectLastCall().times(1);
  EasyMock.replay(mockedAggregator);

  final Aggregator mockedNullAggregator = EasyMock.createMock(DoubleSumAggregator.class);
  EasyMock.expect(mockedNullAggregator.isNull()).andReturn(true).times(1);
  mockedNullAggregator.aggregate();
  EasyMock.expectLastCall().times(1);
  mockedNullAggregator.close();
  EasyMock.expectLastCall().times(1);
  EasyMock.replay(mockedNullAggregator);

  final AggregatorFactory mockedAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
  EasyMock.expect(mockedAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class)))
          .andReturn(mockedAggregator);
  EasyMock.expect(mockedAggregatorFactory.getIntermediateType()).andReturn(ColumnType.DOUBLE).anyTimes();
  EasyMock.expect(mockedAggregatorFactory.getName()).andReturn("mockedAggregator").anyTimes();

  final AggregatorFactory mockedNullAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
  EasyMock.expect(mockedNullAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class)))
          .andReturn(mockedNullAggregator);
  EasyMock.expect(mockedNullAggregatorFactory.getName()).andReturn("mockedNullAggregator").anyTimes();
  EasyMock.expect(mockedNullAggregatorFactory.getIntermediateType()).andReturn(ColumnType.DOUBLE).anyTimes();
  EasyMock.replay(mockedAggregatorFactory, mockedNullAggregatorFactory);

  InputRow in = new MapBasedInputRow(timestamp, dims, event);
  AggregatorFactory[] aggregatorFactories = new AggregatorFactory[] {
      new DoubleSumAggregatorFactory("agg_non_existing", "agg_non_existing_in"),
      new DoubleSumAggregatorFactory("m1out", "m1"),
      new LongSumAggregatorFactory("m2out", "m2"),
      new HyperUniquesAggregatorFactory("m3out", "m3"),
      // Unparseable from String to Long
      new LongSumAggregatorFactory("unparseable", "m3"),
      mockedAggregatorFactory,
      mockedNullAggregatorFactory
  };
  DimensionsSpec dimensionsSpec = new DimensionsSpec(
      Arrays.asList(
          new StringDimensionSchema("d1"),
          new StringDimensionSchema("d2"),
          new LongDimensionSchema("d3"),
          new FloatDimensionSchema("d4"),
          new DoubleDimensionSchema("d5")
      )
  );
  byte[] data = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories)
                             .getSerializedRow();
  InputRow out = InputRowSerde.fromBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), data, aggregatorFactories);

  Assert.assertEquals(timestamp, out.getTimestampFromEpoch());
  Assert.assertEquals(dims, out.getDimensions());
  Assert.assertEquals(Collections.emptyList(), out.getDimension("dim_non_existing"));
  Assert.assertEquals(ImmutableList.of("d1v"), out.getDimension("d1"));
  Assert.assertEquals(ImmutableList.of("d2v1", "d2v2"), out.getDimension("d2"));
  Assert.assertEquals(200L, out.getRaw("d3"));
  Assert.assertEquals(300.1f, out.getRaw("d4"));
  Assert.assertEquals(400.5d, out.getRaw("d5"));
  Assert.assertEquals(NullHandling.defaultDoubleValue(), out.getMetric("agg_non_existing"));
  Assert.assertEquals(5.0f, out.getMetric("m1out").floatValue(), 0.00001);
  Assert.assertEquals(100L, out.getMetric("m2out"));
  Assert.assertEquals(1, ((HyperLogLogCollector) out.getRaw("m3out")).estimateCardinality(), 0.001);
  Assert.assertEquals(NullHandling.defaultLongValue(), out.getMetric("unparseable"));

  EasyMock.verify(mockedAggregator);
  EasyMock.verify(mockedNullAggregator);
}
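The heart of this test is that declaring d5 with DoubleDimensionSchema makes the value round-trip as a Double rather than a String. Below is a minimal sketch of that round trip without the mocked aggregators, reusing only the InputRowSerde API already shown above; the timestamp and value are illustrative:

InputRow in = new MapBasedInputRow(
    1000L,                                  // illustrative epoch timestamp
    Collections.singletonList("d5"),
    ImmutableMap.of("d5", 400.5d)
);
DimensionsSpec spec = new DimensionsSpec(
    Collections.singletonList(new DoubleDimensionSchema("d5"))
);
AggregatorFactory[] noAggs = new AggregatorFactory[0];
byte[] bytes = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(spec), in, noAggs)
                            .getSerializedRow();
InputRow out = InputRowSerde.fromBytes(InputRowSerde.getTypeHelperMap(spec), bytes, noAggs);
// Because d5 is declared as a double dimension, getRaw returns a Double, not a String.
Assert.assertEquals(400.5d, out.getRaw("d5"));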
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.
From class CompactionTaskTest, method testCreateDimensionSchema:
@Test
public void testCreateDimensionSchema() {
  final String dimensionName = "dim";
  DimensionHandlerUtils.registerDimensionHandlerProvider(
      ExtensionDimensionHandler.TYPE_NAME,
      d -> new ExtensionDimensionHandler(d)
  );
  DimensionSchema stringSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleSingleValueStringColumnCapabilities()
                            .setHasBitmapIndexes(true)
                            .setDictionaryEncoded(true)
                            .setDictionaryValuesUnique(true),
      DimensionSchema.MultiValueHandling.SORTED_SET
  );
  Assert.assertTrue(stringSchema instanceof StringDimensionSchema);

  DimensionSchema floatSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.FLOAT),
      null
  );
  Assert.assertTrue(floatSchema instanceof FloatDimensionSchema);

  DimensionSchema doubleSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.DOUBLE),
      null
  );
  Assert.assertTrue(doubleSchema instanceof DoubleDimensionSchema);

  DimensionSchema longSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.LONG),
      null
  );
  Assert.assertTrue(longSchema instanceof LongDimensionSchema);

  DimensionSchema extensionSchema = CompactionTask.createDimensionSchema(
      dimensionName,
      ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.ofComplex(ExtensionDimensionHandler.TYPE_NAME)),
      null
  );
  Assert.assertTrue(extensionSchema instanceof ExtensionDimensionSchema);
}
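createDimensionSchema maps a stored column's capabilities to the dimension schema that compaction writes back, so a DOUBLE column comes out as a DoubleDimensionSchema. Here is the double case in isolation, as a sketch with a hypothetical column name:

// Numeric columns carry no multi-value handling, so the third argument is null.
DimensionSchema schema = CompactionTask.createDimensionSchema(
    "price", // hypothetical column name
    ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.DOUBLE),
    null
);
Assert.assertTrue(schema instanceof DoubleDimensionSchema);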
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.
From class CompactionTaskTest, method testQueryGranularityAndSegmentGranularityNonNull:
@Test
public void testQueryGranularityAndSegmentGranularityNonNull() throws IOException, SegmentLoadingException {
  final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      LockGranularity.TIME_CHUNK,
      new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
      new PartitionConfigurationManager(TUNING_CONFIG),
      null,
      null,
      null,
      new ClientCompactionTaskGranularitySpec(
          new PeriodGranularity(Period.months(3), null, null),
          new PeriodGranularity(Period.months(3), null, null),
          null
      ),
      COORDINATOR_CLIENT,
      segmentCacheManagerFactory,
      RETRY_POLICY_FACTORY,
      IOConfig.DEFAULT_DROP_EXISTING
  );
  final List<DimensionsSpec> expectedDimensionsSpec = ImmutableList.of(
      new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double")))
  );
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );
  Assert.assertEquals(1, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
      Collections.singletonList(COMPACTION_INTERVAL),
      new PeriodGranularity(Period.months(3), null, null),
      new PeriodGranularity(Period.months(3), null, null),
      IOConfig.DEFAULT_DROP_EXISTING
  );
}
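The ClientCompactionTaskGranularitySpec argument is what pins both granularities to quarterly periods. A sketch of just that argument, assuming the argument order used in the call above (both granularities are identical here, so the order is not observable in this test):

ClientCompactionTaskGranularitySpec granularitySpec = new ClientCompactionTaskGranularitySpec(
    new PeriodGranularity(Period.months(3), null, null), // segment granularity: quarterly
    new PeriodGranularity(Period.months(3), null, null), // query granularity: quarterly
    null                                                 // rollup left unspecified
);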
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.
From class CompactionTaskTest, method testCreateIngestionSchemaWithCustomDimensionsSpec:
@Test
public void testCreateIngestionSchemaWithCustomDimensionsSpec() throws IOException, SegmentLoadingException {
  final DimensionsSpec customSpec = new DimensionsSpec(
      Lists.newArrayList(
          new LongDimensionSchema("timestamp"),
          new StringDimensionSchema("string_dim_0"),
          new StringDimensionSchema("string_dim_1"),
          new StringDimensionSchema("string_dim_2"),
          new StringDimensionSchema("string_dim_3"),
          new StringDimensionSchema("string_dim_4"),
          new LongDimensionSchema("long_dim_0"),
          new LongDimensionSchema("long_dim_1"),
          new LongDimensionSchema("long_dim_2"),
          new LongDimensionSchema("long_dim_3"),
          new LongDimensionSchema("long_dim_4"),
          new FloatDimensionSchema("float_dim_0"),
          new FloatDimensionSchema("float_dim_1"),
          new FloatDimensionSchema("float_dim_2"),
          new FloatDimensionSchema("float_dim_3"),
          new FloatDimensionSchema("float_dim_4"),
          new DoubleDimensionSchema("double_dim_0"),
          new DoubleDimensionSchema("double_dim_1"),
          new DoubleDimensionSchema("double_dim_2"),
          new DoubleDimensionSchema("double_dim_3"),
          new DoubleDimensionSchema("double_dim_4"),
          new StringDimensionSchema(MIXED_TYPE_COLUMN)
      )
  );
  final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      LockGranularity.TIME_CHUNK,
      new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
      new PartitionConfigurationManager(TUNING_CONFIG),
      customSpec,
      null,
      null,
      null,
      COORDINATOR_CLIENT,
      segmentCacheManagerFactory,
      RETRY_POLICY_FACTORY,
      IOConfig.DEFAULT_DROP_EXISTING
  );
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );
  Assert.assertEquals(6, ingestionSpecs.size());
  final List<DimensionsSpec> dimensionsSpecs = new ArrayList<>(6);
  IntStream.range(0, 6).forEach(i -> dimensionsSpecs.add(customSpec));
  assertIngestionSchema(
      ingestionSpecs,
      dimensionsSpecs,
      AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
      SEGMENT_INTERVALS,
      Granularities.MONTH,
      Granularities.NONE,
      IOConfig.DEFAULT_DROP_EXISTING
  );
}
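Passing a non-null DimensionsSpec to createIngestionSchema overrides the types compaction would otherwise infer from the existing segments, which is why the same customSpec is expected back for all six intervals. A trimmed sketch of the pattern with one dimension of each type (the names are illustrative):

DimensionsSpec customSpec = new DimensionsSpec(Arrays.asList(
    new StringDimensionSchema("string_dim_0"),
    new LongDimensionSchema("long_dim_0"),
    new FloatDimensionSchema("float_dim_0"),
    new DoubleDimensionSchema("double_dim_0") // doubles keep 64-bit precision, unlike floats
));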
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.
From class DruidSegmentReaderTest, method testReaderTimestampSpecDefault:
@Test
public void testReaderTimestampSpecDefault() throws IOException {
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec(null, null, DateTimes.of("1971")),
      new DimensionsSpec(
          ImmutableList.of(
              StringDimensionSchema.create("s"),
              new DoubleDimensionSchema("d")
          )
      ),
      ColumnsFilter.all(),
      null,
      temporaryFolder.newFolder()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(
              DateTimes.of("1971"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                          .put("__time", DateTimes.of("2000T").getMillis())
                          .put("s", "foo")
                          .put("d", 1.23d)
                          .put("cnt", 1L)
                          .put("met_s", makeHLLC("foo"))
                          .build()
          ),
          new MapBasedInputRow(
              DateTimes.of("1971"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                          .put("__time", DateTimes.of("2000T01").getMillis())
                          .put("s", "bar")
                          .put("d", 4.56d)
                          .put("cnt", 1L)
                          .put("met_s", makeHLLC("bar"))
                          .build()
          )
      ),
      readRows(reader)
  );
}
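Because the TimestampSpec names no timestamp column, every row the reader yields gets the missing-value default of 1971, while the segment's stored time remains available only as the __time map entry. A sketch of inspecting the first row, reusing the reader and the readRows helper from the test above (the expected values mirror its assertion):

InputRow row = readRows(reader).get(0);
Assert.assertEquals(DateTimes.of("1971"), row.getTimestamp());                 // spec default, not segment time
Assert.assertEquals(DateTimes.of("2000T").getMillis(), row.getRaw("__time"));  // stored time is still a column
Assert.assertEquals(1.23d, row.getRaw("d"));                                   // double dimension keeps its type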