Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
The class IncrementalIndexTest, method constructorFeeder.
@Parameterized.Parameters(name = "{index}: {0}, {1}, deserialize={2}")
public static Collection<?> constructorFeeder()
{
  DimensionsSpec dimensions = new DimensionsSpec(
      Arrays.asList(
          new StringDimensionSchema("string"),
          new FloatDimensionSchema("float"),
          new LongDimensionSchema("long"),
          new DoubleDimensionSchema("double")
      )
  );
  AggregatorFactory[] metrics = {
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("cnt"),
          new SelectorDimFilter("billy", "A", null)
      )
  };
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withQueryGranularity(Granularities.MINUTE)
      .withDimensionsSpec(dimensions)
      .withMetrics(metrics)
      .build();
  return IncrementalIndexCreator.indexTypeCartesianProduct(
      ImmutableList.of("rollup", "plain"),
      ImmutableList.of(true, false),
      ImmutableList.of(schema)
  );
}
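constructorFeeder feeds JUnit's Parameterized runner: each element of the returned collection becomes one constructor invocation, labeled by the "{index}: {0}, {1}, deserialize={2}" name pattern. As a rough standalone sketch of what a cartesian-product helper like IncrementalIndexCreator.indexTypeCartesianProduct produces (the actual Druid helper differs in detail, e.g. it also folds in the registered index types), consider:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

// Illustrative only: expands N parameter lists into all combinations,
// yielding one Object[] per generated test case.
public final class ParamCartesianProduct
{
  private ParamCartesianProduct() {}

  public static Collection<Object[]> of(List<?>... axes)
  {
    List<Object[]> rows = new ArrayList<>();
    rows.add(new Object[0]);
    for (List<?> axis : axes) {
      List<Object[]> next = new ArrayList<>();
      for (Object[] row : rows) {
        for (Object value : axis) {
          Object[] extended = Arrays.copyOf(row, row.length + 1);
          extended[row.length] = value;
          next.add(extended);
        }
      }
      rows = next;
    }
    return rows;
  }
}

Applied to the lists above, this would yield four cases: {rollup, true}, {rollup, false}, {plain, true}, {plain, false}, each paired with the shared schema.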
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
The class InputRowSerdeTest, method testSerde.
@Test
public void testSerde() {
// Prepare the mocks & set close() call count expectation to 1
final Aggregator mockedAggregator = EasyMock.createMock(DoubleSumAggregator.class);
EasyMock.expect(mockedAggregator.isNull()).andReturn(false).times(1);
EasyMock.expect(mockedAggregator.getDouble()).andReturn(0d).times(1);
mockedAggregator.aggregate();
EasyMock.expectLastCall().times(1);
mockedAggregator.close();
EasyMock.expectLastCall().times(1);
EasyMock.replay(mockedAggregator);
final Aggregator mockedNullAggregator = EasyMock.createMock(DoubleSumAggregator.class);
EasyMock.expect(mockedNullAggregator.isNull()).andReturn(true).times(1);
mockedNullAggregator.aggregate();
EasyMock.expectLastCall().times(1);
mockedNullAggregator.close();
EasyMock.expectLastCall().times(1);
EasyMock.replay(mockedNullAggregator);
final AggregatorFactory mockedAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
EasyMock.expect(mockedAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class))).andReturn(mockedAggregator);
EasyMock.expect(mockedAggregatorFactory.getIntermediateType()).andReturn(ColumnType.DOUBLE).anyTimes();
EasyMock.expect(mockedAggregatorFactory.getName()).andReturn("mockedAggregator").anyTimes();
final AggregatorFactory mockedNullAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
EasyMock.expect(mockedNullAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class))).andReturn(mockedNullAggregator);
EasyMock.expect(mockedNullAggregatorFactory.getName()).andReturn("mockedNullAggregator").anyTimes();
EasyMock.expect(mockedNullAggregatorFactory.getIntermediateType()).andReturn(ColumnType.DOUBLE).anyTimes();
EasyMock.replay(mockedAggregatorFactory, mockedNullAggregatorFactory);
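// With all mocks now in replay mode, round-trip a row through InputRowSerde
// and assert the deserialized dimensions and metrics below.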
InputRow in = new MapBasedInputRow(timestamp, dims, event);
AggregatorFactory[] aggregatorFactories = new AggregatorFactory[] {
    new DoubleSumAggregatorFactory("agg_non_existing", "agg_non_existing_in"),
    new DoubleSumAggregatorFactory("m1out", "m1"),
    new LongSumAggregatorFactory("m2out", "m2"),
    new HyperUniquesAggregatorFactory("m3out", "m3"),
    // Unparseable from String to Long
    new LongSumAggregatorFactory("unparseable", "m3"),
    mockedAggregatorFactory,
    mockedNullAggregatorFactory
};
DimensionsSpec dimensionsSpec = new DimensionsSpec(
    Arrays.asList(
        new StringDimensionSchema("d1"),
        new StringDimensionSchema("d2"),
        new LongDimensionSchema("d3"),
        new FloatDimensionSchema("d4"),
        new DoubleDimensionSchema("d5")
    )
);
byte[] data = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories)
                           .getSerializedRow();
InputRow out = InputRowSerde.fromBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), data, aggregatorFactories);
Assert.assertEquals(timestamp, out.getTimestampFromEpoch());
Assert.assertEquals(dims, out.getDimensions());
Assert.assertEquals(Collections.emptyList(), out.getDimension("dim_non_existing"));
Assert.assertEquals(ImmutableList.of("d1v"), out.getDimension("d1"));
Assert.assertEquals(ImmutableList.of("d2v1", "d2v2"), out.getDimension("d2"));
Assert.assertEquals(200L, out.getRaw("d3"));
Assert.assertEquals(300.1f, out.getRaw("d4"));
Assert.assertEquals(400.5d, out.getRaw("d5"));
Assert.assertEquals(NullHandling.defaultDoubleValue(), out.getMetric("agg_non_existing"));
Assert.assertEquals(5.0f, out.getMetric("m1out").floatValue(), 0.00001);
Assert.assertEquals(100L, out.getMetric("m2out"));
Assert.assertEquals(1, ((HyperLogLogCollector) out.getRaw("m3out")).estimateCardinality(), 0.001);
Assert.assertEquals(NullHandling.defaultLongValue(), out.getMetric("unparseable"));
EasyMock.verify(mockedAggregator);
EasyMock.verify(mockedNullAggregator);
}
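The aggregator mocks above follow EasyMock's record/replay/verify lifecycle: expectations are recorded, replay() arms the mock, and verify() fails the test if any expected call (here, aggregate() and close() exactly once each) did not happen. A minimal standalone illustration of that cycle, using Runnable as a stand-in collaborator rather than anything from the Druid test:

import org.easymock.EasyMock;

public class EasyMockLifecycleExample
{
  public static void main(String[] args)
  {
    Runnable task = EasyMock.createMock(Runnable.class);
    task.run();                          // record: expect exactly one call
    EasyMock.expectLastCall().times(1);
    EasyMock.replay(task);               // switch from record to replay mode
    task.run();                          // exercise the mock
    EasyMock.verify(task);               // fails unless run() was called once
  }
}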
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
The class InputRowSerdeTest, method testDimensionParseExceptions.
@Test
public void testDimensionParseExceptions() {
InputRowSerde.SerializeResult result;
InputRow in = new MapBasedInputRow(timestamp, dims, event);
AggregatorFactory[] aggregatorFactories = new AggregatorFactory[] { new LongSumAggregatorFactory("m2out", "m2") };
DimensionsSpec dimensionsSpec = new DimensionsSpec(Collections.singletonList(new LongDimensionSchema("d1")));
result = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories);
Assert.assertEquals(Collections.singletonList("could not convert value [d1v] to long"), result.getParseExceptionMessages());
dimensionsSpec = new DimensionsSpec(Collections.singletonList(new FloatDimensionSchema("d1")));
result = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories);
Assert.assertEquals(Collections.singletonList("could not convert value [d1v] to float"), result.getParseExceptionMessages());
dimensionsSpec = new DimensionsSpec(Collections.singletonList(new DoubleDimensionSchema("d1")));
result = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories);
Assert.assertEquals(Collections.singletonList("could not convert value [d1v] to double"), result.getParseExceptionMessages());
}
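All three assertions exercise the same failure: the string "d1v" stored under dimension d1 has no numeric representation, so each typed schema reports a conversion error rather than silently coercing. A plain-Java sketch of that failure mode (Druid's actual conversion path lives in its type-helper code and differs in detail; only the message shape mirrors the assertions above):

// Illustrative only: returns null on success, or an error message on failure.
public class NumericCoercionExample
{
  static String tryConvert(String value, String type)
  {
    try {
      switch (type) {
        case "long":   Long.parseLong(value);     break;
        case "float":  Float.parseFloat(value);   break;
        case "double": Double.parseDouble(value); break;
        default: throw new IllegalArgumentException(type);
      }
      return null; // conversion succeeded, no parse exception
    }
    catch (NumberFormatException e) {
      return "could not convert value [" + value + "] to " + type;
    }
  }

  public static void main(String[] args)
  {
    System.out.println(tryConvert("d1v", "long"));    // could not convert value [d1v] to long
    System.out.println(tryConvert("300.1", "float")); // null: parses fine
  }
}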
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
The class BloomFilterSqlAggregatorTest, method createQuerySegmentWalker.
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker() throws IOException {
InputRowParser parser = new MapInputRowParser(
    new TimeAndDimsParseSpec(
        new TimestampSpec("t", "iso", null),
        new DimensionsSpec(
            ImmutableList.<DimensionSchema>builder()
                .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2", "dim3")))
                .add(new DoubleDimensionSchema("d1"))
                .add(new FloatDimensionSchema("f1"))
                .add(new LongDimensionSchema("l1"))
                .build()
        )
    )
);
final QueryableIndex index = IndexBuilder
    .create()
    .tmpDir(temporaryFolder.newFolder())
    .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(
                new CountAggregatorFactory("cnt"),
                new DoubleSumAggregatorFactory("m1", "m1")
            )
            .withDimensionsSpec(parser)
            .withRollup(false)
            .build()
    )
    .rows(CalciteTests.ROWS1_WITH_NUMERIC_DIMS)
    .buildMMappedIndex();
return new SpecificSegmentsQuerySegmentWalker(conglomerate).add(
    DataSegment.builder()
               .dataSource(DATA_SOURCE)
               .interval(index.getDataInterval())
               .version("1")
               .shardSpec(new LinearShardSpec(0))
               .size(0)
               .build(),
    index
);
}
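The parser above pairs an ISO timestamp column t with three default string dimensions and three explicitly typed numeric dimensions. Hypothetically, one row of the CalciteTests.ROWS1_WITH_NUMERIC_DIMS fixture consumed here could look like the map below; the values are invented for illustration, and only the column names and types follow from the spec:

import com.google.common.collect.ImmutableMap;
import java.util.Map;

// Hypothetical input row shaped by the TimestampSpec and DimensionsSpec above;
// the real fixture's values may differ.
public class NumericDimsRowExample
{
  public static Map<String, Object> exampleRow()
  {
    return ImmutableMap.<String, Object>builder()
        .put("t", "2000-01-01T00:00:00Z") // TimestampSpec("t", "iso", null)
        .put("dim1", "a")                 // default string schemas dim1..dim3
        .put("dim2", "b")
        .put("dim3", "c")
        .put("d1", 1.0)                   // DoubleDimensionSchema("d1")
        .put("f1", 2.0f)                  // FloatDimensionSchema("f1")
        .put("l1", 3L)                    // LongDimensionSchema("l1")
        .put("m1", 4.0)                   // feeds DoubleSumAggregatorFactory("m1", "m1")
        .build();
  }
}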