
Example 16 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.

From the class InputRowSerdeTest, the method testDimensionNullOrDefaultForNumerics.

@Test
public void testDimensionNullOrDefaultForNumerics() {
    HashMap<String, Object> eventWithNulls = new HashMap<>();
    eventWithNulls.put("d1", null);
    eventWithNulls.put("d2", Arrays.asList("d2v1", "d2v2"));
    eventWithNulls.put("d3", null);
    eventWithNulls.put("d4", null);
    eventWithNulls.put("d5", null);
    InputRow in = new MapBasedInputRow(timestamp, dims, eventWithNulls);
    DimensionsSpec dimensionsSpec = new DimensionsSpec(
        Arrays.asList(
            new StringDimensionSchema("d1"),
            new StringDimensionSchema("d2"),
            new LongDimensionSchema("d3"),
            new FloatDimensionSchema("d4"),
            new DoubleDimensionSchema("d5")
        )
    );
    byte[] result = InputRowSerde.toBytes(
        InputRowSerde.getTypeHelperMap(dimensionsSpec),
        in,
        new AggregatorFactory[0]
    ).getSerializedRow();
    if (NullHandling.replaceWithDefault()) {
        long expected = 0;
        // timestamp bytes + dims length
        expected += 9;
        // dim_non_existing: writes 1 + 16 + 1 bytes
        expected += 18;
        // d1: writes 1 + 2 + 1 bytes
        expected += 4;
        // d2: writes 1 + 2 + 1 + 1 + 4 + 1 + 4 bytes
        expected += 14;
        // d3: writes 1 + 2 + 8 bytes
        expected += 11;
        // d4: writes 1 + 2 + 4 bytes
        expected += 7;
        // d5: writes 1 + 2 + 8 bytes
        expected += 11;
        // writes aggregator length (1 byte)
        expected += 1;
        Assert.assertEquals(expected, result.length);
        // In default-value mode the null numerics are written as zeroed values:
        // d3 (long, bytes 48-56), d4 (float, bytes 59-63), d5 (double, bytes 66-74).
        Assert.assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0 }, Arrays.copyOfRange(result, 48, 56));
        Assert.assertArrayEquals(new byte[] { 0, 0, 0, 0 }, Arrays.copyOfRange(result, 59, 63));
        Assert.assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0 }, Arrays.copyOfRange(result, 66, 74));
    } else {
        // SQL-compatible null handling: each null numeric (d3, d4, d5) writes
        // only 1 + 2 + 1 bytes, ending in a null marker instead of a value.
        long expected = 9 + 18 + 4 + 14 + 4 + 4 + 4 + 1;
        Assert.assertEquals(expected, result.length);
        Assert.assertEquals(result[48], NullHandling.IS_NULL_BYTE);
        Assert.assertEquals(result[52], NullHandling.IS_NULL_BYTE);
        Assert.assertEquals(result[56], NullHandling.IS_NULL_BYTE);
    }
}
Also used: HashMap(java.util.HashMap) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InputRow(org.apache.druid.data.input.InputRow) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) Test(org.junit.Test)
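
As a quick cross-check of the layout arithmetic above, the serialized bytes can be turned back into a row. A minimal round-trip sketch, assuming InputRowSerde exposes a fromBytes(typeHelperMap, bytes, aggs) counterpart to toBytes, as in the indexing-hadoop module:

// Hedged sketch: deserialize the row serialized above. Numeric nulls come
// back as 0 in default-value mode, or as null in SQL-compatible mode.
InputRow roundTripped = InputRowSerde.fromBytes(
    InputRowSerde.getTypeHelperMap(dimensionsSpec),
    result,
    new AggregatorFactory[0]
);
Assert.assertEquals(in.getDimensions(), roundTripped.getDimensions());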

Example 17 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.

From the class InputRowSerdeTest, the method testThrowParseExceptions.

@Test
public void testThrowParseExceptions() {
    InputRow in = new MapBasedInputRow(timestamp, dims, event);
    AggregatorFactory[] aggregatorFactories = new AggregatorFactory[] {
        new DoubleSumAggregatorFactory("agg_non_existing", "agg_non_existing_in"),
        new DoubleSumAggregatorFactory("m1out", "m1"),
        new LongSumAggregatorFactory("m2out", "m2"),
        new HyperUniquesAggregatorFactory("m3out", "m3"),
        // Unparseable from String to Long
        new LongSumAggregatorFactory("unparseable", "m3")
    };
    DimensionsSpec dimensionsSpec = new DimensionsSpec(
        Arrays.asList(
            new StringDimensionSchema("d1"),
            new StringDimensionSchema("d2"),
            new LongDimensionSchema("d3"),
            new FloatDimensionSchema("d4"),
            new DoubleDimensionSchema("d5")
        )
    );
    InputRowSerde.SerializeResult result = InputRowSerde.toBytes(
        InputRowSerde.getTypeHelperMap(dimensionsSpec),
        in,
        aggregatorFactories
    );
    Assert.assertEquals(
        Collections.singletonList("Unable to parse value[m3v] for field[m3]"),
        result.getParseExceptionMessages()
    );
}
Also used: DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InputRow(org.apache.druid.data.input.InputRow) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) Test(org.junit.Test)
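
Note that toBytes does not throw on bad input; failures accumulate in the SerializeResult. A hedged sketch of how a caller might act on them (the fail-fast policy here is illustrative, not Druid's own):

List<String> parseErrors = result.getParseExceptionMessages();
if (!parseErrors.isEmpty()) {
    // Illustrative policy only: a real ingestion task would more likely
    // count these against a configured maxParseExceptions threshold.
    throw new IllegalStateException("Row had parse errors: " + parseErrors);
}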

Example 18 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.

From the class DruidSegmentReaderTest, the method testReaderTimestampAsPosixIncorrectly. The segment's __time column stores milliseconds, so reading it with a "posix" (seconds) TimestampSpec inflates every timestamp by a factor of about 1000, which is why the expected rows land in the year 31969.

@Test
public void testReaderTimestampAsPosixIncorrectly() throws IOException {
    final DruidSegmentReader reader = new DruidSegmentReader(
        makeInputEntity(Intervals.of("2000/P1D")),
        indexIO,
        new TimestampSpec("__time", "posix", null),
        new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
        ColumnsFilter.all(),
        null,
        temporaryFolder.newFolder()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("31969-04-01T00:00:00.000Z"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T").getMillis())
                    .put("s", "foo")
                    .put("d", 1.23d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("foo"))
                    .build()
            ),
            new MapBasedInputRow(
                DateTimes.of("31969-05-12T16:00:00.000Z"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T01").getMillis())
                    .put("s", "bar")
                    .put("d", 4.56d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("bar"))
                    .build()
            )
        ),
        readRows(reader)
    );
}
Also used: DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) NullHandlingTest(org.apache.druid.common.config.NullHandlingTest) Test(org.junit.Test)
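
readRows is a private helper of DruidSegmentReaderTest. A plausible sketch of it, assuming it simply drains the CloseableIterator returned by InputEntityReader.read() (the real helper may differ):

private static List<InputRow> readRows(DruidSegmentReader reader) throws IOException {
    // Hedged sketch: collect every row the reader yields, closing the
    // iterator when done.
    final List<InputRow> rows = new ArrayList<>();
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        while (iterator.hasNext()) {
            rows.add(iterator.next());
        }
    }
    return rows;
}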

Example 19 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.

From the class DruidSegmentReaderTest, the method testReader.

@Test
public void testReader() throws IOException {
    final DruidSegmentReader reader = new DruidSegmentReader(
        makeInputEntity(Intervals.of("2000/P1D")),
        indexIO,
        new TimestampSpec("__time", "millis", DateTimes.of("1971")),
        new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
        ColumnsFilter.all(),
        null,
        temporaryFolder.newFolder()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("2000"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T").getMillis())
                    .put("s", "foo")
                    .put("d", 1.23d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("foo"))
                    .build()
            ),
            new MapBasedInputRow(
                DateTimes.of("2000T01"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T01").getMillis())
                    .put("s", "bar")
                    .put("d", 4.56d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("bar"))
                    .build()
            )
        ),
        readRows(reader)
    );
}
Also used: DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) NullHandlingTest(org.apache.druid.common.config.NullHandlingTest) Test(org.junit.Test)
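
makeHLLC is likewise a private test helper. A plausible sketch, assuming the HyperLogLogCollector and HyperLogLogHash API from Druid's hll module (the actual helper may differ):

private static HyperLogLogCollector makeHLLC(String... values) {
    // Hedged sketch: hash each value into a fresh collector, mirroring how
    // the "met_s" metric is built from column "s" in setUp below.
    final HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
    for (String value : values) {
        collector.add(HyperLogLogHash.getDefault().hash(value));
    }
    return collector;
}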

Example 20 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.

From the class DruidSegmentReaderTest, the method setUp.

@Before
public void setUp() throws IOException {
    // Write a segment with two rows in it, with columns: s (string), d (double), cnt (long), met_s (complex).
    final IncrementalIndex incrementalIndex = IndexBuilder
        .create()
        .schema(
            new IncrementalIndexSchema.Builder()
                .withDimensionsSpec(
                    new DimensionsSpec(
                        ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))
                    )
                )
                .withMetrics(
                    new CountAggregatorFactory("cnt"),
                    new HyperUniquesAggregatorFactory("met_s", "s")
                )
                .withRollup(false)
                .build()
        )
        .rows(
            ImmutableList.of(
                new MapBasedInputRow(
                    DateTimes.of("2000"),
                    ImmutableList.of("s", "d"),
                    ImmutableMap.<String, Object>builder().put("s", "foo").put("d", 1.23).build()
                ),
                new MapBasedInputRow(
                    DateTimes.of("2000T01"),
                    ImmutableList.of("s", "d"),
                    ImmutableMap.<String, Object>builder().put("s", "bar").put("d", 4.56).build()
                )
            )
        )
        .buildIncrementalIndex();
    segmentDirectory = temporaryFolder.newFolder();
    try {
        TestHelper.getTestIndexMergerV9(OnHeapMemorySegmentWriteOutMediumFactory.instance()).persist(incrementalIndex, segmentDirectory, new IndexSpec(), null);
    } finally {
        incrementalIndex.close();
    }
}
Also used: DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) IndexSpec(org.apache.druid.segment.IndexSpec) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) IncrementalIndexSchema(org.apache.druid.segment.incremental.IncrementalIndexSchema) Before(org.junit.Before)
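
To double-check what setUp persisted, the segment directory can be loaded back through the test's indexIO field; a minimal sketch, assuming IndexIO.loadIndex(File):

// Hedged sketch: reload the persisted segment and verify its dimensions.
QueryableIndex index = indexIO.loadIndex(segmentDirectory);
try {
    Assert.assertEquals(
        Arrays.asList("s", "d"),
        Lists.newArrayList(index.getAvailableDimensions())
    );
} finally {
    index.close();
}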

Aggregations

DoubleDimensionSchema (org.apache.druid.data.input.impl.DoubleDimensionSchema): 44 uses
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 38 uses
Test (org.junit.Test): 34 uses
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 26 uses
FloatDimensionSchema (org.apache.druid.data.input.impl.FloatDimensionSchema): 20 uses
LongDimensionSchema (org.apache.druid.data.input.impl.LongDimensionSchema): 20 uses
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 20 uses
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 18 uses
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 18 uses
NullHandlingTest (org.apache.druid.common.config.NullHandlingTest): 16 uses
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 16 uses
StringDimensionSchema (org.apache.druid.data.input.impl.StringDimensionSchema): 14 uses
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 12 uses
HyperUniquesAggregatorFactory (org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory): 12 uses
InputRow (org.apache.druid.data.input.InputRow): 10 uses
LongMaxAggregatorFactory (org.apache.druid.query.aggregation.LongMaxAggregatorFactory): 8 uses
FloatFirstAggregatorFactory (org.apache.druid.query.aggregation.first.FloatFirstAggregatorFactory): 8 uses
DoubleLastAggregatorFactory (org.apache.druid.query.aggregation.last.DoubleLastAggregatorFactory): 8 uses
DimensionSchema (org.apache.druid.data.input.impl.DimensionSchema): 6 uses
PartitionConfigurationManager (org.apache.druid.indexing.common.task.CompactionTask.PartitionConfigurationManager): 6 uses