
Example 26 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.

The class DruidSegmentReaderTest, method setUp:

@Before
public void setUp() throws IOException {
    // Write a segment with two rows in it, with columns: s (string), d (double), cnt (long), met_s (complex).
    final IncrementalIndex incrementalIndex = IndexBuilder.create()
        .schema(
            new IncrementalIndexSchema.Builder()
                .withDimensionsSpec(new DimensionsSpec(ImmutableList.of(
                    StringDimensionSchema.create("s"),
                    new DoubleDimensionSchema("d")
                )))
                .withMetrics(new CountAggregatorFactory("cnt"), new HyperUniquesAggregatorFactory("met_s", "s"))
                .withRollup(false)
                .build()
        )
        .rows(ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("2000"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder().put("s", "foo").put("d", 1.23).build()
            ),
            new MapBasedInputRow(
                DateTimes.of("2000T01"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder().put("s", "bar").put("d", 4.56).build()
            )
        ))
        .buildIncrementalIndex();
    segmentDirectory = temporaryFolder.newFolder();
    try {
        TestHelper.getTestIndexMergerV9(OnHeapMemorySegmentWriteOutMediumFactory.instance())
            .persist(incrementalIndex, segmentDirectory, new IndexSpec(), null);
    } finally {
        incrementalIndex.close();
    }
}
Also used : DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) IndexSpec(org.apache.druid.segment.IndexSpec) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) IncrementalIndexSchema(org.apache.druid.segment.incremental.IncrementalIndexSchema) Before(org.junit.Before)
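
For orientation, here is the schema piece of the test above isolated into a minimal, self-contained sketch: a DimensionsSpec that registers "d" as a double-typed dimension next to a string dimension "s". It only reuses constructors already shown in the example; the class and method names (DoubleDimensionSpecSketch, stringPlusDouble) are made up for illustration.

import com.google.common.collect.ImmutableList;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.DoubleDimensionSchema;
import org.apache.druid.data.input.impl.StringDimensionSchema;

// Hypothetical illustration class, not part of the Druid test suite.
public class DoubleDimensionSpecSketch {
    // Same dimension layout as setUp(): "s" stored as a string column, "d" as a double column.
    public static DimensionsSpec stringPlusDouble() {
        return new DimensionsSpec(
            ImmutableList.of(
                StringDimensionSchema.create("s"),
                new DoubleDimensionSchema("d")
            )
        );
    }
}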

Example 27 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.

The class DruidSegmentReaderTest, method testReaderAutoTimestampFormat:

@Test
public void testReaderAutoTimestampFormat() throws IOException {
    final DruidSegmentReader reader = new DruidSegmentReader(
        makeInputEntity(Intervals.of("2000/P1D")),
        indexIO,
        // "auto" detects the format of the existing __time values; DateTimes.of("1971")
        // is only the default applied to rows with no timestamp.
        new TimestampSpec("__time", "auto", DateTimes.of("1971")),
        new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
        ColumnsFilter.all(),
        null,
        temporaryFolder.newFolder()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("2000"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T").getMillis()).put("s", "foo").put("d", 1.23d)
                    .put("cnt", 1L).put("met_s", makeHLLC("foo")).build()
            ),
            new MapBasedInputRow(
                DateTimes.of("2000T01"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T01").getMillis()).put("s", "bar").put("d", 4.56d)
                    .put("cnt", 1L).put("met_s", makeHLLC("bar")).build()
            )
        ),
        readRows(reader)
    );
}
Also used : DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) NullHandlingTest(org.apache.druid.common.config.NullHandlingTest) Test(org.junit.Test)

Example 28 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.

The class DruidSegmentReaderTest, method testReaderTimestampFromDouble:

@Test
public void testReaderTimestampFromDouble() throws IOException {
    final DruidSegmentReader reader = new DruidSegmentReader(
        makeInputEntity(Intervals.of("2000/P1D")),
        indexIO,
        new TimestampSpec("d", "posix", null),
        new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
        ColumnsFilter.all(),
        null,
        temporaryFolder.newFolder()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("1970-01-01T00:00:01.000Z"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T").getMillis()).put("s", "foo").put("d", 1.23d)
                    .put("cnt", 1L).put("met_s", makeHLLC("foo")).build()
            ),
            new MapBasedInputRow(
                DateTimes.of("1970-01-01T00:00:04.000Z"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T01").getMillis()).put("s", "bar").put("d", 4.56d)
                    .put("cnt", 1L).put("met_s", makeHLLC("bar")).build()
            )
        ),
        readRows(reader)
    );
}
Also used : DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) NullHandlingTest(org.apache.druid.common.config.NullHandlingTest) Test(org.junit.Test)
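
The expected row timestamps in this assertion come from reading the double column "d" as POSIX seconds. Below is a small, hypothetical helper (not Druid code) that reproduces the arithmetic the assertion implies, assuming the fractional part is dropped before scaling to milliseconds.

// Hypothetical illustration class, not part of the Druid test suite.
public class PosixSecondsSketch {
    // 1.23 -> 1000 ms -> 1970-01-01T00:00:01.000Z
    // 4.56 -> 4000 ms -> 1970-01-01T00:00:04.000Z
    static long posixSecondsToMillis(double seconds) {
        return (long) seconds * 1000L;
    }

    public static void main(String[] args) {
        System.out.println(posixSecondsToMillis(1.23)); // 1000
        System.out.println(posixSecondsToMillis(4.56)); // 4000
    }
}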

Example 29 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.

The class IncrementalIndexTest, method constructorFeeder:

@Parameterized.Parameters(name = "{index}: {0}, {1}, deserialize={2}")
public static Collection<?> constructorFeeder() {
    DimensionsSpec dimensions = new DimensionsSpec(Arrays.asList(
        new StringDimensionSchema("string"),
        new FloatDimensionSchema("float"),
        new LongDimensionSchema("long"),
        new DoubleDimensionSchema("double")
    ));
    AggregatorFactory[] metrics = {
        new FilteredAggregatorFactory(new CountAggregatorFactory("cnt"), new SelectorDimFilter("billy", "A", null))
    };
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withQueryGranularity(Granularities.MINUTE)
        .withDimensionsSpec(dimensions)
        .withMetrics(metrics)
        .build();
    return IncrementalIndexCreator.indexTypeCartesianProduct(
        ImmutableList.of("rollup", "plain"),
        ImmutableList.of(true, false),
        ImmutableList.of(schema)
    );
}
Also used : FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema)

Example 30 with DoubleDimensionSchema

Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.

The class CompactionTaskTest, method setupClass:

@BeforeClass
public static void setupClass() {
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-01-01/2017-02-01"), new StringDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-02-01/2017-03-01"), new StringDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-03-01/2017-04-01"), new StringDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-04-01/2017-05-01"), new StringDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-05-01/2017-06-01"), new DoubleDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-06-01/2017-07-01"), new DoubleDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-06-01/2017-06-02"), new DoubleDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-06-15/2017-06-16"), new DoubleDimensionSchema(MIXED_TYPE_COLUMN));
    MIXED_TYPE_COLUMN_MAP.put(Intervals.of("2017-06-30/2017-07-01"), new DoubleDimensionSchema(MIXED_TYPE_COLUMN));
    DIMENSIONS = new HashMap<>();
    AGGREGATORS = new ArrayList<>();
    DIMENSIONS.put(ColumnHolder.TIME_COLUMN_NAME, new LongDimensionSchema(ColumnHolder.TIME_COLUMN_NAME));
    DIMENSIONS.put(TIMESTAMP_COLUMN, new LongDimensionSchema(TIMESTAMP_COLUMN));
    int numUmbrellaIntervals = 6;
    for (int i = 0; i < numUmbrellaIntervals; i++) {
        final StringDimensionSchema schema = new StringDimensionSchema("string_dim_" + i, null, null);
        DIMENSIONS.put(schema.getName(), schema);
    }
    for (int i = 0; i < numUmbrellaIntervals; i++) {
        final LongDimensionSchema schema = new LongDimensionSchema("long_dim_" + i);
        DIMENSIONS.put(schema.getName(), schema);
    }
    for (int i = 0; i < numUmbrellaIntervals; i++) {
        final FloatDimensionSchema schema = new FloatDimensionSchema("float_dim_" + i);
        DIMENSIONS.put(schema.getName(), schema);
    }
    for (int i = 0; i < numUmbrellaIntervals; i++) {
        final DoubleDimensionSchema schema = new DoubleDimensionSchema("double_dim_" + i);
        DIMENSIONS.put(schema.getName(), schema);
    }
    AGGREGATORS.add(new CountAggregatorFactory("agg_0"));
    AGGREGATORS.add(new LongSumAggregatorFactory("agg_1", "long_dim_1"));
    AGGREGATORS.add(new LongMaxAggregatorFactory("agg_2", "long_dim_2"));
    AGGREGATORS.add(new FloatFirstAggregatorFactory("agg_3", "float_dim_3", null));
    AGGREGATORS.add(new DoubleLastAggregatorFactory("agg_4", "double_dim_4", null));
    for (int i = 0; i < SEGMENT_INTERVALS.size(); i++) {
        SEGMENT_MAP.put(
            new DataSegment(
                DATA_SOURCE,
                SEGMENT_INTERVALS.get(i),
                "version_" + i,
                ImmutableMap.of(),
                findDimensions(i, SEGMENT_INTERVALS.get(i)),
                AGGREGATORS.stream().map(AggregatorFactory::getName).collect(Collectors.toList()),
                new NumberedShardSpec(0, 1),
                0,
                SEGMENT_SIZE_BYTES
            ),
            new File("file_" + i)
        );
    }
    SEGMENTS = new ArrayList<>(SEGMENT_MAP.keySet());
}
Also used : FloatFirstAggregatorFactory(org.apache.druid.query.aggregation.first.FloatFirstAggregatorFactory) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) DataSegment(org.apache.druid.timeline.DataSegment) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) DoubleLastAggregatorFactory(org.apache.druid.query.aggregation.last.DoubleLastAggregatorFactory) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) File(java.io.File) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) BeforeClass(org.junit.BeforeClass)
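
The part of this setup that involves DoubleDimensionSchema is MIXED_TYPE_COLUMN_MAP: the same column name is registered as a string dimension for the 2017-01 through 2017-04 monthly intervals and as a double dimension from 2017-05 onward (the finer June sub-intervals are also double-typed). Below is a compact, hypothetical restatement of that mapping, collapsing the month-by-month entries into two ranges purely for readability; the class and method names are made up for illustration.

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DoubleDimensionSchema;
import org.apache.druid.data.input.impl.StringDimensionSchema;
import org.apache.druid.java.util.common.Intervals;
import org.joda.time.Interval;

// Hypothetical illustration class, not part of the Druid test suite.
public class MixedTypeColumnSketch {
    // Summarizes MIXED_TYPE_COLUMN_MAP: string-typed through April 2017,
    // double-typed from May 2017 onward.
    static Map<Interval, DimensionSchema> mixedTypeColumn(String column) {
        final Map<Interval, DimensionSchema> schemaByInterval = new LinkedHashMap<>();
        schemaByInterval.put(Intervals.of("2017-01-01/2017-05-01"), new StringDimensionSchema(column));
        schemaByInterval.put(Intervals.of("2017-05-01/2017-07-01"), new DoubleDimensionSchema(column));
        return schemaByInterval;
    }
}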

Aggregations

DoubleDimensionSchema (org.apache.druid.data.input.impl.DoubleDimensionSchema) 44
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec) 38
Test (org.junit.Test) 34
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow) 26
FloatDimensionSchema (org.apache.druid.data.input.impl.FloatDimensionSchema) 20
LongDimensionSchema (org.apache.druid.data.input.impl.LongDimensionSchema) 20
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec) 20
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory) 18
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory) 18
NullHandlingTest (org.apache.druid.common.config.NullHandlingTest) 16
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory) 16
StringDimensionSchema (org.apache.druid.data.input.impl.StringDimensionSchema) 14
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) 12
HyperUniquesAggregatorFactory (org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) 12
InputRow (org.apache.druid.data.input.InputRow) 10
LongMaxAggregatorFactory (org.apache.druid.query.aggregation.LongMaxAggregatorFactory) 8
FloatFirstAggregatorFactory (org.apache.druid.query.aggregation.first.FloatFirstAggregatorFactory) 8
DoubleLastAggregatorFactory (org.apache.druid.query.aggregation.last.DoubleLastAggregatorFactory) 8
DimensionSchema (org.apache.druid.data.input.impl.DimensionSchema) 6
PartitionConfigurationManager (org.apache.druid.indexing.common.task.CompactionTask.PartitionConfigurationManager) 6