Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class DruidSegmentReaderTest, method testReaderAutoTimestampFormat:
@Test
public void testReaderAutoTimestampFormat() throws IOException {
// The "auto" timestamp format detects either ISO strings or epoch millis in the "__time" column.
final DruidSegmentReader reader = new DruidSegmentReader(
    makeInputEntity(Intervals.of("2000/P1D")),
    indexIO,
    new TimestampSpec("__time", "auto", DateTimes.of("1971")),
    new DimensionsSpec(
        ImmutableList.of(
            StringDimensionSchema.create("s"),
            new DoubleDimensionSchema("d")
        )
    ),
    ColumnsFilter.all(),
    null,
    temporaryFolder.newFolder()
);
Assert.assertEquals(
    ImmutableList.of(
        new MapBasedInputRow(
            DateTimes.of("2000"),
            ImmutableList.of("s", "d"),
            ImmutableMap.<String, Object>builder()
                .put("__time", DateTimes.of("2000T").getMillis())
                .put("s", "foo")
                .put("d", 1.23d)
                .put("cnt", 1L)
                .put("met_s", makeHLLC("foo"))
                .build()
        ),
        new MapBasedInputRow(
            DateTimes.of("2000T01"),
            ImmutableList.of("s", "d"),
            ImmutableMap.<String, Object>builder()
                .put("__time", DateTimes.of("2000T01").getMillis())
                .put("s", "bar")
                .put("d", 4.56d)
                .put("cnt", 1L)
                .put("met_s", makeHLLC("bar"))
                .build()
        )
    ),
    readRows(reader)
);
}
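The explicit DoubleDimensionSchema in the spec above is what keeps "d" a numeric column; dimensions produced by DimensionsSpec.getDefaultSchemas default to string schemas. A minimal self-contained sketch of the same declaration in isolation (the class name and the helper method are illustrative, not from the test):
import com.google.common.collect.ImmutableList;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.DoubleDimensionSchema;
import org.apache.druid.data.input.impl.StringDimensionSchema;

public class MixedDimensionsExample {
  // Illustrative helper: one string dimension and one explicitly double-typed dimension.
  public static DimensionsSpec mixedDimensions() {
    return new DimensionsSpec(
        ImmutableList.of(
            StringDimensionSchema.create("s"),  // stored as a string column
            new DoubleDimensionSchema("d")      // stored as a double column
        )
    );
  }
}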
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class DimensionHandlerUtilsTest, method testGetHandlerFromDoubleCapabilities:
@Test
public void testGetHandlerFromDoubleCapabilities() {
ColumnCapabilities capabilities = ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.DOUBLE);
DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(DIM_NAME, capabilities, null);
Assert.assertTrue(handler instanceof DoubleDimensionHandler);
Assert.assertTrue(handler.getDimensionSchema(capabilities) instanceof DoubleDimensionSchema);
}
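This capabilities-to-schema mapping is what lets a typed DimensionSchema be re-derived from an existing segment's columns. A minimal sketch in the same test context, assuming the same classes are on the classpath; the column name "price" is illustrative:
// Capabilities of a double column, as would be reported for an existing segment.
ColumnCapabilities caps = ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.DOUBLE);

// Resolving a handler for those capabilities and asking it for a schema
// yields a DoubleDimensionSchema named "price".
DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities("price", caps, null);
DimensionSchema schema = handler.getDimensionSchema(caps);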
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by apache.
From the class QueryableIndexColumnCapabilitiesTest, method setup:
@BeforeClass
public static void setup() throws IOException {
// d1 and d2 use the default (string) schemas; d3, d4, and d5 are explicitly typed.
MapInputRowParser parser = new MapInputRowParser(
    new TimeAndDimsParseSpec(
        new TimestampSpec("time", "auto", null),
        new DimensionsSpec(
            ImmutableList.<DimensionSchema>builder()
                .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1", "d2")))
                .add(new DoubleDimensionSchema("d3"))
                .add(new FloatDimensionSchema("d4"))
                .add(new LongDimensionSchema("d5"))
                .build()
        )
    )
);
AggregatorFactory[] metricsSpecs = new AggregatorFactory[]{
    new CountAggregatorFactory("cnt"),
    new DoubleSumAggregatorFactory("m1", "d3"),
    new FloatSumAggregatorFactory("m2", "d4"),
    new LongSumAggregatorFactory("m3", "d5"),
    new HyperUniquesAggregatorFactory("m4", "d1")
};
List<InputRow> rows = new ArrayList<>();
Map<String, Object> event = ImmutableMap.<String, Object>builder()
    .put("time", DateTimes.nowUtc().getMillis())
    .put("d1", "some string")
    .put("d2", ImmutableList.of("some", "list"))
    .put("d3", 1.234)
    .put("d4", 1.234f)
    .put("d5", 10L)
    .build();
rows.add(Iterables.getOnlyElement(parser.parseBatch(event)));
IndexBuilder builder = IndexBuilder.create()
    .rows(rows)
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(metricsSpecs)
            .withDimensionsSpec(parser)
            .withRollup(false)
            .build()
    )
    .tmpDir(temporaryFolder.newFolder());
INC_INDEX = builder.buildIncrementalIndex();
MMAP_INDEX = builder.buildMMappedIndex();
List<InputRow> rowsWithNulls = new ArrayList<>();
rowsWithNulls.add(Iterables.getOnlyElement(parser.parseBatch(event)));
Map<String, Object> eventWithNulls = new HashMap<>();
eventWithNulls.put("time", DateTimes.nowUtc().getMillis());
eventWithNulls.put("d1", null);
eventWithNulls.put("d2", ImmutableList.of());
eventWithNulls.put("d3", null);
eventWithNulls.put("d4", null);
eventWithNulls.put("d5", null);
rowsWithNulls.add(Iterables.getOnlyElement(parser.parseBatch(eventWithNulls)));
IndexBuilder builderWithNulls = IndexBuilder.create()
    .rows(rowsWithNulls)
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(metricsSpecs)
            .withDimensionsSpec(parser)
            .withRollup(false)
            .build()
    )
    .tmpDir(temporaryFolder.newFolder());
INC_INDEX_WITH_NULLS = builderWithNulls.buildIncrementalIndex();
MMAP_INDEX_WITH_NULLS = builderWithNulls.buildMMappedIndex();
}
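The IndexBuilder pipeline above can be reduced to its essentials when only a double dimension matters. A minimal sketch in the same test context, assuming the IncrementalIndexSchema.Builder overload that takes a DimensionsSpec directly (rather than a parser) is available:
// One ingested row with a single double dimension "d"; rollup disabled as above.
List<InputRow> rows = ImmutableList.of(
    new MapBasedInputRow(
        DateTimes.nowUtc(),
        ImmutableList.of("d"),
        ImmutableMap.of("d", 1.23d)
    )
);
IncrementalIndex index = IndexBuilder.create()
    .rows(rows)
    .schema(
        new IncrementalIndexSchema.Builder()
            .withDimensionsSpec(new DimensionsSpec(ImmutableList.of(new DoubleDimensionSchema("d"))))
            .withRollup(false)
            .build()
    )
    .tmpDir(temporaryFolder.newFolder())
    .buildIncrementalIndex();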
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.
From the class DruidSegmentReaderTest, method testReaderWithInclusiveColumnsFilterNoTimestamp:
@Test
public void testReaderWithInclusiveColumnsFilterNoTimestamp() throws IOException {
final DruidSegmentReader reader = new DruidSegmentReader(
    makeInputEntity(Intervals.of("2000/P1D")),
    indexIO,
    new TimestampSpec("__time", "millis", DateTimes.of("1971")),
    new DimensionsSpec(
        ImmutableList.of(
            StringDimensionSchema.create("s"),
            new DoubleDimensionSchema("d")
        )
    ),
    ColumnsFilter.inclusionBased(ImmutableSet.of("s", "d")),
    null,
    temporaryFolder.newFolder()
);
Assert.assertEquals(
    ImmutableList.of(
        new MapBasedInputRow(
            DateTimes.of("1971"),
            ImmutableList.of("s", "d"),
            ImmutableMap.<String, Object>builder()
                .put("s", "foo")
                .put("d", 1.23d)
                .build()
        ),
        new MapBasedInputRow(
            DateTimes.of("1971"),
            ImmutableList.of("s", "d"),
            ImmutableMap.<String, Object>builder()
                .put("s", "bar")
                .put("d", 4.56d)
                .build()
        )
    ),
    readRows(reader)
);
}
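Note what the assertion shows: the inclusion-based filter does not list "__time", so the reader never sees the stored timestamps and every row falls back to the TimestampSpec's missing-value default of 1971. A short sketch of the two filter modes these tests exercise, in the same context:
// Read every column, including "__time".
ColumnsFilter everything = ColumnsFilter.all();

// Read only "s" and "d"; "__time" is absent, so the TimestampSpec's
// missingValue (DateTimes.of("1971")) is applied to every row.
ColumnsFilter onlyDims = ColumnsFilter.inclusionBased(ImmutableSet.of("s", "d"));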
Use of org.apache.druid.data.input.impl.DoubleDimensionSchema in project druid by druid-io.
From the class DruidSegmentReaderTest, method testReaderWithFilter:
@Test
public void testReaderWithFilter() throws IOException {
final DruidSegmentReader reader = new DruidSegmentReader(
    makeInputEntity(Intervals.of("2000/P1D")),
    indexIO,
    new TimestampSpec("__time", "millis", DateTimes.of("1971")),
    new DimensionsSpec(
        ImmutableList.of(
            StringDimensionSchema.create("s"),
            new DoubleDimensionSchema("d")
        )
    ),
    ColumnsFilter.all(),
    new SelectorDimFilter("d", "1.23", null),
    temporaryFolder.newFolder()
);
Assert.assertEquals(
    ImmutableList.of(
        new MapBasedInputRow(
            DateTimes.of("2000"),
            ImmutableList.of("s", "d"),
            ImmutableMap.<String, Object>builder()
                .put("__time", DateTimes.of("2000T").getMillis())
                .put("s", "foo")
                .put("d", 1.23d)
                .put("cnt", 1L)
                .put("met_s", makeHLLC("foo"))
                .build()
        )
    ),
    readRows(reader)
);
}
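The filter is worth a note: SelectorDimFilter takes its match value as a string, and as the assertion confirms, "1.23" selects the row whose double dimension "d" equals 1.23 while the "bar"/4.56 row is dropped. A minimal sketch of the construction, in the same context:
// Arguments: dimension name, value to match (always a string),
// and an optional ExtractionFn (null here).
DimFilter matchesFirstRow = new SelectorDimFilter("d", "1.23", null);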