Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io: class DoublesSketchComplexMetricSerdeTest, method testExtractorOnLeadingDecimalNumber.
@Test
public void testExtractorOnLeadingDecimalNumber() {
  final DoublesSketchComplexMetricSerde serde = new DoublesSketchComplexMetricSerde();
  final ComplexMetricExtractor extractor = serde.getExtractor();
  final DoublesSketch sketch = (DoublesSketch) extractor.extractValue(
      new MapBasedInputRow(0L, ImmutableList.of(), ImmutableMap.of("foo", ".1")),
      "foo");
  Assert.assertEquals(1, sketch.getRetainedItems());
  Assert.assertEquals(0.1d, sketch.getMaxValue(), 0.01d);
}
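For context, a minimal standalone sketch (not part of the Druid test suite; class name and printed values are illustrative) of the MapBasedInputRow constructed above, showing the epoch-millis constructor and two basic accessors:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.MapBasedInputRow;

public class MapBasedInputRowSketch {
  public static void main(String[] args) {
    // Same constructor as in the test: epoch-millis timestamp, dimension names, raw event map.
    MapBasedInputRow row = new MapBasedInputRow(0L, ImmutableList.of(), ImmutableMap.of("foo", ".1"));
    System.out.println(row.getTimestamp()); // e.g. 1970-01-01T00:00:00.000Z
    System.out.println(row.getRaw("foo"));  // .1, the leading-decimal string the extractor turns into a one-item sketch
  }
}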
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io: class ProtobufInputRowParser, method parseBatch.
@Override
public List<InputRow> parseBatch(ByteBuffer input) {
  if (parser == null) {
    parser = parseSpec.makeParser();
  }
  Map<String, Object> record;
  DateTime timestamp;
  if (isFlatSpec) {
    try {
      DynamicMessage message = protobufBytesDecoder.parse(input);
      record = CollectionUtils.mapKeys(message.getAllFields(), k -> k.getJsonName());
      timestamp = this.timestampSpec.extractTimestamp(record);
    } catch (Exception ex) {
      throw new ParseException(null, ex, "Protobuf message could not be parsed");
    }
  } else {
    try {
      DynamicMessage message = protobufBytesDecoder.parse(input);
      String json = JsonFormat.printer().print(message);
      record = parser.parseToMap(json);
      timestamp = this.timestampSpec.extractTimestamp(record);
    } catch (InvalidProtocolBufferException e) {
      throw new ParseException(null, e, "Protobuf message could not be parsed");
    }
  }
  final List<String> dimensions;
  if (!this.dimensions.isEmpty()) {
    dimensions = this.dimensions;
  } else {
    dimensions = Lists.newArrayList(
        Sets.difference(record.keySet(), parseSpec.getDimensionsSpec().getDimensionExclusions()));
  }
  return ImmutableList.of(new MapBasedInputRow(timestamp, dimensions, record));
}
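One detail worth calling out in parseBatch is the dimension-discovery fallback at the end: when no dimensions are configured, every key of the parsed record that is not in the exclusion set becomes a dimension of the resulting MapBasedInputRow. A minimal sketch of that behavior, with a hypothetical record and exclusion set standing in for the decoded protobuf message and parseSpec.getDimensionsSpec().getDimensionExclusions():

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class DimensionFallbackSketch {
  public static void main(String[] args) {
    // Hypothetical parsed record and exclusion set, for illustration only.
    Map<String, Object> record = ImmutableMap.of("timestamp", 1L, "country", "US", "bytes", 42);
    Set<String> exclusions = ImmutableSet.of("timestamp");
    // Same expression as in parseBatch: record keys minus the excluded ones.
    List<String> dimensions = Lists.newArrayList(Sets.difference(record.keySet(), exclusions));
    System.out.println(dimensions); // [country, bytes]
  }
}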
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io: class DruidSegmentReaderTest, method testReaderWithInclusiveColumnsFilterNoTimestamp.
@Test
public void testReaderWithInclusiveColumnsFilterNoTimestamp() throws IOException {
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec("__time", "millis", DateTimes.of("1971")),
      new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
      ColumnsFilter.inclusionBased(ImmutableSet.of("s", "d")),
      null,
      temporaryFolder.newFolder());
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(DateTimes.of("1971"), ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder().put("s", "foo").put("d", 1.23d).build()),
          new MapBasedInputRow(DateTimes.of("1971"), ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder().put("s", "bar").put("d", 4.56d).build())),
      readRows(reader));
}
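The 1971 timestamps in the expected rows come from the TimestampSpec's missing-value default: the inclusion-based ColumnsFilter only reads "s" and "d", so there is no __time value to extract and the spec falls back to the configured DateTime. A minimal standalone sketch of that fallback (not part of the test class):

import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.java.util.common.DateTimes;

public class TimestampFallbackSketch {
  public static void main(String[] args) {
    // Same spec as the test: __time column, millis format, 1971 as the missing-value default.
    TimestampSpec spec = new TimestampSpec("__time", "millis", DateTimes.of("1971"));
    // A row map without a __time key, mirroring the inclusion-based filter on ("s", "d").
    System.out.println(spec.extractTimestamp(ImmutableMap.of("s", "foo", "d", 1.23d)));
    // -> 1971-01-01T00:00:00.000Z
  }
}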
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io: class DruidSegmentReaderTest, method testReaderWithFilter.
@Test
public void testReaderWithFilter() throws IOException {
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec("__time", "millis", DateTimes.of("1971")),
      new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
      ColumnsFilter.all(),
      new SelectorDimFilter("d", "1.23", null),
      temporaryFolder.newFolder());
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(DateTimes.of("2000"), ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                  .put("__time", DateTimes.of("2000T").getMillis())
                  .put("s", "foo")
                  .put("d", 1.23d)
                  .put("cnt", 1L)
                  .put("met_s", makeHLLC("foo"))
                  .build())),
      readRows(reader));
}
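Here ColumnsFilter.all() keeps every stored column, so the expected MapBasedInputRow carries __time, cnt and met_s alongside the dimensions, and the SelectorDimFilter keeps only the setUp() row whose d value matches "1.23". A minimal sketch of constructing the same filter on its own (no query stack; the printed form is illustrative):

import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

public class SelectorFilterSketch {
  public static void main(String[] args) {
    // Dimension "d", match value given as a String, no extraction function.
    DimFilter filter = new SelectorDimFilter("d", "1.23", null);
    System.out.println(filter); // e.g. d = 1.23
  }
}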
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io: class DruidSegmentReaderTest, method setUp.
@Before
public void setUp() throws IOException {
  // Write a segment with two rows in it, with columns: s (string), d (double), cnt (long), met_s (complex).
  final IncrementalIndex incrementalIndex = IndexBuilder
      .create()
      .schema(
          new IncrementalIndexSchema.Builder()
              .withDimensionsSpec(new DimensionsSpec(
                  ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))))
              .withMetrics(new CountAggregatorFactory("cnt"), new HyperUniquesAggregatorFactory("met_s", "s"))
              .withRollup(false)
              .build())
      .rows(
          ImmutableList.of(
              new MapBasedInputRow(DateTimes.of("2000"), ImmutableList.of("s", "d"),
                  ImmutableMap.<String, Object>builder().put("s", "foo").put("d", 1.23).build()),
              new MapBasedInputRow(DateTimes.of("2000T01"), ImmutableList.of("s", "d"),
                  ImmutableMap.<String, Object>builder().put("s", "bar").put("d", 4.56).build())))
      .buildIncrementalIndex();
  segmentDirectory = temporaryFolder.newFolder();
  try {
    TestHelper.getTestIndexMergerV9(OnHeapMemorySegmentWriteOutMediumFactory.instance())
        .persist(incrementalIndex, segmentDirectory, new IndexSpec(), null);
  } finally {
    incrementalIndex.close();
  }
}
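For reference, a minimal standalone sketch (not part of this test class; output values shown as comments) of the DateTime-based MapBasedInputRow constructor used in setUp(), together with a few of the Row accessors it exposes:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.java.util.common.DateTimes;

public class SetUpRowSketch {
  public static void main(String[] args) {
    // Same shape as the first row written in setUp(): DateTime timestamp, dimension names, event map.
    MapBasedInputRow row = new MapBasedInputRow(
        DateTimes.of("2000"),
        ImmutableList.of("s", "d"),
        ImmutableMap.<String, Object>builder().put("s", "foo").put("d", 1.23).build());
    System.out.println(row.getDimensions());   // [s, d]
    System.out.println(row.getDimension("s")); // [foo]
    System.out.println(row.getMetric("d"));    // 1.23
  }
}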