Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
The class DoublesSketchComplexMetricSerdeTest, method testExtractorOnDecimalNumber.
@Test
public void testExtractorOnDecimalNumber() {
  final DoublesSketchComplexMetricSerde serde = new DoublesSketchComplexMetricSerde();
  final ComplexMetricExtractor extractor = serde.getExtractor();
  // A decimal string parses into a sketch holding a single item.
  final DoublesSketch sketch = (DoublesSketch) extractor.extractValue(
      new MapBasedInputRow(0L, ImmutableList.of(), ImmutableMap.of("foo", "3.1")),
      "foo"
  );
  Assert.assertEquals(1, sketch.getRetainedItems());
  Assert.assertEquals(3.1d, sketch.getMaxValue(), 0.01d);
}
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
The class DoublesSketchComplexMetricSerdeTest, method testExtractorOnNegativeNumber.
@Test
public void testExtractorOnNegativeNumber() {
  final DoublesSketchComplexMetricSerde serde = new DoublesSketchComplexMetricSerde();
  final ComplexMetricExtractor extractor = serde.getExtractor();
  // A negative numeric string is likewise parsed into a single-item sketch.
  final DoublesSketch sketch = (DoublesSketch) extractor.extractValue(
      new MapBasedInputRow(0L, ImmutableList.of(), ImmutableMap.of("foo", "-133")),
      "foo"
  );
  Assert.assertEquals(1, sketch.getRetainedItems());
  Assert.assertEquals(-133d, sketch.getMaxValue(), 0.01d);
}
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
The class DoublesSketchComplexMetricSerdeTest, method testExtractorOnEmptyString.
@Test
public void testExtractorOnEmptyString() {
  final DoublesSketchComplexMetricSerde serde = new DoublesSketchComplexMetricSerde();
  final ComplexMetricExtractor extractor = serde.getExtractor();
  // An empty string yields an empty sketch rather than an error.
  final DoublesSketch sketch = (DoublesSketch) extractor.extractValue(
      new MapBasedInputRow(0L, ImmutableList.of(), ImmutableMap.of("foo", "")),
      "foo"
  );
  Assert.assertEquals(0, sketch.getRetainedItems());
}
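These three tests exercise the same pattern: MapBasedInputRow pairs a timestamp with a dimension list and a backing event map, and dimension values read back as strings. A minimal, self-contained sketch of that behavior (the class name and main method are illustrative, not part of the Druid test suite):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.apache.druid.data.input.MapBasedInputRow;

public class MapBasedInputRowSketch {
  public static void main(String[] args) {
    // Epoch-millis constructor: timestamp, declared dimensions, event map.
    final MapBasedInputRow row = new MapBasedInputRow(
        0L,
        ImmutableList.of("foo"),
        ImmutableMap.of("foo", "3.1")
    );
    // Dimension values come back as strings.
    System.out.println(Iterables.getOnlyElement(row.getDimension("foo"))); // 3.1
    System.out.println(row.getTimestamp()); // 1970-01-01T00:00:00.000Z
  }
}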
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
The class KafkaInputFormatTest, method testTimestampFromHeader.
@Test
public void testTimestampFromHeader() throws IOException {
  // Add a "headerTs" header on top of the sample headers.
  Iterable<Header> sample_header_with_ts = Iterables.unmodifiableIterable(
      Iterables.concat(
          SAMPLE_HEADERS,
          ImmutableList.of(new Header() {
            @Override
            public String key() {
              return "headerTs";
            }

            @Override
            public byte[] value() {
              return "2021-06-24".getBytes(StandardCharsets.UTF_8);
            }
          })
      )
  );
  final byte[] key = StringUtils.toUtf8(
      "{\n"
      + " \"key\": \"sampleKey\"\n"
      + "}"
  );
  final byte[] payload = StringUtils.toUtf8(
      "{\n"
      + " \"timestamp\": \"2021-06-24\",\n"
      + " \"bar\": null,\n"
      + " \"foo\": \"x\",\n"
      + " \"baz\": 4,\n"
      + " \"o\": {\n"
      + " \"mg\": 1\n"
      + " }\n"
      + "}"
  );
  Headers headers = new RecordHeaders(sample_header_with_ts);
  inputEntity = new KafkaRecordEntity(
      new ConsumerRecord<byte[], byte[]>("sample", 0, 0, timestamp, null, null, 0, 0, key, payload, headers)
  );
  // The TimestampSpec points at the prefixed header column rather than the payload field.
  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("kafka.newheader.headerTs", "iso", null),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of("bar", "foo", "kafka.newheader.encoding", "kafka.newheader.kafkapkc")
              )
          ),
          ColumnsFilter.all()
      ),
      inputEntity,
      null
  );
  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {
      final InputRow row = iterator.next();
      // The reader produces MapBasedInputRow instances.
      final MapBasedInputRow mrow = (MapBasedInputRow) row;
      // Payload verifications
      Assert.assertEquals(DateTimes.of("2021-06-24"), row.getTimestamp());
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      // Header verification
      Assert.assertEquals("application/json", Iterables.getOnlyElement(row.getDimension("kafka.newheader.encoding")));
      Assert.assertEquals("pkc-bar", Iterables.getOnlyElement(row.getDimension("kafka.newheader.kafkapkc")));
      Assert.assertEquals(
          String.valueOf(DateTimes.of("2021-06-24").getMillis()),
          Iterables.getOnlyElement(row.getDimension("kafka.newts.timestamp"))
      );
      Assert.assertEquals("2021-06-24", Iterables.getOnlyElement(row.getDimension("kafka.newheader.headerTs")));
      Assert.assertEquals("2021-06-24", Iterables.getOnlyElement(row.getDimension("timestamp")));
      // Key verification
      Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));
      // Columns absent from the payload resolve to empty dimension lists.
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
      numActualIterations++;
    }
    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
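The anonymous Header subclass above is boilerplate that recurs whenever a test needs an extra Kafka header; a small helper can fold it away. This is a hypothetical convenience method (stringHeader is not part of the Druid test suite), using only the two Header interface methods shown above:

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Header;

// Hypothetical factory: builds a Header from a key and a UTF-8 string value.
static Header stringHeader(String key, String value) {
  return new Header() {
    @Override
    public String key() {
      return key;
    }

    @Override
    public byte[] value() {
      return value.getBytes(StandardCharsets.UTF_8);
    }
  };
}

With it, the setup above would shrink to Iterables.concat(SAMPLE_HEADERS, ImmutableList.of(stringHeader("headerTs", "2021-06-24"))).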
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
The class KafkaInputReader, method buildRowsWithoutValuePayload.
private CloseableIterator<InputRow> buildRowsWithoutValuePayload(Map<String, Object> headerKeyList) {
  // With no value payload, the header/key map supplies both the dimensions and the timestamp.
  final HashSet<String> newDimensions = new HashSet<>(headerKeyList.keySet());
  final InputRow row = new MapBasedInputRow(
      inputRowSchema.getTimestampSpec().extractTimestamp(headerKeyList),
      getFinalDimensionList(newDimensions),
      headerKeyList
  );
  final List<InputRow> rows = Collections.singletonList(row);
  return CloseableIterators.withEmptyBaggage(rows.iterator());
}
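A caller drains the returned iterator the same way the Kafka test above does, using try-with-resources so the iterator is closed. A hedged sketch (the surrounding method and headerKeyList are placeholders, and close() can throw IOException):

// Hypothetical caller of buildRowsWithoutValuePayload.
try (CloseableIterator<InputRow> rows = buildRowsWithoutValuePayload(headerKeyList)) {
  while (rows.hasNext()) {
    final InputRow row = rows.next();
    // hand each row to the downstream ingestion step ...
  }
}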