Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class DruidSegmentReaderTest, method testReaderTimestampAsPosixIncorrectly.
@Test
public void testReaderTimestampAsPosixIncorrectly() throws IOException {
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec("__time", "posix", null),
      new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
      ColumnsFilter.all(),
      null,
      temporaryFolder.newFolder()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(
              DateTimes.of("31969-04-01T00:00:00.000Z"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                  .put("__time", DateTimes.of("2000T").getMillis())
                  .put("s", "foo")
                  .put("d", 1.23d)
                  .put("cnt", 1L)
                  .put("met_s", makeHLLC("foo"))
                  .build()
          ),
          new MapBasedInputRow(
              DateTimes.of("31969-05-12T16:00:00.000Z"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                  .put("__time", DateTimes.of("2000T01").getMillis())
                  .put("s", "bar")
                  .put("d", 4.56d)
                  .put("cnt", 1L)
                  .put("met_s", makeHLLC("bar"))
                  .build()
          )
      ),
      readRows(reader)
  );
}
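Why the expected rows land in the year 31969: DateTimes.of("2000T").getMillis() is 946,684,800,000 milliseconds, but the "posix" timestamp format reinterprets that value as seconds, inflating every timestamp by a factor of 1000. A minimal, self-contained sketch of the arithmetic using java.time (not part of the test itself):

import java.time.Instant;

public class PosixVsMillis {
  public static void main(String[] args) {
    // 2000-01-01T00:00Z expressed as epoch milliseconds.
    long millis = 946684800000L;

    // Correct interpretation: the value is milliseconds.
    System.out.println(Instant.ofEpochMilli(millis)); // 2000-01-01T00:00:00Z

    // "posix" interpretation: the same value read as seconds,
    // roughly 30,000 years in the future.
    System.out.println(Instant.ofEpochSecond(millis)); // +31969-04-01T00:00:00Z
  }
}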
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class DruidSegmentReaderTest, method testReaderWithInclusiveColumnsFilter.
@Test
public void testReaderWithInclusiveColumnsFilter() throws IOException {
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec("__time", "millis", DateTimes.of("1971")),
      new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
      ColumnsFilter.inclusionBased(ImmutableSet.of("__time", "s", "d")),
      null,
      temporaryFolder.newFolder()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(
              DateTimes.of("2000"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                  .put("__time", DateTimes.of("2000T").getMillis())
                  .put("s", "foo")
                  .put("d", 1.23d)
                  .build()
          ),
          new MapBasedInputRow(
              DateTimes.of("2000T01"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                  .put("__time", DateTimes.of("2000T01").getMillis())
                  .put("s", "bar")
                  .put("d", 4.56d)
                  .build()
          )
      ),
      readRows(reader)
  );
}
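Compared with testReader below, the inclusion-based filter keeps only the named columns, so the expected rows contain just __time, s, and d and omit the cnt and met_s metrics. A minimal sketch of that behavior with plain java.util collections (the retainColumns helper is hypothetical, not Druid's ColumnsFilter implementation):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class InclusionFilterSketch {
  // Keep only the columns named in the inclusion set, mirroring the effect of
  // ColumnsFilter.inclusionBased(ImmutableSet.of("__time", "s", "d")).
  static Map<String, Object> retainColumns(Map<String, Object> row, Set<String> include) {
    Map<String, Object> filtered = new LinkedHashMap<>(row);
    filtered.keySet().retainAll(include);
    return filtered;
  }

  public static void main(String[] args) {
    Map<String, Object> row = new LinkedHashMap<>();
    row.put("__time", 946684800000L);
    row.put("s", "foo");
    row.put("d", 1.23d);
    row.put("cnt", 1L);        // dropped: not in the inclusion set
    row.put("met_s", "hllc");  // dropped: not in the inclusion set

    System.out.println(retainColumns(row, Set.of("__time", "s", "d")));
    // {__time=946684800000, s=foo, d=1.23}
  }
}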
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class DruidSegmentReaderTest, method testReader.
@Test
public void testReader() throws IOException {
  final DruidSegmentReader reader = new DruidSegmentReader(
      makeInputEntity(Intervals.of("2000/P1D")),
      indexIO,
      new TimestampSpec("__time", "millis", DateTimes.of("1971")),
      new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("s"), new DoubleDimensionSchema("d"))),
      ColumnsFilter.all(),
      null,
      temporaryFolder.newFolder()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedInputRow(
              DateTimes.of("2000"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                  .put("__time", DateTimes.of("2000T").getMillis())
                  .put("s", "foo")
                  .put("d", 1.23d)
                  .put("cnt", 1L)
                  .put("met_s", makeHLLC("foo"))
                  .build()
          ),
          new MapBasedInputRow(
              DateTimes.of("2000T01"),
              ImmutableList.of("s", "d"),
              ImmutableMap.<String, Object>builder()
                  .put("__time", DateTimes.of("2000T01").getMillis())
                  .put("s", "bar")
                  .put("d", 4.56d)
                  .put("cnt", 1L)
                  .put("met_s", makeHLLC("bar"))
                  .build()
          )
      ),
      readRows(reader)
  );
}
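The assertions above exercise MapBasedInputRow only through equality; a hedged sketch of how such a row is typically read back through the InputRow interface (the accessors below are standard org.apache.druid.data.input.Row APIs, but this fragment is illustrative rather than part of the test):

InputRow row = new MapBasedInputRow(
    DateTimes.of("2000"),
    ImmutableList.of("s", "d"),
    ImmutableMap.<String, Object>builder()
        .put("__time", DateTimes.of("2000T").getMillis())
        .put("s", "foo")
        .put("d", 1.23d)
        .build()
);
row.getTimestamp();      // 2000-01-01T00:00:00.000Z
row.getDimension("s");   // ["foo"]: dimension values come back as a list of strings
row.getRaw("d");         // 1.23: raw, untyped access to the underlying event map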
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class SingleDimensionShardSpecTest, method testIsInChunk.
@Test
public void testIsInChunk() {
  Map<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> tests =
      ImmutableMap.<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>>builder()
          .put(makeSpec(null, null), makeListOfPairs(true, null, true, "a", true, "h", true, "p", true, "y"))
          .put(makeSpec(null, "m"), makeListOfPairs(true, null, true, "a", true, "h", false, "p", false, "y"))
          .put(makeSpec("a", "h"), makeListOfPairs(false, null, true, "a", false, "h", false, "p", false, "y"))
          .put(makeSpec("d", "u"), makeListOfPairs(false, null, false, "a", true, "h", true, "p", false, "y"))
          .put(makeSpec("h", null), makeListOfPairs(false, null, false, "a", true, "h", true, "p", true, "y"))
          .build();
  for (Map.Entry<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> entry : tests.entrySet()) {
    SingleDimensionShardSpec spec = entry.getKey();
    for (Pair<Boolean, Map<String, String>> pair : entry.getValue()) {
      final InputRow inputRow = new MapBasedInputRow(
          0,
          ImmutableList.of("billy"),
          Maps.transformValues(pair.rhs, input -> input)
      );
      Assert.assertEquals(
          StringUtils.format("spec[%s], row[%s]", spec, inputRow),
          pair.lhs,
          spec.isInChunk(inputRow)
      );
    }
  }
}
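The expectations encode a half-open range [start, end) on the "billy" dimension: a null bound means unbounded on that side, and a null dimension value sorts below every string. A minimal sketch that reproduces the table above (the inChunk helper is hypothetical, not Druid's implementation):

// Half-open range check: start is inclusive, end is exclusive.
static boolean inChunk(String start, String end, String value) {
  // Lower bound: a null start means "unbounded below";
  // a null value sorts below every string, so it fails any explicit start.
  if (start != null && (value == null || value.compareTo(start) < 0)) {
    return false;
  }
  // Upper bound: a null end means "unbounded above".
  if (end != null && value != null && value.compareTo(end) >= 0) {
    return false;
  }
  return true;
}

// For example, inChunk("a", "h", "a") is true, inChunk("a", "h", "h") is false,
// and inChunk(null, "m", null) is true, matching the pairs in the test.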
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class ThriftInputRowParser, method parseBatch.
@Override
public List<InputRow> parseBatch(Object input) {
  if (parser == null) {
    // Create the parser lazily, at first use, to avoid unnecessary
    // initialization of the underlying parseSpec.
    parser = parseSpec.makeParser();
  }
  // Performing this initialization in the constructor would throw ClassNotFoundException.
  try {
    if (thriftClass == null) {
      thriftClass = getThriftClass();
    }
  } catch (IOException e) {
    throw new IAE(e, "failed to load jar [%s]", jarPath);
  } catch (ClassNotFoundException e) {
    throw new IAE(e, "class [%s] not found in jar", thriftClassName);
  } catch (InstantiationException | IllegalAccessException e) {
    throw new IAE(e, "failed to instantiate thrift instance");
  }
  final String json;
  try {
    if (input instanceof ByteBuffer) {
      // Realtime stream
      final byte[] bytes = ((ByteBuffer) input).array();
      TBase o = thriftClass.newInstance();
      ThriftDeserialization.detectAndDeserialize(bytes, o);
      json = ThriftDeserialization.SERIALIZER_SIMPLE_JSON.get().toString(o);
    } else if (input instanceof BytesWritable) {
      // Sequence file
      final byte[] bytes = ((BytesWritable) input).getBytes();
      TBase o = thriftClass.newInstance();
      ThriftDeserialization.detectAndDeserialize(bytes, o);
      json = ThriftDeserialization.SERIALIZER_SIMPLE_JSON.get().toString(o);
    } else if (input instanceof ThriftWritable) {
      // LZO block Thrift file
      TBase o = (TBase) ((ThriftWritable) input).get();
      json = ThriftDeserialization.SERIALIZER_SIMPLE_JSON.get().toString(o);
    } else {
      throw new IAE("unsupported input class [%s]", input.getClass());
    }
  } catch (IllegalAccessException | InstantiationException | TException e) {
    throw new IAE("failed to deserialize thrift input");
  }
  Map<String, Object> record = parser.parseToMap(json);
  final List<String> dimensions;
  if (!this.dimensions.isEmpty()) {
    // Use the explicitly configured dimensions when provided...
    dimensions = this.dimensions;
  } else {
    // ...otherwise discover them: every parsed field that is not excluded becomes a dimension.
    dimensions = Lists.newArrayList(Sets.difference(record.keySet(), parseSpec.getDimensionsSpec().getDimensionExclusions()));
  }
  return ImmutableList.of(new MapBasedInputRow(parseSpec.getTimestampSpec().extractTimestamp(record), dimensions, record));
}
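The schema-discovery branch at the end is worth dwelling on: when no dimensions are configured, every parsed field that is not in the exclusion set becomes a dimension. A minimal sketch of that step with a hypothetical record map, using the same Guava Sets.difference call as the method above:

import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

public class DimensionDiscoverySketch {
  public static void main(String[] args) {
    // Hypothetical output of parser.parseToMap(json).
    Map<String, Object> record = Map.of(
        "timestamp", "2000-01-01T00:00:00Z",
        "s", "foo",
        "d", 1.23d
    );
    // Fields excluded by the DimensionsSpec (here, just the timestamp column).
    Set<String> exclusions = Set.of("timestamp");
    // Everything else is treated as a dimension.
    List<String> dimensions = Lists.newArrayList(Sets.difference(record.keySet(), exclusions));
    System.out.println(dimensions); // [s, d] (order depends on the key set)
  }
}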