Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io: class FiniteAppenderatorDriverTest, method testMaxRowsPerSegment.
@Test
public void testMaxRowsPerSegment() throws Exception {
  final int numSegments = 3;
  final TestCommitterSupplier<Integer> committerSupplier = new TestCommitterSupplier<>();
  Assert.assertNull(driver.startJob());
  for (int i = 0; i < numSegments * MAX_ROWS_PER_SEGMENT; i++) {
    committerSupplier.setMetadata(i + 1);
    InputRow row = new MapBasedInputRow(
        new DateTime("2000T01"),
        ImmutableList.of("dim2"),
        ImmutableMap.<String, Object>of("dim2", String.format("bar-%d", i), "met1", 2.0)
    );
    Assert.assertNotNull(driver.add(row, "dummy", committerSupplier));
  }
  final SegmentsAndMetadata segmentsAndMetadata = driver.finish(makeOkPublisher(), committerSupplier.get());
  Assert.assertEquals(numSegments, segmentsAndMetadata.getSegments().size());
  Assert.assertEquals(numSegments * MAX_ROWS_PER_SEGMENT, segmentsAndMetadata.getCommitMetadata());
}
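The test above builds each row from a timestamp, a dimension list, and an event map. A minimal, self-contained sketch of that construction pattern, assuming only the constructor and accessors already exercised here (the class name and the "page"/"count" columns are illustrative, not taken from the test):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import org.joda.time.DateTime;

public class MapBasedInputRowSketch {
  public static void main(String[] args) {
    // Wrap an event map in a MapBasedInputRow; "page" and "count" are
    // hypothetical column names chosen only for illustration.
    InputRow row = new MapBasedInputRow(
        new DateTime("2000-01-01T00:00:00Z"),
        ImmutableList.of("page"),
        ImmutableMap.<String, Object>of("page", "home", "count", 3L)
    );
    System.out.println(row.getTimestampFromEpoch()); // millis of the DateTime passed in
    System.out.println(row.getDimension("page"));    // ["home"]
    System.out.println(row.getRaw("count"));         // 3, the untouched event value
  }
}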
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io: class SingleDimensionShardSpecTest, method testIsInChunk.
@Test
public void testIsInChunk() throws Exception {
  Map<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> tests =
      ImmutableMap.<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>>builder()
                  .put(makeSpec(null, null), makeList(true, null, true, "a", true, "h", true, "p", true, "y"))
                  .put(makeSpec(null, "m"), makeList(true, null, true, "a", true, "h", false, "p", false, "y"))
                  .put(makeSpec("a", "h"), makeList(false, null, true, "a", false, "h", false, "p", false, "y"))
                  .put(makeSpec("d", "u"), makeList(false, null, false, "a", true, "h", true, "p", false, "y"))
                  .put(makeSpec("h", null), makeList(false, null, false, "a", true, "h", true, "p", true, "y"))
                  .build();
  for (Map.Entry<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> entry : tests.entrySet()) {
    SingleDimensionShardSpec spec = entry.getKey();
    for (Pair<Boolean, Map<String, String>> pair : entry.getValue()) {
      final InputRow inputRow = new MapBasedInputRow(
          0,
          ImmutableList.of("billy"),
          Maps.transformValues(pair.rhs, new Function<String, Object>() {

            @Override
            public Object apply(String input) {
              return input;
            }
          })
      );
      Assert.assertEquals(
          String.format("spec[%s], row[%s]", spec, inputRow),
          pair.lhs,
          spec.isInChunk(inputRow.getTimestampFromEpoch(), inputRow)
      );
    }
  }
}
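The identity Function above only converts a Map<String, String> into the Map<String, Object> that MapBasedInputRow expects. A single case can be built directly, as in this minimal sketch, which reuses the test's "billy" dimension and one of its boundary values and reads the half-open range behaviour off the expectations above (the class name is invented):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;

public class ShardSpecRowSketch {
  public static void main(String[] args) {
    // Build the same kind of single-dimension row the test hands to isInChunk,
    // without the Maps.transformValues indirection. The test passes 0 as the
    // timestamp for every row.
    InputRow inputRow = new MapBasedInputRow(
        0,
        ImmutableList.of("billy"),
        ImmutableMap.<String, Object>of("billy", "h")
    );
    System.out.println(inputRow.getDimension("billy"));   // ["h"]
    System.out.println(inputRow.getTimestampFromEpoch()); // 0
    // Per the expectations above, a spec built with makeSpec("a", "h") reports
    // isInChunk(...) == false for this row (the end of the range is exclusive),
    // while makeSpec("h", null) reports true.
  }
}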
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io: class HashBasedNumberedShardSpecTest, method testGetGroupKey.
@Test
public void testGetGroupKey() throws Exception {
  final HashBasedNumberedShardSpec shardSpec1 = new HashBasedNumberedShardSpec(1, 2, ImmutableList.of("visitor_id"), TestUtil.MAPPER);
  final DateTime time = new DateTime();
  final InputRow inputRow = new MapBasedInputRow(
      time,
      ImmutableList.of("visitor_id", "cnt"),
      ImmutableMap.<String, Object>of("visitor_id", "v1", "cnt", 10)
  );
  // With partition dimensions configured, only those dimensions contribute to the group key.
  Assert.assertEquals(ImmutableList.of(Lists.newArrayList("v1")), shardSpec1.getGroupKey(time.getMillis(), inputRow));
  // Without partition dimensions, the key is the timestamp plus the full dimension map.
  final HashBasedNumberedShardSpec shardSpec2 = new HashBasedNumberedShardSpec(1, 2, null, TestUtil.MAPPER);
  Assert.assertEquals(
      ImmutableList.of(time.getMillis(), ImmutableMap.of("cnt", Lists.newArrayList(10), "visitor_id", Lists.newArrayList("v1"))).toString(),
      shardSpec2.getGroupKey(time.getMillis(), inputRow).toString()
  );
}
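Because only the configured partition dimensions feed the group key, rows that agree on those dimensions should produce equal keys regardless of their other fields. A small sketch under that reading of the test; the "user"/"city" dimensions and class name are illustrative, and a plain Jackson ObjectMapper stands in for TestUtil.MAPPER:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.timeline.partition.HashBasedNumberedShardSpec;
import org.joda.time.DateTime;

public class GroupKeySketch {
  public static void main(String[] args) {
    // partitionNum 0 of 2, partitioned on the hypothetical "user" dimension.
    HashBasedNumberedShardSpec spec =
        new HashBasedNumberedShardSpec(0, 2, ImmutableList.of("user"), new ObjectMapper());
    DateTime time = new DateTime();
    InputRow rowA = new MapBasedInputRow(
        time, ImmutableList.of("user", "city"),
        ImmutableMap.<String, Object>of("user", "u1", "city", "nyc"));
    InputRow rowB = new MapBasedInputRow(
        time, ImmutableList.of("user", "city"),
        ImmutableMap.<String, Object>of("user", "u1", "city", "sfo"));
    // Both rows share user=u1; per the test above, the non-partition dimension
    // "city" should not affect the group key.
    System.out.println(spec.getGroupKey(time.getMillis(), rowA)
        .equals(spec.getGroupKey(time.getMillis(), rowB))); // expected: true
  }
}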
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io: class BenchmarkDataGenerator, method nextRow.
public InputRow nextRow() {
  Map<String, Object> event = new HashMap<>();
  for (BenchmarkColumnValueGenerator generator : columnGenerators) {
    event.put(generator.getSchema().getName(), generator.generateRowValue());
  }
  MapBasedInputRow row = new MapBasedInputRow(nextTimestamp(), dimensionNames, event);
  return row;
}
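The method is a small template: every column generator contributes one key/value pair to the event map, and the map is wrapped in a MapBasedInputRow together with the declared dimension names and a timestamp. A stripped-down sketch of the same pattern, with plain java.util.function.Supplier standing in for BenchmarkColumnValueGenerator (all names are illustrative):

import com.google.common.collect.ImmutableList;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.function.Supplier;

public class RowGeneratorSketch {
  public static void main(String[] args) {
    Random random = new Random(42);
    // Column-name to value-generator pairs standing in for BenchmarkColumnValueGenerator.
    Map<String, Supplier<Object>> columnGenerators = new HashMap<>();
    columnGenerators.put("dimA", () -> "val-" + random.nextInt(10));
    columnGenerators.put("metA", () -> random.nextDouble());

    List<String> dimensionNames = ImmutableList.of("dimA");
    long timestamp = System.currentTimeMillis(); // stands in for nextTimestamp()

    Map<String, Object> event = new HashMap<>();
    for (Map.Entry<String, Supplier<Object>> generator : columnGenerators.entrySet()) {
      event.put(generator.getKey(), generator.getValue().get());
    }
    InputRow row = new MapBasedInputRow(timestamp, dimensionNames, event);
    System.out.println(row.getDimension("dimA")); // e.g. [val-0]
    System.out.println(row.getRaw("metA"));       // a random double
  }
}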
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io: class OrcHadoopInputRowParser, method parse.
@Override
public InputRow parse(OrcStruct input) {
  Map<String, Object> map = Maps.newHashMap();
  List<? extends StructField> fields = oip.getAllStructFieldRefs();
  for (StructField field : fields) {
    ObjectInspector objectInspector = field.getFieldObjectInspector();
    switch (objectInspector.getCategory()) {
      case PRIMITIVE:
        PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) objectInspector;
        map.put(field.getFieldName(), primitiveObjectInspector.getPrimitiveJavaObject(oip.getStructFieldData(input, field)));
        break;
      case LIST:
        // array case - only 1-depth arrays are supported so far
        ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
        map.put(field.getFieldName(), getListObject(listObjectInspector, oip.getStructFieldData(input, field)));
        break;
      default:
        break;
    }
  }
  TimestampSpec timestampSpec = parseSpec.getTimestampSpec();
  DateTime dateTime = timestampSpec.extractTimestamp(map);
  return new MapBasedInputRow(dateTime, dimensions, map);
}
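The getListObject helper called above is not part of this snippet. One plausible shape for such a helper, using only the Hive ObjectInspector API (getList and getListElementObjectInspector), is sketched below; it is an illustration under that assumption, not the project's actual implementation, and the class and method names are invented:

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;

import java.util.ArrayList;
import java.util.List;

public class OrcListSketch {
  // Hypothetical stand-in for the getListObject helper: unwrap a 1-depth ORC
  // list whose elements are primitives into plain Java values.
  static List<Object> listToJava(ListObjectInspector listInspector, Object data) {
    List<Object> result = new ArrayList<>();
    List<?> elements = listInspector.getList(data);
    if (elements == null) {
      return result;
    }
    ObjectInspector elementInspector = listInspector.getListElementObjectInspector();
    for (Object element : elements) {
      if (elementInspector instanceof PrimitiveObjectInspector) {
        result.add(((PrimitiveObjectInspector) elementInspector).getPrimitiveJavaObject(element));
      }
      // Nested lists/structs are skipped, matching the "1-depth only" note above.
    }
    return result;
  }
}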