Uses of org.apache.druid.data.input.MapBasedInputRow in project druid, by druid-io.

From the class IncrementalIndexStorageAdapterTest, method testObjectColumnSelectorOnVaryingColumnSchema:
@Test
public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception
{
  IncrementalIndex index = indexCreator.createIndex();
  index.add(new MapBasedInputRow(
      DateTimes.of("2014-09-01T00:00:00"),
      Collections.singletonList("billy"),
      ImmutableMap.of("billy", "hi")
  ));
  index.add(new MapBasedInputRow(
      DateTimes.of("2014-09-01T01:00:00"),
      Lists.newArrayList("billy", "sally"),
      ImmutableMap.of("billy", "hip", "sally", "hop")
  ));
  try (CloseableStupidPool<ByteBuffer> pool =
           new CloseableStupidPool<>("GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocate(50000))) {
    final GroupByQueryEngine engine = new GroupByQueryEngine(
        Suppliers.ofInstance(new GroupByQueryConfig()
        {
          @Override
          public int getMaxIntermediateRows()
          {
            return 5;
          }
        }),
        pool
    );
    final Sequence<Row> rows = engine.process(
        GroupByQuery.builder()
                    .setDataSource("test")
                    .setGranularity(Granularities.ALL)
                    .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))
                    .addDimension("billy")
                    .addDimension("sally")
                    .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                    .addAggregator(new JavaScriptAggregatorFactory(
                        "fieldLength",
                        Arrays.asList("sally", "billy"),
                        "function(current, s, b) { return current + (s == null ? 0 : s.length) + (b == null ? 0 : b.length); }",
                        "function() { return 0; }",
                        "function(a,b) { return a + b; }",
                        JavaScriptConfig.getEnabledInstance()
                    ))
                    .build(),
        new IncrementalIndexStorageAdapter(index)
    );
    final List<Row> results = rows.toList();
    Assert.assertEquals(2, results.size());

    MapBasedRow row = (MapBasedRow) results.get(0);
    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L, "fieldLength", 2.0), row.getEvent());

    row = (MapBasedRow) results.get(1);
    Assert.assertEquals(ImmutableMap.of("billy", "hip", "sally", "hop", "cnt", 1L, "fieldLength", 6.0), row.getEvent());
  }
}
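The MapBasedInputRow constructor used throughout these tests takes either a DateTime or an epoch-millis long as the timestamp, followed by the ordered dimension list and the event map. A minimal sketch of both shapes, assuming the usual import locations from the druid codebase (org.apache.druid.data.input.MapBasedInputRow, org.apache.druid.java.util.common.DateTimes) plus Guava's ImmutableMap and Lists:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import java.util.Collections;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.java.util.common.DateTimes;

// Hypothetical helper class, not part of the Druid test suite.
class MapBasedInputRowSketch
{
  // Timestamp supplied as a DateTime, as in testObjectColumnSelectorOnVaryingColumnSchema().
  static MapBasedInputRow fromDateTime()
  {
    return new MapBasedInputRow(
        DateTimes.of("2014-09-01T00:00:00"),   // row timestamp
        Collections.singletonList("billy"),    // ordered dimension names
        ImmutableMap.of("billy", "hi")         // dimension -> value event map
    );
  }

  // Timestamp supplied as epoch millis, as in testSanity() below.
  static MapBasedInputRow fromMillis()
  {
    return new MapBasedInputRow(
        System.currentTimeMillis() - 1,
        Lists.newArrayList("billy", "sally"),
        ImmutableMap.of("billy", "hip", "sally", "hop")
    );
  }
}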
From the class IncrementalIndexStorageAdapterTest, method testSanity:
@Test
public void testSanity() throws Exception
{
  IncrementalIndex index = indexCreator.createIndex();
  index.add(new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Collections.singletonList("billy"),
      ImmutableMap.of("billy", "hi")
  ));
  index.add(new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Collections.singletonList("sally"),
      ImmutableMap.of("sally", "bo")
  ));
  try (CloseableStupidPool<ByteBuffer> pool =
           new CloseableStupidPool<>("GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocate(50000))) {
    final GroupByQueryEngine engine = new GroupByQueryEngine(
        Suppliers.ofInstance(new GroupByQueryConfig()
        {
          @Override
          public int getMaxIntermediateRows()
          {
            return 5;
          }
        }),
        pool
    );
    final Sequence<Row> rows = engine.process(
        GroupByQuery.builder()
                    .setDataSource("test")
                    .setGranularity(Granularities.ALL)
                    .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))
                    .addDimension("billy")
                    .addDimension("sally")
                    .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                    .build(),
        new IncrementalIndexStorageAdapter(index)
    );
    final List<Row> results = rows.toList();
    Assert.assertEquals(2, results.size());

    MapBasedRow row = (MapBasedRow) results.get(0);
    Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1L), row.getEvent());

    row = (MapBasedRow) results.get(1);
    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
  }
}
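Each grouped result comes back as a MapBasedRow whose event map is keyed by the query's dimension names ("billy", "sally") and aggregator names ("cnt"). A small hedged sketch of pulling one field out of such results, assuming MapBasedRow and Row live under org.apache.druid.data.input as the tests' own imports suggest:

import java.util.ArrayList;
import java.util.List;
import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.data.input.Row;

// Hypothetical helper, not part of the Druid test suite.
class GroupByResultSketch
{
  // Extracts one field (a dimension or aggregator value) from each grouped row,
  // the same way the assertions above read MapBasedRow#getEvent().
  static List<Object> column(List<Row> results, String field)
  {
    List<Object> values = new ArrayList<>();
    for (Row row : results) {
      values.add(((MapBasedRow) row).getEvent().get(field));
    }
    return values;
  }
}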
From the class IncrementalIndexTest, method sameRow:
@Test
public void sameRow() throws IndexSizeExceededException
{
  MapBasedInputRow row = new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  );
  IncrementalIndex index = indexCreator.createIndex();
  index.add(row);
  index.add(row);
  index.add(row);

  Assert.assertEquals(1, index.size());
}
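For contrast, a hedged sketch of what would not collapse, written as if it continued sameRow() above (so index, row, and the rollup behavior implied by the assertion are assumed): a row that shares the timestamp but differs in one dimension value occupies its own aggregation slot.

// Assumes the index and row from sameRow() above; not part of the original test.
long timestamp = row.getTimestampFromEpoch();
index.add(new MapBasedInputRow(
    timestamp,
    Lists.newArrayList("billy", "joe"),
    ImmutableMap.of("billy", "A", "joe", "C")   // "joe" differs from the original row
));
Assert.assertEquals(2, index.size());           // a second, distinct aggregation row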
From the class IncrementalIndexTest, method controlTest:
@Test
public void controlTest() throws IndexSizeExceededException
{
  IncrementalIndex index = indexCreator.createIndex();
  // No assertions: the test only verifies that these adds complete without throwing.
  index.add(new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  ));
  index.add(new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe"),
      ImmutableMap.of("billy", "C", "joe", "B")
  ));
  index.add(new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  ));
}
From the class IncrementalIndexTest, method testDuplicateDimensions:
@Test(expected = ISE.class)
public void testDuplicateDimensions() throws IndexSizeExceededException
{
  IncrementalIndex index = indexCreator.createIndex();
  index.add(new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  ));
  // The second row lists the "joe" dimension twice; IncrementalIndex.add() is expected
  // to reject it with an ISE.
  index.add(new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  ));
}
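The duplicate lives in the dimension list, not in the event map (whose keys are necessarily unique). A hedged sketch of just the offending row, extracted into a hypothetical helper for clarity:

// Hypothetical helper, not part of the Druid test suite: builds the row that
// testDuplicateDimensions() expects IncrementalIndex.add() to reject with an ISE.
static MapBasedInputRow rowWithDuplicateDimension()
{
  return new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe", "joe"),   // "joe" listed twice
      ImmutableMap.of("billy", "A", "joe", "B")    // the event map keys themselves are unique
  );
}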