Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.
From class MaterializedViewQueryQueryToolChestTest, method testDecorateObjectMapper.
@Test
public void testDecorateObjectMapper() throws IOException {
  GroupByQuery realQuery = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false))
      .build();
  QueryToolChest queryToolChest = new MaterializedViewQueryQueryToolChest(
      new MapQueryToolChestWarehouse(
          ImmutableMap.<Class<? extends Query>, QueryToolChest>builder()
              .put(GroupByQuery.class, new GroupByQueryQueryToolChest(null))
              .build()
      )
  );
  ObjectMapper objectMapper = queryToolChest.decorateObjectMapper(JSON_MAPPER, realQuery);
  List<ResultRow> results = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(realQuery, "2011-04-01", "alias", "automotive", "rows", 1L, "idx", 135L),
      GroupByQueryRunnerTestHelper.createExpectedRow(realQuery, "2011-04-01", "alias", "business", "rows", 1L, "idx", 118L)
  );
  List<MapBasedRow> expectedResults = results.stream()
      .map(resultRow -> resultRow.toMapBasedRow(realQuery))
      .collect(Collectors.toList());
  // The decorated mapper should serialize each ResultRow the same way the
  // plain mapper serializes the equivalent MapBasedRow.
  Assert.assertEquals(
      "decorate-object-mapper",
      JSON_MAPPER.writerFor(new TypeReference<List<MapBasedRow>>() {}).writeValueAsString(expectedResults),
      objectMapper.writeValueAsString(results)
  );
}
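For reference, the shape being compared can be produced by serializing a MapBasedRow directly. A minimal sketch, assuming the same JSON_MAPPER and Druid's DateTimes helper; the row contents here are made up to mirror the first expected row:

MapBasedRow row = new MapBasedRow(
    DateTimes.of("2011-04-01"),
    ImmutableMap.of("alias", "automotive", "rows", 1L, "idx", 135L)
);
// A MapBasedRow serializes as its timestamp plus its event map; the decorated
// mapper is expected to emit the same JSON for the corresponding ResultRow.
String json = JSON_MAPPER.writeValueAsString(row);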
Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.
From class MovingAverageHelper, method getDimKeyFromRow.
/**
 * @param dimensions The list of DimensionSpecs specified in the query
 * @param row        The Row to be used for looking up dimension values
 *
 * @return A Map of dimension name to value taken from the row
 */
public static Map<String, Object> getDimKeyFromRow(Collection<DimensionSpec> dimensions, Row row) {
  Map<String, Object> key = new HashMap<>();
  Map<String, Object> event = ((MapBasedRow) row).getEvent();
  for (DimensionSpec dimension : dimensions) {
    key.put(dimension.getOutputName(), event.get(dimension.getOutputName()));
  }
  return key;
}
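A minimal usage sketch, with a made-up event map and dimension list; MapBasedRow and DefaultDimensionSpec are the same classes used elsewhere on this page:

Map<String, Object> event = new HashMap<>();
event.put("gender", "m");
event.put("pageViews", 10L);
Row row = new MapBasedRow(DateTimes.of("2020-01-01"), event);
List<DimensionSpec> dims = Collections.singletonList(new DefaultDimensionSpec("gender", "gender"));
// Only dimension output names are copied, so key is {gender=m};
// pageViews is left out because it is not a dimension.
Map<String, Object> key = MovingAverageHelper.getDimKeyFromRow(dims, row);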
Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.
From class MovingAverageQueryTest, method testQuery.
/**
* Validate that the specified query behaves correctly.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testQuery() throws IOException {
  Query<?> query = jsonMapper.readValue(getQueryString(), Query.class);
  Assert.assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType()));
  List<MapBasedRow> expectedResults = jsonMapper.readValue(getExpectedResultString(), getExpectedResultType());
  Assert.assertNotNull(expectedResults);
  Assert.assertThat(expectedResults, IsInstanceOf.instanceOf(List.class));
  CachingClusteredClient baseClient = new CachingClusteredClient(
      warehouse,
      new TimelineServerView() {
        @Override
        public Optional<? extends TimelineLookup<String, ServerSelector>> getTimeline(DataSourceAnalysis analysis) {
          return Optional.empty();
        }

        @Override
        public List<ImmutableDruidServer> getDruidServers() {
          return null;
        }

        @Override
        public <T> QueryRunner<T> getQueryRunner(DruidServer server) {
          return null;
        }

        @Override
        public void registerTimelineCallback(Executor exec, TimelineCallback callback) {
        }

        @Override
        public void registerSegmentCallback(Executor exec, SegmentCallback callback) {
        }

        @Override
        public void registerServerRemovedCallback(Executor exec, ServerRemovedCallback callback) {
        }
      },
      MapCache.create(100000),
      jsonMapper,
      new ForegroundCachePopulator(jsonMapper, new CachePopulatorStats(), -1),
      new CacheConfig(),
      new DruidHttpClientConfig() {
        @Override
        public long getMaxQueuedBytes() {
          return 0L;
        }
      },
      new DruidProcessingConfig() {
        @Override
        public String getFormatString() {
          return null;
        }
      },
      ForkJoinPool.commonPool(),
      QueryStackTests.DEFAULT_NOOP_SCHEDULER,
      new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()),
      new NoopServiceEmitter()
  );
  ClientQuerySegmentWalker walker = new ClientQuerySegmentWalker(
      new ServiceEmitter("", "", null) {
        @Override
        public void emit(Event event) {
        }
      },
      baseClient,
      null, /* local client; unused in this test, so pass in null */
      warehouse,
      new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()),
      retryConfig,
      jsonMapper,
      serverConfig,
      null,
      new CacheConfig()
  );
  defineMocks();
  QueryPlus queryPlus = QueryPlus.wrap(query);
  final Sequence<?> res = query.getRunner(walker).run(queryPlus);
  List actualResults = new ArrayList();
  actualResults = (List<MapBasedRow>) res.accumulate(actualResults, Accumulators.list());
  expectedResults = consistentTypeCasting(expectedResults);
  actualResults = consistentTypeCasting(actualResults);
  Assert.assertEquals(expectedResults, actualResults);
}
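The only MapBasedRow-specific step above is draining the lazy result Sequence into a list with Accumulators.list(). That pattern on its own, assuming a Sequence<MapBasedRow> named rows (hypothetical here; in the test it comes from query.getRunner(walker).run(queryPlus)):

// accumulate eagerly materializes the Sequence into the supplied list.
List<MapBasedRow> collected = rows.accumulate(new ArrayList<>(), Accumulators.list());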
Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.
From class MovingAverageIterableTest, method testCompleteData.
@Test
public void testCompleteData() {
  Map<String, Object> event1 = new HashMap<>();
  Map<String, Object> event2 = new HashMap<>();
  Map<String, Object> event3 = new HashMap<>();
  event1.put("gender", "m");
  event1.put("pageViews", 10L);
  event2.put("gender", "f");
  event2.put("pageViews", 20L);
  event3.put("gender", "u");
  event3.put("pageViews", 30L);
  List<DimensionSpec> ds = new ArrayList<>();
  ds.add(new DefaultDimensionSpec("gender", "gender"));
  Row jan1Row1 = new MapBasedRow(JAN_1, event1);
  Row jan1Row2 = new MapBasedRow(JAN_1, event2);
  Row jan1Row3 = new MapBasedRow(JAN_1, event3);
  Row jan2Row1 = new MapBasedRow(JAN_2, event1);
  Row jan2Row2 = new MapBasedRow(JAN_2, event2);
  Row jan2Row3 = new MapBasedRow(JAN_2, event3);
  Sequence<RowBucket> seq = Sequences.simple(Arrays.asList(
      new RowBucket(JAN_1, Arrays.asList(jan1Row1, jan1Row2, jan1Row3)),
      new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3))
  ));
  Iterator<Row> iter = new MovingAverageIterable(
      seq,
      ds,
      Collections.singletonList(new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
      Collections.emptyList(),
      Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
  ).iterator();
  // With no gaps in the data, rows come back bucket by bucket in input order.
  Assert.assertTrue(iter.hasNext());
  Row result = iter.next();
  Assert.assertEquals("m", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_1, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("f", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_1, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("u", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_1, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("m", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_2, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("f", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_2, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("u", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_2, result.getTimestamp());
  Assert.assertFalse(iter.hasNext());
}
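For intuition on the averager configuration, a stand-alone arithmetic sketch (plain Java, not Druid code). LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews") asks for a mean over two buckets (reading the 2 as the window size, an assumption from the parameter order); how a not-yet-full window at JAN_1 is averaged is the averager's concern and is not asserted in the test:

// "m" contributes pageViews = 10 in both the JAN_1 and JAN_2 buckets,
// so a 2-bucket moving mean at JAN_2 would be (10 + 10) / 2.0 = 10.0.
long jan1PageViews = 10L;
long jan2PageViews = 10L;
double movingAvgAtJan2 = (jan1PageViews + jan2PageViews) / 2.0;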
Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.
From class MovingAverageIterableTest, method testMissingDataAtTheEnd.
// Tests value injection when data is missing at the end: the JAN_2 bucket contains only the "m" row.
@Test
public void testMissingDataAtTheEnd() {
  Map<String, Object> event1 = new HashMap<>();
  Map<String, Object> event2 = new HashMap<>();
  Map<String, Object> event3 = new HashMap<>();
  event1.put("gender", "m");
  event1.put("pageViews", 10L);
  event2.put("gender", "f");
  event2.put("pageViews", 20L);
  event3.put("gender", "u");
  event3.put("pageViews", 30L);
  List<DimensionSpec> ds = new ArrayList<>();
  ds.add(new DefaultDimensionSpec("gender", "gender"));
  Row jan1Row1 = new MapBasedRow(JAN_1, event1);
  Row jan1Row2 = new MapBasedRow(JAN_1, event2);
  Row jan1Row3 = new MapBasedRow(JAN_1, event3);
  Row jan2Row1 = new MapBasedRow(JAN_2, event1);
  Sequence<RowBucket> seq = Sequences.simple(Arrays.asList(
      new RowBucket(JAN_1, Arrays.asList(jan1Row1, jan1Row2, jan1Row3)),
      new RowBucket(JAN_2, Collections.singletonList(jan2Row1))
  ));
  Iterator<Row> iter = new MovingAverageIterable(
      seq,
      ds,
      Collections.singletonList(new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
      Collections.emptyList(),
      Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
  ).iterator();
  Assert.assertTrue(iter.hasNext());
  Row result = iter.next();
  Assert.assertEquals("m", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_1, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("f", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_1, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("u", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_1, result.getTimestamp());
  // JAN_2 only contains the "m" row; the "u" and "f" rows are injected by the
  // iterable so every dimension combination stays covered (note the emitted
  // order: m, u, f).
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("m", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_2, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("u", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_2, result.getTimestamp());
  Assert.assertTrue(iter.hasNext());
  result = iter.next();
  Assert.assertEquals("f", result.getDimension("gender").get(0));
  Assert.assertEquals(JAN_2, result.getTimestamp());
  Assert.assertFalse(iter.hasNext());
}