
Example 16 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

In the class MaterializedViewQueryQueryToolChestTest, the method testDecorateObjectMapper.

@Test
public void testDecorateObjectMapper() throws IOException {
    GroupByQuery realQuery = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
        .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
        .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false))
        .build();
    QueryToolChest queryToolChest = new MaterializedViewQueryQueryToolChest(
        new MapQueryToolChestWarehouse(
            ImmutableMap.<Class<? extends Query>, QueryToolChest>builder()
                .put(GroupByQuery.class, new GroupByQueryQueryToolChest(null))
                .build()));
    ObjectMapper objectMapper = queryToolChest.decorateObjectMapper(JSON_MAPPER, realQuery);
    List<ResultRow> results = Arrays.asList(
        GroupByQueryRunnerTestHelper.createExpectedRow(realQuery, "2011-04-01", "alias", "automotive", "rows", 1L, "idx", 135L),
        GroupByQueryRunnerTestHelper.createExpectedRow(realQuery, "2011-04-01", "alias", "business", "rows", 1L, "idx", 118L));
    List<MapBasedRow> expectedResults = results.stream()
        .map(resultRow -> resultRow.toMapBasedRow(realQuery))
        .collect(Collectors.toList());
    Assert.assertEquals(
        "decorate-object-mapper",
        JSON_MAPPER.writerFor(new TypeReference<List<MapBasedRow>>() {}).writeValueAsString(expectedResults),
        objectMapper.writeValueAsString(results));
}
Also used : ResultRow(org.apache.druid.query.groupby.ResultRow) Arrays(java.util.Arrays) MapBasedRow(org.apache.druid.data.input.MapBasedRow) TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Druids(org.apache.druid.query.Druids) Query(org.apache.druid.query.Query) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) Map(java.util.Map) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) TypeReference(com.fasterxml.jackson.core.type.TypeReference) MetricManipulationFn(org.apache.druid.query.aggregation.MetricManipulationFn) DateTimes(org.apache.druid.java.util.common.DateTimes) Function(com.google.common.base.Function) ImmutableMap(com.google.common.collect.ImmutableMap) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig) MapQueryToolChestWarehouse(org.apache.druid.query.MapQueryToolChestWarehouse) QueryToolChest(org.apache.druid.query.QueryToolChest) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) Test(org.junit.Test) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) GroupByQueryRunnerTestHelper(org.apache.druid.query.groupby.GroupByQueryRunnerTestHelper) Result(org.apache.druid.query.Result) List(java.util.List) QueryRunnerTestHelper(org.apache.druid.query.QueryRunnerTestHelper) GroupByQueryQueryToolChest(org.apache.druid.query.groupby.GroupByQueryQueryToolChest) Assert(org.junit.Assert)
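
For reference, the expected string here is the MapBasedRow form of each row: a timestamp plus an event map, which the decorated mapper teaches ResultRow to match. A minimal sketch of serializing a MapBasedRow directly, assuming its standard Jackson bindings (timestamp/event) and Druid's DefaultObjectMapper (class name MapBasedRowJsonSketch and the sample values are illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.DateTimes;

public class MapBasedRowJsonSketch {
    public static void main(String[] args) throws Exception {
        // A MapBasedRow pairs a timestamp with an event map of column -> value.
        MapBasedRow row = new MapBasedRow(
            DateTimes.of("2011-04-01"),
            ImmutableMap.of("alias", "automotive", "rows", 1L, "idx", 135L));
        ObjectMapper mapper = new DefaultObjectMapper();
        // Assumed output shape: {"timestamp":"2011-04-01T00:00:00.000Z","event":{...}}
        System.out.println(mapper.writeValueAsString(row));
    }
}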

Example 17 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

In the class MovingAverageHelper, the method getDimKeyFromRow.

/**
 * @param dimensions The list of DimensionSpecs specified in the query
 * @param row        The Row to be used for looking up dimension values
 *
 * @return A Map of dimension/value from the row
 */
public static Map<String, Object> getDimKeyFromRow(Collection<DimensionSpec> dimensions, Row row) {
    Map<String, Object> key = new HashMap<>();
    Map<String, Object> event = ((MapBasedRow) row).getEvent();
    for (DimensionSpec dimension : dimensions) {
        key.put(dimension.getOutputName(), event.get(dimension.getOutputName()));
    }
    return key;
}
Also used : MapBasedRow(org.apache.druid.data.input.MapBasedRow) DimensionSpec(org.apache.druid.query.dimension.DimensionSpec) HashMap(java.util.HashMap)
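
A minimal usage sketch for the helper above: build a MapBasedRow whose event holds both a dimension and a metric, and confirm that only the dimension lands in the key (class name DimKeySketch and the sample values are illustrative):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Map;
import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.query.movingaverage.MovingAverageHelper;

public class DimKeySketch {
    public static void main(String[] args) {
        Row row = new MapBasedRow(
            DateTimes.of("2020-01-01"),
            ImmutableMap.of("gender", "m", "pageViews", 10L));
        // One spec per grouping dimension; here the output name matches the event key.
        List<DimensionSpec> dims = ImmutableList.of(new DefaultDimensionSpec("gender", "gender"));
        Map<String, Object> key = MovingAverageHelper.getDimKeyFromRow(dims, row);
        System.out.println(key); // {gender=m} -- metrics are not part of the key
    }
}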

Example 18 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

In the class MovingAverageQueryTest, the method testQuery.

/**
 * Validate that the specified query behaves correctly.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testQuery() throws IOException {
    Query<?> query = jsonMapper.readValue(getQueryString(), Query.class);
    Assert.assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType()));
    List<MapBasedRow> expectedResults = jsonMapper.readValue(getExpectedResultString(), getExpectedResultType());
    Assert.assertNotNull(expectedResults);
    Assert.assertThat(expectedResults, IsInstanceOf.instanceOf(List.class));
    CachingClusteredClient baseClient = new CachingClusteredClient(warehouse, new TimelineServerView() {

        @Override
        public Optional<? extends TimelineLookup<String, ServerSelector>> getTimeline(DataSourceAnalysis analysis) {
            return Optional.empty();
        }

        @Override
        public List<ImmutableDruidServer> getDruidServers() {
            return null;
        }

        @Override
        public <T> QueryRunner<T> getQueryRunner(DruidServer server) {
            return null;
        }

        @Override
        public void registerTimelineCallback(Executor exec, TimelineCallback callback) {
        }

        @Override
        public void registerSegmentCallback(Executor exec, SegmentCallback callback) {
        }

        @Override
        public void registerServerRemovedCallback(Executor exec, ServerRemovedCallback callback) {
        }
    }, MapCache.create(100000), jsonMapper, new ForegroundCachePopulator(jsonMapper, new CachePopulatorStats(), -1), new CacheConfig(), new DruidHttpClientConfig() {

        @Override
        public long getMaxQueuedBytes() {
            return 0L;
        }
    }, new DruidProcessingConfig() {

        @Override
        public String getFormatString() {
            return null;
        }
    }, ForkJoinPool.commonPool(), QueryStackTests.DEFAULT_NOOP_SCHEDULER, new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()), new NoopServiceEmitter());
    ClientQuerySegmentWalker walker = new ClientQuerySegmentWalker(new ServiceEmitter("", "", null) {

        @Override
        public void emit(Event event) {
        }
    }, baseClient, null, /* local client; unused in this test, so pass in null */
    warehouse, new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()), retryConfig, jsonMapper, serverConfig, null, new CacheConfig());
    defineMocks();
    QueryPlus queryPlus = QueryPlus.wrap(query);
    final Sequence<?> res = query.getRunner(walker).run(queryPlus);
    List actualResults = (List<MapBasedRow>) res.accumulate(new ArrayList(), Accumulators.list());
    expectedResults = consistentTypeCasting(expectedResults);
    actualResults = consistentTypeCasting(actualResults);
    Assert.assertEquals(expectedResults, actualResults);
}
Also used : ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) ArrayList(java.util.ArrayList) DataSourceAnalysis(org.apache.druid.query.planning.DataSourceAnalysis) DruidHttpClientConfig(org.apache.druid.guice.http.DruidHttpClientConfig) MapBasedRow(org.apache.druid.data.input.MapBasedRow) Executor(java.util.concurrent.Executor) CachePopulatorStats(org.apache.druid.client.cache.CachePopulatorStats) List(java.util.List) TimelineServerView(org.apache.druid.client.TimelineServerView) CacheConfig(org.apache.druid.client.cache.CacheConfig) MapJoinableFactory(org.apache.druid.segment.join.MapJoinableFactory) QueryPlus(org.apache.druid.query.QueryPlus) CachingClusteredClient(org.apache.druid.client.CachingClusteredClient) Optional(java.util.Optional) DruidServer(org.apache.druid.client.DruidServer) ImmutableDruidServer(org.apache.druid.client.ImmutableDruidServer) QueryRunner(org.apache.druid.query.QueryRunner) ClientQuerySegmentWalker(org.apache.druid.server.ClientQuerySegmentWalker) Event(org.apache.druid.java.util.emitter.core.Event) ForegroundCachePopulator(org.apache.druid.client.cache.ForegroundCachePopulator) DruidProcessingConfig(org.apache.druid.query.DruidProcessingConfig) TimelineLookup(org.apache.druid.timeline.TimelineLookup) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
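
The final accumulate call is the standard way to materialize a Druid Sequence into a List. A self-contained sketch using an explicit accumulator lambda instead of Accumulators.list() (Sequences.simple over plain strings, as an illustration):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

public class SequenceAccumulateSketch {
    public static void main(String[] args) {
        Sequence<String> seq = Sequences.simple(Arrays.asList("a", "b", "c"));
        // accumulate folds every element into the supplied container and returns it.
        List<String> out = seq.accumulate(new ArrayList<>(), (acc, value) -> {
            acc.add(value);
            return acc;
        });
        System.out.println(out); // [a, b, c]
    }
}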

Example 19 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

In the class MovingAverageIterableTest, the method testCompleteData.

@Test
public void testCompleteData() {
    Map<String, Object> event1 = new HashMap<>();
    Map<String, Object> event2 = new HashMap<>();
    Map<String, Object> event3 = new HashMap<>();
    event1.put("gender", "m");
    event1.put("pageViews", 10L);
    event2.put("gender", "f");
    event2.put("pageViews", 20L);
    event3.put("gender", "u");
    event3.put("pageViews", 30L);
    List<DimensionSpec> ds = new ArrayList<>();
    ds.add(new DefaultDimensionSpec("gender", "gender"));
    Row jan1Row1 = new MapBasedRow(JAN_1, event1);
    Row jan1Row2 = new MapBasedRow(JAN_1, event2);
    Row jan1Row3 = new MapBasedRow(JAN_1, event3);
    Row jan2Row1 = new MapBasedRow(JAN_2, event1);
    Row jan2Row2 = new MapBasedRow(JAN_2, event2);
    Row jan2Row3 = new MapBasedRow(JAN_2, event3);
    Sequence<RowBucket> seq = Sequences.simple(Arrays.asList(
        new RowBucket(JAN_1, Arrays.asList(jan1Row1, jan1Row2, jan1Row3)),
        new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3))));
    Iterator<Row> iter = new MovingAverageIterable(
        seq,
        ds,
        Collections.singletonList(new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
        Collections.emptyList(),
        Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))).iterator();
    Assert.assertTrue(iter.hasNext());
    Row result = iter.next();
    Assert.assertEquals("m", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_1, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("f", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_1, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("u", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_1, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("m", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_2, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("f", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_2, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("u", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_2, (result.getTimestamp()));
    Assert.assertFalse(iter.hasNext());
}
Also used : DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) DimensionSpec(org.apache.druid.query.dimension.DimensionSpec) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) MapBasedRow(org.apache.druid.data.input.MapBasedRow) LongMeanAveragerFactory(org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory) Row(org.apache.druid.data.input.Row) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
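
The asserts above only check the dimension and timestamp of each emitted row; the computed average itself rides on each row under the averager's output name. A fragment continuing from the iterator in the test, assuming Row.getMetric for reading numeric columns:

    // Each emitted Row also carries the moving average under "movingAvgPageViews".
    Row first = new MovingAverageIterable(
        seq,
        ds,
        Collections.singletonList(new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
        Collections.emptyList(),
        Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))).iterator().next();
    Number avg = first.getMetric("movingAvgPageViews"); // e.g. the 2-bucket mean of pageViews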

Example 20 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

In the class MovingAverageIterableTest, the method testMissingDataAtTheEnd.

// Test that missing rows are injected at the end of the data: the JAN_2 bucket
// contains only the "m" row, yet "u" and "f" rows are still emitted for JAN_2.
@Test
public void testMissingDataAtTheEnd() {
    Map<String, Object> event1 = new HashMap<>();
    Map<String, Object> event2 = new HashMap<>();
    Map<String, Object> event3 = new HashMap<>();
    event1.put("gender", "m");
    event1.put("pageViews", 10L);
    event2.put("gender", "f");
    event2.put("pageViews", 20L);
    event3.put("gender", "u");
    event3.put("pageViews", 30L);
    List<DimensionSpec> ds = new ArrayList<>();
    ds.add(new DefaultDimensionSpec("gender", "gender"));
    Row jan1Row1 = new MapBasedRow(JAN_1, event1);
    Row jan1Row2 = new MapBasedRow(JAN_1, event2);
    Row jan1Row3 = new MapBasedRow(JAN_1, event3);
    Row jan2Row1 = new MapBasedRow(JAN_2, event1);
    Sequence<RowBucket> seq = Sequences.simple(Arrays.asList(
        new RowBucket(JAN_1, Arrays.asList(jan1Row1, jan1Row2, jan1Row3)),
        new RowBucket(JAN_2, Collections.singletonList(jan2Row1))));
    Iterator<Row> iter = new MovingAverageIterable(
        seq,
        ds,
        Collections.singletonList(new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
        Collections.emptyList(),
        Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))).iterator();
    Assert.assertTrue(iter.hasNext());
    Row result = iter.next();
    Assert.assertEquals("m", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_1, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("f", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_1, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("u", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_1, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("m", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_2, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("u", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_2, (result.getTimestamp()));
    Assert.assertTrue(iter.hasNext());
    result = iter.next();
    Assert.assertEquals("f", (result.getDimension("gender")).get(0));
    Assert.assertEquals(JAN_2, (result.getTimestamp()));
    Assert.assertFalse(iter.hasNext());
}
Also used : DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) DimensionSpec(org.apache.druid.query.dimension.DimensionSpec) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) MapBasedRow(org.apache.druid.data.input.MapBasedRow) LongMeanAveragerFactory(org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory) Row(org.apache.druid.data.input.Row) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Aggregations

MapBasedRow (org.apache.druid.data.input.MapBasedRow): 65 uses
Test (org.junit.Test): 50 uses
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 36 uses
ArrayList (java.util.ArrayList): 21 uses
Row (org.apache.druid.data.input.Row): 16 uses
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 16 uses
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 16 uses
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 15 uses
HashMap (java.util.HashMap): 13 uses
DimensionSpec (org.apache.druid.query.dimension.DimensionSpec): 12 uses
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 10 uses
List (java.util.List): 9 uses
ResultRow (org.apache.druid.query.groupby.ResultRow): 9 uses
LongMeanAveragerFactory (org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory): 9 uses
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 8 uses
File (java.io.File): 7 uses
ByteBuffer (java.nio.ByteBuffer): 6 uses
GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig): 6 uses
TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue): 6 uses
IOException (java.io.IOException): 5 uses