Search in sources :

Example 31 with TableDataSource

Use of org.apache.druid.query.TableDataSource in the project druid by druid-io.

The class TopNQueryQueryToolChestTest, method doTestCacheStrategy:

private void doTestCacheStrategy(final ColumnType valueType, final Object dimValue) throws IOException {
    // Build a simple topN over a dummy table so we can exercise the tool chest's cache strategy
    // for the given dimension value type.
    final TopNQuery query = new TopNQuery(
            new TableDataSource("dummy"),
            VirtualColumns.EMPTY,
            new DefaultDimensionSpec("test", "test", valueType),
            new NumericTopNMetricSpec("metric1"),
            3,
            new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
            null,
            Granularities.ALL,
            ImmutableList.of(
                    new CountAggregatorFactory("metric1"),
                    getComplexAggregatorFactoryForValueType(valueType.getType())
            ),
            ImmutableList.of(new ConstantPostAggregator("post", 10)),
            null
    );
    final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy =
            new TopNQueryQueryToolChest(null, null).getCacheStrategy(query);
    final ObjectMapper mapper = TestHelper.makeJsonMapper();

    // Round-trip a result through the segment-level cache representation (serialize to bytes,
    // deserialize, pull back out) and check it survives intact.
    final Result<TopNResultValue> segmentResult = new Result<>(
            // test timestamps that result in integer size millis
            DateTimes.utc(123L),
            new TopNResultValue(Collections.singletonList(ImmutableMap.of(
                    "test", dimValue,
                    "metric1", 2,
                    "complexMetric", getIntermediateComplexValue(valueType.getType(), dimValue)
            )))
    );
    final Object segmentCacheValue = strategy.prepareForSegmentLevelCache().apply(segmentResult);
    final Object segmentCacheRoundTrip =
            mapper.readValue(mapper.writeValueAsBytes(segmentCacheValue), strategy.getCacheObjectClazz());
    Assert.assertEquals(segmentResult, strategy.pullFromSegmentLevelCache().apply(segmentCacheRoundTrip));

    // Now round-trip through the result-level cache, which also carries the post-aggregator.
    final Result<TopNResultValue> resultLevelResult = new Result<>(
            // test timestamps that result in integer size millis
            DateTimes.utc(123L),
            new TopNResultValue(Collections.singletonList(ImmutableMap.of(
                    "test", dimValue,
                    "metric1", 2,
                    "complexMetric", dimValue,
                    "post", 10
            )))
    );
    // Please see the comments on aggregator serde and type handling in CacheStrategy.fetchAggregatorsFromCache()
    final Result<TopNResultValue> expectedFromResultCache;
    if (valueType.is(ValueType.FLOAT)) {
        expectedFromResultCache = new Result<>(
                DateTimes.utc(123L),
                new TopNResultValue(Collections.singletonList(ImmutableMap.of(
                        "test", dimValue,
                        "metric1", 2,
                        "complexMetric", 2.1d,
                        "post", 10
                )))
        );
    } else if (valueType.is(ValueType.LONG)) {
        expectedFromResultCache = new Result<>(
                DateTimes.utc(123L),
                new TopNResultValue(Collections.singletonList(ImmutableMap.of(
                        "test", dimValue,
                        "metric1", 2,
                        "complexMetric", 2,
                        "post", 10
                )))
        );
    } else {
        expectedFromResultCache = resultLevelResult;
    }
    final Object resultCacheValue = strategy.prepareForCache(true).apply(resultLevelResult);
    final Object resultCacheRoundTrip =
            mapper.readValue(mapper.writeValueAsBytes(resultCacheValue), strategy.getCacheObjectClazz());
    Assert.assertEquals(expectedFromResultCache, strategy.pullFromCache(true).apply(resultCacheRoundTrip));
}
Also used : ConstantPostAggregator(org.apache.druid.query.aggregation.post.ConstantPostAggregator) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Result(org.apache.druid.query.Result) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)

Example 32 with TableDataSource

Use of org.apache.druid.query.TableDataSource in the project druid by druid-io.

The class LoggingRequestLogger, method logNativeQuery:

@Override
public void logNativeQuery(RequestLogLine requestLogLine) throws IOException {
    // Snapshot the caller's MDC so it can be restored (or cleared) after logging.
    // NOTE: was a raw `Map`; SLF4J's MDC.getCopyOfContextMap() returns Map<String, String>.
    final Map<String, String> mdc = MDC.getCopyOfContextMap();
    // MDC must be set during the `LOG.info` call at the end of the try block.
    try {
        if (setMDC) {
            try {
                // Was a raw `Query`; the wildcard keeps the compiler's generics checks without
                // committing to a result type we don't need here.
                final Query<?> query = requestLogLine.getQuery();
                MDC.put("queryId", query.getId());
                MDC.put(BaseQuery.SQL_QUERY_ID, StringUtils.nullToEmptyNonDruidDataString(query.getSqlQueryId()));
                MDC.put("dataSource", String.join(",", query.getDataSource().getTableNames()));
                MDC.put("queryType", query.getType());
                // Anything other than a plain table data source (joins, subqueries, …) counts as nested.
                MDC.put("isNested", String.valueOf(!(query.getDataSource() instanceof TableDataSource)));
                MDC.put("hasFilters", Boolean.toString(query.hasFilters()));
                MDC.put("remoteAddr", requestLogLine.getRemoteAddr());
                MDC.put("duration", query.getDuration().toString());
                MDC.put("descending", Boolean.toString(query.isDescending()));
                if (setContextMDC) {
                    final Iterable<Map.Entry<String, Object>> entries = query.getContext() == null ? ImmutableList.of() : query.getContext().entrySet();
                    for (Map.Entry<String, Object> entry : entries) {
                        MDC.put(entry.getKey(), entry.getValue() == null ? "NULL" : entry.getValue().toString());
                    }
                }
            } catch (RuntimeException re) {
                // Best-effort: a broken query must not prevent the request from being logged.
                LOG.error(re, "Error preparing MDC");
            }
        }
        final String line = requestLogLine.getNativeQueryLine(mapper);
        // MDC must be set here
        LOG.info("%s", line);
    } finally {
        // Restore the caller's MDC exactly as we found it, even if logging threw.
        if (setMDC) {
            if (mdc != null) {
                MDC.setContextMap(mdc);
            } else {
                MDC.clear();
            }
        }
    }
}
Also used : Query(org.apache.druid.query.Query) BaseQuery(org.apache.druid.query.BaseQuery) TableDataSource(org.apache.druid.query.TableDataSource) Map(java.util.Map)

Example 33 with TableDataSource

Use of org.apache.druid.query.TableDataSource in the project druid by druid-io.

The class TimeBoundaryQueryRunnerTest, method testMergeResults:

@Test
public void testMergeResults() {
    // Two partial results share the same minTime but report different maxTimes;
    // merging should surface the later maxTime.
    final Result<TimeBoundaryResultValue> earlier = new Result<>(
            DateTimes.nowUtc(),
            new TimeBoundaryResultValue(ImmutableMap.of("maxTime", "2012-01-01", "minTime", "2011-01-01"))
    );
    final Result<TimeBoundaryResultValue> later = new Result<>(
            DateTimes.nowUtc(),
            new TimeBoundaryResultValue(ImmutableMap.of("maxTime", "2012-02-01", "minTime", "2011-01-01"))
    );
    final TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null, null);
    final Iterable<Result<TimeBoundaryResultValue>> merged = query.mergeResults(Arrays.asList(earlier, later));
    Assert.assertTrue(merged.iterator().next().getValue().getMaxTime().equals(DateTimes.of("2012-02-01")));
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) Result(org.apache.druid.query.Result) Test(org.junit.Test)

Example 34 with TableDataSource

Use of org.apache.druid.query.TableDataSource in the project druid by druid-io.

The class TimeBoundaryQueryRunnerTest, method testMergeResultsEmptyResults:

@Test
public void testMergeResultsEmptyResults() {
    // Merging zero partial results should yield an empty iterable rather than failing.
    final TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null, null);
    final List<Result<TimeBoundaryResultValue>> noResults = new ArrayList<>();
    final Iterable<Result<TimeBoundaryResultValue>> merged = query.mergeResults(noResults);
    Assert.assertFalse(merged.iterator().hasNext());
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) ArrayList(java.util.ArrayList) Result(org.apache.druid.query.Result) Test(org.junit.Test)

Example 35 with TableDataSource

Use of org.apache.druid.query.TableDataSource in the project druid by druid-io.

The class ScanQuerySpecTest, method testSerialization:

@Test
public void testSerialization() throws Exception {
    // The legacy wire format lacks the newer fields (virtualColumns, resultFormat, batchSize, …)
    // but must still deserialize into an equivalent query.
    final String legacyJson =
            "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
            + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
            + "\"filter\":null,"
            + "\"columns\":[\"market\",\"quality\",\"index\"],"
            + "\"limit\":3,"
            + "\"context\":null}";
    final String currentJson =
            "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
            + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
            + "\"virtualColumns\":[],"
            + "\"resultFormat\":\"list\","
            + "\"batchSize\":20480,"
            + "\"limit\":3,"
            + "\"filter\":null,"
            + "\"columns\":[\"market\",\"quality\",\"index\"],"
            + "\"context\":null,"
            + "\"descending\":false,"
            + "\"granularity\":{\"type\":\"all\"}}";
    final ScanQuery expected = new ScanQuery(
            new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
            new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
            VirtualColumns.EMPTY,
            ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
            0,
            0,
            3,
            ScanQuery.Order.NONE,
            null,
            null,
            Arrays.asList("market", "quality", "index"),
            null,
            null
    );
    // Serializing must emit the current format exactly…
    final String serialized = JSON_MAPPER.writeValueAsString(expected);
    Assert.assertEquals(currentJson, serialized);
    // …and both the current and the legacy JSON must round-trip back to the same query.
    Assert.assertEquals(expected, JSON_MAPPER.readValue(serialized, ScanQuery.class));
    Assert.assertEquals(expected, JSON_MAPPER.readValue(legacyJson, ScanQuery.class));
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) LegacySegmentSpec(org.apache.druid.query.spec.LegacySegmentSpec) Test(org.junit.Test)

Aggregations

TableDataSource (org.apache.druid.query.TableDataSource)118 Test (org.junit.Test)94 GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource)46 CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory)43 QueryDataSource (org.apache.druid.query.QueryDataSource)41 DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec)40 Parameters (junitparams.Parameters)30 MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec)19 LookupDataSource (org.apache.druid.query.LookupDataSource)18 DataSegment (org.apache.druid.timeline.DataSegment)15 Result (org.apache.druid.query.Result)14 CountDownLatch (java.util.concurrent.CountDownLatch)11 Query (org.apache.druid.query.Query)11 TimelineObjectHolder (org.apache.druid.timeline.TimelineObjectHolder)11 Interval (org.joda.time.Interval)11 SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter)10 ArrayList (java.util.ArrayList)9 GroupByQuery (org.apache.druid.query.groupby.GroupByQuery)9 ISE (org.apache.druid.java.util.common.ISE)8 SegmentDescriptor (org.apache.druid.query.SegmentDescriptor)8