Example 1 with TableDataSource

Use of io.druid.query.TableDataSource in project druid by druid-io.

From the class SegmentMetadataQueryQueryToolChestTest, method testCacheStrategy (a reusable round-trip helper sketch follows this example):

@Test
public void testCacheStrategy() throws Exception {
    SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("dummy"),
        QuerySegmentSpecs.create("2015-01-01/2015-01-02"),
        null, null, null, null, false, false
    );
    CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> strategy = new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(query);
    // Test cache key generation
    byte[] expectedKey = { 0x04, 0x01, (byte) 0xFF, 0x00, 0x02, 0x04 };
    byte[] actualKey = strategy.computeCacheKey(query);
    Assert.assertArrayEquals(expectedKey, actualKey);
    // Sample analysis result to push through the cache round trip below.
    SegmentAnalysis result = new SegmentAnalysis(
        "testSegment",
        ImmutableList.of(new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ValueType.STRING.toString(), true, 10881, 1, "preferred", "preferred", null)
        ),
        71982, 100, null, null, null, null
    );
    Object preparedValue = strategy.prepareForCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    SegmentAnalysis fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    SegmentAnalysis fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used: TableDataSource (io.druid.query.TableDataSource), SegmentMetadataQuery (io.druid.query.metadata.metadata.SegmentMetadataQuery), ColumnAnalysis (io.druid.query.metadata.metadata.ColumnAnalysis), SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Interval (org.joda.time.Interval), Test (org.junit.Test)
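
The prepare/serialize/deserialize/pull sequence in this test recurs in every cache-strategy example below. The following is a minimal sketch of that pattern as a reusable helper; CacheRoundTripHelper and roundTripThroughCache are hypothetical names rather than part of the Druid codebase, and the sketch assumes only the CacheStrategy and ObjectMapper calls already used above.

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.query.CacheStrategy;
import io.druid.query.Query;

import java.io.IOException;

public class CacheRoundTripHelper {

    // Convert a result to its cacheable form, simulate a cache hit by serializing the
    // prepared value to bytes and reading it back, then restore the original result type.
    public static <T, CacheType, QueryType extends Query<T>> T roundTripThroughCache(
        CacheStrategy<T, CacheType, QueryType> strategy,
        T result,
        ObjectMapper objectMapper
    ) throws IOException {
        CacheType prepared = strategy.prepareForCache().apply(result);
        CacheType fromBytes = objectMapper.readValue(
            objectMapper.writeValueAsBytes(prepared),
            strategy.getCacheObjectClazz()
        );
        return strategy.pullFromCache().apply(fromBytes);
    }
}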

Example 2 with TableDataSource

Use of io.druid.query.TableDataSource in project druid by druid-io.

From the class SearchQueryQueryToolChestTest, method testCacheStrategy (a usage of the round-trip helper from Example 1 follows this example):

@Test
public void testCacheStrategy() throws Exception {
    CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy =
        new SearchQueryQueryToolChest(null, null).getCacheStrategy(
            new SearchQuery(
                new TableDataSource("dummy"), null, Granularities.ALL, 1,
                new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
                ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")),
                new FragmentSearchQuerySpec(ImmutableList.of("a", "b")),
                null, null
            )
        );
    final Result<SearchResultValue> result = new Result<>(new DateTime(123L), new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a"))));
    Object preparedValue = strategy.prepareForCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<SearchResultValue> fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used: SearchQuery (io.druid.query.search.search.SearchQuery), FragmentSearchQuerySpec (io.druid.query.search.search.FragmentSearchQuerySpec), SearchHit (io.druid.query.search.search.SearchHit), MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec), DateTime (org.joda.time.DateTime), Result (io.druid.query.Result), TableDataSource (io.druid.query.TableDataSource), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Interval (org.joda.time.Interval), Test (org.junit.Test)
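
Inside the test method above, the serialization round trip can also be expressed with the roundTripThroughCache sketch from Example 1 (again, a hypothetical helper rather than part of the original test):

    Result<SearchResultValue> fromCacheResult =
        CacheRoundTripHelper.roundTripThroughCache(strategy, result, new DefaultObjectMapper());
    Assert.assertEquals(result, fromCacheResult);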

Example 3 with TableDataSource

Use of io.druid.query.TableDataSource in project druid by druid-io.

From the class SelectQuerySpecTest, method testSerializationLegacyString (a dataSource-only round-trip sketch follows this example):

@Test
public void testSerializationLegacyString() throws Exception {
    // Legacy serialization: plain-string dimensions, null virtualColumns, no fromNext flag.
    String legacy = "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"descending\":true,"
        + "\"filter\":null,"
        + "\"granularity\":{\"type\":\"all\"},"
        + "\"dimensions\":[\"market\",\"quality\"],"
        + "\"metrics\":[\"index\"],"
        + "\"virtualColumns\":null,"
        + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3},"
        + "\"context\":null}";
    // Current serialization: DefaultDimensionSpec dimensions, empty virtualColumns, explicit fromNext.
    String current = "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"descending\":true,"
        + "\"filter\":null,"
        + "\"granularity\":{\"type\":\"all\"},"
        + "\"dimensions\":"
        + "[{\"type\":\"default\",\"dimension\":\"market\",\"outputName\":\"market\",\"outputType\":\"STRING\"},"
        + "{\"type\":\"default\",\"dimension\":\"quality\",\"outputName\":\"quality\",\"outputType\":\"STRING\"}],"
        + "\"metrics\":[\"index\"],"
        + "\"virtualColumns\":[],"
        + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3,\"fromNext\":true},"
        + "\"context\":null}";
    SelectQuery query = new SelectQuery(
        new TableDataSource(QueryRunnerTestHelper.dataSource),
        new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")),
        true,
        null,
        QueryRunnerTestHelper.allGran,
        DefaultDimensionSpec.toSpec(Arrays.<String>asList("market", "quality")),
        Arrays.<String>asList("index"),
        null,
        new PagingSpec(null, 3, null),
        null
    );
    String actual = objectMapper.writeValueAsString(query);
    Assert.assertEquals(current, actual);
    Assert.assertEquals(query, objectMapper.readValue(actual, SelectQuery.class));
    Assert.assertEquals(query, objectMapper.readValue(legacy, SelectQuery.class));
}
Also used: TableDataSource (io.druid.query.TableDataSource), LegacySegmentSpec (io.druid.query.spec.LegacySegmentSpec), Interval (org.joda.time.Interval), Test (org.junit.Test)
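
The dataSource fragment in both JSON strings above is the serialized form of TableDataSource itself. Below is a minimal sketch of round-tripping just that piece inside the test method above, assuming the {"type":"table","name":...} shape shown in the strings and an import of io.druid.query.DataSource for the polymorphic read:

    TableDataSource dataSource = new TableDataSource("testing");
    // Per the JSON above, this should serialize to {"type":"table","name":"testing"}.
    String json = objectMapper.writeValueAsString(dataSource);
    DataSource roundTripped = objectMapper.readValue(json, DataSource.class);
    Assert.assertEquals(dataSource, roundTripped);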

Example 4 with TableDataSource

Use of io.druid.query.TableDataSource in project druid by druid-io.

From the class TimeBoundaryQueryQueryToolChestTest, method testCacheStrategy (a field-level assertion sketch follows this example):

@Test
public void testCacheStrategy() throws Exception {
    CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery> strategy =
        new TimeBoundaryQueryQueryToolChest().getCacheStrategy(
            new TimeBoundaryQuery(
                new TableDataSource("dummy"),
                new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
                null, null, null
            )
        );
    final Result<TimeBoundaryResultValue> result = new Result<>(
        new DateTime(123L),
        new TimeBoundaryResultValue(ImmutableMap.of(
            TimeBoundaryQuery.MIN_TIME, new DateTime(0L).toString(),
            TimeBoundaryQuery.MAX_TIME, new DateTime("2015-01-01").toString()
        ))
    );
    Object preparedValue = strategy.prepareForCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<TimeBoundaryResultValue> fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used: MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec), DateTime (org.joda.time.DateTime), Result (io.druid.query.Result), TableDataSource (io.druid.query.TableDataSource), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Interval (org.joda.time.Interval), Test (org.junit.Test)
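
Beyond whole-object equality, the restored value can be checked field by field inside the test method above. A minimal sketch, assuming TimeBoundaryResultValue exposes getMinTime()/getMaxTime() accessors returning DateTime (verify against the class before reusing):

    // Compare instants via millis to avoid chronology/zone differences after the String round trip.
    Assert.assertEquals(new DateTime(0L).getMillis(), fromCacheResult.getValue().getMinTime().getMillis());
    Assert.assertEquals(new DateTime("2015-01-01").getMillis(), fromCacheResult.getValue().getMaxTime().getMillis());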

Example 5 with TableDataSource

Use of io.druid.query.TableDataSource in project druid by druid-io.

From the class TopNQueryQueryToolChestTest, method testComputeCacheKeyWithDifferentPostAgg (a complementary cache-key determinism sketch follows this example):

@Test
public void testComputeCacheKeyWithDifferentPostAgg() throws Exception {
    // query1 and query2 are identical except for their post-aggregators.
    final TopNQuery query1 = new TopNQuery(
        new TableDataSource("dummy"), VirtualColumns.EMPTY,
        new DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("post"), 3,
        new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
        null, Granularities.ALL,
        ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("metric1")),
        ImmutableList.<PostAggregator>of(new ConstantPostAggregator("post", 10)),
        null
    );
    final TopNQuery query2 = new TopNQuery(
        new TableDataSource("dummy"), VirtualColumns.EMPTY,
        new DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("post"), 3,
        new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
        null, Granularities.ALL,
        ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("metric1")),
        ImmutableList.<PostAggregator>of(new ArithmeticPostAggregator(
            "post", "+",
            ImmutableList.<PostAggregator>of(
                new FieldAccessPostAggregator(null, "metric1"),
                new FieldAccessPostAggregator(null, "metric1")
            )
        )),
        null
    );
    final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy1 = new TopNQueryQueryToolChest(null, null).getCacheStrategy(query1);
    final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy2 = new TopNQueryQueryToolChest(null, null).getCacheStrategy(query2);
    Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
}
Also used: ArithmeticPostAggregator (io.druid.query.aggregation.post.ArithmeticPostAggregator), FieldAccessPostAggregator (io.druid.query.aggregation.post.FieldAccessPostAggregator), ConstantPostAggregator (io.druid.query.aggregation.post.ConstantPostAggregator), MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec), DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), Result (io.druid.query.Result), TableDataSource (io.druid.query.TableDataSource), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), Interval (org.joda.time.Interval), Test (org.junit.Test)
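
As a complement to the inequality assertion above, the following sketch (not part of the original test) checks that equal inputs produce identical cache keys, using only the calls already shown, inside the same test method:

    final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy1Again =
        new TopNQueryQueryToolChest(null, null).getCacheStrategy(query1);
    // Two strategies built from the same query should agree byte-for-byte on the cache key.
    Assert.assertArrayEquals(strategy1.computeCacheKey(query1), strategy1Again.computeCacheKey(query1));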

Aggregations (classes most frequently used together with TableDataSource, with usage counts)

TableDataSource (io.druid.query.TableDataSource): 25 usages
Interval (org.joda.time.Interval): 18 usages
Test (org.junit.Test): 17 usages
Result (io.druid.query.Result): 8 usages
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec): 7 usages
TimelineObjectHolder (io.druid.timeline.TimelineObjectHolder): 7 usages
DateTime (org.joda.time.DateTime): 7 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 6 usages
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 5 usages
DataSegment (io.druid.timeline.DataSegment): 5 usages
Pair (io.druid.java.util.common.Pair): 4 usages
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 4 usages
TimelineLookup (io.druid.timeline.TimelineLookup): 4 usages
CountDownLatch (java.util.concurrent.CountDownLatch): 4 usages
Function (com.google.common.base.Function): 3 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 3 usages
Query (io.druid.query.Query): 3 usages
QueryRunner (io.druid.query.QueryRunner): 3 usages
SegmentDescriptor (io.druid.query.SegmentDescriptor): 3 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 3 usages