
Example 71 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class SegmentMetadataQueryQueryToolChestTest, method testCacheStrategy.

@Test
public void testCacheStrategy() throws Exception {
    SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("dummy"),
        QuerySegmentSpecs.create("2015-01-01/2015-01-02"),
        null, null, null, null, false, false
    );
    CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> strategy = new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(query);
    // Test cache key generation
    byte[] expectedKey = { 0x04, 0x01, (byte) 0xFF, 0x00, 0x02, 0x04 };
    byte[] actualKey = strategy.computeCacheKey(query);
    Assert.assertArrayEquals(expectedKey, actualKey);
    SegmentAnalysis result = new SegmentAnalysis(
        "testSegment",
        ImmutableList.of(new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ValueType.STRING.toString(), true, 10881, 1, "preferred", "preferred", null)
        ),
        71982, 100, null, null, null, null
    );
    Object preparedValue = strategy.prepareForCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    SegmentAnalysis fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    SegmentAnalysis fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used: TableDataSource (io.druid.query.TableDataSource), SegmentMetadataQuery (io.druid.query.metadata.metadata.SegmentMetadataQuery), ColumnAnalysis (io.druid.query.metadata.metadata.ColumnAnalysis), SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Interval (org.joda.time.Interval), Test (org.junit.Test)
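
The four calls in this test form a reusable round-trip pattern: prepareForCache, serialize, deserialize via getCacheObjectClazz, then pullFromCache and compare. A minimal sketch of that pattern factored into a helper; the helper name and its raw-typed signature are ours, not Druid API, and every call inside mirrors the test above.

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.query.CacheStrategy;
import org.junit.Assert;

// Sketch: generic cache round-trip assertion. Raw types keep it independent of
// the exact CacheStrategy type parameters; each step mirrors the test above.
@SuppressWarnings({"unchecked", "rawtypes"})
static void assertCacheRoundTrip(CacheStrategy strategy, Object result, ObjectMapper mapper) throws Exception {
    Object prepared = strategy.prepareForCache().apply(result);                // result -> cacheable form
    byte[] bytes = mapper.writeValueAsBytes(prepared);                         // what the cache would store
    Object restored = mapper.readValue(bytes, strategy.getCacheObjectClazz()); // bytes -> cacheable form
    Assert.assertEquals(result, strategy.pullFromCache().apply(restored));     // cacheable form -> result
}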

Example 72 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class SearchQueryQueryToolChestTest, method testCacheStrategy.

@Test
public void testCacheStrategy() throws Exception {
    CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy = new SearchQueryQueryToolChest(null, null).getCacheStrategy(
        new SearchQuery(
            new TableDataSource("dummy"),
            null,
            Granularities.ALL,
            1,
            new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
            ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")),
            new FragmentSearchQuerySpec(ImmutableList.of("a", "b")),
            null,
            null
        )
    );
    final Result<SearchResultValue> result = new Result<>(new DateTime(123L), new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a"))));
    Object preparedValue = strategy.prepareForCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<SearchResultValue> fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used: SearchQuery (io.druid.query.search.search.SearchQuery), FragmentSearchQuerySpec (io.druid.query.search.search.FragmentSearchQuerySpec), SearchHit (io.druid.query.search.search.SearchHit), MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec), DateTime (org.joda.time.DateTime), Result (io.druid.query.Result), TableDataSource (io.druid.query.TableDataSource), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Interval (org.joda.time.Interval), Test (org.junit.Test)
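
Both cache tests lean on two Joda-Time conveniences: Interval parses an ISO-8601 "start/end" string directly, and DateTime(long) interprets its argument as milliseconds since the epoch. A minimal, self-contained sketch (the class and variable names are ours):

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalBasics {
    public static void main(String[] args) {
        // ISO-8601 "start/end" string, as used throughout these tests
        Interval day = new Interval("2015-01-01/2015-01-02");
        // epoch millis, as in new DateTime(123L) above
        DateTime t = new DateTime(123L);
        System.out.println(day.contains(t));        // false: 1970 is outside 2015
        System.out.println(day.toDurationMillis()); // length of the interval in millis
    }
}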

Example 73 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class SelectQuerySpecTest, method testSerializationLegacyString.

@Test
public void testSerializationLegacyString() throws Exception {
    String legacy =
        "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"descending\":true,"
        + "\"filter\":null,"
        + "\"granularity\":{\"type\":\"all\"},"
        + "\"dimensions\":[\"market\",\"quality\"],"
        + "\"metrics\":[\"index\"],"
        + "\"virtualColumns\":null,"
        + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3},"
        + "\"context\":null}";
    String current =
        "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"descending\":true,"
        + "\"filter\":null,"
        + "\"granularity\":{\"type\":\"all\"},"
        + "\"dimensions\":"
        + "[{\"type\":\"default\",\"dimension\":\"market\",\"outputName\":\"market\",\"outputType\":\"STRING\"},"
        + "{\"type\":\"default\",\"dimension\":\"quality\",\"outputName\":\"quality\",\"outputType\":\"STRING\"}],"
        + "\"metrics\":[\"index\"],"
        + "\"virtualColumns\":[],"
        + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3,\"fromNext\":true},"
        + "\"context\":null}";
    SelectQuery query = new SelectQuery(
        new TableDataSource(QueryRunnerTestHelper.dataSource),
        new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")),
        true,
        null,
        QueryRunnerTestHelper.allGran,
        DefaultDimensionSpec.toSpec(Arrays.<String>asList("market", "quality")),
        Arrays.<String>asList("index"),
        null,
        new PagingSpec(null, 3, null),
        null
    );
    String actual = objectMapper.writeValueAsString(query);
    Assert.assertEquals(current, actual);
    Assert.assertEquals(query, objectMapper.readValue(actual, SelectQuery.class));
    Assert.assertEquals(query, objectMapper.readValue(legacy, SelectQuery.class));
}
Also used: TableDataSource (io.druid.query.TableDataSource), LegacySegmentSpec (io.druid.query.spec.LegacySegmentSpec), Interval (org.joda.time.Interval), Test (org.junit.Test)
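
The three assertions encode a backward-compatibility contract: the query serializes only to the current JSON form, yet both the current and the legacy forms deserialize back to an equal query. A sketch of that contract as a reusable helper; the helper name is ours, and the calls mirror the assertions above.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;

// Sketch: serialization-compatibility check mirroring the three assertions above.
static <T> void assertJsonCompatibility(ObjectMapper mapper, T expected, String currentJson, String legacyJson, Class<T> clazz) throws Exception {
    Assert.assertEquals(currentJson, mapper.writeValueAsString(expected)); // writes only the current form
    Assert.assertEquals(expected, mapper.readValue(currentJson, clazz));   // reads the current form
    Assert.assertEquals(expected, mapper.readValue(legacyJson, clazz));    // still reads the legacy form
}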

Example 74 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class QuerySegmentSpecTest, method testSerializationSegments.

@Test
public void testSerializationSegments() throws Exception {
    QuerySegmentSpec spec = jsonMapper.convertValue(
        ImmutableMap.<String, Object>of(
            "type", "segments",
            "segments", ImmutableList.<Map<String, Object>>of(
                ImmutableMap.<String, Object>of("itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 0),
                ImmutableMap.<String, Object>of("itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 1),
                ImmutableMap.<String, Object>of("itvl", "2011-11-01/2011-11-10", "ver", "2", "part", 10)
            )
        ),
        QuerySegmentSpec.class
    );
    Assert.assertTrue(spec instanceof MultipleSpecificSegmentSpec);
    Assert.assertEquals(
        ImmutableList.of(new Interval("2011-07-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")),
        spec.getIntervals()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 0),
            new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 1),
            new SegmentDescriptor(new Interval("2011-11-01/2011-11-10"), "2", 10)
        ),
        ((MultipleSpecificSegmentSpec) spec).getDescriptors()
    );
}
Also used: SegmentDescriptor (io.druid.query.SegmentDescriptor), ImmutableMap (com.google.common.collect.ImmutableMap), Map (java.util.Map), Interval (org.joda.time.Interval), Test (org.junit.Test)
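
Note how getIntervals() condenses the three descriptors to their two distinct intervals. The same spec can also be built directly instead of via JSON; a sketch, assuming MultipleSpecificSegmentSpec's constructor takes the descriptor list, as its JSON shape suggests:

import com.google.common.collect.ImmutableList;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.MultipleSpecificSegmentSpec;
import org.joda.time.Interval;

// Sketch: the spec from the test, constructed directly rather than via Jackson.
MultipleSpecificSegmentSpec sameSpec = new MultipleSpecificSegmentSpec(ImmutableList.of(
    new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 0),   // itvl / ver / part
    new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 1),
    new SegmentDescriptor(new Interval("2011-11-01/2011-11-10"), "2", 10)
));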

Example 75 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class SpecificSegmentQueryRunnerTest, method testRetry.

@Test
public void testRetry() throws Exception {
    final ObjectMapper mapper = new DefaultObjectMapper();
    SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
    final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return new Sequence() {

                @Override
                public Object accumulate(Object initValue, Accumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }

                @Override
                public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }
            };
        }
    }, new SpecificSegmentSpec(descriptor));
    // from accumulate
    Map<String, Object> responseContext = Maps.newHashMap();
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("foo")
        .granularity(Granularities.ALL)
        .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
        .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
        .build();
    Sequence results = queryRunner.run(query, responseContext);
    Sequences.toList(results, Lists.newArrayList());
    validate(mapper, descriptor, responseContext);
    // from toYielder
    responseContext = Maps.newHashMap();
    results = queryRunner.run(query, responseContext);
    results.toYielder(null, new YieldingAccumulator() {

        final List lists = Lists.newArrayList();

        @Override
        public Object accumulate(Object accumulated, Object in) {
            lists.add(in);
            return in;
        }
    });
    validate(mapper, descriptor, responseContext);
}
Also used: Accumulator (io.druid.java.util.common.guava.Accumulator), YieldingAccumulator (io.druid.java.util.common.guava.YieldingAccumulator), Yielder (io.druid.java.util.common.guava.Yielder), TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery), Query (io.druid.query.Query), SegmentMissingException (io.druid.segment.SegmentMissingException), Sequence (io.druid.java.util.common.guava.Sequence), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), QueryRunner (io.druid.query.QueryRunner), SegmentDescriptor (io.druid.query.SegmentDescriptor), ImmutableList (com.google.common.collect.ImmutableList), List (java.util.List), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), Map (java.util.Map), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Interval (org.joda.time.Interval), Test (org.junit.Test)
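
The validate helper (not shown on this page) checks the retry contract: SpecificSegmentQueryRunner swallows the SegmentMissingException raised on either code path and records the failed descriptor in the response context, so a caller can re-issue the query for only the missing segments. A condensed sketch of that recording pattern; the "missingSegments" key and the helper itself are illustrative assumptions, not Druid's exact code.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import io.druid.query.SegmentDescriptor;

// Sketch: append a missing segment to the response context under an assumed key.
@SuppressWarnings("unchecked")
static void recordMissingSegment(Map<String, Object> responseContext, SegmentDescriptor descriptor) {
    List<SegmentDescriptor> missing =
        (List<SegmentDescriptor>) responseContext.get("missingSegments"); // assumed key name
    if (missing == null) {
        missing = new ArrayList<>();
        responseContext.put("missingSegments", missing);
    }
    missing.add(descriptor);
}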

Aggregations

Interval (org.joda.time.Interval): 1052
Test (org.junit.Test): 604
DateTime (org.joda.time.DateTime): 316
ArrayList (java.util.ArrayList): 186
DataSegment (org.apache.druid.timeline.DataSegment): 145
DataSegment (io.druid.timeline.DataSegment): 138
List (java.util.List): 134
Map (java.util.Map): 131
HashMap (java.util.HashMap): 106
File (java.io.File): 91
IOException (java.io.IOException): 75
ImmutableList (com.google.common.collect.ImmutableList): 71
ImmutableMap (com.google.common.collect.ImmutableMap): 56
Period (org.joda.time.Period): 56
TreeMap (java.util.TreeMap): 55
ISE (org.apache.druid.java.util.common.ISE): 53
HashSet (java.util.HashSet): 50
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 49
QueryRunner (io.druid.query.QueryRunner): 47
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 45