Use of org.joda.time.Interval in project druid by druid-io.
From the class SegmentMetadataQueryQueryToolChestTest, the method testCacheStrategy:
@Test
public void testCacheStrategy() throws Exception {
  SegmentMetadataQuery query = new SegmentMetadataQuery(
      new TableDataSource("dummy"),
      QuerySegmentSpecs.create("2015-01-01/2015-01-02"),
      null, null, null, null, false, false
  );
  CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> strategy =
      new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(query);
  // Test cache key generation
  byte[] expectedKey = { 0x04, 0x01, (byte) 0xFF, 0x00, 0x02, 0x04 };
  byte[] actualKey = strategy.computeCacheKey(query);
  Assert.assertArrayEquals(expectedKey, actualKey);
  SegmentAnalysis result = new SegmentAnalysis(
      "testSegment",
      ImmutableList.of(new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
      ImmutableMap.of("placement", new ColumnAnalysis(ValueType.STRING.toString(), true, 10881, 1, "preferred", "preferred", null)),
      71982, 100, null, null, null, null
  );
  // Round-trip through the cache representation: prepare, serialize/deserialize, pull back.
  Object preparedValue = strategy.prepareForCache().apply(result);
  ObjectMapper objectMapper = new DefaultObjectMapper();
  SegmentAnalysis fromCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedValue),
      strategy.getCacheObjectClazz()
  );
  SegmentAnalysis fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
  Assert.assertEquals(result, fromCacheResult);
}
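Both tool-chest tests on this page follow the same prepare/serialize/deserialize/pull pattern to prove that the cache representation is lossless. A minimal standalone sketch of that round trip using only Jackson; the class and method names (CacheRoundTrip, roundTrip) are hypothetical and not part of Druid:

import com.fasterxml.jackson.databind.ObjectMapper;

public final class CacheRoundTrip {
  // Write a prepared value to bytes and read it back, mimicking what a cache
  // does to a stored entry. The returned object is what pullFromCache receives.
  public static <T> T roundTrip(ObjectMapper mapper, Object prepared, Class<T> clazz) throws Exception {
    byte[] cached = mapper.writeValueAsBytes(prepared);
    return mapper.readValue(cached, clazz);
  }
}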
Use of org.joda.time.Interval in project druid by druid-io.
From the class SearchQueryQueryToolChestTest, the method testCacheStrategy:
@Test
public void testCacheStrategy() throws Exception {
  SearchQuery searchQuery = new SearchQuery(
      new TableDataSource("dummy"), null, Granularities.ALL, 1,
      new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
      ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")),
      new FragmentSearchQuerySpec(ImmutableList.of("a", "b")), null, null
  );
  CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy =
      new SearchQueryQueryToolChest(null, null).getCacheStrategy(searchQuery);
  final Result<SearchResultValue> result = new Result<>(
      new DateTime(123L),
      new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a")))
  );
  // Prepare for caching, round-trip through JSON as a cache would, then pull back out.
  Object preparedValue = strategy.prepareForCache().apply(result);
  ObjectMapper objectMapper = new DefaultObjectMapper();
  Object fromCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedValue),
      strategy.getCacheObjectClazz()
  );
  Result<SearchResultValue> fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
  Assert.assertEquals(result, fromCacheResult);
}
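Both cache-strategy tests build their query intervals from Joda-Time's ISO-8601 "start/end" slash notation, which is the subject of this page. A self-contained sketch of that Interval API, independent of Druid (IntervalDemo is a hypothetical class name):

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalDemo {
  public static void main(String[] args) {
    // Interval parses "start/end" in ISO-8601; the end instant is exclusive.
    Interval interval = new Interval("2015-01-01T00:00:00Z/2015-01-02T00:00:00Z");
    DateTime inside = new DateTime("2015-01-01T12:00:00Z");
    System.out.println(interval.contains(inside));             // true
    System.out.println(interval.contains(interval.getEnd()));  // false: end is exclusive
  }
}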
Use of org.joda.time.Interval in project druid by druid-io.
From the class SelectQuerySpecTest, the method testSerializationLegacyString:
@Test
public void testSerializationLegacyString() throws Exception {
  // Older form: dimensions as plain strings, virtualColumns null, no fromNext flag.
  String legacy = "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
      + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
      + "\"descending\":true,"
      + "\"filter\":null,"
      + "\"granularity\":{\"type\":\"all\"},"
      + "\"dimensions\":[\"market\",\"quality\"],"
      + "\"metrics\":[\"index\"],"
      + "\"virtualColumns\":null,"
      + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3},"
      + "\"context\":null}";
  // Current form: dimensions expanded to full DefaultDimensionSpec objects.
  String current = "{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
      + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
      + "\"descending\":true,"
      + "\"filter\":null,"
      + "\"granularity\":{\"type\":\"all\"},"
      + "\"dimensions\":"
      + "[{\"type\":\"default\",\"dimension\":\"market\",\"outputName\":\"market\",\"outputType\":\"STRING\"},"
      + "{\"type\":\"default\",\"dimension\":\"quality\",\"outputName\":\"quality\",\"outputType\":\"STRING\"}],"
      + "\"metrics\":[\"index\"],"
      + "\"virtualColumns\":[],"
      + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3,\"fromNext\":true},"
      + "\"context\":null}";
  SelectQuery query = new SelectQuery(
      new TableDataSource(QueryRunnerTestHelper.dataSource),
      new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")),
      true,
      null,
      QueryRunnerTestHelper.allGran,
      DefaultDimensionSpec.toSpec(Arrays.<String>asList("market", "quality")),
      Arrays.<String>asList("index"),
      null,
      new PagingSpec(null, 3, null),
      null
  );
  // Serialization must produce the current form; deserialization must accept both forms.
  String actual = objectMapper.writeValueAsString(query);
  Assert.assertEquals(current, actual);
  Assert.assertEquals(query, objectMapper.readValue(actual, SelectQuery.class));
  Assert.assertEquals(query, objectMapper.readValue(legacy, SelectQuery.class));
}
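The test pins down a backward-compatibility contract: writing emits only the current JSON shape, while reading accepts both the current and the legacy shape. A generic sketch of the same check, assuming a hypothetical helper assertBackwardCompatible (not a Druid API):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;

public final class SerdeCompat {
  // Serialize the query, then require that both the freshly produced JSON and
  // an older legacy form decode back to an object equal to the original.
  public static <T> void assertBackwardCompatible(ObjectMapper mapper, T query, String legacyJson, Class<T> clazz) throws Exception {
    String currentJson = mapper.writeValueAsString(query);
    Assert.assertEquals(query, mapper.readValue(currentJson, clazz));
    Assert.assertEquals(query, mapper.readValue(legacyJson, clazz));
  }
}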
Use of org.joda.time.Interval in project druid by druid-io.
From the class QuerySegmentSpecTest, the method testSerializationSegments:
@Test
public void testSerializationSegments() throws Exception {
  QuerySegmentSpec spec = jsonMapper.convertValue(
      ImmutableMap.<String, Object>of(
          "type", "segments",
          "segments", ImmutableList.<Map<String, Object>>of(
              ImmutableMap.<String, Object>of("itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 0),
              ImmutableMap.<String, Object>of("itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 1),
              ImmutableMap.<String, Object>of("itvl", "2011-11-01/2011-11-10", "ver", "2", "part", 10)
          )
      ),
      QuerySegmentSpec.class
  );
  Assert.assertTrue(spec instanceof MultipleSpecificSegmentSpec);
  // Three descriptors, but only two distinct intervals: the first two are
  // partitions 0 and 1 of the same interval.
  Assert.assertEquals(
      ImmutableList.of(new Interval("2011-07-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")),
      spec.getIntervals()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 0),
          new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 1),
          new SegmentDescriptor(new Interval("2011-11-01/2011-11-10"), "2", 10)
      ),
      ((MultipleSpecificSegmentSpec) spec).getDescriptors()
  );
}
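Note how the three descriptors collapse to two intervals because two partitions share one. A rough standalone sketch of equality-based interval dedup with Joda-Time (IntervalDedup is a hypothetical name; Druid's real implementation also condenses overlapping intervals):

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import org.joda.time.Interval;

public final class IntervalDedup {
  // Drop duplicate intervals, keeping first-appearance order. Joda's Interval
  // implements equals/hashCode on its endpoints, so a set suffices here.
  public static List<Interval> distinct(List<Interval> intervals) {
    return new ArrayList<>(new LinkedHashSet<>(intervals));
  }
}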
Use of org.joda.time.Interval in project druid by druid-io.
From the class SpecificSegmentQueryRunnerTest, the method testRetry:
@Test
public void testRetry() throws Exception {
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
  // A runner whose sequence fails with SegmentMissingException no matter how it is consumed.
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner() {
        @Override
        public Sequence run(Query query, Map responseContext) {
          return new Sequence() {
            @Override
            public Object accumulate(Object initValue, Accumulator accumulator) {
              throw new SegmentMissingException("FAILSAUCE");
            }

            @Override
            public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator) {
              throw new SegmentMissingException("FAILSAUCE");
            }
          };
        }
      },
      new SpecificSegmentSpec(descriptor)
  );
  // Exception thrown from accumulate: the runner should swallow it and report the missing segment.
  Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .build();
  Sequence results = queryRunner.run(query, responseContext);
  Sequences.toList(results, Lists.newArrayList());
  validate(mapper, descriptor, responseContext);
  // Exception thrown from toYielder: same expectation.
  responseContext = Maps.newHashMap();
  results = queryRunner.run(query, responseContext);
  results.toYielder(null, new YieldingAccumulator() {
    final List lists = Lists.newArrayList();

    @Override
    public Object accumulate(Object accumulated, Object in) {
      lists.add(in);
      return in;
    }
  });
  validate(mapper, descriptor, responseContext);
}
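The validate helper is referenced but not included in this snippet. A plausible sketch of what it asserts, assuming the runner reports failures under a "missingSegments" response-context key (an assumption; the actual helper body is not shown above):

// Hypothetical reconstruction of the validate helper referenced in testRetry.
private void validate(ObjectMapper mapper, SegmentDescriptor descriptor, Map<String, Object> responseContext) throws Exception {
  // Assumption: the missing segment is recorded under "missingSegments".
  List<?> missingSegments = (List<?>) responseContext.get("missingSegments");
  Assert.assertNotNull(missingSegments);
  // Round-trip the reported descriptor through JSON and compare to the original.
  SegmentDescriptor reported = mapper.readValue(
      mapper.writeValueAsString(missingSegments.get(0)),
      SegmentDescriptor.class
  );
  Assert.assertEquals(descriptor, reported);
}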