Use of org.apache.druid.query.Result in project druid by druid-io.
The class SearchQueryQueryToolChest, method getCacheStrategy.
@Override
public CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> getCacheStrategy(final SearchQuery query) {
return new CacheStrategy<Result<SearchResultValue>, Object, SearchQuery>() {
private final List<DimensionSpec> dimensionSpecs =
    query.getDimensions() != null ? query.getDimensions() : Collections.emptyList();
private final List<String> dimOutputNames = dimensionSpecs.size() > 0
    ? Lists.transform(dimensionSpecs, DimensionSpec::getOutputName)
    : Collections.emptyList();
@Override
public boolean isCacheable(SearchQuery query, boolean willMergeRunners) {
return true;
}
@Override
public byte[] computeCacheKey(SearchQuery query) {
final DimFilter dimFilter = query.getDimensionsFilter();
final byte[] filterBytes = dimFilter == null ? new byte[] {} : dimFilter.getCacheKey();
final byte[] querySpecBytes = query.getQuery().getCacheKey();
final byte[] granularityBytes = query.getGranularity().getCacheKey();
final List<DimensionSpec> dimensionSpecs = query.getDimensions() != null ? query.getDimensions() : Collections.emptyList();
final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][];
int dimensionsBytesSize = 0;
int index = 0;
for (DimensionSpec dimensionSpec : dimensionSpecs) {
dimensionsBytes[index] = dimensionSpec.getCacheKey();
dimensionsBytesSize += dimensionsBytes[index].length;
++index;
}
final byte[] sortSpecBytes = query.getSort().getCacheKey();
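// Key layout below: the SEARCH_QUERY type byte, the 4-byte limit, then the granularity,
// filter, query spec, and sort spec cache keys, followed by each dimension spec's cache key.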
final ByteBuffer queryCacheKey = ByteBuffer
    .allocate(
        1 + 4 + granularityBytes.length + filterBytes.length + querySpecBytes.length
            + dimensionsBytesSize + sortSpecBytes.length)
    .put(SEARCH_QUERY)
    .put(Ints.toByteArray(query.getLimit()))
    .put(granularityBytes)
    .put(filterBytes)
    .put(querySpecBytes)
    .put(sortSpecBytes);
for (byte[] bytes : dimensionsBytes) {
queryCacheKey.put(bytes);
}
return queryCacheKey.array();
}
@Override
public byte[] computeResultLevelCacheKey(SearchQuery query) {
return computeCacheKey(query);
}
@Override
public TypeReference<Object> getCacheObjectClazz() {
return OBJECT_TYPE_REFERENCE;
}
@Override
public Function<Result<SearchResultValue>, Object> prepareForCache(boolean isResultLevelCache) {
return new Function<Result<SearchResultValue>, Object>() {
@Override
public Object apply(Result<SearchResultValue> input) {
return dimensionSpecs.size() > 0
    ? Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue(), dimOutputNames)
    : Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue());
}
};
}
@Override
public Function<Object, Result<SearchResultValue>> pullFromCache(boolean isResultLevelCache) {
return new Function<Object, Result<SearchResultValue>>() {
@Override
@SuppressWarnings("unchecked")
public Result<SearchResultValue> apply(Object input) {
List<Object> result = (List<Object>) input;
boolean needsRename = false;
final Map<String, String> outputNameMap = new HashMap<>();
if (hasOutputName(result)) {
List<String> cachedOutputNames = (List) result.get(2);
Preconditions.checkArgument(cachedOutputNames.size() == dimOutputNames.size(), "cache hit, but number of dimensions mismatch");
needsRename = false;
for (int idx = 0; idx < cachedOutputNames.size(); idx++) {
String cachedOutputName = cachedOutputNames.get(idx);
String outputName = dimOutputNames.get(idx);
if (!cachedOutputName.equals(outputName)) {
needsRename = true;
}
outputNameMap.put(cachedOutputName, outputName);
}
}
return !needsRename
    ? new Result<>(
        DateTimes.utc(((Number) result.get(0)).longValue()),
        new SearchResultValue(Lists.transform((List) result.get(1), new Function<Object, SearchHit>() {
          @Override
          public SearchHit apply(@Nullable Object input) {
            if (input instanceof Map) {
              return new SearchHit(
                  (String) ((Map) input).get("dimension"),
                  (String) ((Map) input).get("value"),
                  (Integer) ((Map) input).get("count"));
            } else if (input instanceof SearchHit) {
              return (SearchHit) input;
            } else {
              throw new IAE("Unknown format [%s]", input.getClass());
            }
          }
        })))
    : new Result<>(
        DateTimes.utc(((Number) result.get(0)).longValue()),
        new SearchResultValue(Lists.transform((List) result.get(1), new Function<Object, SearchHit>() {
          @Override
          public SearchHit apply(@Nullable Object input) {
            String dim;
            String val;
            Integer count;
            if (input instanceof Map) {
              dim = outputNameMap.get((String) ((Map) input).get("dimension"));
              val = (String) ((Map) input).get("value");
              count = (Integer) ((Map) input).get("count");
            } else if (input instanceof SearchHit) {
              SearchHit cached = (SearchHit) input;
              dim = outputNameMap.get(cached.getDimension());
              val = cached.getValue();
              count = cached.getCount();
            } else {
              throw new IAE("Unknown format [%s]", input.getClass());
            }
            return new SearchHit(dim, val, count);
          }
        })));
}
};
}
private boolean hasOutputName(List<Object> cachedEntry) {
/*
* A cached entry is a list of two or three objects:
* 1. timestamp
* 2. SearchResultValue
* 3. outputName of each dimension (optional)
*
* If a cached entry has three objects, the dimension names of the SearchResultValue should be
* checked to see whether a rename is needed.
*/
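// For illustration (hypothetical values): an entry written by prepareForCache above looks like
// [1301616000000L, <search hits>, ["dim1", "dim2"]] when dimension specs were present,
// or just [1301616000000L, <search hits>] when they were not.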
return cachedEntry.size() == 3;
}
};
}
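A minimal sketch of how such a strategy is typically exercised, mirroring the timeseries cache test further down. This is not code from the project above: toolChest, query, and result are assumed to already exist, and a plain Jackson ObjectMapper stands in for the real cache's serialization layer.
// Assumed: toolChest is a SearchQueryQueryToolChest, query is a SearchQuery, and
// result is a Result<SearchResultValue>. The Jackson calls throw IOException, so
// this would live in a method that declares it.
CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy = toolChest.getCacheStrategy(query);
byte[] segmentCacheKey = strategy.computeCacheKey(query);
// Segment-level entry: [timestampMillis, value, (output names when dimensions were requested)].
Object prepared = strategy.prepareForCache(false).apply(result);
ObjectMapper mapper = new ObjectMapper();
Object cached = mapper.readValue(mapper.writeValueAsBytes(prepared), strategy.getCacheObjectClazz());
Result<SearchResultValue> restored = strategy.pullFromCache(false).apply(cached);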
Use of org.apache.druid.query.Result in project druid by druid-io.
The class StringColumnAggregationTest, method testTimeseries.
@Test
public void testTimeseries() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource("test")
    .granularity(Granularities.ALL)
    .intervals("1970/2050")
    .aggregators(
        new DoubleSumAggregatorFactory("singleDoubleSum", singleValue),
        new DoubleSumAggregatorFactory("multiDoubleSum", multiValue),
        new DoubleMaxAggregatorFactory("singleDoubleMax", singleValue),
        new DoubleMaxAggregatorFactory("multiDoubleMax", multiValue),
        new DoubleMinAggregatorFactory("singleDoubleMin", singleValue),
        new DoubleMinAggregatorFactory("multiDoubleMin", multiValue),
        new FloatSumAggregatorFactory("singleFloatSum", singleValue),
        new FloatSumAggregatorFactory("multiFloatSum", multiValue),
        new FloatMaxAggregatorFactory("singleFloatMax", singleValue),
        new FloatMaxAggregatorFactory("multiFloatMax", multiValue),
        new FloatMinAggregatorFactory("singleFloatMin", singleValue),
        new FloatMinAggregatorFactory("multiFloatMin", multiValue),
        new LongSumAggregatorFactory("singleLongSum", singleValue),
        new LongSumAggregatorFactory("multiLongSum", multiValue),
        new LongMaxAggregatorFactory("singleLongMax", singleValue),
        new LongMaxAggregatorFactory("multiLongMax", multiValue),
        new LongMinAggregatorFactory("singleLongMin", singleValue),
        new LongMinAggregatorFactory("multiLongMin", multiValue),
        new LongSumAggregatorFactory("count", "count"))
    .build();
Sequence seq = AggregationTestHelper
    .createTimeseriesQueryAggregationTestHelper(Collections.emptyList(), tempFolder)
    .runQueryOnSegmentsObjs(segments, query);
TimeseriesResultValue result = ((Result<TimeseriesResultValue>) Iterables.getOnlyElement(seq.toList())).getValue();
Assert.assertEquals(numRows, result.getLongMetric("count").longValue());
Assert.assertEquals(singleValueSum, result.getDoubleMetric("singleDoubleSum").doubleValue(), 0.0001d);
Assert.assertEquals(multiValueSum, result.getDoubleMetric("multiDoubleSum").doubleValue(), 0.0001d);
Assert.assertEquals(singleValueMax, result.getDoubleMetric("singleDoubleMax").doubleValue(), 0.0001d);
Assert.assertEquals(multiValueMax, result.getDoubleMetric("multiDoubleMax").doubleValue(), 0.0001d);
Assert.assertEquals(singleValueMin, result.getDoubleMetric("singleDoubleMin").doubleValue(), 0.0001d);
Assert.assertEquals(multiValueMin, result.getDoubleMetric("multiDoubleMin").doubleValue(), 0.0001d);
Assert.assertEquals(singleValueSum, result.getFloatMetric("singleFloatSum").floatValue(), 0.0001f);
Assert.assertEquals(multiValueSum, result.getFloatMetric("multiFloatSum").floatValue(), 0.0001f);
Assert.assertEquals(singleValueMax, result.getFloatMetric("singleFloatMax").floatValue(), 0.0001f);
Assert.assertEquals(multiValueMax, result.getFloatMetric("multiFloatMax").floatValue(), 0.0001f);
Assert.assertEquals(singleValueMin, result.getFloatMetric("singleFloatMin").floatValue(), 0.0001f);
Assert.assertEquals(multiValueMin, result.getFloatMetric("multiFloatMin").floatValue(), 0.0001f);
Assert.assertEquals((long) singleValueSum, result.getLongMetric("singleLongSum").longValue());
Assert.assertEquals((long) multiValueSum, result.getLongMetric("multiLongSum").longValue());
Assert.assertEquals((long) singleValueMax, result.getLongMetric("singleLongMax").longValue());
Assert.assertEquals((long) multiValueMax, result.getLongMetric("multiLongMax").longValue());
Assert.assertEquals((long) singleValueMin, result.getLongMetric("singleLongMin").longValue());
Assert.assertEquals((long) multiValueMin, result.getLongMetric("multiLongMin").longValue());
}
Use of org.apache.druid.query.Result in project druid by druid-io.
The class TimeseriesQueryQueryToolChestTest, method testCacheStrategy.
@Test
public void testCacheStrategy() throws Exception {
CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy = TOOL_CHEST.getCacheStrategy(
    new TimeseriesQuery(
        new TableDataSource("dummy"),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
        descending,
        VirtualColumns.EMPTY,
        null,
        Granularities.ALL,
        ImmutableList.of(
            new CountAggregatorFactory("metric1"),
            new LongSumAggregatorFactory("metric0", "metric0"),
            new StringLastAggregatorFactory("complexMetric", "test", null, null)),
        ImmutableList.of(new ConstantPostAggregator("post", 10)),
        0,
        null));
// test timestamps that result in integer size millis
final Result<TimeseriesResultValue> result1 = new Result<>(
    DateTimes.utc(123L),
    new TimeseriesResultValue(
        ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", new SerializablePairLongString(123L, "val1"))));
Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result1);
ObjectMapper objectMapper = TestHelper.makeJsonMapper();
Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
Result<TimeseriesResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
Assert.assertEquals(result1, fromCacheResult);
// test timestamps that result in integer size millis
final Result<TimeseriesResultValue> result2 = new Result<>(
    DateTimes.utc(123L),
    new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10)));
Object preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result2);
Object fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
Result<TimeseriesResultValue> fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
Assert.assertEquals(result2, fromResultLevelCacheRes);
// null timestamp similar to grandTotal
final Result<TimeseriesResultValue> result3 = new Result<>(
    null,
    new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10)));
preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result3);
fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
Assert.assertEquals(result3, fromResultLevelCacheRes);
}
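Note the asymmetry this test exercises: the segment-level round trip (prepareForSegmentLevelCache / pullFromSegmentLevelCache) caches complexMetric in its intermediate SerializablePairLongString form with no post-aggregators, while the result-level round trip (prepareForCache(true) / pullFromCache(true)) caches the finalized string value together with the post-aggregated "post" column. The result3 case further shows that a null timestamp, as produced for grand-total rows, survives the result-level round trip.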
Use of org.apache.druid.query.Result in project druid by druid-io.
The class TimeseriesQueryRunnerTest, method testTimeseriesWithLimit.
@Test
public void testTimeseriesWithLimit() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.DAY_GRAN)
    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
    .aggregators(Arrays.asList(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.QUALITY_UNIQUES))
    .descending(descending)
    .limit(10)
    .context(makeContext())
    .build();
// Must create a toolChest so we can run mergeResults.
QueryToolChest<Result<TimeseriesResultValue>, TimeseriesQuery> toolChest = new TimeseriesQueryQueryToolChest();
// Must be wrapped in a results finalizer to stop the runner's built-in finalizer from being called.
final FinalizeResultsQueryRunner finalRunner = new FinalizeResultsQueryRunner(toolChest.mergeResults(runner), toolChest);
final List list = finalRunner.run(QueryPlus.wrap(query)).toList();
Assert.assertEquals(10, list.size());
}
Use of org.apache.druid.query.Result in project druid by druid-io.
The class TimeseriesQueryRunnerTest, method testTimeseriesWithInvertedFilterOnNonExistentDimension.
@Test
public void testTimeseriesWithInvertedFilterOnNonExistentDimension() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.DAY_GRAN)
    .filters(new NotDimFilter(new SelectorDimFilter("bobby", "sally", null)))
    .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .aggregators(aggregatorFactoryList)
    .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
    .descending(descending)
    .context(makeContext())
    .build();
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        DateTimes.of("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.of(
            "rows", 13L,
            "index", 6626.151596069336,
            "addRowsIndexConstant", 6640.151596069336,
            "uniques", QueryRunnerTestHelper.UNIQUES_9))),
    new Result<>(
        DateTimes.of("2011-04-02"),
        new TimeseriesResultValue(ImmutableMap.of(
            "rows", 13L,
            "index", 5833.2095947265625,
            "addRowsIndexConstant", 5847.2095947265625,
            "uniques", QueryRunnerTestHelper.UNIQUES_9))));
Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
assertExpectedResults(expectedResults, results);
}