Use of io.druid.query.search.search.SearchHit in project druid by druid-io.
In the class AppendTest, method testFilteredSearch:
@Test
public void testFilteredSearch()
{
  List<Result<SearchResultValue>> expectedResults = Arrays.asList(
      new Result<SearchResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new SearchResultValue(
              Arrays.<SearchHit>asList(
                  new SearchHit(placementDimension, "mezzanine"),
                  new SearchHit(marketDimension, "total_market")
              )
          )
      )
  );
  SearchQuery query = makeFilteredSearchQuery();
  QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment);
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
}
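The helper makeFilteredSearchQuery() lives elsewhere in AppendTest and is not shown here. As a rough sketch of what such a filtered search query could look like, using Druid's Druids.newSearchQueryBuilder() (the dataSource, allGran, and fullOnInterval fields, the filter value, and the search string are all assumptions, not the test's actual values):

// Hypothetical reconstruction -- not the actual AppendTest helper.
private SearchQuery makeFilteredSearchQuery()
{
  return Druids.newSearchQueryBuilder()
               .dataSource(dataSource)                    // assumed test datasource field
               .filters(marketDimension, "total_market")  // restrict search to one dimension value
               .granularity(allGran)                      // assumed "all" granularity field
               .intervals(fullOnInterval)                 // assumed full segment interval field
               .query("a")                                // assumed substring to search for
               .build();
}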
Use of io.druid.query.search.search.SearchHit in project druid by druid-io.
In the class SearchBenchmark, method querySingleIncrementalIndex:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception
{
  QueryRunner<SearchHit> runner = QueryBenchmarkUtil.makeQueryRunner(
      factory,
      "incIndex",
      new IncrementalIndexSegment(incIndexes.get(0), "incIndex")
  );
  List<Result<SearchResultValue>> results = SearchBenchmark.runQuery(factory, runner, query);
  List<SearchHit> hits = results.get(0).getValue().getValue();
  for (SearchHit hit : hits) {
    blackhole.consume(hit);
  }
}
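SearchBenchmark.runQuery is not part of this excerpt. A plausible sketch of its shape, assuming it follows the same decorate/merge/finalize pipeline used in queryMultiQueryableIndex below:

// Assumed shape of the benchmark's runQuery helper: decorate the runner,
// merge results, finalize, and materialize the lazy sequence into a list.
private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
{
  QueryToolChest toolChest = factory.getToolchest();
  QueryRunner<T> theRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)),
      toolChest
  );
  Sequence<T> queryResult = theRunner.run(query, Maps.<String, Object>newHashMap());
  return Sequences.toList(queryResult, Lists.<T>newArrayList());
}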
Use of io.druid.query.search.search.SearchHit in project druid by druid-io.
In the class SearchBenchmark, method queryMultiQueryableIndex:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception
{
  // Element type is Result<SearchResultValue>, not Row: this is a search
  // query, so each per-segment runner produces search results.
  List<QueryRunner<Result<SearchResultValue>>> singleSegmentRunners = Lists.newArrayList();
  QueryToolChest toolChest = factory.getToolchest();
  for (int i = 0; i < numSegments; i++) {
    String segmentName = "qIndex" + i;
    final QueryRunner<Result<SearchResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
        factory,
        segmentName,
        new QueryableIndexSegment(segmentName, qIndexes.get(i))
    );
    singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
  }

  QueryRunner theRunner = toolChest.postMergeQueryDecoration(
      new FinalizeResultsQueryRunner<>(
          toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)),
          toolChest
      )
  );

  Sequence<Result<SearchResultValue>> queryResult = theRunner.run(query, Maps.<String, Object>newHashMap());
  List<Result<SearchResultValue>> results =
      Sequences.toList(queryResult, Lists.<Result<SearchResultValue>>newArrayList());

  for (Result<SearchResultValue> result : results) {
    List<SearchHit> hits = result.getValue().getValue();
    for (SearchHit hit : hits) {
      blackhole.consume(hit);
    }
  }
}
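For readability, the nested construction above applies the toolchest decorations in this order (a restatement of the code, not additional API):

// 1. toolChest.preMergeQueryDecoration(runner)     -- wraps each per-segment runner
// 2. factory.mergeRunners(executorService, ...)    -- combines the per-segment runners
// 3. toolChest.mergeResults(...)                   -- merges result sequences across segments
// 4. new FinalizeResultsQueryRunner<>(...)         -- finalizes the merged results
// 5. toolChest.postMergeQueryDecoration(...)       -- applies final toolchest wrapping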
Use of io.druid.query.search.search.SearchHit in project druid by druid-io.
In the class SearchQueryQueryToolChest, method getCacheStrategy:
@Override
public CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> getCacheStrategy(final SearchQuery query)
{
  return new CacheStrategy<Result<SearchResultValue>, Object, SearchQuery>()
  {
    private final List<DimensionSpec> dimensionSpecs =
        query.getDimensions() != null ? query.getDimensions() : Collections.<DimensionSpec>emptyList();

    private final List<String> dimOutputNames = dimensionSpecs.size() > 0
        ? Lists.transform(
            dimensionSpecs,
            new Function<DimensionSpec, String>()
            {
              @Override
              public String apply(DimensionSpec input)
              {
                return input.getOutputName();
              }
            }
        )
        : Collections.<String>emptyList();

    @Override
    public boolean isCacheable(SearchQuery query, boolean willMergeRunners)
    {
      return true;
    }

    @Override
    public byte[] computeCacheKey(SearchQuery query)
    {
      final DimFilter dimFilter = query.getDimensionsFilter();
      final byte[] filterBytes = dimFilter == null ? new byte[]{} : dimFilter.getCacheKey();
      final byte[] querySpecBytes = query.getQuery().getCacheKey();
      final byte[] granularityBytes = query.getGranularity().getCacheKey();

      final List<DimensionSpec> dimensionSpecs =
          query.getDimensions() != null ? query.getDimensions() : Collections.<DimensionSpec>emptyList();
      final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][];
      int dimensionsBytesSize = 0;
      int index = 0;
      for (DimensionSpec dimensionSpec : dimensionSpecs) {
        dimensionsBytes[index] = dimensionSpec.getCacheKey();
        dimensionsBytesSize += dimensionsBytes[index].length;
        ++index;
      }

      final byte[] sortSpecBytes = query.getSort().getCacheKey();
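      // Resulting key layout, in write order:
      //   SEARCH_QUERY type byte | limit (4-byte int) | granularity key
      //   | filter key (empty when no filter) | query spec key | sort spec key
      //   | dimension spec keys, in dimension order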
      final ByteBuffer queryCacheKey = ByteBuffer
          .allocate(
              1 + 4 + granularityBytes.length + filterBytes.length
              + querySpecBytes.length + dimensionsBytesSize + sortSpecBytes.length
          )
          .put(SEARCH_QUERY)
          .put(Ints.toByteArray(query.getLimit()))
          .put(granularityBytes)
          .put(filterBytes)
          .put(querySpecBytes)
          .put(sortSpecBytes);

      for (byte[] bytes : dimensionsBytes) {
        queryCacheKey.put(bytes);
      }
      return queryCacheKey.array();
    }
    @Override
    public TypeReference<Object> getCacheObjectClazz()
    {
      return OBJECT_TYPE_REFERENCE;
    }

    @Override
    public Function<Result<SearchResultValue>, Object> prepareForCache()
    {
      return new Function<Result<SearchResultValue>, Object>()
      {
        @Override
        public Object apply(Result<SearchResultValue> input)
        {
          return dimensionSpecs.size() > 0
                 ? Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue(), dimOutputNames)
                 : Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue());
        }
      };
    }

    @Override
    public Function<Object, Result<SearchResultValue>> pullFromCache()
    {
      return new Function<Object, Result<SearchResultValue>>()
      {
        @Override
        @SuppressWarnings("unchecked")
        public Result<SearchResultValue> apply(Object input)
        {
          List<Object> result = (List<Object>) input;
          boolean needsRename = false;
          final Map<String, String> outputNameMap = Maps.newHashMap();
          if (hasOutputName(result)) {
            List<String> cachedOutputNames = (List) result.get(2);
            Preconditions.checkArgument(
                cachedOutputNames.size() == dimOutputNames.size(),
                "cache hit, but number of dimensions mismatch"
            );
            for (int idx = 0; idx < cachedOutputNames.size(); idx++) {
              String cachedOutputName = cachedOutputNames.get(idx);
              String outputName = dimOutputNames.get(idx);
              if (!cachedOutputName.equals(outputName)) {
                needsRename = true;
              }
              outputNameMap.put(cachedOutputName, outputName);
            }
          }

          return !needsRename
                 ? new Result<>(
                     new DateTime(((Number) result.get(0)).longValue()),
                     new SearchResultValue(
                         Lists.transform(
                             (List) result.get(1),
                             new Function<Object, SearchHit>()
                             {
                               @Override
                               public SearchHit apply(@Nullable Object input)
                               {
                                 if (input instanceof Map) {
                                   return new SearchHit(
                                       (String) ((Map) input).get("dimension"),
                                       (String) ((Map) input).get("value"),
                                       (Integer) ((Map) input).get("count")
                                   );
                                 } else if (input instanceof SearchHit) {
                                   return (SearchHit) input;
                                 } else {
                                   throw new IAE("Unknown format [%s]", input.getClass());
                                 }
                               }
                             }
                         )
                     )
                 )
                 : new Result<>(
                     new DateTime(((Number) result.get(0)).longValue()),
                     new SearchResultValue(
                         Lists.transform(
                             (List) result.get(1),
                             new Function<Object, SearchHit>()
                             {
                               @Override
                               public SearchHit apply(@Nullable Object input)
                               {
                                 String dim = null;
                                 String val = null;
                                 Integer cnt = null;
                                 if (input instanceof Map) {
                                   dim = outputNameMap.get((String) ((Map) input).get("dimension"));
                                   val = (String) ((Map) input).get("value");
                                   cnt = (Integer) ((Map) input).get("count");
                                 } else if (input instanceof SearchHit) {
                                   SearchHit cached = (SearchHit) input;
                                   dim = outputNameMap.get(cached.getDimension());
                                   val = cached.getValue();
                                   cnt = cached.getCount();
                                 } else {
                                   throw new IAE("Unknown format [%s]", input.getClass());
                                 }
                                 return new SearchHit(dim, val, cnt);
                               }
                             }
                         )
                     )
                 );
        }
      };
    }

    private boolean hasOutputName(List<Object> cachedEntry)
    {
      /*
       * A cached entry is a list of two or three objects:
       *   1. timestamp
       *   2. SearchResultValue
       *   3. output name of each dimension (optional)
       *
       * If a cached entry has three objects, the dimension names in the
       * SearchResultValue must be checked to see whether a rename is needed.
       */
      return cachedEntry.size() == 3;
    }
  };
}
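A short usage sketch of the strategy's cache round trip (variable names like searchQuery and liveResult are illustrative; the calls mirror the methods defined above):

// Compute the key, serialize a result for the cache, then restore it;
// pullFromCache renames dimensions when the cached output names differ.
CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy =
    toolChest.getCacheStrategy(searchQuery);
byte[] cacheKey = strategy.computeCacheKey(searchQuery);
Object toCache = strategy.prepareForCache().apply(liveResult);
Result<SearchResultValue> restored = strategy.pullFromCache().apply(toCache);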
Use of io.druid.query.search.search.SearchHit in project druid by druid-io.
In the class CachingClusteredClientTest, method makeSearchResults:
private Iterable<Result<SearchResultValue>> makeSearchResults(String dim, Object... objects)
{
  List<Result<SearchResultValue>> retVal = Lists.newArrayList();
  int index = 0;
  while (index < objects.length) {
    DateTime timestamp = (DateTime) objects[index++];
    List<SearchHit> values = Lists.newArrayList();
    while (index < objects.length && !(objects[index] instanceof DateTime)) {
      values.add(new SearchHit(dim, objects[index++].toString(), (Integer) objects[index++]));
    }
    retVal.add(new Result<>(timestamp, new SearchResultValue(values)));
  }
  return retVal;
}
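The varargs convention: a DateTime starts a new result, and each following (value, count) pair becomes a SearchHit until the next DateTime. For example (values are illustrative):

// Builds two results: one hit at the first timestamp, two at the second.
Iterable<Result<SearchResultValue>> results = makeSearchResults(
    "market",
    new DateTime("2011-01-01"), "spot", 9,
    new DateTime("2011-01-02"), "spot", 5, "total_market", 3
);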