Use of io.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.
From the class SegmentMetadataQueryRunnerFactory, method mergeRunners:
@Override
public QueryRunner<SegmentAnalysis> mergeRunners(
    ExecutorService exec,
    Iterable<QueryRunner<SegmentAnalysis>> queryRunners
)
{
  final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(exec);
  return new ConcatQueryRunner<SegmentAnalysis>(
      Sequences.map(
          Sequences.simple(queryRunners),
          new Function<QueryRunner<SegmentAnalysis>, QueryRunner<SegmentAnalysis>>()
          {
            @Override
            public QueryRunner<SegmentAnalysis> apply(final QueryRunner<SegmentAnalysis> input)
            {
              return new QueryRunner<SegmentAnalysis>()
              {
                @Override
                public Sequence<SegmentAnalysis> run(
                    final Query<SegmentAnalysis> query,
                    final Map<String, Object> responseContext
                )
                {
                  final int priority = BaseQuery.getContextPriority(query, 0);
                  // Submit the per-segment work as a prioritized callable on the listening executor.
                  ListenableFuture<Sequence<SegmentAnalysis>> future = queryExecutor.submit(
                      new AbstractPrioritizedCallable<Sequence<SegmentAnalysis>>(priority)
                      {
                        @Override
                        public Sequence<SegmentAnalysis> call() throws Exception
                        {
                          // Materialize the results eagerly so the computation happens on this executor.
                          return Sequences.simple(
                              Sequences.toList(input.run(query, responseContext), new ArrayList<SegmentAnalysis>())
                          );
                        }
                      }
                  );
                  try {
                    queryWatcher.registerQuery(query, future);
                    final Number timeout = query.getContextValue(QueryContextKeys.TIMEOUT, (Number) null);
                    return timeout == null ? future.get() : future.get(timeout.longValue(), TimeUnit.MILLISECONDS);
                  }
                  catch (InterruptedException e) {
                    log.warn(e, "Query interrupted, cancelling pending results, query id [%s]", query.getId());
                    future.cancel(true);
                    throw new QueryInterruptedException(e);
                  }
                  catch (CancellationException e) {
                    throw new QueryInterruptedException(e);
                  }
                  catch (TimeoutException e) {
                    log.info("Query timeout, cancelling pending results for query id [%s]", query.getId());
                    future.cancel(true);
                    throw new QueryInterruptedException(e);
                  }
                  catch (ExecutionException e) {
                    throw Throwables.propagate(e.getCause());
                  }
                }
              };
            }
          }
      )
  );
}
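This runner honors two settings from the query context: a priority, read through BaseQuery.getContextPriority(query, 0), and a timeout in milliseconds, read through QueryContextKeys.TIMEOUT. A minimal usage sketch, assuming a mergedRunner obtained from the factory above (the runner variable and the context values are illustrative):

// Illustrative sketch: "priority" and "timeout" are the context keys read above.
SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
    .dataSource("testing")
    .intervals("2013/2014")
    .context(ImmutableMap.<String, Object>of(
        "priority", 10,   // consumed by BaseQuery.getContextPriority(query, 0)
        "timeout", 5000   // milliseconds, consumed via QueryContextKeys.TIMEOUT
    ))
    .build();
// The merged runner submits each per-segment runner at the given priority and
// cancels the pending future if the timeout elapses.
Sequence<SegmentAnalysis> results = mergedRunner.run(query, Maps.<String, Object>newHashMap());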
Use of io.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.
From the class SegmentAnalyzerTest, method testIncrementalWorksHelper:
private void testIncrementalWorksHelper(EnumSet<SegmentMetadataQuery.AnalysisType> analyses) throws Exception
{
  final List<SegmentAnalysis> results = getSegmentAnalysises(
      new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), null),
      analyses
  );
  Assert.assertEquals(1, results.size());
  final SegmentAnalysis analysis = results.get(0);
  Assert.assertEquals(null, analysis.getId());
  final Map<String, ColumnAnalysis> columns = analysis.getColumns();
  Assert.assertEquals(TestIndex.COLUMNS.length, columns.size());
  for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) {
    final String dimension = schema.getName();
    final ColumnAnalysis columnAnalysis = columns.get(dimension);
    final boolean isString = schema.getValueType().name().equals(ValueType.STRING.name());
    Assert.assertEquals(dimension, schema.getValueType().name(), columnAnalysis.getType());
    Assert.assertEquals(dimension, 0, columnAnalysis.getSize());
    if (isString) {
      if (analyses == null) {
        // A null analysis set means the default analysis types apply, so cardinality is computed.
        Assert.assertTrue(dimension, columnAnalysis.getCardinality() > 0);
      } else {
        Assert.assertEquals(dimension, 0, columnAnalysis.getCardinality().longValue());
      }
    } else {
      Assert.assertNull(dimension, columnAnalysis.getCardinality());
    }
  }
  for (String metric : TestIndex.METRICS) {
    final ColumnAnalysis columnAnalysis = columns.get(metric);
    Assert.assertEquals(metric, ValueType.FLOAT.name(), columnAnalysis.getType());
    Assert.assertEquals(metric, 0, columnAnalysis.getSize());
    Assert.assertNull(metric, columnAnalysis.getCardinality());
  }
}
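In the test suite this helper is exercised with both a null analysis set and an explicit one; a sketch of such a caller, where the test name and the empty-set argument are assumptions:

@Test
public void testIncrementalWorks() throws Exception
{
  // Null means "use the default analysis types", so string cardinality is computed (> 0).
  testIncrementalWorksHelper(null);
  // An empty analysis set skips cardinality analysis, so string columns report 0 instead.
  testIncrementalWorksHelper(EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class));
}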
Use of io.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.
From the class SegmentMetadataQueryQueryToolChestTest, method testMergeAggregatorsAllNull:
@Test
public void testMergeAggregatorsAllNull()
{
  final SegmentAnalysis analysis1 = new SegmentAnalysis(
      "id", null, Maps.<String, ColumnAnalysis>newHashMap(), 0, 0, null, null, null, null
  );
  final SegmentAnalysis analysis2 = new SegmentAnalysis(
      "id", null, Maps.<String, ColumnAnalysis>newHashMap(), 0, 0, null, null, null, null
  );
  Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
  Assert.assertNull(mergeLenient(analysis1, analysis2).getAggregators());
}
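mergeStrict and mergeLenient are private helpers of this test class; a plausible sketch of what they wrap, assuming SegmentMetadataQueryQueryToolChest exposes static mergeAnalyses and finalizeAnalysis methods with a boolean lenient-aggregator-merge flag:

// Assumed helpers: both delegate to the tool chest, differing only in whether
// conflicting aggregators are tolerated (the trailing boolean flag).
private static SegmentAnalysis mergeStrict(SegmentAnalysis analysis1, SegmentAnalysis analysis2)
{
  return SegmentMetadataQueryQueryToolChest.finalizeAnalysis(
      SegmentMetadataQueryQueryToolChest.mergeAnalyses(analysis1, analysis2, false)
  );
}

private static SegmentAnalysis mergeLenient(SegmentAnalysis analysis1, SegmentAnalysis analysis2)
{
  return SegmentMetadataQueryQueryToolChest.finalizeAnalysis(
      SegmentMetadataQueryQueryToolChest.mergeAnalyses(analysis1, analysis2, true)
  );
}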
Use of io.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithTimestampSpecMerge:
@Test
public void testSegmentMetadataQueryWithTimestampSpecMerge()
{
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : "testSegment",
      null,
      ImmutableMap.of("placement", new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 0, null, null, null)),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      new TimestampSpec("ds", "auto", null),
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(
          Druids.newSegmentMetadataQueryBuilder()
                .dataSource("testing")
                .intervals("2013/2014")
                .toInclude(new ListColumnIncluderator(Arrays.asList("placement")))
                .analysisTypes(SegmentMetadataQuery.AnalysisType.TIMESTAMPSPEC)
                .merge(true)
                .build(),
          Maps.newHashMap()
      ),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
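The merged TimestampSpec above survives because both segments report the same spec. A small sketch of the underlying behavior, assuming TimestampSpec.mergeTimestampSpec collapses to null on disagreement:

// Identical specs merge to an equal spec.
Assert.assertEquals(
    new TimestampSpec("ds", "auto", null),
    TimestampSpec.mergeTimestampSpec(Lists.newArrayList(
        new TimestampSpec("ds", "auto", null),
        new TimestampSpec("ds", "auto", null)
    ))
);
// Conflicting specs (a different timestamp column here) merge to null.
Assert.assertNull(TimestampSpec.mergeTimestampSpec(Lists.newArrayList(
    new TimestampSpec("ds", "auto", null),
    new TimestampSpec("timestamp", "iso", null)
)));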
Use of io.druid.query.metadata.metadata.SegmentAnalysis in project druid by druid-io.
From the class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithAggregatorsMerge:
@Test
public void testSegmentMetadataQueryWithAggregatorsMerge()
{
  final Map<String, AggregatorFactory> expectedAggregators = Maps.newHashMap();
  for (AggregatorFactory agg : TestIndex.METRIC_AGGS) {
    expectedAggregators.put(agg.getName(), agg.getCombiningFactory());
  }
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : "testSegment",
      null,
      ImmutableMap.of("placement", new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 0, null, null, null)),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      expectedAggregators,
      null,
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(
          Druids.newSegmentMetadataQueryBuilder()
                .dataSource("testing")
                .intervals("2013/2014")
                .toInclude(new ListColumnIncluderator(Arrays.asList("placement")))
                .analysisTypes(SegmentMetadataQuery.AnalysisType.AGGREGATORS)
                .merge(true)
                .build(),
          Maps.newHashMap()
      ),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
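The expected map above holds each aggregator's combining factory rather than the original, because a merged analysis describes how to combine already-aggregated values. A one-line illustration with a count aggregator, assuming CountAggregatorFactory combines by summing:

// Counting rows twice would be wrong; combining partial counts means summing them.
AggregatorFactory count = new CountAggregatorFactory("count");
Assert.assertEquals(new LongSumAggregatorFactory("count", "count"), count.getCombiningFactory());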