
Example 1 with BySegmentResultValueClass

Use of org.apache.druid.query.BySegmentResultValueClass in the druid project by druid-io.

From the class CachingClusteredClientTest, method toFilteredQueryableTimeseriesResults.

private Sequence<Result<TimeseriesResultValue>> toFilteredQueryableTimeseriesResults(
    TimeseriesQuery query,
    List<SegmentId> segmentIds,
    List<Interval> queryIntervals,
    List<Iterable<Result<TimeseriesResultValue>>> results) {
    MultipleSpecificSegmentSpec spec = (MultipleSpecificSegmentSpec) query.getQuerySegmentSpec();
    List<Result<TimeseriesResultValue>> ret = new ArrayList<>();
    for (SegmentDescriptor descriptor : spec.getDescriptors()) {
        // Reconstruct the dummy segment id from the interval index and partition number.
        SegmentId id = SegmentId.dummy(
            StringUtils.format("%s_%s", queryIntervals.indexOf(descriptor.getInterval()), descriptor.getPartitionNumber()));
        int index = segmentIds.indexOf(id);
        if (index != -1) {
            // Deliberately raw: the by-segment wrapper travels through a sequence
            // typed for plain timeseries results, as it does in the caching client.
            Result result = new Result(
                results.get(index).iterator().next().getTimestamp(),
                new BySegmentResultValueClass(
                    Lists.newArrayList(results.get(index)),
                    id.toString(),
                    descriptor.getInterval()));
            ret.add(result);
        } else {
            throw new ISE("Descriptor %s not found in server", id);
        }
    }
    return Sequences.simple(ret);
}
Also used: MultipleSpecificSegmentSpec (org.apache.druid.query.spec.MultipleSpecificSegmentSpec), SegmentId (org.apache.druid.timeline.SegmentId), SegmentDescriptor (org.apache.druid.query.SegmentDescriptor), ArrayList (java.util.ArrayList), BySegmentResultValueClass (org.apache.druid.query.BySegmentResultValueClass), ISE (org.apache.druid.java.util.common.ISE), Result (org.apache.druid.query.Result)
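
For readers new to the wrapper itself, here is a minimal standalone sketch of what the helper above builds: a BySegmentResultValueClass pairs a list of per-segment rows with the segment id and interval they came from. This sketch is not from the Druid test suite; the class name BySegmentWrapperSketch, the segment id, the interval, and the row values are placeholder assumptions.

import java.util.Collections;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.BySegmentResultValueClass;
import org.apache.druid.query.Result;
import org.apache.druid.query.timeseries.TimeseriesResultValue;

public class BySegmentWrapperSketch {
    public static void main(String[] args) {
        // One timeseries row, as a runner would produce it (values are made up).
        Result<TimeseriesResultValue> row = new Result<>(
            DateTimes.of("2011-01-12"),
            new TimeseriesResultValue(Collections.<String, Object>singletonMap("rows", 1L)));
        // Pair the rows with the segment id and interval they were computed from.
        BySegmentResultValueClass<Result<TimeseriesResultValue>> bySegment =
            new BySegmentResultValueClass<>(
                Collections.singletonList(row),
                "example_segment_id", // placeholder segment id
                Intervals.of("2011-01-12/2011-01-13"));
        System.out.println(bySegment.getSegmentId() + " -> " + bySegment.getResults());
    }
}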

Example 2 with BySegmentResultValueClass

Use of org.apache.druid.query.BySegmentResultValueClass in the druid project by druid-io.

From the class TopNQueryRunnerTest, method testTopNBySegmentResults.

@Test
public void testTopNBySegmentResults() {
    TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
        .metric(QueryRunnerTestHelper.dependentPostAggMetric)
        .threshold(4)
        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .aggregators(Lists.newArrayList(Iterables.concat(
            commonAggregators,
            Lists.newArrayList(
                new DoubleMaxAggregatorFactory("maxIndex", "index"),
                new DoubleMinAggregatorFactory("minIndex", "index")))))
        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, QueryRunnerTestHelper.DEPENDENT_POST_AGG)
        // bySegment=true makes the runner return per-segment wrappers instead of merged rows.
        .context(ImmutableMap.of(QueryContexts.FINALIZE_KEY, true, QueryContexts.BY_SEGMENT_KEY, true))
        .build();
    TopNResultValue topNResult = new TopNResultValue(Arrays.<Map<String, Object>>asList(
        ImmutableMap.<String, Object>builder()
            .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
            .put("rows", 186L)
            .put("index", 215679.82879638672D)
            .put("addRowsIndexConstant", 215866.82879638672D)
            .put(QueryRunnerTestHelper.dependentPostAggMetric, 216053.82879638672D)
            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
            .put("maxIndex", 1743.92175D)
            .put("minIndex", 792.3260498046875D)
            .build(),
        ImmutableMap.<String, Object>builder()
            .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront")
            .put("rows", 186L)
            .put("index", 192046.1060180664D)
            .put("addRowsIndexConstant", 192233.1060180664D)
            .put(QueryRunnerTestHelper.dependentPostAggMetric, 192420.1060180664D)
            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
            .put("maxIndex", 1870.061029D)
            .put("minIndex", 545.9906005859375D)
            .build(),
        ImmutableMap.<String, Object>builder()
            .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot")
            .put("rows", 837L)
            .put("index", 95606.57232284546D)
            .put("addRowsIndexConstant", 96444.57232284546D)
            .put(QueryRunnerTestHelper.dependentPostAggMetric, 97282.57232284546D)
            .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
            .put("maxIndex", 277.273533D)
            .put("minIndex", 59.02102279663086D)
            .build()));
    List<Result<BySegmentResultValueClass<Result<TopNResultValue>>>> expectedResults = Collections.singletonList(
        new Result<>(
            DateTimes.of("2011-01-12T00:00:00.000Z"),
            new BySegmentResultValueClass<>(
                Collections.singletonList(new Result<>(DateTimes.of("2011-01-12T00:00:00.000Z"), topNResult)),
                QueryRunnerTestHelper.SEGMENT_ID.toString(),
                Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z"))));
    List<Result<TopNResultValue>> actualResults = runWithMerge(query).toList();
    // Compare the actual by-segment rows against the expected wrapper built above.
    Assert.assertEquals(expectedResults.size(), actualResults.size());
    for (int i = 0; i < actualResults.size(); i++) {
        Assert.assertEquals(expectedResults.get(i), actualResults.get(i));
    }
}
Also used: DoubleMaxAggregatorFactory (org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory), BySegmentResultValueClass (org.apache.druid.query.BySegmentResultValueClass), DoubleMinAggregatorFactory (org.apache.druid.query.aggregation.DoubleMinAggregatorFactory), Result (org.apache.druid.query.Result), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
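
As a hedged aside on consuming such results: with bySegment set, each Result's value is actually a BySegmentResultValueClass even though the sequence is typed for plain TopN rows, so callers cast before reading the per-segment rows. The helper below is an illustrative assumption, not code from the test above; the class name, segment id, and interval are placeholders.

import java.util.Collections;
import java.util.List;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.BySegmentResultValueClass;
import org.apache.druid.query.Result;
import org.apache.druid.query.topn.TopNResultValue;

public class UnwrapTopNBySegment {
    // Pull the per-segment TopN rows out of one result row of a bySegment query.
    @SuppressWarnings("unchecked")
    static List<Result<TopNResultValue>> unwrap(Result<?> bySegmentRow) {
        BySegmentResultValueClass<Result<TopNResultValue>> wrapper =
            (BySegmentResultValueClass<Result<TopNResultValue>>) bySegmentRow.getValue();
        System.out.println("segment " + wrapper.getSegmentId() + " over " + wrapper.getInterval());
        return wrapper.getResults();
    }

    public static void main(String[] args) {
        // Build a fake bySegment row to exercise the helper (placeholder values).
        Result<TopNResultValue> row = new Result<>(
            DateTimes.of("2011-01-12"),
            new TopNResultValue(Collections.emptyList()));
        Result<Object> bySegmentRow = new Result<>(
            DateTimes.of("2011-01-12"),
            new BySegmentResultValueClass<>(
                Collections.singletonList(row),
                "seg_0", // placeholder segment id
                Intervals.of("2011-01-12/2011-01-13")));
        System.out.println(unwrap(bySegmentRow));
    }
}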

Example 3 with BySegmentResultValueClass

Use of org.apache.druid.query.BySegmentResultValueClass in the druid project by druid-io.

From the class SegmentMetadataQueryTest, method testBySegmentResults.

@Test
public void testBySegmentResults() {
    Result<BySegmentResultValue> bySegmentResult = new Result<BySegmentResultValue>(
        expectedSegmentAnalysis1.getIntervals().get(0).getStart(),
        new BySegmentResultValueClass(
            Collections.singletonList(expectedSegmentAnalysis1),
            expectedSegmentAnalysis1.getId(),
            testQuery.getIntervals().get(0)));
    QueryToolChest toolChest = FACTORY.getToolchest();
    QueryRunner singleSegmentQueryRunner = toolChest.preMergeQueryDecoration(runner1);
    ExecutorService exec = Executors.newCachedThreadPool();
    // The bug surfaces only when ordering is used, which happens only when there
    // are two things to compare, hence the same runner is listed twice.
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(FACTORY.mergeRunners(
            Execs.directExecutor(),
            Lists.newArrayList(singleSegmentQueryRunner, singleSegmentQueryRunner))),
        toolChest);
    TestHelper.assertExpectedObjects(
        ImmutableList.of(bySegmentResult, bySegmentResult),
        myRunner.run(QueryPlus.wrap(
            testQuery.withOverriddenContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true)))),
        "failed SegmentMetadata bySegment query");
    exec.shutdownNow();
}
Also used: BySegmentResultValue (org.apache.druid.query.BySegmentResultValue), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), ExecutorService (java.util.concurrent.ExecutorService), BySegmentResultValueClass (org.apache.druid.query.BySegmentResultValueClass), QueryToolChest (org.apache.druid.query.QueryToolChest), QueryRunner (org.apache.druid.query.QueryRunner), Result (org.apache.druid.query.Result), Test (org.junit.Test)
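
One more hedged sketch, on the interface/implementation split this last test leans on: BySegmentResultValue is the read-side interface and BySegmentResultValueClass the concrete carrier, so a result can be declared against the interface while being built from the class, as Result<BySegmentResultValue> is above. The payload string, segment id, and interval below are placeholders, not values from the test.

import java.util.Collections;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.BySegmentResultValue;
import org.apache.druid.query.BySegmentResultValueClass;

public class BySegmentInterfaceSketch {
    public static void main(String[] args) {
        // Declared against the interface, constructed from the concrete class.
        BySegmentResultValue<String> value = new BySegmentResultValueClass<>(
            Collections.singletonList("analysis-row"), // placeholder payload
            "segment_2011-01-12",                      // placeholder segment id
            Intervals.of("2011-01-12/2011-01-13"));
        for (String row : value.getResults()) {
            System.out.println(value.getSegmentId() + ": " + row);
        }
    }
}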

Aggregations

BySegmentResultValueClass (org.apache.druid.query.BySegmentResultValueClass): 3 uses
Result (org.apache.druid.query.Result): 3 uses
Test (org.junit.Test): 2 uses
ArrayList (java.util.ArrayList): 1 use
ExecutorService (java.util.concurrent.ExecutorService): 1 use
ISE (org.apache.druid.java.util.common.ISE): 1 use
BySegmentResultValue (org.apache.druid.query.BySegmentResultValue): 1 use
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 1 use
QueryRunner (org.apache.druid.query.QueryRunner): 1 use
QueryToolChest (org.apache.druid.query.QueryToolChest): 1 use
SegmentDescriptor (org.apache.druid.query.SegmentDescriptor): 1 use
DoubleMaxAggregatorFactory (org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory): 1 use
DoubleMinAggregatorFactory (org.apache.druid.query.aggregation.DoubleMinAggregatorFactory): 1 use
MultipleSpecificSegmentSpec (org.apache.druid.query.spec.MultipleSpecificSegmentSpec): 1 use
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 1 use
SegmentId (org.apache.druid.timeline.SegmentId): 1 use