Use of org.apache.druid.query.scan.ScanResultValue in project druid by druid-io.
The class StreamAppenderatorTest, method testQueryBySegments.
@Test
public void testQueryBySegments() throws Exception {
try (final StreamAppenderatorTester tester = new StreamAppenderatorTester(2, true)) {
final Appenderator appenderator = tester.getAppenderator();
appenderator.startJob();
appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 2), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(1), ir("2000", "foo", 4), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), ir("2001", "foo", 8), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), ir("2001T01", "foo", 16), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), ir("2001T02", "foo", 32), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), ir("2001T03", "foo", 64), Suppliers.ofInstance(Committers.nil()));
// Query1: segment #2
final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
    .dataSource(StreamAppenderatorTester.DATASOURCE)
    .aggregators(
        Arrays.asList(
            new LongSumAggregatorFactory("count", "count"),
            new LongSumAggregatorFactory("met", "met")
        )
    )
    .granularity(Granularities.DAY)
    .intervals(
        new MultipleSpecificSegmentSpec(
            ImmutableList.of(
                new SegmentDescriptor(
                    IDENTIFIERS.get(2).getInterval(),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
                )
            )
        )
    )
    .build();
final List<Result<TimeseriesResultValue>> results1 = QueryPlus.wrap(query1).run(appenderator, ResponseContext.createEmpty()).toList();
Assert.assertEquals("query1", ImmutableList.of(new Result<>(DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)))), results1);
// Query2: segment #2, partial
final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
    .dataSource(StreamAppenderatorTester.DATASOURCE)
    .aggregators(
        Arrays.asList(
            new LongSumAggregatorFactory("count", "count"),
            new LongSumAggregatorFactory("met", "met")
        )
    )
    .granularity(Granularities.DAY)
    .intervals(
        new MultipleSpecificSegmentSpec(
            ImmutableList.of(
                new SegmentDescriptor(
                    Intervals.of("2001/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
                )
            )
        )
    )
    .build();
final List<Result<TimeseriesResultValue>> results2 = QueryPlus.wrap(query2).run(appenderator, ResponseContext.createEmpty()).toList();
Assert.assertEquals("query2", ImmutableList.of(new Result<>(DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)))), results2);
// Query3: segment #2, two disjoint intervals
final TimeseriesQuery query3 = Druids.newTimeseriesQueryBuilder()
    .dataSource(StreamAppenderatorTester.DATASOURCE)
    .aggregators(
        Arrays.asList(
            new LongSumAggregatorFactory("count", "count"),
            new LongSumAggregatorFactory("met", "met")
        )
    )
    .granularity(Granularities.DAY)
    .intervals(
        new MultipleSpecificSegmentSpec(
            ImmutableList.of(
                new SegmentDescriptor(
                    Intervals.of("2001/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
                ),
                new SegmentDescriptor(
                    Intervals.of("2001T03/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
                )
            )
        )
    )
    .build();
final List<Result<TimeseriesResultValue>> results3 = QueryPlus.wrap(query3).run(appenderator, ResponseContext.createEmpty()).toList();
Assert.assertEquals("query3", ImmutableList.of(new Result<>(DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)))), results3);
// Query4: segment #2, same two disjoint intervals, as a scan query
final ScanQuery query4 = Druids.newScanQueryBuilder()
    .dataSource(StreamAppenderatorTester.DATASOURCE)
    .intervals(
        new MultipleSpecificSegmentSpec(
            ImmutableList.of(
                new SegmentDescriptor(
                    Intervals.of("2001/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
                ),
                new SegmentDescriptor(
                    Intervals.of("2001T03/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
                )
            )
        )
    )
    .order(ScanQuery.Order.ASCENDING)
    .batchSize(10)
    .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
    .build();
final List<ScanResultValue> results4 = QueryPlus.wrap(query4).run(appenderator, ResponseContext.createEmpty()).toList();
// 2 segments, 1 row per segment
Assert.assertEquals(2, results4.size());
Assert.assertArrayEquals(
    new String[] { "__time", "dim", "count", "met" },
    results4.get(0).getColumns().toArray()
);
Assert.assertArrayEquals(
    new Object[] { DateTimes.of("2001").getMillis(), "foo", 1L, 8L },
    ((List<Object>) ((List<Object>) results4.get(0).getEvents()).get(0)).toArray()
);
Assert.assertArrayEquals(
    new String[] { "__time", "dim", "count", "met" },
    results4.get(1).getColumns().toArray()
);
Assert.assertArrayEquals(
    new Object[] { DateTimes.of("2001T03").getMillis(), "foo", 1L, 64L },
    ((List<Object>) ((List<Object>) results4.get(1).getEvents()).get(0)).toArray()
);
}
}
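The double cast in the scan assertions above reflects the RESULT_FORMAT_COMPACTED_LIST payload: getEvents() returns a list of rows, and each row is a list of values ordered to match getColumns(). A small helper along these lines makes the unpacking explicit (firstRow is hypothetical, not part of the original test, and assumes the test class's imports):

// Sketch only: unpack the first row of a compacted-list ScanResultValue.
@SuppressWarnings("unchecked")
private static Object[] firstRow(ScanResultValue value) {
    // Each event is a List of column values in getColumns() order.
    final List<List<Object>> rows = (List<List<Object>>) value.getEvents();
    return rows.get(0).toArray();
}

With such a helper, the last assertion could read Assert.assertArrayEquals(new Object[] { DateTimes.of("2001T03").getMillis(), "foo", 1L, 64L }, firstRow(results4.get(1))).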
Use of org.apache.druid.query.scan.ScanResultValue in project druid by druid-io.
The class DoubleStorageTest, method testSelectValues.
@Test
public void testSelectValues() {
QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
    SCAN_QUERY_RUNNER_FACTORY,
    SEGMENT_ID,
    new QueryableIndexSegment(index, SEGMENT_ID),
    null
);
ScanQuery query = newTestQuery().intervals(new LegacySegmentSpec(INTERVAL)).virtualColumns().build();
Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
ScanResultValue expectedScanResult = new ScanResultValue(
    SEGMENT_ID.toString(),
    ImmutableList.of(TIME_COLUMN, DIM_NAME, DIM_FLOAT_NAME),
    getStreamOfEvents().collect(Collectors.toList())
);
List<ScanResultValue> expectedResults = Collections.singletonList(expectedScanResult);
ScanQueryRunnerTest.verify(expectedResults, results);
}
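For context, assuming newTestQuery() uses the list result format, each event produced by getStreamOfEvents() would be a Map from column name to value, one per row. The standalone sketch below shows only that shape; EventShapeSketch, its column names, and its values are all invented for illustration:

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import java.util.stream.Stream;

public class EventShapeSketch {
    public static void main(String[] args) {
        // One Map per row, keyed by column name; values here are made up.
        Stream<Map<String, Object>> events = Stream.of(
            ImmutableMap.of("__time", 0L, "measureDouble", 1.25d)
        );
        events.forEach(System.out::println);
    }
}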
Use of org.apache.druid.query.scan.ScanResultValue in project druid by druid-io.
The class SetAndVerifyContextQueryRunnerTest, method testTimeoutDefaultTooBigAndOverflows.
@Test
public void testTimeoutDefaultTooBigAndOverflows() {
Query<ScanResultValue> query = new Druids.ScanQueryBuilder()
    .dataSource("foo")
    .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.ETERNITY)))
    .build();
ServerConfig defaultConfig = new ServerConfig() {
@Override
public long getDefaultQueryTimeout() {
return Long.MAX_VALUE;
}
};
QueryRunner<ScanResultValue> mockRunner = EasyMock.createMock(QueryRunner.class);
SetAndVerifyContextQueryRunner<ScanResultValue> queryRunner = new SetAndVerifyContextQueryRunner<>(defaultConfig, mockRunner);
Query<ScanResultValue> transformed = queryRunner.withTimeoutAndMaxScatterGatherBytes(query, defaultConfig);
// timeout is not set and the default timeout is Long.MAX_VALUE; make sure the
// computed fail time does not overflow into the past
Assert.assertEquals((Long) Long.MAX_VALUE, transformed.getContextValue(DirectDruidClient.QUERY_FAIL_TIME));
}
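The overflow guard this test exercises can be shown in isolation. The following is a standalone sketch of the arithmetic only, not Druid's actual implementation:

public class TimeoutClampSketch {
    public static void main(String[] args) {
        long timeout = Long.MAX_VALUE; // default query timeout from ServerConfig
        long now = System.currentTimeMillis();
        // now + timeout would wrap negative, so the fail time must be clamped.
        long failTime = timeout > Long.MAX_VALUE - now ? Long.MAX_VALUE : now + timeout;
        System.out.println(failTime); // prints Long.MAX_VALUE here
    }
}

The assertion above is consistent with the runner clamping in this way: with the default timeout at Long.MAX_VALUE, QUERY_FAIL_TIME comes back as Long.MAX_VALUE rather than a wrapped negative value.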
Use of org.apache.druid.query.scan.ScanResultValue in project druid by druid-io.
The class SetAndVerifyContextQueryRunnerTest, method testTimeoutZeroIsNotImmediateTimeoutDefaultServersideMax.
@Test
public void testTimeoutZeroIsNotImmediateTimeoutDefaultServersideMax() {
Query<ScanResultValue> query = new Druids.ScanQueryBuilder()
    .dataSource("foo")
    .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.ETERNITY)))
    .context(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 0))
    .build();
ServerConfig defaultConfig = new ServerConfig();
QueryRunner<ScanResultValue> mockRunner = EasyMock.createMock(QueryRunner.class);
SetAndVerifyContextQueryRunner<ScanResultValue> queryRunner = new SetAndVerifyContextQueryRunner<>(defaultConfig, mockRunner);
Query<ScanResultValue> transformed = queryRunner.withTimeoutAndMaxScatterGatherBytes(query, defaultConfig);
// timeout is set to 0, so withTimeoutAndMaxScatterGatherBytes should set QUERY_FAIL_TIME to the
// current time plus the server-side max query timeout at the time the method was called.
// Since the default max is Long.MAX_VALUE, adding the current time would overflow, so expect Long.MAX_VALUE.
Assert.assertEquals((Long) Long.MAX_VALUE, transformed.getContextValue(DirectDruidClient.QUERY_FAIL_TIME));
}
Use of org.apache.druid.query.scan.ScanResultValue in project druid by druid-io.
The class SetAndVerifyContextQueryRunnerTest, method testTimeoutIsUsedIfTimeoutIsNonZero.
@Test
public void testTimeoutIsUsedIfTimeoutIsNonZero() throws InterruptedException {
Query<ScanResultValue> query = new Druids.ScanQueryBuilder()
    .dataSource("foo")
    .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.ETERNITY)))
    .context(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1))
    .build();
ServerConfig defaultConfig = new ServerConfig();
QueryRunner<ScanResultValue> mockRunner = EasyMock.createMock(QueryRunner.class);
SetAndVerifyContextQueryRunner<ScanResultValue> queryRunner = new SetAndVerifyContextQueryRunner<>(defaultConfig, mockRunner);
Query<ScanResultValue> transformed = queryRunner.withTimeoutAndMaxScatterGatherBytes(query, defaultConfig);
Thread.sleep(100);
// timeout is set to 1, so withTimeoutAndMaxScatterGatherBytes should set QUERY_FAIL_TIME to the
// current time + 1 ms at the time the method was called.
// After sleeping for 100 millis, that fail time should be earlier than the current time when checked.
Assert.assertTrue(System.currentTimeMillis() > (Long) transformed.getContextValue(DirectDruidClient.QUERY_FAIL_TIME));
}
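What this last assertion relies on can be illustrated with plain arithmetic; the following is a hypothetical sketch of the timing relationship, not the runner's code:

public class FailTimeSketch {
    public static void main(String[] args) throws InterruptedException {
        long timeAtCall = System.currentTimeMillis();
        long failTime = timeAtCall + 1; // 1 ms timeout from the query context
        Thread.sleep(100);
        System.out.println(System.currentTimeMillis() > failTime); // true
    }
}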