Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class GroupByQueryRunnerFactoryTest, method testMergeRunnersEnsureGroupMerging:
@Test
public void testMergeRunnersEnsureGroupMerging() {
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("tags", "tags"))
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .build();
  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner() {
    @Override
    public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
      return factory.getToolchest().mergeResults(new QueryRunner() {
        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
          final Query query = queryPlus.getQuery();
          try {
            return new MergeSequence(
                query.getResultOrdering(),
                Sequences.simple(Arrays.asList(
                    factory.createRunner(createSegment()).run(queryPlus, responseContext),
                    factory.createRunner(createSegment()).run(queryPlus, responseContext))));
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      }).run(queryPlus, responseContext);
    }
  });
  Sequence<ResultRow> result = mergedRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
  List<ResultRow> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t2", "count", 4L));
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "");
}
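Worth calling out from this example: LegacySegmentSpec accepts its interval in loose form. Below is a minimal standalone sketch (not part of the test above) showing the two construction styles that appear in this section, a plain interval string and a parsed Interval; the getIntervals() accessor is assumed from the QuerySegmentSpec interface.

import java.util.List;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.spec.LegacySegmentSpec;
import org.joda.time.Interval;

class LegacySegmentSpecSketch {
  public static void main(String[] args) {
    // Both forms should describe the same single interval, 1970 through 3000.
    LegacySegmentSpec fromString = new LegacySegmentSpec("1970/3000");
    LegacySegmentSpec fromInterval = new LegacySegmentSpec(Intervals.of("1970/3000"));
    List<Interval> intervals = fromString.getIntervals();
    System.out.println(intervals);
  }
}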
Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class ScanQuerySpecTest, method testSerialization:
@Test
public void testSerialization() throws Exception {
  String legacy = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
      + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
      + "\"filter\":null,"
      + "\"columns\":[\"market\",\"quality\",\"index\"],"
      + "\"limit\":3,"
      + "\"context\":null}";
  String current = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
      + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
      + "\"virtualColumns\":[],"
      + "\"resultFormat\":\"list\","
      + "\"batchSize\":20480,"
      + "\"limit\":3,"
      + "\"filter\":null,"
      + "\"columns\":[\"market\",\"quality\",\"index\"],"
      + "\"context\":null,"
      + "\"descending\":false,"
      + "\"granularity\":{\"type\":\"all\"}}";
  ScanQuery query = new ScanQuery(
      new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
      new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
      VirtualColumns.EMPTY,
      ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
      0, 0, 3,
      ScanQuery.Order.NONE,
      null, null,
      Arrays.asList("market", "quality", "index"),
      null, null);
  String actual = JSON_MAPPER.writeValueAsString(query);
  Assert.assertEquals(current, actual);
  Assert.assertEquals(query, JSON_MAPPER.readValue(actual, ScanQuery.class));
  Assert.assertEquals(query, JSON_MAPPER.readValue(legacy, ScanQuery.class));
}
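The current string in this test pins down the JSON shape: within the query, the spec serializes as an object with "type":"LegacySegmentSpec" and an "intervals" array. A hedged sketch of round-tripping just the segment spec, assuming DefaultObjectMapper (org.apache.druid.jackson) behaves like the test's JSON_MAPPER:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.spec.LegacySegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;

class SegmentSpecRoundTripSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new DefaultObjectMapper();
    QuerySegmentSpec spec = new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14"));
    String json = mapper.writeValueAsString(spec);
    // Expected to match the "intervals" fragment of the test above, i.e.
    // {"type":"LegacySegmentSpec","intervals":["2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z"]}
    QuerySegmentSpec back = mapper.readValue(json, QuerySegmentSpec.class);
    System.out.println(json);
    System.out.println("round-trips equal: " + spec.equals(back));
  }
}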
Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class ScanQuerySpecTest, method testSerializationLegacyString:
@Test
public void testSerializationLegacyString() throws Exception {
  ScanQuery query = new ScanQuery(
      new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
      new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
      VirtualColumns.EMPTY,
      ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
      0, 1, 3,
      ScanQuery.Order.NONE,
      null, null,
      Arrays.asList("market", "quality", "index"),
      null, null);
  final String serialized = JSON_MAPPER.writeValueAsString(query);
  final ScanQuery deserialized = (ScanQuery) JSON_MAPPER.readValue(serialized, Query.class);
  Assert.assertEquals(query, deserialized);
}
Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class ScanQuerySpecTest, method testSerializationWithOrderBy:
@Test
public void testSerializationWithOrderBy() throws Exception {
  String originalJson = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
      + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
      + "\"virtualColumns\":[],"
      + "\"resultFormat\":\"list\","
      + "\"batchSize\":20480,"
      + "\"limit\":3,"
      + "\"orderBy\":[{\"columnName\":\"quality\",\"order\":\"ascending\"}],"
      + "\"filter\":null,"
      + "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
      + "\"context\":null,"
      + "\"descending\":false,"
      + "\"granularity\":{\"type\":\"all\"}}";
  ScanQuery expectedQuery = new ScanQuery(
      new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
      new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
      VirtualColumns.EMPTY,
      ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
      0, 0, 3,
      null,
      Collections.singletonList(new ScanQuery.OrderBy("quality", ScanQuery.Order.ASCENDING)),
      null,
      Arrays.asList("market", "quality", "index", "__time"),
      null, null);
  String serializedJson = JSON_MAPPER.writeValueAsString(expectedQuery);
  Assert.assertEquals(originalJson, serializedJson);
  Assert.assertEquals(expectedQuery, JSON_MAPPER.readValue(originalJson, ScanQuery.class));
  Assert.assertEquals(ScanQuery.Order.NONE, expectedQuery.getTimeOrder());
  Assert.assertEquals(
      Collections.singletonList(new ScanQuery.OrderBy("quality", ScanQuery.Order.ASCENDING)),
      expectedQuery.getOrderBys());
}
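The last two assertions make a distinction that is easy to miss: an orderBy on a non-time column leaves getTimeOrder() at ScanQuery.Order.NONE, so time ordering and column ordering are reported separately. A small sketch reusing the constructor shape from the test above (argument meanings assumed to match):

import java.util.Arrays;
import java.util.Collections;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.scan.ScanQuery;
import org.apache.druid.query.spec.LegacySegmentSpec;
import org.apache.druid.segment.VirtualColumns;

class OrderBySketch {
  public static void main(String[] args) {
    ScanQuery q = new ScanQuery(
        new TableDataSource("testing"),
        new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
        VirtualColumns.EMPTY,
        ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
        0, 0, 3,
        null, // no explicit time order is given...
        Collections.singletonList(new ScanQuery.OrderBy("quality", ScanQuery.Order.ASCENDING)),
        null,
        Arrays.asList("market", "quality", "index", "__time"),
        null, null);
    // ...so the time order resolves to NONE while the column sort lives in getOrderBys().
    System.out.println(q.getTimeOrder());  // NONE
    System.out.println(q.getOrderBys());   // the "quality" ascending OrderBy
  }
}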
Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class DoubleStorageTest, method testSelectValues:
@Test
public void testSelectValues() {
  QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
      SCAN_QUERY_RUNNER_FACTORY,
      SEGMENT_ID,
      new QueryableIndexSegment(index, SEGMENT_ID),
      null);
  ScanQuery query = newTestQuery()
      .intervals(new LegacySegmentSpec(INTERVAL))
      .virtualColumns()
      .build();
  Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
  ScanResultValue expectedScanResult = new ScanResultValue(
      SEGMENT_ID.toString(),
      ImmutableList.of(TIME_COLUMN, DIM_NAME, DIM_FLOAT_NAME),
      getStreamOfEvents().collect(Collectors.toList()));
  List<ScanResultValue> expectedResults = Collections.singletonList(expectedScanResult);
  ScanQueryRunnerTest.verify(expectedResults, results);
}
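newTestQuery() here is a test-local helper; the equivalent public entry point is presumably Druids.newScanQueryBuilder(), which also accepts a LegacySegmentSpec through its intervals(...) method. A hedged sketch with placeholder datasource and interval values:

import org.apache.druid.query.Druids;
import org.apache.druid.query.scan.ScanQuery;
import org.apache.druid.query.spec.LegacySegmentSpec;

class ScanBuilderSketch {
  static ScanQuery buildQuery() {
    // "my_datasource" and the interval string are illustrative placeholders.
    return Druids.newScanQueryBuilder()
        .dataSource("my_datasource")
        .intervals(new LegacySegmentSpec("2011-01-12/2011-01-14"))
        .limit(3)
        .build();
  }
}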