Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class ScanQuerySpecTest, method testSerializationWithTimeOrder:
@Test
public void testSerializationWithTimeOrder() throws Exception
{
  String originalJson =
      "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
      + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
      + "\"virtualColumns\":[],"
      + "\"resultFormat\":\"list\","
      + "\"batchSize\":20480,"
      + "\"limit\":3,"
      + "\"order\":\"ascending\","
      + "\"filter\":null,"
      + "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
      + "\"context\":null,"
      + "\"descending\":false,"
      + "\"granularity\":{\"type\":\"all\"}}";
  ScanQuery expectedQuery = new ScanQuery(
      new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
      new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
      VirtualColumns.EMPTY,
      ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
      0, 0, 3,
      ScanQuery.Order.ASCENDING,
      null,
      null,
      Arrays.asList("market", "quality", "index", "__time"),
      null,
      null
  );
  String serializedJson = JSON_MAPPER.writeValueAsString(expectedQuery);
  Assert.assertEquals(originalJson, serializedJson);
  Assert.assertEquals(expectedQuery, JSON_MAPPER.readValue(originalJson, ScanQuery.class));
  Assert.assertEquals(ScanQuery.Order.ASCENDING, expectedQuery.getTimeOrder());
  Assert.assertEquals(
      Collections.singletonList(new ScanQuery.OrderBy("__time", ScanQuery.Order.ASCENDING)),
      expectedQuery.getOrderBys()
  );
}
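For reference, a minimal sketch (not part of the test above) of serializing a LegacySegmentSpec on its own; it assumes Druid's DefaultObjectMapper knows the QuerySegmentSpec subtypes, so the polymorphic "type" field round-trips the same way it appears inside the query JSON:

// Hedged sketch, not from the Druid test suite.
ObjectMapper mapper = new DefaultObjectMapper();
LegacySegmentSpec spec = new LegacySegmentSpec("2011-01-12/2011-01-14");
String json = mapper.writeValueAsString(spec);
// Per the query JSON above, the expected shape is:
// {"type":"LegacySegmentSpec","intervals":["2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z"]}
QuerySegmentSpec roundTripped = mapper.readValue(json, QuerySegmentSpec.class);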
Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class SegmentMetadataQueryQueryToolChestTest, method testCacheStrategy:
@Test
public void testCacheStrategy() throws Exception
{
  SegmentMetadataQuery query = new SegmentMetadataQuery(
      new TableDataSource("dummy"),
      new LegacySegmentSpec("2015-01-01/2015-01-02"),
      null, null, null, null, false, false
  );
  CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> strategy =
      new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(query);

  // Test cache key generation
  byte[] expectedKey = {0x04, 0x09, 0x01, 0x0A, 0x00, 0x00, 0x00, 0x03, 0x00, 0x02, 0x04};
  byte[] actualKey = strategy.computeCacheKey(query);
  Assert.assertArrayEquals(expectedKey, actualKey);

  SegmentAnalysis result = new SegmentAnalysis(
      "testSegment",
      ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
      ImmutableMap.of(
          "placement",
          new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.name(), true, false, 10881, 1, "preferred", "preferred", null)
      ),
      71982,
      100,
      null, null, null, null
  );

  Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result);
  ObjectMapper objectMapper = new DefaultObjectMapper();
  SegmentAnalysis fromCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedValue),
      strategy.getCacheObjectClazz()
  );
  SegmentAnalysis fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
  Assert.assertEquals(result, fromCacheResult);
}
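For comparison, a segment-metadata query like this one is often built through the Druids builder rather than the constructor; a hedged sketch follows (the builder and its intervals(String) overload wrapping the raw string in a LegacySegmentSpec are assumptions from recollection, not taken from this test):

// Hedged sketch: assumes Druids.newSegmentMetadataQueryBuilder() and its
// dataSource/intervals methods; intervals(String) is believed to produce the same
// LegacySegmentSpec as the explicit constructor call above.
SegmentMetadataQuery builtQuery = Druids.newSegmentMetadataQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .build();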
Use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.
From the class SegmentAnalyzerTest, method getSegmentAnalysises:
/**
 * *Awesome* method name auto-generated by IntelliJ! I love IntelliJ!
 *
 * @param index    the segment to analyze
 * @param analyses the analysis types to request in the SegmentMetadataQuery
 * @return the SegmentAnalysis results produced by running the query against the segment
 */
private List<SegmentAnalysis> getSegmentAnalysises(Segment index, EnumSet<SegmentMetadataQuery.AnalysisType> analyses)
{
  final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
      (QueryRunnerFactory) new SegmentMetadataQueryRunnerFactory(
          new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()),
          QueryRunnerTestHelper.NOOP_QUERYWATCHER
      ),
      index,
      null
  );
  final SegmentMetadataQuery query = new SegmentMetadataQuery(
      new TableDataSource("test"),
      new LegacySegmentSpec("2011/2012"),
      null, null, null, analyses, false, false
  );
  return runner.run(QueryPlus.wrap(query)).toList();
}
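A hedged usage sketch of this helper (the segment variable is a placeholder for any Segment built elsewhere in the test class; the specific analysis types chosen here are illustrative):

// Hedged sketch: CARDINALITY and SIZE are SegmentMetadataQuery.AnalysisType values.
List<SegmentAnalysis> results = getSegmentAnalysises(
    segment,
    EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE)
);
// A single segment normally yields a single analysis row, whose per-column
// results can then be inspected.
SegmentAnalysis analysis = results.get(0);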