Search in sources :

Example 26 with LegacySegmentSpec

use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

In the class ScanQuerySpecTest, the following shows the method testSerializationWithTimeOrder.

@Test
public void testSerializationWithTimeOrder() throws Exception {
    // JSON form of a scan query that carries an explicit ascending time order.
    String expectedJson =
            "{\"queryType\":\"scan\","
            + "\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
            + "\"intervals\":{\"type\":\"LegacySegmentSpec\","
            + "\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
            + "\"virtualColumns\":[],"
            + "\"resultFormat\":\"list\","
            + "\"batchSize\":20480,"
            + "\"limit\":3,"
            + "\"order\":\"ascending\","
            + "\"filter\":null,"
            + "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
            + "\"context\":null,"
            + "\"descending\":false,"
            + "\"granularity\":{\"type\":\"all\"}}";
    // Equivalent query built programmatically.
    ScanQuery query = new ScanQuery(
            new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
            new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
            VirtualColumns.EMPTY,
            ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
            0,
            0,
            3,
            ScanQuery.Order.ASCENDING,
            null,
            null,
            Arrays.asList("market", "quality", "index", "__time"),
            null,
            null);
    // Serialization must reproduce the expected JSON exactly.
    String actualJson = JSON_MAPPER.writeValueAsString(query);
    Assert.assertEquals(expectedJson, actualJson);
    // Deserialization must round-trip back to an equal query object.
    Assert.assertEquals(query, JSON_MAPPER.readValue(expectedJson, ScanQuery.class));
    // The "order" field maps to the time ordering of the query.
    Assert.assertEquals(ScanQuery.Order.ASCENDING, query.getTimeOrder());
    // A time order implies a single derived order-by on __time.
    Assert.assertEquals(
            Collections.singletonList(new ScanQuery.OrderBy("__time", ScanQuery.Order.ASCENDING)),
            query.getOrderBys());
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) LegacySegmentSpec(org.apache.druid.query.spec.LegacySegmentSpec) Test(org.junit.Test)

Example 27 with LegacySegmentSpec

use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

In the class SegmentMetadataQueryQueryToolChestTest, the following shows the method testCacheStrategy.

@Test
public void testCacheStrategy() throws Exception {
    // A minimal segment metadata query over a fixed interval.
    SegmentMetadataQuery metadataQuery = new SegmentMetadataQuery(
            new TableDataSource("dummy"),
            new LegacySegmentSpec("2015-01-01/2015-01-02"),
            null, null, null, null, false, false);
    CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> cacheStrategy =
            new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig())
                    .getCacheStrategy(metadataQuery);
    // The cache key for this query must match the known byte sequence.
    byte[] expectedCacheKey = { 0x04, 0x09, 0x01, 0x0A, 0x00, 0x00, 0x00, 0x03, 0x00, 0x02, 0x04 };
    Assert.assertArrayEquals(expectedCacheKey, cacheStrategy.computeCacheKey(metadataQuery));
    // A representative analysis result to push through the cache round trip.
    SegmentAnalysis analysis = new SegmentAnalysis(
            "testSegment",
            ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
            ImmutableMap.of(
                    "placement",
                    new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.name(), true, false, 10881, 1, "preferred", "preferred", null)),
            71982,
            100,
            null, null, null, null);
    // Serialize via the cache preparation path, then deserialize and restore.
    Object cacheable = cacheStrategy.prepareForSegmentLevelCache().apply(analysis);
    ObjectMapper mapper = new DefaultObjectMapper();
    SegmentAnalysis roundTripped = mapper.readValue(
            mapper.writeValueAsBytes(cacheable),
            cacheStrategy.getCacheObjectClazz());
    SegmentAnalysis restored = cacheStrategy.pullFromSegmentLevelCache().apply(roundTripped);
    // The cache round trip must be lossless.
    Assert.assertEquals(analysis, restored);
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) LegacySegmentSpec(org.apache.druid.query.spec.LegacySegmentSpec) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Test(org.junit.Test)

Example 28 with LegacySegmentSpec

use of org.apache.druid.query.spec.LegacySegmentSpec in project druid by druid-io.

In the class SegmentAnalyzerTest, the following shows the method getSegmentAnalysises.

/**
 * Runs a segment metadata query over the given segment and returns the resulting analyses.
 *
 * @param index    the segment to analyze
 * @param analyses the analysis types to request from the metadata query
 * @return the segment analyses produced by running the query over {@code index}
 */
private List<SegmentAnalysis> getSegmentAnalysises(Segment index, EnumSet<SegmentMetadataQuery.AnalysisType> analyses) {
    // Build a runner backed by the segment metadata query machinery.
    final SegmentMetadataQueryRunnerFactory factory = new SegmentMetadataQueryRunnerFactory(
            new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner((QueryRunnerFactory) factory, index, null);
    // Query over a fixed one-year interval with the requested analysis types.
    final SegmentMetadataQuery metadataQuery = new SegmentMetadataQuery(
            new TableDataSource("test"),
            new LegacySegmentSpec("2011/2012"),
            null, null, null, analyses, false, false);
    return runner.run(QueryPlus.wrap(metadataQuery)).toList();
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) QueryRunner(org.apache.druid.query.QueryRunner) LegacySegmentSpec(org.apache.druid.query.spec.LegacySegmentSpec)

Aggregations

LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec)28 Test (org.junit.Test)27 CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory)21 DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec)21 QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment)21 GroupByQuery (org.apache.druid.query.groupby.GroupByQuery)20 GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest)20 IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment)20 InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest)20 ResultRow (org.apache.druid.query.groupby.ResultRow)18 ExpressionVirtualColumn (org.apache.druid.segment.virtual.ExpressionVirtualColumn)15 TableDataSource (org.apache.druid.query.TableDataSource)6 SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter)3 Query (org.apache.druid.query.Query)2 QueryRunner (org.apache.druid.query.QueryRunner)2 InDimFilter (org.apache.druid.query.filter.InDimFilter)2 SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery)2 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)1 IOException (java.io.IOException)1 DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper)1