
Example 76 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class TimeBoundaryQueryQueryToolChestTest, method testCacheStrategy:

@Test
public void testCacheStrategy() throws Exception {
    CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery> strategy =
        new TimeBoundaryQueryQueryToolChest().getCacheStrategy(
            new TimeBoundaryQuery(
                new TableDataSource("dummy"),
                new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
                null, null, null));
    final Result<TimeBoundaryResultValue> result = new Result<>(
        DateTimes.utc(123L),
        new TimeBoundaryResultValue(ImmutableMap.of(
            TimeBoundaryQuery.MIN_TIME, DateTimes.EPOCH.toString(),
            TimeBoundaryQuery.MAX_TIME, DateTimes.of("2015-01-01").toString())));
    // Round-trip through the segment-level cache: prepare, serialize to bytes,
    // deserialize, and pull the value back out.
    Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    Object fromCacheValue = objectMapper.readValue(
        objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<TimeBoundaryResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Result(org.apache.druid.query.Result) Test(org.junit.Test)
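
The round-trip above also exercises Jackson serialization of the datasource itself. As a hedged aside, here is a minimal sketch of serializing a TableDataSource directly; the JSON shape in the comment is an assumption, not something this test asserts:

ObjectMapper mapper = new DefaultObjectMapper();
TableDataSource dataSource = new TableDataSource("dummy");
// Assumption: DataSource is polymorphic JSON, so a table serializes as
// something like {"type":"table","name":"dummy"}.
String json = mapper.writeValueAsString(dataSource);
DataSource roundTripped = mapper.readValue(json, DataSource.class);
Assert.assertEquals(dataSource, roundTripped); // equality is value-based, as the cache tests assume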

Example 77 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class SegmentManager, method getIndexedTables:

/**
 * Returns the collection of {@link IndexedTable} for the entire timeline (join conditions do not currently
 * consider the query's intervals), if the timeline exists and each of its segments is joinable.
 */
public Optional<Stream<ReferenceCountingIndexedTable>> getIndexedTables(DataSourceAnalysis analysis) {
    return getTimeline(analysis).map(timeline -> {
        // join doesn't currently consider intervals, so just consider all segments
        final Stream<ReferenceCountingSegment> segments =
            timeline.lookup(Intervals.ETERNITY)
                    .stream()
                    .flatMap(x -> StreamSupport.stream(x.getObject().payloads().spliterator(), false));
        final TableDataSource tableDataSource = getTableDataSource(analysis);
        ConcurrentHashMap<SegmentId, ReferenceCountingIndexedTable> tables =
            Optional.ofNullable(dataSources.get(tableDataSource.getName()))
                    .map(DataSourceState::getTablesLookup)
                    .orElseThrow(() -> new ISE("Datasource %s does not have IndexedTables", tableDataSource.getName()));
        return segments.map(segment -> tables.get(segment.getId())).filter(Objects::nonNull);
    });
}
Also used : ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment) ReferenceCountingIndexedTable(org.apache.druid.segment.join.table.ReferenceCountingIndexedTable) DataSourceAnalysis(org.apache.druid.query.planning.DataSourceAnalysis) Intervals(org.apache.druid.java.util.common.Intervals) Inject(com.google.inject.Inject) SegmentLazyLoadFailCallback(org.apache.druid.segment.SegmentLazyLoadFailCallback) SegmentLoadingException(org.apache.druid.segment.loading.SegmentLoadingException) CollectionUtils(org.apache.druid.utils.CollectionUtils) SegmentLoader(org.apache.druid.segment.loading.SegmentLoader) PartitionChunk(org.apache.druid.timeline.partition.PartitionChunk) SettableSupplier(org.apache.druid.common.guava.SettableSupplier) Map(java.util.Map) StreamSupport(java.util.stream.StreamSupport) ShardSpec(org.apache.druid.timeline.partition.ShardSpec) EmittingLogger(org.apache.druid.java.util.emitter.EmittingLogger) VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline) Closer(org.apache.druid.java.util.common.io.Closer) IndexedTable(org.apache.druid.segment.join.table.IndexedTable) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) ISE(org.apache.druid.java.util.common.ISE) IOException(java.io.IOException) TableDataSource(org.apache.druid.query.TableDataSource) Objects(java.util.Objects) Stream(java.util.stream.Stream) Ordering(com.google.common.collect.Ordering) DataSegment(org.apache.druid.timeline.DataSegment) Optional(java.util.Optional) VisibleForTesting(com.google.common.annotations.VisibleForTesting) SegmentId(org.apache.druid.timeline.SegmentId)
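
A hedged caller-side sketch of how getIndexedTables might be used; the segmentManager variable and the query it receives are assumptions for illustration, not part of the original class:

// Hypothetical caller: analyze the query's datasource, then fetch the
// indexed tables for the base table, if the timeline exists.
DataSourceAnalysis analysis = DataSourceAnalysis.forDataSource(query.getDataSource());
Optional<Stream<ReferenceCountingIndexedTable>> tables = segmentManager.getIndexedTables(analysis);
tables.ifPresent(stream -> stream.forEach(table -> {
    // each ReferenceCountingIndexedTable wraps an IndexedTable consumed by the join machinery
}));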

Example 78 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class UnifiedIndexerAppenderatorsManager, method getBundle:

@VisibleForTesting
<T> DatasourceBundle getBundle(final Query<T> query) {
    final DataSourceAnalysis analysis = DataSourceAnalysis.forDataSource(query.getDataSource());
    final TableDataSource table = analysis.getBaseTableDataSource()
        .orElseThrow(() -> new ISE("Cannot handle datasource: %s", analysis.getDataSource()));
    final DatasourceBundle bundle;
    synchronized (this) {
        bundle = datasourceBundles.get(table.getName());
    }
    if (bundle == null) {
        throw new IAE("Could not find segment walker for datasource [%s]", table.getName());
    }
    return bundle;
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) ISE(org.apache.druid.java.util.common.ISE) DataSourceAnalysis(org.apache.druid.query.planning.DataSourceAnalysis) IAE(org.apache.druid.java.util.common.IAE) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
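
getBundle leans on DataSourceAnalysis to unwrap whatever datasource the query carries down to a base table. A hedged sketch of that behavior, where "wikipedia" and innerQuery are hypothetical placeholders:

// A plain table analyzes to itself as the base table.
DataSourceAnalysis direct = DataSourceAnalysis.forDataSource(new TableDataSource("wikipedia"));
// A QueryDataSource wrapping a query over that table analyzes down to the same base.
DataSourceAnalysis nested = DataSourceAnalysis.forDataSource(new QueryDataSource(innerQuery));
TableDataSource base = nested.getBaseTableDataSource()
    .orElseThrow(() -> new ISE("no base table")); // empty for, e.g., lookup or inline datasources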

Example 79 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class SegmentMetadataQueryQueryToolChestTest, method testCacheStrategy:

@Test
public void testCacheStrategy() throws Exception {
    SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("dummy"),
        new LegacySegmentSpec("2015-01-01/2015-01-02"),
        null, null, null, null, false, false);
    CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> strategy =
        new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(query);
    // Test cache key generation
    byte[] expectedKey = { 0x04, 0x09, 0x01, 0x0A, 0x00, 0x00, 0x00, 0x03, 0x00, 0x02, 0x04 };
    byte[] actualKey = strategy.computeCacheKey(query);
    Assert.assertArrayEquals(expectedKey, actualKey);
    SegmentAnalysis result = new SegmentAnalysis(
        "testSegment",
        ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(ColumnType.STRING, ValueType.STRING.name(), true, false, 10881, 1, "preferred", "preferred", null)),
        71982, 100, null, null, null, null);
    // Round-trip through the segment-level cache and verify nothing is lost.
    Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    SegmentAnalysis fromCacheValue = objectMapper.readValue(
        objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    SegmentAnalysis fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) LegacySegmentSpec(org.apache.druid.query.spec.LegacySegmentSpec) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)
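
Beyond the byte-for-byte check against expectedKey, the property this protects is determinism: the same query must always map to the same cache key, or segment-level cache hits would never occur. A trivial hedged sketch:

// Assumption for illustration: computeCacheKey is a pure function of the query.
byte[] first = strategy.computeCacheKey(query);
byte[] second = strategy.computeCacheKey(query);
Assert.assertArrayEquals(first, second); // identical queries share a cache entry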

Example 80 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class ResultRowTest, method testMapBasedRowWithNullValues:

@Test
public void testMapBasedRowWithNullValues() {
    GroupByQuery query = new GroupByQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        null, null,
        Granularities.ALL,
        ImmutableList.of(
            new DefaultDimensionSpec("dim1", "dim1"),
            new DefaultDimensionSpec("dim2", "dim2"),
            new DefaultDimensionSpec("dim3", "dim3")),
        ImmutableList.of(new CountAggregatorFactory("count")),
        null, null, null, null, null);
    final ResultRow row = ResultRow.of("1", "2", null);
    MapBasedRow mapBasedRow = row.toMapBasedRow(query);
    // Let's make sure values are there as expected
    Assert.assertEquals("1", mapBasedRow.getRaw("dim1"));
    Assert.assertEquals("2", mapBasedRow.getRaw("dim2"));
    Assert.assertNull(mapBasedRow.getRaw("dim3"));
    // Also, let's make sure that the dimension with null value is actually present in the map
    Assert.assertTrue(mapBasedRow.getEvent().containsKey("dim3"));
}
Also used : MapBasedRow(org.apache.druid.data.input.MapBasedRow) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Test(org.junit.Test)
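
ResultRow stores values positionally, and toMapBasedRow(query) is what reattaches the dimension names declared in the query. A hedged sketch of the positional view of the same row (assumption: with Granularities.ALL there is no timestamp position, so dimensions start at index 0):

Assert.assertEquals("1", row.get(0)); // dim1
Assert.assertEquals("2", row.get(1)); // dim2
Assert.assertNull(row.get(2));        // dim3 is present but null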

Aggregations

TableDataSource (org.apache.druid.query.TableDataSource) 118
Test (org.junit.Test) 94
GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource) 46
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory) 43
QueryDataSource (org.apache.druid.query.QueryDataSource) 41
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec) 40
Parameters (junitparams.Parameters) 30
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec) 19
LookupDataSource (org.apache.druid.query.LookupDataSource) 18
DataSegment (org.apache.druid.timeline.DataSegment) 15
Result (org.apache.druid.query.Result) 14
CountDownLatch (java.util.concurrent.CountDownLatch) 11
Query (org.apache.druid.query.Query) 11
TimelineObjectHolder (org.apache.druid.timeline.TimelineObjectHolder) 11
Interval (org.joda.time.Interval) 11
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter) 10
ArrayList (java.util.ArrayList) 9
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery) 9
ISE (org.apache.druid.java.util.common.ISE) 8
SegmentDescriptor (org.apache.druid.query.SegmentDescriptor) 8