
Example 36 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class ScanQuerySpecTest, method testSerializationLegacyString.

@Test
public void testSerializationLegacyString() throws Exception {
    // Scan query over a plain table datasource, using the legacy interval spec.
    ScanQuery query = new ScanQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
        VirtualColumns.EMPTY,
        ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
        0, 1, 3,
        ScanQuery.Order.NONE,
        null, null,
        Arrays.asList("market", "quality", "index"),
        null, null);
    final String serialized = JSON_MAPPER.writeValueAsString(query);
    final ScanQuery deserialized = (ScanQuery) JSON_MAPPER.readValue(serialized, Query.class);
    Assert.assertEquals(query, deserialized);
}
Also used: TableDataSource (org.apache.druid.query.TableDataSource), Query (org.apache.druid.query.Query), LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec), Test (org.junit.Test)
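
The dataSource argument above is what produces the {"type":"table","name":...} shape visible in Example 37's expected JSON. A minimal sketch of that serialization on its own, assuming Druid's DefaultObjectMapper and an illustrative datasource name:

import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.TableDataSource;

public class TableDataSourceJsonSketch {
    public static void main(String[] args) throws Exception {
        DefaultObjectMapper mapper = new DefaultObjectMapper();
        // TableDataSource is registered under the subtype name "table" on the
        // DataSource interface, so this should print:
        //   {"type":"table","name":"wikipedia"}   ("wikipedia" is illustrative)
        System.out.println(mapper.writeValueAsString(new TableDataSource("wikipedia")));
    }
}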

Example 37 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class ScanQuerySpecTest, method testSerializationWithOrderBy.

@Test
public void testSerializationWithOrderBy() throws Exception {
    String originalJson = "{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
        + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
        + "\"virtualColumns\":[],"
        + "\"resultFormat\":\"list\","
        + "\"batchSize\":20480,"
        + "\"limit\":3,"
        + "\"orderBy\":[{\"columnName\":\"quality\",\"order\":\"ascending\"}],"
        + "\"filter\":null,"
        + "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
        + "\"context\":null,"
        + "\"descending\":false,"
        + "\"granularity\":{\"type\":\"all\"}}";
    // A batchSize of 0 and a null time order serialize as the defaults in the JSON
    // above (20480 and Order.NONE; the latter is asserted below).
    ScanQuery expectedQuery = new ScanQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
        VirtualColumns.EMPTY,
        ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
        0, 0, 3,
        null,
        Collections.singletonList(new ScanQuery.OrderBy("quality", ScanQuery.Order.ASCENDING)),
        null,
        Arrays.asList("market", "quality", "index", "__time"),
        null, null);
    String serializedJson = JSON_MAPPER.writeValueAsString(expectedQuery);
    Assert.assertEquals(originalJson, serializedJson);
    Assert.assertEquals(expectedQuery, JSON_MAPPER.readValue(originalJson, ScanQuery.class));
    Assert.assertEquals(ScanQuery.Order.NONE, expectedQuery.getTimeOrder());
    Assert.assertEquals(Collections.singletonList(new ScanQuery.OrderBy("quality", ScanQuery.Order.ASCENDING)), expectedQuery.getOrderBys());
}
Also used: TableDataSource (org.apache.druid.query.TableDataSource), LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec), Test (org.junit.Test)
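
Examples 36 and 37 deserialize the same kind of JSON through two different target types: the Query interface and the concrete ScanQuery class. A sketch of why both work, assuming Druid's DefaultObjectMapper and a trimmed-down, illustrative scan query JSON:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.Query;
import org.apache.druid.query.scan.ScanQuery;

public class PolymorphicQueryReadSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new DefaultObjectMapper();
        // "queryType" is the polymorphic discriminator on the Query interface,
        // so Jackson should resolve the concrete subtype from either target type.
        String json = "{\"queryType\":\"scan\","
            + "\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
            + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12/2011-01-14\"]},"
            + "\"columns\":[\"market\"]}";
        Query<?> asInterface = mapper.readValue(json, Query.class);     // as in Example 36
        ScanQuery asConcrete = mapper.readValue(json, ScanQuery.class); // as in Example 37
        System.out.println(asInterface.equals(asConcrete)); // expected: true
    }
}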

Example 38 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class SearchQueryQueryToolChestTest, method testCacheStrategy.

@Test
public void testCacheStrategy() throws Exception {
    // Build the cache strategy for a search query over a table datasource,
    // then verify that a result survives the segment-level cache round trip.
    CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy = new SearchQueryQueryToolChest(null, null).getCacheStrategy(
        new SearchQuery(
            new TableDataSource("dummy"),
            null,
            Granularities.ALL,
            1,
            new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
            ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")),
            new FragmentSearchQuerySpec(ImmutableList.of("a", "b")),
            null,
            null));
    final Result<SearchResultValue> result = new Result<>(DateTimes.utc(123L), new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a"))));
    // Prepare for the cache, serialize, deserialize, and pull back out.
    Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<SearchResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
    // The cached copy must be lossless.
    Assert.assertEquals(result, fromCacheResult);
}
Also used: MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), Result (org.apache.druid.query.Result), TableDataSource (org.apache.druid.query.TableDataSource), DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Test (org.junit.Test)
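
The FragmentSearchQuerySpec in the query above matches dimension values containing every fragment. A small standalone sketch of that matching behavior (my reading of the spec's accept method; treat the expected outputs as an assumption):

import com.google.common.collect.ImmutableList;
import org.apache.druid.query.search.FragmentSearchQuerySpec;

public class FragmentSpecSketch {
    public static void main(String[] args) {
        // Matches values that contain both "a" and "b" (case-insensitive by default).
        FragmentSearchQuerySpec spec = new FragmentSearchQuerySpec(ImmutableList.of("a", "b"));
        System.out.println(spec.accept("abc")); // expected: true, contains both fragments
        System.out.println(spec.accept("acd")); // expected: false, "b" is missing
    }
}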

Example 39 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class ClientInfoResource, method getDatasource.

@GET
@Path("/{dataSourceName}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Map<String, Object> getDatasource(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") String interval, @QueryParam("full") String full) {
    if (full == null) {
        return ImmutableMap.of(KEY_DIMENSIONS, getDataSourceDimensions(dataSourceName, interval), KEY_METRICS, getDataSourceMetrics(dataSourceName, interval));
    }
    Interval theInterval;
    if (interval == null || interval.isEmpty()) {
        DateTime now = getCurrentTime();
        theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
        theInterval = Intervals.of(interval);
    }
    final Optional<? extends TimelineLookup<String, ServerSelector>> maybeTimeline = timelineServerView.getTimeline(DataSourceAnalysis.forDataSource(new TableDataSource(dataSourceName)));
    final Optional<Iterable<TimelineObjectHolder<String, ServerSelector>>> maybeServersLookup = maybeTimeline.map(timeline -> timeline.lookup(theInterval));
    if (!maybeServersLookup.isPresent() || Iterables.isEmpty(maybeServersLookup.get())) {
        return Collections.emptyMap();
    }
    Map<Interval, Object> servedIntervals = new TreeMap<>(new Comparator<Interval>() {

        @Override
        public int compare(Interval o1, Interval o2) {
            if (o1.equals(o2) || o1.overlaps(o2)) {
                return 0;
            } else {
                return o1.isBefore(o2) ? -1 : 1;
            }
        }
    });
    for (TimelineObjectHolder<String, ServerSelector> holder : maybeServersLookup.get()) {
        final Set<Object> dimensions = new HashSet<>();
        final Set<Object> metrics = new HashSet<>();
        final PartitionHolder<ServerSelector> partitionHolder = holder.getObject();
        if (partitionHolder.isComplete()) {
            for (ServerSelector server : partitionHolder.payloads()) {
                final DataSegment segment = server.getSegment();
                dimensions.addAll(segment.getDimensions());
                metrics.addAll(segment.getMetrics());
            }
        }
        servedIntervals.put(holder.getInterval(), ImmutableMap.of(KEY_DIMENSIONS, dimensions, KEY_METRICS, metrics));
    }
    // collapse intervals if they abut and have the same set of columns
    Map<String, Object> result = Maps.newLinkedHashMap();
    Interval curr = null;
    Map<String, Set<String>> cols = null;
    for (Map.Entry<Interval, Object> e : servedIntervals.entrySet()) {
        Interval ival = e.getKey();
        if (curr != null && curr.abuts(ival) && cols.equals(e.getValue())) {
            curr = curr.withEnd(ival.getEnd());
        } else {
            if (curr != null) {
                result.put(curr.toString(), cols);
            }
            curr = ival;
            cols = (Map<String, Set<String>>) e.getValue();
        }
    }
    // add the last one in
    if (curr != null) {
        result.put(curr.toString(), cols);
    }
    return result;
}
Also used: Set (java.util.Set), HashSet (java.util.HashSet), TreeMap (java.util.TreeMap), DataSegment (org.apache.druid.timeline.DataSegment), DateTime (org.joda.time.DateTime), ServerSelector (org.apache.druid.client.selector.ServerSelector), TableDataSource (org.apache.druid.query.TableDataSource), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), Interval (org.joda.time.Interval), Path (javax.ws.rs.Path), ResourceFilters (com.sun.jersey.spi.container.ResourceFilters), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
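
The collapse loop at the end of getDatasource is easy to check in isolation. A self-contained sketch of the same idea, using Joda-Time intervals and placeholder column sets (interval dates and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;
import org.joda.time.Interval;

public class IntervalCollapseSketch {
    public static void main(String[] args) {
        // Served intervals in ascending order, mapped to a stand-in for their column set.
        Map<Interval, String> served = new LinkedHashMap<>();
        served.put(Interval.parse("2011-01-01/2011-01-02"), "dims=[a,b]");
        served.put(Interval.parse("2011-01-02/2011-01-03"), "dims=[a,b]"); // abuts, same columns
        served.put(Interval.parse("2011-01-05/2011-01-06"), "dims=[a,b]"); // gap, starts a new run

        Map<String, String> result = new LinkedHashMap<>();
        Interval curr = null;
        String cols = null;
        for (Map.Entry<Interval, String> e : served.entrySet()) {
            if (curr != null && curr.abuts(e.getKey()) && cols.equals(e.getValue())) {
                curr = curr.withEnd(e.getKey().getEnd()); // extend the current run
            } else {
                if (curr != null) {
                    result.put(curr.toString(), cols); // flush the finished run
                }
                curr = e.getKey();
                cols = e.getValue();
            }
        }
        if (curr != null) {
            result.put(curr.toString(), cols); // add the last one in
        }
        // Prints two collapsed entries: 2011-01-01/2011-01-03 and 2011-01-05/2011-01-06.
        System.out.println(result);
    }
}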

Example 40 with TableDataSource

Use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From class DumpSegment, method runMetadata.

private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException {
    final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)).copy().configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    // Request metadata for the single segment backing this index, across all analysis types.
    final SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("dataSource"),
        new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)),
        new ListColumnIncluderator(getColumnsToInclude(index)),
        false,
        null,
        EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class),
        false,
        false);
    withOutputStream(new Function<OutputStream, Object>() {

        @Override
        public Object apply(final OutputStream out) {
            evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), new Function<SegmentAnalysis, Object>() {

                @Override
                public Object apply(SegmentAnalysis analysis) {
                    try {
                        objectMapper.writeValue(out, analysis);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                    return null;
                }
            }));
            return null;
        }
    });
}
Also used: ListColumnIncluderator (org.apache.druid.query.metadata.metadata.ListColumnIncluderator), OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), Json (org.apache.druid.guice.annotations.Json), IOException (java.io.IOException), TableDataSource (org.apache.druid.query.TableDataSource), SpecificSegmentSpec (org.apache.druid.query.spec.SpecificSegmentSpec), SegmentMetadataQuery (org.apache.druid.query.metadata.metadata.SegmentMetadataQuery), SegmentDescriptor (org.apache.druid.query.SegmentDescriptor), SegmentAnalysis (org.apache.druid.query.metadata.metadata.SegmentAnalysis), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)

Aggregations

TableDataSource (org.apache.druid.query.TableDataSource): 118
Test (org.junit.Test): 94
GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource): 46
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 43
QueryDataSource (org.apache.druid.query.QueryDataSource): 41
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 40
Parameters (junitparams.Parameters): 30
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 19
LookupDataSource (org.apache.druid.query.LookupDataSource): 18
DataSegment (org.apache.druid.timeline.DataSegment): 15
Result (org.apache.druid.query.Result): 14
CountDownLatch (java.util.concurrent.CountDownLatch): 11
Query (org.apache.druid.query.Query): 11
TimelineObjectHolder (org.apache.druid.timeline.TimelineObjectHolder): 11
Interval (org.joda.time.Interval): 11
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 10
ArrayList (java.util.ArrayList): 9
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 9
ISE (org.apache.druid.java.util.common.ISE): 8
SegmentDescriptor (org.apache.druid.query.SegmentDescriptor): 8
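
Several of the most frequently co-used classes above fit together in a single query. A hedged sketch, assuming Druid's GroupByQuery builder API and illustrative datasource and column names:

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;

public class GroupByWithTableDataSourceSketch {
    public static GroupByQuery makeQuery() {
        // Group rows of the "wikipedia" table by the "quality" column and count them.
        return GroupByQuery.builder()
            .setDataSource(new TableDataSource("wikipedia"))
            .setQuerySegmentSpec(new MultipleIntervalSegmentSpec(
                ImmutableList.of(Intervals.of("2011-01-12/2011-01-14"))))
            .setGranularity(Granularities.ALL)
            .setDimensions(new DefaultDimensionSpec("quality", "quality"))
            .setAggregatorSpecs(new CountAggregatorFactory("rows"))
            .build();
    }
}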