
Example 16 with TableDataSource

use of io.druid.query.TableDataSource in project druid by druid-io.

the class DumpSegment method runMetadata.

private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException {
    final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)).copy().configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    final SegmentMetadataQuery query = new SegmentMetadataQuery(new TableDataSource("dataSource"), new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)), new ListColumnIncluderator(getColumnsToInclude(index)), false, null, EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class), false, false);
    withOutputStream(new Function<OutputStream, Object>() {

        @Override
        public Object apply(final OutputStream out) {
            evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), new Function<SegmentAnalysis, Object>() {

                @Override
                public Object apply(SegmentAnalysis analysis) {
                    try {
                        objectMapper.writeValue(out, analysis);
                    } catch (IOException e) {
                        throw Throwables.propagate(e);
                    }
                    return null;
                }
            }));
            return null;
        }
    });
}
Also used: ListColumnIncluderator (io.druid.query.metadata.metadata.ListColumnIncluderator), OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), Json (io.druid.guice.annotations.Json), IOException (java.io.IOException), TableDataSource (io.druid.query.TableDataSource), SpecificSegmentSpec (io.druid.query.spec.SpecificSegmentSpec), SegmentMetadataQuery (io.druid.query.metadata.metadata.SegmentMetadataQuery), SegmentDescriptor (io.druid.query.SegmentDescriptor), SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)
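
The method above copies the injected ObjectMapper and disables AUTO_CLOSE_TARGET so that writing each SegmentAnalysis does not close the shared output stream. Below is a minimal, self-contained sketch of that Jackson pattern only; the Row POJO and writeAll helper are hypothetical stand-ins for Druid's SegmentAnalysis and withOutputStream plumbing.

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;

public class NewlineDelimitedWriter {
    // Hypothetical POJO standing in for SegmentAnalysis.
    public static class Row {
        public String id;
        public long value;
        public Row(String id, long value) { this.id = id; this.value = value; }
    }

    public static void writeAll(ObjectMapper sharedMapper, OutputStream out, List<Row> rows) throws IOException {
        // Work on a copy so the shared mapper's configuration stays untouched;
        // AUTO_CLOSE_TARGET=false keeps writeValue() from closing 'out' after the first row.
        final ObjectMapper mapper = sharedMapper
            .copy()
            .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
        for (Row row : rows) {
            mapper.writeValue(out, row);
            out.write('\n');
        }
    }

    public static void main(String[] args) throws IOException {
        writeAll(new ObjectMapper(), System.out, Arrays.asList(new Row("a", 1), new Row("b", 2)));
    }
}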

Example 17 with TableDataSource

use of io.druid.query.TableDataSource in project druid by druid-io.

the class QueryHostFinderTest method testFindServer.

@Test
public void testFindServer() throws Exception {
    EasyMock.expect(brokerSelector.select(EasyMock.<Query>anyObject())).andReturn(new Pair("hotBroker", selector));
    EasyMock.replay(brokerSelector);
    EasyMock.expect(selector.pick()).andReturn(server).once();
    EasyMock.replay(selector);
    QueryHostFinder queryRunner = new QueryHostFinder(brokerSelector);
    Server server = queryRunner.findServer(new TimeBoundaryQuery(new TableDataSource("test"), new MultipleIntervalSegmentSpec(Arrays.<Interval>asList(new Interval("2011-08-31/2011-09-01"))), null, null, null));
    Assert.assertEquals("foo", server.getHost());
}
Also used: DruidServer (io.druid.client.DruidServer), Server (io.druid.client.selector.Server), TableDataSource (io.druid.query.TableDataSource), MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec), TimeBoundaryQuery (io.druid.query.timeboundary.TimeBoundaryQuery), Pair (io.druid.java.util.common.Pair), Interval (org.joda.time.Interval), Test (org.junit.Test)
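
The test wires two layers of EasyMock mocks: the broker selector returns a (name, selector) pair, and the selector's pick() returns the server that findServer() should hand back. A minimal sketch of the same record/replay/verify flow, using a hypothetical HostPicker interface rather than the Druid classes:

import org.easymock.EasyMock;

public class HostPickerMockSketch {
    // Hypothetical interface standing in for the broker/server selector pair.
    interface HostPicker {
        String pick(String query);
    }

    public static void main(String[] args) {
        HostPicker picker = EasyMock.createMock(HostPicker.class);
        // Record: any query string maps to a fixed host, exactly once.
        EasyMock.expect(picker.pick(EasyMock.<String>anyObject())).andReturn("foo:8082").once();
        EasyMock.replay(picker);

        // Exercise the mock as the code under test would.
        String host = picker.pick("select * from test");
        if (!"foo:8082".equals(host)) {
            throw new AssertionError("unexpected host " + host);
        }
        // Verify the recorded expectation was consumed.
        EasyMock.verify(picker);
    }
}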

Example 18 with TableDataSource

use of io.druid.query.TableDataSource in project druid by druid-io.

the class ServerManager method getQueryRunnerForIntervals.

@Override
public <T> QueryRunner<T> getQueryRunnerForIntervals(Query<T> query, Iterable<Interval> intervals) {
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    if (factory == null) {
        throw new ISE("Unknown query type[%s].", query.getClass());
    }
    final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    final Function<Query<T>, ServiceMetricEvent.Builder> builderFn = getBuilderFn(toolChest);
    final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
    DataSource dataSource = query.getDataSource();
    if (!(dataSource instanceof TableDataSource)) {
        throw new UnsupportedOperationException("data source type '" + dataSource.getClass().getName() + "' unsupported");
    }
    String dataSourceName = getDataSourceName(dataSource);
    final VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline = dataSources.get(dataSourceName);
    if (timeline == null) {
        return new NoopQueryRunner<T>();
    }
    FunctionalIterable<QueryRunner<T>> queryRunners = FunctionalIterable.create(intervals).transformCat(new Function<Interval, Iterable<TimelineObjectHolder<String, ReferenceCountingSegment>>>() {

        @Override
        public Iterable<TimelineObjectHolder<String, ReferenceCountingSegment>> apply(Interval input) {
            return timeline.lookup(input);
        }
    }).transformCat(new Function<TimelineObjectHolder<String, ReferenceCountingSegment>, Iterable<QueryRunner<T>>>() {

        @Override
        public Iterable<QueryRunner<T>> apply(@Nullable final TimelineObjectHolder<String, ReferenceCountingSegment> holder) {
            if (holder == null) {
                return null;
            }
            return FunctionalIterable.create(holder.getObject()).transform(new Function<PartitionChunk<ReferenceCountingSegment>, QueryRunner<T>>() {

                @Override
                public QueryRunner<T> apply(PartitionChunk<ReferenceCountingSegment> input) {
                    return buildAndDecorateQueryRunner(factory, toolChest, input.getObject(), new SegmentDescriptor(holder.getInterval(), holder.getVersion(), input.getChunkNumber()), builderFn, cpuTimeAccumulator);
                }
            });
        }
    });
    return CPUTimeMetricQueryRunner.safeBuild(new FinalizeResultsQueryRunner<T>(toolChest.mergeResults(factory.mergeRunners(exec, queryRunners)), toolChest), builderFn, emitter, cpuTimeAccumulator, true);
}
Also used: ReferenceCountingSegment (io.druid.segment.ReferenceCountingSegment), Query (io.druid.query.Query), FunctionalIterable (io.druid.java.util.common.guava.FunctionalIterable), Function (com.google.common.base.Function), NoopQueryRunner (io.druid.query.NoopQueryRunner), SegmentDescriptor (io.druid.query.SegmentDescriptor), ISE (io.druid.java.util.common.ISE), PartitionChunk (io.druid.timeline.partition.PartitionChunk), MetricsEmittingQueryRunner (io.druid.query.MetricsEmittingQueryRunner), ReportTimelineMissingSegmentQueryRunner (io.druid.query.ReportTimelineMissingSegmentQueryRunner), BySegmentQueryRunner (io.druid.query.BySegmentQueryRunner), SpecificSegmentQueryRunner (io.druid.query.spec.SpecificSegmentQueryRunner), ReferenceCountingSegmentQueryRunner (io.druid.query.ReferenceCountingSegmentQueryRunner), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), CPUTimeMetricQueryRunner (io.druid.query.CPUTimeMetricQueryRunner), CachingQueryRunner (io.druid.client.CachingQueryRunner), QueryRunner (io.druid.query.QueryRunner), TableDataSource (io.druid.query.TableDataSource), DataSource (io.druid.query.DataSource), AtomicLong (java.util.concurrent.atomic.AtomicLong), TimelineObjectHolder (io.druid.timeline.TimelineObjectHolder), Interval (org.joda.time.Interval)
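
getQueryRunnerForIntervals only accepts table data sources; any other DataSource type is rejected up front. A minimal sketch of that guard, assuming TableDataSource exposes its table name via getName() at this version of io.druid.query.TableDataSource; the resolveTableName helper is a hypothetical stand-in for ServerManager's getDataSourceName():

import io.druid.query.DataSource;
import io.druid.query.TableDataSource;

public class DataSourceGuard {
    // Reject anything that is not a plain table, then unwrap the table name.
    static String resolveTableName(DataSource dataSource) {
        if (!(dataSource instanceof TableDataSource)) {
            throw new UnsupportedOperationException(
                "data source type '" + dataSource.getClass().getName() + "' unsupported"
            );
        }
        return ((TableDataSource) dataSource).getName();
    }

    public static void main(String[] args) {
        System.out.println(resolveTableName(new TableDataSource("wikipedia")));
    }
}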

Example 19 with TableDataSource

use of io.druid.query.TableDataSource in project druid by druid-io.

the class ClientInfoResource method getDatasource.

@GET
@Path("/{dataSourceName}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Map<String, Object> getDatasource(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") String interval, @QueryParam("full") String full) {
    if (full == null) {
        return ImmutableMap.<String, Object>of(KEY_DIMENSIONS, getDatasourceDimensions(dataSourceName, interval), KEY_METRICS, getDatasourceMetrics(dataSourceName, interval));
    }
    Interval theInterval;
    if (interval == null || interval.isEmpty()) {
        DateTime now = getCurrentTime();
        theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
        theInterval = new Interval(interval);
    }
    TimelineLookup<String, ServerSelector> timeline = timelineServerView.getTimeline(new TableDataSource(dataSourceName));
    Iterable<TimelineObjectHolder<String, ServerSelector>> serversLookup = timeline != null ? timeline.lookup(theInterval) : null;
    if (serversLookup == null || Iterables.isEmpty(serversLookup)) {
        return Collections.EMPTY_MAP;
    }
    Map<Interval, Object> servedIntervals = new TreeMap<>(new Comparator<Interval>() {

        @Override
        public int compare(Interval o1, Interval o2) {
            if (o1.equals(o2) || o1.overlaps(o2)) {
                return 0;
            } else {
                return o1.isBefore(o2) ? -1 : 1;
            }
        }
    });
    for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) {
        final Set<Object> dimensions = Sets.newHashSet();
        final Set<Object> metrics = Sets.newHashSet();
        final PartitionHolder<ServerSelector> partitionHolder = holder.getObject();
        if (partitionHolder.isComplete()) {
            for (ServerSelector server : partitionHolder.payloads()) {
                final DataSegment segment = server.getSegment();
                dimensions.addAll(segment.getDimensions());
                metrics.addAll(segment.getMetrics());
            }
        }
        servedIntervals.put(holder.getInterval(), ImmutableMap.of(KEY_DIMENSIONS, dimensions, KEY_METRICS, metrics));
    }
    //collapse intervals if they abut and have same set of columns
    Map<String, Object> result = Maps.newLinkedHashMap();
    Interval curr = null;
    Map<String, Set<String>> cols = null;
    for (Map.Entry<Interval, Object> e : servedIntervals.entrySet()) {
        Interval ival = e.getKey();
        if (curr != null && curr.abuts(ival) && cols.equals(e.getValue())) {
            curr = curr.withEnd(ival.getEnd());
        } else {
            if (curr != null) {
                result.put(curr.toString(), cols);
            }
            curr = ival;
            cols = (Map<String, Set<String>>) e.getValue();
        }
    }
    //add the last one in
    if (curr != null) {
        result.put(curr.toString(), cols);
    }
    return result;
}
Also used: Set (java.util.Set), TreeMap (java.util.TreeMap), DataSegment (io.druid.timeline.DataSegment), DateTime (org.joda.time.DateTime), ServerSelector (io.druid.client.selector.ServerSelector), TimelineObjectHolder (io.druid.timeline.TimelineObjectHolder), TableDataSource (io.druid.query.TableDataSource), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), HashMap (java.util.HashMap), Interval (org.joda.time.Interval), Path (javax.ws.rs.Path), ResourceFilters (com.sun.jersey.spi.container.ResourceFilters), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
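
The tail of getDatasource collapses adjacent entries whose intervals abut and whose column sets match. A self-contained sketch of that collapsing pass over joda-time intervals, with hypothetical column sets in place of the dimension/metric maps:

import org.joda.time.Interval;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

public class IntervalCollapser {
    // Merge runs of abutting intervals that carry the same column set.
    static Map<String, Set<String>> collapse(Map<Interval, Set<String>> served) {
        Map<String, Set<String>> result = new LinkedHashMap<>();
        Interval curr = null;
        Set<String> cols = null;
        for (Map.Entry<Interval, Set<String>> e : served.entrySet()) {
            Interval ival = e.getKey();
            if (curr != null && curr.abuts(ival) && cols.equals(e.getValue())) {
                curr = curr.withEnd(ival.getEnd());   // extend the current run
            } else {
                if (curr != null) {
                    result.put(curr.toString(), cols);
                }
                curr = ival;
                cols = e.getValue();
            }
        }
        if (curr != null) {
            result.put(curr.toString(), cols);        // flush the last run
        }
        return result;
    }

    public static void main(String[] args) {
        // Hypothetical input: the first two intervals abut and share columns, so they merge.
        Map<Interval, Set<String>> served = ImmutableMap.of(
            new Interval("2011-04-01/2011-04-02"), ImmutableSet.of("page", "count"),
            new Interval("2011-04-02/2011-04-03"), ImmutableSet.of("page", "count"),
            new Interval("2011-04-03/2011-04-04"), ImmutableSet.of("page")
        );
        System.out.println(collapse(served));
    }
}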

Example 20 with TableDataSource

use of io.druid.query.TableDataSource in project druid by druid-io.

the class BrokerServerViewTest method testMultipleServerAddedRemovedSegment.

@Test
public void testMultipleServerAddedRemovedSegment() throws Exception {
    segmentViewInitLatch = new CountDownLatch(1);
    segmentAddedLatch = new CountDownLatch(5);
    // temporarily set latch count to 1
    segmentRemovedLatch = new CountDownLatch(1);
    setupViews();
    final List<DruidServer> druidServers = Lists.transform(ImmutableList.<String>of("localhost:0", "localhost:1", "localhost:2", "localhost:3", "localhost:4"), new Function<String, DruidServer>() {

        @Override
        public DruidServer apply(String input) {
            return new DruidServer(input, input, 10000000L, "historical", "default_tier", 0);
        }
    });
    for (DruidServer druidServer : druidServers) {
        setupZNodeForServer(druidServer, zkPathsConfig, jsonMapper);
    }
    final List<DataSegment> segments = Lists.transform(ImmutableList.<Pair<String, String>>of(Pair.of("2011-04-01/2011-04-03", "v1"), Pair.of("2011-04-03/2011-04-06", "v1"), Pair.of("2011-04-01/2011-04-09", "v2"), Pair.of("2011-04-06/2011-04-09", "v3"), Pair.of("2011-04-01/2011-04-02", "v3")), new Function<Pair<String, String>, DataSegment>() {

        @Override
        public DataSegment apply(Pair<String, String> input) {
            return dataSegmentWithIntervalAndVersion(input.lhs, input.rhs);
        }
    });
    for (int i = 0; i < 5; ++i) {
        announceSegmentForServer(druidServers.get(i), segments.get(i), zkPathsConfig, jsonMapper);
    }
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentViewInitLatch));
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentAddedLatch));
    TimelineLookup timeline = brokerServerView.getTimeline(new TableDataSource("test_broker_server_view"));
    assertValues(Arrays.asList(createExpected("2011-04-01/2011-04-02", "v3", druidServers.get(4), segments.get(4)), createExpected("2011-04-02/2011-04-06", "v2", druidServers.get(2), segments.get(2)), createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3))), (List<TimelineObjectHolder>) timeline.lookup(new Interval("2011-04-01/2011-04-09")));
    // unannounce the segment created by dataSegmentWithIntervalAndVersion("2011-04-01/2011-04-09", "v2")
    unannounceSegmentForServer(druidServers.get(2), segments.get(2), zkPathsConfig);
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentRemovedLatch));
    // renew segmentRemovedLatch since we still have 4 segments to unannounce
    segmentRemovedLatch = new CountDownLatch(4);
    timeline = brokerServerView.getTimeline(new TableDataSource("test_broker_server_view"));
    assertValues(Arrays.asList(createExpected("2011-04-01/2011-04-02", "v3", druidServers.get(4), segments.get(4)), createExpected("2011-04-02/2011-04-03", "v1", druidServers.get(0), segments.get(0)), createExpected("2011-04-03/2011-04-06", "v1", druidServers.get(1), segments.get(1)), createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3))), (List<TimelineObjectHolder>) timeline.lookup(new Interval("2011-04-01/2011-04-09")));
    // unannounce all the segments
    for (int i = 0; i < 5; ++i) {
        // skip the one that was previously unannounced
        if (i != 2) {
            unannounceSegmentForServer(druidServers.get(i), segments.get(i), zkPathsConfig);
        }
    }
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentRemovedLatch));
    Assert.assertEquals(0, ((List<TimelineObjectHolder>) timeline.lookup(new Interval("2011-04-01/2011-04-09"))).size());
}
Also used: CountDownLatch (java.util.concurrent.CountDownLatch), DataSegment (io.druid.timeline.DataSegment), TimelineObjectHolder (io.druid.timeline.TimelineObjectHolder), TableDataSource (io.druid.query.TableDataSource), TimelineLookup (io.druid.timeline.TimelineLookup), Pair (io.druid.java.util.common.Pair), Interval (org.joda.time.Interval), Test (org.junit.Test)
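
The test coordinates asynchronous segment announcements with CountDownLatch instances that it re-creates once consumed ("renew segmentRemovedLatch"). A minimal sketch of that latch-renewal pattern, with a hypothetical background executor standing in for the ZooKeeper announcement callbacks:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class LatchRenewalSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService exec = Executors.newSingleThreadExecutor();

        // Phase 1: wait for a single event before making the first round of assertions.
        CountDownLatch first = new CountDownLatch(1);
        exec.submit(first::countDown);
        if (!first.await(10, TimeUnit.SECONDS)) {
            throw new AssertionError("timed out waiting for first removal");
        }

        // Phase 2: "renew" the latch for the remaining four events, as the test does.
        CountDownLatch second = new CountDownLatch(4);
        for (int i = 0; i < 4; i++) {
            exec.submit(second::countDown);
        }
        if (!second.await(10, TimeUnit.SECONDS)) {
            throw new AssertionError("timed out waiting for remaining removals");
        }

        exec.shutdown();
    }
}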

Aggregations

TableDataSource (io.druid.query.TableDataSource) 25
Interval (org.joda.time.Interval) 18
Test (org.junit.Test) 17
Result (io.druid.query.Result) 8
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec) 7
TimelineObjectHolder (io.druid.timeline.TimelineObjectHolder) 7
DateTime (org.joda.time.DateTime) 7
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 6
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper) 5
DataSegment (io.druid.timeline.DataSegment) 5
Pair (io.druid.java.util.common.Pair) 4
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory) 4
TimelineLookup (io.druid.timeline.TimelineLookup) 4
CountDownLatch (java.util.concurrent.CountDownLatch) 4
Function (com.google.common.base.Function) 3
ImmutableMap (com.google.common.collect.ImmutableMap) 3
Query (io.druid.query.Query) 3
QueryRunner (io.druid.query.QueryRunner) 3
SegmentDescriptor (io.druid.query.SegmentDescriptor) 3
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory) 3