Example 21 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

From class TiersResource, method getTierDatasources.

@GET
@Path("/{tierName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTierDatasources(@PathParam("tierName") String tierName, @QueryParam("simple") String simple) {
    if (simple != null) {
        // "simple" mode: aggregate per-(dataSource, interval) segment size and count.
        Table<String, Interval, Map<String, Object>> retVal = HashBasedTable.create();
        for (DruidServer druidServer : serverInventoryView.getInventory()) {
            if (druidServer.getTier().equalsIgnoreCase(tierName)) {
                for (DataSegment dataSegment : druidServer.getSegments().values()) {
                    Map<String, Object> properties = retVal.get(dataSegment.getDataSource(), dataSegment.getInterval());
                    if (properties == null) {
                        // First segment seen for this (dataSource, interval) pair.
                        properties = Maps.newHashMap();
                        retVal.put(dataSegment.getDataSource(), dataSegment.getInterval(), properties);
                    }
                    properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                    properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                }
            }
        }
        return Response.ok(retVal.rowMap()).build();
    }
    // Default mode: just the distinct data source names served by this tier.
    Set<String> retVal = Sets.newHashSet();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
        if (druidServer.getTier().equalsIgnoreCase(tierName)) {
            retVal.addAll(Lists.newArrayList(Iterables.transform(druidServer.getDataSources(), new Function<DruidDataSource, String>() {

                @Override
                public String apply(DruidDataSource input) {
                    return input.getName();
                }
            })));
        }
    }
    return Response.ok(retVal).build();
}
Also used : DruidServer(io.druid.client.DruidServer) Map(java.util.Map) DataSegment(io.druid.timeline.DataSegment) DruidDataSource(io.druid.client.DruidDataSource) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
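
The pattern worth noting here is the Guava Table keyed by data source and Interval, used to accumulate per-interval totals. Below is a minimal, self-contained sketch of that accumulation idiom; the class name, the addSegment helper, and the segment values are invented for illustration and are not part of Druid.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import org.joda.time.Interval;

import java.util.HashMap;
import java.util.Map;

public class IntervalTableSketch {

    // Adds one segment's size into the running totals for its (dataSource, interval) cell.
    static void addSegment(Table<String, Interval, Map<String, Object>> stats, String dataSource, Interval interval, long segmentSize) {
        Map<String, Object> properties = stats.get(dataSource, interval);
        if (properties == null) {
            // Lazily create the cell the first time this (dataSource, interval) is seen.
            properties = new HashMap<String, Object>();
            stats.put(dataSource, interval, properties);
        }
        Long size = (Long) properties.get("size");
        Integer count = (Integer) properties.get("count");
        properties.put("size", (size == null ? 0L : size) + segmentSize);
        properties.put("count", (count == null ? 0 : count) + 1);
    }

    public static void main(String[] args) {
        Table<String, Interval, Map<String, Object>> stats = HashBasedTable.create();
        Interval day = new Interval("2011-01-01/2011-01-02");
        addSegment(stats, "wikipedia", day, 1024L);
        addSegment(stats, "wikipedia", day, 2048L);
        // rowMap() yields the nested Map view the resource returns, e.g. {wikipedia={...={size=3072, count=2}}}
        System.out.println(stats.rowMap());
    }
}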

Example 22 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

From class DruidCoordinatorSegmentKiller, method run.

@Override
public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) {
    boolean killAllDataSources = params.getCoordinatorDynamicConfig().isKillAllDataSources();
    Collection<String> whitelist = params.getCoordinatorDynamicConfig().getKillDataSourceWhitelist();
    if (killAllDataSources && whitelist != null && !whitelist.isEmpty()) {
        log.error("killAllDataSources can't be true when killDataSourceWhitelist is non-empty, No kill tasks are scheduled.");
        return params;
    }
    if (killAllDataSources) {
        whitelist = segmentManager.getAllDatasourceNames();
    }
    // Throttle: schedule kill tasks at most once per configured period.
    if (whitelist != null && whitelist.size() > 0 && (lastKillTime + period) < System.currentTimeMillis()) {
        lastKillTime = System.currentTimeMillis();
        for (String dataSource : whitelist) {
            final Interval intervalToKill = findIntervalForKillTask(dataSource, maxSegmentsToKill);
            if (intervalToKill != null) {
                try {
                    indexingServiceClient.killSegments(dataSource, intervalToKill);
                } catch (Exception ex) {
                    log.error(ex, "Failed to submit kill task for dataSource [%s]", dataSource);
                    if (Thread.currentThread().isInterrupted()) {
                        log.warn("skipping kill task scheduling because thread is interrupted.");
                        break;
                    }
                }
            }
        }
    }
    return params;
}
Also used : Interval(org.joda.time.Interval)
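
The coordinator relies on findIntervalForKillTask to hand back a single Interval covering the unused segments for a data source. As a hedged sketch of the idea, here is one way such an umbrella interval could be computed from candidate segment intervals; the umbrella helper is illustrative and is not Druid's actual implementation.

import org.joda.time.Interval;

import java.util.Arrays;
import java.util.List;

public class UmbrellaIntervalSketch {

    // Smallest single Interval that covers every candidate, or null when there are none.
    static Interval umbrella(List<Interval> candidates) {
        if (candidates.isEmpty()) {
            return null;
        }
        long start = Long.MAX_VALUE;
        long end = Long.MIN_VALUE;
        for (Interval candidate : candidates) {
            start = Math.min(start, candidate.getStartMillis());
            end = Math.max(end, candidate.getEndMillis());
        }
        return new Interval(start, end);
    }

    public static void main(String[] args) {
        List<Interval> segments = Arrays.asList(
            new Interval("2011-01-01/2011-01-02"),
            new Interval("2011-01-05/2011-01-06"));
        // Covers 2011-01-01 through 2011-01-06, including the gap between segments.
        System.out.println(umbrella(segments));
    }
}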

Example 23 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

From class CachingClusteredClientTest, method testTopNOnPostAggMetricCaching.

@Test
public void testTopNOnPostAggMetricCaching() {
    final TopNQueryBuilder builder = new TopNQueryBuilder()
        .dataSource(DATA_SOURCE)
        .dimension(TOP_DIM)
        .metric("avg_imps_per_row_double")
        .threshold(3)
        .intervals(SEG_SPEC)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT);
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new TopNQueryQueryToolChest(
            new TopNQueryConfig(),
            QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()));
    testQueryCaching(
        runner,
        builder.build(),
        new Interval("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(),
        new Interval("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(),
        new Interval("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename(
            new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
            new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983),
        new Interval("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename(
            new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
            new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983));
    HashMap<String, List> context = new HashMap<String, List>();
    TestHelper.assertExpectedResults(
        makeTopNResultsWithoutRename(
            new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
            new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
            new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983,
            new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983),
        runner.run(
            builder.intervals("2011-01-01/2011-01-10")
                .metric("avg_imps_per_row_double")
                .aggregators(AGGS)
                .postAggregators(DIFF_ORDER_POST_AGGS)
                .build(),
            context));
}
Also used : TopNQueryBuilder(io.druid.query.topn.TopNQueryBuilder) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) TopNQueryConfig(io.druid.query.topn.TopNQueryConfig) HashMap(java.util.HashMap) TopNQueryQueryToolChest(io.druid.query.topn.TopNQueryQueryToolChest) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) DateTime(org.joda.time.DateTime) Interval(org.joda.time.Interval) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)
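
The test data above leans on the semantics of ISO-8601 interval strings such as "2011-01-01/2011-01-02". A small sketch of the Joda-Time behavior the expected results depend on: intervals are half-open, so adjacent day intervals abut without overlapping.

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalSemanticsSketch {
    public static void main(String[] args) {
        Interval day1 = new Interval("2011-01-01/2011-01-02");
        Interval day2 = new Interval("2011-01-02/2011-01-03");

        // Half-open: the start instant is inside the interval, the end instant is not.
        System.out.println(day1.contains(new DateTime("2011-01-01"))); // true
        System.out.println(day1.contains(new DateTime("2011-01-02"))); // false

        // Adjacent day intervals therefore abut rather than overlap.
        System.out.println(day1.overlaps(day2)); // false
        System.out.println(day1.abuts(day2));    // true
    }
}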

Example 24 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

From class CachingClusteredClientTest, method testDisableUseCache.

@Test
public void testDisableUseCache() throws Exception {
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT);
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()));
    // populateCache=true, useCache=false: writes one entry, never reads.
    testQueryCaching(
        runner,
        1,
        true,
        builder.context(ImmutableMap.<String, Object>of("useCache", "false", "populateCache", "true")).build(),
        new Interval("2011-01-01/2011-01-02"),
        makeTimeResults(new DateTime("2011-01-01"), 50, 5000));
    Assert.assertEquals(1, cache.getStats().getNumEntries());
    Assert.assertEquals(0, cache.getStats().getNumHits());
    Assert.assertEquals(0, cache.getStats().getNumMisses());
    cache.close("0_0");
    // Both flags false: the cache is neither read nor written.
    testQueryCaching(
        runner,
        1,
        false,
        builder.context(ImmutableMap.<String, Object>of("useCache", "false", "populateCache", "false")).build(),
        new Interval("2011-01-01/2011-01-02"),
        makeTimeResults(new DateTime("2011-01-01"), 50, 5000));
    Assert.assertEquals(0, cache.getStats().getNumEntries());
    Assert.assertEquals(0, cache.getStats().getNumHits());
    Assert.assertEquals(0, cache.getStats().getNumMisses());
    // useCache=true against the emptied cache: one miss, nothing repopulated.
    testQueryCaching(
        client,
        1,
        false,
        builder.context(ImmutableMap.<String, Object>of("useCache", "true", "populateCache", "false")).build(),
        new Interval("2011-01-01/2011-01-02"),
        makeTimeResults(new DateTime("2011-01-01"), 50, 5000));
    Assert.assertEquals(0, cache.getStats().getNumEntries());
    Assert.assertEquals(0, cache.getStats().getNumHits());
    Assert.assertEquals(1, cache.getStats().getNumMisses());
}
Also used : FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) Druids(io.druid.query.Druids) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) DateTime(org.joda.time.DateTime) Interval(org.joda.time.Interval) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)
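
The test drives caching entirely through Druid's per-query context flags: "useCache" gates reads and "populateCache" gates writes, which is why the first query stores one entry without registering a hit or a miss. The sketch below shows how a runner might consult such flags; the flag helper is invented for illustration and is not Druid's actual plumbing.

import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class CacheFlagsSketch {

    // Reads a boolean flag from a query context map, falling back to a default.
    static boolean flag(Map<String, Object> context, String key, boolean defaultValue) {
        Object value = context.get(key);
        return value == null ? defaultValue : Boolean.parseBoolean(value.toString());
    }

    public static void main(String[] args) {
        Map<String, Object> context = ImmutableMap.<String, Object>of("useCache", "false", "populateCache", "true");
        // With this context the cache is never consulted (no hits, no misses)
        // but fresh results are still stored (one new entry).
        System.out.println("read from cache: " + flag(context, "useCache", true));
        System.out.println("write to cache:  " + flag(context, "populateCache", true));
    }
}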

Example 25 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

From class CachingClusteredClientTest, method testSelectCaching.

@Test
public void testSelectCaching() throws Exception {
    final Set<String> dimensions = Sets.<String>newHashSet("a");
    final Set<String> metrics = Sets.<String>newHashSet("rows");
    Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .dimensions(Arrays.asList("a"))
        .metrics(Arrays.asList("rows"))
        .pagingSpec(new PagingSpec(null, 3))
        .context(CONTEXT);
    testQueryCaching(
        client,
        builder.build(),
        new Interval("2011-01-01/2011-01-02"), makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)),
        new Interval("2011-01-02/2011-01-03"), makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)),
        new Interval("2011-01-05/2011-01-10"), makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9)),
        new Interval("2011-01-05/2011-01-10"), makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)));
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new SelectQueryQueryToolChest(
            jsonMapper,
            QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator(),
            selectConfigSupplier));
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1),
            new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5),
            new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)),
        runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context));
}
Also used : HashMap(java.util.HashMap) DateTime(org.joda.time.DateTime) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) SelectQueryQueryToolChest(io.druid.query.select.SelectQueryQueryToolChest) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) PagingSpec(io.druid.query.select.PagingSpec) Druids(io.druid.query.Druids) Interval(org.joda.time.Interval) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)
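
Notice how the expected results interleave the two cached "2011-01-05/2011-01-10" result sets in timestamp order. A toy sketch of that merge step is below; the types are simplified for illustration (the real client merges Druid Sequences of result rows, not bare DateTimes).

import org.joda.time.DateTime;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class MergeByTimestampSketch {
    public static void main(String[] args) {
        // Two cached per-interval result sets, each already sorted by timestamp.
        List<DateTime> onTheHour = Arrays.asList(new DateTime("2011-01-05"), new DateTime("2011-01-06"));
        List<DateTime> offsetByAnHour = Arrays.asList(new DateTime("2011-01-05T01"), new DateTime("2011-01-06T01"));

        // Interleave into one timestamp-ordered stream, matching the order of
        // the expected results (2011-01-05, 2011-01-05T01, 2011-01-06, ...).
        List<DateTime> merged = new ArrayList<DateTime>(onTheHour);
        merged.addAll(offsetByAnHour);
        Collections.sort(merged);
        System.out.println(merged);
    }
}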

Aggregations

Classes most frequently used together with Interval across these search results, with occurrence counts:

Interval (org.joda.time.Interval): 555
Test (org.junit.Test): 340
DateTime (org.joda.time.DateTime): 200
DataSegment (io.druid.timeline.DataSegment): 138
ArrayList (java.util.ArrayList): 59
Map (java.util.Map): 54
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 49
QueryRunner (io.druid.query.QueryRunner): 47
List (java.util.List): 47
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 45
Result (io.druid.query.Result): 44
File (java.io.File): 42
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 41
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 40
HashMap (java.util.HashMap): 37
IOException (java.io.IOException): 35
Period (org.joda.time.Period): 30
Test (org.testng.annotations.Test): 29
DruidServer (io.druid.client.DruidServer): 27
Sequence (io.druid.java.util.common.guava.Sequence): 26