Example 81 with DateTime

Use of org.joda.time.DateTime in project druid by druid-io.

The class TimeBoundaryQueryQueryToolChestTest, method testCacheStrategy:

@Test
public void testCacheStrategy() throws Exception {
    CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery> strategy =
        new TimeBoundaryQueryQueryToolChest().getCacheStrategy(new TimeBoundaryQuery(
            new TableDataSource("dummy"),
            new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
            null, null, null));
    final Result<TimeBoundaryResultValue> result = new Result<>(
        new DateTime(123L),
        new TimeBoundaryResultValue(ImmutableMap.of(
            TimeBoundaryQuery.MIN_TIME, new DateTime(0L).toString(),
            TimeBoundaryQuery.MAX_TIME, new DateTime("2015-01-01").toString())));
    Object preparedValue = strategy.prepareForCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    Object fromCacheValue = objectMapper.readValue(
        objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<TimeBoundaryResultValue> fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
}
Also used : MultipleIntervalSegmentSpec(io.druid.query.spec.MultipleIntervalSegmentSpec) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) TableDataSource(io.druid.query.TableDataSource) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Interval(org.joda.time.Interval) Test(org.junit.Test)
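
The round-trip above leans on two different DateTime constructors: new DateTime(123L) interprets its argument as milliseconds since the epoch, while new DateTime("2015-01-01") parses an ISO-8601 string. A minimal standalone sketch of that distinction (not part of the Druid test; the class name DateTimeConstructionSketch is made up, and the zone is pinned to UTC so the output is deterministic):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class DateTimeConstructionSketch {
    public static void main(String[] args) {
        // Milliseconds since the epoch -> 1970-01-01T00:00:00.123Z when pinned to UTC.
        DateTime fromMillis = new DateTime(123L, DateTimeZone.UTC);
        // ISO-8601 date string -> 2015-01-01T00:00:00.000Z in UTC.
        DateTime fromString = new DateTime("2015-01-01", DateTimeZone.UTC);
        System.out.println(fromMillis + " / " + fromString);
    }
}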

Example 82 with DateTime

Use of org.joda.time.DateTime in project druid by druid-io.

The class TopNBinaryFnBenchmark, method setUp:

@Override
protected void setUp() throws Exception {
    final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
    final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
    final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");
    final List<AggregatorFactory> aggregatorFactories = new ArrayList<>();
    aggregatorFactories.add(new CountAggregatorFactory("rows"));
    aggregatorFactories.add(new LongSumAggregatorFactory("index", "index"));
    for (int i = 1; i < aggCount; i++) {
        aggregatorFactories.add(new CountAggregatorFactory("rows" + i));
    }
    final List<PostAggregator> postAggregators = new ArrayList<>();
    for (int i = 0; i < postAggCount; i++) {
        postAggregators.add(new ArithmeticPostAggregator("addrowsindexconstant" + i, "+", Lists.newArrayList(constant, rowsPostAgg, indexPostAgg)));
    }
    final DateTime currTime = new DateTime();
    List<Map<String, Object>> list = new ArrayList<>();
    for (int i = 0; i < threshold; i++) {
        Map<String, Object> res = new HashMap<>();
        res.put("testdim", "" + i);
        res.put("rows", 1L);
        for (int j = 0; j < aggCount; j++) {
            res.put("rows" + j, 1L);
        }
        res.put("index", 1L);
        list.add(res);
    }
    result1 = new Result<>(currTime, new TopNResultValue(list));
    List<Map<String, Object>> list2 = new ArrayList<>();
    for (int i = 0; i < threshold; i++) {
        Map<String, Object> res = new HashMap<>();
        res.put("testdim", "" + i);
        res.put("rows", 2L);
        for (int j = 0; j < aggCount; j++) {
            res.put("rows" + j, 2L);
        }
        res.put("index", 2L);
        list2.add(res);
    }
    result2 = new Result<>(currTime, new TopNResultValue(list2));
    fn = new TopNBinaryFn(
        TopNResultMerger.identity,
        Granularities.ALL,
        new DefaultDimensionSpec("testdim", null),
        new NumericTopNMetricSpec("index"),
        100,
        aggregatorFactories,
        postAggregators);
}
Also used : ArithmeticPostAggregator(io.druid.query.aggregation.post.ArithmeticPostAggregator) FieldAccessPostAggregator(io.druid.query.aggregation.post.FieldAccessPostAggregator) PostAggregator(io.druid.query.aggregation.PostAggregator) ConstantPostAggregator(io.druid.query.aggregation.post.ConstantPostAggregator) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) DateTime(org.joda.time.DateTime) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) Map(java.util.Map)
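
The result1, result2, and fn fields prepared here are presumably exercised by the benchmark's timing method, which this excerpt does not show. A hedged sketch of what that step might look like, assuming TopNBinaryFn follows the usual BinaryFn contract of apply(arg1, arg2) (the method name timeMergeResults is hypothetical):

// Hypothetical timing method for the same benchmark class, not in the original source.
// It merges the two partial TopN results built in setUp(); both share the same
// timestamp, so the combined Result keeps it and merges the per-row aggregations.
public void timeMergeResults(int reps) {
    for (int i = 0; i < reps; i++) {
        Result<TopNResultValue> merged = fn.apply(result1, result2);
        if (merged == null) {
            throw new IllegalStateException("unexpected null merge result");
        }
    }
}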

Example 83 with DateTime

Use of org.joda.time.DateTime in project druid by druid-io.

The class TopNQueryRunnerTest, method testTopNDimExtractionToOne:

@Test
public void testTopNDimExtractionToOne() throws IOException {
    TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(QueryRunnerTestHelper.allGran)
        .dimension(new ExtractionDimensionSpec(
            QueryRunnerTestHelper.marketDimension,
            QueryRunnerTestHelper.marketDimension,
            new JavaScriptExtractionFn("function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance())))
        .metric("rows")
        .threshold(10)
        .intervals(QueryRunnerTestHelper.fullOnInterval)
        .aggregators(QueryRunnerTestHelper.commonAggregators)
        .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
        .build();
    Granularity gran = Granularities.DAY;
    TimeseriesQuery tsQuery = Druids.newTimeseriesQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(gran)
        .intervals(QueryRunnerTestHelper.fullOnInterval)
        .aggregators(Arrays.asList(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum, QueryRunnerTestHelper.qualityUniques))
        .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
        .build();
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(new Result<>(
        new DateTime("2011-01-12T00:00:00.000Z"),
        new TopNResultValue(Arrays.<Map<String, Object>>asList(ImmutableMap.<String, Object>of(
            "addRowsIndexConstant", 504542.5071372986D,
            "index", 503332.5071372986D,
            QueryRunnerTestHelper.marketDimension, "POTATO",
            "uniques", QueryRunnerTestHelper.UNIQUES_9,
            "rows", 1209L)))));
    List<Result<TopNResultValue>> list = Sequences.toList(runWithMerge(query), new ArrayList<Result<TopNResultValue>>());
    Assert.assertEquals(list.size(), 1);
    Assert.assertEquals("Didn't merge results", list.get(0).getValue().getValue().size(), 1);
    TestHelper.assertExpectedResults(expectedResults, list, "Failed to match");
}
Also used : HyperUniqueFinalizingPostAggregator(io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator) PostAggregator(io.druid.query.aggregation.PostAggregator) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) JavaScriptExtractionFn(io.druid.query.extraction.JavaScriptExtractionFn) Granularity(io.druid.java.util.common.granularity.Granularity) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) ExtractionDimensionSpec(io.druid.query.dimension.ExtractionDimensionSpec) Test(org.junit.Test)
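
The point of this test is that the JavaScript extraction maps every market value to the same string, so the TopN collapses to a single row and the merged "rows" count (1209) covers all markets. A small sketch of that extraction behavior in isolation, reusing the constructor arguments from the query above (the local variable names are illustrative only):

// Every input value is rewritten to "POTATO", so grouping on the extracted
// dimension yields exactly one bucket in the TopN result.
JavaScriptExtractionFn extractionFn = new JavaScriptExtractionFn(
    "function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance());
String a = extractionFn.apply("spot");          // "POTATO"
String b = extractionFn.apply("total_market");  // "POTATO"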

Example 84 with DateTime

Use of org.joda.time.DateTime in project druid by druid-io.

The class TopNQueryRunnerTest, method testNumericDimensionTopNWithNullPreviousStop:

@Test
public void testNumericDimensionTopNWithNullPreviousStop() {
    TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(Granularities.ALL)
        .dimension(QueryRunnerTestHelper.marketDimension)
        .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
        .threshold(2)
        .intervals(QueryRunnerTestHelper.secondOnly)
        .aggregators(duplicateAggregators(QueryRunnerTestHelper.rowsCount, new CountAggregatorFactory("rows1")))
        .build();
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(new Result<>(
        new DateTime("2011-04-02T00:00:00.000Z"),
        new TopNResultValue(withDuplicateResults(
            Arrays.asList(
                ImmutableMap.<String, Object>of("market", "spot", "rows", 9L),
                ImmutableMap.<String, Object>of("market", "total_market", "rows", 2L)),
            "rows", "rows1"))));
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, new HashMap<String, Object>()));
}
Also used : CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) Test(org.junit.Test)
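
The metric spec DimensionTopNMetricSpec(null, StringComparators.NUMERIC) orders dimension values as numbers rather than as plain strings, and the null previousStop means the ordering starts from the beginning. A brief sketch of how that comparator differs from lexicographic ordering, assuming StringComparators exposes NUMERIC and LEXICOGRAPHIC as Comparator<String>-style instances:

// Numeric ordering parses the strings as numbers; lexicographic ordering compares characters.
int numeric = StringComparators.NUMERIC.compare("9", "10");             // negative: 9 sorts before 10
int lexicographic = StringComparators.LEXICOGRAPHIC.compare("9", "10"); // positive: "9" sorts after "10"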

Example 85 with DateTime

Use of org.joda.time.DateTime in project druid by druid-io.

The class TopNQueryRunnerTest, method testTopNWithExtractionFilter:

@Test
public void testTopNWithExtractionFilter() {
    Map<String, String> extractionMap = new HashMap<>();
    extractionMap.put("spot", "spot0");
    MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
    TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(QueryRunnerTestHelper.allGran)
        .dimension(QueryRunnerTestHelper.marketDimension)
        .metric("rows")
        .threshold(3)
        .intervals(QueryRunnerTestHelper.firstToThird)
        .aggregators(QueryRunnerTestHelper.commonAggregators)
        .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
        .filters(new ExtractionDimFilter(QueryRunnerTestHelper.marketDimension, "spot0", lookupExtractionFn, null))
        .build();
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(new Result<>(
        new DateTime("2011-04-01T00:00:00.000Z"),
        new TopNResultValue(Arrays.<Map<String, Object>>asList(ImmutableMap.<String, Object>of(
            QueryRunnerTestHelper.marketDimension, "spot",
            "rows", 18L,
            "index", 2231.8768157958984D,
            "addRowsIndexConstant", 2250.8768157958984D,
            "uniques", QueryRunnerTestHelper.UNIQUES_9)))));
    assertExpectedResults(expectedResults, query);
    // Assert the optimization path as well
    final Sequence<Result<TopNResultValue>> retval = runWithPreMergeAndMerge(query);
    TestHelper.assertExpectedResults(expectedResults, retval);
}
Also used : HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) ExtractionDimFilter(io.druid.query.filter.ExtractionDimFilter) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) LookupExtractionFn(io.druid.query.lookup.LookupExtractionFn) MapLookupExtractor(io.druid.query.extraction.MapLookupExtractor) Test(org.junit.Test)
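
The ExtractionDimFilter keeps rows whose extracted market equals "spot0". The constructor arguments above appear to set retainMissingValue = false with no replacement value, so the lookup maps "spot" to "spot0" and every other market to null, which is why only the "spot" rows appear in the expected result. A small sketch of that per-row extraction, assuming LookupExtractionFn exposes the standard ExtractionFn apply(String):

// Inside the same test, after lookupExtractionFn has been built:
String mapped = lookupExtractionFn.apply("spot");           // "spot0" -> matches the filter value
String unmapped = lookupExtractionFn.apply("total_market"); // null    -> row is filtered out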

Aggregations

DateTime (org.joda.time.DateTime): 5683
Test (org.junit.Test): 1667
Test (org.testng.annotations.Test): 774
ArrayList (java.util.ArrayList): 545
Date (java.util.Date): 389
LocalDate (org.joda.time.LocalDate): 374
DateTimeRfc1123 (com.microsoft.rest.DateTimeRfc1123): 353
ResponseBody (okhttp3.ResponseBody): 335
Interval (org.joda.time.Interval): 307
Test (org.junit.jupiter.api.Test): 281
HashMap (java.util.HashMap): 272
BigDecimal (java.math.BigDecimal): 234
DateTimeZone (org.joda.time.DateTimeZone): 220
UUID (java.util.UUID): 195
List (java.util.List): 194
DateTimeFormatter (org.joda.time.format.DateTimeFormatter): 178
Map (java.util.Map): 171
IOException (java.io.IOException): 158
Result (io.druid.query.Result): 153
ServiceCall (com.microsoft.rest.ServiceCall): 148