Example 6 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class CloudFilesDataSegmentPusherTest, method testPush.

@Test
public void testPush() throws Exception {
    ObjectApi objectApi = EasyMock.createStrictMock(ObjectApi.class);
    EasyMock.expect(objectApi.put(EasyMock.anyString(), EasyMock.<Payload>anyObject())).andReturn(null).atLeastOnce();
    EasyMock.replay(objectApi);
    CloudFilesApi api = EasyMock.createStrictMock(CloudFilesApi.class);
    EasyMock.expect(api.getObjectApi(EasyMock.anyString(), EasyMock.anyString())).andReturn(objectApi).atLeastOnce();
    EasyMock.replay(api);
    CloudFilesDataSegmentPusherConfig config = new CloudFilesDataSegmentPusherConfig();
    config.setRegion("region");
    config.setContainer("container");
    config.setBasePath("basePath");
    CloudFilesDataSegmentPusher pusher = new CloudFilesDataSegmentPusher(api, config, new DefaultObjectMapper());
    // Create a mock segment on disk
    File tmp = tempFolder.newFile("version.bin");
    final byte[] data = new byte[] { 0x0, 0x0, 0x0, 0x1 };
    Files.write(data, tmp);
    final long size = data.length;
    DataSegment segmentToPush = new DataSegment(
        "foo",
        new Interval("2015/2016"),
        "0",
        Maps.<String, Object>newHashMap(),
        Lists.<String>newArrayList(),
        Lists.<String>newArrayList(),
        NoneShardSpec.instance(),
        0,
        size
    );
    DataSegment segment = pusher.push(tempFolder.getRoot(), segmentToPush);
    Assert.assertEquals(segmentToPush.getSize(), segment.getSize());
    EasyMock.verify(api);
}
Also used: ObjectApi (org.jclouds.openstack.swift.v1.features.ObjectApi), Payload (org.jclouds.io.Payload), CloudFilesApi (org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), File (java.io.File), DataSegment (io.druid.timeline.DataSegment), Interval (org.joda.time.Interval), Test (org.junit.Test)
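
The Interval handed to the DataSegment above comes from Joda-Time's ISO-8601 interval-spec constructor. A minimal standalone sketch (class name hypothetical, plain Joda-Time with no Druid dependencies) of what "2015/2016" parses to:

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalSpecSketch {
    public static void main(String[] args) {
        // "2015/2016" parses to [2015-01-01T00:00, 2016-01-01T00:00) in the default time zone
        Interval interval = new Interval("2015/2016");
        System.out.println(interval.getStart()); // inclusive start instant
        System.out.println(interval.getEnd());   // exclusive end instant
        System.out.println(interval.contains(new DateTime(2015, 6, 1, 0, 0))); // true
    }
}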

Example 7 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class IncrementalIndexStorageAdapterTest, method testFilterByNull.

@Test
public void testFilterByNull() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", "hi")));
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("sally"), ImmutableMap.<String, Object>of("sally", "bo")));
    GroupByQueryEngine engine = makeGroupByQueryEngine();
    final Sequence<Row> rows = engine.process(
        GroupByQuery.builder()
            .setDataSource("test")
            .setGranularity(Granularities.ALL)
            .setInterval(new Interval(0, new DateTime().getMillis()))
            .addDimension("billy")
            .addDimension("sally")
            .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
            .setDimFilter(DimFilters.dimEquals("sally", (String) null))
            .build(),
        new IncrementalIndexStorageAdapter(index)
    );
    final ArrayList<Row> results = Sequences.toList(rows, Lists.<Row>newArrayList());
    Assert.assertEquals(1, results.size());
    MapBasedRow row = (MapBasedRow) results.get(0);
    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
}
Also used: LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory), DateTime (org.joda.time.DateTime), MapBasedRow (io.druid.data.input.MapBasedRow), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), Row (io.druid.data.input.Row), GroupByQueryEngine (io.druid.query.groupby.GroupByQueryEngine), Interval (org.joda.time.Interval), Test (org.junit.Test)
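
The group-by query above bounds its scan with the two-long-argument constructor, new Interval(0, new DateTime().getMillis()), i.e. the Unix epoch up to now. A minimal standalone sketch (class name hypothetical) of how such an interval behaves:

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class EpochToNowSketch {
    public static void main(String[] args) {
        long now = new DateTime().getMillis();
        // covers every instant from the Unix epoch up to, but not including, now
        Interval all = new Interval(0, now);
        System.out.println(all.contains(now - 1)); // true: last covered millisecond
        System.out.println(all.contains(now));     // false: the end instant is exclusive
        System.out.println(all.getStartMillis());  // 0
    }
}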

Example 8 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class IncrementalIndexStorageAdapterTest, method testSingleValueTopN.

@Test
public void testSingleValueTopN() throws IOException {
    IncrementalIndex index = indexCreator.createIndex();
    DateTime t = DateTime.now();
    index.add(new MapBasedInputRow(t.minus(1).getMillis(), Lists.newArrayList("sally"), ImmutableMap.<String, Object>of("sally", "bo")));
    TopNQueryEngine engine = new TopNQueryEngine(new StupidPool<ByteBuffer>("TopNQueryEngine-bufferPool", new Supplier<ByteBuffer>() {

        @Override
        public ByteBuffer get() {
            return ByteBuffer.allocate(50000);
        }
    }));
    final Iterable<Result<TopNResultValue>> results = Sequences.toList(
        engine.query(
            new TopNQueryBuilder()
                .dataSource("test")
                .granularity(Granularities.ALL)
                .intervals(Lists.newArrayList(new Interval(0, new DateTime().getMillis())))
                .dimension("sally")
                .metric("cnt")
                .threshold(10)
                .aggregators(Lists.<AggregatorFactory>newArrayList(new LongSumAggregatorFactory("cnt", "cnt")))
                .build(),
            new IncrementalIndexStorageAdapter(index)
        ),
        Lists.<Result<TopNResultValue>>newLinkedList()
    );
    Assert.assertEquals(1, Iterables.size(results));
    Assert.assertEquals(1, results.iterator().next().getValue().getValue().size());
}
Also used: TopNQueryBuilder (io.druid.query.topn.TopNQueryBuilder), TopNResultValue (io.druid.query.topn.TopNResultValue), LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), JavaScriptAggregatorFactory (io.druid.query.aggregation.JavaScriptAggregatorFactory), ByteBuffer (java.nio.ByteBuffer), DateTime (org.joda.time.DateTime), TopNQueryEngine (io.druid.query.topn.TopNQueryEngine), Result (io.druid.query.Result), Supplier (com.google.common.base.Supplier), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), Interval (org.joda.time.Interval), Test (org.junit.Test)
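
Besides the Interval list, Example 8 wires its processing buffer through a Guava Supplier. A standalone sketch of that allocation pattern (class name hypothetical, independent of Druid's StupidPool):

import java.nio.ByteBuffer;

import com.google.common.base.Supplier;

public class BufferSupplierSketch {

    static final Supplier<ByteBuffer> BUFFER_SUPPLIER = new Supplier<ByteBuffer>() {
        @Override
        public ByteBuffer get() {
            // hands out a fresh 50,000-byte heap buffer on every call; a pool would recycle these
            return ByteBuffer.allocate(50000);
        }
    };

    public static void main(String[] args) {
        System.out.println(BUFFER_SUPPLIER.get().capacity()); // 50000
    }
}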

Example 9 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class IncrementalIndexStorageAdapterTest, method testCursoringAndIndexUpdationInterleaving.

@Test
public void testCursoringAndIndexUpdationInterleaving() throws Exception {
    final IncrementalIndex index = indexCreator.createIndex();
    final long timestamp = System.currentTimeMillis();
    for (int i = 0; i < 2; i++) {
        index.add(new MapBasedInputRow(timestamp, Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", "v1" + i)));
    }
    final StorageAdapter sa = new IncrementalIndexStorageAdapter(index);
    Sequence<Cursor> cursors = sa.makeCursors(null, new Interval(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false);
    Sequences.toList(Sequences.map(cursors, new Function<Cursor, Object>() {

        @Nullable
        @Override
        public Object apply(Cursor cursor) {
            DimensionSelector dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
            int cardinality = dimSelector.getValueCardinality();
            // index gets more rows at this point, while the other thread is iterating over the cursor
            try {
                for (int i = 0; i < 1; i++) {
                    index.add(new MapBasedInputRow(timestamp, Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", "v2" + i)));
                }
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
            // and then, cursoring continues in the other thread
            while (!cursor.isDone()) {
                IndexedInts row = dimSelector.getRow();
                for (int i : row) {
                    Assert.assertTrue(i < cardinality);
                }
                cursor.advance();
            }
            return null;
        }
    }), new ArrayList<>());
}
Also used: DimensionSelector (io.druid.segment.DimensionSelector), StorageAdapter (io.druid.segment.StorageAdapter), Cursor (io.druid.segment.Cursor), DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), IOException (java.io.IOException), Function (com.google.common.base.Function), IndexedInts (io.druid.segment.data.IndexedInts), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), Interval (org.joda.time.Interval), Test (org.junit.Test)
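
Example 9 builds its cursor interval as a two-minute window centered on the ingestion timestamp. A minimal standalone sketch (class name hypothetical) of that construction:

import org.joda.time.Interval;

public class WindowIntervalSketch {
    public static void main(String[] args) {
        long timestamp = System.currentTimeMillis();
        // one minute on either side of the timestamp; the end instant is exclusive
        Interval window = new Interval(timestamp - 60_000, timestamp + 60_000);
        System.out.println(window.contains(timestamp));          // true
        System.out.println(window.contains(timestamp + 60_000)); // false
        System.out.println(window.toDurationMillis());           // 120000
    }
}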

Example 10 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class SpatialFilterTest, method testSpatialQueryWithOtherSpatialDim.

@Test
public void testSpatialQueryWithOtherSpatialDim() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("test")
        .granularity(Granularities.ALL)
        .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
        .filters(new SpatialDimFilter("spatialIsRad", new RadiusBound(new float[] { 0.0f, 0.0f }, 5)))
        .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val")))
        .build();
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2013-01-01T00:00:00.000Z"),
            new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).build())
        )
    );
    try {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
            new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
            new TimeseriesQueryEngine(),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER
        );
        QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
        TestHelper.assertExpectedResults(expectedResults, runner.run(query, Maps.newHashMap()));
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Also used: TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue), TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery), LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory), TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), DateTime (org.joda.time.DateTime), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), QueryRunner (io.druid.query.QueryRunner), IOException (java.io.IOException), Result (io.druid.query.Result), TimeseriesQueryEngine (io.druid.query.timeseries.TimeseriesQueryEngine), SpatialDimFilter (io.druid.query.filter.SpatialDimFilter), TimeseriesQueryRunnerFactory (io.druid.query.timeseries.TimeseriesQueryRunnerFactory), RadiusBound (io.druid.collections.spatial.search.RadiusBound), Interval (org.joda.time.Interval), Test (org.junit.Test)
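
The spatial query above restricts itself to a six-day window written as a date-only ISO spec. A standalone sketch (class name hypothetical, plain Joda-Time) of what "2013-01-01/2013-01-07" parses to:

import org.joda.time.Days;
import org.joda.time.Interval;

public class DateSpecSketch {
    public static void main(String[] args) {
        // both endpoints parse to midnight in the default time zone; 2013-01-07 itself is excluded
        Interval interval = new Interval("2013-01-01/2013-01-07");
        System.out.println(Days.daysIn(interval).getDays()); // 6
        System.out.println(interval.getStart());             // 2013-01-01T00:00:00.000 (default zone)
    }
}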

Aggregations

Interval (org.joda.time.Interval): 555 usages
Test (org.junit.Test): 340 usages
DateTime (org.joda.time.DateTime): 200 usages
DataSegment (io.druid.timeline.DataSegment): 138 usages
ArrayList (java.util.ArrayList): 59 usages
Map (java.util.Map): 54 usages
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 49 usages
QueryRunner (io.druid.query.QueryRunner): 47 usages
List (java.util.List): 47 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 45 usages
Result (io.druid.query.Result): 44 usages
File (java.io.File): 42 usages
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 41 usages
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 40 usages
HashMap (java.util.HashMap): 37 usages
IOException (java.io.IOException): 35 usages
Period (org.joda.time.Period): 30 usages
Test (org.testng.annotations.Test): 29 usages
DruidServer (io.druid.client.DruidServer): 27 usages
Sequence (io.druid.java.util.common.guava.Sequence): 26 usages