
Example 31 with Interval

use of org.joda.time.Interval in project druid by druid-io.

the class SegmentTransactionalInsertActionTest method testFailTransactional.

@Test
public void testFailTransactional() throws Exception {
    final Task task = new NoopTask(null, 0, 0, null, null, null);
    actionTestKit.getTaskLockbox().add(task);
    actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL));
    SegmentPublishResult result = new SegmentTransactionalInsertAction(
        ImmutableSet.of(SEGMENT1),
        new ObjectMetadata(ImmutableList.of(1)),
        new ObjectMetadata(ImmutableList.of(2))
    ).perform(task, actionTestKit.getTaskActionToolbox());
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false), result);
}
Also used : SegmentPublishResult(io.druid.indexing.overlord.SegmentPublishResult) Task(io.druid.indexing.common.task.Task) NoopTask(io.druid.indexing.common.task.NoopTask) ObjectMetadata(io.druid.indexing.overlord.ObjectMetadata) DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval) Test(org.junit.Test)
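
The INTERVAL constant locked above is defined elsewhere in the test class, so its value is not shown here. As a rough, stand-alone sketch of the Joda-Time constructors such a lock interval relies on (the class name and values below are illustrative, not taken from the Druid test):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

// Minimal sketch: two common ways to build a one-day interval with Joda-Time.
public class IntervalConstructionSketch {
    public static void main(String[] args) {
        Interval fromIsoString = new Interval("2012-01-01/P1D"); // ISO-8601 "start/period" form
        Interval fromInstants = new Interval(
            new DateTime("2012-01-01T00:00:00Z", DateTimeZone.UTC),
            new DateTime("2012-01-02T00:00:00Z", DateTimeZone.UTC)
        );
        System.out.println(fromIsoString + " and " + fromInstants);
    }
}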

Example 32 with Interval

use of org.joda.time.Interval in project druid by druid-io.

the class IngestSegmentFirehoseFactoryTimelineTest method TC.

private static TestCase TC(String intervalString, int expectedCount, long expectedSum, DataSegmentMaker... segmentMakers) {
    final File tmpDir = Files.createTempDir();
    final Set<DataSegment> segments = Sets.newHashSet();
    for (DataSegmentMaker segmentMaker : segmentMakers) {
        segments.add(segmentMaker.make(tmpDir));
    }
    return new TestCase(tmpDir, new Interval(intervalString), expectedCount, expectedSum, segments);
}
Also used : File(java.io.File) DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval)
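
TC builds its TestCase directly from an interval string via new Interval(intervalString). A small sketch of what that parsing accepts (hypothetical values, not the strings used by the Druid test):

import org.joda.time.Interval;

// Sketch: Joda-Time parses both "start/end" and "start/period" ISO-8601 interval strings.
public class IntervalStringSketch {
    public static void main(String[] args) {
        Interval byEndpoints = new Interval("2000-01-01T00:00:00Z/2000-01-03T00:00:00Z");
        Interval byPeriod = Interval.parse("2000-01-01T00:00:00Z/P2D");
        System.out.println(byEndpoints.toDurationMillis()); // both span two days
        System.out.println(byPeriod.toDurationMillis());
    }
}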

Example 33 with Interval

use of org.joda.time.Interval in project druid by druid-io.

the class TaskLifecycleTest method testBadInterval.

@Test
public void testBadInterval() throws Exception {
    final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", new Interval("2012-01-01/P1D"), null) {

        @Override
        public String getType() {
            return "test";
        }

        @Override
        public TaskStatus run(TaskToolbox toolbox) throws Exception {
            final TaskLock myLock = Iterables.getOnlyElement(toolbox.getTaskActionClient().submit(new LockListAction()));
            final DataSegment segment = DataSegment.builder()
                .dataSource("ds")
                .interval(new Interval("2012-01-01/P2D")) // spans two days, wider than the task's one-day interval
                .version(myLock.getVersion())
                .build();
            toolbox.getTaskActionClient().submit(new SegmentInsertAction(ImmutableSet.of(segment)));
            return TaskStatus.success(getId());
        }
    };
    final TaskStatus status = runTask(task);
    Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
    Assert.assertEquals("segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("segments nuked", 0, mdc.getNuked().size());
}
Also used : TaskToolbox(io.druid.indexing.common.TaskToolbox) LockListAction(io.druid.indexing.common.actions.LockListAction) IndexTask(io.druid.indexing.common.task.IndexTask) RealtimeIndexTask(io.druid.indexing.common.task.RealtimeIndexTask) Task(io.druid.indexing.common.task.Task) AbstractFixedIntervalTask(io.druid.indexing.common.task.AbstractFixedIntervalTask) KillTask(io.druid.indexing.common.task.KillTask) TaskLock(io.druid.indexing.common.TaskLock) SegmentInsertAction(io.druid.indexing.common.actions.SegmentInsertAction) TaskStatus(io.druid.indexing.common.TaskStatus) DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval) FireDepartmentTest(io.druid.segment.realtime.FireDepartmentTest) Test(org.junit.Test)
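
What makes the interval "bad" in this test is that the segment spans two days while the task's fixed interval (and therefore its lock) covers only one, so publishing fails. A Joda-Time-only sketch of that containment relationship (the class name is illustrative, not part of the test):

import org.joda.time.Interval;

// Sketch: the segment interval is not contained in the task's lock interval.
public class BadIntervalSketch {
    public static void main(String[] args) {
        Interval taskInterval = new Interval("2012-01-01/P1D");    // the task's fixed interval
        Interval segmentInterval = new Interval("2012-01-01/P2D"); // the segment being inserted
        System.out.println(taskInterval.contains(segmentInterval)); // false: the segment overflows the lock
        System.out.println(segmentInterval.contains(taskInterval)); // true: containment only holds the other way
    }
}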

Example 34 with Interval

use of org.joda.time.Interval in project druid by druid-io.

the class TaskLifecycleTest method testIndexTask.

@Test
public void testIndexTask() throws Exception {
    final Task indexTask = new IndexTask(
        null,
        null,
        new IndexTask.IndexIngestionSpec(
            new DataSchema(
                "foo",
                null,
                new AggregatorFactory[] { new DoubleSumAggregatorFactory("met", "met") },
                new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D"))),
                mapper
            ),
            new IndexTask.IndexIOConfig(new MockFirehoseFactory(false), false, null),
            new IndexTask.IndexTuningConfig(10000, 10, null, null, indexSpec, 3, true, true, true)
        ),
        null,
        MAPPER
    );
    final Optional<TaskStatus> preRunTaskStatus = tsqa.getStatus(indexTask.getId());
    Assert.assertTrue("pre run task status not present", !preRunTaskStatus.isPresent());
    final TaskStatus mergedStatus = runTask(indexTask);
    final TaskStatus status = taskStorage.getStatus(indexTask.getId()).get();
    final List<DataSegment> publishedSegments = byIntervalOrdering.sortedCopy(mdc.getPublished());
    final List<DataSegment> loggedSegments = byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(indexTask.getId()));
    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals("merged statusCode", TaskStatus.Status.SUCCESS, mergedStatus.getStatusCode());
    Assert.assertEquals("segments logged vs published", loggedSegments, publishedSegments);
    Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
    Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource());
    Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), publishedSegments.get(0).getInterval());
    Assert.assertEquals("segment1 dimensions", ImmutableList.of("dim1", "dim2"), publishedSegments.get(0).getDimensions());
    Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics());
    Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource());
    Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), publishedSegments.get(1).getInterval());
    Assert.assertEquals("segment2 dimensions", ImmutableList.of("dim1", "dim2"), publishedSegments.get(1).getDimensions());
    Assert.assertEquals("segment2 metrics", ImmutableList.of("met"), publishedSegments.get(1).getMetrics());
}
Also used : IndexTask(io.druid.indexing.common.task.IndexTask) RealtimeIndexTask(io.druid.indexing.common.task.RealtimeIndexTask) Task(io.druid.indexing.common.task.Task) AbstractFixedIntervalTask(io.druid.indexing.common.task.AbstractFixedIntervalTask) KillTask(io.druid.indexing.common.task.KillTask) DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) TaskStatus(io.druid.indexing.common.TaskStatus) DataSegment(io.druid.timeline.DataSegment) DataSchema(io.druid.segment.indexing.DataSchema) UniformGranularitySpec(io.druid.segment.indexing.granularity.UniformGranularitySpec) Interval(org.joda.time.Interval) FireDepartmentTest(io.druid.segment.realtime.FireDepartmentTest) Test(org.junit.Test)
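
The ingestion spec covers 2010-01-01/P2D at DAY segment granularity, which is why exactly two segments are published, one per day, with the intervals asserted above. A rough Joda-Time-only sketch of that day bucketing (this is not Druid's GranularitySpec code, just an illustration):

import org.joda.time.DateTime;
import org.joda.time.Interval;

// Sketch: walk a multi-day interval in one-day steps, mirroring DAY segment granularity.
public class DayBucketSketch {
    public static void main(String[] args) {
        Interval spec = new Interval("2010-01-01/P2D");
        for (DateTime cursor = spec.getStart(); cursor.isBefore(spec.getEnd()); cursor = cursor.plusDays(1)) {
            System.out.println(new Interval(cursor, cursor.plusDays(1))); // first 2010-01-01, then 2010-01-02
        }
    }
}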

Example 35 with Interval

use of org.joda.time.Interval in project druid by druid-io.

the class TaskLifecycleTest method testIndexTaskFailure.

@Test
public void testIndexTaskFailure() throws Exception {
    final Task indexTask = new IndexTask(
        null,
        null,
        new IndexTask.IndexIngestionSpec(
            new DataSchema(
                "foo",
                null,
                new AggregatorFactory[] { new DoubleSumAggregatorFactory("met", "met") },
                new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P1D"))),
                mapper
            ),
            new IndexTask.IndexIOConfig(new MockExceptionalFirehoseFactory(), false, null),
            new IndexTask.IndexTuningConfig(10000, 10, null, null, indexSpec, 3, true, true, true)
        ),
        null,
        MAPPER
    );
    final TaskStatus status = runTask(indexTask);
    Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
    Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
}
Also used : DataSchema(io.druid.segment.indexing.DataSchema) UniformGranularitySpec(io.druid.segment.indexing.granularity.UniformGranularitySpec) IndexTask(io.druid.indexing.common.task.IndexTask) RealtimeIndexTask(io.druid.indexing.common.task.RealtimeIndexTask) Task(io.druid.indexing.common.task.Task) AbstractFixedIntervalTask(io.druid.indexing.common.task.AbstractFixedIntervalTask) KillTask(io.druid.indexing.common.task.KillTask) DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) TaskStatus(io.druid.indexing.common.TaskStatus) Interval(org.joda.time.Interval) FireDepartmentTest(io.druid.segment.realtime.FireDepartmentTest) Test(org.junit.Test)

Aggregations

Interval (org.joda.time.Interval) 593
Test (org.junit.Test) 367
DateTime (org.joda.time.DateTime) 204
DataSegment (io.druid.timeline.DataSegment) 146
ArrayList (java.util.ArrayList) 65
Map (java.util.Map) 55
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory) 49
List (java.util.List) 48
QueryRunner (io.druid.query.QueryRunner) 47
File (java.io.File) 46
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory) 45
Result (io.druid.query.Result) 44
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner) 41
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory) 40
HashMap (java.util.HashMap) 38
IOException (java.io.IOException) 37
Period (org.joda.time.Period) 30
Test (org.testng.annotations.Test) 29
DruidServer (io.druid.client.DruidServer) 27
LocalDateTime (org.joda.time.LocalDateTime) 27
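
As the counts above suggest, Interval rarely appears alone in these tests; it is most often paired with DateTime and Period from the same Joda-Time package. A short stand-alone sketch of how the three interact (illustrative values only):

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Period;

// Sketch: a DateTime plus a Period define an Interval; intervals are half-open [start, end).
public class JodaTrioSketch {
    public static void main(String[] args) {
        DateTime start = new DateTime(2012, 1, 1, 0, 0);
        Interval window = new Interval(start, Period.days(1));
        System.out.println(window.contains(start.plusHours(12))); // true: inside the day
        System.out.println(window.contains(window.getEnd()));     // false: the end instant is excluded
    }
}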