Example 11 with Task

Use of org.apache.druid.indexing.common.task.Task in project druid by druid-io.

In class SegmentAllocateActionTest, method testAddToExistingNumberedShardSpecsFinerPreferredGranularity.

@Test
public void testAddToExistingNumberedShardSpecsFinerPreferredGranularity() throws Exception {
    final Task task = NoopTask.create();
    taskActionTestKit.getMetadataStorageCoordinator().announceHistoricalSegments(ImmutableSet.of(
        DataSegment.builder().dataSource(DATA_SOURCE).interval(Granularities.HOUR.bucket(PARTY_TIME))
                   .version(PARTY_TIME.toString()).shardSpec(new NumberedShardSpec(0, 2)).size(0).build(),
        DataSegment.builder().dataSource(DATA_SOURCE).interval(Granularities.HOUR.bucket(PARTY_TIME))
                   .version(PARTY_TIME.toString()).shardSpec(new NumberedShardSpec(1, 2)).size(0).build()
    ));
    taskActionTestKit.getTaskLockbox().add(task);
    final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.MINUTE, "s1", null);
    assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), PARTY_TIME.toString(), new NumberedShardSpec(2, 2)), id1);
}
Also used : Task(org.apache.druid.indexing.common.task.Task) NoopTask(org.apache.druid.indexing.common.task.NoopTask) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) HashBasedNumberedShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedShardSpec) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) Test(org.junit.Test)
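
These examples rely on two helpers defined elsewhere in SegmentAllocateActionTest that the snippets do not show: allocate(...) and assertSameIdentifier(...). The sketch below is a plausible reconstruction based only on how the examples call them; the exact SegmentAllocateAction constructor arguments and the comparison performed by assertSameIdentifier are assumptions, not quotes from the project.

private SegmentIdWithShardSpec allocate(
    final Task task,
    final DateTime timestamp,
    final Granularity queryGranularity,
    final Granularity preferredSegmentGranularity,
    final String sequenceName,
    final String sequencePreviousId
) {
    // Assumed constructor order for this Druid version; lockGranularity is the test parameter
    // (TIME_CHUNK or SEGMENT) and NumberedPartialShardSpec is the default partial shard spec.
    final SegmentAllocateAction action = new SegmentAllocateAction(
        DATA_SOURCE,
        timestamp,
        queryGranularity,
        preferredSegmentGranularity,
        sequenceName,
        sequencePreviousId,
        false,                               // skipSegmentLineageCheck
        NumberedPartialShardSpec.instance(),
        lockGranularity
    );
    return action.perform(task, taskActionTestKit.getTaskActionToolbox());
}

private void assertSameIdentifier(final SegmentIdWithShardSpec expected, final SegmentIdWithShardSpec actual) {
    // Presumably compares the full identifier plus the shard spec's partition number.
    Assert.assertEquals(expected, actual);
    Assert.assertEquals(expected.getShardSpec().getPartitionNum(), actual.getShardSpec().getPartitionNum());
}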

Example 12 with Task

Use of org.apache.druid.indexing.common.task.Task in project druid by druid-io.

In class SegmentAllocateActionTest, method testResumeSequence.

@Test
public void testResumeSequence() {
    final Task task = NoopTask.create();
    taskActionTestKit.getTaskLockbox().add(task);
    final Map<Integer, SegmentIdWithShardSpec> allocatedPartyTimeIds = new HashMap<>();
    final Map<Integer, SegmentIdWithShardSpec> allocatedFutureIds = new HashMap<>();
    final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
    Assert.assertNotNull(id1);
    allocatedPartyTimeIds.put(id1.getShardSpec().getPartitionNum(), id1);
    final SegmentIdWithShardSpec id2 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
    Assert.assertNotNull(id2);
    allocatedFutureIds.put(id2.getShardSpec().getPartitionNum(), id2);
    final SegmentIdWithShardSpec id3 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id2.toString());
    Assert.assertNotNull(id3);
    allocatedPartyTimeIds.put(id3.getShardSpec().getPartitionNum(), id3);
    final SegmentIdWithShardSpec id4 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
    Assert.assertNull(id4);
    final SegmentIdWithShardSpec id5 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
    Assert.assertNotNull(id5);
    allocatedFutureIds.put(id5.getShardSpec().getPartitionNum(), id5);
    final SegmentIdWithShardSpec id6 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.MINUTE, "s1", id1.toString());
    Assert.assertNull(id6);
    final SegmentIdWithShardSpec id7 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.DAY, "s1", id1.toString());
    Assert.assertNotNull(id7);
    allocatedFutureIds.put(id7.getShardSpec().getPartitionNum(), id7);
    if (lockGranularity == LockGranularity.TIME_CHUNK) {
        final TaskLock partyLock = Iterables.getOnlyElement(
            FluentIterable.from(taskActionTestKit.getTaskLockbox().findLocksForTask(task))
                          .filter(input -> input.getInterval().contains(PARTY_TIME))
        );
        final TaskLock futureLock = Iterables.getOnlyElement(
            FluentIterable.from(taskActionTestKit.getTaskLockbox().findLocksForTask(task))
                          .filter(input -> input.getInterval().contains(THE_DISTANT_FUTURE))
        );
        assertSameIdentifier(id1, new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(0, 0)));
        assertSameIdentifier(id2, new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(THE_DISTANT_FUTURE), futureLock.getVersion(), new NumberedShardSpec(0, 0)));
        assertSameIdentifier(id3, new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(1, 0)));
    } else {
        final List<TaskLock> partyLocks = taskActionTestKit.getTaskLockbox().findLocksForTask(task).stream().filter(input -> input.getInterval().contains(PARTY_TIME)).collect(Collectors.toList());
        Assert.assertEquals(2, partyLocks.size());
        final Map<Integer, SegmentLock> partitionIdToLock = new HashMap<>();
        partyLocks.forEach(lock -> {
            Assert.assertEquals(LockGranularity.SEGMENT, lock.getGranularity());
            final SegmentLock segmentLock = (SegmentLock) lock;
            partitionIdToLock.put(segmentLock.getPartitionId(), segmentLock);
        });
        for (Entry<Integer, SegmentLock> entry : partitionIdToLock.entrySet()) {
            assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), allocatedPartyTimeIds.get(entry.getKey()).getVersion(), new NumberedShardSpec(entry.getValue().getPartitionId(), 0)), allocatedPartyTimeIds.get(entry.getKey()));
        }
        final List<TaskLock> futureLocks = taskActionTestKit.getTaskLockbox().findLocksForTask(task).stream().filter(input -> input.getInterval().contains(THE_DISTANT_FUTURE)).collect(Collectors.toList());
        Assert.assertEquals(1, futureLocks.size());
        partitionIdToLock.clear();
        futureLocks.forEach(lock -> {
            Assert.assertEquals(LockGranularity.SEGMENT, lock.getGranularity());
            final SegmentLock segmentLock = (SegmentLock) lock;
            partitionIdToLock.put(segmentLock.getPartitionId(), segmentLock);
        });
        for (Entry<Integer, SegmentLock> entry : partitionIdToLock.entrySet()) {
            assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(THE_DISTANT_FUTURE), allocatedFutureIds.get(entry.getKey()).getVersion(), new NumberedShardSpec(entry.getValue().getPartitionId(), 0)), allocatedFutureIds.get(entry.getKey()));
        }
    }
    Assert.assertNull(id4);
    assertSameIdentifier(id2, id5);
    Assert.assertNull(id6);
    assertSameIdentifier(id2, id7);
}
Also used : NumberedPartialShardSpec(org.apache.druid.timeline.partition.NumberedPartialShardSpec) Iterables(com.google.common.collect.Iterables) Granularity(org.apache.druid.java.util.common.granularity.Granularity) Intervals(org.apache.druid.java.util.common.Intervals) HashBasedNumberedShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedShardSpec) RunWith(org.junit.runner.RunWith) HashMap(java.util.HashMap) ImmutableList(com.google.common.collect.ImmutableList) FluentIterable(com.google.common.collect.FluentIterable) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Task(org.apache.druid.indexing.common.task.Task) Map(java.util.Map) TaskLock(org.apache.druid.indexing.common.TaskLock) ExpectedException(org.junit.rules.ExpectedException) HashBasedNumberedPartialShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedPartialShardSpec) Parameterized(org.junit.runners.Parameterized) Before(org.junit.Before) DateTimes(org.apache.druid.java.util.common.DateTimes) ShardSpec(org.apache.druid.timeline.partition.ShardSpec) Period(org.joda.time.Period) ImmutableSet(com.google.common.collect.ImmutableSet) EmittingLogger(org.apache.druid.java.util.emitter.EmittingLogger) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DateTime(org.joda.time.DateTime) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) Test(org.junit.Test) IOException(java.io.IOException) EasyMock(org.easymock.EasyMock) Collectors(java.util.stream.Collectors) LockGranularity(org.apache.druid.indexing.common.LockGranularity) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Granularities(org.apache.druid.java.util.common.granularity.Granularities) NoopTask(org.apache.druid.indexing.common.task.NoopTask) List(java.util.List) Rule(org.junit.Rule) Predicate(com.google.common.base.Predicate) SegmentLock(org.apache.druid.indexing.common.SegmentLock) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) DataSegment(org.apache.druid.timeline.DataSegment) Entry(java.util.Map.Entry) LinearPartialShardSpec(org.apache.druid.timeline.partition.LinearPartialShardSpec) PartialShardSpec(org.apache.druid.timeline.partition.PartialShardSpec) Assert(org.junit.Assert)
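
The lockGranularity field that testResumeSequence branches on comes from the class-level parameterization hinted at by the RunWith and Parameterized imports above. A minimal sketch of that wiring, assuming standard JUnit 4 conventions; the method name constructorFeeder and the parameter names are illustrative, not taken from the project.

// Sketch of the parameterization that supplies lockGranularity to each test run.
// Only the general shape is implied by the imports; names here are illustrative.
@Parameterized.Parameters(name = "lockGranularity={0}")
public static Iterable<Object[]> constructorFeeder() {
    return ImmutableList.of(
        new Object[]{LockGranularity.TIME_CHUNK},
        new Object[]{LockGranularity.SEGMENT}
    );
}

private final LockGranularity lockGranularity;

public SegmentAllocateActionTest(LockGranularity lockGranularity) {
    this.lockGranularity = lockGranularity;
}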

Example 13 with Task

Use of org.apache.druid.indexing.common.task.Task in project druid by druid-io.

In class SegmentAllocateActionTest, method testAddToExistingNumberedShardSpecsCoarserPreferredGranularity.

@Test
public void testAddToExistingNumberedShardSpecsCoarserPreferredGranularity() throws Exception {
    final Task task = NoopTask.create();
    taskActionTestKit.getMetadataStorageCoordinator().announceHistoricalSegments(ImmutableSet.of(
        DataSegment.builder().dataSource(DATA_SOURCE).interval(Granularities.HOUR.bucket(PARTY_TIME))
                   .version(PARTY_TIME.toString()).shardSpec(new NumberedShardSpec(0, 2)).size(0).build(),
        DataSegment.builder().dataSource(DATA_SOURCE).interval(Granularities.HOUR.bucket(PARTY_TIME))
                   .version(PARTY_TIME.toString()).shardSpec(new NumberedShardSpec(1, 2)).size(0).build()
    ));
    taskActionTestKit.getTaskLockbox().add(task);
    final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.DAY, "s1", null);
    assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), PARTY_TIME.toString(), new NumberedShardSpec(2, 2)), id1);
}
Also used : Task(org.apache.druid.indexing.common.task.Task) NoopTask(org.apache.druid.indexing.common.task.NoopTask) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) HashBasedNumberedShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedShardSpec) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) Test(org.junit.Test)

Example 14 with Task

Use of org.apache.druid.indexing.common.task.Task in project druid by druid-io.

In class SegmentAllocateActionTest, method testManySegmentsSameInterval.

@Test
public void testManySegmentsSameInterval() {
    final Task task = NoopTask.create();
    taskActionTestKit.getTaskLockbox().add(task);
    final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
    final SegmentIdWithShardSpec id2 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
    final SegmentIdWithShardSpec id3 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id2.toString());
    if (lockGranularity == LockGranularity.TIME_CHUNK) {
        final TaskLock partyLock = Iterables.getOnlyElement(FluentIterable.from(taskActionTestKit.getTaskLockbox().findLocksForTask(task)).filter(input -> input.getInterval().contains(PARTY_TIME)));
        assertSameIdentifier(id1, new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(0, 0)));
        assertSameIdentifier(id2, new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(1, 0)));
        assertSameIdentifier(id3, new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(2, 0)));
    } else {
        final List<TaskLock> partyTimeLocks = taskActionTestKit.getTaskLockbox().findLocksForTask(task).stream().filter(input -> input.getInterval().contains(PARTY_TIME)).collect(Collectors.toList());
        Assert.assertEquals(3, partyTimeLocks.size());
        assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), id1.getVersion(), new NumberedShardSpec(0, 0)), id1);
        assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), id1.getVersion(), new NumberedShardSpec(1, 0)), id2);
        assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), id1.getVersion(), new NumberedShardSpec(2, 0)), id3);
    }
}
Also used : NumberedPartialShardSpec(org.apache.druid.timeline.partition.NumberedPartialShardSpec) Iterables(com.google.common.collect.Iterables) Granularity(org.apache.druid.java.util.common.granularity.Granularity) Intervals(org.apache.druid.java.util.common.Intervals) HashBasedNumberedShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedShardSpec) RunWith(org.junit.runner.RunWith) HashMap(java.util.HashMap) ImmutableList(com.google.common.collect.ImmutableList) FluentIterable(com.google.common.collect.FluentIterable) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Task(org.apache.druid.indexing.common.task.Task) Map(java.util.Map) TaskLock(org.apache.druid.indexing.common.TaskLock) ExpectedException(org.junit.rules.ExpectedException) HashBasedNumberedPartialShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedPartialShardSpec) Parameterized(org.junit.runners.Parameterized) Before(org.junit.Before) DateTimes(org.apache.druid.java.util.common.DateTimes) ShardSpec(org.apache.druid.timeline.partition.ShardSpec) Period(org.joda.time.Period) ImmutableSet(com.google.common.collect.ImmutableSet) EmittingLogger(org.apache.druid.java.util.emitter.EmittingLogger) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DateTime(org.joda.time.DateTime) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) Test(org.junit.Test) IOException(java.io.IOException) EasyMock(org.easymock.EasyMock) Collectors(java.util.stream.Collectors) LockGranularity(org.apache.druid.indexing.common.LockGranularity) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Granularities(org.apache.druid.java.util.common.granularity.Granularities) NoopTask(org.apache.druid.indexing.common.task.NoopTask) List(java.util.List) Rule(org.junit.Rule) Predicate(com.google.common.base.Predicate) SegmentLock(org.apache.druid.indexing.common.SegmentLock) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) DataSegment(org.apache.druid.timeline.DataSegment) Entry(java.util.Map.Entry) LinearPartialShardSpec(org.apache.druid.timeline.partition.LinearPartialShardSpec) PartialShardSpec(org.apache.druid.timeline.partition.PartialShardSpec) Assert(org.junit.Assert)

Example 15 with Task

Use of org.apache.druid.indexing.common.task.Task in project druid by druid-io.

In class SegmentAllocateActionTest, method testAddToExistingNumberedShardSpecsSameGranularity.

@Test
public void testAddToExistingNumberedShardSpecsSameGranularity() throws Exception {
    final Task task = NoopTask.create();
    taskActionTestKit.getMetadataStorageCoordinator().announceHistoricalSegments(ImmutableSet.of(
        DataSegment.builder().dataSource(DATA_SOURCE).interval(Granularities.HOUR.bucket(PARTY_TIME))
                   .version(PARTY_TIME.toString()).shardSpec(new NumberedShardSpec(0, 2)).size(0).build(),
        DataSegment.builder().dataSource(DATA_SOURCE).interval(Granularities.HOUR.bucket(PARTY_TIME))
                   .version(PARTY_TIME.toString()).shardSpec(new NumberedShardSpec(1, 2)).size(0).build()
    ));
    taskActionTestKit.getTaskLockbox().add(task);
    final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
    final SegmentIdWithShardSpec id2 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
    assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), PARTY_TIME.toString(), new NumberedShardSpec(2, 2)), id1);
    assertSameIdentifier(new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), PARTY_TIME.toString(), new NumberedShardSpec(3, 2)), id2);
}
Also used : Task(org.apache.druid.indexing.common.task.Task) NoopTask(org.apache.druid.indexing.common.task.NoopTask) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) HashBasedNumberedShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedShardSpec) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) Test(org.junit.Test)
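
Outside the test kit, a task would not invoke the action's perform method directly; it would submit the action through the TaskActionClient available from its toolbox. The hedged sketch below shows that call shape; the toolbox and row variables, the datasource and sequence names, and the constructor argument order all follow the same assumptions as the allocate sketch above and are for illustration only.

// Hypothetical snippet inside a task's segment-generation logic.
final SegmentIdWithShardSpec allocated = toolbox.getTaskActionClient().submit(
    new SegmentAllocateAction(
        "my_datasource",                     // hypothetical datasource
        row.getTimestamp(),                  // timestamp of the row being ingested (hypothetical variable)
        Granularities.NONE,                  // query granularity
        Granularities.HOUR,                  // preferred segment granularity
        "my_sequence",                       // sequence name (hypothetical)
        null,                                // previous segment id; null starts a new sequence
        false,                               // skipSegmentLineageCheck
        NumberedPartialShardSpec.instance(),
        LockGranularity.TIME_CHUNK
    )
);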

Aggregations

Task (org.apache.druid.indexing.common.task.Task): 383
Test (org.junit.Test): 307
NoopTask (org.apache.druid.indexing.common.task.NoopTask): 177
HashMap (java.util.HashMap): 132
Map (java.util.Map): 132
RealtimeIndexTask (org.apache.druid.indexing.common.task.RealtimeIndexTask): 120
ArrayList (java.util.ArrayList): 114
ImmutableMap (com.google.common.collect.ImmutableMap): 104
TreeMap (java.util.TreeMap): 100
TaskStatus (org.apache.druid.indexer.TaskStatus): 100
TaskRunnerListener (org.apache.druid.indexing.overlord.TaskRunnerListener): 98
Executor (java.util.concurrent.Executor): 86
List (java.util.List): 78
AbstractTask (org.apache.druid.indexing.common.task.AbstractTask): 78
Collection (java.util.Collection): 70
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 68
TaskLocation (org.apache.druid.indexer.TaskLocation): 62
TaskLock (org.apache.druid.indexing.common.TaskLock): 60
ImmutableList (com.google.common.collect.ImmutableList): 58
ISE (org.apache.druid.java.util.common.ISE): 58