Use of org.apache.druid.timeline.partition.NumberedShardSpec in project druid by druid-io.
From the class SegmentAllocateActionTest, method testAddToExistingNumberedShardSpecsFinerPreferredGranularity.
@Test
public void testAddToExistingNumberedShardSpecsFinerPreferredGranularity() throws Exception
{
  final Task task = NoopTask.create();

  // Pre-announce two segments (partitions 0 and 1 of a core set of 2) in the
  // HOUR bucket containing PARTY_TIME.
  taskActionTestKit.getMetadataStorageCoordinator().announceHistoricalSegments(
      ImmutableSet.of(
          DataSegment.builder()
                     .dataSource(DATA_SOURCE)
                     .interval(Granularities.HOUR.bucket(PARTY_TIME))
                     .version(PARTY_TIME.toString())
                     .shardSpec(new NumberedShardSpec(0, 2))
                     .size(0)
                     .build(),
          DataSegment.builder()
                     .dataSource(DATA_SOURCE)
                     .interval(Granularities.HOUR.bucket(PARTY_TIME))
                     .version(PARTY_TIME.toString())
                     .shardSpec(new NumberedShardSpec(1, 2))
                     .size(0)
                     .build()
      )
  );
  taskActionTestKit.getTaskLockbox().add(task);

  // A finer preferred granularity (MINUTE) is ignored: the allocation joins
  // the existing HOUR bucket as partition 2.
  final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.MINUTE, "s1", null);

  assertSameIdentifier(
      new SegmentIdWithShardSpec(
          DATA_SOURCE,
          Granularities.HOUR.bucket(PARTY_TIME),
          PARTY_TIME.toString(),
          new NumberedShardSpec(2, 2)
      ),
      id1
  );
}
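Every snippet on this page calls an allocate(...) helper that is defined elsewhere in SegmentAllocateActionTest and not shown here. Below is a minimal sketch of what such a helper plausibly looks like, assuming it wraps a SegmentAllocateAction and submits it through the test kit's toolbox. The action's constructor arguments have varied across Druid releases, so treat the construction as illustrative rather than the repository's literal code.

// Hypothetical reconstruction of the test's allocate(...) helper. The
// parameter order matches the call sites in these snippets; the
// SegmentAllocateAction construction is an assumption, since its signature
// has changed across Druid versions.
private SegmentIdWithShardSpec allocate(
    final Task task,
    final DateTime timestamp,
    final Granularity queryGranularity,
    final Granularity preferredSegmentGranularity,
    final String sequenceName,
    final String sequencePreviousId
)
{
  final SegmentAllocateAction action = new SegmentAllocateAction(
      DATA_SOURCE,
      timestamp,
      queryGranularity,
      preferredSegmentGranularity,
      sequenceName,
      sequencePreviousId,
      false // skipSegmentLineageCheck: assumed false so previousSegmentId is honored
  );
  // TaskAction.perform(Task, TaskActionToolbox) runs the action against the
  // test kit's metadata store and lockbox.
  return action.perform(task, taskActionTestKit.getTaskActionToolbox());
}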
Use of org.apache.druid.timeline.partition.NumberedShardSpec in project druid by druid-io.
From the class SegmentAllocateActionTest, method testResumeSequence.
@Test
public void testResumeSequence()
{
  final Task task = NoopTask.create();
  taskActionTestKit.getTaskLockbox().add(task);

  final Map<Integer, SegmentIdWithShardSpec> allocatedPartyTimeIds = new HashMap<>();
  final Map<Integer, SegmentIdWithShardSpec> allocatedFutureIds = new HashMap<>();

  final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
  Assert.assertNotNull(id1);
  allocatedPartyTimeIds.put(id1.getShardSpec().getPartitionNum(), id1);

  final SegmentIdWithShardSpec id2 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
  Assert.assertNotNull(id2);
  allocatedFutureIds.put(id2.getShardSpec().getPartitionNum(), id2);

  final SegmentIdWithShardSpec id3 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id2.toString());
  Assert.assertNotNull(id3);
  allocatedPartyTimeIds.put(id3.getShardSpec().getPartitionNum(), id3);

  // Reusing an earlier (sequenceName, previousSegmentId) pair for a different
  // timestamp yields null rather than a fresh identifier.
  final SegmentIdWithShardSpec id4 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
  Assert.assertNull(id4);

  // Replaying the exact request that produced id2 resumes the sequence and
  // returns the same identifier.
  final SegmentIdWithShardSpec id5 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
  Assert.assertNotNull(id5);
  allocatedFutureIds.put(id5.getShardSpec().getPartitionNum(), id5);

  // A finer preferred granularity (MINUTE) cannot fit the existing HOUR
  // segment for this sequence, so allocation fails.
  final SegmentIdWithShardSpec id6 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.MINUTE, "s1", id1.toString());
  Assert.assertNull(id6);

  // A coarser preferred granularity (DAY) falls back to the existing HOUR segment.
  final SegmentIdWithShardSpec id7 = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.DAY, "s1", id1.toString());
  Assert.assertNotNull(id7);
  allocatedFutureIds.put(id7.getShardSpec().getPartitionNum(), id7);

  if (lockGranularity == LockGranularity.TIME_CHUNK) {
    // Time-chunk locking: one lock per locked interval.
    final TaskLock partyLock = Iterables.getOnlyElement(
        FluentIterable.from(taskActionTestKit.getTaskLockbox().findLocksForTask(task))
                      .filter(input -> input.getInterval().contains(PARTY_TIME))
    );
    final TaskLock futureLock = Iterables.getOnlyElement(
        FluentIterable.from(taskActionTestKit.getTaskLockbox().findLocksForTask(task))
                      .filter(input -> input.getInterval().contains(THE_DISTANT_FUTURE))
    );
    assertSameIdentifier(
        id1,
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(0, 0))
    );
    assertSameIdentifier(
        id2,
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(THE_DISTANT_FUTURE), futureLock.getVersion(), new NumberedShardSpec(0, 0))
    );
    assertSameIdentifier(
        id3,
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(1, 0))
    );
  } else {
    // Segment locking: one lock per allocated partition.
    final List<TaskLock> partyLocks = taskActionTestKit.getTaskLockbox()
                                                       .findLocksForTask(task)
                                                       .stream()
                                                       .filter(input -> input.getInterval().contains(PARTY_TIME))
                                                       .collect(Collectors.toList());
    Assert.assertEquals(2, partyLocks.size());

    final Map<Integer, SegmentLock> partitionIdToLock = new HashMap<>();
    partyLocks.forEach(lock -> {
      Assert.assertEquals(LockGranularity.SEGMENT, lock.getGranularity());
      final SegmentLock segmentLock = (SegmentLock) lock;
      partitionIdToLock.put(segmentLock.getPartitionId(), segmentLock);
    });

    for (Entry<Integer, SegmentLock> entry : partitionIdToLock.entrySet()) {
      assertSameIdentifier(
          new SegmentIdWithShardSpec(
              DATA_SOURCE,
              Granularities.HOUR.bucket(PARTY_TIME),
              allocatedPartyTimeIds.get(entry.getKey()).getVersion(),
              new NumberedShardSpec(entry.getValue().getPartitionId(), 0)
          ),
          allocatedPartyTimeIds.get(entry.getKey())
      );
    }

    final List<TaskLock> futureLocks = taskActionTestKit.getTaskLockbox()
                                                        .findLocksForTask(task)
                                                        .stream()
                                                        .filter(input -> input.getInterval().contains(THE_DISTANT_FUTURE))
                                                        .collect(Collectors.toList());
    Assert.assertEquals(1, futureLocks.size());

    partitionIdToLock.clear();
    futureLocks.forEach(lock -> {
      Assert.assertEquals(LockGranularity.SEGMENT, lock.getGranularity());
      final SegmentLock segmentLock = (SegmentLock) lock;
      partitionIdToLock.put(segmentLock.getPartitionId(), segmentLock);
    });

    for (Entry<Integer, SegmentLock> entry : partitionIdToLock.entrySet()) {
      assertSameIdentifier(
          new SegmentIdWithShardSpec(
              DATA_SOURCE,
              Granularities.HOUR.bucket(THE_DISTANT_FUTURE),
              allocatedFutureIds.get(entry.getKey()).getVersion(),
              new NumberedShardSpec(entry.getValue().getPartitionId(), 0)
          ),
          allocatedFutureIds.get(entry.getKey())
      );
    }
  }

  Assert.assertNull(id4);
  assertSameIdentifier(id2, id5);
  Assert.assertNull(id6);
  assertSameIdentifier(id2, id7);
}
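The closing assertions capture the resume contract: a replayed request (same timestamp, granularities, sequenceName, and previousSegmentId) returns the identifier it produced the first time, while an incompatible reuse of the pair returns null. A minimal sketch of the replay case, reusing only the helper and names already in this test:

// Mirrors the id2/id5 pair above: two identical requests in sequence "s1"
// with the same previous id resolve to one identifier rather than two.
final SegmentIdWithShardSpec first = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
final SegmentIdWithShardSpec replayed = allocate(task, THE_DISTANT_FUTURE, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
assertSameIdentifier(first, replayed);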
Use of org.apache.druid.timeline.partition.NumberedShardSpec in project druid by druid-io.
From the class SegmentAllocateActionTest, method testAddToExistingNumberedShardSpecsCoarserPreferredGranularity.
@Test
public void testAddToExistingNumberedShardSpecsCoarserPreferredGranularity() throws Exception
{
  final Task task = NoopTask.create();

  taskActionTestKit.getMetadataStorageCoordinator().announceHistoricalSegments(
      ImmutableSet.of(
          DataSegment.builder()
                     .dataSource(DATA_SOURCE)
                     .interval(Granularities.HOUR.bucket(PARTY_TIME))
                     .version(PARTY_TIME.toString())
                     .shardSpec(new NumberedShardSpec(0, 2))
                     .size(0)
                     .build(),
          DataSegment.builder()
                     .dataSource(DATA_SOURCE)
                     .interval(Granularities.HOUR.bucket(PARTY_TIME))
                     .version(PARTY_TIME.toString())
                     .shardSpec(new NumberedShardSpec(1, 2))
                     .size(0)
                     .build()
      )
  );
  taskActionTestKit.getTaskLockbox().add(task);

  // A coarser preferred granularity (DAY) likewise collapses to the existing
  // HOUR bucket, allocating partition 2.
  final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.DAY, "s1", null);

  assertSameIdentifier(
      new SegmentIdWithShardSpec(
          DATA_SOURCE,
          Granularities.HOUR.bucket(PARTY_TIME),
          PARTY_TIME.toString(),
          new NumberedShardSpec(2, 2)
      ),
      id1
  );
}
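Taken together with the finer-granularity test above, the pattern is symmetric: once HOUR segments exist for the interval, a preferred granularity of MINUTE or DAY (with no previous segment id) falls back to the existing HOUR bucketing. A side-by-side sketch using only this test's own setup and helper:

// Run in isolation (as in the two tests above), each of these calls yields
// NumberedShardSpec(2, 2) in Granularities.HOUR.bucket(PARTY_TIME): the
// pre-announced HOUR segments win over the MINUTE or DAY preference.
final SegmentIdWithShardSpec finer = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.MINUTE, "s1", null);
final SegmentIdWithShardSpec coarser = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.DAY, "s2", null);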
Use of org.apache.druid.timeline.partition.NumberedShardSpec in project druid by druid-io.
From the class SegmentAllocateActionTest, method testManySegmentsSameInterval.
@Test
public void testManySegmentsSameInterval()
{
  final Task task = NoopTask.create();
  taskActionTestKit.getTaskLockbox().add(task);

  // Chain three allocations in sequence "s1": each call passes the previous
  // identifier, yielding partitions 0, 1, and 2 of the same HOUR bucket.
  final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
  final SegmentIdWithShardSpec id2 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());
  final SegmentIdWithShardSpec id3 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id2.toString());

  if (lockGranularity == LockGranularity.TIME_CHUNK) {
    // Time-chunk locking: one lock covers all three allocations.
    final TaskLock partyLock = Iterables.getOnlyElement(
        FluentIterable.from(taskActionTestKit.getTaskLockbox().findLocksForTask(task))
                      .filter(input -> input.getInterval().contains(PARTY_TIME))
    );
    assertSameIdentifier(
        id1,
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(0, 0))
    );
    assertSameIdentifier(
        id2,
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(1, 0))
    );
    assertSameIdentifier(
        id3,
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), partyLock.getVersion(), new NumberedShardSpec(2, 0))
    );
  } else {
    // Segment locking: one lock per allocated partition.
    final List<TaskLock> partyTimeLocks = taskActionTestKit.getTaskLockbox()
                                                           .findLocksForTask(task)
                                                           .stream()
                                                           .filter(input -> input.getInterval().contains(PARTY_TIME))
                                                           .collect(Collectors.toList());
    Assert.assertEquals(3, partyTimeLocks.size());

    assertSameIdentifier(
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), id1.getVersion(), new NumberedShardSpec(0, 0)),
        id1
    );
    assertSameIdentifier(
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), id1.getVersion(), new NumberedShardSpec(1, 0)),
        id2
    );
    assertSameIdentifier(
        new SegmentIdWithShardSpec(DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), id1.getVersion(), new NumberedShardSpec(2, 0)),
        id3
    );
  }
}
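Worth noting across these tests: segments allocated into an empty interval carry a core partition count of 0 (the NumberedShardSpec(0, 0), (1, 0), (2, 0) values above), while appends behind the pre-announced two-segment set keep the original core count of 2. A minimal sketch using only the constructor and accessor these tests themselves use:

import org.apache.druid.timeline.partition.NumberedShardSpec;

public class ShardSpecCounts
{
  public static void main(String[] args)
  {
    // Dynamically allocated: partition 2 with no fixed core set.
    final NumberedShardSpec dynamic = new NumberedShardSpec(2, 0);
    // Appended behind a pre-announced core set of 2.
    final NumberedShardSpec appended = new NumberedShardSpec(2, 2);
    System.out.println(dynamic.getPartitionNum());  // 2
    System.out.println(appended.getPartitionNum()); // 2
  }
}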
Use of org.apache.druid.timeline.partition.NumberedShardSpec in project druid by druid-io.
From the class SegmentAllocateActionTest, method testAddToExistingNumberedShardSpecsSameGranularity.
@Test
public void testAddToExistingNumberedShardSpecsSameGranularity() throws Exception
{
  final Task task = NoopTask.create();

  taskActionTestKit.getMetadataStorageCoordinator().announceHistoricalSegments(
      ImmutableSet.of(
          DataSegment.builder()
                     .dataSource(DATA_SOURCE)
                     .interval(Granularities.HOUR.bucket(PARTY_TIME))
                     .version(PARTY_TIME.toString())
                     .shardSpec(new NumberedShardSpec(0, 2))
                     .size(0)
                     .build(),
          DataSegment.builder()
                     .dataSource(DATA_SOURCE)
                     .interval(Granularities.HOUR.bucket(PARTY_TIME))
                     .version(PARTY_TIME.toString())
                     .shardSpec(new NumberedShardSpec(1, 2))
                     .size(0)
                     .build()
      )
  );
  taskActionTestKit.getTaskLockbox().add(task);

  // With a matching preferred granularity, successive allocations append
  // partitions 2 and 3 behind the pre-announced core set of 2.
  final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
  final SegmentIdWithShardSpec id2 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", id1.toString());

  assertSameIdentifier(
      new SegmentIdWithShardSpec(
          DATA_SOURCE,
          Granularities.HOUR.bucket(PARTY_TIME),
          PARTY_TIME.toString(),
          new NumberedShardSpec(2, 2)
      ),
      id1
  );
  assertSameIdentifier(
      new SegmentIdWithShardSpec(
          DATA_SOURCE,
          Granularities.HOUR.bucket(PARTY_TIME),
          PARTY_TIME.toString(),
          new NumberedShardSpec(3, 2)
      ),
      id2
  );
}
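A general form of the chaining pattern these tests rely on, as a minimal sketch reusing only the helper and constants above: each iteration feeds the previous identifier back in, so the sequence advances one partition per call.

// Chained allocation: feeding each identifier back as previousSegmentId
// advances sequence "s1" one partition per call (here 2, then 3, then 4,
// continuing behind the pre-announced core set).
SegmentIdWithShardSpec previous = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
for (int i = 0; i < 2; i++) {
  previous = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", previous.toString());
}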