Example usage of io.pravega.controller.store.stream.tables.HistoryRecord from the pravega project.
Taken from class TableHelperTest, method scaleTest.
@Test
public void scaleTest() {
    final long createTime = System.currentTimeMillis();
    final List<Integer> originalSegments = Lists.newArrayList(0, 1, 2, 3, 4);
    byte[] segmentTable = createSegmentTable(5, createTime);
    byte[] historyTable = TableHelper.createHistoryTable(createTime, originalSegments);

    // Begin a scale: the five original segments are replaced by five new ones,
    // each covering an equal fifth of the keyspace.
    final List<Integer> scaledSegments = Lists.newArrayList(5, 6, 7, 8, 9);
    final double fifth = 1.0 / 5;
    final List<AbstractMap.SimpleEntry<Double, Double>> scaledRanges = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
        scaledRanges.add(new AbstractMap.SimpleEntry<>(i * fifth, (i + 1) * fifth));
    }
    segmentTable = updateSegmentTable(segmentTable, scaledRanges, createTime + 1);

    // Segment table updated, history table untouched: the scale is in flight
    // and a matching request is recognized as a rerun.
    assertTrue(TableHelper.isScaleOngoing(historyTable, segmentTable));
    assertTrue(TableHelper.isRerunOf(originalSegments, scaledRanges, historyTable, segmentTable));

    // A request with different segments and ranges must NOT be treated as a rerun.
    final List<AbstractMap.SimpleEntry<Double, Double>> mismatchedRanges = new ArrayList<>();
    for (int i = 0; i < 2; i++) {
        mismatchedRanges.add(new AbstractMap.SimpleEntry<>(i * fifth, (i + 1) * fifth));
    }
    assertFalse(TableHelper.isRerunOf(Lists.newArrayList(5, 6), mismatchedRanges, historyTable, segmentTable));

    // Adding the partial history record keeps the scale ongoing and rerun-detectable.
    historyTable = TableHelper.addPartialRecordToHistoryTable(historyTable, scaledSegments);
    assertTrue(TableHelper.isScaleOngoing(historyTable, segmentTable));
    assertTrue(TableHelper.isRerunOf(originalSegments, scaledRanges, historyTable, segmentTable));

    // Completing the partial record ends the scale.
    final HistoryRecord partialRecord = HistoryRecord.readLatestRecord(historyTable, false).get();
    historyTable = TableHelper.completePartialRecordInHistoryTable(historyTable, partialRecord, createTime + 2);
    assertFalse(TableHelper.isScaleOngoing(historyTable, segmentTable));
}
Example usage of io.pravega.controller.store.stream.tables.HistoryRecord from the pravega project.
Taken from class TableHelperTest, method getActiveSegmentsTest.
@Test
public void getActiveSegmentsTest() {
    final long createTime = System.currentTimeMillis();
    final List<Integer> initialSegments = Lists.newArrayList(0, 1, 2, 3, 4);
    byte[] historyTable = TableHelper.createHistoryTable(createTime, initialSegments);

    // A freshly created stream reports its initial segments as active.
    assertEquals(TableHelper.getActiveSegments(historyTable), initialSegments);

    // A partial (in-flight) scale record must not change the active set.
    final List<Integer> scaledSegments = Lists.newArrayList(5, 6, 7, 8, 9);
    historyTable = TableHelper.addPartialRecordToHistoryTable(historyTable, scaledSegments);
    assertEquals(TableHelper.getActiveSegments(historyTable), initialSegments);

    // The partial record bumps the latest epoch while the active epoch stays at 0.
    int activeEpoch = TableHelper.getActiveEpoch(historyTable).getKey();
    assertEquals(0, activeEpoch);
    int latestEpoch = TableHelper.getLatestEpoch(historyTable).getKey();
    assertEquals(1, latestEpoch);

    // Completing the record at createTime + 2 switches the active set over.
    final HistoryRecord partialRecord = HistoryRecord.readLatestRecord(historyTable, false).get();
    historyTable = TableHelper.completePartialRecordInHistoryTable(historyTable, partialRecord, createTime + 2);
    assertEquals(TableHelper.getActiveSegments(historyTable), scaledSegments);

    // Time-travel lookups: any instant strictly before the scale completion time
    // resolves to the initial segments ...
    for (long at : new long[] {createTime, 0L, createTime - 1, createTime + 1}) {
        assertEquals(initialSegments, TableHelper.getActiveSegments(at, new byte[0], historyTable, null, null));
    }
    // ... and from the completion time onward to the scaled segments.
    for (long at : new long[] {createTime + 2, createTime + 3}) {
        assertEquals(scaledSegments, TableHelper.getActiveSegments(at, new byte[0], historyTable, null, null));
    }
}
Example usage of io.pravega.controller.store.stream.tables.HistoryRecord from the pravega project.
Taken from class PersistentStreamBase, method addPartialHistoryRecord.
/**
 * Adds a partial record for the new epoch to the history table, unless an equivalent
 * partial record already exists (an idempotent retry) or the table has advanced past
 * this epoch in an incompatible way (in which case the attempt is rejected).
 *
 * @param sealedSegments segments being sealed by this scale operation
 * @param createdSegments segments created by this scale operation
 * @param epoch the epoch this scale operation was started against
 * @return a future that completes once the partial record and the new epoch node have
 *         been persisted, or immediately if the record was already added
 */
private CompletableFuture<Void> addPartialHistoryRecord(final List<Integer> sealedSegments, final List<Integer> createdSegments, final int epoch) {
    return getHistoryTable().thenCompose(historyTable -> {
        final Optional<HistoryRecord> lastRecordOpt = HistoryRecord.readLatestRecord(historyTable.getData(), false);
        // The table always holds at least the stream-creation record, so the latest
        // record must be present.
        assert lastRecordOpt.isPresent();
        final HistoryRecord lastRecord = lastRecordOpt.get();
        // idempotent check: the table has already moved past our epoch, so this call is
        // either a retry of a scale that already wrote its record, or a stale attempt.
        if (lastRecord.getEpoch() > epoch) {
            // Retry is recognized only if the latest record is partial and contains
            // every segment this scale was going to create.
            boolean idempotent = lastRecord.isPartial() && lastRecord.getSegments().containsAll(createdSegments);
            if (idempotent) {
                // The created segments must be new in the latest record: none of them
                // may appear in the previous record. NOTE(review): fetchPrevious is
                // assumed present here because lastRecord's epoch exceeds ours.
                HistoryRecord previous = HistoryRecord.fetchPrevious(lastRecord, historyTable.getData()).get();
                idempotent = previous.getSegments().stream().noneMatch(createdSegments::contains);
            }
            if (idempotent) {
                log.debug("{}/{} scale op for epoch {} - history record already added", scope, name, epoch);
                return CompletableFuture.completedFuture(null);
            } else {
                log.warn("{}/{} scale op for epoch {}. Scale already completed.", scope, name, epoch);
                throw new ScaleOperationExceptions.ScaleConditionInvalidException();
            }
        }
        // Compute the next active set (created segments in, sealed segments out) and
        // append it to the history table as a partial record.
        final List<Integer> newActiveSegments = getNewActiveSegments(createdSegments, sealedSegments, lastRecord);
        byte[] updatedTable = TableHelper.addPartialRecordToHistoryTable(historyTable.getData(), newActiveSegments);
        final Data<T> updated = new Data<>(updatedTable, historyTable.getVersion());
        int latestEpoch = TableHelper.getLatestEpoch(updatedTable).getKey();
        // Create the new epoch node first, then conditionally update the history table
        // (the version carried in `updated` guards against concurrent writers).
        return createNewEpoch(latestEpoch).thenCompose(v -> updateHistoryTable(updated)).whenComplete((r, e) -> {
            if (e == null) {
                log.debug("{}/{} scale op for epoch {}. Creating new epoch and updating history table.", scope, name, epoch);
            } else {
                log.warn("{}/{} scale op for epoch {}. Failed to update history table. {}", scope, name, epoch, e.getClass().getName());
            }
        });
    });
}
Example usage of io.pravega.controller.store.stream.tables.HistoryRecord from the pravega project.
Taken from class PersistentStreamBase, method completeScale.
/**
 * Completes an in-flight scale operation: seals the given segments, finalizes the
 * partial history record with a scale timestamp, updates the index, and returns the
 * stream to ACTIVE state. Safe to retry - an already-completed scale resolves
 * immediately, while an incompatible completion attempt is rejected.
 *
 * @param scaleTimestamp requested completion time for the scale
 * @param sealedSegments map of sealed segment number to its size at seal time
 * @param activeEpoch the epoch that was active when this scale started
 * @param newSegments segments created by this scale
 * @return a future that completes once all tables and the stream state are updated
 */
private CompletableFuture<Void> completeScale(final long scaleTimestamp, final Map<Integer, Long> sealedSegments, final int activeEpoch, final List<Integer> newSegments) {
    return getHistoryTable().thenCompose(historyTable -> {
        final Optional<HistoryRecord> lastRecordOpt = HistoryRecord.readLatestRecord(historyTable.getData(), false);
        // The table always holds at least the stream-creation record.
        assert lastRecordOpt.isPresent();
        final HistoryRecord lastRecord = lastRecordOpt.get();
        // idempotent check: a non-partial latest record means some completion already
        // happened; accept silently only if it matches this scale's outcome.
        if (!lastRecord.isPartial()) {
            if (lastRecord.getSegments().stream().noneMatch(sealedSegments::containsKey) && newSegments.stream().allMatch(x -> lastRecord.getSegments().contains(x))) {
                log.debug("{}/{} scale already completed for epoch {}.", scope, name, activeEpoch);
                return CompletableFuture.completedFuture(null);
            } else {
                log.debug("{}/{} scale complete attempt invalid for epoch {}.", scope, name, activeEpoch);
                throw new ScaleOperationExceptions.ScaleConditionInvalidException();
            }
        }
        // Use the later of the requested timestamp and the local clock as the scale time.
        long scaleEventTime = Math.max(System.currentTimeMillis(), scaleTimestamp);
        final Optional<HistoryRecord> previousOpt = HistoryRecord.fetchPrevious(lastRecord, historyTable.getData());
        if (previousOpt.isPresent()) {
            // To ensure that we always have ascending time in history records irrespective of controller clock mismatches.
            scaleEventTime = Math.max(scaleEventTime, previousOpt.get().getScaleTime() + 1);
            // A completed record from a later epoch means this scale lost the race.
            if (previousOpt.get().getEpoch() > activeEpoch) {
                throw new ScaleOperationExceptions.ScaleConditionInvalidException();
            }
        }
        byte[] updatedTable = TableHelper.completePartialRecordInHistoryTable(historyTable.getData(), lastRecord, scaleEventTime);
        final Data<T> updated = new Data<>(updatedTable, historyTable.getVersion());
        final HistoryRecord newRecord = HistoryRecord.readLatestRecord(updatedTable, false).get();
        // Order matters: record sealed-segment sizes, add the index entry, then
        // conditionally write the history table, and finally flip state back to ACTIVE.
        return addSealedSegmentsToRecord(sealedSegments).thenCompose(x -> addIndexRecord(newRecord)).thenCompose(x -> updateHistoryTable(updated)).thenCompose(x -> Futures.toVoid(updateState(State.ACTIVE))).whenComplete((r, e) -> {
            if (e != null) {
                log.warn("{}/{} attempt to complete scale for epoch {}. {}", scope, name, activeEpoch, e.getClass().getName());
            } else {
                log.debug("{}/{} scale complete, index and history tables updated for epoch {}.", scope, name, activeEpoch);
            }
        });
    });
}
Example usage of io.pravega.controller.store.stream.tables.HistoryRecord from the pravega project.
Taken from class TableHelperTest, method testSegmentCreationBeforePreviousScale.
@Test(timeout = 10000)
public void testSegmentCreationBeforePreviousScale() {
    // Regression test for a "no value present" failure that occurred when:
    // - the index was not yet updated, and
    // - a segment's creation time preceded its history record's time.
    // While trying to find successors we looked for the history record matching the
    // segment's creation time and got an old record. We then searched for the
    // segment-sealed event between that history record and the last indexed entry,
    // both of which precede the segment-creation entry.
    List<Segment> segments = new ArrayList<>();
    List<Integer> newSegments = Lists.newArrayList(0, 1);
    // create stream with segments 0 and 1 splitting the keyspace in half
    long timestamp = 1503933145366L;
    Segment zero = new Segment(0, timestamp, 0, 0.5);
    segments.add(zero);
    Segment one = new Segment(1, timestamp, 0.5, 1.0);
    segments.add(one);
    byte[] historyTable = TableHelper.createHistoryTable(timestamp, newSegments);
    byte[] indexTable = TableHelper.createIndexTable(timestamp, 0);
    // scale up 1... 0 -> 2, 3
    int numOfSplits = 2;
    double delta = (zero.getKeyEnd() - zero.getKeyStart()) / numOfSplits;
    ArrayList<AbstractMap.SimpleEntry<Double, Double>> simpleEntries = new ArrayList<>();
    for (int i = 0; i < numOfSplits; i++) {
        simpleEntries.add(new AbstractMap.SimpleEntry<>(zero.getKeyStart() + delta * i, zero.getKeyStart() + (delta * (i + 1))));
    }
    // create segments before scale (creation time precedes the history record time)
    Segment two = new Segment(2, 1503933266113L, simpleEntries.get(0).getKey(), simpleEntries.get(0).getValue());
    segments.add(two);
    Segment three = new Segment(3, 1503933266113L, simpleEntries.get(1).getKey(), simpleEntries.get(1).getValue());
    segments.add(three);
    newSegments = Lists.newArrayList(1, 2, 3);
    // add partial record to history table
    historyTable = TableHelper.addPartialRecordToHistoryTable(historyTable, newSegments);
    HistoryRecord partial = HistoryRecord.readLatestRecord(historyTable, false).get();
    timestamp = 1503933266862L;
    // complete record in history table by adding time
    historyTable = TableHelper.completePartialRecordInHistoryTable(historyTable, partial, timestamp);
    HistoryRecord historyRecord = HistoryRecord.readLatestRecord(historyTable, false).get();
    indexTable = TableHelper.updateIndexTable(indexTable, historyRecord.getScaleTime(), historyRecord.getOffset());
    // scale up 2.. 1 -> 4, 5
    delta = (one.getKeyEnd() - one.getKeyStart()) / numOfSplits;
    simpleEntries = new ArrayList<>();
    for (int i = 0; i < numOfSplits; i++) {
        simpleEntries.add(new AbstractMap.SimpleEntry<>(one.getKeyStart() + delta * i, one.getKeyStart() + (delta * (i + 1))));
    }
    // create segments before scale
    Segment four = new Segment(4, 1503933266188L, simpleEntries.get(0).getKey(), simpleEntries.get(0).getValue());
    segments.add(four);
    Segment five = new Segment(5, 1503933266188L, simpleEntries.get(1).getKey(), simpleEntries.get(1).getValue());
    segments.add(five);
    newSegments = Lists.newArrayList(2, 3, 4, 5);
    // add partial record to history table
    historyTable = TableHelper.addPartialRecordToHistoryTable(historyTable, newSegments);
    partial = HistoryRecord.readLatestRecord(historyTable, false).get();
    // Notice: segment was created at timestamp but we are recording its entry in history table at timestamp + 10000
    timestamp = 1503933288726L;
    // complete record in history table by adding time
    historyTable = TableHelper.completePartialRecordInHistoryTable(historyTable, partial, timestamp);
    historyRecord = HistoryRecord.readLatestRecord(historyTable, false).get();
    indexTable = TableHelper.updateIndexTable(indexTable, historyRecord.getScaleTime(), historyRecord.getOffset());
    // scale up 3.. 5 -> 6, 7
    delta = (five.getKeyEnd() - five.getKeyStart()) / numOfSplits;
    simpleEntries = new ArrayList<>();
    for (int i = 0; i < numOfSplits; i++) {
        simpleEntries.add(new AbstractMap.SimpleEntry<>(five.getKeyStart() + delta * i, five.getKeyStart() + (delta * (i + 1))));
    }
    // create new segments
    Segment six = new Segment(6, 1503933409076L, simpleEntries.get(0).getKey(), simpleEntries.get(0).getValue());
    segments.add(six);
    Segment seven = new Segment(7, 1503933409076L, simpleEntries.get(1).getKey(), simpleEntries.get(1).getValue());
    segments.add(seven);
    newSegments = Lists.newArrayList(2, 3, 4, 6, 7);
    // create partial record in history table
    historyTable = TableHelper.addPartialRecordToHistoryTable(historyTable, newSegments);
    partial = HistoryRecord.readLatestRecord(historyTable, false).get();
    timestamp = 1503933409806L;
    // find successor candidates before completing scale.
    List<Integer> candidates5 = TableHelper.findSegmentSuccessorCandidates(five, indexTable, historyTable);
    assertTrue(candidates5.containsAll(Arrays.asList(2, 3, 4, 6, 7)));
    // complete record in history table by adding time
    historyTable = TableHelper.completePartialRecordInHistoryTable(historyTable, partial, timestamp);
    historyRecord = HistoryRecord.readLatestRecord(historyTable, false).get();
    // verify successor candidates after completing history record but before adding index entry
    candidates5 = TableHelper.findSegmentSuccessorCandidates(five, indexTable, historyTable);
    assertTrue(candidates5.containsAll(Arrays.asList(2, 3, 4, 6, 7)));
    indexTable = TableHelper.updateIndexTable(indexTable, historyRecord.getScaleTime(), historyRecord.getOffset());
    // verify successor candidates after index is updated
    candidates5 = TableHelper.findSegmentSuccessorCandidates(five, indexTable, historyTable);
    assertTrue(candidates5.containsAll(Arrays.asList(2, 3, 4, 6, 7)));
    // scale down 6, 7 -> 8
    timestamp = 1503933560447L;
    // add another scale, merging 6 and 7 back into a single segment
    // (use the accessors for key-range bounds, consistent with the rest of this test)
    Segment eight = new Segment(8, timestamp, six.getKeyStart(), seven.getKeyEnd());
    segments.add(eight);
    newSegments = Lists.newArrayList(2, 3, 4, 8);
    historyTable = TableHelper.addPartialRecordToHistoryTable(historyTable, newSegments);
    partial = HistoryRecord.readLatestRecord(historyTable, false).get();
    timestamp = 1503933560448L;
    // complete scale
    historyTable = TableHelper.completePartialRecordInHistoryTable(historyTable, partial, timestamp);
    historyRecord = HistoryRecord.readLatestRecord(historyTable, false).get();
    // add index
    indexTable = TableHelper.updateIndexTable(indexTable, historyRecord.getScaleTime(), historyRecord.getOffset());
    // verify successors again after a new scale entry comes in
    candidates5 = TableHelper.findSegmentSuccessorCandidates(five, indexTable, historyTable);
    assertTrue(candidates5.containsAll(Arrays.asList(2, 3, 4, 6, 7)));
}
Aggregations