Use of io.pravega.common.util.IllegalDataFormatException in project pravega by pravega.
The class BTreePage, method applyUpdates.
/**
 * Updates (in-place) the contents of this BTreePage with the given entries for those Keys that already exist. For
 * all the new or deleted Keys, collects them into a List and calculates the offsets at which they would have to be
 * inserted or removed.
 *
 * @param entries A List of PageEntries to update, in sorted order by {@link PageEntry#getKey()}.
 * @return A {@link ChangeInfo} object.
 * @throws IllegalDataFormatException If any of the entries do not conform to the Key/Value size constraints.
 * @throws IllegalArgumentException   If the entries are not sorted by {@link PageEntry#getKey()}.
 */
private ChangeInfo applyUpdates(List<PageEntry> entries) {
    // Keep track of new keys to be added, along with the offset (in the original page) where they would have belonged.
    val changes = new ArrayList<Map.Entry<Integer, PageEntry>>();
    int removeCount = 0;

    // Process all the Entries, in order (by Key).
    int lastPos = 0;
    ByteArraySegment lastKey = null;
    for (val e : entries) {
        if (e.getKey().getLength() != this.config.keyLength || (e.hasValue() && e.getValue().getLength() != this.config.valueLength)) {
            throw new IllegalDataFormatException("Found an entry with unexpected Key or Value length.");
        }

        if (lastKey != null) {
            Preconditions.checkArgument(KEY_COMPARATOR.compare(lastKey, e.getKey()) < 0,
                    "Entries must be sorted by key and no duplicates are allowed.");
        }

        // Figure out if this entry already exists.
        val searchResult = search(e.getKey(), lastPos);
        if (searchResult.isExactMatch()) {
            if (e.hasValue()) {
                // Key already exists: update in-place.
                setValueAtPosition(searchResult.getPosition(), e.getValue());
            } else {
                // Key exists, but this is a removal. Record it for later.
                changes.add(new AbstractMap.SimpleImmutableEntry<>(searchResult.getPosition(), null));
                removeCount++;
            }
        } else if (e.hasValue()) {
            // This entry's key does not exist and we want to insert it (deleting a nonexistent key is a no-op).
            // We need to remember it for later. Since this was not an exact match, the binary search returned the
            // position where it should have been inserted.
            changes.add(new AbstractMap.SimpleImmutableEntry<>(searchResult.getPosition(), e));
        }

        // Remember the last position so we may resume the next search from there.
        lastPos = searchResult.getPosition();
        lastKey = e.getKey();
    }

    return new ChangeInfo(changes, changes.size() - removeCount, removeCount);
}
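The loop above relies on both the page contents and the incoming entries being sorted, which is why each search can resume from the position where the previous one ended instead of rescanning the whole page. Below is a minimal, self-contained sketch of that resume-from-last-position pattern; all names here (ResumingSearchSketch, locateAll, pageKeys, sortedQueries) are hypothetical, and a plain int[] stands in for BTreePage's byte-offset search:

import java.util.Arrays;
import java.util.List;

class ResumingSearchSketch {
    /**
     * For each sorted query key, searches a sorted page starting from the
     * position where the previous search ended. Since both inputs are sorted,
     * each search only scans the remaining suffix of the page.
     */
    static void locateAll(int[] pageKeys, List<Integer> sortedQueries) {
        int lastPos = 0;
        for (int q : sortedQueries) {
            // Search only in [lastPos, pageKeys.length); mirrors search(key, lastPos) above.
            int idx = Arrays.binarySearch(pageKeys, lastPos, pageKeys.length, q);
            if (idx >= 0) {
                // Exact match: the key exists at position idx.
                System.out.println("Key " + q + " found at position " + idx);
                lastPos = idx;
            } else {
                // Not found: binarySearch returns -(insertionPoint) - 1,
                // i.e. the position where the key would have been.
                int insertionPoint = -idx - 1;
                System.out.println("Key " + q + " would insert at " + insertionPoint);
                lastPos = insertionPoint;
            }
        }
    }

    public static void main(String[] args) {
        locateAll(new int[]{10, 20, 30, 40}, List.of(15, 20, 35));
    }
}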
Use of io.pravega.common.util.IllegalDataFormatException in project pravega by pravega.
The class HashTableSegmentLayout, method newIterator.
private <T> CompletableFuture<AsyncIterator<IteratorItem<T>>> newIterator(@NonNull DirectSegmentAccess segment, @NonNull IteratorArgs args,
                                                                          @NonNull GetBucketReader<T> createBucketReader) {
    Preconditions.checkArgument(args.getFrom() == null && args.getTo() == null, "Range Iterators not supported for HashTableSegments.");
    UUID fromHash;
    BufferView serializedState = args.getContinuationToken();
    try {
        fromHash = KeyHasher.getNextHash(serializedState == null ? null : IteratorStateImpl.deserialize(serializedState).getKeyHash());
    } catch (IOException ex) {
        // Bad IteratorState serialization.
        throw new IllegalDataFormatException("Unable to deserialize `serializedState`.", ex);
    }

    if (fromHash == null) {
        // Nothing to iterate on.
        return CompletableFuture.completedFuture(TableIterator.empty());
    }

    // Create a converter that will use a TableBucketReader to fetch all requested items in the iterated Buckets.
    val bucketReader = createBucketReader.apply(segment, this.keyIndex::getBackpointerOffset, this.executor);
    TableIterator.ConvertResult<IteratorItem<T>> converter = bucket ->
            bucketReader.findAllExisting(bucket.getSegmentOffset(), new TimeoutTimer(args.getFetchTimeout()))
                        .thenApply(result -> new IteratorItemImpl<>(new IteratorStateImpl(bucket.getHash()).serialize(), result));

    // Fetch the Tail (Unindexed) Hashes, then create the TableIterator.
    return this.keyIndex.getUnindexedKeyHashes(segment)
                        .thenComposeAsync(cacheHashes -> TableIterator.<IteratorItem<T>>builder()
                                .segment(segment)
                                .cacheHashes(cacheHashes)
                                .firstHash(fromHash)
                                .executor(executor)
                                .resultConverter(converter)
                                .fetchTimeout(args.getFetchTimeout())
                                .build(), this.executor);
}
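The method above implements a continuation-token iteration: the caller passes back the serialized state from the previous batch, the server derives the next hash to resume from, and a null next hash means the iteration is complete (hence TableIterator.empty()). The sketch below illustrates the same pattern over an in-memory list; all names (ContinuationSketch, nextPage, Page) are hypothetical and not part of Pravega's API, and a numeric offset stands in for the key hash:

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.CompletableFuture;

class ContinuationSketch {
    private static final List<String> DATA = List.of("a", "b", "c", "d", "e");
    private static final int PAGE_SIZE = 2;

    /** Result of one iteration step: the items plus the token for the next call. */
    record Page(List<String> items, byte[] continuationToken) { }

    /**
     * Returns the next page. A null token starts from the beginning; once the
     * data is exhausted, an empty page with a null token is returned,
     * mirroring the "nothing to iterate on" branch above.
     */
    static CompletableFuture<Page> nextPage(byte[] token) {
        int from;
        try {
            from = token == null ? 0 : Integer.parseInt(new String(token, StandardCharsets.UTF_8));
        } catch (NumberFormatException ex) {
            // Bad token serialization; corresponds to the IllegalDataFormatException above.
            throw new IllegalArgumentException("Unable to deserialize continuation token.", ex);
        }
        if (from >= DATA.size()) {
            return CompletableFuture.completedFuture(new Page(List.of(), null));
        }
        int to = Math.min(from + PAGE_SIZE, DATA.size());
        byte[] next = to >= DATA.size() ? null : Integer.toString(to).getBytes(StandardCharsets.UTF_8);
        return CompletableFuture.completedFuture(new Page(DATA.subList(from, to), next));
    }

    public static void main(String[] args) {
        byte[] token = null;
        do {
            Page p = nextPage(token).join();
            System.out.println(p.items());
            token = p.continuationToken();
        } while (token != null);
    }
}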