Use of io.pravega.common.function.Callbacks in project pravega by pravega.
From class DataFrameReaderTests, method testReadsWithDataLogFailure.
/**
 * Tests the case when the DataFrameReader reads from a log and it encounters log read failures.
 * 1. Initial read failures.
 * 2. Somewhere in the middle of reading.
 */
@Test
public void testReadsWithDataLogFailure() throws Exception {
    // Fail reads synchronously every X attempts.
    int failReadSyncEvery = 3;
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(100, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
    records.addAll(DataFrameTestHelpers.generateLogItems(100, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback =
                (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(Callbacks::doNothing, Callbacks::doNothing, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (TestLogItem r : records) {
                b.append(r);
            }
        }

        TestSerializer logItemFactory = new TestSerializer();

        // Test 1: Initial call to getReader.
        ErrorInjector<Exception> getReaderErrorInjector = new ErrorInjector<>(
                count -> true, // Fail every time.
                () -> new DataLogNotAvailableException("intentional getReader exception"));
        dataLog.setReadErrorInjectors(getReaderErrorInjector, null);
        AssertExtensions.assertThrows(
                "No exception or wrong type of exception thrown by getNext() with exception thrown by getReader().",
                () -> new DataFrameReader<>(dataLog, logItemFactory, CONTAINER_ID),
                ex -> Exceptions.unwrap(ex) == getReaderErrorInjector.getLastCycleException());

        // Test 2: Failures during getNext().
        ErrorInjector<Exception> readErrorInjector = new ErrorInjector<>(
                count -> count % failReadSyncEvery == 0,
                () -> new DataLogNotAvailableException("intentional getNext exception"));
        dataLog.setReadErrorInjectors(null, readErrorInjector);
        testReadWithException(dataLog, logItemFactory, ex -> ex == readErrorInjector.getLastCycleException());
    }
}
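The two ErrorInjector instances above drive the failure schedule: a counting predicate decides which invocation fails, and a supplier builds the exception to throw, with the last generated exception kept around so the test can compare it against what was actually caught. The following stand-alone sketch illustrates that pattern; SimpleErrorInjector is a hypothetical, simplified stand-in and does not reproduce the actual Pravega ErrorInjector API.

import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.IntPredicate;
import java.util.function.Supplier;

// Hypothetical, simplified stand-in for the error injector used in the test above.
final class SimpleErrorInjector<E extends Exception> {
    private final AtomicInteger invocationCount = new AtomicInteger();
    private final IntPredicate shouldFail;       // decides which invocation fails (e.g., every 3rd)
    private final Supplier<E> exceptionSupplier; // builds the exception to throw
    private volatile E lastException;

    SimpleErrorInjector(IntPredicate shouldFail, Supplier<E> exceptionSupplier) {
        this.shouldFail = shouldFail;
        this.exceptionSupplier = exceptionSupplier;
    }

    // Call at the start of the operation under test; throws on the scheduled invocations.
    void throwIfNecessary() throws E {
        int count = this.invocationCount.getAndIncrement();
        if (this.shouldFail.test(count)) {
            this.lastException = this.exceptionSupplier.get();
            throw this.lastException;
        }
    }

    // The test above compares the caught exception against the injector's last generated one.
    E getLastCycleException() {
        return this.lastException;
    }

    public static void main(String[] args) {
        SimpleErrorInjector<Exception> injector =
                new SimpleErrorInjector<>(count -> count % 3 == 0, () -> new Exception("intentional"));
        for (int i = 0; i < 6; i++) {
            try {
                injector.throwIfNecessary();
                System.out.println("call " + i + ": ok");
            } catch (Exception ex) {
                System.out.println("call " + i + ": failed, matches last = " + (ex == injector.getLastCycleException()));
            }
        }
    }
}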
Use of io.pravega.common.function.Callbacks in project pravega by pravega.
From class DataFrameReaderTests, method testReadsWithDeserializationFailure.
/**
 * Tests the case when the DataFrameReader reads from a log and it encounters LogItem SerializationExceptions.
 */
@Test
public void testReadsWithDeserializationFailure() throws Exception {
    // Fail deserialization every X records (write-wise).
    int failDeserializationEvery = 11;
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(100, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
    records.addAll(DataFrameTestHelpers.generateLogItems(100, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback =
                (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(Callbacks::doNothing, Callbacks::doNothing, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (TestLogItem r : records) {
                b.append(r);
            }
        }

        ErrorInjector<SerializationException> errorInjector = new ErrorInjector<>(
                count -> count % failDeserializationEvery == 0,
                () -> new SerializationException("TestLogItem.deserialize intentional"));
        TestSerializer logItemFactory = new TestSerializer();
        logItemFactory.setDeserializationErrorInjector(errorInjector);
        testReadWithException(dataLog, logItemFactory, ex -> ex instanceof DataCorruptionException);
    }
}
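Both of the DataFrameReaderTests usages above pass Callbacks::doNothing where the DataFrameBuilder.Args constructor requires callbacks the test does not care about. The sketch below illustrates that no-op callback idea with a hypothetical NoOpCallbacks class; it is not the actual io.pravega.common.function.Callbacks implementation, whose exact signatures may differ.

import java.util.function.Consumer;

// Hypothetical illustration of the no-op callback idea behind Callbacks::doNothing:
// a single shared method reference satisfies required callback parameters without doing any work.
final class NoOpCallbacks {
    private NoOpCallbacks() {
    }

    // Generic no-op: usable wherever a Consumer<T> (or a compatible functional interface) is expected.
    static <T> void doNothing(T ignored) {
        // Intentionally empty.
    }

    public static void main(String[] args) {
        Consumer<String> onCommit = NoOpCallbacks::doNothing;
        onCommit.accept("commit acknowledged"); // accepted and silently discarded
        System.out.println("No-op callback invoked without side effects.");
    }
}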
Use of io.pravega.common.function.Callbacks in project pravega by pravega.
From class DataFrameBuilderTests, method testAppendNoFailure.
private void testAppendNoFailure(int delayMillis) throws Exception {
    // Happy case: append a bunch of data, and make sure the frames that get output contain it.
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
    records.addAll(DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, delayMillis, executorService())) {
        dataLog.initialize(TIMEOUT);
        val order = new HashMap<DataFrameBuilder.CommitArgs, Integer>();
        List<DataFrameBuilder.CommitArgs> commitFrames = Collections.synchronizedList(new ArrayList<>());
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback =
                (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(DataFrameTestHelpers.appendOrder(order), commitFrames::add, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (TestLogItem item : records) {
                b.append(item);
            }
            b.close();
        }

        // Wait for all the frames commit callbacks to be invoked. Even though the DataFrameBuilder waits (upon close)
        // for the OrderedItemProcessor to finish, there are other callbacks chained that need to be completed (such
        // as the one collecting frames in the list above).
        TestUtils.await(() -> commitFrames.size() >= order.size(), delayMillis, TIMEOUT.toMillis());

        // It is quite likely that acks will arrive out of order. The DataFrameBuilder has no responsibility for
        // rearrangement; that should be done by its user.
        commitFrames.sort(Comparator.comparingInt(order::get));

        // Check the correctness of the commit callback.
        AssertExtensions.assertGreaterThan("Not enough Data Frames were generated.", 1, commitFrames.size());
        DataFrameBuilder.CommitArgs previousCommitArgs = null;
        for (val ca : commitFrames) {
            if (previousCommitArgs != null) {
                AssertExtensions.assertGreaterThanOrEqual("CommitArgs.getLastFullySerializedSequenceNumber() is not monotonically increasing.",
                        previousCommitArgs.getLastFullySerializedSequenceNumber(), ca.getLastFullySerializedSequenceNumber());
                AssertExtensions.assertGreaterThanOrEqual("CommitArgs.getLastStartedSequenceNumber() is not monotonically increasing.",
                        previousCommitArgs.getLastStartedSequenceNumber(), ca.getLastStartedSequenceNumber());
                AssertExtensions.assertGreaterThanOrEqual("CommitArgs.getLogAddress() is not monotonically increasing.",
                        previousCommitArgs.getLogAddress().getSequence(), ca.getLogAddress().getSequence());
            }
            previousCommitArgs = ca;
        }

        // Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
        val frames = dataLog.getAllEntries(readItem -> DataFrame.read(readItem.getPayload(), readItem.getLength(), readItem.getAddress()));
        DataFrameTestHelpers.checkReadRecords(frames, records, r -> new ByteArraySegment(r.getFullSerialization()));
    }
}
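The re-ordering step above is worth calling out: commit acknowledgements may arrive out of order, so the test records the order in which commits were issued (via DataFrameTestHelpers.appendOrder) and sorts the acks by it before checking monotonicity. Below is a simplified, self-contained sketch of that idea, using a hypothetical CommitRecord type in place of DataFrameBuilder.CommitArgs (requires Java 16+ for records).

import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Simplified sketch of sorting out-of-order commit acks back into issue order before
// verifying that their sequence numbers are monotonically increasing.
final class CommitOrderingSketch {
    // Hypothetical stand-in for DataFrameBuilder.CommitArgs.
    record CommitRecord(long lastSerializedSequenceNumber) {
    }

    public static void main(String[] args) {
        Map<CommitRecord, Integer> issueOrder = new HashMap<>();
        List<CommitRecord> acks = new ArrayList<>();

        // Simulate three commits issued in order 0, 1, 2 but acknowledged out of order.
        CommitRecord c0 = new CommitRecord(10);
        CommitRecord c1 = new CommitRecord(20);
        CommitRecord c2 = new CommitRecord(30);
        issueOrder.put(c0, 0);
        issueOrder.put(c1, 1);
        issueOrder.put(c2, 2);
        acks.add(c2);
        acks.add(c0);
        acks.add(c1);

        // Sort acks by the order in which their commits were issued.
        acks.sort(Comparator.comparingInt(issueOrder::get));

        // Verify monotonicity, mirroring the assertGreaterThanOrEqual checks in the test.
        long previous = Long.MIN_VALUE;
        for (CommitRecord ack : acks) {
            if (ack.lastSerializedSequenceNumber() < previous) {
                throw new AssertionError("Sequence numbers are not monotonically increasing.");
            }
            previous = ack.lastSerializedSequenceNumber();
        }
        System.out.println("Acks in issue order: " + acks);
    }
}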
Use of io.pravega.common.function.Callbacks in project pravega by pravega.
From class DataFrameReaderTests, method testReadsWithPartialEntries.
/**
 * Tests the case when we begin reading from a DataFrame which begins with a partial record. That record needs to
 * be dropped (not returned). DataFrameReader should always return full records.
 */
@Test
public void testReadsWithPartialEntries() throws Exception {
    // This test will only work if LARGE_RECORD_MIN_SIZE > FRAME_SIZE.
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(3, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MIN_SIZE, 0);
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        ArrayList<DataFrameBuilder.CommitArgs> commitFrames = new ArrayList<>();
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback =
                (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(Callbacks::doNothing, commitFrames::add, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (TestLogItem r : records) {
                b.append(r);
            }
            b.flush();
        }

        // Delete the first entry in the DataLog.
        ArrayList<Integer> failedIndices = new ArrayList<>();
        dataLog.truncate(commitFrames.get(0).getLogAddress(), TIMEOUT).join();

        // Given that each TestLogItem is larger than a DataFrame, truncating the first DataFrame will
        // invalidate the first record.
        failedIndices.add(0);

        TestSerializer logItemFactory = new TestSerializer();
        DataFrameReader<TestLogItem> reader = new DataFrameReader<>(dataLog, logItemFactory, CONTAINER_ID);
        List<TestLogItem> readItems = readAll(reader);
        checkReadResult(records, failedIndices, readItems);
    }
}
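The expectation encoded by failedIndices above is that, after truncating the frame containing the start of record 0, the reader returns every record except that one. A minimal, self-contained sketch of how such an expected-result list can be computed follows; the helper name expectedReads is hypothetical and not part of the test class.

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

// Hypothetical helper mirroring the expectation of the test above: records whose indices were
// marked as failed (here, the record whose start was truncated away) are excluded from the
// set of records the reader is expected to return.
final class ExpectedReadsSketch {
    static <T> List<T> expectedReads(List<T> written, Set<Integer> failedIndices) {
        List<T> expected = new ArrayList<>();
        for (int i = 0; i < written.size(); i++) {
            if (!failedIndices.contains(i)) {
                expected.add(written.get(i));
            }
        }
        return expected;
    }

    public static void main(String[] args) {
        List<String> written = List.of("record0", "record1", "record2");
        // Record 0 began in the truncated frame, so it must not be returned.
        System.out.println(expectedReads(written, Set.of(0))); // [record1, record2]
    }
}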
Use of io.pravega.common.function.Callbacks in project pravega by pravega.
From class DataFrameReaderTests, method testReadsNoFailure.
/**
 * Tests the happy case: DataFrameReader can read from a DataLog when there are no exceptions.
 */
@Test
public void testReadsNoFailure() throws Exception {
    // Fail serialization of every X-th record (write-wise).
    int failEvery = 7;
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(100, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
    records.addAll(DataFrameTestHelpers.generateLogItems(100, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));

    // Have every 'failEvery'-th record fail after writing 90% of itself.
    for (int i = 0; i < records.size(); i += failEvery) {
        records.get(i).failSerializationAfterComplete(0.9, new IOException("intentional " + i));
    }
    HashSet<Integer> failedIndices = new HashSet<>();
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback =
                (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(Callbacks::doNothing, Callbacks::doNothing, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (int i = 0; i < records.size(); i++) {
                try {
                    b.append(records.get(i));
                } catch (IOException ex) {
                    failedIndices.add(i);
                }
            }
            b.flush();
        }

        TestSerializer logItemFactory = new TestSerializer();
        DataFrameReader<TestLogItem> reader = new DataFrameReader<>(dataLog, logItemFactory, CONTAINER_ID);
        List<TestLogItem> readItems = readAll(reader);
        checkReadResult(records, failedIndices, readItems);
    }
}
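The append loop above tracks which records failed to serialize so that the read-back verification can exclude exactly those indices. The sketch below isolates that pattern with a hypothetical Appender interface standing in for DataFrameBuilder.append(); it is an illustration, not the actual Pravega API.

import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Simplified sketch of the failure-tracking append loop above. Appender is a hypothetical
// stand-in for DataFrameBuilder.append(): items whose serialization throws are recorded so
// that read-back verification can exclude exactly those indices.
final class FailedAppendTrackingSketch {
    @FunctionalInterface
    interface Appender<T> {
        void append(T item) throws IOException;
    }

    static <T> Set<Integer> appendAll(List<T> items, Appender<T> appender) {
        Set<Integer> failedIndices = new HashSet<>();
        for (int i = 0; i < items.size(); i++) {
            try {
                appender.append(items.get(i));
            } catch (IOException ex) {
                failedIndices.add(i); // this item never made it into a frame
            }
        }
        return failedIndices;
    }

    public static void main(String[] args) {
        List<String> items = List.of("record0", "fail-record1", "record2");
        Set<Integer> failed = appendAll(items, item -> {
            if (item.startsWith("fail")) {
                throw new IOException("intentional");
            }
            // Otherwise, pretend the append succeeded.
        });
        System.out.println("Failed indices: " + failed); // [1]
    }
}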