Use of org.apache.flink.core.memory.DataInputView in project flink by apache.
From the class AvroSerializerSnapshotTest, the method roundTrip.
// ---------------------------------------------------------------------------------------------------------------
// Utils
// ---------------------------------------------------------------------------------------------------------------
/**
 * Serializes an (Avro) TypeSerializerSnapshot and deserializes it again.
 */
private static <T> AvroSerializerSnapshot<T> roundTrip(TypeSerializerSnapshot<T> original) throws IOException {
    // writeSnapshot();
    DataOutputSerializer out = new DataOutputSerializer(1024);
    original.writeSnapshot(out);
    // init
    AvroSerializerSnapshot<T> restored = new AvroSerializerSnapshot<>();
    // readSnapshot();
    DataInputView in = new DataInputDeserializer(out.wrapAsByteBuffer());
    restored.readSnapshot(restored.getCurrentVersion(), in, original.getClass().getClassLoader());
    return restored;
}
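A minimal sketch of how this helper could be exercised, assuming an AvroSerializer over some generated Avro specific-record class Address (both the class and the test name are hypothetical) and the pre-1.19 resolveSchemaCompatibility(TypeSerializer) signature:

@Test
public void snapshotSurvivesRoundTrip() throws IOException {
    // Address stands in for any generated Avro specific-record class.
    AvroSerializer<Address> serializer = new AvroSerializer<>(Address.class);

    AvroSerializerSnapshot<Address> restored = roundTrip(serializer.snapshotConfiguration());

    // The restored snapshot should accept the original serializer as compatible as-is.
    Assert.assertTrue(restored.resolveSchemaCompatibility(serializer).isCompatibleAsIs());
}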
Use of org.apache.flink.core.memory.DataInputView in project flink by apache.
From the class SpillingBufferTest, the method testWriteReadTooMuchInMemory.
@Test
public void testWriteReadTooMuchInMemory() throws Exception {
    final TestData.TupleGenerator generator =
            new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

    // create the writer output view
    final ArrayList<MemorySegment> memory = new ArrayList<MemorySegment>(NUM_MEMORY_SEGMENTS);
    this.memoryManager.allocatePages(this.parentTask, memory, NUM_MEMORY_SEGMENTS);
    final SpillingBuffer outView =
            new SpillingBuffer(this.ioManager, new ListMemorySegmentSource(memory), this.memoryManager.getPageSize());

    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_INMEM; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }

    // create the reader input view
    DataInputView inView = outView.flip();
    generator.reset();

    // read and re-generate all records and compare them, attempting to read one record too many
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    try {
        for (int i = 0; i < NUM_PAIRS_INMEM + 1; i++) {
            generator.next(rec);
            serializer.deserialize(readRec, inView);
            int k1 = rec.f0;
            String v1 = rec.f1;
            int k2 = readRec.f0;
            String v2 = readRec.f1;
            Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
        }
        Assert.fail("Read too much, expected EOFException.");
    } catch (EOFException eofex) {
        // expected
    }

    // re-read the data
    inView = outView.flip();
    generator.reset();

    // read and re-generate all records and compare them
    for (int i = 0; i < NUM_PAIRS_INMEM; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }

    this.memoryManager.release(outView.close());
    this.memoryManager.release(memory);
}
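Reduced to its essentials, the cycle under test is: write through the SpillingBuffer (which is itself a DataOutputView), flip() it into a DataInputView, and read until EOFException marks the end. A minimal sketch reusing the fixture fields above (memoryManager, ioManager, parentTask):

ArrayList<MemorySegment> segments = new ArrayList<>(NUM_MEMORY_SEGMENTS);
memoryManager.allocatePages(parentTask, segments, NUM_MEMORY_SEGMENTS);
SpillingBuffer buffer =
        new SpillingBuffer(ioManager, new ListMemorySegmentSource(segments), memoryManager.getPageSize());

buffer.writeInt(42);                // write side: SpillingBuffer extends AbstractPagedOutputView
DataInputView in = buffer.flip();   // switch the buffer from writing to reading
Assert.assertEquals(42, in.readInt());

try {
    in.readInt();                   // one read too many
    Assert.fail("expected EOFException");
} catch (EOFException expected) {
    // the view is exhausted
}

memoryManager.release(buffer.close());  // close() hands back the segments the buffer consumed
memoryManager.release(segments);        // release any segments the buffer never took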
Use of org.apache.flink.core.memory.DataInputView in project flink by apache.
From the class SpillingBufferTest, the method testWriteReadTooMuchExternal.
@Test
public void testWriteReadTooMuchExternal() throws Exception {
    final TestData.TupleGenerator generator =
            new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

    // create the writer output view
    final ArrayList<MemorySegment> memory = new ArrayList<MemorySegment>(NUM_MEMORY_SEGMENTS);
    this.memoryManager.allocatePages(this.parentTask, memory, NUM_MEMORY_SEGMENTS);
    final SpillingBuffer outView =
            new SpillingBuffer(this.ioManager, new ListMemorySegmentSource(memory), this.memoryManager.getPageSize());

    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_EXTERNAL; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }

    // create the reader input view
    DataInputView inView = outView.flip();
    generator.reset();

    // read and re-generate all records and compare them, attempting to read one record too many
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    try {
        for (int i = 0; i < NUM_PAIRS_EXTERNAL + 1; i++) {
            generator.next(rec);
            serializer.deserialize(readRec, inView);
            int k1 = rec.f0;
            String v1 = rec.f1;
            int k2 = readRec.f0;
            String v2 = readRec.f1;
            Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
        }
        Assert.fail("Read too much, expected EOFException.");
    } catch (EOFException eofex) {
        // expected
    }

    // re-read the data
    inView = outView.flip();
    generator.reset();

    // read and re-generate all records and compare them
    for (int i = 0; i < NUM_PAIRS_EXTERNAL; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }

    this.memoryManager.release(outView.close());
    this.memoryManager.release(memory);
}
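The external variant is structurally the same test as the in-memory one; the only difference is that NUM_PAIRS_EXTERNAL is large enough that the ListMemorySegmentSource runs out of segments, so the SpillingBuffer spills the overflow to disk through the IOManager. The read path through flip() is unchanged, which is the point: callers see a single DataInputView regardless of where the bytes live.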
Use of org.apache.flink.core.memory.DataInputView in project flink by apache.
From the class CopyOnWriteSkipListStateMapTestUtils, the method readStateFromSnapshot.
private static <K, N, S> Map<N, Map<K, S>> readStateFromSnapshot(
        byte[] data,
        TypeSerializer<K> keySerializer,
        TypeSerializer<N> namespaceSerializer,
        TypeSerializer<S> stateSerializer) throws IOException {
    ByteArrayInputStreamWithPos inputStream = new ByteArrayInputStreamWithPos(data);
    DataInputView dataInputView = new DataInputViewStreamWrapper(inputStream);
    int size = dataInputView.readInt();
    Map<N, Map<K, S>> states = new HashMap<>();
    for (int i = 0; i < size; i++) {
        N namespace = namespaceSerializer.deserialize(dataInputView);
        K key = keySerializer.deserialize(dataInputView);
        S state = stateSerializer.deserialize(dataInputView);
        states.computeIfAbsent(namespace, (none) -> new HashMap<>()).put(key, state);
    }
    return states;
}
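The read loop implies a flat snapshot layout: an int entry count followed by (namespace, key, state) triples. A hypothetical writer producing that layout, mirroring the deserialization order above (writeStateToSnapshot is an assumed name for illustration, not a Flink API):

private static <K, N, S> byte[] writeStateToSnapshot(
        Map<N, Map<K, S>> states,
        TypeSerializer<K> keySerializer,
        TypeSerializer<N> namespaceSerializer,
        TypeSerializer<S> stateSerializer) throws IOException {
    ByteArrayOutputStreamWithPos outputStream = new ByteArrayOutputStreamWithPos();
    DataOutputView out = new DataOutputViewStreamWrapper(outputStream);

    // total number of (namespace, key, state) entries across all namespaces
    int size = states.values().stream().mapToInt(Map::size).sum();
    out.writeInt(size);

    // entries in the order the reader consumes them: namespace, key, state
    for (Map.Entry<N, Map<K, S>> byNamespace : states.entrySet()) {
        for (Map.Entry<K, S> entry : byNamespace.getValue().entrySet()) {
            namespaceSerializer.serialize(byNamespace.getKey(), out);
            keySerializer.serialize(entry.getKey(), out);
            stateSerializer.serialize(entry.getValue(), out);
        }
    }
    return outputStream.toByteArray();
}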
Use of org.apache.flink.core.memory.DataInputView in project flink by apache.
From the class KeyedStateCheckpointOutputStreamTest, the method testReadWriteMissingKeyGroups.
@Test
public void testReadWriteMissingKeyGroups() throws Exception {
    final KeyGroupRange keyRange = new KeyGroupRange(0, 2);
    KeyedStateCheckpointOutputStream stream = createStream(keyRange);
    DataOutputView dov = new DataOutputViewStreamWrapper(stream);

    // write only key group 1 out of the range [0, 2]
    stream.startNewKeyGroup(1);
    dov.writeInt(1);

    KeyGroupsStateHandle fullHandle = stream.closeAndGetHandle();

    int count = 0;
    try (FSDataInputStream in = fullHandle.openInputStream()) {
        DataInputView div = new DataInputViewStreamWrapper(in);
        for (int kg : fullHandle.getKeyGroupRange()) {
            long off = fullHandle.getOffsetForKeyGroup(kg);
            // key groups that were never started report a negative offset
            if (off >= 0) {
                in.seek(off);
                Assert.assertEquals(1, div.readInt());
                ++count;
            }
        }
    }
    Assert.assertEquals(1, count);
}
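For contrast, a sketch of the fully written case: when every key group in the range gets its own section, every offset is non-negative and each value can be read back by seeking. This assumes the same createStream helper as the test above; the test name is hypothetical:

@Test
public void testReadWriteAllKeyGroups() throws Exception {
    final KeyGroupRange keyRange = new KeyGroupRange(0, 2);
    KeyedStateCheckpointOutputStream stream = createStream(keyRange);
    DataOutputView dov = new DataOutputViewStreamWrapper(stream);

    // write one int per key group, tagged with the key-group id
    for (int kg : keyRange) {
        stream.startNewKeyGroup(kg);
        dov.writeInt(kg);
    }

    KeyGroupsStateHandle fullHandle = stream.closeAndGetHandle();
    try (FSDataInputStream in = fullHandle.openInputStream()) {
        DataInputView div = new DataInputViewStreamWrapper(in);
        for (int kg : fullHandle.getKeyGroupRange()) {
            in.seek(fullHandle.getOffsetForKeyGroup(kg));
            Assert.assertEquals(kg, div.readInt());
        }
    }
}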