Use of org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView in project flink by apache.
The class ChannelViewsTest, method testReadWithoutKnownBlockCount.
@Test
public void testReadWithoutKnownBlockCount() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
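All of the ChannelViewsTest examples on this page follow the same write-then-read roundtrip; only the record length, the number of memory segments, and the reader constructor vary. The following skeleton is a minimal sketch of that pattern, using the same no-block-count reader constructor as the test above. The names owner, numSegments, pageSize, serializer, and record are placeholders, not identifiers from the test class.

// Sketch only: condenses the roundtrip shown above; placeholder names throughout.
List<MemorySegment> writeMemory = memoryManager.allocatePages(owner, numSegments);
FileIOChannel.ID channel = ioManager.createChannel();
BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
ChannelWriterOutputView out = new ChannelWriterOutputView(writer, writeMemory, pageSize);

serializer.serialize(record, out);       // repeated for every record
memoryManager.release(out.close());      // close() flushes pending blocks and returns the segments

List<MemorySegment> readMemory = memoryManager.allocatePages(owner, numSegments);
BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
ChannelReaderInputView in = new ChannelReaderInputView(reader, readMemory, true);   // block count not required

serializer.deserialize(record, in);      // repeated until all records are consumed
memoryManager.release(in.close());
reader.deleteChannel();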
Use of org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView in project flink by apache.
The class ChannelViewsTest, method testWriteReadOneBufferOnly.
@Test
public void testWriteReadOneBufferOnly() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, 1);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, 1);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
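This variant differs from the first example in two ways: each view works with only a single memory segment, and the reader is constructed with the block count obtained from the writer-side view. A two-line sketch of that constructor variant, with placeholder names (out, reader, readMemory) and assuming the output view is still in scope after closing it:

int numBlocks = out.getBlockCount();   // known from the writer-side ChannelWriterOutputView
ChannelReaderInputView in = new ChannelReaderInputView(reader, readMemory, numBlocks, true);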
Use of org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView in project flink by apache.
The class ChannelViewsTest, method testWriteReadSmallRecords.
// --------------------------------------------------------------------------------------------
@Test
public void testWriteReadSmallRecords() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Use of org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView in project flink by apache.
The class ChannelViewsTest, method testWriteAndReadLongRecords.
@Test
public void testWriteAndReadLongRecords() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LONG_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        final int k1 = rec.f0;
        final String v1 = rec.f1;
        final int k2 = readRec.f0;
        final String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Use of org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView in project flink by apache.
The class FixedLengthRecordSorterTest, method testFlushFullMemoryPage.
@Test
public void testFlushFullMemoryPage() throws Exception {
    // Insert IntPair which would fill 2 memory pages.
    final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);
    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    } while (sorter.write(record) && num < NUM_RECORDS);
    FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
    BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
    final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());
    sorter.writeToOutput(outputView, 0, NUM_RECORDS);
    this.memoryManager.release(outputView.close());
    BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
    final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
    final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator(readerInputView, dataBuffer, this.serializer);
    record = iterator.next(record);
    int i = 0;
    while (record != null) {
        Assert.assertEquals(i, record.getKey());
        record = iterator.next(record);
        i++;
    }
    Assert.assertEquals(NUM_RECORDS, i);
    this.memoryManager.release(dataBuffer);
    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
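For reference, the spill path exercised by this test reduces to the following sketch: fill the FixedLengthRecordSorter until its write() method reports that the sort buffer is full, then stream the buffered records into a ChannelWriterOutputView. The names sortMemory, spillMemory, numRecords, and generator are placeholders, and newSortBuffer is the test class's own helper.

// Sketch only, condensed from the test above; placeholder names throughout.
FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(sortMemory);
IntPair pair = new IntPair();
int written = 0;
for (int i = 0; i < numRecords; i++) {
    generator.next(pair);
    if (!sorter.write(pair)) {      // write() returns false once the sort buffer is full
        break;
    }
    written++;
}
FileIOChannel.ID channel = ioManager.createChannelEnumerator().next();
BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
ChannelWriterOutputView spill = new ChannelWriterOutputView(writer, spillMemory, spillMemory.get(0).size());
sorter.writeToOutput(spill, 0, written);     // write the first "written" records to the channel
memoryManager.release(spill.close());
sorter.dispose();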