
Example 6 with ChannelReaderInputView

use of org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView in project flink by apache.

the class ChannelViewsTest method testReadWithoutKnownBlockCount.

@Test
public void testReadWithoutKnownBlockCount() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FileIOChannel(org.apache.flink.runtime.io.disk.iomanager.FileIOChannel) MemorySegment(org.apache.flink.core.memory.MemorySegment) ChannelWriterOutputView(org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView) ChannelReaderInputView(org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Test(org.junit.Test)
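
This variant constructs the ChannelReaderInputView without the writer's block count (new ChannelReaderInputView(reader, memory, true)), so the view has to determine where the data ends on its own instead of stopping after a fixed number of blocks; the remaining ChannelViewsTest examples below pass outView.getBlockCount() to the reader explicitly.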

Example 7 with ChannelReaderInputView

use of org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView in project flink by apache.

the class ChannelViewsTest method testWriteReadOneBufferOnly.

@Test
public void testWriteReadOneBufferOnly() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, 1);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, 1);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FileIOChannel(org.apache.flink.runtime.io.disk.iomanager.FileIOChannel) MemorySegment(org.apache.flink.core.memory.MemorySegment) ChannelWriterOutputView(org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView) ChannelReaderInputView(org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Test(org.junit.Test)
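
The difference to the previous example is that only a single memory segment is allocated for the writer and again for the reader (allocatePages(..., 1)), so both views have to cycle all data through one buffer; the block count is handed to the reader explicitly via outView.getBlockCount().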

Example 8 with ChannelReaderInputView

use of org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView in project flink by apache.

the class ChannelViewsTest method testWriteReadSmallRecords.

// --------------------------------------------------------------------------------------------
@Test
public void testWriteReadSmallRecords() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FileIOChannel(org.apache.flink.runtime.io.disk.iomanager.FileIOChannel) MemorySegment(org.apache.flink.core.memory.MemorySegment) ChannelWriterOutputView(org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView) ChannelReaderInputView(org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Test(org.junit.Test)
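
This is the baseline round trip of the test class: short records, the full NUM_MEMORY_SEGMENTS on both the write and the read side, and the reader constructed with the block count reported by the writer.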

Example 9 with ChannelReaderInputView

use of org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView in project flink by apache.

the class ChannelViewsTest method testWriteAndReadLongRecords.

@Test
public void testWriteAndReadLongRecords() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LONG_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();
    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        final int k1 = rec.f0;
        final String v1 = rec.f1;
        final int k2 = readRec.f0;
        final String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FileIOChannel(org.apache.flink.runtime.io.disk.iomanager.FileIOChannel) MemorySegment(org.apache.flink.core.memory.MemorySegment) ChannelWriterOutputView(org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView) ChannelReaderInputView(org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Test(org.junit.Test)
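
Same structure as the previous example, but with VALUE_LONG_LENGTH values and NUM_PAIRS_LONG records, so individual records are likely to span block boundaries and the view's block-switching path is exercised.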

Example 10 with ChannelReaderInputView

use of org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView in project flink by apache.

the class FixedLengthRecordSorterTest method testFlushFullMemoryPage.

@Test
public void testFlushFullMemoryPage() throws Exception {
    // Insert enough IntPair records to fill two memory pages (8 bytes per record).
    final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);
    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    } while (sorter.write(record) && num < NUM_RECORDS);
    FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
    BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
    final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());
    sorter.writeToOutput(outputView, 0, NUM_RECORDS);
    this.memoryManager.release(outputView.close());
    BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
    final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
    final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);
    record = iterator.next(record);
    int i = 0;
    while (record != null) {
        Assert.assertEquals(i, record.getKey());
        record = iterator.next(record);
        i++;
    }
    Assert.assertEquals(NUM_RECORDS, i);
    this.memoryManager.release(dataBuffer);
    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
Also used : FileIOChannel(org.apache.flink.runtime.io.disk.iomanager.FileIOChannel) ChannelReaderInputViewIterator(org.apache.flink.runtime.io.disk.ChannelReaderInputViewIterator) IntPair(org.apache.flink.runtime.operators.testutils.types.IntPair) MemorySegment(org.apache.flink.core.memory.MemorySegment) ChannelWriterOutputView(org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView) ChannelReaderInputView(org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView) DummyInvokable(org.apache.flink.runtime.operators.testutils.DummyInvokable) UniformIntPairGenerator(org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator) Test(org.junit.Test)
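
Here the ChannelReaderInputView is not read from directly; it is wrapped in a ChannelReaderInputViewIterator that deserializes the IntPair records as they are consumed. All examples on this page share the same write-then-read shape, condensed in the sketch below. It assumes the fixtures the tests hold as fields (an IOManager, a MemoryManager, an AbstractInvokable memory owner and a TypeSerializer); the helper name writeAndReadBack and its parameter list are illustrative, not part of the original tests.

// A condensed sketch of the write-then-read pattern shared by the examples above.
// The fixture parameters stand in for the test fields (ioManager, memoryManager, parentTask, serializer).
private static <T> T writeAndReadBack(IOManager ioManager, MemoryManager memoryManager,
        AbstractInvokable owner, TypeSerializer<T> serializer, T record, T reuse,
        int numSegments, int pageSize) throws Exception {
    final FileIOChannel.ID channel = ioManager.createChannel();
    // write side: serialize through a ChannelWriterOutputView backed by managed memory
    List<MemorySegment> memory = memoryManager.allocatePages(owner, numSegments);
    final BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, pageSize);
    serializer.serialize(record, outView);
    final int blockCount = outView.getBlockCount();
    // close() flushes the last block and returns the memory segments for release
    memoryManager.release(outView.close());
    // read side: deserialize through a ChannelReaderInputView over the same channel;
    // pass the block count if it is known, or use the constructor without it as in Example 6
    memory = memoryManager.allocatePages(owner, numSegments);
    final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, blockCount, true);
    reuse = serializer.deserialize(reuse, inView);
    memoryManager.release(inView.close());
    // remove the spill file once the data has been read back
    reader.deleteChannel();
    return reuse;
}

In the tests the write and read loops run over many records rather than a single one, and the read side compares each deserialized record against a freshly re-generated reference, as shown above.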

Aggregations

MemorySegment (org.apache.flink.core.memory.MemorySegment) 10
ChannelReaderInputView (org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView) 10
ChannelWriterOutputView (org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView) 8
FileIOChannel (org.apache.flink.runtime.io.disk.iomanager.FileIOChannel) 8
Test (org.junit.Test) 8
Tuple2 (org.apache.flink.api.java.tuple.Tuple2) 6
TestData (org.apache.flink.runtime.operators.testutils.TestData) 6
ChannelReaderInputViewIterator (org.apache.flink.runtime.io.disk.ChannelReaderInputViewIterator) 4
ArrayList (java.util.ArrayList) 2
HeaderlessChannelReaderInputView (org.apache.flink.runtime.io.disk.iomanager.HeaderlessChannelReaderInputView) 2
DummyInvokable (org.apache.flink.runtime.operators.testutils.DummyInvokable) 2
UniformIntPairGenerator (org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator) 2
IntPair (org.apache.flink.runtime.operators.testutils.types.IntPair) 2
EOFException (java.io.EOFException) 1
LinkedBlockingQueue (java.util.concurrent.LinkedBlockingQueue) 1
BulkBlockChannelReader (org.apache.flink.runtime.io.disk.iomanager.BulkBlockChannelReader) 1