
Example 21 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class IterationHeadTask method initBackChannel.

/**
	 * The iteration head prepares the backchannel: it allocates memory, instantiates a {@link BlockingBackChannel} and
	 * hands it to the iteration tail via a {@link Broker} singleton.
	 */
private BlockingBackChannel initBackChannel() throws Exception {
    /* get the size of the memory available to the backchannel */
    int backChannelMemoryPages = getMemoryManager().computeNumberOfPages(this.config.getRelativeBackChannelMemory());
    /* allocate the memory available to the backchannel */
    List<MemorySegment> segments = new ArrayList<MemorySegment>();
    int segmentSize = getMemoryManager().getPageSize();
    getMemoryManager().allocatePages(this, segments, backChannelMemoryPages);
    /* instantiate the backchannel */
    BlockingBackChannel backChannel = new BlockingBackChannel(new SerializedUpdateBuffer(segments, segmentSize, getIOManager()));
    /* hand the backchannel over to the iteration tail */
    Broker<BlockingBackChannel> broker = BlockingBackChannelBroker.instance();
    broker.handIn(brokerKey(), backChannel);
    return backChannel;
}
Also used : ArrayList(java.util.ArrayList) BlockingBackChannel(org.apache.flink.runtime.iterative.concurrent.BlockingBackChannel) MemorySegment(org.apache.flink.core.memory.MemorySegment) SerializedUpdateBuffer(org.apache.flink.runtime.iterative.io.SerializedUpdateBuffer)
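
The handoff between the iteration head and tail relies on a keyed, blocking broker: the head hands the backchannel in under brokerKey(), and the tail blocks until it can pick it up under the same key. A minimal, self-contained sketch of that pattern is shown below; the class and method names (SimpleBroker, handIn, getAndRemove) are illustrative only and simplified, not Flink's actual Broker implementation.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Simplified sketch of a keyed, blocking hand-over broker (hypothetical class,
// not Flink's org.apache.flink.runtime.iterative.concurrent.Broker).
public class SimpleBroker<V> {

    // one single-slot queue per broker key
    private final ConcurrentMap<String, BlockingQueue<V>> slots = new ConcurrentHashMap<>();

    private BlockingQueue<V> slotFor(String key) {
        return slots.computeIfAbsent(key, k -> new ArrayBlockingQueue<>(1));
    }

    // called by the producer side (the iteration head in the example above)
    public void handIn(String key, V value) {
        if (!slotFor(key).offer(value)) {
            throw new IllegalStateException("A value was already handed in for key " + key);
        }
    }

    // called by the consumer side (the iteration tail); blocks until the value is available
    public V getAndRemove(String key) throws InterruptedException {
        V value = slotFor(key).take();
        slots.remove(key);
        return value;
    }
}

The broker.handIn(brokerKey(), backChannel) call in the example corresponds to handIn here; a matching blocking lookup on the tail side completes the exchange.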

Example 22 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class FileChannelOutputView method close.

private void close(boolean delete) throws IOException {
    try {
        // send off the last segment, if we have not been closed before
        MemorySegment current = getCurrentSegment();
        if (current != null) {
            writeSegment(current, getCurrentPositionInSegment());
        }
        clear();
        if (delete) {
            writer.closeAndDelete();
        } else {
            writer.close();
        }
    } finally {
        memManager.release(memory);
    }
}
Also used : MemorySegment(org.apache.flink.core.memory.MemorySegment)
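
Example 22 shows a common close pattern: flush whatever is left in the current segment, close or delete the underlying channel depending on the flag, and release the memory in a finally block so it is returned even if the write fails. A stand-alone sketch of the same pattern using only JDK classes is shown below; SpillingWriter and its fields are hypothetical, with ByteBuffer standing in for MemorySegment.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

// Hypothetical, simplified writer illustrating the flush-then-release close pattern.
public class SpillingWriter {

    private final FileChannel channel;
    private final Path file;
    private final List<ByteBuffer> pool;   // stands in for the list of MemorySegments
    private ByteBuffer current;            // partially filled write buffer, may be null

    public SpillingWriter(FileChannel channel, Path file, List<ByteBuffer> pool, ByteBuffer current) {
        this.channel = channel;
        this.file = file;
        this.pool = pool;
        this.current = current;
    }

    public void close(boolean delete) throws IOException {
        try {
            // flush the partially filled buffer, if any
            if (current != null) {
                current.flip();
                while (current.hasRemaining()) {
                    channel.write(current);
                }
                current = null;
            }
            channel.close();
            if (delete) {
                Files.deleteIfExists(file);   // analogous to writer.closeAndDelete()
            }
        } finally {
            // drop our references to the buffers, analogous to memManager.release(memory) above
            pool.clear();
        }
    }
}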

Example 23 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class SpillingBuffer method nextSegment.

@Override
protected MemorySegment nextSegment(MemorySegment current, int positionInCurrent) throws IOException {
    // check if we are still in memory
    if (this.writer == null) {
        this.fullSegments.add(current);
        final MemorySegment nextSeg = this.memorySource.nextSegment();
        if (nextSeg != null) {
            return nextSeg;
        } else {
            // out of memory, need to spill: create a writer
            this.writer = this.ioManager.createBlockChannelWriter(this.ioManager.createChannel());
            // add all segments to the writer
            this.blockCount = this.fullSegments.size();
            this.numMemorySegmentsInWriter = this.blockCount;
            for (int i = 0; i < this.fullSegments.size(); i++) {
                this.writer.writeBlock(this.fullSegments.get(i));
            }
            this.fullSegments.clear();
            final MemorySegment seg = this.writer.getNextReturnedBlock();
            this.numMemorySegmentsInWriter--;
            return seg;
        }
    } else {
        // spilling
        this.writer.writeBlock(current);
        this.blockCount++;
        return this.writer.getNextReturnedBlock();
    }
}
Also used : MemorySegment(org.apache.flink.core.memory.MemorySegment)
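
The nextSegment logic above implements a "stay in memory until the memory source runs dry, then spill everything written so far and continue on disk" strategy. A compact, self-contained sketch of that decision flow, using plain byte arrays and a temp-file output stream instead of Flink's MemorySegment and block channel writer (all names here are hypothetical), looks roughly like this:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Hypothetical sketch of "fill memory first, spill to disk once memory is exhausted".
public class SpillingBufferSketch {

    private final Deque<byte[]> freeBuffers;       // stands in for the memory source
    private final List<byte[]> fullBuffers = new ArrayList<>();
    private OutputStream spillStream;              // null while we are still in memory

    public SpillingBufferSketch(Deque<byte[]> freeBuffers) {
        this.freeBuffers = freeBuffers;
    }

    // called when the current buffer is full; returns the buffer to write into next
    public byte[] nextBuffer(byte[] current) throws IOException {
        if (spillStream == null) {
            fullBuffers.add(current);
            byte[] next = freeBuffers.poll();
            if (next != null) {
                return next;                        // still in memory
            }
            // out of memory: open a spill file and push everything written so far to disk
            Path spillFile = Files.createTempFile("spill", ".bin");
            spillStream = Files.newOutputStream(spillFile);
            for (byte[] full : fullBuffers) {
                spillStream.write(full);
            }
            fullBuffers.clear();
            // reuse a buffer whose contents have already been written out
            return current;
        } else {
            // already spilling: write the full buffer out and reuse it
            spillStream.write(current);
            return current;
        }
    }
}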

Example 24 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class LocalBufferPool method returnExcessMemorySegments.

private void returnExcessMemorySegments() {
    assert Thread.holdsLock(availableMemorySegments);
    while (numberOfRequestedMemorySegments > currentPoolSize) {
        MemorySegment segment = availableMemorySegments.poll();
        if (segment == null) {
            return;
        }
        returnMemorySegment(segment);
    }
}
Also used : MemorySegment(org.apache.flink.core.memory.MemorySegment)
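
returnExcessMemorySegments shrinks the pool back to its target size: as long as more segments are requested than the current pool size allows, it recycles idle segments, and it stops early if none are idle (those still in use are returned later). A minimal stand-alone sketch of the same shrink-to-target loop over a plain queue is shown below; BoundedPool and its fields are hypothetical, not Flink's LocalBufferPool.

import java.util.ArrayDeque;
import java.util.Deque;

// Hypothetical sketch of shrinking a buffer pool back to its target size.
public class BoundedPool {

    private final Object lock = new Object();
    private final Deque<byte[]> availableBuffers = new ArrayDeque<>();

    // number of buffers obtained from a (not shown) global pool
    private int numberOfRequestedBuffers;
    private int currentPoolSize;

    public void setPoolSize(int newSize) {
        synchronized (lock) {
            currentPoolSize = newSize;
            returnExcessBuffers();
        }
    }

    // must be called with the lock held, mirroring the holdsLock assertion above
    private void returnExcessBuffers() {
        assert Thread.holdsLock(lock);
        while (numberOfRequestedBuffers > currentPoolSize) {
            byte[] buffer = availableBuffers.poll();
            if (buffer == null) {
                // all excess buffers are currently in use; they will be returned later
                return;
            }
            recycle(buffer);
        }
    }

    private void recycle(byte[] buffer) {
        // in the real pool this would hand the segment back to the global pool;
        // the sketch only updates the bookkeeping
        numberOfRequestedBuffers--;
    }
}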

Example 25 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class ChannelViewsTest method testWriteReadNotAll.

@Test
public void testWriteReadNotAll() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());
    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();
    // read back only half of the records and compare them against the re-generated ones
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT / 2; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);
        int k1 = rec.f0;
        String v1 = rec.f1;
        int k2 = readRec.f0;
        String v2 = readRec.f1;
        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }
    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FileIOChannel(org.apache.flink.runtime.io.disk.iomanager.FileIOChannel) MemorySegment(org.apache.flink.core.memory.MemorySegment) ChannelWriterOutputView(org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView) ChannelReaderInputView(org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Test(org.junit.Test)
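
The test writes NUM_PAIRS_SHORT records through the channel and then deliberately reads back only half of them before deleting the channel, checking that the partial read still matches the re-generated data. The same write-then-partial-read check, boiled down to JDK streams over an in-memory buffer, is sketched below; the record layout (an int key plus a String value) mirrors the test, but the class name and generation scheme are assumptions for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Random;

// Hypothetical condensed version of the write-then-read-only-half round trip.
public class PartialReadRoundTrip {

    public static void main(String[] args) throws IOException {
        final int numPairs = 1000;
        final long seed = 42L;

        // write all pairs
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        Random generator = new Random(seed);
        for (int i = 0; i < numPairs; i++) {
            out.writeInt(generator.nextInt());
            out.writeUTF("value-" + generator.nextInt(1000));
        }
        out.close();

        // read back only half and compare against a re-seeded generator
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        Random expected = new Random(seed);
        for (int i = 0; i < numPairs / 2; i++) {
            int expectedKey = expected.nextInt();
            String expectedValue = "value-" + expected.nextInt(1000);
            int key = in.readInt();
            String value = in.readUTF();
            if (key != expectedKey || !value.equals(expectedValue)) {
                throw new AssertionError("The re-generated and the read record do not match.");
            }
        }
        in.close();
    }
}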

Aggregations

MemorySegment (org.apache.flink.core.memory.MemorySegment): 161
Test (org.junit.Test): 86
DummyInvokable (org.apache.flink.runtime.operators.testutils.DummyInvokable): 38
ArrayList (java.util.ArrayList): 30
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 24
IntPair (org.apache.flink.runtime.operators.testutils.types.IntPair): 24
MemoryAllocationException (org.apache.flink.runtime.memory.MemoryAllocationException): 22
IOException (java.io.IOException): 19
TestData (org.apache.flink.runtime.operators.testutils.TestData): 18
FileIOChannel (org.apache.flink.runtime.io.disk.iomanager.FileIOChannel): 17
UniformIntPairGenerator (org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator): 16
IOManager (org.apache.flink.runtime.io.disk.iomanager.IOManager): 15
IOManagerAsync (org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync): 15
EOFException (java.io.EOFException): 14
AbstractInvokable (org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable): 14
Random (java.util.Random): 11
ChannelReaderInputView (org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView): 10
UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator): 9
Record (org.apache.flink.types.Record): 9
MutableObjectIterator (org.apache.flink.util.MutableObjectIterator): 9