Example 71 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class LargeRecordHandlerITCase method testRecordHandlerCompositeKey.

@Test
public void testRecordHandlerCompositeKey() {
    final int PAGE_SIZE = 4 * 1024;
    final int NUM_PAGES = 1000;
    final int NUM_RECORDS = 10;
    try (final IOManager ioMan = new IOManagerAsync()) {
        final MemoryManager memMan = MemoryManagerBuilder.newBuilder().setMemorySize(NUM_PAGES * PAGE_SIZE).setPageSize(PAGE_SIZE).build();
        final AbstractInvokable owner = new DummyInvokable();
        final List<MemorySegment> initialMemory = memMan.allocatePages(owner, 6);
        final List<MemorySegment> sortMemory = memMan.allocatePages(owner, NUM_PAGES - 6);
        final TypeInformation<?>[] types = new TypeInformation<?>[] { BasicTypeInfo.LONG_TYPE_INFO, new ValueTypeInfo<SomeVeryLongValue>(SomeVeryLongValue.class), BasicTypeInfo.BYTE_TYPE_INFO };
        final TupleTypeInfo<Tuple3<Long, SomeVeryLongValue, Byte>> typeInfo = new TupleTypeInfo<Tuple3<Long, SomeVeryLongValue, Byte>>(types);
        final TypeSerializer<Tuple3<Long, SomeVeryLongValue, Byte>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple3<Long, SomeVeryLongValue, Byte>> comparator = typeInfo.createComparator(new int[] { 2, 0 }, new boolean[] { true, true }, 0, new ExecutionConfig());
        LargeRecordHandler<Tuple3<Long, SomeVeryLongValue, Byte>> handler = new LargeRecordHandler<Tuple3<Long, SomeVeryLongValue, Byte>>(serializer, comparator, ioMan, memMan, initialMemory, owner, 128, owner.getExecutionConfig());
        assertFalse(handler.hasData());
        // add the test data
        Random rnd = new Random();
        for (int i = 0; i < NUM_RECORDS; i++) {
            long val = rnd.nextLong();
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(val, new SomeVeryLongValue((int) val), (byte) val));
            assertTrue(handler.hasData());
        }
        MutableObjectIterator<Tuple3<Long, SomeVeryLongValue, Byte>> sorted = handler.finishWriteAndSortKeys(sortMemory);
        try {
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(92L, null, (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
        // expected
        }
        Tuple3<Long, SomeVeryLongValue, Byte> previous = null;
        Tuple3<Long, SomeVeryLongValue, Byte> next;
        while ((next = sorted.next(null)) != null) {
            // key and value must be equal
            assertTrue(next.f0.intValue() == next.f1.val());
            assertTrue(next.f0.byteValue() == next.f2);
            // order must be correct
            if (previous != null) {
                assertTrue(previous.f2 <= next.f2);
                assertTrue(previous.f2.byteValue() != next.f2.byteValue() || previous.f0 <= next.f0);
            }
            previous = next;
        }
        handler.close();
        assertFalse(handler.hasData());
        handler.close();
        try {
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(92L, null, (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
        // expected
        }
        assertTrue(memMan.verifyEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) AbstractInvokable(org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) IOManagerAsync(org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync) Random(java.util.Random) DummyInvokable(org.apache.flink.runtime.operators.testutils.DummyInvokable) ValueTypeInfo(org.apache.flink.api.java.typeutils.ValueTypeInfo) IOManager(org.apache.flink.runtime.io.disk.iomanager.IOManager) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) MemorySegment(org.apache.flink.core.memory.MemorySegment) TupleTypeInfo(org.apache.flink.api.java.typeutils.TupleTypeInfo) IOException(java.io.IOException) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Test(org.junit.Test)
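
The test above leans on the MemoryManager's page accounting: pages are handed out to an owner, and verifyEmpty() only succeeds once every segment has been returned. The following is a minimal sketch of that round trip, assuming the same test utilities (MemoryManagerBuilder, DummyInvokable) as the example; the sizes and the release call are illustrative, not taken from the test.

import java.util.List;

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.memory.MemoryManagerBuilder;
import org.apache.flink.runtime.operators.testutils.DummyInvokable;

public class MemoryManagerRoundTrip {

    public static void main(String[] args) throws Exception {
        final int pageSize = 4 * 1024;
        final MemoryManager memMan = MemoryManagerBuilder.newBuilder()
                .setMemorySize(16 * pageSize)
                .setPageSize(pageSize)
                .build();
        final Object owner = new DummyInvokable();

        // Hand out a few pages to the owner, as the test does for initialMemory and sortMemory.
        List<MemorySegment> segments = memMan.allocatePages(owner, 4);

        // ... the segments would be handed to a LargeRecordHandler or sorter here ...

        // Return the pages; verifyEmpty() only holds once every segment is back.
        memMan.release(segments);
        System.out.println(memMan.verifyEmpty()); // expected: true
    }
}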

Example 72 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class InPlaceMutableHashTableTest method testHashTableGrowthWithInsertOrReplace.

/**
 * This test validates that records are not lost via "insertOrReplace()" as in bug [FLINK-2361]
 *
 * <p>This has to be duplicated in InPlaceMutableHashTableTest and CompactingHashTableTest
 * because of the different constructor calls.
 */
@Test
public void testHashTableGrowthWithInsertOrReplace() {
    try {
        final int numElements = 1000000;
        List<MemorySegment> memory = getMemory(1000, 32 * 1024);
        InPlaceMutableHashTable<Tuple2<Long, String>> table = new InPlaceMutableHashTable<Tuple2<Long, String>>(serializer, comparator, memory);
        table.open();
        for (long i = 0; i < numElements; i++) {
            table.insertOrReplaceRecord(Tuple2.of(i, String.valueOf(i)));
        }
        // make sure that all elements are contained via the entry iterator
        {
            BitSet bitSet = new BitSet(numElements);
            MutableObjectIterator<Tuple2<Long, String>> iter = table.getEntryIterator();
            Tuple2<Long, String> next;
            while ((next = iter.next()) != null) {
                assertNotNull(next.f0);
                assertNotNull(next.f1);
                assertEquals(next.f0.longValue(), Long.parseLong(next.f1));
                bitSet.set(next.f0.intValue());
            }
            assertEquals(numElements, bitSet.cardinality());
        }
        // make sure all entries are contained via the prober
        {
            InPlaceMutableHashTable<Tuple2<Long, String>>.HashTableProber<Long> prober = table.getProber(probeComparator, pairComparator);
            Tuple2<Long, String> reuse = new Tuple2<>();
            for (long i = 0; i < numElements; i++) {
                assertNotNull(prober.getMatchFor(i, reuse));
                assertNull(prober.getMatchFor(i + numElements, reuse));
            }
        }
        table.close();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used : MutableObjectIterator(org.apache.flink.util.MutableObjectIterator) BitSet(java.util.BitSet) MemorySegment(org.apache.flink.core.memory.MemorySegment) EOFException(java.io.EOFException) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Test(org.junit.Test)
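
The getMemory(1000, 32 * 1024) helper is not part of this excerpt. A plausible stand-in, shown below as a sketch rather than the test's actual implementation, simply allocates unpooled on-heap segments via MemorySegmentFactory, which is enough to feed the InPlaceMutableHashTable constructor.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

public class GetMemorySketch {

    // Hypothetical stand-in for the getMemory(numPages, pageSize) helper used above.
    static List<MemorySegment> getMemory(int numPages, int pageSize) {
        List<MemorySegment> memory = new ArrayList<>(numPages);
        for (int i = 0; i < numPages; i++) {
            // plain on-heap segments, not tracked by a MemoryManager
            memory.add(MemorySegmentFactory.allocateUnpooledSegment(pageSize));
        }
        return memory;
    }

    public static void main(String[] args) {
        List<MemorySegment> memory = getMemory(1000, 32 * 1024);
        System.out.println(memory.size() + " segments of " + memory.get(0).size() + " bytes");
    }
}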

Example 73 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class SkipListUtils method helpGetNodeLatestVersion.

/**
 * Returns the latest version of the value for the given node.
 *
 * @param node the node.
 * @param spaceAllocator the space allocator.
 * @return the latest version of the node's value.
 */
static int helpGetNodeLatestVersion(long node, Allocator spaceAllocator) {
    Chunk chunk = spaceAllocator.getChunkById(SpaceUtils.getChunkIdByAddress(node));
    int offsetInChunk = SpaceUtils.getChunkOffsetByAddress(node);
    MemorySegment segment = chunk.getMemorySegment(offsetInChunk);
    int offsetInByteBuffer = chunk.getOffsetInSegment(offsetInChunk);
    long valuePointer = getValuePointer(segment, offsetInByteBuffer);
    return helpGetValueVersion(valuePointer, spaceAllocator);
}
Also used : Chunk(org.apache.flink.runtime.state.heap.space.Chunk) MemorySegment(org.apache.flink.core.memory.MemorySegment)
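
Under the hood, the getValuePointer call is a fixed-offset read against the chunk's MemorySegment. The sketch below shows that access pattern in isolation, using an unpooled segment and a hypothetical field offset; the real offsets come from the SkipListUtils node layout, not from this example.

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

public class NodeFieldAccessSketch {

    public static void main(String[] args) {
        // Stand-in for a chunk-backed segment; the real one comes from chunk.getMemorySegment(...).
        MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(64);

        // Hypothetical field offset; the real node layout is defined by SkipListUtils.
        int valuePointerOffset = 8;

        segment.putLong(valuePointerOffset, 42L);                 // writer side
        long valuePointer = segment.getLong(valuePointerOffset);  // what a getValuePointer-style read boils down to
        System.out.println(valuePointer); // 42
    }
}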

Example 74 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class CopyOnWriteSkipListStateMapBasicOpTest method testNamespaceNodeIteratorIllegalNextInvocation.

/**
 * Tests that the namespace node iterator throws {@link NoSuchElementException} when {@code
 * next()} is called after the iterator has been exhausted.
 */
@Test
public void testNamespaceNodeIteratorIllegalNextInvocation() {
    SkipListKeySerializer<Integer, Long> skipListKeySerializer = new SkipListKeySerializer<>(IntSerializer.INSTANCE, LongSerializer.INSTANCE);
    byte[] namespaceBytes = skipListKeySerializer.serializeNamespace(namespace);
    MemorySegment namespaceSegment = MemorySegmentFactory.wrap(namespaceBytes);
    Iterator<Long> iterator = stateMap.new NamespaceNodeIterator(namespaceSegment, 0, namespaceBytes.length);
    while (iterator.hasNext()) {
        iterator.next();
    }
    try {
        iterator.next();
        fail("Should have thrown NoSuchElementException.");
    } catch (NoSuchElementException e) {
    // expected
    }
}
Also used : MemorySegment(org.apache.flink.core.memory.MemorySegment) NoSuchElementException(java.util.NoSuchElementException) Test(org.junit.Test)
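
MemorySegmentFactory.wrap is what lets the test hand plain serialized bytes to the iterator: it creates a heap segment backed directly by the given array, with no copy. A small sketch with placeholder bytes (not the test's serialized namespace):

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

public class WrapBytesSketch {

    public static void main(String[] args) {
        // Placeholder payload standing in for the serialized namespace bytes.
        byte[] namespaceBytes = {1, 2, 3, 4, 5, 6, 7, 8};

        // wrap() returns a segment that is a view over the existing array.
        MemorySegment segment = MemorySegmentFactory.wrap(namespaceBytes);

        System.out.println(segment.size()); // 8: the segment spans exactly the wrapped array
        System.out.println(segment.get(7)); // 8: reads straight through to namespaceBytes[7]
    }
}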

Example 75 with MemorySegment

use of org.apache.flink.core.memory.MemorySegment in project flink by apache.

the class CopyOnWriteSkipListStateMapBasicOpTest method testPutAndGetNodeWithNoneZeroOffset.

/**
 * Tests the internal capability of working with a partial {@link MemorySegment}, making sure
 * the internal methods work when putting/getting state whose key is stored at a non-zero
 * offset within the segment.
 */
@Test
public void testPutAndGetNodeWithNoneZeroOffset() {
    final int key = 10;
    final long namespace = 0L;
    final String valueString = "test";
    SkipListKeySerializer<Integer, Long> skipListKeySerializer = new SkipListKeySerializer<>(IntSerializer.INSTANCE, LongSerializer.INSTANCE);
    SkipListValueSerializer<String> skipListValueSerializer = new SkipListValueSerializer<>(StringSerializer.INSTANCE);
    byte[] keyBytes = skipListKeySerializer.serialize(key, namespace);
    byte[] constructedKeyBytes = new byte[keyBytes.length + 1];
    System.arraycopy(keyBytes, 0, constructedKeyBytes, 1, keyBytes.length);
    MemorySegment keySegment = MemorySegmentFactory.wrap(constructedKeyBytes);
    int keyLen = keyBytes.length;
    byte[] value = skipListValueSerializer.serialize(valueString);
    stateMap.putValue(keySegment, 1, keyLen, value, false);
    String state = stateMap.getNode(keySegment, 1, keyLen);
    assertThat(state, is(valueString));
}
Also used : MemorySegment(org.apache.flink.core.memory.MemorySegment) Test(org.junit.Test)
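
The same offset handling can be reproduced with the plain MemorySegment accessors: write the payload behind a pad byte and read it back starting at offset 1. A minimal sketch, independent of the state map internals; the key bytes here are arbitrary placeholders.

import java.util.Arrays;

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

public class OffsetReadSketch {

    public static void main(String[] args) {
        byte[] keyBytes = {10, 20, 30};

        // Reserve one leading pad byte, as the test does with constructedKeyBytes.
        byte[] padded = new byte[keyBytes.length + 1];
        System.arraycopy(keyBytes, 0, padded, 1, keyBytes.length);
        MemorySegment segment = MemorySegmentFactory.wrap(padded);

        // Read the key back starting at offset 1, skipping the pad byte.
        byte[] roundTrip = new byte[keyBytes.length];
        segment.get(1, roundTrip, 0, roundTrip.length);
        System.out.println(Arrays.equals(keyBytes, roundTrip)); // true
    }
}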

Aggregations

MemorySegment (org.apache.flink.core.memory.MemorySegment): 375 usages
Test (org.junit.Test): 136 usages
ArrayList (java.util.ArrayList): 52 usages
DummyInvokable (org.apache.flink.runtime.operators.testutils.DummyInvokable): 44 usages
IOException (java.io.IOException): 37 usages
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 29 usages
Buffer (org.apache.flink.runtime.io.network.buffer.Buffer): 26 usages
NetworkBuffer (org.apache.flink.runtime.io.network.buffer.NetworkBuffer): 25 usages
MemoryAllocationException (org.apache.flink.runtime.memory.MemoryAllocationException): 24 usages
IntPair (org.apache.flink.runtime.operators.testutils.types.IntPair): 24 usages
FileIOChannel (org.apache.flink.runtime.io.disk.iomanager.FileIOChannel): 20 usages
EOFException (java.io.EOFException): 18 usages
ByteBuffer (java.nio.ByteBuffer): 18 usages
AbstractInvokable (org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable): 18 usages
TestData (org.apache.flink.runtime.operators.testutils.TestData): 18 usages
Random (java.util.Random): 16 usages
UniformIntPairGenerator (org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator): 16 usages
Chunk (org.apache.flink.runtime.state.heap.space.Chunk): 15 usages
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 15 usages
IOManagerAsync (org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync): 14 usages