Search in sources :

Example 36 with MemoryManager

Example usage of org.apache.flink.runtime.memory.MemoryManager in the Apache Flink project.

Source: class BytesHashMapTestBase, method testBuildAndRetrieve.

@Test
public void testBuildAndRetrieve() throws Exception {
    // Size the memory pool so it can hold NUM_ENTRIES key/value pairs.
    final int segmentCount =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    final int totalMemory = segmentCount * PAGE_SIZE;
    final MemoryManager memoryManager =
            MemoryManagerBuilder.newBuilder().setMemorySize(totalMemory).build();
    final AbstractBytesHashMap<K> table =
            createBytesHashMap(memoryManager, totalMemory, KEY_TYPES, VALUE_TYPES);
    // Insert random entries, then verify every one of them can be looked up again.
    final K[] randomKeys = generateRandomKeys(NUM_ENTRIES);
    final List<BinaryRowData> expectedRows = new ArrayList<>(NUM_ENTRIES);
    verifyInsert(randomKeys, expectedRows, table);
    verifyRetrieve(table, randomKeys, expectedRows);
    // Release all segments back to the memory manager.
    table.free();
}
Also used : BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) ArrayList(java.util.ArrayList) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) Test(org.junit.Test)

Example 37 with MemoryManager

Example usage of org.apache.flink.runtime.memory.MemoryManager in the Apache Flink project.

Source: class BytesHashMapTestBase, method testRest.

@Test
public void testRest() throws Exception {
    // NOTE(review): the name looks like a typo for "testReset"; kept as-is so
    // tooling that selects tests by name keeps working.
    final int segmentCount =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    final int totalMemory = segmentCount * PAGE_SIZE;
    final MemoryManager memoryManager =
            MemoryManagerBuilder.newBuilder().setMemorySize(totalMemory).build();
    final AbstractBytesHashMap<K> table =
            createBytesHashMap(memoryManager, totalMemory, KEY_TYPES, VALUE_TYPES);
    // First round: insert + update, then read everything back.
    final K[] randomKeys = generateRandomKeys(NUM_ENTRIES);
    final List<BinaryRowData> expectedRows = new ArrayList<>(NUM_ENTRIES);
    verifyInsertAndUpdate(randomKeys, expectedRows, table);
    verifyRetrieve(table, randomKeys, expectedRows);
    // After reset() the table must report zero elements and have shrunk back to
    // a single record-area segment, yet remain usable for a second round.
    table.reset();
    Assert.assertEquals(0, table.getNumElements());
    Assert.assertEquals(1, table.getRecordAreaMemorySegments().size());
    expectedRows.clear();
    verifyInsertAndUpdate(randomKeys, expectedRows, table);
    verifyRetrieve(table, randomKeys, expectedRows);
    table.free();
}
Also used : BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) ArrayList(java.util.ArrayList) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) Test(org.junit.Test)

Example 38 with MemoryManager

Example usage of org.apache.flink.runtime.memory.MemoryManager in the Apache Flink project.

Source: class BytesHashMapTestBase, method testResetAndOutput.

// Drives the map past its memory budget on purpose: inserts until an append
// throws, manually drains the record area, resets the table, and continues.
// Finally checks that every inserted entry is recovered either via the manual
// drain or via the end-of-test entry iterator.
@Test
public void testResetAndOutput() throws Exception {
    final Random rnd = new Random(RANDOM_SEED);
    // Deliberately small pool (64 pages) so appends eventually fail.
    final int reservedMemSegments = 64;
    int minMemorySize = reservedMemSegments * PAGE_SIZE;
    MemoryManager memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(minMemorySize).build();
    AbstractBytesHashMap<K> table = createBytesHashMap(memoryManager, minMemorySize, KEY_TYPES, VALUE_TYPES);
    K[] keys = generateRandomKeys(NUM_ENTRIES);
    List<BinaryRowData> expected = new ArrayList<>(NUM_ENTRIES);
    List<BinaryRowData> actualValues = new ArrayList<>(NUM_ENTRIES);
    List<K> actualKeys = new ArrayList<>(NUM_ENTRIES);
    for (int i = 0; i < NUM_ENTRIES; i++) {
        K groupKey = keys[i];
        // look up and insert
        BytesMap.LookupInfo<K, BinaryRowData> lookupInfo = table.lookup(groupKey);
        Assert.assertFalse(lookupInfo.isFound());
        try {
            // Normal path: append succeeds while memory remains.
            BinaryRowData entry = table.append(lookupInfo, defaultValue);
            Assert.assertNotNull(entry);
            // mock multiple updates
            for (int j = 0; j < NUM_REWRITES; j++) {
                updateOutputBuffer(entry, rnd);
            }
            expected.add(entry.copy());
        } catch (Exception e) {
            // Append failed (presumably memory exhausted — the exception is the
            // signal, not an error). Drain everything currently in the record
            // area by deserializing key/value pairs straight off the segments.
            ArrayList<MemorySegment> segments = table.getRecordAreaMemorySegments();
            RandomAccessInputView inView = new RandomAccessInputView(segments, segments.get(0).size());
            K reuseKey = keySerializer.createInstance();
            BinaryRowData reuseValue = valueSerializer.createInstance();
            for (int index = 0; index < table.getNumElements(); index++) {
                reuseKey = keySerializer.mapFromPages(reuseKey, inView);
                reuseValue = valueSerializer.mapFromPages(reuseValue, inView);
                // Copy: the reuse objects are overwritten on the next iteration.
                actualKeys.add(keySerializer.copy(reuseKey));
                actualValues.add(reuseValue.copy());
            }
            // Reclaim the memory, then redo the append that just failed.
            table.reset();
            // retry
            lookupInfo = table.lookup(groupKey);
            BinaryRowData entry = table.append(lookupInfo, defaultValue);
            Assert.assertNotNull(entry);
            // mock multiple updates
            for (int j = 0; j < NUM_REWRITES; j++) {
                updateOutputBuffer(entry, rnd);
            }
            expected.add(entry.copy());
        }
    }
    // Collect whatever is still in the table after the last reset.
    KeyValueIterator<K, BinaryRowData> iter = table.getEntryIterator(false);
    while (iter.advanceNext()) {
        actualKeys.add(keySerializer.copy(iter.getKey()));
        actualValues.add(iter.getValue().copy());
    }
    // Drained + iterated entries together must account for every insert,
    // and the values must match the expected copies in order.
    Assert.assertEquals(NUM_ENTRIES, expected.size());
    Assert.assertEquals(NUM_ENTRIES, actualKeys.size());
    Assert.assertEquals(NUM_ENTRIES, actualValues.size());
    Assert.assertEquals(expected, actualValues);
    table.free();
}
Also used : RandomAccessInputView(org.apache.flink.runtime.io.disk.RandomAccessInputView) ArrayList(java.util.ArrayList) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) IOException(java.io.IOException) Random(java.util.Random) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) Test(org.junit.Test)

Example 39 with MemoryManager

Example usage of org.apache.flink.runtime.memory.MemoryManager in the Apache Flink project.

Source: class BytesMultiMapTestBase, method testBuildAndRetrieve.

// ------------------------------------------------------------------------------------------
// Tests
// ------------------------------------------------------------------------------------------
// Appends every value under every key and verifies the multi-map returns all
// values per key, in insertion order.
@Test
public void testBuildAndRetrieve() throws Exception {
    final int numMemSegments =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    int memorySize = numMemSegments * PAGE_SIZE;
    // Use the memorySize local consistently instead of re-deriving it, so the
    // builder and createBytesMultiMap are guaranteed to agree.
    MemoryManager memoryManager =
            MemoryManagerBuilder.newBuilder().setMemorySize(memorySize).build();
    AbstractBytesMultiMap<K> table =
            createBytesMultiMap(memoryManager, memorySize, KEY_TYPES, VALUE_TYPES);
    // Fewer keys than entries: each key receives NUM_VALUE_PER_KEY values.
    K[] keys = generateRandomKeys(NUM_ENTRIES / 10);
    BinaryRowData[] values = genValues(NUM_VALUE_PER_KEY);
    for (K key : keys) {
        BytesMap.LookupInfo<K, Iterator<RowData>> lookupInfo;
        for (BinaryRowData value : values) {
            lookupInfo = table.lookup(key);
            table.append(lookupInfo, value);
        }
    }
    // Every key's value list must come back complete and in insertion order.
    KeyValueIterator<K, Iterator<RowData>> iter = table.getEntryIterator(false);
    while (iter.advanceNext()) {
        int i = 0;
        Iterator<RowData> valueIter = iter.getValue();
        while (valueIter.hasNext()) {
            Assert.assertEquals(valueIter.next(), values[i++]);
        }
    }
    // Release memory back to the MemoryManager; this was missing in the
    // original — every sibling hash-map test frees its table.
    table.free();
}
Also used : MemoryManager(org.apache.flink.runtime.memory.MemoryManager) RowData(org.apache.flink.table.data.RowData) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) Iterator(java.util.Iterator) KeyValueIterator(org.apache.flink.table.runtime.util.KeyValueIterator) Test(org.junit.Test)

Example 40 with MemoryManager

Example usage of org.apache.flink.runtime.memory.MemoryManager in the Apache Flink project.

Source: class MassiveStringValueSorting, method testStringValueTuplesSorting.

// Sorts 300k StringValue tuples with Flink's ExternalSorter and compares the
// result against the output of the Unix `sort` command on the same data.
// Requires /bin/bash and `sort` on the PATH, so this only runs on Unix-like hosts.
@SuppressWarnings("unchecked")
public void testStringValueTuplesSorting() {
    final int numStrings = 300000;
    File input = null;
    File sorted = null;
    try {
        // the source file
        input = generateFileWithStringTuples(numStrings, "http://some-uri.com/that/is/a/common/prefix/to/all");
        // the sorted file; File.createTempFile appends the suffix literally,
        // so it must include the dot to produce a real ".txt" extension
        // (the original passed "txt", yielding names like "sorted_strings123txt").
        sorted = File.createTempFile("sorted_strings", ".txt");
        // LC_ALL=C forces byte-wise collation so `sort` agrees with Flink's
        // binary comparison order.
        String[] command = { "/bin/bash", "-c", "export LC_ALL=\"C\" && cat \"" + input.getAbsolutePath() + "\" | sort > \"" + sorted.getAbsolutePath() + "\"" };
        Process p = null;
        try {
            p = Runtime.getRuntime().exec(command);
            int retCode = p.waitFor();
            if (retCode != 0) {
                throw new Exception("Command failed with return code " + retCode);
            }
            p = null;
        } finally {
            // Only destroy the process if it did not complete normally.
            if (p != null) {
                p.destroy();
            }
        }
        // sort the data
        Sorter<Tuple2<StringValue, StringValue[]>> sorter = null;
        BufferedReader reader = null;
        BufferedReader verifyReader = null;
        MemoryManager mm = null;
        try (IOManager ioMan = new IOManagerAsync()) {
            // Tiny memory budget (1 MiB) forces the sorter to spill to disk.
            mm = MemoryManagerBuilder.newBuilder().setMemorySize(1024 * 1024).build();
            TupleTypeInfo<Tuple2<StringValue, StringValue[]>> typeInfo = (TupleTypeInfo<Tuple2<StringValue, StringValue[]>>) new TypeHint<Tuple2<StringValue, StringValue[]>>() {
            }.getTypeInfo();
            TypeSerializer<Tuple2<StringValue, StringValue[]>> serializer = typeInfo.createSerializer(new ExecutionConfig());
            // Sort on field 0 (the key string), ascending.
            TypeComparator<Tuple2<StringValue, StringValue[]>> comparator = typeInfo.createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());
            reader = new BufferedReader(new FileReader(input));
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> inputIterator = new StringValueTupleReaderMutableObjectIterator(reader);
            sorter = ExternalSorter.newBuilder(mm, new DummyInvokable(), serializer, comparator).maxNumFileHandles(4).enableSpilling(ioMan, 0.8f).memoryFraction(1.0).objectReuse(false).largeRecords(true).build(inputIterator);
            // use this part to verify that all if good when sorting in memory
            // List<MemorySegment> memory = mm.allocatePages(new DummyInvokable(),
            // mm.computeNumberOfPages(1024*1024*1024));
            // NormalizedKeySorter<Tuple2<String, String[]>> nks = new
            // NormalizedKeySorter<Tuple2<String,String[]>>(serializer, comparator, memory);
            // 
            // {
            // Tuple2<String, String[]> wi = new Tuple2<String, String[]>("", new
            // String[0]);
            // while ((wi = inputIterator.next(wi)) != null) {
            // Assert.assertTrue(nks.write(wi));
            // }
            // 
            // new QuickSort().sort(nks);
            // }
            // 
            // MutableObjectIterator<Tuple2<String, String[]>> sortedData =
            // nks.getIterator();
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> sortedData = sorter.getIterator();
            reader.close();
            // verify: walk Flink's sorted output in lockstep with `sort`'s output
            verifyReader = new BufferedReader(new FileReader(sorted));
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> verifyIterator = new StringValueTupleReaderMutableObjectIterator(verifyReader);
            Tuple2<StringValue, StringValue[]> nextVerify = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
            Tuple2<StringValue, StringValue[]> nextFromFlinkSort = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
            int num = 0;
            while ((nextVerify = verifyIterator.next(nextVerify)) != null) {
                num++;
                nextFromFlinkSort = sortedData.next(nextFromFlinkSort);
                Assert.assertNotNull(nextFromFlinkSort);
                Assert.assertEquals(nextVerify.f0, nextFromFlinkSort.f0);
                Assert.assertArrayEquals(nextVerify.f1, nextFromFlinkSort.f1);
            }
            // Flink's output must be exhausted at exactly the same point.
            Assert.assertNull(sortedData.next(nextFromFlinkSort));
            Assert.assertEquals(numStrings, num);
        } finally {
            if (reader != null) {
                reader.close();
            }
            if (verifyReader != null) {
                verifyReader.close();
            }
            if (sorter != null) {
                sorter.close();
            }
            if (mm != null) {
                mm.shutdown();
            }
        }
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        // Best-effort cleanup of the temp files.
        if (input != null) {
            // noinspection ResultOfMethodCallIgnored
            input.delete();
        }
        if (sorted != null) {
            // noinspection ResultOfMethodCallIgnored
            sorted.delete();
        }
    }
}
Also used : TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) IOManager(org.apache.flink.runtime.io.disk.iomanager.IOManager) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) IOException(java.io.IOException) TupleTypeInfo(org.apache.flink.api.java.typeutils.TupleTypeInfo) IOManagerAsync(org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync) Tuple2(org.apache.flink.api.java.tuple.Tuple2) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) DummyInvokable(org.apache.flink.runtime.operators.testutils.DummyInvokable) StringValue(org.apache.flink.types.StringValue) File(java.io.File)

Aggregations

MemoryManager (org.apache.flink.runtime.memory.MemoryManager)69 Test (org.junit.Test)37 IOManager (org.apache.flink.runtime.io.disk.iomanager.IOManager)22 BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData)21 IOManagerAsync (org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync)18 IOException (java.io.IOException)16 ArrayList (java.util.ArrayList)14 DummyInvokable (org.apache.flink.runtime.operators.testutils.DummyInvokable)14 UniformBinaryRowGenerator (org.apache.flink.table.runtime.util.UniformBinaryRowGenerator)14 ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)13 MemorySegment (org.apache.flink.core.memory.MemorySegment)12 Configuration (org.apache.flink.configuration.Configuration)9 Tuple2 (org.apache.flink.api.java.tuple.Tuple2)8 TypeHint (org.apache.flink.api.common.typeinfo.TypeHint)7 TupleTypeInfo (org.apache.flink.api.java.typeutils.TupleTypeInfo)7 File (java.io.File)6 MutableObjectIterator (org.apache.flink.util.MutableObjectIterator)6 Map (java.util.Map)5 AbstractInvokable (org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable)5 BufferedReader (java.io.BufferedReader)4