Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
The class BytesHashMapTestBase, method testBuildAndRetrieve.
@Test
public void testBuildAndRetrieve() throws Exception {
    final int numMemSegments =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    int memorySize = numMemSegments * PAGE_SIZE;
    MemoryManager memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(memorySize).build();
    AbstractBytesHashMap<K> table = createBytesHashMap(memoryManager, memorySize, KEY_TYPES, VALUE_TYPES);
    K[] keys = generateRandomKeys(NUM_ENTRIES);
    List<BinaryRowData> expected = new ArrayList<>(NUM_ENTRIES);
    verifyInsert(keys, expected, table);
    verifyRetrieve(table, keys, expected);
    table.free();
}
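The verifyInsert and verifyRetrieve helpers referenced above belong to the test base and are not reproduced on this page. A minimal sketch of the insert-then-probe pattern they presumably wrap, built only from the lookup/append calls visible in these snippets (the helper name insertAndProbe and the use of defaultValue as the initial record are assumptions), could look like this:

// Hypothetical sketch of an insert-and-probe round trip against AbstractBytesHashMap.
private void insertAndProbe(AbstractBytesHashMap<K> table, K[] keys, List<BinaryRowData> expected) throws Exception {
    for (K key : keys) {
        // first probe: the key is not yet in the map
        BytesMap.LookupInfo<K, BinaryRowData> info = table.lookup(key);
        Assert.assertFalse(info.isFound());
        // append stores a value record for the key and returns a row view onto it
        BinaryRowData inserted = table.append(info, defaultValue);
        Assert.assertNotNull(inserted);
        expected.add(inserted.copy());
        // second probe: the key is now found in the map
        Assert.assertTrue(table.lookup(key).isFound());
    }
}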
Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
The class BytesHashMapTestBase, method testRest.
@Test
public void testRest() throws Exception {
    final int numMemSegments =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    int memorySize = numMemSegments * PAGE_SIZE;
    MemoryManager memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(memorySize).build();
    AbstractBytesHashMap<K> table = createBytesHashMap(memoryManager, memorySize, KEY_TYPES, VALUE_TYPES);
    final K[] keys = generateRandomKeys(NUM_ENTRIES);
    List<BinaryRowData> expected = new ArrayList<>(NUM_ENTRIES);
    verifyInsertAndUpdate(keys, expected, table);
    verifyRetrieve(table, keys, expected);
    table.reset();
    Assert.assertEquals(0, table.getNumElements());
    Assert.assertEquals(1, table.getRecordAreaMemorySegments().size());
    expected.clear();
    verifyInsertAndUpdate(keys, expected, table);
    verifyRetrieve(table, keys, expected);
    table.free();
}
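verifyInsertAndUpdate is another helper of the test base. Assuming that the BinaryRowData returned by append (or exposed through a found LookupInfo) is backed by the map's own record area, so that writing into it updates the stored value in place, an update step could be sketched roughly as below; the long column at position 0 is an assumption, not taken from the test base:

// Hypothetical sketch: writing into the row obtained from lookup/append is assumed to
// update the stored value in place; the column layout is assumed for illustration only.
BytesMap.LookupInfo<K, BinaryRowData> info = table.lookup(key);
BinaryRowData valueRow = info.isFound() ? info.getValue() : table.append(info, defaultValue);
valueRow.setLong(0, valueRow.getLong(0) + 1);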
Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
The class BytesHashMapTestBase, method testResetAndOutput.
@Test
public void testResetAndOutput() throws Exception {
    final Random rnd = new Random(RANDOM_SEED);
    final int reservedMemSegments = 64;
    int minMemorySize = reservedMemSegments * PAGE_SIZE;
    MemoryManager memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(minMemorySize).build();
    AbstractBytesHashMap<K> table = createBytesHashMap(memoryManager, minMemorySize, KEY_TYPES, VALUE_TYPES);
    K[] keys = generateRandomKeys(NUM_ENTRIES);
    List<BinaryRowData> expected = new ArrayList<>(NUM_ENTRIES);
    List<BinaryRowData> actualValues = new ArrayList<>(NUM_ENTRIES);
    List<K> actualKeys = new ArrayList<>(NUM_ENTRIES);
    for (int i = 0; i < NUM_ENTRIES; i++) {
        K groupKey = keys[i];
        // look up and insert
        BytesMap.LookupInfo<K, BinaryRowData> lookupInfo = table.lookup(groupKey);
        Assert.assertFalse(lookupInfo.isFound());
        try {
            BinaryRowData entry = table.append(lookupInfo, defaultValue);
            Assert.assertNotNull(entry);
            // mock multiple updates
            for (int j = 0; j < NUM_REWRITES; j++) {
                updateOutputBuffer(entry, rnd);
            }
            expected.add(entry.copy());
        } catch (Exception e) {
            // the map ran out of memory: drain what has been written so far,
            // reset the map, and retry the insert
            ArrayList<MemorySegment> segments = table.getRecordAreaMemorySegments();
            RandomAccessInputView inView = new RandomAccessInputView(segments, segments.get(0).size());
            K reuseKey = keySerializer.createInstance();
            BinaryRowData reuseValue = valueSerializer.createInstance();
            for (int index = 0; index < table.getNumElements(); index++) {
                reuseKey = keySerializer.mapFromPages(reuseKey, inView);
                reuseValue = valueSerializer.mapFromPages(reuseValue, inView);
                actualKeys.add(keySerializer.copy(reuseKey));
                actualValues.add(reuseValue.copy());
            }
            table.reset();
            // retry
            lookupInfo = table.lookup(groupKey);
            BinaryRowData entry = table.append(lookupInfo, defaultValue);
            Assert.assertNotNull(entry);
            // mock multiple updates
            for (int j = 0; j < NUM_REWRITES; j++) {
                updateOutputBuffer(entry, rnd);
            }
            expected.add(entry.copy());
        }
    }
    KeyValueIterator<K, BinaryRowData> iter = table.getEntryIterator(false);
    while (iter.advanceNext()) {
        actualKeys.add(keySerializer.copy(iter.getKey()));
        actualValues.add(iter.getValue().copy());
    }
    Assert.assertEquals(NUM_ENTRIES, expected.size());
    Assert.assertEquals(NUM_ENTRIES, actualKeys.size());
    Assert.assertEquals(NUM_ENTRIES, actualValues.size());
    Assert.assertEquals(expected, actualValues);
    table.free();
}
Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
The class BytesMultiMapTestBase, method testBuildAndRetrieve.
// ------------------------------------------------------------------------------------------
// Tests
// ------------------------------------------------------------------------------------------
@Test
public void testBuildAndRetrieve() throws Exception {
    final int numMemSegments =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    int memorySize = numMemSegments * PAGE_SIZE;
    MemoryManager memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(numMemSegments * PAGE_SIZE).build();
    AbstractBytesMultiMap<K> table = createBytesMultiMap(memoryManager, memorySize, KEY_TYPES, VALUE_TYPES);
    K[] keys = generateRandomKeys(NUM_ENTRIES / 10);
    BinaryRowData[] values = genValues(NUM_VALUE_PER_KEY);
    for (K key : keys) {
        BytesMap.LookupInfo<K, Iterator<RowData>> lookupInfo;
        for (BinaryRowData value : values) {
            lookupInfo = table.lookup(key);
            table.append(lookupInfo, value);
        }
    }
    KeyValueIterator<K, Iterator<RowData>> iter = table.getEntryIterator(false);
    while (iter.advanceNext()) {
        int i = 0;
        Iterator<RowData> valueIter = iter.getValue();
        while (valueIter.hasNext()) {
            Assert.assertEquals(valueIter.next(), values[i++]);
        }
    }
}
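genValues is likewise a helper of BytesMultiMapTestBase that is not shown here. A sketch of producing random BinaryRowData values with a BinaryRowWriter might look like the following; the two-column int/long layout is an assumption and not necessarily what the test base uses:

// Hypothetical value generator; the column layout is assumed for illustration only.
private BinaryRowData[] genValues(int num) {
    Random rnd = new Random();
    BinaryRowData[] values = new BinaryRowData[num];
    for (int i = 0; i < num; i++) {
        BinaryRowData row = new BinaryRowData(2);
        BinaryRowWriter writer = new BinaryRowWriter(row);
        writer.writeInt(0, rnd.nextInt());
        writer.writeLong(1, rnd.nextLong());
        writer.complete();
        values[i] = row;
    }
    return values;
}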
Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
The class MassiveStringValueSorting, method testStringValueTuplesSorting.
@SuppressWarnings("unchecked")
public void testStringValueTuplesSorting() {
    final int numStrings = 300000;
    File input = null;
    File sorted = null;
    try {
        // the source file
        input = generateFileWithStringTuples(numStrings, "http://some-uri.com/that/is/a/common/prefix/to/all");
        // the sorted file
        sorted = File.createTempFile("sorted_strings", "txt");
        String[] command = { "/bin/bash", "-c", "export LC_ALL=\"C\" && cat \"" + input.getAbsolutePath() + "\" | sort > \"" + sorted.getAbsolutePath() + "\"" };
        Process p = null;
        try {
            p = Runtime.getRuntime().exec(command);
            int retCode = p.waitFor();
            if (retCode != 0) {
                throw new Exception("Command failed with return code " + retCode);
            }
            p = null;
        } finally {
            if (p != null) {
                p.destroy();
            }
        }
        // sort the data
        Sorter<Tuple2<StringValue, StringValue[]>> sorter = null;
        BufferedReader reader = null;
        BufferedReader verifyReader = null;
        MemoryManager mm = null;
        try (IOManager ioMan = new IOManagerAsync()) {
            mm = MemoryManagerBuilder.newBuilder().setMemorySize(1024 * 1024).build();
            TupleTypeInfo<Tuple2<StringValue, StringValue[]>> typeInfo =
                    (TupleTypeInfo<Tuple2<StringValue, StringValue[]>>) new TypeHint<Tuple2<StringValue, StringValue[]>>() {}.getTypeInfo();
            TypeSerializer<Tuple2<StringValue, StringValue[]>> serializer = typeInfo.createSerializer(new ExecutionConfig());
            TypeComparator<Tuple2<StringValue, StringValue[]>> comparator =
                    typeInfo.createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());
            reader = new BufferedReader(new FileReader(input));
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> inputIterator = new StringValueTupleReaderMutableObjectIterator(reader);
            sorter = ExternalSorter.newBuilder(mm, new DummyInvokable(), serializer, comparator)
                    .maxNumFileHandles(4)
                    .enableSpilling(ioMan, 0.8f)
                    .memoryFraction(1.0)
                    .objectReuse(false)
                    .largeRecords(true)
                    .build(inputIterator);
            // use this part to verify that all is good when sorting in memory
            // List<MemorySegment> memory = mm.allocatePages(new DummyInvokable(),
            //         mm.computeNumberOfPages(1024 * 1024 * 1024));
            // NormalizedKeySorter<Tuple2<String, String[]>> nks =
            //         new NormalizedKeySorter<Tuple2<String, String[]>>(serializer, comparator, memory);
            //
            // {
            //     Tuple2<String, String[]> wi = new Tuple2<String, String[]>("", new String[0]);
            //     while ((wi = inputIterator.next(wi)) != null) {
            //         Assert.assertTrue(nks.write(wi));
            //     }
            //
            //     new QuickSort().sort(nks);
            // }
            //
            // MutableObjectIterator<Tuple2<String, String[]>> sortedData = nks.getIterator();
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> sortedData = sorter.getIterator();
            reader.close();
            // verify
            verifyReader = new BufferedReader(new FileReader(sorted));
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> verifyIterator = new StringValueTupleReaderMutableObjectIterator(verifyReader);
            Tuple2<StringValue, StringValue[]> nextVerify = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
            Tuple2<StringValue, StringValue[]> nextFromFlinkSort = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
            int num = 0;
            while ((nextVerify = verifyIterator.next(nextVerify)) != null) {
                num++;
                nextFromFlinkSort = sortedData.next(nextFromFlinkSort);
                Assert.assertNotNull(nextFromFlinkSort);
                Assert.assertEquals(nextVerify.f0, nextFromFlinkSort.f0);
                Assert.assertArrayEquals(nextVerify.f1, nextFromFlinkSort.f1);
            }
            Assert.assertNull(sortedData.next(nextFromFlinkSort));
            Assert.assertEquals(numStrings, num);
        } finally {
            if (reader != null) {
                reader.close();
            }
            if (verifyReader != null) {
                verifyReader.close();
            }
            if (sorter != null) {
                sorter.close();
            }
            if (mm != null) {
                mm.shutdown();
            }
        }
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (input != null) {
            // noinspection ResultOfMethodCallIgnored
            input.delete();
        }
        if (sorted != null) {
            // noinspection ResultOfMethodCallIgnored
            sorted.delete();
        }
    }
}
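StringValueTupleReaderMutableObjectIterator is a small adapter defined in MassiveStringValueSorting that is not reproduced above. A sketch of such a reader-backed MutableObjectIterator is given below; the line format (whitespace-separated tokens, first token as f0, the rest as f1) is an assumption, since generateFileWithStringTuples is not shown:

// Hypothetical sketch of the reader-backed iterator; the parsing of each line is assumed.
private static final class StringValueTupleReaderMutableObjectIterator
        implements MutableObjectIterator<Tuple2<StringValue, StringValue[]>> {

    private final BufferedReader reader;

    StringValueTupleReaderMutableObjectIterator(BufferedReader reader) {
        this.reader = reader;
    }

    @Override
    public Tuple2<StringValue, StringValue[]> next(Tuple2<StringValue, StringValue[]> reuse) throws IOException {
        String line = reader.readLine();
        if (line == null) {
            return null;
        }
        String[] tokens = line.split(" ");
        reuse.f0 = new StringValue(tokens[0]);
        StringValue[] rest = new StringValue[tokens.length - 1];
        for (int i = 1; i < tokens.length; i++) {
            rest[i - 1] = new StringValue(tokens[i]);
        }
        reuse.f1 = rest;
        return reuse;
    }

    @Override
    public Tuple2<StringValue, StringValue[]> next() throws IOException {
        return next(new Tuple2<>(new StringValue(), new StringValue[0]));
    }
}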