Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
From the class BytesHashMapTestBase, method testHashSetMode.
// ------------------------------------------------------------------------------------------
// Tests
// ------------------------------------------------------------------------------------------
@Test
public void testHashSetMode() throws IOException {
    final int numMemSegments =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    int memorySize = numMemSegments * PAGE_SIZE;
    MemoryManager memoryManager =
            MemoryManagerBuilder.newBuilder().setMemorySize(memorySize).build();
    AbstractBytesHashMap<K> table =
            createBytesHashMap(memoryManager, memorySize, KEY_TYPES, new LogicalType[] {});
    Assert.assertTrue(table.isHashSetMode());
    K[] keys = generateRandomKeys(NUM_ENTRIES);
    verifyKeyInsert(keys, table);
    verifyKeyPresent(keys, table);
    table.free();
}
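For context, a minimal sketch of the MemoryManager page lifecycle these tests build on; the owner object, sizes, and page count are illustrative assumptions, not part of the test above.

@Test
public void pageLifecycleSketch() throws Exception {
    // 32 pages of 32 KiB; MemoryManagerBuilder is the same test utility used above
    MemoryManager mm = MemoryManagerBuilder.newBuilder()
            .setMemorySize(32 * 32 * 1024)
            .setPageSize(32 * 1024)
            .build();
    Object owner = new Object(); // any object may own segments (illustrative)
    // allocatePages throws MemoryAllocationException if the budget is exceeded
    List<MemorySegment> segments = mm.allocatePages(owner, 32);
    try {
        // ... write to / read from the segments ...
    } finally {
        mm.release(segments); // return the pages to the pool
        mm.shutdown();        // release all resources held by the manager
    }
}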
Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
From the class BytesHashMapTestBase, method testBuildAndUpdate.
@Test
public void testBuildAndUpdate() throws Exception {
    final int numMemSegments =
            needNumMemSegments(
                    NUM_ENTRIES,
                    rowLength(RowType.of(VALUE_TYPES)),
                    rowLength(RowType.of(KEY_TYPES)),
                    PAGE_SIZE);
    int memorySize = numMemSegments * PAGE_SIZE;
    MemoryManager memoryManager =
            MemoryManagerBuilder.newBuilder().setMemorySize(memorySize).build();
    AbstractBytesHashMap<K> table =
            createBytesHashMap(memoryManager, memorySize, KEY_TYPES, VALUE_TYPES);
    K[] keys = generateRandomKeys(NUM_ENTRIES);
    List<BinaryRowData> expected = new ArrayList<>(NUM_ENTRIES);
    verifyInsertAndUpdate(keys, expected, table);
    verifyRetrieve(table, keys, expected);
    table.free();
}
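The insert-then-update pattern that verifyInsertAndUpdate exercises looks roughly like the following hedged sketch against the AbstractBytesHashMap API; the initial value row and the in-place mutation are illustrative.

BytesMap.LookupInfo<K, BinaryRowData> info = table.lookup(key);
if (!info.isFound()) {
    // first occurrence of the key: append an initial value row
    // (append signals memory exhaustion with an IOException)
    table.append(info, initialValueRow);
} else {
    // key already present: update the stored value row in place
    BinaryRowData stored = info.getValue();
    stored.setLong(0, stored.getLong(0) + 1);
}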
Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
From the class BufferDataOverWindowOperator, method open.
@Override
public void open() throws Exception {
    super.open();
    ClassLoader cl = getUserCodeClassloader();
    serializer = (AbstractRowDataSerializer) getOperatorConfig().getTypeSerializerIn1(cl);
    partitionComparator = genComparator.newInstance(cl);
    genComparator = null;
    MemoryManager memManager = getContainingTask().getEnvironment().getMemoryManager();
    LazyMemorySegmentPool pool =
            new LazyMemorySegmentPool(
                    this, memManager, (int) (computeMemorySize() / memManager.getPageSize()));
    this.currentData =
            new ResettableExternalBuffer(
                    getContainingTask().getEnvironment().getIOManager(),
                    pool,
                    serializer,
                    isRowAllInFixedPart);
    collector = new StreamRecordCollector<>(output);
    joinedRows = new JoinedRowData[overWindowFrames.length];
    for (int i = 0; i < overWindowFrames.length; i++) {
        overWindowFrames[i].open(new ExecutionContextImpl(this, getRuntimeContext()));
        joinedRows[i] = new JoinedRowData();
    }
}
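How the pool above is sized, spelled out step by step; the concrete numbers are illustrative:

long managedBytes = computeMemorySize();        // the operator's share of managed memory, in bytes
int pageSize = memManager.getPageSize();        // the MemoryManager's segment size, e.g. 32 KiB
int maxPages = (int) (managedBytes / pageSize); // upper bound of segments the lazy pool may allocate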
Use of org.apache.flink.runtime.memory.MemoryManager in project flink by apache.
From the class MassiveStringValueSorting, method testStringValueSorting.
public void testStringValueSorting() {
    File input = null;
    File sorted = null;
    try {
        // the source file
        input = generateFileWithStrings(300000, "http://some-uri.com/that/is/a/common/prefix/to/all");
        // the sorted file
        sorted = File.createTempFile("sorted_strings", "txt");
        String[] command = {
            "/bin/bash",
            "-c",
            "export LC_ALL=\"C\" && cat \""
                    + input.getAbsolutePath()
                    + "\" | sort > \""
                    + sorted.getAbsolutePath()
                    + "\""
        };
        Process p = null;
        try {
            p = Runtime.getRuntime().exec(command);
            int retCode = p.waitFor();
            if (retCode != 0) {
                throw new Exception("Command failed with return code " + retCode);
            }
            p = null;
        } finally {
            if (p != null) {
                p.destroy();
            }
        }
        // sort the data
        Sorter<StringValue> sorter = null;
        BufferedReader reader = null;
        BufferedReader verifyReader = null;
        MemoryManager mm = null;
        try (IOManager ioMan = new IOManagerAsync()) {
            mm = MemoryManagerBuilder.newBuilder().setMemorySize(1024 * 1024).build();
            TypeSerializer<StringValue> serializer =
                    new CopyableValueSerializer<StringValue>(StringValue.class);
            TypeComparator<StringValue> comparator =
                    new CopyableValueComparator<StringValue>(true, StringValue.class);
            reader = new BufferedReader(new FileReader(input));
            MutableObjectIterator<StringValue> inputIterator =
                    new StringValueReaderMutableObjectIterator(reader);
            sorter = ExternalSorter.newBuilder(mm, new DummyInvokable(), serializer, comparator)
                    .maxNumFileHandles(128)
                    .enableSpilling(ioMan, 0.8f)
                    .memoryFraction(1.0)
                    .objectReuse(true)
                    .largeRecords(true)
                    .build(inputIterator);
            MutableObjectIterator<StringValue> sortedData = sorter.getIterator();
            reader.close();
            // verify
            verifyReader = new BufferedReader(new FileReader(sorted));
            String nextVerify;
            StringValue nextFromFlinkSort = new StringValue();
            while ((nextVerify = verifyReader.readLine()) != null) {
                nextFromFlinkSort = sortedData.next(nextFromFlinkSort);
                Assert.assertNotNull(nextFromFlinkSort);
                Assert.assertEquals(nextVerify, nextFromFlinkSort.getValue());
            }
        } finally {
            if (reader != null) {
                reader.close();
            }
            if (verifyReader != null) {
                verifyReader.close();
            }
            if (sorter != null) {
                sorter.close();
            }
            if (mm != null) {
                mm.shutdown();
            }
        }
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (input != null) {
            // noinspection ResultOfMethodCallIgnored
            input.delete();
        }
        if (sorted != null) {
            // noinspection ResultOfMethodCallIgnored
            sorted.delete();
        }
    }
}
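Distilled from the test above, a minimal hedged sketch of wiring a MemoryManager into ExternalSorter, stripped of the verification plumbing; the memory size and the input iterator are assumptions.

MemoryManager mm = MemoryManagerBuilder.newBuilder()
        .setMemorySize(64 * 1024 * 1024) // 64 MiB of managed memory (illustrative)
        .build();
try (IOManager ioMan = new IOManagerAsync()) {
    // inputIterator: any MutableObjectIterator<StringValue> source (assumed to exist)
    Sorter<StringValue> sorter = ExternalSorter.newBuilder(
                    mm,
                    new DummyInvokable(),
                    new CopyableValueSerializer<>(StringValue.class),
                    new CopyableValueComparator<>(true, StringValue.class))
            .enableSpilling(ioMan, 0.8f) // start spilling to disk at 80% memory usage
            .memoryFraction(1.0)         // give the sorter the full memory pool
            .objectReuse(true)
            .build(inputIterator);
    // getIterator() blocks until sorting (including any spill/merge phases) completes
    MutableObjectIterator<StringValue> sorted = sorter.getIterator();
    StringValue reuse = new StringValue();
    while ((reuse = sorted.next(reuse)) != null) {
        // consume records in sorted order
    }
    sorter.close();
} finally {
    mm.shutdown();
}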