Use of org.apache.flink.runtime.operators.testutils.DummyInvokable in project flink by apache.
Class NormalizedKeySorterTest, method testSwap.
/**
 * The swap test fills the sort buffer and swaps all elements such that they are
 * backwards. It then resets the generator, goes backwards through the buffer
 * and compares for equality.
 */
@Test
public void testSwap() throws Exception {
    final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);
    NormalizedKeySorter<Tuple2<Integer, String>> sorter = newSortBuffer(memory);
    TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);

    // write the records
    Tuple2<Integer, String> record = new Tuple2<>();
    int num = -1;
    do {
        generator.next(record);
        num++;
    } while (sorter.write(record));

    // swap the records
    int start = 0, end = num - 1;
    while (start < end) {
        sorter.swap(start++, end--);
    }

    // re-read the records
    generator.reset();
    Tuple2<Integer, String> readTarget = new Tuple2<>();
    int i = num - 1;
    while (i >= 0) {
        generator.next(record);
        readTarget = sorter.getRecord(readTarget, i--);

        int rk = readTarget.f0;
        int gk = record.f0;
        String rv = readTarget.f1;
        String gv = record.f1;

        Assert.assertEquals("The re-read key is wrong", gk, rk);
        Assert.assertEquals("The re-read value is wrong", gv, rv);
    }

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
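The newSortBuffer(memory) helper is defined elsewhere in NormalizedKeySorterTest and is not shown on this page. A minimal sketch of how such a helper could build the sorter is given below; the TupleTypeInfo-based serializer and comparator creation is an assumption for illustration, not the test's exact code.

import java.util.List;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.operators.sort.NormalizedKeySorter;

// Sketch only: builds a sorter over Tuple2<Integer, String>, keyed on field 0 in ascending order.
private NormalizedKeySorter<Tuple2<Integer, String>> newSortBuffer(List<MemorySegment> memory) {
    TupleTypeInfo<Tuple2<Integer, String>> typeInfo =
            TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, String.class);

    TypeSerializer<Tuple2<Integer, String>> serializer = typeInfo.createSerializer(new ExecutionConfig());
    TypeComparator<Tuple2<Integer, String>> comparator =
            typeInfo.createComparator(new int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());

    return new NormalizedKeySorter<>(serializer, comparator, memory);
}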
Use of org.apache.flink.runtime.operators.testutils.DummyInvokable in project flink by apache.
Class NormalizedKeySorterTest, method testWriteAndIterator.
@Test
public void testWriteAndIterator() throws Exception {
    final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);
    NormalizedKeySorter<Tuple2<Integer, String>> sorter = newSortBuffer(memory);
    TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);

    // write the records
    Tuple2<Integer, String> record = new Tuple2<>();
    do {
        generator.next(record);
    } while (sorter.write(record));

    // re-read the records
    generator.reset();
    MutableObjectIterator<Tuple2<Integer, String>> iter = sorter.getIterator();
    Tuple2<Integer, String> readTarget = new Tuple2<>();
    while ((readTarget = iter.next(readTarget)) != null) {
        generator.next(record);

        int rk = readTarget.f0;
        int gk = record.f0;
        String rv = readTarget.f1;
        String gv = record.f1;

        Assert.assertEquals("The re-read key is wrong", gk, rk);
        Assert.assertEquals("The re-read value is wrong", gv, rv);
    }

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
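The memoryManager field used by both sorter tests comes from the test fixture rather than from the snippets themselves. Assuming the same pre-allocating heap MemoryManager constructor that appears in FileChannelStreamsTest further down, a fixture sketch could look like this (the sizes are illustrative, not the test's actual constants):

import org.apache.flink.core.memory.MemoryType;
import org.apache.flink.runtime.memory.MemoryManager;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;

private static final int MEMORY_SIZE = 1024 * 1024 * 64;   // illustrative: 64 MB of sort memory
private static final int MEMORY_PAGE_SIZE = 32 * 1024;     // illustrative: 32 KB pages

private MemoryManager memoryManager;

@Before
public void beforeTest() {
    // one memory slot, heap segments, pre-allocated pages
    this.memoryManager = new MemoryManager(MEMORY_SIZE, 1, MEMORY_PAGE_SIZE, MemoryType.HEAP, true);
}

@After
public void afterTest() {
    // every test must hand its pages back via sorter.dispose() / memoryManager.release(...)
    Assert.assertTrue("Memory leak: not all segments were returned.", this.memoryManager.verifyEmpty());
    this.memoryManager.shutdown();
}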
Use of org.apache.flink.runtime.operators.testutils.DummyInvokable in project flink by apache.
Class FileChannelStreamsITCase, method testWriteReadSmallRecords.
// --------------------------------------------------------------------------------------------
@Test
public void testWriteReadSmallRecords() {
    try {
        List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
        final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final FileIOChannel.ID channel = ioManager.createChannel();

        // create the writer output view
        final BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
        final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);

        // write a number of pairs
        Pair pair = new Pair();
        for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
            generator.next(pair);
            pair.write(outView);
        }
        outView.close();

        // create the reader input view
        List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
        final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
        final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
        generator.reset();

        // read and re-generate all records and compare them
        Pair readPair = new Pair();
        for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
            generator.next(pair);
            readPair.read(inView);
            assertEquals("The re-generated and the read record do not match.", pair, readPair);
        }

        inView.close();
        reader.deleteChannel();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
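The Pair instances serialize themselves directly into the output view and read themselves back from the input view. As a rough illustration of that write/read contract over DataOutputView and DataInputView, here is a hypothetical KeyValue type (not the actual TestData Pair implementation):

import java.io.IOException;

import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;

// Hypothetical record type showing the write/read pattern used by Pair.
public class KeyValue {

    private int key;
    private String value;

    public void write(DataOutputView out) throws IOException {
        // write the fields in a fixed order
        out.writeInt(key);
        out.writeUTF(value);
    }

    public void read(DataInputView in) throws IOException {
        // read the fields back in the same order they were written
        this.key = in.readInt();
        this.value = in.readUTF();
    }
}

Because FileChannelOutputView and FileChannelInputView are paged views that implement DataOutputView and DataInputView, such objects can be written to a spill file and read back without an extra serialization layer.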
Use of org.apache.flink.runtime.operators.testutils.DummyInvokable in project flink by apache.
Class FileChannelStreamsITCase, method testWriteReadNotAll.
@Test
public void testWriteReadNotAll() {
    try {
        final List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
        final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final FileIOChannel.ID channel = this.ioManager.createChannel();

        // create the writer output view
        final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
        final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);

        // write a number of pairs
        Pair pair = new Pair();
        for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
            generator.next(pair);
            pair.write(outView);
        }
        outView.close();

        // create the reader input view
        List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
        final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
        final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
        generator.reset();

        // read and re-generate only the first half of the records and compare them
        Pair readPair = new Pair();
        for (int i = 0; i < NUM_PAIRS_SHORT / 2; i++) {
            generator.next(pair);
            readPair.read(inView);
            assertEquals("The re-generated and the read record do not match.", pair, readPair);
        }

        inView.close();
        reader.deleteChannel();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Use of org.apache.flink.runtime.operators.testutils.DummyInvokable in project flink by apache.
Class FileChannelStreamsTest, method testCloseAndDeleteOutputView.
@Test
public void testCloseAndDeleteOutputView() {
    final IOManager ioManager = new IOManagerAsync();
    try {
        MemoryManager memMan = new MemoryManager(4 * 16 * 1024, 1, 16 * 1024, MemoryType.HEAP, true);
        List<MemorySegment> memory = new ArrayList<MemorySegment>();
        memMan.allocatePages(new DummyInvokable(), memory, 4);

        FileIOChannel.ID channel = ioManager.createChannel();
        BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
        FileChannelOutputView out = new FileChannelOutputView(writer, memMan, memory, memMan.getPageSize());
        new StringValue("Some test text").write(out);

        // close for the first time, make sure all memory returns
        out.close();
        assertTrue(memMan.verifyEmpty());

        // close again, should not cause an exception
        out.close();

        // delete, make sure file is removed
        out.closeAndDelete();
        assertFalse(new File(channel.getPath()).exists());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        ioManager.shutdown();
    }
}
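In all of these tests, DummyInvokable only plays the role of the owner that the MemoryManager books allocations against. A minimal sketch of that allocate/use/release cycle, assuming the same memoryManager field as above, is:

// allocate pages on behalf of a dummy owner task
final DummyInvokable owner = new DummyInvokable();
List<MemorySegment> segments = memoryManager.allocatePages(owner, 4);

try {
    // ... hand the segments to a sorter, an output view, etc. ...
} finally {
    // return the pages so that memoryManager.verifyEmpty() holds at teardown
    memoryManager.release(segments);
}

The owner handle is what lets the MemoryManager account for (and, via its owner-based release methods, reclaim) everything a task allocated, which is why even test allocations must pass one.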