Use of org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator in project flink by apache.
The class NonReusingHashJoinIteratorITCase, method testBuildSecondWithMixedDataTypes.
@Test
public void testBuildSecondWithMixedDataTypes() {
    try {
        MutableObjectIterator<IntPair> input1 = new UniformIntPairGenerator(500, 40, false);
        final TupleGenerator generator2 = new TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator input2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);

        // collect expected data
        final Map<Integer, Collection<TupleIntPairMatch>> expectedMatchesMap =
                joinIntPairs(collectIntPairData(input1), collectTupleData(input2));

        final FlatJoinFunction<IntPair, Tuple2<Integer, String>, Tuple2<Integer, String>> matcher =
                new TupleIntPairMatchRemovingMatcher(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();

        // reset the generators
        input1 = new UniformIntPairGenerator(500, 40, false);
        generator2.reset();
        input2.reset();

        // compare with iterator values
        NonReusingBuildFirstHashJoinIterator<IntPair, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator =
                new NonReusingBuildFirstHashJoinIterator<>(
                        input1, input2,
                        this.pairSerializer, this.pairComparator,
                        this.recordSerializer, this.record2Comparator, this.recordPairPairComparator,
                        this.memoryManager, this.ioManager, this.parentTask,
                        1.0, false, false, true);

        iterator.open();

        while (iterator.callWithNextKey(matcher, collector));

        iterator.close();

        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleIntPairMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
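For orientation, here is a minimal standalone sketch of how UniformIntPairGenerator can be consumed on its own, outside of the test above. It assumes the constructor arguments are (number of distinct keys, values per key, repeat flag), matching the (500, 40, false) call in the test, and that the generator follows the usual MutableObjectIterator contract of returning null once exhausted; the import paths for IntPair and MutableObjectIterator are assumed from the Flink test-utility and util packages.

import org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator;
import org.apache.flink.runtime.operators.testutils.types.IntPair;
import org.apache.flink.util.MutableObjectIterator;

public class UniformIntPairGeneratorSketch {

    public static void main(String[] args) throws Exception {
        // 500 keys with 40 values per key (assumed argument order), no key repetition
        MutableObjectIterator<IntPair> source = new UniformIntPairGenerator(500, 40, false);

        IntPair reuse = new IntPair();
        int count = 0;
        // next(reuse) fills the reusable record and returns null when the generator is exhausted
        while ((reuse = source.next(reuse)) != null) {
            count++;
        }
        // under the assumed semantics this prints 500 * 40 = 20000
        System.out.println("records produced: " + count);
    }
}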
Use of org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator in project flink by apache.
The class FixedLengthRecordSorterTest, method testFlushFullMemoryPage.
@Test
public void testFlushFullMemoryPage() throws Exception {
    // Insert IntPair which would fill 2 memory pages.
    final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    } while (sorter.write(record) && num < NUM_RECORDS);

    FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
    BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
    final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

    sorter.writeToOutput(outputView, 0, NUM_RECORDS);

    this.memoryManager.release(outputView.close());

    BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
    final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
    final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

    record = iterator.next(record);
    int i = 0;
    while (record != null) {
        Assert.assertEquals(i, record.getKey());
        record = iterator.next(record);
        i++;
    }

    Assert.assertEquals(NUM_RECORDS, i);

    this.memoryManager.release(dataBuffer);

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
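The sizing in testFlushFullMemoryPage relies on IntPair being a fixed-length record of two 4-byte ints, i.e. 8 bytes, so 2 * MEMORY_PAGE_SIZE / 8 records fill exactly two memory pages. Below is a tiny self-contained sketch of that arithmetic, using a hypothetical 32 KiB page size purely for illustration (the test's actual MEMORY_PAGE_SIZE constant is defined elsewhere in FixedLengthRecordSorterTest).

public class IntPairSizingSketch {

    public static void main(String[] args) {
        // hypothetical page size for illustration only; not the test's MEMORY_PAGE_SIZE
        final int memoryPageSize = 32 * 1024;
        // an IntPair is two 4-byte ints, i.e. an 8-byte fixed-length record
        final int recordSize = 2 * Integer.BYTES;

        final int numRecords = 2 * memoryPageSize / recordSize;
        // 8192 records for a 32 KiB page
        System.out.println("records needed to fill two pages: " + numRecords);
    }
}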
Use of org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator in project flink by apache.
The class FixedLengthRecordSorterTest, method testCompare.
/**
* The compare test creates a sorted stream, writes it to the buffer and
* compares random elements. It expects that earlier elements are lower than later
* ones.
*/
@Test
public void testCompare() throws Exception {
    final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, true);

    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    } while (sorter.write(record) && num < 3354624);

    // compare random elements
    Random rnd = new Random(SEED << 1);
    for (int i = 0; i < 2 * num; i++) {
        int pos1 = rnd.nextInt(num);
        int pos2 = rnd.nextInt(num);

        int cmp = sorter.compare(pos1, pos2);

        if (pos1 < pos2) {
            Assert.assertTrue(cmp <= 0);
        } else {
            Assert.assertTrue(cmp >= 0);
        }
    }

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
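As a complement to the random-pair check above, the following sketch expresses the same ordering contract over adjacent logical positions. It is only a variation on testCompare(), intended to live in the same FixedLengthRecordSorterTest class and reusing its fields and helpers (MEMORY_SIZE, MEMORY_PAGE_SIZE, memoryManager, newSortBuffer); it introduces no additional Flink API.

@Test
public void testCompareAdjacentPositionsSketch() throws Exception {
    final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, true);

    // fill the buffer with an ascending key sequence, exactly as in testCompare()
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    } while (sorter.write(record) && num < 3354624);

    // an element must never compare greater than the element written directly after it
    for (int pos = 0; pos + 1 < num; pos++) {
        Assert.assertTrue(sorter.compare(pos, pos + 1) <= 0);
    }

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}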