Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project:
class FlatMapTaskTest, method testMapTask.
@Test
public void testMapTask() {
    // A flat map over keyCnt * valCnt uniformly generated records must emit
    // exactly one output record per input record.
    final int keyCnt = 100;
    final int valCnt = 20;

    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    setOutput(this.output);

    final FlatMapDriver<Record, Record> testDriver = new FlatMapDriver<>();
    try {
        testDriver(testDriver, MockMapStub.class);
    } catch (Exception e) {
        LOG.debug("Exception while running the test driver.", e);
        // Attach the original exception as the cause so the failure report
        // keeps the full stack trace (Assert.fail would discard it).
        throw new AssertionError("Invoke method caused exception.", e);
    }

    Assert.assertEquals("Wrong result set size.", keyCnt * valCnt, this.output.getNumberOfRecords());
}
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project:
class JoinTaskExternalITCase, method testExternalSort1MatchTask.
@Test
public void testExternalSort1MatchTask() {
    // Input sizes chosen large enough to push the sort out of memory
    // (external / spilling sort path), with 4 file handles for merging.
    final int keyCnt1 = 16384 * 4;
    final int valCnt1 = 2;
    final int keyCnt2 = 8192;
    final int valCnt2 = 4 * 2;
    // An inner join of two uniform inputs produces, per key present on both
    // sides, the cross product of that key's values.
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);

    setOutput(this.output);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);

    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        // Propagate the exception as the failure cause instead of printing the
        // stack trace to stderr and failing with a bare message.
        throw new AssertionError("The test caused an exception.", e);
    }

    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project:
class JoinTaskExternalITCase, method testExternalHash1MatchTask.
@Test
public void testExternalHash1MatchTask() {
    // Large inputs so the hybrid hash join (building on the first input)
    // exercises its external / spilling code path.
    final int keyCnt1 = 32768;
    final int valCnt1 = 8;
    final int keyCnt2 = 65536;
    final int valCnt2 = 8;
    // Per key present on both sides, the join emits the cross product of values.
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);

    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.output);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);

    JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        // Keep the original exception as the cause rather than printing the
        // stack trace and failing with a message only.
        throw new AssertionError("Test caused an exception.", e);
    }

    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project:
class JoinTaskExternalITCase, method testExternalHash2MatchTask.
@Test
public void testExternalHash2MatchTask() {
    // Same setup as testExternalHash1MatchTask, but the hash table is built
    // on the second input (HYBRIDHASH_BUILD_SECOND).
    final int keyCnt1 = 32768;
    final int valCnt1 = 8;
    final int keyCnt2 = 65536;
    final int valCnt2 = 8;
    // Per key present on both sides, the join emits the cross product of values.
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);

    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.output);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);

    JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        // Keep the original exception as the cause rather than printing the
        // stack trace and failing with a message only.
        throw new AssertionError("Test caused an exception.", e);
    }

    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project:
class ReduceTaskExternalITCase, method testSingleLevelMergeCombiningReduceTask.
@Test
public void testSingleLevelMergeCombiningReduceTask() throws IOException {
    final int keyCnt = 8192;
    final int valCnt = 8;

    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    Sorter<Record> sorter = null;
    try {
        // Combining external sorter limited to 2 file handles so only a single
        // merge level is performed; the combiner pre-aggregates during the sort.
        sorter =
                ExternalSorter.newBuilder(
                                getMemoryManager(),
                                getContainingTask(),
                                RecordSerializerFactory.get().getSerializer(),
                                this.comparator.duplicate())
                        .maxNumFileHandles(2)
                        .withCombiner(new MockCombiningReduceStub())
                        .enableSpilling(getIOManager(), 0.8f)
                        .memoryFraction(this.perSortFractionMem)
                        .objectReuse(true)
                        .largeRecords(true)
                        .build(new UniformRecordGenerator(keyCnt, valCnt, false));
        addInput(sorter.getIterator());

        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        // Preserve the full stack trace in the failure, not just the message.
        throw new AssertionError("Invoke method caused exception: " + e.getMessage(), e);
    } finally {
        if (sorter != null) {
            sorter.close();
        }
    }

    // expSum = 1 + 2 + ... + (valCnt - 1); the test expects each record's
    // field 1 to equal expSum minus the key stored in field 0.
    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }

    // assertEquals (instead of assertTrue on ==) reports both values on failure.
    Assert.assertEquals("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt, keyCnt, this.outList.size());
    for (Record record : this.outList) {
        Assert.assertEquals("Incorrect result", expSum - record.getField(0, IntValue.class).getValue(), record.getField(1, IntValue.class).getValue());
    }

    this.outList.clear();
}
Aggregations