Example usage of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
Taken from class JoinTaskTest, method testSortSecondMatchTask.
/**
 * Verifies the merge-join driver when the first input is already sorted and
 * the second input must be sorted by the task itself.
 *
 * <p>Both inputs carry 20 keys with 20 values each, so the inner join must
 * produce valCnt1 * valCnt2 * min(keyCnt1, keyCnt2) result records.
 */
@Test
public void testSortSecondMatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 20;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    setOutput(this.outList);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);
    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        // First input is pre-sorted (ascending flag = true); only the second
        // input goes through the task-local sorter.
        addInput(new UniformRecordGenerator(keyCnt1, valCnt1, true));
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }
    int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    // assertEquals (rather than assertTrue on ==) reports both values on failure
    // and matches the style of the sibling hash-join tests.
    Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size());
    this.outList.clear();
}
Example usage of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
Taken from class JoinTaskTest, method testHash3MatchTask.
/**
 * Runs the hybrid-hash join (build side = first input) where the build input
 * has a single value per key and the probe input has 20 values per key.
 */
@Test
public void testHash3MatchTask() {
    // First input is the build side under HYBRIDHASH_BUILD_FIRST.
    final int buildKeys = 20;
    final int buildValsPerKey = 1;
    final int probeKeys = 20;
    final int probeValsPerKey = 20;

    // Wire up inputs, comparators, and output before configuring the driver.
    addInput(new UniformRecordGenerator(buildKeys, buildValsPerKey, false));
    addInput(new UniformRecordGenerator(probeKeys, probeValsPerKey, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);

    final JoinDriver<Record, Record, Record> driver = new JoinDriver<>();
    try {
        testDriver(driver, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }

    // Inner join cardinality: values-per-key product times the shared key count.
    final int expected = buildValsPerKey * probeValsPerKey * Math.min(buildKeys, probeKeys);
    Assert.assertEquals("Wrong result set size.", expected, this.outList.size());
    this.outList.clear();
}
Example usage of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
Taken from class JoinTaskTest, method testFailingHashFirstMatchTask.
/**
 * Ensures that an exception thrown by the user function inside the
 * hybrid-hash join is forwarded to the caller rather than swallowed.
 */
@Test
public void testFailingHashFirstMatchTask() {
    final int leftKeys = 20;
    final int leftVals = 20;
    final int rightKeys = 20;
    final int rightVals = 20;

    addInput(new UniformRecordGenerator(leftKeys, leftVals, false));
    addInput(new UniformRecordGenerator(rightKeys, rightVals, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    // Results are irrelevant here — discard them.
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);

    final JoinDriver<Record, Record, Record> driver = new JoinDriver<>();
    try {
        // MockFailingMatchStub deliberately throws; reaching the next line
        // means the exception got lost somewhere in the driver.
        testDriver(driver, MockFailingMatchStub.class);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException etex) {
        // Expected — the stub's exception surfaced correctly.
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
}
Example usage of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
Taken from class ReduceTaskTest, method testReduceTaskOnPreSortedInput.
/**
 * Runs the sorted group-reduce driver on input that is already sorted by key,
 * then checks that each key produced exactly one record with the expected
 * aggregated value (valCnt minus the key, per MockReduceStub's contract
 * as exercised here).
 */
@Test
public void testReduceTaskOnPreSortedInput() {
    final int keyCnt = 100;
    final int valCnt = 20;
    // Input is generated pre-sorted (flag = true), so no task-local sort is needed.
    addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
    GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
    try {
        testDriver(testTask, MockReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    }
    // assertEquals reports expected vs. actual on failure, unlike assertTrue on ==.
    Assert.assertEquals(
            "Resultset size was " + this.outList.size() + ". Expected was " + keyCnt,
            keyCnt, this.outList.size());
    for (Record record : this.outList) {
        Assert.assertEquals("Incorrect result",
                valCnt - record.getField(0, IntValue.class).getValue(),
                record.getField(1, IntValue.class).getValue());
    }
    this.outList.clear();
}
Example usage of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
Taken from class ReduceTaskTest, method testCombiningReduceTask.
/**
 * Runs a group-reduce with a combiner: records are fed through an external
 * sorter configured with a combining reduce stub, then reduced. Each key is
 * expected to yield one record whose value is the triangular sum of
 * 0..valCnt-1 minus the key.
 *
 * @throws IOException if closing the sorter fails
 */
@Test
public void testCombiningReduceTask() throws IOException {
    final int keyCnt = 100;
    final int valCnt = 20;
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
    Sorter<Record> sorter = null;
    try {
        sorter = ExternalSorter.newBuilder(getMemoryManager(), getContainingTask(), RecordSerializerFactory.get().getSerializer(), this.comparator.duplicate()).maxNumFileHandles(4).withCombiner(new ReduceTaskExternalITCase.MockCombiningReduceStub()).enableSpilling(getIOManager(), 0.8f).memoryFraction(this.perSortFractionMem).objectReuse(true).largeRecords(true).build(new UniformRecordGenerator(keyCnt, valCnt, false));
        addInput(sorter.getIterator());
        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    } finally {
        // Release the sorter's memory segments even if the driver failed.
        if (sorter != null) {
            sorter.close();
        }
    }
    // Closed form of sum(1..valCnt-1), which each key's values add up to.
    final int expSum = valCnt * (valCnt - 1) / 2;
    // assertEquals reports expected vs. actual on failure, unlike assertTrue on ==.
    Assert.assertEquals(
            "Resultset size was " + this.outList.size() + ". Expected was " + keyCnt,
            keyCnt, this.outList.size());
    for (Record record : this.outList) {
        Assert.assertEquals("Incorrect result",
                expSum - record.getField(0, IntValue.class).getValue(),
                record.getField(1, IntValue.class).getValue());
    }
    this.outList.clear();
}
Aggregations