Use of org.apache.flink.runtime.operators.testutils.UniformIntTupleGenerator in project flink by apache.
From the class LeftOuterJoinTaskTest, method testFailingHashLeftOuterJoinTask:
@Test(expected = ExpectedTestException.class)
public void testFailingHashLeftOuterJoinTask() throws Exception {
    int keyCnt1 = 20;
    int valCnt1 = 20;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
    getTaskConfig().setDriverStrategy(DriverStrategy.LEFT_HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(this.hash_frac);
    final AbstractOuterJoinDriver<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> testTask = getOuterJoinDriver();
    addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer);
    addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer);
    testDriver(testTask, MockFailingJoinStub.class);
}
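For orientation: UniformIntTupleGenerator is a Flink test utility whose source is not shown on this page. Judging from how the record counts are asserted in the tests below, it emits the full cross product of keyCnt keys and valCnt values as Tuple2<Integer, Integer> records. The stand-alone class below is only a hypothetical sketch of that behavior, not the actual Flink implementation.

import java.util.Iterator;
import org.apache.flink.api.java.tuple.Tuple2;

/** Hypothetical stand-in for UniformIntTupleGenerator: emits keyCnt * valCnt records. */
public class SimpleIntTupleSource implements Iterator<Tuple2<Integer, Integer>> {

    private final int keyCnt;
    private final int valCnt;
    private int key;
    private int val;

    public SimpleIntTupleSource(int keyCnt, int valCnt) {
        this.keyCnt = keyCnt;
        this.valCnt = valCnt;
    }

    @Override
    public boolean hasNext() {
        return key < keyCnt;
    }

    @Override
    public Tuple2<Integer, Integer> next() {
        // for each key 0 .. keyCnt - 1, emit the values 0 .. valCnt - 1
        Tuple2<Integer, Integer> tuple = new Tuple2<>(key, val);
        if (++val == valCnt) {
            val = 0;
            key++;
        }
        return tuple;
    }
}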
Use of org.apache.flink.runtime.operators.testutils.UniformIntTupleGenerator in project flink by apache.
From the class LeftOuterJoinTaskTest, method testHashLeftOuterJoinTask:
private void testHashLeftOuterJoinTask(int keyCnt1, int valCnt1, int keyCnt2, int valCnt2) throws Exception {
    setOutput(this.outList, this.serializer);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
    getTaskConfig().setDriverStrategy(DriverStrategy.LEFT_HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);
    final AbstractOuterJoinDriver<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> testTask = getOuterJoinDriver();
    addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, false), this.serializer);
    addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, false), this.serializer);
    testDriver(testTask, MockJoinStub.class);
    final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
    Assert.assertTrue("Result set size was " + this.outList.size() + ". Expected was " + expCnt, this.outList.size() == expCnt);
    this.outList.clear();
}
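The assertion depends on calculateExpectedCount, which is defined elsewhere in LeftOuterJoinTaskTest and not shown on this page. Under plain left outer join semantics, with both inputs using keys 0 .. keyCnt - 1 and valCnt values per key, a plausible version is the sketch below; treat it as an assumption about the helper, not its actual source.

// Sketch under the assumption that each input emits the cross product of its key and
// value ranges. Every left record whose key also exists on the right joins with valCnt2
// right records; every left record without a right-side match is padded exactly once.
private static int calculateExpectedCount(int keyCnt1, int valCnt1, int keyCnt2, int valCnt2) {
    int matchedKeys = Math.min(keyCnt1, keyCnt2);
    int unmatchedLeftKeys = Math.max(keyCnt1 - keyCnt2, 0);
    return matchedKeys * valCnt1 * valCnt2 + unmatchedLeftKeys * valCnt1;
}

Under that assumption, a symmetric case such as keyCnt1 = keyCnt2 = 20 and valCnt1 = valCnt2 = 20 would yield 20 * 20 * 20 = 8000 result records.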
Use of org.apache.flink.runtime.operators.testutils.UniformIntTupleGenerator in project flink by apache.
From the class LeftOuterJoinTaskTest, method testCancelLeftOuterJoinTaskWhileBuilding:
@Test
public void testCancelLeftOuterJoinTaskWhileBuilding() throws Exception {
    setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
    getTaskConfig().setDriverStrategy(DriverStrategy.LEFT_HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(this.hash_frac);
    final AbstractOuterJoinDriver<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> testTask = getOuterJoinDriver();
    addInput(new UniformIntTupleGenerator(100, 100, true), this.serializer);
    addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
    final AtomicReference<Throwable> error = new AtomicReference<>();
    final Thread taskRunner = new Thread("Task runner for testCancelLeftOuterJoinTaskWhileBuilding()") {
        @Override
        public void run() {
            try {
                testDriver(testTask, MockJoinStub.class);
            } catch (Throwable t) {
                error.set(t);
            }
        }
    };
    taskRunner.start();
    Thread.sleep(1000);
    cancel();
    taskRunner.join(60000);
    assertFalse("Task thread did not finish within 60 seconds", taskRunner.isAlive());
    final Throwable taskError = error.get();
    if (taskError != null) {
        fail("Error in task while canceling:\n" + Throwables.getStackTraceAsString(taskError));
    }
}
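DelayingIterator and InfiniteIntTupleIterator are Flink test utilities not shown here; together they make the second input effectively unbounded and slow, so the driver is still building its hash table when cancel() is invoked a second later. A conceptual stand-in for such a throttling wrapper, written against a plain java.util.Iterator rather than Flink's MutableObjectIterator, might look like this (illustrative only):

import java.util.Iterator;

/** Illustrative wrapper that sleeps before every element, keeping the consumer occupied. */
public class ThrottledIterator<T> implements Iterator<T> {

    private final Iterator<T> source;
    private final long delayMillis;

    public ThrottledIterator(Iterator<T> source, long delayMillis) {
        this.source = source;
        this.delayMillis = delayMillis;
    }

    @Override
    public boolean hasNext() {
        return source.hasNext();
    }

    @Override
    public T next() {
        try {
            Thread.sleep(delayMillis);
        } catch (InterruptedException e) {
            // preserve the interrupt so a cancelling caller can still stop the consuming task
            Thread.currentThread().interrupt();
        }
        return source.next();
    }
}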
Use of org.apache.flink.runtime.operators.testutils.UniformIntTupleGenerator in project flink by apache.
From the class CombineTaskTest, method testCombineTask:
@Test
public void testCombineTask() {
    try {
        int keyCnt = 100;
        int valCnt = 20;
        setInput(new UniformIntTupleGenerator(keyCnt, valCnt, false), serializer);
        addDriverComparator(this.comparator);
        addDriverComparator(this.comparator);
        setOutput(this.outList, serializer);
        getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
        getTaskConfig().setRelativeMemoryDriver(combine_frac);
        getTaskConfig().setFilehandlesDriver(2);
        final GroupReduceCombineDriver<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> testTask = new GroupReduceCombineDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
        int expSum = 0;
        for (int i = 1; i < valCnt; i++) {
            expSum += i;
        }
        assertTrue(this.outList.size() == keyCnt);
        for (Tuple2<Integer, Integer> record : this.outList) {
            assertTrue(record.f1 == expSum);
        }
        this.outList.clear();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
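The expected per-key result 0 + 1 + ... + (valCnt - 1) only holds if MockCombiningReduceStub sums the second tuple field within each key group. The actual stub lives in Flink's test utilities and is not shown here; the class below is a hedged sketch of a combiner with that behavior, expressed with Flink's GroupCombineFunction interface.

import org.apache.flink.api.common.functions.GroupCombineFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/** Illustrative combiner: emits one record per key group with the values summed. */
public class SummingCombiner
        implements GroupCombineFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {

    @Override
    public void combine(Iterable<Tuple2<Integer, Integer>> records,
                        Collector<Tuple2<Integer, Integer>> out) {
        int key = 0;
        int sum = 0;
        for (Tuple2<Integer, Integer> record : records) {
            key = record.f0;
            sum += record.f1;
        }
        out.collect(new Tuple2<>(key, sum));
    }
}

With keyCnt = 100 and valCnt = 20 this produces 100 output records, each carrying the sum 0 + 1 + ... + 19 = 190, which is exactly what the asserts check.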
Use of org.apache.flink.runtime.operators.testutils.UniformIntTupleGenerator in project flink by apache.
From the class CombinerOversizedRecordsTest, method testOversizedRecordCombineTask:
@Test
public void testOversizedRecordCombineTask() {
    try {
        final int keyCnt = 100;
        final int valCnt = 20;
        // create a large string payload (10 million random characters)
        StringBuilder bld = new StringBuilder(10 * 1024 * 1024);
        Random rnd = new Random();
        for (int i = 0; i < 10000000; i++) {
            bld.append((char) (rnd.nextInt(26) + 'a'));
        }
        String longString = bld.toString();
        bld = null;
        // construct the input as a union of
        // 1) long string
        // 2) some random values
        // 3) long string
        // 4) random values
        // 5) long string
        // random values 1
        MutableObjectIterator<Tuple2<Integer, Integer>> gen1 = new UniformIntTupleGenerator(keyCnt, valCnt, false);
        // random values 2
        MutableObjectIterator<Tuple2<Integer, Integer>> gen2 = new UniformIntTupleGenerator(keyCnt, valCnt, false);
        @SuppressWarnings("unchecked")
        MutableObjectIterator<Tuple3<Integer, Integer, String>> input = new UnionIterator<Tuple3<Integer, Integer, String>>(
                new SingleValueIterator<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(-1, -1, longString)),
                new StringIteratorDecorator(gen1),
                new SingleValueIterator<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(-1, -1, longString)),
                new StringIteratorDecorator(gen2),
                new SingleValueIterator<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(-1, -1, longString)));
        setInput(input, serializer);
        addDriverComparator(this.comparator);
        addDriverComparator(this.comparator);
        setOutput(this.outList, this.outSerializer);
        getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
        getTaskConfig().setRelativeMemoryDriver(combine_frac);
        getTaskConfig().setFilehandlesDriver(2);
        GroupReduceCombineDriver<Tuple3<Integer, Integer, String>, Tuple3<Integer, Double, String>> testTask = new GroupReduceCombineDriver<>();
        testDriver(testTask, TestCombiner.class);
        assertEquals(3, testTask.getOversizedRecordCount());
        assertTrue(keyCnt + 3 == outList.size() || 2 * keyCnt + 3 == outList.size());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
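SingleValueIterator and StringIteratorDecorator are test helpers whose source is not shown on this page. Conceptually, the decorator widens each Tuple2<Integer, Integer> into a Tuple3<Integer, Integer, String> so that the three long-string records and the small generated records travel through the same input stream; the long strings presumably do not fit into the combiner's in-memory buffer and are forwarded instead of combined, which would explain both the assertEquals(3, ...) on getOversizedRecordCount() and the keyCnt + 3 / 2 * keyCnt + 3 size check. A hypothetical sketch of such a decorator over a plain java.util.Iterator, not the actual Flink helper:

import java.util.Iterator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;

/** Illustrative decorator: attaches a small string payload to every int tuple. */
public class StringPayloadDecorator implements Iterator<Tuple3<Integer, Integer, String>> {

    private final Iterator<Tuple2<Integer, Integer>> source;

    public StringPayloadDecorator(Iterator<Tuple2<Integer, Integer>> source) {
        this.source = source;
    }

    @Override
    public boolean hasNext() {
        return source.hasNext();
    }

    @Override
    public Tuple3<Integer, Integer, String> next() {
        Tuple2<Integer, Integer> tuple = source.next();
        // keep key and value, add an empty string so the record fits the wider schema
        return new Tuple3<>(tuple.f0, tuple.f1, "");
    }
}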