Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class CrossTaskTest, method testCancelStreamCrossTaskInit:
@Test
public void testCancelStreamCrossTaskInit() {
    int keyCnt = 10;
    int valCnt = 1;
    setOutput(this.output);
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    addInput(new DelayingInfinitiveInputIterator(100));
    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_STREAMED_OUTER_FIRST);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);
    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<>();
    final AtomicBoolean success = new AtomicBoolean(false);
    Thread taskRunner = new Thread() {
        @Override
        public void run() {
            try {
                testDriver(testTask, MockCrossStub.class);
                success.set(true);
            } catch (Exception ie) {
                ie.printStackTrace();
            }
        }
    };
    taskRunner.start();
    TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
    tct.start();
    try {
        tct.join();
        taskRunner.join();
    } catch (InterruptedException ie) {
        Assert.fail("Joining threads failed");
    }
    Assert.assertTrue("Exception was thrown despite proper canceling.", success.get());
}
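The test above follows a common cancellation-test pattern: the driver runs in its own thread, a second thread (TaskCancelThread) triggers cancellation after a delay, and the test then joins both threads and checks a success flag. The sketch below is a minimal, self-contained illustration of that pattern in plain Java, outside the Flink test harness; the class and thread names here are illustrative and not Flink APIs.

import java.util.concurrent.atomic.AtomicBoolean;

public class CancellationPatternSketch {

    public static void main(String[] args) throws InterruptedException {
        final AtomicBoolean success = new AtomicBoolean(false);

        // Worker thread: stands in for the driver; it loops until it is canceled.
        Thread worker = new Thread(() -> {
            try {
                while (!Thread.currentThread().isInterrupted()) {
                    Thread.sleep(10); // simulate streaming work on an infinite input
                }
            } catch (InterruptedException expected) {
                // interruption is the expected cancellation signal here
            }
            success.set(true); // reached only if the worker shut down cleanly
        });

        // Canceller thread: stands in for TaskCancelThread; cancels after one second.
        Thread canceller = new Thread(() -> {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException ignored) {
            }
            worker.interrupt();
        });

        worker.start();
        canceller.start();
        canceller.join();
        worker.join();

        if (!success.get()) {
            throw new AssertionError("Worker did not shut down cleanly after cancellation.");
        }
    }
}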
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class CachedMatchTaskTest, method testHash2MatchTask:
@Test
public void testHash2MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 1;
    int keyCnt2 = 20;
    int valCnt2 = 1;
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED);
    getTaskConfig().setRelativeMemoryDriver(1.0f);
    BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>();
    try {
        testResettableDriver(testTask, MockMatchStub.class, 3);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size());
    this.outList.clear();
}
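The assertion at the end relies on the expected-count formula used throughout these tests: assuming the generator emits valCnt records for each of keyCnt distinct keys (which is what the formula in the test implies), every key present on both sides joins valCnt1 * valCnt2 times. A quick worked instance with the constants used above:

// Expected output size for testHash2MatchTask:
int keyCnt1 = 20, valCnt1 = 1;
int keyCnt2 = 20, valCnt2 = 1;
int matchingKeys = Math.min(keyCnt1, keyCnt2);   // 20 keys exist on both sides
int pairsPerKey = valCnt1 * valCnt2;             // 1 x 1 = 1 pairing per key
int expCnt = pairsPerKey * matchingKeys;         // 20 records expected in outList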
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class CachedMatchTaskTest, method testFailingHashSecondMatchTask:
@Test
public void testFailingHashSecondMatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 20;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED);
    getTaskConfig().setRelativeMemoryDriver(1.0f);
    BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>();
    try {
        testResettableDriver(testTask, MockFailingMatchStub.class, 3);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException etex) {
        // good!
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
}
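MockFailingMatchStub is the piece that makes this test fail on purpose; its actual implementation is not shown here. The sketch below is a hypothetical stand-in that illustrates the idea: a join function that throws ExpectedTestException so the driver's exception forwarding can be verified. The class name, the counter threshold, the use of FlatJoinFunction, and the no-argument ExpectedTestException constructor are assumptions, not the real Flink test code.

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.runtime.operators.testutils.ExpectedTestException;
import org.apache.flink.types.Record;
import org.apache.flink.util.Collector;

// Hypothetical failing stub; the real MockFailingMatchStub may differ.
public class FailingJoinStubSketch implements FlatJoinFunction<Record, Record, Record> {

    private int invocations;

    @Override
    public void join(Record first, Record second, Collector<Record> out) throws Exception {
        if (++invocations >= 10) {
            // ExpectedTestException is the marker exception the test above catches.
            throw new ExpectedTestException();
        }
        out.collect(first);
    }
}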
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class CachedMatchTaskTest, method testHash3MatchTask:
@Test
public void testHash3MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 1;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
    getTaskConfig().setRelativeMemoryDriver(1.0f);
    BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>();
    try {
        testResettableDriver(testTask, MockMatchStub.class, 3);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size());
    this.outList.clear();
}
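UniformRecordGenerator is handed straight to addInput(...) in all of these tests, which suggests it behaves as a plain iterator over keyCnt * valCnt records. The sketch below drains a generator directly; the assumptions that it follows the MutableObjectIterator next(reuse) contract (returning null when exhausted) and that the constructor arguments mean keys, values per key, and key repetition are inferred from how the tests above use it, not confirmed from the class itself.

import java.io.IOException;

import org.apache.flink.runtime.operators.testutils.UniformRecordGenerator;
import org.apache.flink.types.Record;

public class GeneratorDrainSketch {

    public static void main(String[] args) throws IOException {
        // Same parameters as testHash3MatchTask's first input: 20 keys, 1 value per key.
        UniformRecordGenerator generator = new UniformRecordGenerator(20, 1, false);

        Record reuse = new Record();
        int count = 0;
        // Assumed MutableObjectIterator contract: next(reuse) returns null once exhausted.
        while (generator.next(reuse) != null) {
            count++;
        }

        System.out.println("records produced: " + count); // expected: 20 * 1 = 20
    }
}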
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class CachedMatchTaskTest, method testFailingHashFirstMatchTask:
@Test
public void testFailingHashFirstMatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 20;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
    getTaskConfig().setRelativeMemoryDriver(1.0f);
    BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>();
    try {
        testResettableDriver(testTask, MockFailingMatchStub.class, 3);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException etex) {
        // good!
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
}
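Both failing-path tests assert exception forwarding with the classic try/catch plus Assert.fail idiom. For reference only, the same check can be written more compactly with Assert.assertThrows, available since JUnit 4.13; this is a sketch, and the Flink test base these tests extend may target an older JUnit and keep the explicit try/catch on purpose.

// Compact equivalent of the try/catch block above (requires JUnit 4.13+):
Assert.assertThrows(
        "Function exception was not forwarded.",
        ExpectedTestException.class,
        () -> testResettableDriver(testTask, MockFailingMatchStub.class, 3));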