Usage example of org.apache.flink.runtime.operators.testutils.NirvanaOutputList in the Apache Flink project.
From class JoinTaskTest, method testCancelMatchTaskWhileSort1:
@Test
public void testCancelMatchTaskWhileSort1() {
    final int keyCnt = 20;
    final int valCnt = 20;

    try {
        // Discard all produced records; this test only verifies cancellation behavior.
        setOutput(new NirvanaOutputList());
        addDriverComparator(this.comparator1);
        addDriverComparator(this.comparator2);
        getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
        getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
        getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
        setNumFileHandlesForSort(4);

        final JoinDriver<Record, Record, Record> driver = new JoinDriver<>();

        try {
            // The first input never finishes, so the task stays inside the sort
            // phase until it gets canceled.
            addInputSorted(new DelayingInfinitiveInputIterator(100), this.comparator1.duplicate());
            addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("The test caused an exception.");
        }

        // Captures any throwable escaping the driver thread for inspection below.
        final AtomicReference<Throwable> error = new AtomicReference<>();

        Thread taskRunner = new Thread(() -> {
            try {
                testDriver(driver, MockMatchStub.class);
            } catch (Throwable t) {
                error.set(t);
            }
        }, "Task runner for testCancelMatchTaskWhileSort1()");
        taskRunner.start();

        // Give the driver time to get stuck sorting before canceling it.
        Thread.sleep(1000);
        cancel();
        taskRunner.interrupt();

        taskRunner.join(60000);
        assertFalse("Task thread did not finish within 60 seconds", taskRunner.isAlive());

        Throwable taskError = error.get();
        if (taskError != null) {
            taskError.printStackTrace();
            fail("Error in task while canceling: " + taskError.getMessage());
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Usage example of org.apache.flink.runtime.operators.testutils.NirvanaOutputList in the Apache Flink project.
From class ReduceTaskTest, method testCancelReduceTaskWhileReducing:
@Test
public void testCancelReduceTaskWhileReducing() {
    final int keyCnt = 1000;
    final int valCnt = 2;

    addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
    addDriverComparator(this.comparator);
    // Output is discarded; this test only verifies clean cancellation.
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    final GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();

    // Set to true only if the driver terminates without throwing.
    final AtomicBoolean success = new AtomicBoolean(false);

    Thread taskRunner = new Thread() {
        @Override
        public void run() {
            try {
                testDriver(testTask, MockDelayingReduceStub.class);
                success.set(true);
            } catch (Exception ie) {
                ie.printStackTrace();
            }
        }
    };
    taskRunner.start();

    // Cancel the task after 2 seconds while it is still reducing.
    TaskCancelThread tct = new TaskCancelThread(2, taskRunner, this);
    tct.start();

    try {
        tct.join();
        taskRunner.join();
    } catch (InterruptedException ie) {
        Assert.fail("Joining threads failed");
    }

    // FIX: 'success' was previously set by the task thread but never checked,
    // so a driver exception during cancellation went unnoticed. Assert it,
    // consistent with testCancelReduceTaskWhileSorting.
    Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get());
}
Usage example of org.apache.flink.runtime.operators.testutils.NirvanaOutputList in the Apache Flink project.
From class ReduceTaskTest, method testCancelReduceTaskWhileSorting:
@Test
public void testCancelReduceTaskWhileSorting() {
    addDriverComparator(this.comparator);
    // Records are thrown away; only cancellation behavior is under test.
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    final GroupReduceDriver<Record, Record> driver = new GroupReduceDriver<>();

    try {
        // The input never ends, keeping the task inside the sort phase until canceled.
        addInputSorted(new DelayingInfinitiveInputIterator(100), this.comparator.duplicate());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    }

    // Flipped to true only when the driver exits without an exception.
    final AtomicBoolean completedCleanly = new AtomicBoolean(false);

    Thread worker = new Thread(() -> {
        try {
            testDriver(driver, MockReduceStub.class);
            completedCleanly.set(true);
        } catch (Exception ie) {
            ie.printStackTrace();
        }
    });
    worker.start();

    // Cancel after 1 second, while the task is still sorting.
    TaskCancelThread canceler = new TaskCancelThread(1, worker, this);
    canceler.start();

    try {
        canceler.join();
        worker.join();
    } catch (InterruptedException ie) {
        Assert.fail("Joining threads failed");
    }

    Assert.assertTrue("Test threw an exception even though it was properly canceled.", completedCleanly.get());
}
Usage example of org.apache.flink.runtime.operators.testutils.NirvanaOutputList in the Apache Flink project.
From class JoinTaskTest, method testCancelHashMatchTaskWhileBuildFirst:
@Test
public void testCancelHashMatchTaskWhileBuildFirst() {
    final int keyCnt = 20;
    final int valCnt = 20;

    try {
        // First input stalls forever, so the task sits in the hash-build phase
        // until it is canceled.
        addInput(new DelayingInfinitiveInputIterator(100));
        addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
        addDriverComparator(this.comparator1);
        addDriverComparator(this.comparator2);
        getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
        // Discard all output; only cancellation behavior matters here.
        setOutput(new NirvanaOutputList());
        getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
        getTaskConfig().setRelativeMemoryDriver(hash_frac);

        final JoinDriver<Record, Record, Record> driver = new JoinDriver<>();

        // Flipped to true only if the driver exits without an exception.
        final AtomicBoolean completedCleanly = new AtomicBoolean(false);

        Thread worker = new Thread(() -> {
            try {
                testDriver(driver, MockMatchStub.class);
                completedCleanly.set(true);
            } catch (Exception ie) {
                ie.printStackTrace();
            }
        });
        worker.start();

        // Let the build side get going, then cancel.
        Thread.sleep(1000);
        cancel();

        try {
            worker.join();
        } catch (InterruptedException ie) {
            Assert.fail("Joining threads failed");
        }

        Assert.assertTrue("Test threw an exception even though it was properly canceled.", completedCleanly.get());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Usage example of org.apache.flink.runtime.operators.testutils.NirvanaOutputList in the Apache Flink project.
From class JoinTaskTest, method testFailingHashSecondMatchTask:
@Test
public void testFailingHashSecondMatchTask() {
    final int keyCnt1 = 20;
    final int valCnt1 = 20;
    final int keyCnt2 = 20;
    final int valCnt2 = 20;

    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    // Output is irrelevant to this test; sink everything.
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);

    JoinDriver<Record, Record, Record> driver = new JoinDriver<>();

    try {
        // The stub throws ExpectedTestException; the driver must forward it.
        testDriver(driver, MockFailingMatchStub.class);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException etex) {
        // Expected: the user-function exception propagated correctly.
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
}
Aggregations