Example use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From the class CrossTaskTest, method testStreamEmptyOuterCrossTask:
/**
 * Verifies that a streamed nested-loop cross with an empty outer (second) side
 * produces no output records.
 */
@Test
public void testStreamEmptyOuterCrossTask() {
    final int firstKeyCount = 10;
    final int firstValueCount = 1;
    final int secondKeyCount = 0;
    final int secondValueCount = 0;
    // Cross product cardinality; zero because the second input is empty.
    final int expectedCount = firstKeyCount * firstValueCount * secondKeyCount * secondValueCount;

    setOutput(this.output);
    addInput(new UniformRecordGenerator(firstKeyCount, firstValueCount, false));
    addInput(new UniformRecordGenerator(secondKeyCount, secondValueCount, false));

    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_STREAMED_OUTER_SECOND);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);

    final CrossDriver<Record, Record, Record> driver = new CrossDriver<>();
    try {
        testDriver(driver, MockCrossStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }

    Assert.assertEquals("Wrong result size.", expectedCount, this.output.getNumberOfRecords());
}
Example use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From the class ReduceTaskExternalITCase, method testMultiLevelMergeReduceTask:
/**
 * Runs a sorted group-reduce over a large generated input. The key count is large
 * and the sorter is limited to two file handles, which forces a multi-level
 * external merge before the reduce driver runs.
 */
@Test
public void testMultiLevelMergeReduceTask() {
    final int keyCnt = 32768;
    final int valCnt = 8;

    // Restricting file handles to 2 is what triggers the multi-level merge under test.
    setNumFileHandlesForSort(2);
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    try {
        addInputSorted(new UniformRecordGenerator(keyCnt, valCnt, false), this.comparator.duplicate());
        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Exception in Test: " + e.getMessage());
    }

    // assertEquals reports expected vs. actual values on failure, unlike
    // assertTrue over an '==' expression, which only prints a canned message.
    Assert.assertEquals("Wrong number of result records.", keyCnt, this.outList.size());
    for (Record record : this.outList) {
        Assert.assertEquals("Incorrect result",
                valCnt - record.getField(0, IntValue.class).getValue(),
                record.getField(1, IntValue.class).getValue());
    }
    this.outList.clear();
}
Example use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From the class JoinTaskTest, method testHashCancelMatchTaskWhileBuildSecond:
/**
 * Verifies that a hybrid-hash join (building on the second, never-ending input)
 * can be canceled cleanly: the driver thread must finish without throwing after
 * the cancel call.
 */
@Test
public void testHashCancelMatchTaskWhileBuildSecond() {
    final int keyCnt = 20;
    final int valCnt = 20;
    try {
        addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
        // The second input never finishes, so the build phase can only end via cancel.
        addInput(new DelayingInfinitiveInputIterator(100));
        addDriverComparator(this.comparator1);
        addDriverComparator(this.comparator2);
        getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
        setOutput(new NirvanaOutputList());
        getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
        getTaskConfig().setRelativeMemoryDriver(hash_frac);

        final JoinDriver<Record, Record, Record> driver = new JoinDriver<>();
        // Set to true only if the driver returns without an exception after cancellation.
        final AtomicBoolean completedCleanly = new AtomicBoolean(false);

        final Thread worker = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    testDriver(driver, MockMatchStub.class);
                    completedCleanly.set(true);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        });
        worker.start();

        // Give the driver time to get stuck in the build phase, then cancel it.
        Thread.sleep(1000);
        cancel();

        try {
            worker.join();
        } catch (InterruptedException ie) {
            Assert.fail("Joining threads failed");
        }

        Assert.assertTrue("Test threw an exception even though it was properly canceled.", completedCleanly.get());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From the class JoinTaskTest, method testSortBoth2MatchTask:
/**
 * Runs an inner merge join where both inputs are sorted by the test harness first,
 * then checks the result cardinality.
 */
@Test
public void testSortBoth2MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 1;
    int keyCnt2 = 20;
    int valCnt2 = 1;

    setOutput(this.outList);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);

    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }

    // Each matching key contributes valCnt1 * valCnt2 joined records.
    int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    // assertEquals reports expected vs. actual on failure, unlike
    // assertTrue over an '==' expression, which only prints a canned message.
    Assert.assertEquals("Wrong number of result records.", expCnt, this.outList.size());
    this.outList.clear();
}
Example use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From the class ChainTaskTest, method testMapTask:
// Runs a chained map + combine pipeline: a FlatMapDriver feeds a chained
// SynchronousChainedCombineDriver, and the test checks that the combiner
// collapses the valCnt values per key down to a single record per key.
@Test
public void testMapTask() {
final int keyCnt = 100;
final int valCnt = 20;
// The chained combine driver gets the full relative memory fraction.
final double memoryFraction = 1.0;
try {
// set up the mock task environment with one input and one output list
initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
addInput(new UniformRecordGenerator(keyCnt, valCnt, false), 0);
addOutput(this.outList);
// chained combine config
{
final TaskConfig combineConfig = new TaskConfig(new Configuration());
// input: read input group 0 with the shared serializer factory
combineConfig.addInputToGroup(0);
combineConfig.setInputSerializer(serFact, 0);
// output: forward records downstream unchanged
combineConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
combineConfig.setOutputSerializer(serFact);
// driver: sorted group-combine with two comparators
// (one for sorting, one for detecting group boundaries)
combineConfig.setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
combineConfig.setDriverComparator(compFact, 0);
combineConfig.setDriverComparator(compFact, 1);
combineConfig.setRelativeMemoryDriver(memoryFraction);
// udf: combining reduce stub, then chain the combiner behind the map task
combineConfig.setStubWrapper(new UserCodeClassWrapper<>(MockCombiningReduceStub.class));
getTaskConfig().addChainedTask(SynchronousChainedCombineDriver.class, combineConfig, "combine");
}
// run the chained map+combine as a BatchTask
{
registerTask(FlatMapDriver.class, MockMapStub.class);
BatchTask<FlatMapFunction<Record, Record>, Record> testTask = new BatchTask<>(this.mockEnv);
try {
testTask.invoke();
} catch (Exception e) {
e.printStackTrace();
Assert.fail("Invoke method caused exception.");
}
}
// one output record per key is expected after combining
Assert.assertEquals(keyCnt, this.outList.size());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
Aggregations