Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
Example from the class ChainTaskTest, method testDataSourceTaskOutputInCloseMethod:
@Test
public void testDataSourceTaskOutputInCloseMethod() throws IOException {
    final int numChainedTasks = 10;
    final int keyCnt = 100;
    final int valCnt = 10;
    final File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());
    DataSourceTaskTest.InputFilePreparator.prepareInputFile(
            new UniformRecordGenerator(keyCnt, valCnt, false), tempTestFile, true);
    initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    addOutput(outList);
    final DataSourceTask<Record> testTask = new DataSourceTask<>(mockEnv);
    registerFileInputTask(
            testTask, DataSourceTaskTest.MockInputFormat.class, tempTestFile.toURI().toString(), "\n");
    // build a chain of mappers behind the data source
    for (int i = 0; i < numChainedTasks; i++) {
        final TaskConfig taskConfig = new TaskConfig(new Configuration());
        taskConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
        taskConfig.setOutputSerializer(serFact);
        taskConfig.setStubWrapper(
                new UserCodeClassWrapper<>(ChainTaskTest.MockDuplicateLastValueMapFunction.class));
        getTaskConfig().addChainedTask(ChainedFlatMapDriver.class, taskConfig, "chained-" + i);
    }
    try {
        testTask.invoke();
        // each chained mapper emits one extra record from its close() method,
        // so the output holds keyCnt * valCnt records plus one per chained task
        Assert.assertEquals(keyCnt * valCnt + numChainedTasks, outList.size());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Invoke method caused exception.");
    }
}
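For context, UniformRecordGenerator produces Records with keyCnt distinct keys and valCnt values per key. A minimal sketch of consuming it directly, assuming it implements MutableObjectIterator<Record> (the constructor arguments match the usage above; the loop itself is illustrative, not taken from the Flink tests):

    UniformRecordGenerator generator = new UniformRecordGenerator(100, 10, false);
    Record record = new Record();
    int count = 0;
    // next(reuse) fills the reused record and returns it, or returns null
    // once all keyCnt * valCnt records have been produced
    while (generator.next(record) != null) {
        count++;
    }
    // count is expected to be 100 * 10 = 1000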
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
Example from the class ChainedOperatorsMetricTest, method testOperatorIOMetricReuse:
@Test
public void testOperatorIOMetricReuse() throws Exception {
    // environment
    initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    this.mockEnv =
            new MockEnvironmentBuilder()
                    .setTaskName(HEAD_OPERATOR_NAME)
                    .setManagedMemorySize(MEMORY_MANAGER_SIZE)
                    .setInputSplitProvider(this.inputSplitProvider)
                    .setBufferSize(NETWORK_BUFFER_SIZE)
                    .setMetricGroup(
                            TaskManagerMetricGroup.createTaskManagerMetricGroup(
                                            NoOpMetricRegistry.INSTANCE, "host", ResourceID.generate())
                                    .addJob(new JobID(), "jobName")
                                    .addTask(new JobVertexID(), new ExecutionAttemptID(), "task", 0, 0))
                    .build();
    final int keyCnt = 100;
    final int valCnt = 20;
    final int numRecords = keyCnt * valCnt;
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false), 0);
    addOutput(this.outList);
    // the chained operator
    addChainedOperator();
    // creates the head operator and assembles the chain
    registerTask(FlatMapDriver.class, DuplicatingFlatMapFunction.class);
    final BatchTask<FlatMapFunction<Record, Record>, Record> testTask = new BatchTask<>(this.mockEnv);
    testTask.invoke();
    // the head operator duplicates every record and the chained operator duplicates again,
    // so the task emits four times the input
    Assert.assertEquals(numRecords * 2 * 2, this.outList.size());
    final TaskMetricGroup taskMetricGroup = mockEnv.getMetricGroup();
    // verify task-level metrics
    {
        final TaskIOMetricGroup ioMetricGroup = taskMetricGroup.getIOMetricGroup();
        final Counter numRecordsInCounter = ioMetricGroup.getNumRecordsInCounter();
        final Counter numRecordsOutCounter = ioMetricGroup.getNumRecordsOutCounter();
        Assert.assertEquals(numRecords, numRecordsInCounter.getCount());
        Assert.assertEquals(numRecords * 2 * 2, numRecordsOutCounter.getCount());
    }
    // verify head operator metrics
    {
        // this only returns the existing group and doesn't create a new one
        final OperatorMetricGroup operatorMetricGroup1 = taskMetricGroup.getOrAddOperator(HEAD_OPERATOR_NAME);
        final OperatorIOMetricGroup ioMetricGroup = operatorMetricGroup1.getIOMetricGroup();
        final Counter numRecordsInCounter = ioMetricGroup.getNumRecordsInCounter();
        final Counter numRecordsOutCounter = ioMetricGroup.getNumRecordsOutCounter();
        Assert.assertEquals(numRecords, numRecordsInCounter.getCount());
        Assert.assertEquals(numRecords * 2, numRecordsOutCounter.getCount());
    }
    // verify chained operator metrics
    {
        // this only returns the existing group and doesn't create a new one
        final InternalOperatorMetricGroup operatorMetricGroup1 = taskMetricGroup.getOrAddOperator(CHAINED_OPERATOR_NAME);
        final InternalOperatorIOMetricGroup ioMetricGroup = operatorMetricGroup1.getIOMetricGroup();
        final Counter numRecordsInCounter = ioMetricGroup.getNumRecordsInCounter();
        final Counter numRecordsOutCounter = ioMetricGroup.getNumRecordsOutCounter();
        Assert.assertEquals(numRecords * 2, numRecordsInCounter.getCount());
        Assert.assertEquals(numRecords * 2 * 2, numRecordsOutCounter.getCount());
    }
}
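The counters above only add up if both the head operator and the chained operator emit every input record twice. A hedged sketch of what such a duplicating function might look like; the actual DuplicatingFlatMapFunction used by the test may differ in detail:

    public static class DuplicatingFlatMap implements FlatMapFunction<Record, Record> {
        @Override
        public void flatMap(Record value, Collector<Record> out) {
            // emitting every record twice doubles numRecordsOut relative to numRecordsIn
            out.collect(value);
            out.collect(value);
        }
    }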
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
Example from the class JoinTaskTest, method testCancelHashMatchTaskWhileBuildFirst:
@Test
public void testCancelHashMatchTaskWhileBuildFirst() {
    final int keyCnt = 20;
    final int valCnt = 20;
    try {
        // the build-side input delays indefinitely, so the task can only terminate by being canceled
        addInput(new DelayingInfinitiveInputIterator(100));
        addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
        addDriverComparator(this.comparator1);
        addDriverComparator(this.comparator2);
        getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
        setOutput(new NirvanaOutputList());
        getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
        getTaskConfig().setRelativeMemoryDriver(hash_frac);
        final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
        final AtomicBoolean success = new AtomicBoolean(false);
        Thread taskRunner = new Thread() {
            @Override
            public void run() {
                try {
                    testDriver(testTask, MockMatchStub.class);
                    success.set(true);
                } catch (Exception ie) {
                    ie.printStackTrace();
                }
            }
        };
        taskRunner.start();
        Thread.sleep(1000);
        cancel();
        try {
            taskRunner.join();
        } catch (InterruptedException ie) {
            Assert.fail("Joining threads failed");
        }
        Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
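The cancel path is only exercised because the build-side input never runs out of records. A rough sketch of such a never-ending input, in the spirit of DelayingInfinitiveInputIterator (the class name NeverEndingInput and all details here are illustrative, not the actual Flink implementation):

    public static class NeverEndingInput implements MutableObjectIterator<Record> {
        private final long delayMillis;

        public NeverEndingInput(long delayMillis) {
            this.delayMillis = delayMillis;
        }

        @Override
        public Record next(Record reuse) throws IOException {
            try {
                // sleep between records and never signal end-of-input,
                // so the consuming task finishes only when it is canceled
                Thread.sleep(delayMillis);
            } catch (InterruptedException e) {
                throw new IOException(e);
            }
            reuse.setField(0, new IntValue(0));
            return reuse;
        }

        @Override
        public Record next() throws IOException {
            return next(new Record(1));
        }
    }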
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
Example from the class JoinTaskTest, method testSortBoth3MatchTask:
@Test
public void testSortBoth3MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 1;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    setOutput(this.outList);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);
    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }
    // every common key joins valCnt1 * valCnt2 value pairs: 1 * 20 * min(20, 20) = 400 records
    int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + expCnt, this.outList.size() == expCnt);
    this.outList.clear();
}
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
Example from the class JoinTaskTest, method testFailingHashSecondMatchTask:
@Test
public void testFailingHashSecondMatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 20;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);
    JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        // the user function throws an ExpectedTestException, which the driver must forward
        testDriver(testTask, MockFailingMatchStub.class);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException etex) {
        // good!
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
}
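The test hinges on the user function throwing from its join callback and the JoinDriver surfacing that exception to the caller. A hedged sketch of a failing stub along the lines of MockFailingMatchStub (the actual test class may differ):

    public static class FailingMatchStub implements FlatJoinFunction<Record, Record, Record> {
        @Override
        public void join(Record first, Record second, Collector<Record> out) throws Exception {
            // fail deliberately; the driver is expected to forward this exception, not swallow it
            throw new ExpectedTestException();
        }
    }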