Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class DataSourceTaskTest, method testDataSourceTask:
@Test
public void testDataSourceTask() throws IOException {
    int keyCnt = 100;
    int valCnt = 20;
    this.outList = new ArrayList<>();
    File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());
    InputFilePreparator.prepareInputFile(new UniformRecordGenerator(keyCnt, valCnt, false), tempTestFile, true);
    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    super.addOutput(this.outList);
    DataSourceTask<Record> testTask = new DataSourceTask<>(this.mockEnv);
    super.registerFileInputTask(testTask, MockInputFormat.class, tempTestFile.toURI().toString(), "\n");
    try {
        testTask.invoke();
    } catch (Exception e) {
        System.err.println(e);
        Assert.fail("Invoke method caused exception.");
    }
    // Check via reflection that the task opened and closed its input format.
    try {
        Field formatField = DataSourceTask.class.getDeclaredField("format");
        formatField.setAccessible(true);
        MockInputFormat inputFormat = (MockInputFormat) formatField.get(testTask);
        Assert.assertTrue("Input format was not opened.", inputFormat.opened);
        Assert.assertTrue("Input format was not closed.", inputFormat.closed);
    } catch (Exception e) {
        System.err.println(e);
        Assert.fail("Reflection error while trying to validate inputFormat status.");
    }
    Assert.assertEquals("Invalid output size.", keyCnt * valCnt, this.outList.size());
    // Verify that every key occurs with all of its distinct values.
    HashMap<Integer, HashSet<Integer>> keyValueCountMap = new HashMap<>(keyCnt);
    for (Record kvp : this.outList) {
        int key = kvp.getField(0, IntValue.class).getValue();
        int val = kvp.getField(1, IntValue.class).getValue();
        if (!keyValueCountMap.containsKey(key)) {
            keyValueCountMap.put(key, new HashSet<>());
        }
        keyValueCountMap.get(key).add(val);
    }
    Assert.assertEquals("Invalid key count in output.", keyCnt, keyValueCountMap.size());
    for (Integer mapKey : keyValueCountMap.keySet()) {
        Assert.assertEquals("Invalid value count for key " + mapKey + ".", valCnt, keyValueCountMap.get(mapKey).size());
    }
}
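The assertions above encode the generator's contract: UniformRecordGenerator(keyCnt, valCnt, false) produces exactly keyCnt * valCnt records, pairing every key in [0, keyCnt) with every value in [0, valCnt) as two IntValue fields. A simplified sketch of that contract (not the actual Flink implementation, which also supports start offsets and key repetition):

import org.apache.flink.types.IntValue;
import org.apache.flink.types.Record;
import org.apache.flink.util.MutableObjectIterator;

// Simplified sketch: emits every (key, value) pair exactly once, key-major.
public class SimpleUniformRecordGenerator implements MutableObjectIterator<Record> {

    private final int numKeys;
    private final int numVals;
    private int key = 0;
    private int val = 0;

    public SimpleUniformRecordGenerator(int numKeys, int numVals) {
        this.numKeys = numKeys;
        this.numVals = numVals;
    }

    @Override
    public Record next(Record reuse) {
        if (val >= numVals) {
            return null; // all numKeys * numVals records have been emitted
        }
        reuse.setField(0, new IntValue(key));
        reuse.setField(1, new IntValue(val));
        if (++key == numKeys) { // wrap the key, advance to the next value
            key = 0;
            val++;
        }
        return reuse;
    }

    @Override
    public Record next() {
        return next(new Record(2));
    }
}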
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class DataSourceTaskTest, method testFailingDataSourceTask:
@Test
public void testFailingDataSourceTask() throws IOException {
    int keyCnt = 20;
    int valCnt = 10;
    this.outList = new NirvanaOutputList();
    File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());
    InputFilePreparator.prepareInputFile(new UniformRecordGenerator(keyCnt, valCnt, false), tempTestFile, false);
    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    super.addOutput(this.outList);
    DataSourceTask<Record> testTask = new DataSourceTask<>(this.mockEnv);
    super.registerFileInputTask(testTask, MockFailingInputFormat.class, tempTestFile.toURI().toString(), "\n");
    boolean stubFailed = false;
    try {
        testTask.invoke();
    } catch (Exception e) {
        stubFailed = true;
    }
    Assert.assertTrue("Function exception was not forwarded.", stubFailed);
    // The temp input file must still exist after the failed run.
    Assert.assertTrue("Temp input file does not exist.", tempTestFile.exists());
}
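The test only requires that the format's failure surfaces from invoke(). The real MockFailingInputFormat is a class defined inside DataSourceTaskTest; purely as an illustration, a failing format could look like the following sketch (class name and failure point are hypothetical):

import java.io.IOException;
import org.apache.flink.api.common.io.DelimitedInputFormat;
import org.apache.flink.types.Record;

// Hypothetical failing format: fails after a few records are read.
public class FailingRecordFormat extends DelimitedInputFormat<Record> {

    private int readCount = 0;

    @Override
    public Record readRecord(Record reuse, byte[] bytes, int offset, int numBytes) throws IOException {
        if (++readCount > 5) {
            // Simulated user-code failure; the DataSourceTask must forward it.
            throw new RuntimeException("expected test exception");
        }
        return reuse;
    }
}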
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class JoinTaskTest, method testSortBoth1MatchTask:
@Test
public void testSortBoth1MatchTask() {
    final int keyCnt1 = 20;
    final int valCnt1 = 1;
    final int keyCnt2 = 10;
    final int valCnt2 = 2;
    setOutput(this.outList);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);
    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }
    // Each key common to both inputs joins valCnt1 * valCnt2 value combinations.
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size());
    this.outList.clear();
}
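The expected count follows from the inputs: both generators start at key 0, so only the Math.min(keyCnt1, keyCnt2) = 10 lowest keys exist on both sides, and each of those keys joins valCnt1 * valCnt2 = 2 value combinations, for 20 output records. This assumes the stub emits exactly one record per matched pair, along the lines of this sketch (name illustrative; the actual MockMatchStub is a test-local class):

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.types.Record;
import org.apache.flink.util.Collector;

// Pass-through join stub: one output record per matching pair, so the
// result size is valCnt1 * valCnt2 * min(keyCnt1, keyCnt2).
public class PassThroughMatchStub implements FlatJoinFunction<Record, Record, Record> {

    @Override
    public void join(Record first, Record second, Collector<Record> out) {
        out.collect(first);
    }
}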
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class JoinTaskTest, method testHashFirstCancelMatchTaskWhileMatching:
@Test
public void testHashFirstCancelMatchTaskWhileMatching() {
    int keyCnt = 20;
    int valCnt = 20;
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);
    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    final AtomicBoolean success = new AtomicBoolean(false);
    // Run the driver in a separate thread so this thread can cancel it.
    Thread taskRunner = new Thread() {
        @Override
        public void run() {
            try {
                testDriver(testTask, MockMatchStub.class);
                success.set(true);
            } catch (Exception ie) {
                ie.printStackTrace();
            }
        }
    };
    taskRunner.start();
    TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
    tct.start();
    try {
        tct.join();
        taskRunner.join();
    } catch (InterruptedException ie) {
        Assert.fail("Joining threads failed");
    }
    Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get());
}
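TaskCancelThread is the Flink test utility used here: it cancels the running task after roughly one second and interrupts the runner thread, and the test then asserts that cancellation did not surface as a failure. As a rough illustration of the pattern only (class name and constructor are hypothetical, not the real utility's API):

// Illustrative delayed-cancel helper; the real TaskCancelThread lives in
// Flink's test utilities and takes its timeout in seconds.
public class DelayedCancelThread extends Thread {

    private final long delayMillis;
    private final Thread toInterrupt;
    private final Runnable cancelAction;

    public DelayedCancelThread(long delayMillis, Thread toInterrupt, Runnable cancelAction) {
        this.delayMillis = delayMillis;
        this.toInterrupt = toInterrupt;
        this.cancelAction = cancelAction;
    }

    @Override
    public void run() {
        try {
            Thread.sleep(delayMillis); // let the join make some progress first
        } catch (InterruptedException ignored) {
        }
        cancelAction.run();      // mark the driver as canceled
        toInterrupt.interrupt(); // wake the task thread if it is blocked
    }
}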
Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.
From the class CachedMatchTaskTest, method testHash5MatchTask:
@Test
public void testHash5MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 20;
    int keyCnt2 = 20;
    int valCnt2 = 20;
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
    getTaskConfig().setRelativeMemoryDriver(1.0f);
    BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<>();
    try {
        // Run the resettable driver three times; the build side is cached across runs.
        testResettableDriver(testTask, MockMatchStub.class, 3);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size());
    this.outList.clear();
}
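testResettableDriver drives the operator through three runs; with HYBRIDHASH_BUILD_FIRST_CACHED the build-side hash table is built once and reused, so only the probe side is re-consumed on each run. A sketch of the lifecycle under that assumption (the interface and harness names are illustrative, not Flink's actual API):

// Hypothetical minimal interface capturing the resettable-driver lifecycle.
interface ResettableLifecycle {
    void initialize() throws Exception; // build phase: create the cached hash table once
    void run() throws Exception;        // probe phase: emit the join result
    void reset() throws Exception;      // rewind inputs, keep the cached build side
    void cleanup() throws Exception;    // release the cached structures
}

// Rough sketch of what a resettable-driver test loop with 3 iterations does.
static void runResettable(ResettableLifecycle driver, int iterations) throws Exception {
    driver.initialize();
    for (int i = 0; i < iterations; i++) {
        driver.run();
        if (i < iterations - 1) {
            driver.reset();
        }
    }
    driver.cleanup();
}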