Usage example of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From class CrossTaskExternalITCase, method testExternalStreamCrossTask:
/**
 * Verifies that a streamed nested-loop cross (outer = first input) produces the full
 * Cartesian product even when the second input is too large to fit in memory.
 */
@Test
public void testExternalStreamCrossTask() {
    final int keyCnt1 = 2;
    final int valCnt1 = 1;
    // 87381 records fit into memory, 87382 do not!
    final int keyCnt2 = 87385;
    final int valCnt2 = 1;
    // A cross emits every pairing of the two inputs: |input1| * |input2| records.
    final int expCnt = keyCnt1 * valCnt1 * keyCnt2 * valCnt2;

    setOutput(this.output);
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_STREAMED_OUTER_FIRST);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);

    // Diamond operator for consistency with the other driver instantiations in this file.
    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<>();

    try {
        testDriver(testTask, MockCrossStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }

    Assert.assertEquals("Wrong result size.", expCnt, this.output.getNumberOfRecords());
}
Usage example of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From class CoGroupTaskExternalITCase, method testExternalSortCoGroupTask:
/**
 * Verifies the co-group driver over two inputs that are large enough to force an
 * external (spilling) sort before the co-group is performed.
 */
@Test
public void testExternalSortCoGroupTask() {
    final int keyCnt1 = 16384 * 8;
    final int valCnt1 = 32;
    final int keyCnt2 = 65536 * 4;
    final int valCnt2 = 4;
    // Keys present in both inputs produce valCnt1 * valCnt2 pairs each; keys present in
    // only one input still produce one group per value of that side.
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2)
            + (keyCnt1 > keyCnt2 ? (keyCnt1 - keyCnt2) * valCnt1 : (keyCnt2 - keyCnt1) * valCnt2);

    setOutput(this.output);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);

    // Diamond operator for consistency with the other driver instantiations in this file.
    final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();

    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockCoGroupStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }

    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Usage example of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From class CombineTaskExternalITCase, method testSingleLevelMergeCombineTask:
/**
 * Verifies the sorted group-combine driver on an input sized for a single-level merge.
 * The combiner is not required to fully aggregate each key, so the test re-aggregates
 * the emitted records and checks the per-key sums.
 */
@Test
public void testSingleLevelMergeCombineTask() {
    final int keyCnt = 40000;
    final int valCnt = 8;

    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    addDriverComparator(this.comparator);
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
    getTaskConfig().setRelativeMemoryDriver(combine_frac);
    getTaskConfig().setFilehandlesDriver(2);

    final GroupReduceCombineDriver<Record, Record> testTask = new GroupReduceCombineDriver<>();

    try {
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Invoke method caused exception.");
    }

    // Each key's values are 0..valCnt-1, so the fully combined sum per key is 0+1+...+(valCnt-1).
    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }

    // We need to do the final aggregation manually in the test, because the
    // combiner is not guaranteed to do that.
    final HashMap<IntValue, IntValue> aggMap = new HashMap<>();
    for (Record record : this.outList) {
        IntValue key = new IntValue();
        IntValue value = new IntValue();
        key = record.getField(0, key);
        value = record.getField(1, value);
        IntValue prevVal = aggMap.get(key);
        if (prevVal != null) {
            aggMap.put(key, new IntValue(prevVal.getValue() + value.getValue()));
        } else {
            aggMap.put(key, value);
        }
    }

    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == y).
    Assert.assertEquals("Wrong number of distinct keys in the result.", keyCnt, aggMap.size());
    for (IntValue integer : aggMap.values()) {
        Assert.assertEquals("Incorrect aggregated sum for a key.", expSum, integer.getValue());
    }

    this.outList.clear();
}
Usage example of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From class CombineTaskExternalITCase, method testMultiLevelMergeCombineTask:
/**
 * Verifies the sorted group-combine driver on an input large enough to require a
 * multi-level merge. The combiner is not required to fully aggregate each key, so the
 * test re-aggregates the emitted records and checks the per-key sums.
 */
@Test
public void testMultiLevelMergeCombineTask() throws Exception {
    final int keyCnt = 100000;
    final int valCnt = 8;

    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    addDriverComparator(this.comparator);
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
    getTaskConfig().setRelativeMemoryDriver(combine_frac);
    getTaskConfig().setFilehandlesDriver(2);

    final GroupReduceCombineDriver<Record, Record> testTask = new GroupReduceCombineDriver<>();

    try {
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Invoke method caused exception.");
    }

    // Each key's values are 0..valCnt-1, so the fully combined sum per key is 0+1+...+(valCnt-1).
    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }

    // We need to do the final aggregation manually in the test, because the
    // combiner is not guaranteed to do that.
    final HashMap<IntValue, IntValue> aggMap = new HashMap<>();
    for (Record record : this.outList) {
        IntValue key = new IntValue();
        IntValue value = new IntValue();
        key = record.getField(0, key);
        value = record.getField(1, value);
        IntValue prevVal = aggMap.get(key);
        if (prevVal != null) {
            aggMap.put(key, new IntValue(prevVal.getValue() + value.getValue()));
        } else {
            aggMap.put(key, value);
        }
    }

    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == y).
    Assert.assertEquals("Wrong number of distinct keys in the result.", keyCnt, aggMap.size());
    for (IntValue integer : aggMap.values()) {
        Assert.assertEquals("Incorrect aggregated sum for a key.", expSum, integer.getValue());
    }

    this.outList.clear();
}
Usage example of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in the Apache Flink project.
From class CrossTaskTest, method testFailingBlockCrossTask2:
/**
 * Verifies that an exception thrown by the user stub inside a blocked nested-loop
 * cross (outer = second input) is forwarded to the caller and not swallowed.
 */
@Test
public void testFailingBlockCrossTask2() {
    final int outerKeys = 10;
    final int outerVals = 1;
    final int innerKeys = 100;
    final int innerVals = 4;

    setOutput(this.output);
    addInput(new UniformRecordGenerator(outerKeys, outerVals, false));
    addInput(new UniformRecordGenerator(innerKeys, innerVals, false));

    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_BLOCKED_OUTER_SECOND);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);

    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<>();

    try {
        testDriver(testTask, MockFailingCrossStub.class);
        Assert.fail("Exception not forwarded.");
    } catch (ExpectedTestException etex) {
        // expected: the failing stub deliberately throws this exception
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }
}
Aggregations