Search in sources :

Example 26 with UniformRecordGenerator

use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

From the class CrossTaskExternalITCase, method testExternalStreamCrossTask.

@Test
public void testExternalStreamCrossTask() {
    // Cardinalities of the two inputs. The second input is sized just past
    // the in-memory threshold (87381 records fit into memory, 87382 do not),
    // forcing the streamed cross to spill externally.
    final int outerKeys = 2;
    final int outerVals = 1;
    final int innerKeys = 87385;
    final int innerVals = 1;
    // A cross pairs every record of one input with every record of the other.
    final int expCnt = outerKeys * outerVals * innerKeys * innerVals;

    setOutput(this.output);
    addInput(new UniformRecordGenerator(outerKeys, outerVals, false));
    addInput(new UniformRecordGenerator(innerKeys, innerVals, false));
    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_STREAMED_OUTER_FIRST);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);

    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<>();
    try {
        testDriver(testTask, MockCrossStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }

    Assert.assertEquals("Wrong result size.", expCnt, this.output.getNumberOfRecords());
}
Also used : Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) Test(org.junit.Test)

Example 27 with UniformRecordGenerator

use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

From the class CoGroupTaskExternalITCase, method testExternalSortCoGroupTask.

@Test
public void testExternalSortCoGroupTask() {
    // Large inputs with overlapping key ranges; both sides are sorted
    // externally before the co-group runs.
    final int keysLeft = 16384 * 8;
    final int valsLeft = 32;
    final int keysRight = 65536 * 4;
    final int valsRight = 4;

    // Keys present on both sides pair every left value with every right
    // value; keys present only on the larger side emit their values alone.
    final int matchedCnt = valsLeft * valsRight * Math.min(keysLeft, keysRight);
    final int unmatchedCnt;
    if (keysLeft > keysRight) {
        unmatchedCnt = (keysLeft - keysRight) * valsLeft;
    } else {
        unmatchedCnt = (keysRight - keysLeft) * valsRight;
    }
    final int expCnt = matchedCnt + unmatchedCnt;

    setOutput(this.output);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.CO_GROUP);

    final CoGroupDriver<Record, Record, Record> testTask = new CoGroupDriver<>();
    try {
        addInputSorted(new UniformRecordGenerator(keysLeft, valsLeft, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keysRight, valsRight, false), this.comparator2.duplicate());
        testDriver(testTask, MockCoGroupStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }

    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Also used : Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) Test(org.junit.Test)

Example 28 with UniformRecordGenerator

use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

From the class CombineTaskExternalITCase, method testSingleLevelMergeCombineTask.

@Test
public void testSingleLevelMergeCombineTask() {
    // Input sized so the combiner spills but needs only a single merge level.
    final int keyCnt = 40000;
    final int valCnt = 8;
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    addDriverComparator(this.comparator);
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
    getTaskConfig().setRelativeMemoryDriver(combine_frac);
    getTaskConfig().setFilehandlesDriver(2);
    final GroupReduceCombineDriver<Record, Record> testTask = new GroupReduceCombineDriver<>();
    try {
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Invoke method caused exception.");
    }
    // Expected per-key sum: the generator emits values 0..valCnt-1 per key
    // (0 contributes nothing), so the sum is 1 + 2 + ... + (valCnt - 1).
    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }
    // We need to do the final aggregation manually in the test, because the
    // combiner is not guaranteed to fully aggregate each key.
    final HashMap<IntValue, IntValue> aggMap = new HashMap<>();
    for (Record record : this.outList) {
        IntValue key = new IntValue();
        IntValue value = new IntValue();
        key = record.getField(0, key);
        value = record.getField(1, value);
        IntValue prevVal = aggMap.get(key);
        if (prevVal != null) {
            aggMap.put(key, new IntValue(prevVal.getValue() + value.getValue()));
        } else {
            aggMap.put(key, value);
        }
    }
    // assertEquals (not assertTrue on ==) so a failure reports both values.
    Assert.assertEquals("Resultset size was " + aggMap.size() + ". Expected was " + keyCnt,
            keyCnt, aggMap.size());
    for (IntValue integer : aggMap.values()) {
        Assert.assertEquals("Incorrect result", expSum, integer.getValue());
    }
    this.outList.clear();
}
Also used : HashMap(java.util.HashMap) Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) IntValue(org.apache.flink.types.IntValue) ExpectedTestException(org.apache.flink.runtime.operators.testutils.ExpectedTestException) Test(org.junit.Test)

Example 29 with UniformRecordGenerator

use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

From the class CombineTaskExternalITCase, method testMultiLevelMergeCombineTask.

@Test
public void testMultiLevelMergeCombineTask() throws Exception {
    // Larger key space than the single-level test, so the combiner must
    // perform a multi-level external merge.
    final int keyCnt = 100000;
    final int valCnt = 8;
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    addDriverComparator(this.comparator);
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
    getTaskConfig().setRelativeMemoryDriver(combine_frac);
    getTaskConfig().setFilehandlesDriver(2);
    final GroupReduceCombineDriver<Record, Record> testTask = new GroupReduceCombineDriver<>();
    try {
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Invoke method caused exception.");
    }
    // Expected per-key sum: the generator emits values 0..valCnt-1 per key
    // (0 contributes nothing), so the sum is 1 + 2 + ... + (valCnt - 1).
    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }
    // We need to do the final aggregation manually in the test, because the
    // combiner is not guaranteed to fully aggregate each key.
    final HashMap<IntValue, IntValue> aggMap = new HashMap<>();
    for (Record record : this.outList) {
        IntValue key = new IntValue();
        IntValue value = new IntValue();
        key = record.getField(0, key);
        value = record.getField(1, value);
        IntValue prevVal = aggMap.get(key);
        if (prevVal != null) {
            aggMap.put(key, new IntValue(prevVal.getValue() + value.getValue()));
        } else {
            aggMap.put(key, value);
        }
    }
    // assertEquals (not assertTrue on ==) so a failure reports both values.
    Assert.assertEquals("Resultset size was " + aggMap.size() + ". Expected was " + keyCnt,
            keyCnt, aggMap.size());
    for (IntValue integer : aggMap.values()) {
        Assert.assertEquals("Incorrect result", expSum, integer.getValue());
    }
    this.outList.clear();
}
Also used : HashMap(java.util.HashMap) Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) IntValue(org.apache.flink.types.IntValue) ExpectedTestException(org.apache.flink.runtime.operators.testutils.ExpectedTestException) Test(org.junit.Test)

Example 30 with UniformRecordGenerator

use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

From the class CrossTaskTest, method testFailingBlockCrossTask2.

@Test
public void testFailingBlockCrossTask2() {
    // Small inputs; the point of this test is exception forwarding, not size.
    final int firstKeys = 10;
    final int firstVals = 1;
    final int secondKeys = 100;
    final int secondVals = 4;

    setOutput(this.output);
    addInput(new UniformRecordGenerator(firstKeys, firstVals, false));
    addInput(new UniformRecordGenerator(secondKeys, secondVals, false));
    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_BLOCKED_OUTER_SECOND);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);

    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<>();
    // The stub throws ExpectedTestException; the driver must forward it
    // unchanged rather than swallowing or wrapping it.
    try {
        testDriver(testTask, MockFailingCrossStub.class);
        Assert.fail("Exception not forwarded.");
    } catch (ExpectedTestException etex) {
    // good!
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }
}
Also used : ExpectedTestException(org.apache.flink.runtime.operators.testutils.ExpectedTestException) Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) ExpectedTestException(org.apache.flink.runtime.operators.testutils.ExpectedTestException) Test(org.junit.Test)

Aggregations

UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator)101 Record (org.apache.flink.types.Record)101 Test (org.junit.Test)101 ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException)68 IOException (java.io.IOException)22 TaskCancelThread (org.apache.flink.runtime.operators.testutils.TaskCancelThread)20 NirvanaOutputList (org.apache.flink.runtime.operators.testutils.NirvanaOutputList)19 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)16 DelayingInfinitiveInputIterator (org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator)12 IntValue (org.apache.flink.types.IntValue)12 Configuration (org.apache.flink.configuration.Configuration)10 File (java.io.File)9 MemorySegment (org.apache.flink.core.memory.MemorySegment)9 MemoryAllocationException (org.apache.flink.runtime.memory.MemoryAllocationException)9 HashMap (java.util.HashMap)8 FileNotFoundException (java.io.FileNotFoundException)5 BatchTask (org.apache.flink.runtime.operators.BatchTask)5 TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig)5 HashSet (java.util.HashSet)4 DataSourceTaskTest (org.apache.flink.runtime.operators.DataSourceTaskTest)4