
Example 6 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

The class FlatMapTaskTest, method testMapTask.

@Test
public void testMapTask() {
    final int keyCnt = 100;
    final int valCnt = 20;
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
    setOutput(this.output);
    final FlatMapDriver<Record, Record> testDriver = new FlatMapDriver<>();
    try {
        testDriver(testDriver, MockMapStub.class);
    } catch (Exception e) {
        LOG.debug("Exception while running the test driver.", e);
        Assert.fail("Invoke method caused exception.");
    }
    Assert.assertEquals("Wrong result set size.", keyCnt * valCnt, this.output.getNumberOfRecords());
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException), Test (org.junit.Test)
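
For orientation, a minimal conceptual sketch of what a uniform generator does (not the actual UniformRecordGenerator from org.apache.flink.runtime.operators.testutils, whose interface may differ): it enumerates every (key, value) combination for keyCnt keys and valCnt values per key, which is why the FlatMap test above expects keyCnt * valCnt = 100 * 20 = 2000 output records.

import java.util.Iterator;
import java.util.NoSuchElementException;

import org.apache.flink.types.IntValue;
import org.apache.flink.types.Record;

// Conceptual sketch only: emits keyCnt * valCnt records, field 0 = key, field 1 = value.
public class UniformRecordSketch implements Iterator<Record> {

    private final int keyCnt;
    private final int valCnt;
    private int key;
    private int val;

    public UniformRecordSketch(int keyCnt, int valCnt) {
        this.keyCnt = keyCnt;
        this.valCnt = valCnt;
    }

    @Override
    public boolean hasNext() {
        return key < keyCnt;
    }

    @Override
    public Record next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        Record rec = new Record(new IntValue(key), new IntValue(val));
        if (++val == valCnt) { // advance to the next key after valCnt values
            val = 0;
            key++;
        }
        return rec;
    }
}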

Example 7 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

The class JoinTaskExternalITCase, method testExternalSort1MatchTask.

@Test
public void testExternalSort1MatchTask() {
    final int keyCnt1 = 16384 * 4;
    final int valCnt1 = 2;
    final int keyCnt2 = 8192;
    final int valCnt2 = 4 * 2;
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    setOutput(this.output);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);
    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }
    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), Test (org.junit.Test)
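
The expected count follows from the uniform key ranges: both inputs draw keys from [0, keyCnt), so an inner join matches Math.min(keyCnt1, keyCnt2) distinct keys, and each matched key contributes valCnt1 * valCnt2 pairs. A quick standalone check of the numbers above (not part of the test):

public class JoinCardinalityCheck {

    public static void main(String[] args) {
        int keyCnt1 = 16384 * 4; // first input: 65536 keys, 2 values per key
        int valCnt1 = 2;
        int keyCnt2 = 8192;      // second input: 8192 keys, 8 values per key
        int valCnt2 = 4 * 2;

        // Every key of the smaller key range finds a partner; each match yields valCnt1 * valCnt2 pairs.
        int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
        System.out.println(expCnt); // 2 * 8 * 8192 = 131072
    }
}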

Example 8 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

The class JoinTaskExternalITCase, method testExternalHash1MatchTask.

@Test
public void testExternalHash1MatchTask() {
    final int keyCnt1 = 32768;
    final int valCnt1 = 8;
    final int keyCnt2 = 65536;
    final int valCnt2 = 8;
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.output);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);
    JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), Test (org.junit.Test)

Example 9 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

The class JoinTaskExternalITCase, method testExternalHash2MatchTask.

@Test
public void testExternalHash2MatchTask() {
    final int keyCnt1 = 32768;
    final int valCnt1 = 8;
    final int keyCnt2 = 65536;
    final int valCnt2 = 8;
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.output);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);
    JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
    Assert.assertEquals("Wrong result set size.", expCnt, this.output.getNumberOfRecords());
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), Test (org.junit.Test)
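
Examples 8 and 9 differ only in the build side of the hybrid hash join (HYBRIDHASH_BUILD_FIRST vs. HYBRIDHASH_BUILD_SECOND); the expected output size is the same in both. For orientation, a hypothetical stand-in for the match stub (the actual MockMatchStub in the Flink test sources may be defined differently): a FlatJoinFunction that emits one record per matched pair, which is why the assertion compares the output size against the number of join pairs.

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.types.Record;
import org.apache.flink.util.Collector;

// Hypothetical stand-in only: forwards one record per matched pair,
// so the driver's output size equals the expected pair count (expCnt).
public class ForwardingMatchStub implements FlatJoinFunction<Record, Record, Record> {

    @Override
    public void join(Record first, Record second, Collector<Record> out) {
        out.collect(first);
    }
}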

Example 10 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

The class ReduceTaskExternalITCase, method testSingleLevelMergeCombiningReduceTask.

@Test
public void testSingleLevelMergeCombiningReduceTask() throws IOException {
    final int keyCnt = 8192;
    final int valCnt = 8;
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
    Sorter<Record> sorter = null;
    try {
        sorter =
                ExternalSorter.newBuilder(
                                getMemoryManager(),
                                getContainingTask(),
                                RecordSerializerFactory.get().getSerializer(),
                                this.comparator.duplicate())
                        .maxNumFileHandles(2)
                        .withCombiner(new MockCombiningReduceStub())
                        .enableSpilling(getIOManager(), 0.8f)
                        .memoryFraction(this.perSortFractionMem)
                        .objectReuse(true)
                        .largeRecords(true)
                        .build(new UniformRecordGenerator(keyCnt, valCnt, false));
        addInput(sorter.getIterator());
        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    } finally {
        if (sorter != null) {
            sorter.close();
        }
    }
    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }
    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt, this.outList.size() == keyCnt);
    for (Record record : this.outList) {
        Assert.assertTrue("Incorrect result", record.getField(1, IntValue.class).getValue() == expSum - record.getField(0, IntValue.class).getValue());
    }
    this.outList.clear();
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), IntValue (org.apache.flink.types.IntValue), IOException (java.io.IOException), Test (org.junit.Test)
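
The expected per-key sum in the final check is simply 1 + 2 + ... + (valCnt - 1), i.e. valCnt * (valCnt - 1) / 2 = 28 for valCnt = 8. A quick standalone check of that arithmetic (not part of the test):

public class CombineSumCheck {

    public static void main(String[] args) {
        int valCnt = 8;

        // Loop form, exactly as in the test above.
        int expSum = 0;
        for (int i = 1; i < valCnt; i++) {
            expSum += i;
        }

        // Closed form of the same sum.
        int closedForm = valCnt * (valCnt - 1) / 2;
        System.out.println(expSum + " == " + closedForm); // 28 == 28
    }
}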

Aggregations

UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator): 101
Record (org.apache.flink.types.Record): 101
Test (org.junit.Test): 101
ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException): 68
IOException (java.io.IOException): 22
TaskCancelThread (org.apache.flink.runtime.operators.testutils.TaskCancelThread): 20
NirvanaOutputList (org.apache.flink.runtime.operators.testutils.NirvanaOutputList): 19
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 16
DelayingInfinitiveInputIterator (org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator): 12
IntValue (org.apache.flink.types.IntValue): 12
Configuration (org.apache.flink.configuration.Configuration): 10
File (java.io.File): 9
MemorySegment (org.apache.flink.core.memory.MemorySegment): 9
MemoryAllocationException (org.apache.flink.runtime.memory.MemoryAllocationException): 9
HashMap (java.util.HashMap): 8
FileNotFoundException (java.io.FileNotFoundException): 5
BatchTask (org.apache.flink.runtime.operators.BatchTask): 5
TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig): 5
HashSet (java.util.HashSet): 4
DataSourceTaskTest (org.apache.flink.runtime.operators.DataSourceTaskTest): 4