
Example 31 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

Class CrossTaskTest, method testStreamEmptyOuterCrossTask.

@Test
public void testStreamEmptyOuterCrossTask() {
    int keyCnt1 = 10;
    int valCnt1 = 1;
    int keyCnt2 = 0;
    int valCnt2 = 0;
    final int expCnt = keyCnt1 * valCnt1 * keyCnt2 * valCnt2;
    setOutput(this.output);
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_STREAMED_OUTER_SECOND);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);
    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<>();
    try {
        testDriver(testTask, MockCrossStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }
    Assert.assertEquals("Wrong result size.", expCnt, this.output.getNumberOfRecords());
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException), Test (org.junit.Test)
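
The expected count here works out to 10 * 1 * 0 * 0 = 0: the second generator produces no records, so the streamed outer cross must emit nothing. As a minimal sketch of how a generator's output could be counted directly, assuming UniformRecordGenerator implements Flink's MutableObjectIterator<Record> (whose next(reuse) returns null once the input is exhausted); the drainAndCount helper is hypothetical and not part of the test base:

import org.apache.flink.runtime.operators.testutils.UniformRecordGenerator;
import org.apache.flink.types.Record;

// Hypothetical helper: drains a generator and counts the records it emits.
static int drainAndCount(UniformRecordGenerator gen) throws Exception {
    int count = 0;
    Record reuse = new Record();
    // Assumption: next(reuse) returns the filled record, or null when exhausted.
    while (gen.next(reuse) != null) {
        count++;
    }
    return count;
}

// drainAndCount(new UniformRecordGenerator(10, 1, false)) -> 10 records
// drainAndCount(new UniformRecordGenerator(0, 0, false))  -> 0 records, matching expCnt above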

Example 32 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

Class ReduceTaskExternalITCase, method testMultiLevelMergeReduceTask.

@Test
public void testMultiLevelMergeReduceTask() {
    final int keyCnt = 32768;
    final int valCnt = 8;
    setNumFileHandlesForSort(2);
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
    try {
        addInputSorted(new UniformRecordGenerator(keyCnt, valCnt, false), this.comparator.duplicate());
        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Exception in Test: " + e.getMessage());
    }
    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt, this.outList.size() == keyCnt);
    for (Record record : this.outList) {
        Assert.assertTrue("Incorrect result", record.getField(1, IntValue.class).getValue() == valCnt - record.getField(0, IntValue.class).getValue());
    }
    this.outList.clear();
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), IntValue (org.apache.flink.types.IntValue), IOException (java.io.IOException), Test (org.junit.Test)
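
With keyCnt = 32768 and valCnt = 8 the sorted input holds 262,144 records, and setNumFileHandlesForSort(2) presumably caps the merge fan-in at two files, so the external sort cannot combine all spilled runs in a single pass; that is what makes this a multi-level merge. A rough back-of-the-envelope sketch (the run count below is hypothetical, since the sort memory is not shown in this excerpt):

// Rough arithmetic only; the number of spilled runs is a made-up figure for illustration.
int totalRecords = 32768 * 8;   // 262,144 records fed into the external sort
int fanIn = 2;                  // from setNumFileHandlesForSort(2), assumed to bound the merge fan-in
int spilledRuns = 16;           // hypothetical
int mergePasses = 0;
for (int runs = spilledRuns; runs > 1; runs = (runs + fanIn - 1) / fanIn) {
    mergePasses++;              // 16 -> 8 -> 4 -> 2 -> 1: four merge passes with these numbers
}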

Example 33 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

Class JoinTaskTest, method testHashCancelMatchTaskWhileBuildSecond.

@Test
public void testHashCancelMatchTaskWhileBuildSecond() {
    final int keyCnt = 20;
    final int valCnt = 20;
    try {
        addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
        addInput(new DelayingInfinitiveInputIterator(100));
        addDriverComparator(this.comparator1);
        addDriverComparator(this.comparator2);
        getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
        setOutput(new NirvanaOutputList());
        getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
        getTaskConfig().setRelativeMemoryDriver(hash_frac);
        final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
        final AtomicBoolean success = new AtomicBoolean(false);
        Thread taskRunner = new Thread() {

            @Override
            public void run() {
                try {
                    testDriver(testTask, MockMatchStub.class);
                    success.set(true);
                } catch (Exception ie) {
                    ie.printStackTrace();
                }
            }
        };
        taskRunner.start();
        // Let the task run against the delayed, infinite second input for a second, then cancel it.
        Thread.sleep(1000);
        cancel();
        try {
            taskRunner.join();
        } catch (InterruptedException ie) {
            Assert.fail("Joining threads failed");
        }
        Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Also used: AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean), DelayingInfinitiveInputIterator (org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator), Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), NirvanaOutputList (org.apache.flink.runtime.operators.testutils.NirvanaOutputList), ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException), TaskCancelThread (org.apache.flink.runtime.operators.testutils.TaskCancelThread), Test (org.junit.Test)

Example 34 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

Class JoinTaskTest, method testSortBoth2MatchTask.

@Test
public void testSortBoth2MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 1;
    int keyCnt2 = 20;
    int valCnt2 = 1;
    setOutput(this.outList);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);
    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();
    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }
    // Each key common to both inputs joins valCnt1 * valCnt2 times.
    int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + expCnt, this.outList.size() == expCnt);
    this.outList.clear();
}
Also used: Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException), Test (org.junit.Test)

Example 35 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache.

Class ChainTaskTest, method testMapTask.

@Test
public void testMapTask() {
    final int keyCnt = 100;
    final int valCnt = 20;
    final double memoryFraction = 1.0;
    try {
        // environment
        initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
        addInput(new UniformRecordGenerator(keyCnt, valCnt, false), 0);
        addOutput(this.outList);
        // chained combine config
        {
            final TaskConfig combineConfig = new TaskConfig(new Configuration());
            // input
            combineConfig.addInputToGroup(0);
            combineConfig.setInputSerializer(serFact, 0);
            // output
            combineConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
            combineConfig.setOutputSerializer(serFact);
            // driver
            combineConfig.setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
            combineConfig.setDriverComparator(compFact, 0);
            combineConfig.setDriverComparator(compFact, 1);
            combineConfig.setRelativeMemoryDriver(memoryFraction);
            // udf
            combineConfig.setStubWrapper(new UserCodeClassWrapper<>(MockCombiningReduceStub.class));
            getTaskConfig().addChainedTask(SynchronousChainedCombineDriver.class, combineConfig, "combine");
        }
        // chained map+combine
        {
            registerTask(FlatMapDriver.class, MockMapStub.class);
            BatchTask<FlatMapFunction<Record, Record>, Record> testTask = new BatchTask<>(this.mockEnv);
            try {
                testTask.invoke();
            } catch (Exception e) {
                e.printStackTrace();
                Assert.fail("Invoke method caused exception.");
            }
        }
        // One result per distinct key is expected after the chained combine.
        Assert.assertEquals(keyCnt, this.outList.size());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Also used: Configuration (org.apache.flink.configuration.Configuration), BatchTask (org.apache.flink.runtime.operators.BatchTask), TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig), MockMapStub (org.apache.flink.runtime.operators.FlatMapTaskTest.MockMapStub), IOException (java.io.IOException), UserCodeClassWrapper (org.apache.flink.api.common.operators.util.UserCodeClassWrapper), Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), FlatMapDriver (org.apache.flink.runtime.operators.FlatMapDriver), DataSourceTaskTest (org.apache.flink.runtime.operators.DataSourceTaskTest), Test (org.junit.Test)

Aggregations

UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator): 101
Record (org.apache.flink.types.Record): 101
Test (org.junit.Test): 101
ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException): 68
IOException (java.io.IOException): 22
TaskCancelThread (org.apache.flink.runtime.operators.testutils.TaskCancelThread): 20
NirvanaOutputList (org.apache.flink.runtime.operators.testutils.NirvanaOutputList): 19
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 16
DelayingInfinitiveInputIterator (org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator): 12
IntValue (org.apache.flink.types.IntValue): 12
Configuration (org.apache.flink.configuration.Configuration): 10
File (java.io.File): 9
MemorySegment (org.apache.flink.core.memory.MemorySegment): 9
MemoryAllocationException (org.apache.flink.runtime.memory.MemoryAllocationException): 9
HashMap (java.util.HashMap): 8
FileNotFoundException (java.io.FileNotFoundException): 5
BatchTask (org.apache.flink.runtime.operators.BatchTask): 5
TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig): 5
HashSet (java.util.HashSet): 4
DataSourceTaskTest (org.apache.flink.runtime.operators.DataSourceTaskTest): 4
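
From the constructor calls and assertions in the examples above, UniformRecordGenerator(keyCnt, valCnt, repeat) evidently enumerates keyCnt distinct integer keys with valCnt values each, i.e. keyCnt * valCnt records with the key in field 0 and the value in field 1. A sketch of equivalent data, purely for illustration and not the actual implementation:

import java.util.ArrayList;
import java.util.List;
import org.apache.flink.types.IntValue;
import org.apache.flink.types.Record;

// Illustration only: builds the keyCnt * valCnt records that a
// UniformRecordGenerator(keyCnt, valCnt, false) is expected to emit
// (key in field 0, value in field 1, both as IntValue).
static List<Record> uniformRecords(int keyCnt, int valCnt) {
    List<Record> records = new ArrayList<>(keyCnt * valCnt);
    for (int key = 0; key < keyCnt; key++) {
        for (int val = 0; val < valCnt; val++) {
            Record r = new Record(2);
            r.setField(0, new IntValue(key));
            r.setField(1, new IntValue(val));
            records.add(r);
        }
    }
    return records;
}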