
Example 1 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache, from the class CachedMatchTaskTest, method testHash1MatchTask.

@Test
public void testHash1MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 1;
    int keyCnt2 = 10;
    int valCnt2 = 2;
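    // two uniform inputs: 20 keys with one value each, and 10 keys with two values each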
    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
    getTaskConfig().setRelativeMemoryDriver(1.0f);
    BuildFirstCachedJoinDriver<Record, Record, Record> testTask = new BuildFirstCachedJoinDriver<Record, Record, Record>();
    try {
        testResettableDriver(testTask, MockMatchStub.class, 3);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size());
    this.outList.clear();
}
Also used : Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) ExpectedTestException(org.apache.flink.runtime.operators.testutils.ExpectedTestException) Test(org.junit.Test)
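
With keyCnt1 = 20, valCnt1 = 1, keyCnt2 = 10 and valCnt2 = 2, the expected count works out to 1 * 2 * min(20, 10) = 20: the ten keys shared by both inputs each produce valCnt1 * valCnt2 matches. As a minimal sketch of what a single generator emits (not from the Flink sources; it assumes UniformRecordGenerator implements MutableObjectIterator<Record>, as its use with addInput(...) above suggests, reuses the Record/UniformRecordGenerator/Test imports listed above, and the method name is only for illustration):

@Test
public void drainUniformRecordGenerator() throws Exception {
    UniformRecordGenerator gen = new UniformRecordGenerator(20, 1, false);
    Record reuse = new Record();
    int count = 0;
    // drain the generator; next(reuse) is assumed to return null once all records are emitted
    while (gen.next(reuse) != null) {
        count++;
    }
    // keyCnt = 20 with valCnt = 1 should give keyCnt * valCnt = 20 records
    Assert.assertEquals(20, count);
}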

Example 2 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache, from the class CachedMatchTaskTest, method testHashCancelMatchTaskWhileBuildSecond.

@Test
public void testHashCancelMatchTaskWhileBuildSecond() {
    int keyCnt = 20;
    int valCnt = 20;
    addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
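    // the second input keeps delaying records and never finishes, so the task can only end by being cancelled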
    addInput(new DelayingInfinitiveInputIterator(100));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED);
    getTaskConfig().setRelativeMemoryDriver(1.0f);
    final BuildSecondCachedJoinDriver<Record, Record, Record> testTask = new BuildSecondCachedJoinDriver<Record, Record, Record>();
    final AtomicBoolean success = new AtomicBoolean(false);
    Thread taskRunner = new Thread() {

        @Override
        public void run() {
            try {
                testDriver(testTask, MockMatchStub.class);
                success.set(true);
            } catch (Exception ie) {
                ie.printStackTrace();
            }
        }
    };
    taskRunner.start();
    TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
    tct.start();
    try {
        tct.join();
        taskRunner.join();
    } catch (InterruptedException ie) {
        Assert.fail("Joining threads failed");
    }
    Assert.assertTrue("Test threw an exception even though it was properly canceled.", success.get());
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) DelayingInfinitiveInputIterator(org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator) TaskCancelThread(org.apache.flink.runtime.operators.testutils.TaskCancelThread) Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) NirvanaOutputList(org.apache.flink.runtime.operators.testutils.NirvanaOutputList) ExpectedTestException(org.apache.flink.runtime.operators.testutils.ExpectedTestException) Test(org.junit.Test)
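
The cancellation pattern above (a worker thread guarded by an AtomicBoolean success flag, a cancel helper thread, and a join on both threads) can be reduced to plain JDK threading. The following is only a sketch of that pattern, with Thread.sleep and interrupt standing in for testDriver(...) and Flink's TaskCancelThread; it is not taken from the Flink sources:

@Test
public void cancellationPatternSketch() throws Exception {
    final AtomicBoolean success = new AtomicBoolean(false);
    // stand-in for the task runner: blocks until it is "cancelled" by an interrupt
    Thread worker = new Thread(() -> {
        try {
            Thread.sleep(Long.MAX_VALUE);
        } catch (InterruptedException expected) {
            // being interrupted is the expected cancellation signal in this sketch
            success.set(true);
        }
    });
    worker.start();
    // stand-in for TaskCancelThread: cancel the worker after a short delay
    Thread canceller = new Thread(() -> {
        try {
            Thread.sleep(100);
        } catch (InterruptedException ignored) {
        }
        worker.interrupt();
    });
    canceller.start();
    canceller.join();
    worker.join();
    Assert.assertTrue("Worker was not cancelled cleanly.", success.get());
}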

Example 3 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache, from the class DataSinkTaskTest, method testDataSinkTask.

@Test
public void testDataSinkTask() {
    FileReader fr = null;
    BufferedReader br = null;
    try {
        int keyCnt = 100;
        int valCnt = 20;
        super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
        super.addInput(new UniformRecordGenerator(keyCnt, valCnt, false), 0);
        DataSinkTask<Record> testTask = new DataSinkTask<>(this.mockEnv);
        File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());
        super.registerFileOutputTask(MockOutputFormat.class, tempTestFile.toURI().toString(), new Configuration());
        testTask.invoke();
        Assert.assertTrue("Temp output file does not exist", tempTestFile.exists());
        fr = new FileReader(tempTestFile);
        br = new BufferedReader(fr);
        HashMap<Integer, HashSet<Integer>> keyValueCountMap = new HashMap<>(keyCnt);
        while (br.ready()) {
            String line = br.readLine();
            Integer key = Integer.parseInt(line.substring(0, line.indexOf("_")));
            Integer val = Integer.parseInt(line.substring(line.indexOf("_") + 1, line.length()));
            if (!keyValueCountMap.containsKey(key)) {
                keyValueCountMap.put(key, new HashSet<Integer>());
            }
            keyValueCountMap.get(key).add(val);
        }
        Assert.assertTrue("Invalid key count in out file. Expected: " + keyCnt + " Actual: " + keyValueCountMap.keySet().size(), keyValueCountMap.keySet().size() == keyCnt);
        for (Integer key : keyValueCountMap.keySet()) {
            Assert.assertTrue("Invalid value count for key: " + key + ". Expected: " + valCnt + " Actual: " + keyValueCountMap.get(key).size(), keyValueCountMap.get(key).size() == valCnt);
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (Throwable t) {
            }
        }
        if (fr != null) {
            try {
                fr.close();
            } catch (Throwable t) {
            }
        }
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) HashMap(java.util.HashMap) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) Record(org.apache.flink.types.Record) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) File(java.io.File) HashSet(java.util.HashSet) Test(org.junit.Test)
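
The manual FileReader/BufferedReader cleanup in the finally block can be written more compactly with try-with-resources. Below is a sketch of the same verification, reusing keyCnt, valCnt and tempTestFile from the test above and assuming the enclosing test method declares throws Exception; it is not the form used in the Flink sources:

try (BufferedReader br = new BufferedReader(new FileReader(tempTestFile))) {
    HashMap<Integer, HashSet<Integer>> keyValueCountMap = new HashMap<>();
    String line;
    while ((line = br.readLine()) != null) {
        // each line is expected to look like "<key>_<value>"
        String[] parts = line.split("_", 2);
        int key = Integer.parseInt(parts[0]);
        int val = Integer.parseInt(parts[1]);
        keyValueCountMap.computeIfAbsent(key, k -> new HashSet<>()).add(val);
    }
    Assert.assertEquals("Invalid key count in out file.", keyCnt, keyValueCountMap.size());
    for (HashSet<Integer> values : keyValueCountMap.values()) {
        Assert.assertEquals("Invalid value count for a key.", valCnt, values.size());
    }
}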

Example 4 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache, from the class DataSinkTaskTest, method testSortingDataSinkTask.

@Test
@SuppressWarnings("unchecked")
public void testSortingDataSinkTask() {
    int keyCnt = 100;
    int valCnt = 20;
    double memoryFraction = 1.0;
    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    super.addInput(new UniformRecordGenerator(keyCnt, valCnt, true), 0);
    DataSinkTask<Record> testTask = new DataSinkTask<>(this.mockEnv);
    // set sorting
    super.getTaskConfig().setInputLocalStrategy(0, LocalStrategy.SORT);
    super.getTaskConfig().setInputComparator(new RecordComparatorFactory(new int[] { 1 }, (new Class[] { IntValue.class })), 0);
    super.getTaskConfig().setRelativeMemoryInput(0, memoryFraction);
    super.getTaskConfig().setFilehandlesInput(0, 8);
    super.getTaskConfig().setSpillingThresholdInput(0, 0.8f);
    File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());
    super.registerFileOutputTask(MockOutputFormat.class, tempTestFile.toURI().toString(), new Configuration());
    try {
        testTask.invoke();
    } catch (Exception e) {
        LOG.debug("Exception while invoking the test task.", e);
        Assert.fail("Invoke method caused exception.");
    }
    Assert.assertTrue("Temp output file does not exist", tempTestFile.exists());
    FileReader fr = null;
    BufferedReader br = null;
    try {
        fr = new FileReader(tempTestFile);
        br = new BufferedReader(fr);
        Set<Integer> keys = new HashSet<>();
        int curVal = -1;
        while (br.ready()) {
            String line = br.readLine();
            Integer key = Integer.parseInt(line.substring(0, line.indexOf("_")));
            Integer val = Integer.parseInt(line.substring(line.indexOf("_") + 1, line.length()));
            // check that values are in correct order
            Assert.assertTrue("Values not in ascending order", val >= curVal);
            // next value hit
            if (val > curVal) {
                if (curVal != -1) {
                    // check that we saw 100 distinct keys for this value
                    Assert.assertTrue("Keys missing for value", keys.size() == 100);
                }
                // empty keys set
                keys.clear();
                // update current value
                curVal = val;
            }
            Assert.assertTrue("Duplicate key for value", keys.add(key));
        }
    } catch (FileNotFoundException e) {
        Assert.fail("Out file got lost...");
    } catch (IOException ioe) {
        Assert.fail("Caught IOE while reading out file");
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (Throwable t) {
            }
        }
        if (fr != null) {
            try {
                fr.close();
            } catch (Throwable t) {
            }
        }
    }
}
Also used : RecordComparatorFactory(org.apache.flink.runtime.testutils.recordutils.RecordComparatorFactory) Configuration(org.apache.flink.configuration.Configuration) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) BufferedReader(java.io.BufferedReader) Record(org.apache.flink.types.Record) FileReader(java.io.FileReader) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) File(java.io.File) HashSet(java.util.HashSet) Test(org.junit.Test)
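
The grouped check above verifies a value's key set only when the next larger value appears, so the final value group falls outside the loop. Below is a sketch of the same check that also covers the last group; it reuses keyCnt and tempTestFile from the test, assumes the enclosing test method declares throws Exception, and is not the form used in the Flink sources:

try (BufferedReader reader = new BufferedReader(new FileReader(tempTestFile))) {
    Set<Integer> keys = new HashSet<>();
    int curVal = -1;
    String line;
    while ((line = reader.readLine()) != null) {
        String[] parts = line.split("_", 2);
        int key = Integer.parseInt(parts[0]);
        int val = Integer.parseInt(parts[1]);
        // values must be non-decreasing because the input was sorted on the value field
        Assert.assertTrue("Values not in ascending order", val >= curVal);
        if (val > curVal) {
            if (curVal != -1) {
                Assert.assertEquals("Keys missing for value " + curVal, keyCnt, keys.size());
            }
            keys.clear();
            curVal = val;
        }
        Assert.assertTrue("Duplicate key for value", keys.add(key));
    }
    // also verify the final value group, which the loop never closes with a larger value
    Assert.assertEquals("Keys missing for value " + curVal, keyCnt, keys.size());
}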

Example 5 with UniformRecordGenerator

Use of org.apache.flink.runtime.operators.testutils.UniformRecordGenerator in project flink by apache, from the class DataSinkTaskTest, method testUnionDataSinkTask.

@Test
public void testUnionDataSinkTask() {
    int keyCnt = 10;
    int valCnt = 20;
    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    final IteratorWrappingTestSingleInputGate<?>[] readers = new IteratorWrappingTestSingleInputGate[4];
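    // four generators with starting keys 0, keyCnt, keyCnt * 2 and keyCnt * 3, one per union input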
    readers[0] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, 0, 0, false), 0, false);
    readers[1] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, keyCnt, 0, false), 0, false);
    readers[2] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, keyCnt * 2, 0, false), 0, false);
    readers[3] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, keyCnt * 3, 0, false), 0, false);
    DataSinkTask<Record> testTask = new DataSinkTask<>(this.mockEnv);
    File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());
    super.registerFileOutputTask(MockOutputFormat.class, tempTestFile.toURI().toString(), new Configuration());
    try {
        // notify all readers that data is available, which forwards existing notifications on registerListener calls
        for (IteratorWrappingTestSingleInputGate<?> reader : readers) {
            reader.notifyNonEmpty();
        }
        testTask.invoke();
    } catch (Exception e) {
        LOG.debug("Exception while invoking the test task.", e);
        Assert.fail("Invoke method caused exception.");
    }
    Assert.assertTrue("Temp output file does not exist", tempTestFile.exists());
    FileReader fr = null;
    BufferedReader br = null;
    try {
        fr = new FileReader(tempTestFile);
        br = new BufferedReader(fr);
        HashMap<Integer, HashSet<Integer>> keyValueCountMap = new HashMap<>(keyCnt);
        while (br.ready()) {
            String line = br.readLine();
            Integer key = Integer.parseInt(line.substring(0, line.indexOf("_")));
            Integer val = Integer.parseInt(line.substring(line.indexOf("_") + 1, line.length()));
            if (!keyValueCountMap.containsKey(key)) {
                keyValueCountMap.put(key, new HashSet<Integer>());
            }
            keyValueCountMap.get(key).add(val);
        }
        Assert.assertTrue("Invalid key count in out file. Expected: " + keyCnt + " Actual: " + keyValueCountMap.keySet().size(), keyValueCountMap.keySet().size() == keyCnt * 4);
        for (Integer key : keyValueCountMap.keySet()) {
            Assert.assertTrue("Invalid value count for key: " + key + ". Expected: " + valCnt + " Actual: " + keyValueCountMap.get(key).size(), keyValueCountMap.get(key).size() == valCnt);
        }
    } catch (FileNotFoundException e) {
        Assert.fail("Out file got lost...");
    } catch (IOException ioe) {
        Assert.fail("Caught IOE while reading out file");
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (Throwable t) {
            }
        }
        if (fr != null) {
            try {
                fr.close();
            } catch (Throwable t) {
            }
        }
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) HashMap(java.util.HashMap) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) IteratorWrappingTestSingleInputGate(org.apache.flink.runtime.io.network.partition.consumer.IteratorWrappingTestSingleInputGate) BufferedReader(java.io.BufferedReader) Record(org.apache.flink.types.Record) FileReader(java.io.FileReader) UniformRecordGenerator(org.apache.flink.runtime.operators.testutils.UniformRecordGenerator) File(java.io.File) HashSet(java.util.HashSet) Test(org.junit.Test)
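
The union assertion expects keyCnt * 4 distinct keys because the four generators use the five-argument constructor with starting keys 0, keyCnt, keyCnt * 2 and keyCnt * 3. The following fragment is a sketch of that bookkeeping only, assuming each generator emits keys in the range [startKey, startKey + keyCnt), which is what the expected count relies on:

// sketch only: shows why the union should contain keyCnt * 4 distinct keys
int keyCnt = 10;
Set<Integer> allKeys = new HashSet<>();
for (int start : new int[] { 0, keyCnt, keyCnt * 2, keyCnt * 3 }) {
    for (int k = start; k < start + keyCnt; k++) {
        Assert.assertTrue("Key ranges overlap", allKeys.add(k));
    }
}
Assert.assertEquals(keyCnt * 4, allKeys.size());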

Aggregations

UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator): 101 usages
Record (org.apache.flink.types.Record): 101 usages
Test (org.junit.Test): 101 usages
ExpectedTestException (org.apache.flink.runtime.operators.testutils.ExpectedTestException): 68 usages
IOException (java.io.IOException): 22 usages
TaskCancelThread (org.apache.flink.runtime.operators.testutils.TaskCancelThread): 20 usages
NirvanaOutputList (org.apache.flink.runtime.operators.testutils.NirvanaOutputList): 19 usages
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 16 usages
DelayingInfinitiveInputIterator (org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator): 12 usages
IntValue (org.apache.flink.types.IntValue): 12 usages
Configuration (org.apache.flink.configuration.Configuration): 10 usages
File (java.io.File): 9 usages
MemorySegment (org.apache.flink.core.memory.MemorySegment): 9 usages
MemoryAllocationException (org.apache.flink.runtime.memory.MemoryAllocationException): 9 usages
HashMap (java.util.HashMap): 8 usages
FileNotFoundException (java.io.FileNotFoundException): 5 usages
BatchTask (org.apache.flink.runtime.operators.BatchTask): 5 usages
TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig): 5 usages
HashSet (java.util.HashSet): 4 usages
DataSourceTaskTest (org.apache.flink.runtime.operators.DataSourceTaskTest): 4 usages