
Example 51 with HashSet

Use of java.util.HashSet in project flink by apache.

From the class DataSinkTaskTest, method testDataSinkTask.

@Test
public void testDataSinkTask() {
    FileReader fr = null;
    BufferedReader br = null;
    try {
        int keyCnt = 100;
        int valCnt = 20;
        super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
        super.addInput(new UniformRecordGenerator(keyCnt, valCnt, false), 0);
        DataSinkTask<Record> testTask = new DataSinkTask<>();
        super.registerFileOutputTask(testTask, MockOutputFormat.class, new File(tempTestPath).toURI().toString());
        testTask.invoke();
        File tempTestFile = new File(this.tempTestPath);
        Assert.assertTrue("Temp output file does not exist", tempTestFile.exists());
        fr = new FileReader(tempTestFile);
        br = new BufferedReader(fr);
        HashMap<Integer, HashSet<Integer>> keyValueCountMap = new HashMap<>(keyCnt);
        while (br.ready()) {
            String line = br.readLine();
            Integer key = Integer.parseInt(line.substring(0, line.indexOf("_")));
            Integer val = Integer.parseInt(line.substring(line.indexOf("_") + 1, line.length()));
            if (!keyValueCountMap.containsKey(key)) {
                keyValueCountMap.put(key, new HashSet<Integer>());
            }
            keyValueCountMap.get(key).add(val);
        }
        Assert.assertTrue("Invalid key count in out file. Expected: " + keyCnt + " Actual: " + keyValueCountMap.keySet().size(), keyValueCountMap.keySet().size() == keyCnt);
        for (Integer key : keyValueCountMap.keySet()) {
            Assert.assertTrue("Invalid value count for key: " + key + ". Expected: " + valCnt + " Actual: " + keyValueCountMap.get(key).size(), keyValueCountMap.get(key).size() == valCnt);
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (Throwable t) {
            }
        }
        if (fr != null) {
            try {
                fr.close();
            } catch (Throwable t) {
            }
        }
    }
}
Also used: HashMap (java.util.HashMap), IOException (java.io.IOException), FileNotFoundException (java.io.FileNotFoundException), BufferedReader (java.io.BufferedReader), FileReader (java.io.FileReader), Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), File (java.io.File), HashSet (java.util.HashSet), Test (org.junit.Test)
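
A minimal standalone sketch of the same grouping pattern, reading "<key>_<value>" lines into a HashMap of HashSets with computeIfAbsent instead of the containsKey/put sequence above. The class and method names (KeyValueLineParser, parse) are illustrative, not part of the Flink sources:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class KeyValueLineParser {

    // Reads lines of the form "<key>_<value>" and collects the distinct values per key.
    public static Map<Integer, Set<Integer>> parse(Reader input) throws IOException {
        Map<Integer, Set<Integer>> valuesPerKey = new HashMap<>();
        try (BufferedReader reader = new BufferedReader(input)) {
            String line;
            while ((line = reader.readLine()) != null) {
                int sep = line.indexOf('_');
                int key = Integer.parseInt(line.substring(0, sep));
                int val = Integer.parseInt(line.substring(sep + 1));
                // computeIfAbsent replaces the explicit containsKey/put dance from the test
                valuesPerKey.computeIfAbsent(key, k -> new HashSet<>()).add(val);
            }
        }
        return valuesPerKey;
    }
}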

Example 52 with HashSet

Use of java.util.HashSet in project flink by apache.

From the class DataSinkTaskTest, method testSortingDataSinkTask.

@Test
@SuppressWarnings("unchecked")
public void testSortingDataSinkTask() {
    int keyCnt = 100;
    int valCnt = 20;
    double memoryFraction = 1.0;
    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    super.addInput(new UniformRecordGenerator(keyCnt, valCnt, true), 0);
    DataSinkTask<Record> testTask = new DataSinkTask<>();
    // set sorting
    super.getTaskConfig().setInputLocalStrategy(0, LocalStrategy.SORT);
    super.getTaskConfig().setInputComparator(new RecordComparatorFactory(new int[] { 1 }, (new Class[] { IntValue.class })), 0);
    super.getTaskConfig().setRelativeMemoryInput(0, memoryFraction);
    super.getTaskConfig().setFilehandlesInput(0, 8);
    super.getTaskConfig().setSpillingThresholdInput(0, 0.8f);
    super.registerFileOutputTask(testTask, MockOutputFormat.class, new File(tempTestPath).toURI().toString());
    try {
        testTask.invoke();
    } catch (Exception e) {
        LOG.debug("Exception while invoking the test task.", e);
        Assert.fail("Invoke method caused exception.");
    }
    File tempTestFile = new File(this.tempTestPath);
    Assert.assertTrue("Temp output file does not exist", tempTestFile.exists());
    FileReader fr = null;
    BufferedReader br = null;
    try {
        fr = new FileReader(tempTestFile);
        br = new BufferedReader(fr);
        Set<Integer> keys = new HashSet<>();
        int curVal = -1;
        while (br.ready()) {
            String line = br.readLine();
            Integer key = Integer.parseInt(line.substring(0, line.indexOf("_")));
            Integer val = Integer.parseInt(line.substring(line.indexOf("_") + 1, line.length()));
            // check that values are in correct order
            Assert.assertTrue("Values not in ascending order", val >= curVal);
            // next value hit
            if (val > curVal) {
                if (curVal != -1) {
                    // check that we saw all 100 distinct keys for this value
                    Assert.assertTrue("Keys missing for value", keys.size() == 100);
                }
                // empty keys set
                keys.clear();
                // update current value
                curVal = val;
            }
            Assert.assertTrue("Duplicate key for value", keys.add(key));
        }
    } catch (FileNotFoundException e) {
        Assert.fail("Out file got lost...");
    } catch (IOException ioe) {
        Assert.fail("Caught IOE while reading out file");
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (Throwable t) {
            }
        }
        if (fr != null) {
            try {
                fr.close();
            } catch (Throwable t) {
            }
        }
    }
}
Also used: RecordComparatorFactory (org.apache.flink.runtime.testutils.recordutils.RecordComparatorFactory), FileNotFoundException (java.io.FileNotFoundException), IOException (java.io.IOException), BufferedReader (java.io.BufferedReader), Record (org.apache.flink.types.Record), FileReader (java.io.FileReader), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), File (java.io.File), HashSet (java.util.HashSet), Test (org.junit.Test)
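
A minimal sketch, outside the Flink test harness, of the order-and-distinctness check this sorted sink test performs: values must be non-decreasing, and within each value group a HashSet rejects duplicate keys. SortedGroupChecker and its parameters are illustrative names, not Flink API:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SortedGroupChecker {

    // Each entry is a (key, value) pair, mirroring the "<key>_<value>" lines in the test.
    // Returns false on a descending value, an incomplete group, or a duplicate key in a group.
    // Note: the final group is not re-checked for completeness in this sketch.
    public static boolean isSortedWithDistinctKeys(List<int[]> entries, int expectedKeysPerGroup) {
        Set<Integer> keysInGroup = new HashSet<>();
        int currentValue = Integer.MIN_VALUE;
        for (int[] entry : entries) {
            int key = entry[0];
            int value = entry[1];
            if (value < currentValue) {
                return false; // values must arrive in non-decreasing order
            }
            if (value > currentValue) {
                // a new value group starts: the previous group must have been complete
                if (currentValue != Integer.MIN_VALUE && keysInGroup.size() != expectedKeysPerGroup) {
                    return false;
                }
                keysInGroup.clear();
                currentValue = value;
            }
            if (!keysInGroup.add(key)) {
                return false; // duplicate key within the same value group
            }
        }
        return true;
    }
}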

Example 53 with HashSet

Use of java.util.HashSet in project flink by apache.

From the class DataSinkTaskTest, method testUnionDataSinkTask.

@Test
public void testUnionDataSinkTask() {
    int keyCnt = 10;
    int valCnt = 20;
    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    final IteratorWrappingTestSingleInputGate<?>[] readers = new IteratorWrappingTestSingleInputGate[4];
    readers[0] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, 0, 0, false), 0, false);
    readers[1] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, keyCnt, 0, false), 0, false);
    readers[2] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, keyCnt * 2, 0, false), 0, false);
    readers[3] = super.addInput(new UniformRecordGenerator(keyCnt, valCnt, keyCnt * 3, 0, false), 0, false);
    DataSinkTask<Record> testTask = new DataSinkTask<>();
    super.registerFileOutputTask(testTask, MockOutputFormat.class, new File(tempTestPath).toURI().toString());
    try {
        // notify the readers that input is available; the union input gate forwards existing notifications on registerListener calls.
        for (IteratorWrappingTestSingleInputGate<?> reader : readers) {
            reader.notifyNonEmpty();
        }
        testTask.invoke();
    } catch (Exception e) {
        LOG.debug("Exception while invoking the test task.", e);
        Assert.fail("Invoke method caused exception.");
    }
    File tempTestFile = new File(this.tempTestPath);
    Assert.assertTrue("Temp output file does not exist", tempTestFile.exists());
    FileReader fr = null;
    BufferedReader br = null;
    try {
        fr = new FileReader(tempTestFile);
        br = new BufferedReader(fr);
        HashMap<Integer, HashSet<Integer>> keyValueCountMap = new HashMap<>(keyCnt);
        while (br.ready()) {
            String line = br.readLine();
            Integer key = Integer.parseInt(line.substring(0, line.indexOf("_")));
            Integer val = Integer.parseInt(line.substring(line.indexOf("_") + 1, line.length()));
            if (!keyValueCountMap.containsKey(key)) {
                keyValueCountMap.put(key, new HashSet<Integer>());
            }
            keyValueCountMap.get(key).add(val);
        }
        Assert.assertTrue("Invalid key count in out file. Expected: " + keyCnt + " Actual: " + keyValueCountMap.keySet().size(), keyValueCountMap.keySet().size() == keyCnt * 4);
        for (Integer key : keyValueCountMap.keySet()) {
            Assert.assertTrue("Invalid value count for key: " + key + ". Expected: " + valCnt + " Actual: " + keyValueCountMap.get(key).size(), keyValueCountMap.get(key).size() == valCnt);
        }
    } catch (FileNotFoundException e) {
        Assert.fail("Out file got lost...");
    } catch (IOException ioe) {
        Assert.fail("Caught IOE while reading out file");
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (Throwable t) {
            }
        }
        if (fr != null) {
            try {
                fr.close();
            } catch (Throwable t) {
            }
        }
    }
}
Also used: HashMap (java.util.HashMap), FileNotFoundException (java.io.FileNotFoundException), IOException (java.io.IOException), IteratorWrappingTestSingleInputGate (org.apache.flink.runtime.io.network.partition.consumer.IteratorWrappingTestSingleInputGate), BufferedReader (java.io.BufferedReader), Record (org.apache.flink.types.Record), FileReader (java.io.FileReader), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), File (java.io.File), HashSet (java.util.HashSet), Test (org.junit.Test)
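
The nested null checks and empty catch blocks in the finally clauses of these tests predate try-with-resources; a minimal sketch of equivalent read-and-close handling in the modern idiom (OutputFileReader and countLines are illustrative names):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Path;

public class OutputFileReader {

    // try-with-resources closes the readers automatically, even when an exception
    // is thrown, replacing the manual close calls in the finally blocks above.
    public static long countLines(Path outputFile) throws IOException {
        try (BufferedReader reader = new BufferedReader(new FileReader(outputFile.toFile()))) {
            return reader.lines().count();
        }
    }
}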

Example 54 with HashSet

Use of java.util.HashSet in project flink by apache.

From the class DataSourceTaskTest, method testDataSourceTask.

@Test
public void testDataSourceTask() {
    int keyCnt = 100;
    int valCnt = 20;
    this.outList = new ArrayList<Record>();
    try {
        InputFilePreparator.prepareInputFile(new UniformRecordGenerator(keyCnt, valCnt, false), this.tempTestPath, true);
    } catch (IOException e1) {
        Assert.fail("Unable to set-up test input file");
    }
    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    super.addOutput(this.outList);
    DataSourceTask<Record> testTask = new DataSourceTask<>();
    super.registerFileInputTask(testTask, MockInputFormat.class, new File(tempTestPath).toURI().toString(), "\n");
    try {
        testTask.invoke();
    } catch (Exception e) {
        System.err.println(e);
        Assert.fail("Invoke method caused exception.");
    }
    try {
        Field formatField = DataSourceTask.class.getDeclaredField("format");
        formatField.setAccessible(true);
        MockInputFormat inputFormat = (MockInputFormat) formatField.get(testTask);
        Assert.assertTrue("Invalid status of the input format. Expected for opened: true, Actual: " + inputFormat.opened, inputFormat.opened);
        Assert.assertTrue("Invalid status of the input format. Expected for closed: true, Actual: " + inputFormat.closed, inputFormat.closed);
    } catch (Exception e) {
        System.err.println(e);
        Assert.fail("Reflection error while trying to validate inputFormat status.");
    }
    Assert.assertTrue("Invalid output size. Expected: " + (keyCnt * valCnt) + " Actual: " + this.outList.size(), this.outList.size() == keyCnt * valCnt);
    HashMap<Integer, HashSet<Integer>> keyValueCountMap = new HashMap<>(keyCnt);
    for (Record kvp : this.outList) {
        int key = kvp.getField(0, IntValue.class).getValue();
        int val = kvp.getField(1, IntValue.class).getValue();
        if (!keyValueCountMap.containsKey(key)) {
            keyValueCountMap.put(key, new HashSet<Integer>());
        }
        keyValueCountMap.get(key).add(val);
    }
    Assert.assertTrue("Invalid key count in out file. Expected: " + keyCnt + " Actual: " + keyValueCountMap.keySet().size(), keyValueCountMap.keySet().size() == keyCnt);
    for (Integer mapKey : keyValueCountMap.keySet()) {
        Assert.assertTrue("Invalid value count for key: " + mapKey + ". Expected: " + valCnt + " Actual: " + keyValueCountMap.get(mapKey).size(), keyValueCountMap.get(mapKey).size() == valCnt);
    }
}
Also used: HashMap (java.util.HashMap), IOException (java.io.IOException), Field (java.lang.reflect.Field), Record (org.apache.flink.types.Record), UniformRecordGenerator (org.apache.flink.runtime.operators.testutils.UniformRecordGenerator), File (java.io.File), IntValue (org.apache.flink.types.IntValue), HashSet (java.util.HashSet), Test (org.junit.Test)
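
The reflective read of the private format field is a general pattern for inspecting a test subject's internal state; a minimal generic sketch, assuming the field is declared directly on the target's class (ReflectionUtil and readPrivateField are illustrative names, not Flink utilities):

import java.lang.reflect.Field;

public final class ReflectionUtil {

    // Reads a private field via reflection, as the test does for DataSourceTask's "format" field.
    // The field must be declared on the target's concrete class, not a superclass.
    @SuppressWarnings("unchecked")
    public static <T> T readPrivateField(Object target, String fieldName) throws ReflectiveOperationException {
        Field field = target.getClass().getDeclaredField(fieldName);
        field.setAccessible(true);
        return (T) field.get(target);
    }

    private ReflectionUtil() {
    }
}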

Example 55 with HashSet

Use of java.util.HashSet in project flink by apache.

From the class YarnClusterDescriptorTest, method testExplicitLibShipping.

/**
 * Tests shipping a lib folder through {@code YarnClusterDescriptor.addShipFiles}.
	 */
@Test
public void testExplicitLibShipping() throws Exception {
    AbstractYarnClusterDescriptor descriptor = new YarnClusterDescriptor();
    descriptor.setLocalJarPath(new Path("/path/to/flink.jar"));
    descriptor.setConfigurationDirectory(temporaryFolder.getRoot().getAbsolutePath());
    descriptor.setConfigurationFilePath(new Path(temporaryFolder.getRoot().getPath()));
    descriptor.setFlinkConfiguration(new Configuration());
    File libFile = temporaryFolder.newFile("libFile.jar");
    File libFolder = temporaryFolder.newFolder().getAbsoluteFile();
    Assert.assertFalse(descriptor.shipFiles.contains(libFile));
    Assert.assertFalse(descriptor.shipFiles.contains(libFolder));
    List<File> shipFiles = new ArrayList<>();
    shipFiles.add(libFile);
    shipFiles.add(libFolder);
    descriptor.addShipFiles(shipFiles);
    Assert.assertTrue(descriptor.shipFiles.contains(libFile));
    Assert.assertTrue(descriptor.shipFiles.contains(libFolder));
    // only execute part of the deployment to test for shipped files
    Set<File> effectiveShipFiles = new HashSet<>();
    descriptor.addLibFolderToShipFiles(effectiveShipFiles);
    Assert.assertEquals(0, effectiveShipFiles.size());
    Assert.assertEquals(2, descriptor.shipFiles.size());
    Assert.assertTrue(descriptor.shipFiles.contains(libFile));
    Assert.assertTrue(descriptor.shipFiles.contains(libFolder));
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.flink.configuration.Configuration), ArrayList (java.util.ArrayList), File (java.io.File), HashSet (java.util.HashSet), Test (org.junit.Test)
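
The ship-file assertions rely on plain HashSet membership over File objects; a minimal sketch of how such a set deduplicates files collected from several sources via File's path-based equals. ShipFileCollector is an illustrative name, not part of the YARN descriptor API:

import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ShipFileCollector {

    // A HashSet stores each File path at most once, so a lib that is added both
    // explicitly and via a folder scan ends up in the effective set only once.
    public static Set<File> collect(List<File> explicitFiles, List<File> libFolderFiles) {
        Set<File> effectiveShipFiles = new HashSet<>(explicitFiles);
        effectiveShipFiles.addAll(libFolderFiles);
        return effectiveShipFiles;
    }
}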

Aggregations

HashSet (java.util.HashSet): 12137
Set (java.util.Set): 2609
ArrayList (java.util.ArrayList): 2318
HashMap (java.util.HashMap): 2096
Test (org.junit.Test): 2060
Map (java.util.Map): 1198
Iterator (java.util.Iterator): 979
IOException (java.io.IOException): 934
List (java.util.List): 911
File (java.io.File): 607
LinkedHashSet (java.util.LinkedHashSet): 460
Test (org.testng.annotations.Test): 460
TreeSet (java.util.TreeSet): 271
Collection (java.util.Collection): 233
LinkedList (java.util.LinkedList): 224
Region (org.apache.geode.cache.Region): 202
SSOException (com.iplanet.sso.SSOException): 188
Date (java.util.Date): 180
LinkedHashMap (java.util.LinkedHashMap): 169
PartitionedRegion (org.apache.geode.internal.cache.PartitionedRegion): 166