
Example 1 with DefaultInputSplitAssigner

Use of org.apache.flink.api.common.io.DefaultInputSplitAssigner in the Apache Flink project (flink by apache).

From class DefaultSplitAssignerTest, method testSerialSplitAssignment:

@Test
public void testSerialSplitAssignment() {
    try {
        final int NUM_SPLITS = 50;
        // create NUM_SPLITS generic splits with ids 0 .. NUM_SPLITS-1
        Set<InputSplit> splits = new HashSet<InputSplit>();
        for (int i = 0; i < NUM_SPLITS; i++) {
            splits.add(new GenericInputSplit(i, NUM_SPLITS));
        }
        DefaultInputSplitAssigner ia = new DefaultInputSplitAssigner(splits);
        // every split must be handed out exactly once
        InputSplit is = null;
        while ((is = ia.getNextInputSplit("", 0)) != null) {
            assertTrue(splits.remove(is));
        }
        assertTrue(splits.isEmpty());
        // once all splits are assigned, the assigner returns null
        assertNull(ia.getNextInputSplit("", 0));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: DefaultInputSplitAssigner (org.apache.flink.api.common.io.DefaultInputSplitAssigner), HashSet (java.util.HashSet), Test (org.junit.Test)
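
Outside of the test harness, the same API can be exercised directly. The following is a minimal sketch and not code from the Flink sources: the class name DefaultAssignerSketch is hypothetical, and the org.apache.flink.core.io package for InputSplit and GenericInputSplit is an assumption; only the constructor and getNextInputSplit/getSplitNumber calls shown in the test above are relied on.

// Hypothetical standalone sketch; package paths for InputSplit/GenericInputSplit are assumed.
import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.io.DefaultInputSplitAssigner;
import org.apache.flink.core.io.GenericInputSplit;
import org.apache.flink.core.io.InputSplit;

public class DefaultAssignerSketch {

    public static void main(String[] args) {
        final int numSplits = 4;
        List<InputSplit> splits = new ArrayList<>();
        for (int i = 0; i < numSplits; i++) {
            splits.add(new GenericInputSplit(i, numSplits));
        }
        DefaultInputSplitAssigner assigner = new DefaultInputSplitAssigner(splits);
        // The default assigner hands out splits without locality preference,
        // which is why the test above can pass an empty host name.
        InputSplit next;
        while ((next = assigner.getNextInputSplit("any-host", 0)) != null) {
            System.out.println("assigned split " + next.getSplitNumber());
        }
        // Once all splits are handed out, further requests return null.
        System.out.println(assigner.getNextInputSplit("any-host", 0));
    }
}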

Example 2 with DefaultInputSplitAssigner

Use of org.apache.flink.api.common.io.DefaultInputSplitAssigner in the Apache Flink project (flink by apache).

From class DefaultSplitAssignerTest, method testConcurrentSplitAssignment:

@Test
public void testConcurrentSplitAssignment() {
    try {
        final int NUM_THREADS = 10;
        final int NUM_SPLITS = 500;
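        // Gauss sum of the split ids 0 .. NUM_SPLITS-1; used below to verify that each id was seen exactly once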
        final int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;
        Set<InputSplit> splits = new HashSet<InputSplit>();
        for (int i = 0; i < NUM_SPLITS; i++) {
            splits.add(new GenericInputSplit(i, NUM_SPLITS));
        }
        final DefaultInputSplitAssigner ia = new DefaultInputSplitAssigner(splits);
        final AtomicInteger splitsRetrieved = new AtomicInteger(0);
        final AtomicInteger sumOfIds = new AtomicInteger(0);
        Runnable retriever = new Runnable() {

            @Override
            public void run() {
                String host = "";
                GenericInputSplit split;
                while ((split = (GenericInputSplit) ia.getNextInputSplit(host, 0)) != null) {
                    splitsRetrieved.incrementAndGet();
                    sumOfIds.addAndGet(split.getSplitNumber());
                }
            }
        };
        // create the threads
        Thread[] threads = new Thread[NUM_THREADS];
        for (int i = 0; i < NUM_THREADS; i++) {
            threads[i] = new Thread(retriever);
            threads[i].setDaemon(true);
        }
        // launch concurrently
        for (int i = 0; i < NUM_THREADS; i++) {
            threads[i].start();
        }
        // sync
        for (int i = 0; i < NUM_THREADS; i++) {
            threads[i].join(5000);
        }
        // verify
        for (int i = 0; i < NUM_THREADS; i++) {
            if (threads[i].isAlive()) {
                fail("The concurrency test case is erroneous, the thread did not respond in time.");
            }
        }
        assertEquals(NUM_SPLITS, splitsRetrieved.get());
        assertEquals(SUM_OF_IDS, sumOfIds.get());
        // nothing left
        assertNull(ia.getNextInputSplit("", 0));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: AtomicInteger (java.util.concurrent.atomic.AtomicInteger), DefaultInputSplitAssigner (org.apache.flink.api.common.io.DefaultInputSplitAssigner), HashSet (java.util.HashSet), Test (org.junit.Test)
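
The same exactly-once property can also be checked with an ExecutorService instead of raw threads. The sketch below is an illustration under the same assumptions as the earlier sketch (hypothetical class name, assumed org.apache.flink.core.io package paths), not code from the Flink repository.

// Hypothetical sketch: each pooled task drains the assigner and reports the split ids it received.
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.apache.flink.api.common.io.DefaultInputSplitAssigner;
import org.apache.flink.core.io.GenericInputSplit;
import org.apache.flink.core.io.InputSplit;

public class ConcurrentAssignerSketch {

    public static void main(String[] args) throws Exception {
        final int numThreads = 10;
        final int numSplits = 500;
        List<InputSplit> splits = new ArrayList<>();
        for (int i = 0; i < numSplits; i++) {
            splits.add(new GenericInputSplit(i, numSplits));
        }
        final DefaultInputSplitAssigner assigner = new DefaultInputSplitAssigner(splits);

        // submit one draining task per worker thread
        ExecutorService pool = Executors.newFixedThreadPool(numThreads);
        List<Future<Set<Integer>>> results = new ArrayList<>();
        for (int t = 0; t < numThreads; t++) {
            results.add(pool.submit(() -> {
                Set<Integer> seen = new HashSet<>();
                InputSplit s;
                while ((s = assigner.getNextInputSplit("", 0)) != null) {
                    seen.add(s.getSplitNumber());
                }
                return seen;
            }));
        }
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS);

        // across all workers, every split id should appear exactly once
        int total = 0;
        Set<Integer> distinct = new HashSet<>();
        for (Future<Set<Integer>> f : results) {
            Set<Integer> part = f.get();
            total += part.size();
            distinct.addAll(part);
        }
        System.out.println(total == numSplits && distinct.size() == numSplits);
    }
}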

Aggregations

HashSet (java.util.HashSet): 2 usages
DefaultInputSplitAssigner (org.apache.flink.api.common.io.DefaultInputSplitAssigner): 2 usages
Test (org.junit.Test): 2 usages
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 1 usage