
Example 11 with DiscardingOutputCollector

Use of org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector in project flink by apache.

From class NonReusingReOpenableHashTableITCase, method doTest:

protected void doTest(TestData.TupleGeneratorIterator buildInput, TestData.TupleGeneratorIterator probeInput, TupleGenerator bgen, TupleGenerator pgen) throws Exception {
    // collect expected data
    final Map<Integer, Collection<TupleMatch>> expectedFirstMatchesMap = joinTuples(collectTupleData(buildInput), collectTupleData(probeInput));
    final List<Map<Integer, Collection<TupleMatch>>> expectedNMatchesMapList = new ArrayList<>(NUM_PROBES);
    final FlatJoinFunction[] nMatcher = new TupleMatchRemovingJoin[NUM_PROBES];
    for (int i = 0; i < NUM_PROBES; i++) {
        Map<Integer, Collection<TupleMatch>> tmp;
        expectedNMatchesMapList.add(tmp = deepCopy(expectedFirstMatchesMap));
        nMatcher[i] = new TupleMatchRemovingJoin(tmp);
    }
    final FlatJoinFunction firstMatcher = new TupleMatchRemovingJoin(expectedFirstMatchesMap);
    final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
    // reset the generators
    bgen.reset();
    pgen.reset();
    buildInput.reset();
    probeInput.reset();
    // compare with iterator values
    NonReusingBuildFirstReOpenableHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new NonReusingBuildFirstReOpenableHashJoinIterator<>(buildInput, probeInput, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, false, false, true);
    iterator.open();
    // do first join with both inputs
    while (iterator.callWithNextKey(firstMatcher, collector)) ;
    // assert that each expected match was seen for the first input
    for (Entry<Integer, Collection<TupleMatch>> entry : expectedFirstMatchesMap.entrySet()) {
        if (!entry.getValue().isEmpty()) {
            Assert.fail("Collection for key " + entry.getKey() + " is not empty");
        }
    }
    for (int i = 0; i < NUM_PROBES; i++) {
        pgen.reset();
        probeInput.reset();
        // prepare ..
        iterator.reopenProbe(probeInput);
        // .. and do second join
        while (iterator.callWithNextKey(nMatcher[i], collector)) ;
        // assert that each expected match was seen for the second input
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedNMatchesMapList.get(i).entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    }
    iterator.close();
}
Also used: ArrayList (java.util.ArrayList), FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), TupleMatch (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch), TupleMatchRemovingJoin (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatchRemovingJoin), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), Map (java.util.Map)
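In all of these tests the join output itself is irrelevant; only the bookkeeping done by the match-removing join function matters, so the output is routed into a DiscardingOutputCollector. Conceptually this is just a no-op implementation of Flink's Collector interface. The following is a minimal sketch of such a collector (the real org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector may differ in detail):

import org.apache.flink.util.Collector;

/**
 * Minimal sketch of a collector that silently drops every record it receives.
 * Illustrative only; not the actual Flink test-utility class.
 */
public class NoOpCollector<T> implements Collector<T> {

    @Override
    public void collect(T record) {
        // intentionally discard the record
    }

    @Override
    public void close() {
        // nothing to clean up
    }
}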

Example 12 with DiscardingOutputCollector

Use of org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector in project flink by apache.

From class ReusingHashJoinIteratorITCase, method testBuildFirstAndFullOuterJoin:

@Test
public void testBuildFirstAndFullOuterJoin() {
    try {
        TestData.TupleGenerator generator1 = new TestData.TupleGenerator(SEED1, 500, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 1000, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator input1 = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator input2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = fullOuterJoinTuples(collectTupleData(input1), collectTupleData(input2));
        final FlatJoinFunction matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        // reset the generators
        generator1.reset();
        generator2.reset();
        input1.reset();
        input2.reset();
        // compare with iterator values
        ReusingBuildFirstHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new ReusingBuildFirstHashJoinIterator<>(input1, input2, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, true, true, false);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used: TestData (org.apache.flink.runtime.operators.testutils.TestData), FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction), NullKeyFieldException (org.apache.flink.types.NullKeyFieldException), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), Test (org.junit.Test)
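The actual verification happens inside TupleMatchRemovingJoin: each pair the join produces is looked up in the pre-computed expected-matches map and removed, so any non-empty collection left at the end points to a missing match, and an unexpected pair fails the test immediately. The sketch below illustrates that pattern for a plain inner join; the layout of the expected map and the class name are assumptions for illustration, and the real class additionally handles null sides for the outer-join variants.

import java.util.Collection;
import java.util.Map;

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Sketch of a join function that checks each produced pair against an
 * expected-matches map and removes it, so leftover entries signal missing matches.
 * Illustrative only; the real TupleMatchRemovingJoin lives in the Flink test code.
 */
class MatchRemovingJoinSketch
        implements FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> {

    // hypothetical layout: expected probe-side values per key
    private final Map<Integer, Collection<String>> expected;

    MatchRemovingJoinSketch(Map<Integer, Collection<String>> expected) {
        this.expected = expected;
    }

    @Override
    public void join(Tuple2<Integer, String> first, Tuple2<Integer, String> second,
                     Collector<Tuple2<Integer, String>> out) {
        Collection<String> matchesForKey = expected.get(first.f0);
        if (matchesForKey == null || !matchesForKey.remove(second.f1)) {
            throw new RuntimeException("Unexpected match for key " + first.f0);
        }
        // the emitted record is thrown away by the test's discarding collector
        out.collect(first);
    }
}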

Example 13 with DiscardingOutputCollector

Use of org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector in project flink by apache.

From class ReusingHashJoinIteratorITCase, method testBuildFirstWithHighNumberOfCommonKeys:

@Test
public void testBuildFirstWithHighNumberOfCommonKeys() {
    // the size of the left and right inputs
    final int INPUT_1_SIZE = 200;
    final int INPUT_2_SIZE = 100;
    final int INPUT_1_DUPLICATES = 10;
    final int INPUT_2_DUPLICATES = 2000;
    final int DUPLICATE_KEY = 13;
    try {
        TestData.TupleGenerator generator1 = new TestData.TupleGenerator(SEED1, 500, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator gen1Iter = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator gen2Iter = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        final TestData.TupleConstantValueIterator const1Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "LEFT String for Duplicate Keys", INPUT_1_DUPLICATES);
        final TestData.TupleConstantValueIterator const2Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "RIGHT String for Duplicate Keys", INPUT_2_DUPLICATES);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        MutableObjectIterator<Tuple2<Integer, String>> input1 = new UnionIterator<>(inList1);
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new UnionIterator<>(inList2);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = joinTuples(collectTupleData(input1), collectTupleData(input2));
        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();
        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        input1 = new UnionIterator<>(inList1);
        input2 = new UnionIterator<>(inList2);
        final FlatJoinFunction matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        ReusingBuildFirstHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new ReusingBuildFirstHashJoinIterator<>(input1, input2, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, false, false, true);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used: TestData (org.apache.flink.runtime.operators.testutils.TestData), MutableObjectIterator (org.apache.flink.util.MutableObjectIterator), UnionIterator (org.apache.flink.runtime.operators.testutils.UnionIterator), ArrayList (java.util.ArrayList), FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction), NullKeyFieldException (org.apache.flink.types.NullKeyFieldException), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), Test (org.junit.Test)
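collectTupleData and joinTuples are helpers from the test base class and are not shown in this excerpt. Roughly, collecting the data amounts to draining a MutableObjectIterator into a key-to-values map, which the join helper then combines per key into the expected matches. The sketch below shows one way such a collection step could look; it is an assumption for illustration, not the actual Flink helper:

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.MutableObjectIterator;

final class TupleDataCollector {

    /**
     * Drains the iterator and groups the String values by their Integer key.
     * Sketch of what a collectTupleData-style helper could do; the real
     * helper in the Flink tests may differ.
     */
    static Map<Integer, Collection<String>> collect(
            MutableObjectIterator<Tuple2<Integer, String>> input) throws IOException {

        Map<Integer, Collection<String>> byKey = new HashMap<>();
        Tuple2<Integer, String> record;
        while ((record = input.next()) != null) {
            byKey.computeIfAbsent(record.f0, k -> new ArrayList<>()).add(record.f1);
        }
        return byKey;
    }
}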

Example 14 with DiscardingOutputCollector

Use of org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector in project flink by apache.

From class ReusingHashJoinIteratorITCase, method testBuildFirstWithMixedDataTypes:

@Test
public void testBuildFirstWithMixedDataTypes() {
    try {
        MutableObjectIterator<IntPair> input1 = new UniformIntPairGenerator(500, 40, false);
        final TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator input2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        // collect expected data
        final Map<Integer, Collection<TupleIntPairMatch>> expectedMatchesMap = joinIntPairs(collectIntPairData(input1), collectTupleData(input2));
        final FlatJoinFunction<IntPair, Tuple2<Integer, String>, Tuple2<Integer, String>> matcher = new TupleIntPairMatchRemovingMatcher(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        // reset the generators
        input1 = new UniformIntPairGenerator(500, 40, false);
        generator2.reset();
        input2.reset();
        // compare with iterator values
        ReusingBuildSecondHashJoinIterator<IntPair, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new ReusingBuildSecondHashJoinIterator<>(input1, input2, this.pairSerializer, this.pairComparator, this.recordSerializer, this.record2Comparator, this.pairRecordPairComparator, this.memoryManager, this.ioManager, this.parentTask, 1.0, false, false, true);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleIntPairMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used: TestData (org.apache.flink.runtime.operators.testutils.TestData), IntPair (org.apache.flink.runtime.operators.testutils.types.IntPair), NullKeyFieldException (org.apache.flink.types.NullKeyFieldException), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), UniformIntPairGenerator (org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator), Test (org.junit.Test)
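All of the examples repeat the same open / drain / close / verify cycle around callWithNextKey. A hypothetical helper over Flink's JoinTaskIterator interface, which these join iterators implement, could factor that pattern out as sketched below (the assertion mirrors the checks in the tests above; the helper itself is not part of the Flink code):

import java.util.Collection;
import java.util.Map;

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.runtime.operators.util.JoinTaskIterator;
import org.apache.flink.util.Collector;
import org.junit.Assert;

final class JoinTestDriver {

    /**
     * Opens the join iterator, drains it through the given match function,
     * closes it, and then asserts that every expected match was consumed.
     * Hypothetical helper that mirrors the pattern used in the tests above.
     */
    static <V1, V2, O, M> void runAndVerify(
            JoinTaskIterator<V1, V2, O> iterator,
            FlatJoinFunction<V1, V2, O> matcher,
            Collector<O> collector,
            Map<Integer, Collection<M>> expectedMatches) throws Exception {

        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) {
            // the matcher removes each produced pair from expectedMatches
        }
        iterator.close();

        for (Map.Entry<Integer, Collection<M>> entry : expectedMatches.entrySet()) {
            Assert.assertTrue(
                    "Collection for key " + entry.getKey() + " is not empty",
                    entry.getValue().isEmpty());
        }
    }
}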

Example 15 with DiscardingOutputCollector

Use of org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector in project flink by apache.

From class ReusingSortMergeInnerJoinIteratorITCase, method testMerge:

@Test
public void testMerge() {
    try {
        final TupleGenerator generator1 = new TupleGenerator(SEED1, 500, 4096, KeyMode.SORTED, ValueMode.RANDOM_LENGTH);
        final TupleGenerator generator2 = new TupleGenerator(SEED2, 500, 2048, KeyMode.SORTED, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator input1 = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator input2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        // collect expected data
        final Map<Integer, Collection<Match>> expectedMatchesMap = matchValues(collectData(input1), collectData(input2));
        final FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> joinFunction = new MatchRemovingJoiner(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<Tuple2<Integer, String>>();
        // reset the generators
        generator1.reset();
        generator2.reset();
        input1.reset();
        input2.reset();
        // compare with iterator values
        ReusingMergeInnerJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new ReusingMergeInnerJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>>(input1, input2, this.serializer1, this.comparator1, this.serializer2, this.comparator2, this.pairComparator, this.memoryManager, this.ioManager, PAGES_FOR_BNLJN, this.parentTask);
        iterator.open();
        while (iterator.callWithNextKey(joinFunction, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<Match>> entry : expectedMatchesMap.entrySet()) {
            Assert.assertTrue("Collection for key " + entry.getKey() + " is not empty", entry.getValue().isEmpty());
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used: TestData (org.apache.flink.runtime.operators.testutils.TestData), TupleGenerator (org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator), MatchRemovingJoiner (org.apache.flink.runtime.operators.testutils.MatchRemovingJoiner), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), Test (org.junit.Test)

Aggregations

Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 34
DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector): 34
Collection (java.util.Collection): 31
TestData (org.apache.flink.runtime.operators.testutils.TestData): 31
Test (org.junit.Test): 31
NullKeyFieldException (org.apache.flink.types.NullKeyFieldException): 24
TupleGenerator (org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator): 17
FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction): 15
ArrayList (java.util.ArrayList): 9
MutableObjectIterator (org.apache.flink.util.MutableObjectIterator): 7
MatchRemovingJoiner (org.apache.flink.runtime.operators.testutils.MatchRemovingJoiner): 5
UniformIntPairGenerator (org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator): 4
UnionIterator (org.apache.flink.runtime.operators.testutils.UnionIterator): 4
IntPair (org.apache.flink.runtime.operators.testutils.types.IntPair): 4
Map (java.util.Map): 2
TupleMatch (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch): 2
TupleMatchRemovingJoin (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatchRemovingJoin): 2
GenericPairComparator (org.apache.flink.api.common.typeutils.GenericPairComparator): 1
IntComparator (org.apache.flink.api.common.typeutils.base.IntComparator): 1
TupleComparator (org.apache.flink.api.java.typeutils.runtime.TupleComparator): 1