Search in sources :

Example 1 with TupleMatch

Use of org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch in the Apache Flink project.

From the class ReusingReOpenableHashTableITCase, method doTest:

/**
 * Runs a reusing build-first re-openable hash join once over both inputs, then
 * re-opens the probe side {@code NUM_PROBES} more times, verifying after every
 * pass that all expected matches were produced. The matcher removes each match
 * from its expectation map as it is seen, so every per-key collection must end
 * up empty.
 *
 * @param buildInput iterator over the build-side tuples
 * @param probeInput iterator over the probe-side tuples (replayed for every probe pass)
 * @param bgen generator backing the build input, reset before the join
 * @param pgen generator backing the probe input, reset before every pass
 * @throws Exception if the join iterator fails
 */
protected void doTest(TestData.TupleGeneratorIterator buildInput, TestData.TupleGeneratorIterator probeInput, TupleGenerator bgen, TupleGenerator pgen) throws Exception {
    // collect expected data
    final Map<Integer, Collection<TupleMatch>> expectedFirstMatchesMap = joinTuples(collectTupleData(buildInput), collectTupleData(probeInput));
    final List<Map<Integer, Collection<TupleMatch>>> expectedNMatchesMapList = new ArrayList<>(NUM_PROBES);
    final FlatJoinFunction[] nMatcher = new TupleMatchRemovingJoin[NUM_PROBES];
    for (int i = 0; i < NUM_PROBES; i++) {
        // each re-opened probe pass consumes its own copy of the expectations
        final Map<Integer, Collection<TupleMatch>> expectedMatches = deepCopy(expectedFirstMatchesMap);
        expectedNMatchesMapList.add(expectedMatches);
        nMatcher[i] = new TupleMatchRemovingJoin(expectedMatches);
    }
    final FlatJoinFunction firstMatcher = new TupleMatchRemovingJoin(expectedFirstMatchesMap);
    final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
    // reset the generators so the iterators replay the same data
    bgen.reset();
    pgen.reset();
    buildInput.reset();
    probeInput.reset();
    // compare with iterator values
    ReusingBuildFirstReOpenableHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new ReusingBuildFirstReOpenableHashJoinIterator<>(buildInput, probeInput, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, false, false, true);
    iterator.open();
    // close in a finally block so a failed assertion cannot leak the iterator's memory
    try {
        // do first join with both inputs
        while (iterator.callWithNextKey(firstMatcher, collector)) {
            // drain all matches
        }
        // assert that each expected match was seen for the first input
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedFirstMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
        for (int i = 0; i < NUM_PROBES; i++) {
            pgen.reset();
            probeInput.reset();
            // prepare the re-opened probe side ..
            iterator.reopenProbe(probeInput);
            // .. and run the next probe pass
            while (iterator.callWithNextKey(nMatcher[i], collector)) {
                // drain all matches
            }
            // assert that each expected match was seen for this probe pass
            for (Entry<Integer, Collection<TupleMatch>> entry : expectedNMatchesMapList.get(i).entrySet()) {
                if (!entry.getValue().isEmpty()) {
                    Assert.fail("Collection for key " + entry.getKey() + " is not empty");
                }
            }
        }
    } finally {
        iterator.close();
    }
}
Also used : ArrayList(java.util.ArrayList) FlatJoinFunction(org.apache.flink.api.common.functions.FlatJoinFunction) DiscardingOutputCollector(org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector) TupleMatch(org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch) TupleMatchRemovingJoin(org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatchRemovingJoin) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Collection(java.util.Collection) Map(java.util.Map)

Example 2 with TupleMatch

Use of org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch in the Apache Flink project.

From the class NonReusingReOpenableHashTableITCase, method doTest:

/**
 * Runs a non-reusing build-first re-openable hash join once over both inputs,
 * then re-opens the probe side {@code NUM_PROBES} more times, verifying after
 * every pass that all expected matches were produced. The matcher removes each
 * match from its expectation map as it is seen, so every per-key collection
 * must end up empty.
 *
 * @param buildInput iterator over the build-side tuples
 * @param probeInput iterator over the probe-side tuples (replayed for every probe pass)
 * @param bgen generator backing the build input, reset before the join
 * @param pgen generator backing the probe input, reset before every pass
 * @throws Exception if the join iterator fails
 */
protected void doTest(TestData.TupleGeneratorIterator buildInput, TestData.TupleGeneratorIterator probeInput, TupleGenerator bgen, TupleGenerator pgen) throws Exception {
    // collect expected data
    final Map<Integer, Collection<TupleMatch>> expectedFirstMatchesMap = joinTuples(collectTupleData(buildInput), collectTupleData(probeInput));
    final List<Map<Integer, Collection<TupleMatch>>> expectedNMatchesMapList = new ArrayList<>(NUM_PROBES);
    final FlatJoinFunction[] nMatcher = new TupleMatchRemovingJoin[NUM_PROBES];
    for (int i = 0; i < NUM_PROBES; i++) {
        // each re-opened probe pass consumes its own copy of the expectations
        final Map<Integer, Collection<TupleMatch>> expectedMatches = deepCopy(expectedFirstMatchesMap);
        expectedNMatchesMapList.add(expectedMatches);
        nMatcher[i] = new TupleMatchRemovingJoin(expectedMatches);
    }
    final FlatJoinFunction firstMatcher = new TupleMatchRemovingJoin(expectedFirstMatchesMap);
    final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
    // reset the generators so the iterators replay the same data
    bgen.reset();
    pgen.reset();
    buildInput.reset();
    probeInput.reset();
    // compare with iterator values
    NonReusingBuildFirstReOpenableHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new NonReusingBuildFirstReOpenableHashJoinIterator<>(buildInput, probeInput, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, false, false, true);
    iterator.open();
    // close in a finally block so a failed assertion cannot leak the iterator's memory
    try {
        // do first join with both inputs
        while (iterator.callWithNextKey(firstMatcher, collector)) {
            // drain all matches
        }
        // assert that each expected match was seen for the first input
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedFirstMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
        for (int i = 0; i < NUM_PROBES; i++) {
            pgen.reset();
            probeInput.reset();
            // prepare the re-opened probe side ..
            iterator.reopenProbe(probeInput);
            // .. and run the next probe pass
            while (iterator.callWithNextKey(nMatcher[i], collector)) {
                // drain all matches
            }
            // assert that each expected match was seen for this probe pass
            for (Entry<Integer, Collection<TupleMatch>> entry : expectedNMatchesMapList.get(i).entrySet()) {
                if (!entry.getValue().isEmpty()) {
                    Assert.fail("Collection for key " + entry.getKey() + " is not empty");
                }
            }
        }
    } finally {
        iterator.close();
    }
}
Also used : ArrayList(java.util.ArrayList) FlatJoinFunction(org.apache.flink.api.common.functions.FlatJoinFunction) DiscardingOutputCollector(org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector) TupleMatch(org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch) TupleMatchRemovingJoin(org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatchRemovingJoin) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Collection(java.util.Collection) Map(java.util.Map)

Example 3 with TupleMatch

Use of org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch in the Apache Flink project.

From the class ReOpenableHashTableTestBase, method deepCopy:

/**
 * Creates a per-entry copy of the expected-matches map: the outer map and each
 * per-key collection are new objects, while the {@code TupleMatch} elements
 * themselves are shared. This lets each probe pass consume (empty out) its own
 * copy of the expectations without affecting the others.
 *
 * @param expectedSecondMatchesMap the expectation map to copy
 * @return a new map with a freshly copied collection for every key
 */
static Map<Integer, Collection<TupleMatch>> deepCopy(Map<Integer, Collection<TupleMatch>> expectedSecondMatchesMap) {
    Map<Integer, Collection<TupleMatch>> copy = new HashMap<>(expectedSecondMatchesMap.size());
    for (Map.Entry<Integer, Collection<TupleMatch>> entry : expectedSecondMatchesMap.entrySet()) {
        // ArrayList's copy constructor replaces the manual element-by-element loop
        copy.put(entry.getKey(), new ArrayList<>(entry.getValue()));
    }
    return copy;
}
Also used : TupleMatch(org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch)

Aggregations

TupleMatch (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch)3 ArrayList (java.util.ArrayList)2 Collection (java.util.Collection)2 Map (java.util.Map)2 FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction)2 Tuple2 (org.apache.flink.api.java.tuple.Tuple2)2 TupleMatchRemovingJoin (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatchRemovingJoin)2 DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector)2