
Example 11 with FlatJoinFunction

Use of org.apache.flink.api.common.functions.FlatJoinFunction in project flink by apache.

From the class InnerJoinOperatorBaseTest, method testJoinPlain:

@Test
public void testJoinPlain() {
    final FlatJoinFunction<String, String, Integer> joiner = new FlatJoinFunction<String, String, Integer>() {

        @Override
        public void join(String first, String second, Collector<Integer> out) throws Exception {
            out.collect(first.length());
            out.collect(second.length());
        }
    };
    @SuppressWarnings({ "rawtypes", "unchecked" }) InnerJoinOperatorBase<String, String, Integer, FlatJoinFunction<String, String, Integer>> base = new InnerJoinOperatorBase(joiner, new BinaryOperatorInformation(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO), new int[0], new int[0], "TestJoiner");
    List<String> inputData1 = new ArrayList<String>(Arrays.asList("foo", "bar", "foobar"));
    List<String> inputData2 = new ArrayList<String>(Arrays.asList("foobar", "foo"));
    List<Integer> expected = new ArrayList<Integer>(Arrays.asList(3, 3, 6, 6));
    try {
        ExecutionConfig executionConfig = new ExecutionConfig();
        executionConfig.disableObjectReuse();
        List<Integer> resultSafe = base.executeOnCollections(inputData1, inputData2, null, executionConfig);
        executionConfig.enableObjectReuse();
        List<Integer> resultRegular = base.executeOnCollections(inputData1, inputData2, null, executionConfig);
        assertEquals(expected, resultSafe);
        assertEquals(expected, resultRegular);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used : ArrayList(java.util.ArrayList) FlatJoinFunction(org.apache.flink.api.common.functions.FlatJoinFunction) RichFlatJoinFunction(org.apache.flink.api.common.functions.RichFlatJoinFunction) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) Collector(org.apache.flink.util.Collector) BinaryOperatorInformation(org.apache.flink.api.common.operators.BinaryOperatorInformation) Test(org.junit.Test)
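
Outside of a unit test like the one above, a FlatJoinFunction is normally attached to a DataSet join via where(...).equalTo(...).with(...). The following is a minimal, self-contained sketch of that wiring; the data sets, key positions, and class name are invented for illustration and are not taken from the test above.

// Hypothetical sketch: wiring a FlatJoinFunction into the DataSet API.
import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class FlatJoinWiringSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<Integer, String>> left =
                env.fromElements(new Tuple2<>(1, "foo"), new Tuple2<>(2, "foobar"));
        DataSet<Tuple2<Integer, String>> right =
                env.fromElements(new Tuple2<>(1, "bar"), new Tuple2<>(2, "baz"));

        // Join on the first tuple field and emit both string lengths per match,
        // mirroring the "emit both lengths" idea of the test above.
        left.join(right)
            .where(0)
            .equalTo(0)
            .with(new FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Integer>() {

                @Override
                public void join(Tuple2<Integer, String> first, Tuple2<Integer, String> second, Collector<Integer> out) {
                    out.collect(first.f1.length());
                    out.collect(second.f1.length());
                }
            })
            .print();
    }
}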

Example 12 with FlatJoinFunction

Use of org.apache.flink.api.common.functions.FlatJoinFunction in project flink by apache.

From the class InnerJoinOperatorBaseTest, method testTupleBaseJoiner:

@Test
public void testTupleBaseJoiner() {
    final FlatJoinFunction<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>> joiner = new FlatJoinFunction<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>>() {

        @Override
        public void join(Tuple3<String, Double, Integer> first, Tuple2<Integer, String> second, Collector<Tuple2<Double, String>> out) {
            assertEquals(first.f0, second.f1);
            assertEquals(first.f2, second.f0);
            out.collect(new Tuple2<>(first.f1, second.f0.toString()));
        }
    };
    final TupleTypeInfo<Tuple3<String, Double, Integer>> leftTypeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Double.class, Integer.class);
    final TupleTypeInfo<Tuple2<Integer, String>> rightTypeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, String.class);
    final TupleTypeInfo<Tuple2<Double, String>> outTypeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Double.class, String.class);
    final int[] leftKeys = new int[] { 0, 2 };
    final int[] rightKeys = new int[] { 1, 0 };
    final String taskName = "Collection based tuple joiner";
    final BinaryOperatorInformation<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>> binaryOpInfo =
            new BinaryOperatorInformation<>(leftTypeInfo, rightTypeInfo, outTypeInfo);
    final InnerJoinOperatorBase<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>, FlatJoinFunction<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>>> base =
            new InnerJoinOperatorBase<>(joiner, binaryOpInfo, leftKeys, rightKeys, taskName);
    final List<Tuple3<String, Double, Integer>> inputData1 = new ArrayList<Tuple3<String, Double, Integer>>(Arrays.asList(new Tuple3<>("foo", 42.0, 1), new Tuple3<>("bar", 1.0, 2), new Tuple3<>("bar", 2.0, 3), new Tuple3<>("foobar", 3.0, 4), new Tuple3<>("bar", 3.0, 3)));
    final List<Tuple2<Integer, String>> inputData2 = new ArrayList<Tuple2<Integer, String>>(Arrays.asList(new Tuple2<>(3, "bar"), new Tuple2<>(4, "foobar"), new Tuple2<>(2, "foo")));
    final Set<Tuple2<Double, String>> expected = new HashSet<Tuple2<Double, String>>(Arrays.asList(new Tuple2<>(2.0, "3"), new Tuple2<>(3.0, "3"), new Tuple2<>(3.0, "4")));
    try {
        final TaskInfo taskInfo = new TaskInfo("op", 1, 0, 1, 0);
        ExecutionConfig executionConfig = new ExecutionConfig();
        executionConfig.disableObjectReuse();
        List<Tuple2<Double, String>> resultSafe = base.executeOnCollections(inputData1, inputData2,
                new RuntimeUDFContext(taskInfo, null, executionConfig,
                        new HashMap<String, Future<Path>>(),
                        new HashMap<String, Accumulator<?, ?>>(),
                        new UnregisteredMetricsGroup()),
                executionConfig);
        executionConfig.enableObjectReuse();
        List<Tuple2<Double, String>> resultRegular = base.executeOnCollections(inputData1, inputData2,
                new RuntimeUDFContext(taskInfo, null, executionConfig,
                        new HashMap<String, Future<Path>>(),
                        new HashMap<String, Accumulator<?, ?>>(),
                        new UnregisteredMetricsGroup()),
                executionConfig);
        assertEquals(expected, new HashSet<>(resultSafe));
        assertEquals(expected, new HashSet<>(resultRegular));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used : UnregisteredMetricsGroup(org.apache.flink.metrics.groups.UnregisteredMetricsGroup) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) FlatJoinFunction(org.apache.flink.api.common.functions.FlatJoinFunction) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) TaskInfo(org.apache.flink.api.common.TaskInfo) Collector(org.apache.flink.util.Collector) RuntimeUDFContext(org.apache.flink.api.common.functions.util.RuntimeUDFContext) BinaryOperatorInformation(org.apache.flink.api.common.operators.BinaryOperatorInformation) HashSet(java.util.HashSet) Path(org.apache.flink.core.fs.Path) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Test(org.junit.Test)
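
The composite-key pairing used above, left positions {0, 2} matched against right positions {1, 0}, corresponds to where(0, 2).equalTo(1, 0) in the DataSet API. Below is a minimal, hypothetical sketch of that equivalent; the data values and class name are invented for illustration.

// Hypothetical sketch: the composite keys of the test above expressed with
// where(0, 2).equalTo(1, 0); not taken from the Flink code base.
import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.util.Collector;

public class CompositeKeyJoinSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple3<String, Double, Integer>> left = env.fromElements(
                new Tuple3<>("bar", 2.0, 3), new Tuple3<>("foobar", 3.0, 4));
        DataSet<Tuple2<Integer, String>> right = env.fromElements(
                new Tuple2<>(3, "bar"), new Tuple2<>(4, "foobar"));

        left.join(right)
            // left key is (f0, f2), right key is (f1, f0)
            .where(0, 2)
            .equalTo(1, 0)
            .with(new FlatJoinFunction<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>>() {

                @Override
                public void join(Tuple3<String, Double, Integer> first, Tuple2<Integer, String> second, Collector<Tuple2<Double, String>> out) {
                    out.collect(new Tuple2<>(first.f1, second.f0.toString()));
                }
            })
            .print();
    }
}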

Example 13 with FlatJoinFunction

Use of org.apache.flink.api.common.functions.FlatJoinFunction in project flink by apache.

From the class ReusingHashJoinIteratorITCase, method testBuildSecond:

@Test
public void testBuildSecond() {
    try {
        TestData.TupleGenerator generator1 = new TestData.TupleGenerator(SEED1, 500, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator input1 = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator input2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = joinTuples(collectTupleData(input1), collectTupleData(input2));
        final FlatJoinFunction matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        // reset the generators
        generator1.reset();
        generator2.reset();
        input1.reset();
        input2.reset();
        // compare with iterator values
        ReusingBuildSecondHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator =
                new ReusingBuildSecondHashJoinIterator<>(
                        input1, input2,
                        this.recordSerializer, this.record1Comparator,
                        this.recordSerializer, this.record2Comparator,
                        this.recordPairComparator,
                        this.memoryManager, ioManager, this.parentTask,
                        1.0, false, false, true);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FlatJoinFunction(org.apache.flink.api.common.functions.FlatJoinFunction) NullKeyFieldException(org.apache.flink.types.NullKeyFieldException) DiscardingOutputCollector(org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Collection(java.util.Collection) Test(org.junit.Test)
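
The matcher used here, TupleMatchRemovingJoin, is a test utility whose implementation is not shown on this page. The idea is that each pair the join produces is looked up in the map of expected matches and removed from it, so leftover entries after the join indicate missing output; for the outer-join variants in Examples 14 and 15, one of the two inputs may be null. Below is a minimal sketch of that verification pattern, with an assumed map shape that does not match the actual TupleMatch utility.

// Sketch only: a match-removing FlatJoinFunction for verifying join output.
// The map shape (Collection<String> of "left/right" strings) is an assumption
// made for this sketch; the real test utility uses its own TupleMatch type.
import java.util.Collection;
import java.util.Map;
import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.junit.Assert;

class MatchRemovingJoinSketch
        implements FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> {

    // key -> remaining "leftValue/rightValue" pairs the join is still expected to emit
    private final Map<Integer, Collection<String>> expectedMatches;

    MatchRemovingJoinSketch(Map<Integer, Collection<String>> expectedMatches) {
        this.expectedMatches = expectedMatches;
    }

    @Override
    public void join(Tuple2<Integer, String> first, Tuple2<Integer, String> second, Collector<Tuple2<Integer, String>> out) {
        // with outer joins, exactly one side may be null for unmatched records
        Integer key = first != null ? first.f0 : second.f0;
        String match = (first == null ? "null" : first.f1) + "/" + (second == null ? "null" : second.f1);

        Collection<String> remaining = expectedMatches.get(key);
        Assert.assertNotNull("Unexpected key " + key, remaining);
        Assert.assertTrue("Unexpected match " + match + " for key " + key, remaining.remove(match));

        out.collect(first != null ? first : second);
    }
}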

Example 14 with FlatJoinFunction

Use of org.apache.flink.api.common.functions.FlatJoinFunction in project flink by apache.

From the class ReusingHashJoinIteratorITCase, method testBuildSecondAndFullOuterJoin:

@Test
public void testBuildSecondAndFullOuterJoin() {
    try {
        TestData.TupleGenerator generator1 = new TestData.TupleGenerator(SEED1, 1000, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator input1 = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator input2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = fullOuterJoinTuples(collectTupleData(input1), collectTupleData(input2));
        final FlatJoinFunction matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        // reset the generators
        generator1.reset();
        generator2.reset();
        input1.reset();
        input2.reset();
        // compare with iterator values
        ReusingBuildSecondHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator =
                new ReusingBuildSecondHashJoinIterator<>(
                        input1, input2,
                        this.recordSerializer, this.record1Comparator,
                        this.recordSerializer, this.record2Comparator,
                        this.recordPairComparator,
                        this.memoryManager, ioManager, this.parentTask,
                        1.0, true, true, false);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FlatJoinFunction(org.apache.flink.api.common.functions.FlatJoinFunction) NullKeyFieldException(org.apache.flink.types.NullKeyFieldException) DiscardingOutputCollector(org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Collection(java.util.Collection) Test(org.junit.Test)

Example 15 with FlatJoinFunction

Use of org.apache.flink.api.common.functions.FlatJoinFunction in project flink by apache.

From the class ReusingHashJoinIteratorITCase, method testBuildFirstAndBuildSideOuterJoin:

@Test
public void testBuildFirstAndBuildSideOuterJoin() {
    try {
        TestData.TupleGenerator generator1 = new TestData.TupleGenerator(SEED1, 500, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 1000, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator input1 = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator input2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = leftOuterJoinTuples(collectTupleData(input1), collectTupleData(input2));
        final FlatJoinFunction matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        // reset the generators
        generator1.reset();
        generator2.reset();
        input1.reset();
        input2.reset();
        // compare with iterator values
        ReusingBuildFirstHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator =
                new ReusingBuildFirstHashJoinIterator<>(
                        input1, input2,
                        this.recordSerializer, this.record1Comparator,
                        this.recordSerializer, this.record2Comparator,
                        this.recordPairComparator,
                        this.memoryManager, ioManager, this.parentTask,
                        1.0, false, true, false);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used : TestData(org.apache.flink.runtime.operators.testutils.TestData) FlatJoinFunction(org.apache.flink.api.common.functions.FlatJoinFunction) NullKeyFieldException(org.apache.flink.types.NullKeyFieldException) DiscardingOutputCollector(org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Collection(java.util.Collection) Test(org.junit.Test)
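
Examples 13 through 15 all drive the hash join with the same open / callWithNextKey / close idiom; only the iterator variant (build-first versus build-second) and its trailing boolean flags differ. A small helper sketch of that idiom follows; the helper name is invented, and it assumes the iterators implement org.apache.flink.runtime.operators.util.JoinTaskIterator.

// Sketch only: the shared driving idiom of Examples 13-15 factored into a helper.
import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.runtime.operators.util.JoinTaskIterator;
import org.apache.flink.util.Collector;

final class JoinDriverSketch {

    static <V1, V2, O> void drain(
            JoinTaskIterator<V1, V2, O> iterator,
            FlatJoinFunction<V1, V2, O> joinFunction,
            Collector<O> collector) throws Exception {
        iterator.open();
        try {
            // callWithNextKey(...) joins all records sharing the next key and
            // returns false once both inputs are exhausted
            while (iterator.callWithNextKey(joinFunction, collector)) {
                // all output for the current key has already gone to the collector
            }
        } finally {
            iterator.close();
        }
    }
}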

Aggregations

FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction): 18 uses
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 16 uses
Test (org.junit.Test): 16 uses
DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector): 15 uses
TestData (org.apache.flink.runtime.operators.testutils.TestData): 13 uses
Collection (java.util.Collection): 12 uses
NullKeyFieldException (org.apache.flink.types.NullKeyFieldException): 10 uses
ArrayList (java.util.ArrayList): 7 uses
Collector (org.apache.flink.util.Collector): 3 uses
Map (java.util.Map): 2 uses
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 2 uses
BinaryOperatorInformation (org.apache.flink.api.common.operators.BinaryOperatorInformation): 2 uses
TupleMatch (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatch): 2 uses
TupleMatchRemovingJoin (org.apache.flink.runtime.operators.hash.NonReusingHashJoinIteratorITCase.TupleMatchRemovingJoin): 2 uses
UnionIterator (org.apache.flink.runtime.operators.testutils.UnionIterator): 2 uses
MutableObjectIterator (org.apache.flink.util.MutableObjectIterator): 2 uses
HashMap (java.util.HashMap): 1 use
HashSet (java.util.HashSet): 1 use
Plan (org.apache.flink.api.common.Plan): 1 use
TaskInfo (org.apache.flink.api.common.TaskInfo): 1 use