Search in sources:

Example 1 with BinaryOperatorInformation

Use of org.apache.flink.api.common.operators.BinaryOperatorInformation in project flink by apache.

From the class InnerJoinOperatorBaseTest, method testJoinPlain.

@Test
public void testJoinPlain() {
    final FlatJoinFunction<String, String, Integer> joiner = new FlatJoinFunction<String, String, Integer>() {

        @Override
        public void join(String first, String second, Collector<Integer> out) throws Exception {
            out.collect(first.length());
            out.collect(second.length());
        }
    };
    @SuppressWarnings({ "rawtypes", "unchecked" }) InnerJoinOperatorBase<String, String, Integer, FlatJoinFunction<String, String, Integer>> base = new InnerJoinOperatorBase(joiner, new BinaryOperatorInformation(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO), new int[0], new int[0], "TestJoiner");
    List<String> inputData1 = new ArrayList<String>(Arrays.asList("foo", "bar", "foobar"));
    List<String> inputData2 = new ArrayList<String>(Arrays.asList("foobar", "foo"));
    List<Integer> expected = new ArrayList<Integer>(Arrays.asList(3, 3, 6, 6));
    try {
        ExecutionConfig executionConfig = new ExecutionConfig();
        executionConfig.disableObjectReuse();
        List<Integer> resultSafe = base.executeOnCollections(inputData1, inputData2, null, executionConfig);
        executionConfig.enableObjectReuse();
        List<Integer> resultRegular = base.executeOnCollections(inputData1, inputData2, null, executionConfig);
        assertEquals(expected, resultSafe);
        assertEquals(expected, resultRegular);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: ArrayList (java.util.ArrayList), FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction), RichFlatJoinFunction (org.apache.flink.api.common.functions.RichFlatJoinFunction), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), Collector (org.apache.flink.util.Collector), BinaryOperatorInformation (org.apache.flink.api.common.operators.BinaryOperatorInformation), Test (org.junit.Test)
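
The BinaryOperatorInformation here does nothing more than bundle the type information of the two inputs and the output for the operator base. A minimal sketch of that, assuming the usual accessor names getFirstInputType, getSecondInputType and getOutputType on this class:

BinaryOperatorInformation<String, String, Integer> info =
        new BinaryOperatorInformation<>(
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.INT_TYPE_INFO);
// The operator base later uses these TypeInformation objects to obtain serializers and comparators.
assertEquals(BasicTypeInfo.STRING_TYPE_INFO, info.getFirstInputType());
assertEquals(BasicTypeInfo.STRING_TYPE_INFO, info.getSecondInputType());
assertEquals(BasicTypeInfo.INT_TYPE_INFO, info.getOutputType());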

Example 2 with BinaryOperatorInformation

Use of org.apache.flink.api.common.operators.BinaryOperatorInformation in project flink by apache.

From the class OuterJoinOperatorBaseTest, method setup.

@SuppressWarnings({ "rawtypes", "unchecked" })
@Before
public void setup() {
    joiner = new MockRichFlatJoinFunction();
    baseOperator = new OuterJoinOperatorBase(
            joiner,
            new BinaryOperatorInformation(
                    BasicTypeInfo.STRING_TYPE_INFO,
                    BasicTypeInfo.STRING_TYPE_INFO,
                    BasicTypeInfo.STRING_TYPE_INFO),
            new int[0],
            new int[0],
            "TestJoiner",
            null);
    executionConfig = new ExecutionConfig();
    String taskName = "Test rich outer join function";
    TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
    HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<>();
    HashMap<String, Future<Path>> cpTasks = new HashMap<>();
    runtimeContext = new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap, new UnregisteredMetricsGroup());
}
Also used: Accumulator (org.apache.flink.api.common.accumulators.Accumulator), UnregisteredMetricsGroup (org.apache.flink.metrics.groups.UnregisteredMetricsGroup), HashMap (java.util.HashMap), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), TaskInfo (org.apache.flink.api.common.TaskInfo), RuntimeUDFContext (org.apache.flink.api.common.functions.util.RuntimeUDFContext), Future (java.util.concurrent.Future), BinaryOperatorInformation (org.apache.flink.api.common.operators.BinaryOperatorInformation), Before (org.junit.Before)
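
setup() only wires the fixture together; the operator base is constructed with a null outer join type, and the RuntimeUDFContext is kept around for the individual tests to use. A hypothetical follow-up test method in the same class (assuming OuterJoinOperatorBase exposes setOuterJoinType and the executeOnCollections helper used in the other examples; the method name and data are made up for this sketch) could look like:

@Test
public void testLeftOuterJoinSketch() throws Exception {
    // Assumption: the null passed for the outer join type in setup() is filled in here.
    baseOperator.setOuterJoinType(OuterJoinOperatorBase.OuterJoinType.LEFT);
    List<String> left = Arrays.asList("foo", "bar", "foobar");
    List<String> right = Arrays.asList("bar", "foobar");
    // Passing runtimeContext lets the rich function's open()/close() lifecycle run,
    // which is why setup() builds a RuntimeUDFContext in the first place.
    List<String> result = baseOperator.executeOnCollections(left, right, runtimeContext, executionConfig);
    assertNotNull(result);
}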

Example 3 with BinaryOperatorInformation

Use of org.apache.flink.api.common.operators.BinaryOperatorInformation in project flink by apache.

From the class CoGroupOperator, method translateToDataFlow.

@Override
@Internal
protected org.apache.flink.api.common.operators.base.CoGroupOperatorBase<?, ?, OUT, ?> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
    String name = getName() != null ? getName() : "CoGroup at " + defaultName;
    try {
        keys1.areCompatible(keys2);
    } catch (IncompatibleKeysException e) {
        throw new InvalidProgramException("The types of the key fields do not match.", e);
    }
    final org.apache.flink.api.common.operators.base.CoGroupOperatorBase<?, ?, OUT, ?> po;
    if (keys1 instanceof SelectorFunctionKeys && keys2 instanceof SelectorFunctionKeys) {
        @SuppressWarnings("unchecked") SelectorFunctionKeys<I1, ?> selectorKeys1 = (SelectorFunctionKeys<I1, ?>) keys1;
        @SuppressWarnings("unchecked") SelectorFunctionKeys<I2, ?> selectorKeys2 = (SelectorFunctionKeys<I2, ?>) keys2;
        po = translateSelectorFunctionCoGroup(selectorKeys1, selectorKeys2, function, getResultType(), name, input1, input2);
        po.setParallelism(getParallelism());
        po.setCustomPartitioner(customPartitioner);
    } else if (keys2 instanceof SelectorFunctionKeys) {
        int[] logicalKeyPositions1 = keys1.computeLogicalKeyPositions();
        @SuppressWarnings("unchecked") SelectorFunctionKeys<I2, ?> selectorKeys2 = (SelectorFunctionKeys<I2, ?>) keys2;
        po = translateSelectorFunctionCoGroupRight(logicalKeyPositions1, selectorKeys2, function, getInput1Type(), getResultType(), name, input1, input2);
        po.setParallelism(getParallelism());
        po.setCustomPartitioner(customPartitioner);
    } else if (keys1 instanceof SelectorFunctionKeys) {
        @SuppressWarnings("unchecked") SelectorFunctionKeys<I1, ?> selectorKeys1 = (SelectorFunctionKeys<I1, ?>) keys1;
        int[] logicalKeyPositions2 = keys2.computeLogicalKeyPositions();
        po = translateSelectorFunctionCoGroupLeft(selectorKeys1, logicalKeyPositions2, function, getInput2Type(), getResultType(), name, input1, input2);
    } else if (keys1 instanceof Keys.ExpressionKeys && keys2 instanceof Keys.ExpressionKeys) {
        try {
            keys1.areCompatible(keys2);
        } catch (IncompatibleKeysException e) {
            throw new InvalidProgramException("The types of the key fields do not match.", e);
        }
        int[] logicalKeyPositions1 = keys1.computeLogicalKeyPositions();
        int[] logicalKeyPositions2 = keys2.computeLogicalKeyPositions();
        CoGroupOperatorBase<I1, I2, OUT, CoGroupFunction<I1, I2, OUT>> op = new CoGroupOperatorBase<>(function, new BinaryOperatorInformation<>(getInput1Type(), getInput2Type(), getResultType()), logicalKeyPositions1, logicalKeyPositions2, name);
        op.setFirstInput(input1);
        op.setSecondInput(input2);
        po = op;
    } else {
        throw new UnsupportedOperationException("Unrecognized or incompatible key types.");
    }
    // configure shared characteristics
    po.setParallelism(getParallelism());
    po.setCustomPartitioner(customPartitioner);
    if (groupSortKeyOrderFirst.size() > 0) {
        Ordering o = new Ordering();
        for (Pair<Integer, Order> entry : groupSortKeyOrderFirst) {
            o.appendOrdering(entry.getLeft(), null, entry.getRight());
        }
        po.setGroupOrderForInputOne(o);
    }
    if (groupSortKeyOrderSecond.size() > 0) {
        Ordering o = new Ordering();
        for (Pair<Integer, Order> entry : groupSortKeyOrderSecond) {
            o.appendOrdering(entry.getLeft(), null, entry.getRight());
        }
        po.setGroupOrderForInputTwo(o);
    }
    return po;
}
Also used: SelectorFunctionKeys (org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys), CoGroupOperatorBase (org.apache.flink.api.common.operators.base.CoGroupOperatorBase), InvalidProgramException (org.apache.flink.api.common.InvalidProgramException), Ordering (org.apache.flink.api.common.operators.Ordering), BinaryOperatorInformation (org.apache.flink.api.common.operators.BinaryOperatorInformation), Order (org.apache.flink.api.common.operators.Order), ExpressionKeys (org.apache.flink.api.common.operators.Keys.ExpressionKeys), IncompatibleKeysException (org.apache.flink.api.common.operators.Keys.IncompatibleKeysException), Internal (org.apache.flink.annotation.Internal)
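
The expression-keys branch above is where BinaryOperatorInformation actually enters the picture: it packages getInput1Type(), getInput2Type() and getResultType() for the CoGroupOperatorBase. A minimal, standalone sketch of that construction, with illustrative types, a trivial stand-in CoGroupFunction, and made-up names ("IllustrativeCoGroup", fn, info), might look like this:

// Illustrative types: both inputs are Tuple2<Integer, String>, the result is a plain String.
TupleTypeInfo<Tuple2<Integer, String>> leftType =
        TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, String.class);
TupleTypeInfo<Tuple2<Integer, String>> rightType =
        TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, String.class);
// A trivial stand-in for the translated user function.
CoGroupFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, String> fn =
        new CoGroupFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, String>() {

            @Override
            public void coGroup(Iterable<Tuple2<Integer, String>> first, Iterable<Tuple2<Integer, String>> second, Collector<String> out) {
                for (Tuple2<Integer, String> t : first) {
                    out.collect(t.f1);
                }
            }
        };
// The BinaryOperatorInformation carries exactly the three TypeInformation objects
// that translateToDataFlow obtains from getInput1Type(), getInput2Type() and getResultType().
BinaryOperatorInformation<Tuple2<Integer, String>, Tuple2<Integer, String>, String> info =
        new BinaryOperatorInformation<>(leftType, rightType, BasicTypeInfo.STRING_TYPE_INFO);
CoGroupOperatorBase<Tuple2<Integer, String>, Tuple2<Integer, String>, String, CoGroupFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, String>> op =
        new CoGroupOperatorBase<>(fn, info, new int[] { 0 }, new int[] { 0 }, "IllustrativeCoGroup");
// Mirrors the group-order handling at the end of translateToDataFlow.
Ordering ordering = new Ordering();
ordering.appendOrdering(1, null, Order.ASCENDING);
op.setGroupOrderForInputOne(ordering);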

Example 4 with BinaryOperatorInformation

Use of org.apache.flink.api.common.operators.BinaryOperatorInformation in project flink by apache.

From the class InnerJoinOperatorBaseTest, method testTupleBaseJoiner.

@Test
public void testTupleBaseJoiner() {
    final FlatJoinFunction<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>> joiner = new FlatJoinFunction<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>>() {

        @Override
        public void join(Tuple3<String, Double, Integer> first, Tuple2<Integer, String> second, Collector<Tuple2<Double, String>> out) {
            assertEquals(first.f0, second.f1);
            assertEquals(first.f2, second.f0);
            out.collect(new Tuple2<>(first.f1, second.f0.toString()));
        }
    };
    final TupleTypeInfo<Tuple3<String, Double, Integer>> leftTypeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Double.class, Integer.class);
    final TupleTypeInfo<Tuple2<Integer, String>> rightTypeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, String.class);
    final TupleTypeInfo<Tuple2<Double, String>> outTypeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Double.class, String.class);
    final int[] leftKeys = new int[] { 0, 2 };
    final int[] rightKeys = new int[] { 1, 0 };
    final String taskName = "Collection based tuple joiner";
    final BinaryOperatorInformation<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>> binaryOpInfo =
            new BinaryOperatorInformation<>(leftTypeInfo, rightTypeInfo, outTypeInfo);
    final InnerJoinOperatorBase<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>, FlatJoinFunction<Tuple3<String, Double, Integer>, Tuple2<Integer, String>, Tuple2<Double, String>>> base =
            new InnerJoinOperatorBase<>(joiner, binaryOpInfo, leftKeys, rightKeys, taskName);
    final List<Tuple3<String, Double, Integer>> inputData1 = new ArrayList<Tuple3<String, Double, Integer>>(Arrays.asList(new Tuple3<>("foo", 42.0, 1), new Tuple3<>("bar", 1.0, 2), new Tuple3<>("bar", 2.0, 3), new Tuple3<>("foobar", 3.0, 4), new Tuple3<>("bar", 3.0, 3)));
    final List<Tuple2<Integer, String>> inputData2 = new ArrayList<Tuple2<Integer, String>>(Arrays.asList(new Tuple2<>(3, "bar"), new Tuple2<>(4, "foobar"), new Tuple2<>(2, "foo")));
    final Set<Tuple2<Double, String>> expected = new HashSet<Tuple2<Double, String>>(Arrays.asList(new Tuple2<>(2.0, "3"), new Tuple2<>(3.0, "3"), new Tuple2<>(3.0, "4")));
    try {
        final TaskInfo taskInfo = new TaskInfo("op", 1, 0, 1, 0);
        ExecutionConfig executionConfig = new ExecutionConfig();
        executionConfig.disableObjectReuse();
        List<Tuple2<Double, String>> resultSafe = base.executeOnCollections(inputData1, inputData2, new RuntimeUDFContext(taskInfo, null, executionConfig, new HashMap<String, Future<Path>>(), new HashMap<String, Accumulator<?, ?>>(), new UnregisteredMetricsGroup()), executionConfig);
        executionConfig.enableObjectReuse();
        List<Tuple2<Double, String>> resultRegular = base.executeOnCollections(inputData1, inputData2, new RuntimeUDFContext(taskInfo, null, executionConfig, new HashMap<String, Future<Path>>(), new HashMap<String, Accumulator<?, ?>>(), new UnregisteredMetricsGroup()), executionConfig);
        assertEquals(expected, new HashSet<>(resultSafe));
        assertEquals(expected, new HashSet<>(resultRegular));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: UnregisteredMetricsGroup (org.apache.flink.metrics.groups.UnregisteredMetricsGroup), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), TaskInfo (org.apache.flink.api.common.TaskInfo), Collector (org.apache.flink.util.Collector), RuntimeUDFContext (org.apache.flink.api.common.functions.util.RuntimeUDFContext), BinaryOperatorInformation (org.apache.flink.api.common.operators.BinaryOperatorInformation), HashSet (java.util.HashSet), Path (org.apache.flink.core.fs.Path), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Tuple3 (org.apache.flink.api.java.tuple.Tuple3), Test (org.junit.Test)
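
The int[] key arrays pair up positionally: leftKeys[i] is compared against rightKeys[i]. With leftKeys = {0, 2} and rightKeys = {1, 0}, the condition the collection-based join evaluates is equivalent to the check sketched below (written out only for illustration):

// first : Tuple3<String, Double, Integer>, second : Tuple2<Integer, String>
boolean matches = first.f0.equals(second.f1)    // leftKeys[0] = 0 pairs with rightKeys[0] = 1
        && first.f2.equals(second.f0);          // leftKeys[1] = 2 pairs with rightKeys[1] = 0

This is exactly what the asserts inside the joiner verify; for instance ("bar", 2.0, 3) matches (3, "bar") and yields (2.0, "3"), one of the three expected results.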

Aggregations

BinaryOperatorInformation (org.apache.flink.api.common.operators.BinaryOperatorInformation): 4
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 3
ArrayList (java.util.ArrayList): 2
HashMap (java.util.HashMap): 2
TaskInfo (org.apache.flink.api.common.TaskInfo): 2
FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction): 2
RuntimeUDFContext (org.apache.flink.api.common.functions.util.RuntimeUDFContext): 2
UnregisteredMetricsGroup (org.apache.flink.metrics.groups.UnregisteredMetricsGroup): 2
Collector (org.apache.flink.util.Collector): 2
Test (org.junit.Test): 2
HashSet (java.util.HashSet): 1
Future (java.util.concurrent.Future): 1
Internal (org.apache.flink.annotation.Internal): 1
InvalidProgramException (org.apache.flink.api.common.InvalidProgramException): 1
Accumulator (org.apache.flink.api.common.accumulators.Accumulator): 1
RichFlatJoinFunction (org.apache.flink.api.common.functions.RichFlatJoinFunction): 1
ExpressionKeys (org.apache.flink.api.common.operators.Keys.ExpressionKeys): 1
IncompatibleKeysException (org.apache.flink.api.common.operators.Keys.IncompatibleKeysException): 1
SelectorFunctionKeys (org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys): 1
Order (org.apache.flink.api.common.operators.Order): 1