
Example 96 with Tuple3

use of org.apache.flink.api.java.tuple.Tuple3 in project flink by apache.

the class TableEnvironmentITCase method testIllegalName.

@Test(expected = TableException.class)
public void testIllegalName() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    Table t = tableEnv.fromDataSet(ds);
    // Must fail. Table name matches internal name pattern.
    tableEnv.registerTable("_DataSetTable_42", t);
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) Tuple3(org.apache.flink.api.java.tuple.Tuple3) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)
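For contrast, here is a minimal sketch (not part of the test class) of a registration that should succeed with the same setup; the table name "MyTuple3Table" is an arbitrary illustrative choice, the point being only that it does not collide with the internal _DataSetTable_<n> naming scheme that the test above triggers.

// Sketch only: same environment setup as the test above, but with a user-chosen table name.
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env);
DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
Table t = tableEnv.fromDataSet(ds);
// "MyTuple3Table" is a hypothetical name that does not match the internal pattern,
// so no TableException is expected here.
tableEnv.registerTable("MyTuple3Table", t);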

Example 97 with Tuple3

use of org.apache.flink.api.java.tuple.Tuple3 in project flink by apache.

the class TableEnvironmentITCase method testRegisterExistingDatasetTable.

@Test(expected = TableException.class)
public void testRegisterExistingDatasetTable() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    tableEnv.registerDataSet("MyTable", ds);
    DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.getSmall5TupleDataSet(env);
    // Must fail. Name is already used for different table.
    tableEnv.registerDataSet("MyTable", ds2);
}
Also used : Tuple5(org.apache.flink.api.java.tuple.Tuple5) ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Tuple3(org.apache.flink.api.java.tuple.Tuple3) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)

Example 98 with Tuple3

use of org.apache.flink.api.java.tuple.Tuple3 in project flink by apache.

the class CopyOnWriteStateTableTest method testRandomModificationsAndCopyOnWriteIsolation.

/**
 * This test performs random modifications to a state table and to a reference hash map. It then draws snapshots,
 * performs more modifications, and checks snapshot integrity.
 */
@Test
public void testRandomModificationsAndCopyOnWriteIsolation() throws Exception {
    final RegisteredBackendStateMetaInfo<Integer, ArrayList<Integer>> metaInfo = new RegisteredBackendStateMetaInfo<>(
            StateDescriptor.Type.UNKNOWN, "test", IntSerializer.INSTANCE,
            // we use mutable state objects
            new ArrayListSerializer<>(IntSerializer.INSTANCE));
    final MockInternalKeyContext<Integer> keyContext = new MockInternalKeyContext<>(IntSerializer.INSTANCE);
    final CopyOnWriteStateTable<Integer, Integer, ArrayList<Integer>> stateTable = new CopyOnWriteStateTable<>(keyContext, metaInfo);
    final HashMap<Tuple2<Integer, Integer>, ArrayList<Integer>> referenceMap = new HashMap<>();
    final Random random = new Random(42);
    // holds snapshots from the map under test
    CopyOnWriteStateTable.StateTableEntry<Integer, Integer, ArrayList<Integer>>[] snapshot = null;
    int snapshotSize = 0;
    // holds a reference snapshot from our reference map that we compare against
    Tuple3<Integer, Integer, ArrayList<Integer>>[] reference = null;
    int val = 0;
    int snapshotCounter = 0;
    int referencedSnapshotId = 0;
    final StateTransformationFunction<ArrayList<Integer>, Integer> transformationFunction = new StateTransformationFunction<ArrayList<Integer>, Integer>() {

        @Override
        public ArrayList<Integer> apply(ArrayList<Integer> previousState, Integer value) throws Exception {
            if (previousState == null) {
                previousState = new ArrayList<>();
            }
            previousState.add(value);
            // we give back the original, attempting to spot errors in the copy-on-write handling
            return previousState;
        }
    };
    // the main loop for modifications
    for (int i = 0; i < 10_000_000; ++i) {
        int key = random.nextInt(20);
        int namespace = random.nextInt(4);
        Tuple2<Integer, Integer> compositeKey = new Tuple2<>(key, namespace);
        int op = random.nextInt(7);
        ArrayList<Integer> state = null;
        ArrayList<Integer> referenceState = null;
        switch(op) {
            case 0:
            case 1:
                {
                    state = stateTable.get(key, namespace);
                    referenceState = referenceMap.get(compositeKey);
                    if (null == state) {
                        state = new ArrayList<>();
                        stateTable.put(key, namespace, state);
                        referenceState = new ArrayList<>();
                        referenceMap.put(compositeKey, referenceState);
                    }
                    break;
                }
            case 2:
                {
                    stateTable.put(key, namespace, new ArrayList<Integer>());
                    referenceMap.put(compositeKey, new ArrayList<Integer>());
                    break;
                }
            case 3:
                {
                    state = stateTable.putAndGetOld(key, namespace, new ArrayList<Integer>());
                    referenceState = referenceMap.put(compositeKey, new ArrayList<Integer>());
                    break;
                }
            case 4:
                {
                    stateTable.remove(key, namespace);
                    referenceMap.remove(compositeKey);
                    break;
                }
            case 5:
                {
                    state = stateTable.removeAndGetOld(key, namespace);
                    referenceState = referenceMap.remove(compositeKey);
                    break;
                }
            case 6:
                {
                    final int updateValue = random.nextInt(1000);
                    stateTable.transform(key, namespace, updateValue, transformationFunction);
                    referenceMap.put(compositeKey, transformationFunction.apply(referenceMap.remove(compositeKey), updateValue));
                    break;
                }
            default:
                {
                    Assert.fail("Unknown op-code " + op);
                }
        }
        Assert.assertEquals(referenceMap.size(), stateTable.size());
        if (state != null) {
            // mutate the states a bit...
            if (random.nextBoolean() && !state.isEmpty()) {
                state.remove(state.size() - 1);
                referenceState.remove(referenceState.size() - 1);
            } else {
                state.add(val);
                referenceState.add(val);
                ++val;
            }
        }
        Assert.assertEquals(referenceState, state);
        // snapshot triggering / comparison / release
        if (i > 0 && i % 500 == 0) {
            if (snapshot != null) {
                // check our referenced snapshot
                deepCheck(reference, convert(snapshot, snapshotSize));
                if (i % 1_000 == 0) {
                    // draw and release another snapshot while holding on to the old one
                    ++snapshotCounter;
                    stateTable.snapshotTableArrays();
                    stateTable.releaseSnapshot(snapshotCounter);
                }
                // release the snapshot after some time
                if (i % 5_000 == 0) {
                    snapshot = null;
                    reference = null;
                    snapshotSize = 0;
                    stateTable.releaseSnapshot(referencedSnapshotId);
                }
            } else {
                // if there is no more referenced snapshot, we create one
                ++snapshotCounter;
                referencedSnapshotId = snapshotCounter;
                snapshot = stateTable.snapshotTableArrays();
                snapshotSize = stateTable.size();
                reference = manualDeepDump(referenceMap);
            }
        }
    }
}
Also used : StateTransformationFunction(org.apache.flink.runtime.state.StateTransformationFunction) HashMap(java.util.HashMap) RegisteredBackendStateMetaInfo(org.apache.flink.runtime.state.RegisteredBackendStateMetaInfo) ArrayList(java.util.ArrayList) Random(java.util.Random) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Test(org.junit.Test)
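Condensed from the test above, a minimal sketch of the copy-on-write cycle it exercises. It reuses the test's MockInternalKeyContext helper and the same meta info construction; the key/namespace values and the snapshot id 1 (the first snapshot under the test's counter scheme) are illustrative assumptions, not part of the original test.

// Same setup as the test (MockInternalKeyContext is a helper of the test class).
RegisteredBackendStateMetaInfo<Integer, ArrayList<Integer>> metaInfo = new RegisteredBackendStateMetaInfo<>(
        StateDescriptor.Type.UNKNOWN, "test", IntSerializer.INSTANCE,
        new ArrayListSerializer<>(IntSerializer.INSTANCE));
MockInternalKeyContext<Integer> keyContext = new MockInternalKeyContext<>(IntSerializer.INSTANCE);
CopyOnWriteStateTable<Integer, Integer, ArrayList<Integer>> table = new CopyOnWriteStateTable<>(keyContext, metaInfo);
// Put some mutable state for key 7, namespace 0 (illustrative values).
ArrayList<Integer> state = new ArrayList<>();
state.add(1);
table.put(7, 0, state);
// Draw a snapshot of the table arrays ...
CopyOnWriteStateTable.StateTableEntry<Integer, Integer, ArrayList<Integer>>[] snapshot = table.snapshotTableArrays();
// ... keep mutating the live table; copy-on-write keeps the snapshot's entries isolated.
table.get(7, 0).add(2);
table.remove(7, 0);
// Release the snapshot when it is no longer needed so the table can stop maintaining copies.
table.releaseSnapshot(1);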

Example 99 with Tuple3

use of org.apache.flink.api.java.tuple.Tuple3 in project flink by apache.

the class WindowFoldITCase method testFoldProcessWindow.

@Test
public void testFoldProcessWindow() throws Exception {
    testResults = new ArrayList<>();
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);
    DataStream<Tuple2<String, Integer>> source1 = env.addSource(new SourceFunction<Tuple2<String, Integer>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
            ctx.collect(Tuple2.of("a", 0));
            ctx.collect(Tuple2.of("a", 1));
            ctx.collect(Tuple2.of("a", 2));
            ctx.collect(Tuple2.of("b", 3));
            ctx.collect(Tuple2.of("b", 4));
            ctx.collect(Tuple2.of("b", 5));
            ctx.collect(Tuple2.of("a", 6));
            ctx.collect(Tuple2.of("a", 7));
            ctx.collect(Tuple2.of("a", 8));
        // source is finite, so it will have an implicit MAX watermark when it finishes
        }

        @Override
        public void cancel() {
        }
    }).assignTimestampsAndWatermarks(new Tuple2TimestampExtractor());
    source1.keyBy(0).window(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS))).fold(Tuple2.of(0, "R:"), new FoldFunction<Tuple2<String, Integer>, Tuple2<Integer, String>>() {

        @Override
        public Tuple2<Integer, String> fold(Tuple2<Integer, String> accumulator, Tuple2<String, Integer> value) throws Exception {
            accumulator.f1 += value.f0;
            accumulator.f0 += value.f1;
            return accumulator;
        }
    }, new ProcessWindowFunction<Tuple2<Integer, String>, Tuple3<String, Integer, Integer>, Tuple, TimeWindow>() {

        @Override
        public void process(Tuple tuple, Context context, Iterable<Tuple2<Integer, String>> elements, Collector<Tuple3<String, Integer, Integer>> out) throws Exception {
            int i = 0;
            for (Tuple2<Integer, String> in : elements) {
                out.collect(new Tuple3<>(in.f1, in.f0, i++));
            }
        }
    }).addSink(new SinkFunction<Tuple3<String, Integer, Integer>>() {

        @Override
        public void invoke(Tuple3<String, Integer, Integer> value) throws Exception {
            testResults.add(value.toString());
        }
    });
    env.execute("Fold Process Window Test");
    List<String> expectedResult = Arrays.asList("(R:aaa,3,0)", "(R:aaa,21,0)", "(R:bbb,12,0)");
    Collections.sort(expectedResult);
    Collections.sort(testResults);
    Assert.assertEquals(expectedResult, testResults);
}
Also used : SourceFunction(org.apache.flink.streaming.api.functions.source.SourceFunction) FoldFunction(org.apache.flink.api.common.functions.FoldFunction) ProcessWindowFunction(org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Collector(org.apache.flink.util.Collector) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) Tuple(org.apache.flink.api.java.tuple.Tuple) Test(org.junit.Test)
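A hand trace of the fold for the first window of key "a" makes the expected results concrete; it assumes that Tuple2TimestampExtractor (not shown here) assigns each element's Integer field as its event timestamp, so the 3 ms tumbling window [0, 3) contains (a,0), (a,1), (a,2).

// Sketch only: manually folding the first window's elements with the same FoldFunction logic.
Tuple2<Integer, String> acc = Tuple2.of(0, "R:");
for (Tuple2<String, Integer> value : Arrays.asList(Tuple2.of("a", 0), Tuple2.of("a", 1), Tuple2.of("a", 2))) {
    acc.f1 += value.f0;   // "R:" -> "R:a" -> "R:aa" -> "R:aaa"
    acc.f0 += value.f1;   // 0 -> 0 -> 1 -> 3
}
// The ProcessWindowFunction then emits ("R:aaa", 3, 0), i.e. the expected "(R:aaa,3,0)".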

Example 100 with Tuple3

use of org.apache.flink.api.java.tuple.Tuple3 in project flink by apache.

the class WindowFoldITCase method testFoldProcessAllWindow.

@Test
public void testFoldProcessAllWindow() throws Exception {
    testResults = new ArrayList<>();
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);
    DataStream<Tuple2<String, Integer>> source1 = env.addSource(new SourceFunction<Tuple2<String, Integer>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
            ctx.collect(Tuple2.of("a", 0));
            ctx.collect(Tuple2.of("a", 1));
            ctx.collect(Tuple2.of("a", 2));
            ctx.collect(Tuple2.of("b", 3));
            ctx.collect(Tuple2.of("b", 4));
            ctx.collect(Tuple2.of("b", 5));
            ctx.collect(Tuple2.of("a", 6));
            ctx.collect(Tuple2.of("a", 7));
            ctx.collect(Tuple2.of("a", 8));
        // source is finite, so it will have an implicit MAX watermark when it finishes
        }

        @Override
        public void cancel() {
        }
    }).assignTimestampsAndWatermarks(new Tuple2TimestampExtractor());
    source1.windowAll(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS))).fold(Tuple2.of(0, "R:"), new FoldFunction<Tuple2<String, Integer>, Tuple2<Integer, String>>() {

        @Override
        public Tuple2<Integer, String> fold(Tuple2<Integer, String> accumulator, Tuple2<String, Integer> value) throws Exception {
            accumulator.f1 += value.f0;
            accumulator.f0 += value.f1;
            return accumulator;
        }
    }, new ProcessAllWindowFunction<Tuple2<Integer, String>, Tuple3<String, Integer, Integer>, TimeWindow>() {

        @Override
        public void process(Context context, Iterable<Tuple2<Integer, String>> elements, Collector<Tuple3<String, Integer, Integer>> out) throws Exception {
            int i = 0;
            for (Tuple2<Integer, String> in : elements) {
                out.collect(new Tuple3<>(in.f1, in.f0, i++));
            }
        }
    }).addSink(new SinkFunction<Tuple3<String, Integer, Integer>>() {

        @Override
        public void invoke(Tuple3<String, Integer, Integer> value) throws Exception {
            testResults.add(value.toString());
        }
    });
    env.execute("Fold Process Window Test");
    List<String> expectedResult = Arrays.asList("(R:aaa,3,0)", "(R:aaa,21,0)", "(R:bbb,12,0)");
    Collections.sort(expectedResult);
    Collections.sort(testResults);
    Assert.assertEquals(expectedResult, testResults);
}
Also used : SourceFunction(org.apache.flink.streaming.api.functions.source.SourceFunction) FoldFunction(org.apache.flink.api.common.functions.FoldFunction) ProcessAllWindowFunction(org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Collector(org.apache.flink.util.Collector) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) Test(org.junit.Test)
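With windowAll there is no keying, yet the expected results are identical to the keyed test above, because (assuming the same timestamp assignment from the Integer field) each 3 ms tumbling window happens to contain elements of a single key:

// Window contents and folded results (illustrative trace, not part of the test):
// [0, 3): (a,0), (a,1), (a,2) -> ("R:aaa", 0 + 1 + 2) -> "(R:aaa,3,0)"
// [3, 6): (b,3), (b,4), (b,5) -> ("R:bbb", 3 + 4 + 5) -> "(R:bbb,12,0)"
// [6, 9): (a,6), (a,7), (a,8) -> ("R:aaa", 6 + 7 + 8) -> "(R:aaa,21,0)"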

Aggregations

Tuple3 (org.apache.flink.api.java.tuple.Tuple3) 559
Test (org.junit.Test) 506
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment) 415
Tuple2 (org.apache.flink.api.java.tuple.Tuple2) 182
Plan (org.apache.flink.api.common.Plan) 89
Tuple5 (org.apache.flink.api.java.tuple.Tuple5) 74
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) 63
OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan) 55
SinkPlanNode (org.apache.flink.optimizer.plan.SinkPlanNode) 53
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation) 43
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow) 43
DualInputPlanNode (org.apache.flink.optimizer.plan.DualInputPlanNode) 38
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig) 37
IOException (java.io.IOException) 32
ArrayList (java.util.ArrayList) 31
Configuration (org.apache.flink.configuration.Configuration) 29
EventTimeTrigger (org.apache.flink.streaming.api.windowing.triggers.EventTimeTrigger) 27
FieldSet (org.apache.flink.api.common.operators.util.FieldSet) 24
TypeHint (org.apache.flink.api.common.typeinfo.TypeHint) 24
Tuple1 (org.apache.flink.api.java.tuple.Tuple1) 21