
Example 66 with ListStateDescriptor

Use of org.apache.flink.api.common.state.ListStateDescriptor in project flink by apache.

Source: class StateBackendTestBase, method testListStateDefaultValue.

/**
 * Verify that an empty {@code ListState} yields {@code null}.
 */
@Test
public void testListStateDefaultValue() throws Exception {
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);
    ListStateDescriptor<String> kvId = new ListStateDescriptor<>("id", String.class);
    kvId.initializeSerializerUnlessSet(new ExecutionConfig());
    ListState<String> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);
    backend.setCurrentKey(1);
    assertNull(state.get());
    state.add("Ciao");
    state.add("Bello");
    assertThat(state.get(), containsInAnyOrder("Ciao", "Bello"));
    state.clear();
    assertNull(state.get());
    backend.dispose();
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) Test(org.junit.Test)
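
For context, here is a minimal sketch of how the same kind of ListStateDescriptor is typically registered from user code through the RuntimeContext, rather than directly against a keyed state backend as in the test. The class name and state name below are illustrative, and the snippet assumes a keyed stream and a Flink version where RuntimeContext#getListState is available.

import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;

// Hypothetical user function: buffers every value seen for the current key.
public class BufferingFlatMap extends RichFlatMapFunction<String, String> {

    private transient ListState<String> buffer;

    @Override
    public void open(Configuration parameters) {
        // Same descriptor shape as in the test above; "buffer" is an illustrative name.
        ListStateDescriptor<String> descriptor = new ListStateDescriptor<>("buffer", String.class);
        buffer = getRuntimeContext().getListState(descriptor);
    }

    @Override
    public void flatMap(String value, Collector<String> out) throws Exception {
        buffer.add(value);
        // Re-emit everything buffered so far for this key.
        for (String buffered : buffer.get()) {
            out.collect(buffered);
        }
    }
}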

Example 67 with ListStateDescriptor

Use of org.apache.flink.api.common.state.ListStateDescriptor in project flink by apache.

Source: class StateBackendTestBase, method testConcurrentMapIfQueryable.

/**
 * Tests that {@link AbstractHeapState} instances respect the queryable
 * flag and create concurrent variants for internal state structures.
 */
@SuppressWarnings("unchecked")
protected void testConcurrentMapIfQueryable() throws Exception {
    final int numberOfKeyGroups = 1;
    AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE, numberOfKeyGroups, new KeyGroupRange(0, 0), new DummyEnvironment("test_op", 1, 0));
    {
        // ValueState
        ValueStateDescriptor<Integer> desc = new ValueStateDescriptor<>("value-state", Integer.class, -1);
        desc.setQueryable("my-query");
        desc.initializeSerializerUnlessSet(new ExecutionConfig());
        ValueState<Integer> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, desc);
        InternalKvState<VoidNamespace> kvState = (InternalKvState<VoidNamespace>) state;
        assertTrue(kvState instanceof AbstractHeapState);
        kvState.setCurrentNamespace(VoidNamespace.INSTANCE);
        backend.setCurrentKey(1);
        state.update(121818273);
        StateTable<?, ?, ?> stateTable = ((AbstractHeapState<?, ?, ?, ?, ?>) kvState).getStateTable();
        checkConcurrentStateTable(stateTable, numberOfKeyGroups);
    }
    {
        // ListState
        ListStateDescriptor<Integer> desc = new ListStateDescriptor<>("list-state", Integer.class);
        desc.setQueryable("my-query");
        desc.initializeSerializerUnlessSet(new ExecutionConfig());
        ListState<Integer> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, desc);
        InternalKvState<VoidNamespace> kvState = (InternalKvState<VoidNamespace>) state;
        assertTrue(kvState instanceof AbstractHeapState);
        kvState.setCurrentNamespace(VoidNamespace.INSTANCE);
        backend.setCurrentKey(1);
        state.add(121818273);
        StateTable<?, ?, ?> stateTable = ((AbstractHeapState<?, ?, ?, ?, ?>) kvState).getStateTable();
        checkConcurrentStateTable(stateTable, numberOfKeyGroups);
    }
    {
        // ReducingState
        ReducingStateDescriptor<Integer> desc = new ReducingStateDescriptor<>("reducing-state", new ReduceFunction<Integer>() {

            @Override
            public Integer reduce(Integer value1, Integer value2) throws Exception {
                return value1 + value2;
            }
        }, Integer.class);
        desc.setQueryable("my-query");
        desc.initializeSerializerUnlessSet(new ExecutionConfig());
        ReducingState<Integer> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, desc);
        InternalKvState<VoidNamespace> kvState = (InternalKvState<VoidNamespace>) state;
        assertTrue(kvState instanceof AbstractHeapState);
        kvState.setCurrentNamespace(VoidNamespace.INSTANCE);
        backend.setCurrentKey(1);
        state.add(121818273);
        StateTable<?, ?, ?> stateTable = ((AbstractHeapState<?, ?, ?, ?, ?>) kvState).getStateTable();
        checkConcurrentStateTable(stateTable, numberOfKeyGroups);
    }
    {
        // FoldingState
        FoldingStateDescriptor<Integer, Integer> desc = new FoldingStateDescriptor<>("folding-state", 0, new FoldFunction<Integer, Integer>() {

            @Override
            public Integer fold(Integer accumulator, Integer value) throws Exception {
                return accumulator + value;
            }
        }, Integer.class);
        desc.setQueryable("my-query");
        desc.initializeSerializerUnlessSet(new ExecutionConfig());
        FoldingState<Integer, Integer> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, desc);
        InternalKvState<VoidNamespace> kvState = (InternalKvState<VoidNamespace>) state;
        assertTrue(kvState instanceof AbstractHeapState);
        kvState.setCurrentNamespace(VoidNamespace.INSTANCE);
        backend.setCurrentKey(1);
        state.add(121818273);
        StateTable<?, ?, ?> stateTable = ((AbstractHeapState<?, ?, ?, ?, ?>) kvState).getStateTable();
        checkConcurrentStateTable(stateTable, numberOfKeyGroups);
    }
    {
        // MapState
        MapStateDescriptor<Integer, String> desc = new MapStateDescriptor<>("map-state", Integer.class, String.class);
        desc.setQueryable("my-query");
        desc.initializeSerializerUnlessSet(new ExecutionConfig());
        MapState<Integer, String> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, desc);
        InternalKvState<VoidNamespace> kvState = (InternalKvState<VoidNamespace>) state;
        assertTrue(kvState instanceof AbstractHeapState);
        kvState.setCurrentNamespace(VoidNamespace.INSTANCE);
        backend.setCurrentKey(1);
        state.put(121818273, "121818273");
        int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(1, numberOfKeyGroups);
        StateTable stateTable = ((AbstractHeapState) kvState).getStateTable();
        assertNotNull("State not set", stateTable.get(keyGroupIndex));
        checkConcurrentStateTable(stateTable, numberOfKeyGroups);
    }
    backend.dispose();
}
Also used : NestedMapsStateTable(org.apache.flink.runtime.state.heap.NestedMapsStateTable) StateTable(org.apache.flink.runtime.state.heap.StateTable) AbstractHeapState(org.apache.flink.runtime.state.heap.AbstractHeapState) MapStateDescriptor(org.apache.flink.api.common.state.MapStateDescriptor) FoldFunction(org.apache.flink.api.common.functions.FoldFunction) ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ReduceFunction(org.apache.flink.api.common.functions.ReduceFunction) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) FoldingStateDescriptor(org.apache.flink.api.common.state.FoldingStateDescriptor) ValueStateDescriptor(org.apache.flink.api.common.state.ValueStateDescriptor) ListState(org.apache.flink.api.common.state.ListState) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) MapState(org.apache.flink.api.common.state.MapState) DummyEnvironment(org.apache.flink.runtime.operators.testutils.DummyEnvironment) ReducingState(org.apache.flink.api.common.state.ReducingState) ValueState(org.apache.flink.api.common.state.ValueState) InternalValueState(org.apache.flink.runtime.state.internal.InternalValueState) InternalKvState(org.apache.flink.runtime.state.internal.InternalKvState) FoldingState(org.apache.flink.api.common.state.FoldingState)
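
For reference, a short sketch of the user-facing side of the queryable flag that this test exercises. The descriptor name and query name are illustrative; registering such a descriptor on a heap-based keyed backend is what the test does for each state kind before asserting that a concurrency-safe StateTable is used.

import org.apache.flink.api.common.state.ValueStateDescriptor;

public class QueryableDescriptorSketch {
    public static void main(String[] args) {
        // Flag the state as queryable under an external name.
        ValueStateDescriptor<Integer> descriptor =
                new ValueStateDescriptor<>("value-state", Integer.class, -1);
        descriptor.setQueryable("my-query");

        System.out.println(descriptor.isQueryable());            // true
        System.out.println(descriptor.getQueryableStateName());  // my-query
    }
}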

Example 68 with ListStateDescriptor

Use of org.apache.flink.api.common.state.ListStateDescriptor in project flink by apache.

Source: class WindowOperatorTest, method testSessionWindowsWithProcessFunction.

@Test
@SuppressWarnings("unchecked")
public void testSessionWindowsWithProcessFunction() throws Exception {
    closeCalled.set(0);
    final int SESSION_SIZE = 3;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents", inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator =
            new WindowOperator<>(
                    EventTimeSessionWindows.withGap(Time.seconds(SESSION_SIZE)),
                    new TimeWindow.Serializer(),
                    new TupleKeySelector(),
                    BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                    stateDesc,
                    new InternalIterableProcessWindowFunction<>(new SessionProcessWindowFunction()),
                    EventTimeTrigger.create(),
                    0,
                    null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
    // do a snapshot, close and restore again
    OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
    testHarness.close();
    testHarness.setup();
    testHarness.initializeState(snapshot);
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
    testHarness.processWatermark(new Watermark(12000));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
    expectedOutput.add(new Watermark(12000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
    testHarness.processWatermark(new Watermark(17999));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
    expectedOutput.add(new Watermark(17999));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
    testHarness.close();
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) OperatorStateHandles(org.apache.flink.streaming.runtime.tasks.OperatorStateHandles) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
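
As a rough DataStream-API counterpart to the operator wired up manually above, the sketch below declares an event-time session window with a 3-second gap and a ProcessWindowFunction that, like the test's SessionProcessWindowFunction appears to, emits a per-key sum together with the window bounds. The source data, key selector, and job name are illustrative, and timestamp/watermark assignment is only hinted at in a comment.

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class SessionWindowSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Illustrative input; a real job also needs event-time timestamps and watermarks
        // (e.g. via assignTimestampsAndWatermarks) for these windows to fire.
        DataStream<Tuple2<String, Integer>> input =
                env.fromElements(Tuple2.of("key1", 1), Tuple2.of("key1", 2), Tuple2.of("key2", 3));

        input
            .keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                @Override
                public String getKey(Tuple2<String, Integer> value) {
                    return value.f0;
                }
            })
            .window(EventTimeSessionWindows.withGap(Time.seconds(3)))
            .process(new ProcessWindowFunction<Tuple2<String, Integer>, Tuple3<String, Long, Long>, String, TimeWindow>() {
                @Override
                public void process(
                        String key,
                        Context context,
                        Iterable<Tuple2<String, Integer>> elements,
                        Collector<Tuple3<String, Long, Long>> out) {
                    int sum = 0;
                    for (Tuple2<String, Integer> element : elements) {
                        sum += element.f1;
                    }
                    // Emit "<key>-<sum>" together with the merged session window's bounds.
                    out.collect(new Tuple3<>(key + "-" + sum, context.window().getStart(), context.window().getEnd()));
                }
            })
            .print();

        env.execute("session-window-sketch");
    }
}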

Example 69 with ListStateDescriptor

Use of org.apache.flink.api.common.state.ListStateDescriptor in project flink by apache.

Source: class WindowOperatorTest, method testPointSessions.

/**
 * This tests a custom Session window assigner that assigns some elements to "point windows",
 * windows that have the same timestamp for start and end.
 *
 * <p>In this test, elements that have 33 as the second tuple field will be put into a point
 * window.
 */
@Test
@SuppressWarnings("unchecked")
public void testPointSessions() throws Exception {
    closeCalled.set(0);
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents", inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator =
            new WindowOperator<>(
                    new PointSessionWindows(3000),
                    new TimeWindow.Serializer(),
                    new TupleKeySelector(),
                    BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                    stateDesc,
                    new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                    EventTimeTrigger.create(),
                    0,
                    null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 33), 1000));
    // do a snapshot, close and restore again
    OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
    testHarness.close();
    testHarness.setup();
    testHarness.initializeState(snapshot);
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 33), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 33), 2500));
    testHarness.processWatermark(new Watermark(12000));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-36", 10L, 4000L), 3999));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-67", 0L, 3000L), 2999));
    expectedOutput.add(new Watermark(12000));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
    testHarness.close();
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) OperatorStateHandles(org.apache.flink.streaming.runtime.tasks.OperatorStateHandles) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
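
The PointSessionWindows assigner used above is not shown on this page. Below is a hedged sketch of what such a merging assigner could look like, consistent with the Javadoc and the expected output (a zero-length window for elements whose second field is 33, a normal gap-based session window otherwise). It mirrors the described behavior only; it is not the verbatim Flink test code.

import java.util.Collection;
import java.util.Collections;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner;
import org.apache.flink.streaming.api.windowing.triggers.EventTimeTrigger;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

// Sketch of a "point session" assigner: tuples whose second field equals 33 get a
// window whose start and end are the element's timestamp; everything else gets a
// session window of the configured gap.
public class PointSessionWindowsSketch extends MergingWindowAssigner<Object, TimeWindow> {

    private static final long serialVersionUID = 1L;

    private final long sessionTimeout;

    public PointSessionWindowsSketch(long sessionTimeout) {
        this.sessionTimeout = sessionTimeout;
    }

    @Override
    public Collection<TimeWindow> assignWindows(Object element, long timestamp, WindowAssignerContext context) {
        if (element instanceof Tuple2 && Integer.valueOf(33).equals(((Tuple2<?, ?>) element).f1)) {
            // "Point window": start == end.
            return Collections.singletonList(new TimeWindow(timestamp, timestamp));
        }
        return Collections.singletonList(new TimeWindow(timestamp, timestamp + sessionTimeout));
    }

    @Override
    public void mergeWindows(Collection<TimeWindow> windows, MergeCallback<TimeWindow> callback) {
        // Delegate to the standard session-merging logic for overlapping windows.
        TimeWindow.mergeWindows(windows, callback);
    }

    @Override
    public Trigger<Object, TimeWindow> getDefaultTrigger(StreamExecutionEnvironment env) {
        return EventTimeTrigger.create();
    }

    @Override
    public TypeSerializer<TimeWindow> getWindowSerializer(ExecutionConfig executionConfig) {
        return new TimeWindow.Serializer();
    }

    @Override
    public boolean isEventTime() {
        return true;
    }
}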

Example 70 with ListStateDescriptor

Use of org.apache.flink.api.common.state.ListStateDescriptor in project flink by apache.

Source: class WindowOperatorTest, method testCleanupTimerWithEmptyListStateForTumblingWindows.

@Test
public void testCleanupTimerWithEmptyListStateForTumblingWindows() throws Exception {
    final int WINDOW_SIZE = 2;
    final long LATENESS = 1;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ListStateDescriptor<Tuple2<String, Integer>> windowStateDesc = new ListStateDescriptor<>("window-contents", inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator =
            new WindowOperator<>(
                    TumblingEventTimeWindows.of(Time.of(WINDOW_SIZE, TimeUnit.SECONDS)),
                    new TimeWindow.Serializer(),
                    new TupleKeySelector(),
                    BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                    windowStateDesc,
                    new InternalIterableWindowFunction<>(new PassThroughFunction()),
                    EventTimeTrigger.create(),
                    LATENESS,
                    null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    // normal element
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1599));
    testHarness.processWatermark(new Watermark(1999));
    testHarness.processWatermark(new Watermark(2000));
    testHarness.processWatermark(new Watermark(5000));
    expected.add(new Watermark(1599));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
    // here it fires and purges
    expected.add(new Watermark(1999));
    // here is the cleanup timer
    expected.add(new Watermark(2000));
    expected.add(new Watermark(5000));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());
    testHarness.close();
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
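
The cleanup timer exercised by this test corresponds to the allowed-lateness setting a user passes through the DataStream API. A minimal hedged sketch follows; the input data, key selector, aggregation, and job name are illustrative, not taken from the test.

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class AllowedLatenessSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Illustrative input; event-time timestamps and watermarks are omitted for brevity.
        DataStream<Tuple2<String, Integer>> input =
                env.fromElements(Tuple2.of("key2", 1), Tuple2.of("key2", 2));

        input
            .keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                @Override
                public String getKey(Tuple2<String, Integer> value) {
                    return value.f0;
                }
            })
            // 2-second tumbling event-time windows, as in the test above.
            .window(TumblingEventTimeWindows.of(Time.seconds(2)))
            // Window state is kept until window.maxTimestamp() + allowed lateness
            // (1999 + 1 = 2000 here), which matches the Watermark(2000) the test
            // marks as "here is the cleanup timer".
            .allowedLateness(Time.milliseconds(1))
            .sum(1)
            .print();

        env.execute("allowed-lateness-sketch");
    }
}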

Aggregations

ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 82
Test (org.junit.Test): 60
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 49
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 37
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 33
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 32
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 31
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 29
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer): 27
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 27
EventTimeTrigger (org.apache.flink.streaming.api.windowing.triggers.EventTimeTrigger): 19
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 18
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 18
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 17
RichFunction (org.apache.flink.api.common.functions.RichFunction): 16
TumblingEventTimeWindows (org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows): 15
SlidingEventTimeWindows (org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows): 12
Tuple3 (org.apache.flink.api.java.tuple.Tuple3): 11
PublicEvolving (org.apache.flink.annotation.PublicEvolving): 9
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 9