Search in sources :

Example 66 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

From the class RowTimeWindowDeduplicateOperatorTest, method testRowTimeWindowDeduplicateKeepLastRow:

@Test
public void testRowTimeWindowDeduplicateKeepLastRow() throws Exception {
    // Build a window-deduplicate operator in keep-last-row mode; rowtimeIndex(1)
    // points at field 1 of the input row (presumably the event-time column — confirm
    // against the builder's contract).
    SlicingWindowOperator<RowData, ?> op =
            RowTimeWindowDeduplicateOperatorBuilder.builder()
                    .inputSerializer(INPUT_ROW_SER)
                    .shiftTimeZone(shiftTimeZone)
                    .keySerializer(KEY_SER)
                    .keepLastRow(true)
                    .rowtimeIndex(1)
                    .windowEndIndex(WINDOW_END_INDEX)
                    .build();
    OneInputStreamOperatorTestHarness<RowData, RowData> harness = createTestHarness(op);
    harness.setup(OUT_SERIALIZER);
    harness.open();
    // Expected records accumulate here and are compared after every watermark advance.
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    // Feed the input out of order within each window.
    harness.processElement(insertRecord("key2", 1L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 4L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 5L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 1002L, toUtcTimestampMills(1999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 3007L, toUtcTimestampMills(3999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 3008L, toUtcTimestampMills(3999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 3001L, toUtcTimestampMills(3999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 2L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 1L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 1004L, toUtcTimestampMills(1999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 1006L, toUtcTimestampMills(1999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 1007L, toUtcTimestampMills(1999L, shiftTimeZone)));
    // Window ending at 999: only the last row per key survives deduplication.
    harness.processWatermark(new Watermark(999));
    expected.add(insertRecord("key1", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
    expected.add(insertRecord("key2", 5L, toUtcTimestampMills(999L, shiftTimeZone)));
    expected.add(new Watermark(999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Window ending at 1999.
    harness.processWatermark(new Watermark(1999));
    expected.add(insertRecord("key1", 1007L, toUtcTimestampMills(1999L, shiftTimeZone)));
    expected.add(insertRecord("key2", 1002L, toUtcTimestampMills(1999L, shiftTimeZone)));
    expected.add(new Watermark(1999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Snapshot the operator state, close, and restore into a fresh harness.
    harness.prepareSnapshotPreBarrier(0L);
    OperatorSubtaskState snapshot = harness.snapshot(0L, 0);
    harness.close();
    expected.clear();
    harness = createTestHarness(op);
    harness.setup(OUT_SERIALIZER);
    harness.initializeState(snapshot);
    harness.open();
    // The pending window ending at 3999 fires from the restored state.
    harness.processWatermark(new Watermark(3999));
    expected.add(insertRecord("key2", 3008L, toUtcTimestampMills(3999L, shiftTimeZone)));
    expected.add(new Watermark(3999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // An element behind the watermark is late and must be dropped.
    harness.processElement(insertRecord("key2", 3001L, toUtcTimestampMills(3500L, shiftTimeZone)));
    harness.processWatermark(new Watermark(4999));
    expected.add(new Watermark(4999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // The late-record metric counts exactly the one dropped element.
    assertEquals(1, op.getNumLateRecordsDropped().getCount());
    harness.close();
}
Also used : RowData(org.apache.flink.table.data.RowData) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)

Example 67 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

From the class SlicingWindowAggOperatorTest, method testEventTimeHoppingWindows:

@Test
public void testEventTimeHoppingWindows() throws Exception {
    // Hopping slice assigner with rowtime at index 2; the two Durations are
    // presumably window size 3s and slide 1s — confirm against SliceAssigners.hopping.
    final SliceAssigner assigner =
            SliceAssigners.hopping(2, shiftTimeZone, Duration.ofSeconds(3), Duration.ofSeconds(1));
    final SumAndCountAggsFunction aggsFunction = new SumAndCountAggsFunction(assigner);
    SlicingWindowOperator<RowData, ?> op =
            SlicingWindowAggOperatorBuilder.builder()
                    .inputSerializer(INPUT_ROW_SER)
                    .shiftTimeZone(shiftTimeZone)
                    .keySerializer(KEY_SER)
                    .assigner(assigner)
                    .aggregate(wrapGenerated(aggsFunction), ACC_SER)
                    .countStarIndex(1)
                    .build();
    OneInputStreamOperatorTestHarness<RowData, RowData> harness = createTestHarness(op);
    harness.setup(OUT_SERIALIZER);
    harness.open();
    // Expected output, compared after every watermark advance.
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    // Feed the elements out of order.
    harness.processElement(insertRecord("key2", 1, fromEpochMillis(3999L)));
    harness.processElement(insertRecord("key2", 1, fromEpochMillis(3000L)));
    harness.processElement(insertRecord("key1", 1, fromEpochMillis(20L)));
    harness.processElement(insertRecord("key1", 1, fromEpochMillis(0L)));
    harness.processElement(insertRecord("key1", 1, fromEpochMillis(999L)));
    harness.processElement(insertRecord("key2", 1, fromEpochMillis(1998L)));
    harness.processElement(insertRecord("key2", 1, fromEpochMillis(1999L)));
    harness.processElement(insertRecord("key2", 1, fromEpochMillis(1000L)));
    // Window [-2K, 1K) fires for key1 only.
    harness.processWatermark(new Watermark(999));
    expected.add(insertRecord("key1", 3L, 3L, localMills(-2000L), localMills(1000L)));
    expected.add(new Watermark(999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Window [-1K, 2K) fires for both keys.
    harness.processWatermark(new Watermark(1999));
    expected.add(insertRecord("key1", 3L, 3L, localMills(-1000L), localMills(2000L)));
    expected.add(insertRecord("key2", 3L, 3L, localMills(-1000L), localMills(2000L)));
    expected.add(new Watermark(1999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Window [0, 3K) fires for both keys.
    harness.processWatermark(new Watermark(2999));
    expected.add(insertRecord("key1", 3L, 3L, localMills(0L), localMills(3000L)));
    expected.add(insertRecord("key2", 3L, 3L, localMills(0L), localMills(3000L)));
    expected.add(new Watermark(2999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Snapshot the operator state, close, and restore into a fresh harness.
    harness.prepareSnapshotPreBarrier(0L);
    OperatorSubtaskState snapshot = harness.snapshot(0L, 0);
    harness.close();
    assertTrue("Close was not called.", aggsFunction.closeCalled.get() > 0);
    expected.clear();
    harness = createTestHarness(op);
    harness.setup(OUT_SERIALIZER);
    harness.initializeState(snapshot);
    harness.open();
    // Window [1K, 4K) fires from the restored state.
    harness.processWatermark(new Watermark(3999));
    expected.add(insertRecord("key2", 5L, 5L, localMills(1000L), localMills(4000L)));
    expected.add(new Watermark(3999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Late for [1K, 4K), but still accumulated into the open windows [2K, 5K) and [3K, 6K).
    harness.processElement(insertRecord("key2", 1, fromEpochMillis(3500L)));
    harness.processWatermark(new Watermark(4999));
    expected.add(insertRecord("key2", 3L, 3L, localMills(2000L), localMills(5000L)));
    expected.add(new Watermark(4999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Late for every window it could belong to, so it is dropped.
    harness.processElement(insertRecord("key1", 1, fromEpochMillis(2999L)));
    harness.processWatermark(new Watermark(5999));
    expected.add(insertRecord("key2", 3L, 3L, localMills(3000L), localMills(6000L)));
    expected.add(new Watermark(5999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Watermarks past the last window only get forwarded; no more window output.
    harness.processWatermark(new Watermark(6999));
    harness.processWatermark(new Watermark(7999));
    expected.add(new Watermark(6999));
    expected.add(new Watermark(7999));
    ASSERTER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Exactly one record (the key1 element at 2999) was counted as late-dropped.
    assertEquals(1, op.getNumLateRecordsDropped().getCount());
    harness.close();
}
Also used : SliceAssigner(org.apache.flink.table.runtime.operators.window.slicing.SliceAssigner) GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) JoinedRowData(org.apache.flink.table.data.utils.JoinedRowData) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)

Example 68 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

From the class PojoSerializerUpgradeTest, method testPojoSerializerUpgrade:

private void testPojoSerializerUpgrade(String classSourceA, String classSourceB, boolean hasBField, boolean isKeyedState) throws Exception {
    final Configuration taskConfiguration = new Configuration();
    final ExecutionConfig executionConfig = new ExecutionConfig();
    final KeySelector<Long, Long> keySelector = new IdentityKeySelector<>();
    final Collection<Long> inputs = Arrays.asList(1L, 2L, 45L, 67L, 1337L);
    // Phase A: compile the first POJO version into its own folder/classloader and
    // run the operator from scratch, capturing its state snapshot.
    final File rootA = temporaryFolder.newFolder();
    compileClass(writeSourceFile(rootA, POJO_NAME + ".java", classSourceA));
    final ClassLoader loaderA =
            URLClassLoader.newInstance(
                    new URL[] { rootA.toURI().toURL() },
                    Thread.currentThread().getContextClassLoader());
    OperatorSubtaskState stateHandles =
            runOperator(
                    taskConfiguration,
                    executionConfig,
                    new StreamMap<>(new StatefulMapper(isKeyedState, false, hasBField)),
                    keySelector,
                    isKeyedState,
                    stateBackend,
                    loaderA,
                    // no prior state for the first run
                    null,
                    inputs);
    // Phase B: compile the second POJO version in a separate folder/classloader and
    // restore from the phase-A snapshot (the `verify` flag of StatefulMapper is now true).
    final File rootB = temporaryFolder.newFolder();
    compileClass(writeSourceFile(rootB, POJO_NAME + ".java", classSourceB));
    final ClassLoader loaderB =
            URLClassLoader.newInstance(
                    new URL[] { rootB.toURI().toURL() },
                    Thread.currentThread().getContextClassLoader());
    runOperator(
            taskConfiguration,
            executionConfig,
            new StreamMap<>(new StatefulMapper(isKeyedState, true, hasBField)),
            keySelector,
            isKeyedState,
            stateBackend,
            loaderB,
            stateHandles,
            inputs);
}
Also used : Configuration(org.apache.flink.configuration.Configuration) URLClassLoader(java.net.URLClassLoader) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) File(java.io.File) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState)

Example 69 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

From the class CoordinatorEventsExactlyOnceITCase, method createSnapshot:

static TaskStateSnapshot createSnapshot(StreamStateHandle handle, OperatorID operatorId) {
    // Describe the handle as a single SPLIT_DISTRIBUTE partition starting at offset 0.
    final OperatorStateHandle.StateMetaInfo splitMeta =
            new OperatorStateHandle.StateMetaInfo(new long[] { 0 }, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE);
    // Register the stream handle under a single (arbitrary) state name.
    final OperatorStateHandle managedState =
            new OperatorStreamStateHandle(
                    Collections.singletonMap("état_et_moi_:_ça_fait_deux", splitMeta), handle);
    // Wrap it as managed operator state for the given operator.
    final OperatorSubtaskState subtaskState =
            OperatorSubtaskState.builder().setManagedOperatorState(managedState).build();
    return new TaskStateSnapshot(Collections.singletonMap(operatorId, subtaskState));
}
Also used : TaskStateSnapshot(org.apache.flink.runtime.checkpoint.TaskStateSnapshot) OperatorStreamStateHandle(org.apache.flink.runtime.state.OperatorStreamStateHandle) OperatorStateHandle(org.apache.flink.runtime.state.OperatorStateHandle) PrioritizedOperatorSubtaskState(org.apache.flink.runtime.checkpoint.PrioritizedOperatorSubtaskState) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState)

Example 70 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

From the class AbstractStreamOperatorTest, method testEnsureProcessingTimeTimerRegisteredOnRestore:

/**
 * Verify that a low-level timer is set for processing-time timers in case of restore.
 */
@Test
public void testEnsureProcessingTimeTimerRegisteredOnRestore() throws Exception {
    OperatorSubtaskState snapshot;
    // First harness: register processing-time timers (at 20 for key 1, at 10 for key 0),
    // store per-key state, then snapshot before any timer fires.
    try (KeyedOneInputStreamOperatorTestHarness<Integer, Tuple2<Integer, String>, String> harness =
            createTestHarness()) {
        harness.open();
        harness.setProcessingTime(0L);
        harness.processElement(new Tuple2<>(1, "SET_PROC_TIME_TIMER:20"), 0);
        harness.processElement(new Tuple2<>(0, "SET_STATE:HELLO"), 0);
        harness.processElement(new Tuple2<>(1, "SET_STATE:CIAO"), 0);
        harness.processElement(new Tuple2<>(0, "SET_PROC_TIME_TIMER:10"), 0);
        snapshot = harness.snapshot(0, 0);
    }
    // Second harness: restore from the snapshot; advancing processing time must
    // fire the restored timers, which emit the restored per-key state.
    try (KeyedOneInputStreamOperatorTestHarness<Integer, Tuple2<Integer, String>, String> restored =
            createTestHarness()) {
        restored.setProcessingTime(0L);
        restored.setup();
        restored.initializeState(snapshot);
        restored.open();
        restored.setProcessingTime(10L);
        assertThat(extractResult(restored), contains("ON_PROC_TIME:HELLO"));
        restored.setProcessingTime(20L);
        assertThat(extractResult(restored), contains("ON_PROC_TIME:CIAO"));
    }
}
Also used : Tuple2(org.apache.flink.api.java.tuple.Tuple2) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)

Aggregations

OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState)178 Test (org.junit.Test)142 Watermark (org.apache.flink.streaming.api.watermark.Watermark)52 ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue)37 RowData (org.apache.flink.table.data.RowData)31 ArrayList (java.util.ArrayList)28 KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness)25 Tuple2 (org.apache.flink.api.java.tuple.Tuple2)23 Map (java.util.Map)22 OperatorID (org.apache.flink.runtime.jobgraph.OperatorID)21 OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness)19 HashMap (java.util.HashMap)18 ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)18 TypeHint (org.apache.flink.api.common.typeinfo.TypeHint)16 Event (org.apache.flink.cep.Event)16 SubEvent (org.apache.flink.cep.SubEvent)16 TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow)15 GenericRowData (org.apache.flink.table.data.GenericRowData)15 Ignore (org.junit.Ignore)15 TaskStateSnapshot (org.apache.flink.runtime.checkpoint.TaskStateSnapshot)14