Search in sources:

Example 46 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

Source: class RetractableTopNFunctionTest, method testConstantRankRangeWithoutOffsetWithRowNumber.

@Test
public void testConstantRankRangeWithoutOffsetWithRowNumber() throws Exception {
    // Top-N over ROW_NUMBER with a constant rank range [1, 2], emitting row numbers.
    AbstractTopNFunction rankFunction =
            createFunction(RankType.ROW_NUMBER, new ConstantRankRange(1, 2), true, true);
    OneInputStreamOperatorTestHarness<RowData, RowData> harness = createTestHarness(rankFunction);
    harness.open();
    harness.processElement(insertRecord("book", 1L, 12));
    harness.processElement(insertRecord("book", 2L, 19));
    harness.processElement(insertRecord("book", 4L, 11));
    harness.processElement(insertRecord("fruit", 4L, 33));
    harness.processElement(insertRecord("fruit", 3L, 44));
    harness.processElement(insertRecord("fruit", 5L, 22));
    List<Object> expected = new ArrayList<>();
    // The first two "book" rows fill the Top-2 directly.
    expected.add(insertRecord("book", 1L, 12, 1L));
    expected.add(insertRecord("book", 2L, 19, 2L));
    // ("book", 4L, 11) takes rank 1, shifting the previous rank-1 row down to rank 2.
    expected.add(updateBeforeRecord("book", 1L, 12, 1L));
    expected.add(updateAfterRecord("book", 4L, 11, 1L));
    expected.add(updateBeforeRecord("book", 2L, 19, 2L));
    expected.add(updateAfterRecord("book", 1L, 12, 2L));
    // The first two "fruit" rows fill the Top-2 directly.
    expected.add(insertRecord("fruit", 4L, 33, 1L));
    expected.add(insertRecord("fruit", 3L, 44, 2L));
    // ("fruit", 5L, 22) takes rank 1, shifting the previous rank-1 row down to rank 2.
    expected.add(updateBeforeRecord("fruit", 4L, 33, 1L));
    expected.add(updateAfterRecord("fruit", 5L, 22, 1L));
    expected.add(updateBeforeRecord("fruit", 3L, 44, 2L));
    expected.add(updateAfterRecord("fruit", 4L, 33, 2L));
    assertorWithRowNumber.assertOutputEquals("output wrong.", expected, harness.getOutput());
    // Snapshot and close; the ranking state must be recoverable from the snapshot.
    OperatorSubtaskState savedState = harness.snapshot(0L, 0);
    harness.close();
    expected.clear();
    rankFunction = createFunction(RankType.ROW_NUMBER, new ConstantRankRange(1, 2), true, true);
    harness = createTestHarness(rankFunction);
    harness.setup();
    harness.initializeState(savedState);
    harness.open();
    // A new best "book" row after restore must update ranks against the restored state.
    harness.processElement(insertRecord("book", 1L, 10));
    expected.add(updateBeforeRecord("book", 4L, 11, 1L));
    expected.add(updateAfterRecord("book", 1L, 10, 1L));
    expected.add(updateBeforeRecord("book", 1L, 12, 2L));
    expected.add(updateAfterRecord("book", 4L, 11, 2L));
    assertorWithRowNumber.assertOutputEquals("output wrong.", expected, harness.getOutput());
    harness.close();
}
Also used : RowData(org.apache.flink.table.data.RowData) ArrayList(java.util.ArrayList) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)

Example 47 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

Source: class WindowRankOperatorTest, method testTop2WindowsWithoutRankNumber.

@Test
public void testTop2WindowsWithoutRankNumber() throws Exception {
    // Windowed Top-2 (rank range [1, 2]) configured to omit the rank-number column.
    SlicingWindowOperator<RowData, ?> windowRankOperator =
            WindowRankOperatorBuilder.builder()
                    .inputSerializer(INPUT_ROW_SER)
                    .shiftTimeZone(shiftTimeZone)
                    .keySerializer(KEY_SER)
                    .sortKeyComparator(GENERATED_SORT_KEY_COMPARATOR)
                    .sortKeySelector(SORT_KEY_SELECTOR)
                    .outputRankNumber(false)
                    .rankStart(1)
                    .rankEnd(2)
                    .windowEndIndex(WINDOW_END_INDEX)
                    .build();
    OneInputStreamOperatorTestHarness<RowData, RowData> harness = createTestHarness(windowRankOperator);
    harness.setup(OUT_SERIALIZER_WITHOUT_RANK_NUMBER);
    harness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    // Feed elements out of event-time order across three windows (end 999, 1999, 3999).
    harness.processElement(insertRecord("key2", 1, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 4, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 5, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 3, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 2, toUtcTimestampMills(1999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 7, toUtcTimestampMills(3999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 8, toUtcTimestampMills(3999L, shiftTimeZone)));
    harness.processElement(insertRecord("key2", 1, toUtcTimestampMills(3999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 2, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 1, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 3, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 3, toUtcTimestampMills(999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 4, toUtcTimestampMills(1999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 6, toUtcTimestampMills(1999L, shiftTimeZone)));
    harness.processElement(insertRecord("key1", 7, toUtcTimestampMills(1999L, shiftTimeZone)));
    // Watermark 999 fires the first window: the two smallest rows per key are emitted.
    harness.processWatermark(new Watermark(999));
    expected.add(insertRecord("key1", 1, toUtcTimestampMills(999L, shiftTimeZone)));
    expected.add(insertRecord("key1", 2, toUtcTimestampMills(999L, shiftTimeZone)));
    expected.add(insertRecord("key2", 1, toUtcTimestampMills(999L, shiftTimeZone)));
    expected.add(insertRecord("key2", 3, toUtcTimestampMills(999L, shiftTimeZone)));
    expected.add(new Watermark(999));
    ASSERTER_WITHOUT_RANK_NUMBER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Watermark 1999 fires the second window.
    harness.processWatermark(new Watermark(1999));
    expected.add(insertRecord("key1", 4, toUtcTimestampMills(1999L, shiftTimeZone)));
    expected.add(insertRecord("key1", 6, toUtcTimestampMills(1999L, shiftTimeZone)));
    expected.add(insertRecord("key2", 2, toUtcTimestampMills(1999L, shiftTimeZone)));
    expected.add(new Watermark(1999));
    ASSERTER_WITHOUT_RANK_NUMBER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    // Snapshot, close, and restore; the pending third window must survive.
    harness.prepareSnapshotPreBarrier(0L);
    OperatorSubtaskState checkpoint = harness.snapshot(0L, 0);
    harness.close();
    expected.clear();
    harness = createTestHarness(windowRankOperator);
    harness.setup(OUT_SERIALIZER_WITHOUT_RANK_NUMBER);
    harness.initializeState(checkpoint);
    harness.open();
    // Watermark 3999 fires the restored third window.
    harness.processWatermark(new Watermark(3999));
    expected.add(insertRecord("key2", 1, toUtcTimestampMills(3999L, shiftTimeZone)));
    expected.add(insertRecord("key2", 7, toUtcTimestampMills(3999L, shiftTimeZone)));
    expected.add(new Watermark(3999));
    ASSERTER_WITHOUT_RANK_NUMBER.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    harness.close();
}
Also used : RowData(org.apache.flink.table.data.RowData) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)

Example 48 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

Source: class RowTimeSortOperatorTest, method testSortOnTwoFields.

@Test
public void testSortOnTwoFields() throws Exception {
    InternalTypeInfo<RowData> rowType =
            InternalTypeInfo.ofFields(new IntType(), new BigIntType(), VarCharType.STRING_TYPE, new IntType());
    // In production the row-time index must be 0; using 1 here only simplifies the test setup.
    int rowTimeIndex = 1;
    // Generated comparator stub that delegates to the shared int comparator.
    GeneratedRecordComparator generatedComparator = new GeneratedRecordComparator("", "", new Object[0]) {

        private static final long serialVersionUID = -6067266199060901331L;

        @Override
        public RecordComparator newInstance(ClassLoader classLoader) {
            return IntRecordComparator.INSTANCE;
        }
    };
    RowDataHarnessAssertor harnessAssertor = new RowDataHarnessAssertor(rowType.toRowFieldTypes());
    RowTimeSortOperator sortOperator = createSortOperator(rowType, rowTimeIndex, generatedComparator);
    OneInputStreamOperatorTestHarness<RowData, RowData> harness = createTestHarness(sortOperator);
    harness.open();
    harness.processElement(insertRecord(3, 3L, "Hello world", 3));
    harness.processElement(insertRecord(2, 2L, "Hello", 2));
    harness.processElement(insertRecord(6, 2L, "Luke Skywalker", 6));
    harness.processElement(insertRecord(5, 3L, "I am fine.", 5));
    harness.processElement(insertRecord(7, 1L, "Comment#1", 7));
    harness.processElement(insertRecord(9, 4L, "Comment#3", 9));
    harness.processElement(insertRecord(10, 4L, "Comment#4", 10));
    harness.processElement(insertRecord(8, 4L, "Comment#2", 8));
    harness.processElement(insertRecord(1, 1L, "Hi", 2));
    harness.processElement(insertRecord(1, 1L, "Hi", 1));
    harness.processElement(insertRecord(4, 3L, "Helloworld, how are you?", 4));
    harness.processElement(insertRecord(4, 5L, "Hello, how are you?", 4));
    // Watermark 4 releases every row with timestamp <= 4, sorted by timestamp then first field.
    harness.processWatermark(new Watermark(4L));
    List<Object> expected = new ArrayList<>();
    expected.add(insertRecord(1, 1L, "Hi", 2));
    expected.add(insertRecord(1, 1L, "Hi", 1));
    expected.add(insertRecord(7, 1L, "Comment#1", 7));
    expected.add(insertRecord(2, 2L, "Hello", 2));
    expected.add(insertRecord(6, 2L, "Luke Skywalker", 6));
    expected.add(insertRecord(3, 3L, "Hello world", 3));
    expected.add(insertRecord(4, 3L, "Helloworld, how are you?", 4));
    expected.add(insertRecord(5, 3L, "I am fine.", 5));
    expected.add(insertRecord(8, 4L, "Comment#2", 8));
    expected.add(insertRecord(9, 4L, "Comment#3", 9));
    expected.add(insertRecord(10, 4L, "Comment#4", 10));
    expected.add(new Watermark(4L));
    // Snapshot so the unemitted row (timestamp 5) can be recovered from state.
    OperatorSubtaskState savedState = harness.snapshot(0L, 0);
    harnessAssertor.assertOutputEquals("output wrong.", expected, harness.getOutput());
    harness.close();
    expected.clear();
    sortOperator = createSortOperator(rowType, rowTimeIndex, generatedComparator);
    harness = createTestHarness(sortOperator);
    harness.initializeState(savedState);
    harness.open();
    // This element's timestamp (3) is behind the restored watermark, so it is dropped as late.
    harness.processElement(insertRecord(5, 3L, "I am fine.", 6));
    harness.processWatermark(new Watermark(5L));
    expected.add(insertRecord(4, 5L, "Hello, how are you?", 4));
    expected.add(new Watermark(5L));
    harnessAssertor.assertOutputEquals("output wrong.", expected, harness.getOutput());
    // Later watermarks pass through but release no further rows.
    harness.processWatermark(new Watermark(11L));
    harness.processWatermark(new Watermark(12L));
    expected.add(new Watermark(11L));
    expected.add(new Watermark(12L));
    harnessAssertor.assertOutputEquals("output wrong.", expected, harness.getOutput());
}
Also used : ArrayList(java.util.ArrayList) BigIntType(org.apache.flink.table.types.logical.BigIntType) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) BigIntType(org.apache.flink.table.types.logical.BigIntType) IntType(org.apache.flink.table.types.logical.IntType) RowData(org.apache.flink.table.data.RowData) RowDataHarnessAssertor(org.apache.flink.table.runtime.util.RowDataHarnessAssertor) GeneratedRecordComparator(org.apache.flink.table.runtime.generated.GeneratedRecordComparator) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)

Example 49 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

Source: class StreamSortOperatorTest, method test.

@Test
public void test() throws Exception {
    StreamSortOperator sortOperator = createSortOperator();
    OneInputStreamOperatorTestHarness<RowData, BinaryRowData> harness = createTestHarness(sortOperator);
    harness.open();
    harness.processElement(insertRecord("hi", 1));
    harness.processElement(insertRecord("hello", 2));
    harness.processElement(insertRecord("world", 3));
    harness.processElement(insertRecord("word", 4));
    // Output on close is expected in sorted order of the string field.
    List<Object> expected = new ArrayList<>();
    expected.add(insertRecord("hello", 2));
    expected.add(insertRecord("hi", 1));
    expected.add(insertRecord("word", 4));
    expected.add(insertRecord("world", 3));
    // Snapshot so the buffered rows can be recovered from state.
    OperatorSubtaskState savedState = harness.snapshot(0L, 0);
    harness.close();
    harness.assertOutputEquals("output wrong.", expected, harness.getOutput());
    expected.clear();
    sortOperator = createSortOperator();
    harness = createTestHarness(sortOperator);
    harness.initializeState(savedState);
    harness.open();
    harness.processElement(insertRecord("abc", 1));
    harness.processElement(insertRecord("aa", 1));
    harness.close();
    // Restored rows and new rows are emitted together, fully sorted.
    expected.add(insertRecord("aa", 1));
    expected.add(insertRecord("abc", 1));
    expected.add(insertRecord("hello", 2));
    expected.add(insertRecord("hi", 1));
    expected.add(insertRecord("word", 4));
    expected.add(insertRecord("world", 3));
    assertor.assertOutputEquals("output wrong.", expected, harness.getOutput());
}
Also used : RowData(org.apache.flink.table.data.RowData) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) ArrayList(java.util.ArrayList) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)

Example 50 with OperatorSubtaskState

use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.

Source: class WindowOperatorTest, method testEventTimeSessionWindows.

@Test
@SuppressWarnings("unchecked")
public void testEventTimeSessionWindows() throws Exception {
    closeCalled.set(0);
    // Event-time session windows with a 3-second gap; event time is attribute index 2.
    WindowOperator windowOperator =
            WindowOperatorBuilder.builder()
                    .withInputFields(inputFieldTypes)
                    .withShiftTimezone(shiftTimeZone)
                    .session(Duration.ofSeconds(3))
                    .withEventTime(2)
                    .aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);
    OneInputStreamOperatorTestHarness<RowData, RowData> harness = createTestHarness(windowOperator);
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    harness.open();
    // Feed elements out of event-time order.
    harness.processElement(insertRecord("key2", 1, 0L));
    harness.processElement(insertRecord("key2", 2, 1000L));
    harness.processElement(insertRecord("key2", 3, 2500L));
    harness.processElement(insertRecord("key1", 1, 10L));
    harness.processElement(insertRecord("key1", 2, 1000L));
    // Snapshot, close, and restore; session state must carry over.
    OperatorSubtaskState savedState = harness.snapshot(0L, 0);
    harness.close();
    expected.clear();
    harness = createTestHarness(windowOperator);
    harness.setup();
    harness.initializeState(savedState);
    harness.open();
    assertEquals(0L, windowOperator.getWatermarkLatency().getValue());
    harness.processElement(insertRecord("key1", 3, 2500L));
    harness.processElement(insertRecord("key2", 4, 5501L));
    harness.processElement(insertRecord("key2", 5, 6000L));
    harness.processElement(insertRecord("key2", 5, 6000L));
    harness.processElement(insertRecord("key2", 6, 6050L));
    // Watermark 12000 closes all sessions seen so far.
    harness.processWatermark(new Watermark(12000));
    expected.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 6L, 3L, localMills(10L), localMills(5500L), localMills(5499L))));
    expected.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 6L, 3L, localMills(0L), localMills(5500L), localMills(5499L))));
    expected.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 20L, 4L, localMills(5501L), localMills(9050L), localMills(9049L))));
    expected.add(new Watermark(12000));
    // The first element below is late (timestamp 4000 < watermark 12000) and is dropped.
    harness.processElement(insertRecord("key1", 3, 4000L));
    harness.processElement(insertRecord("key2", 10, 15000L));
    harness.processElement(insertRecord("key2", 20, 15000L));
    harness.processWatermark(new Watermark(17999));
    expected.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 30L, 2L, localMills(15000L), localMills(18000L), localMills(17999L))));
    expected.add(new Watermark(17999));
    assertor.assertOutputEqualsSorted("Output was not correct.", expected, harness.getOutput());
    harness.setProcessingTime(18000);
    assertEquals(1L, windowOperator.getWatermarkLatency().getValue());
    harness.close();
    // Once here plus once in the earlier restore round-trip.
    assertEquals("Close was not called.", 2, closeCalled.get());
    assertEquals(1, windowOperator.getNumLateRecordsDropped().getCount());
}
Also used : JoinedRowData(org.apache.flink.table.data.utils.JoinedRowData) GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)

Aggregations

OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState)178 Test (org.junit.Test)142 Watermark (org.apache.flink.streaming.api.watermark.Watermark)52 ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue)37 RowData (org.apache.flink.table.data.RowData)31 ArrayList (java.util.ArrayList)28 KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness)25 Tuple2 (org.apache.flink.api.java.tuple.Tuple2)23 Map (java.util.Map)22 OperatorID (org.apache.flink.runtime.jobgraph.OperatorID)21 OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness)19 HashMap (java.util.HashMap)18 ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)18 TypeHint (org.apache.flink.api.common.typeinfo.TypeHint)16 Event (org.apache.flink.cep.Event)16 SubEvent (org.apache.flink.cep.SubEvent)16 TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow)15 GenericRowData (org.apache.flink.table.data.GenericRowData)15 Ignore (org.junit.Ignore)15 TaskStateSnapshot (org.apache.flink.runtime.checkpoint.TaskStateSnapshot)14