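Every example on this page centers on org.apache.flink.streaming.runtime.streamrecord.StreamRecord. As a quick orientation, here is a minimal, self-contained sketch of the constructors and accessors the examples rely on; the class name StreamRecordBasics is purely illustrative.

import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

public class StreamRecordBasics {
    public static void main(String[] args) {
        // a record without an event-time timestamp
        StreamRecord<String> untimed = new StreamRecord<>("payload");
        // a record carrying an event-time timestamp
        StreamRecord<String> timed = new StreamRecord<>("payload", 42L);

        System.out.println(untimed.getValue());   // payload
        System.out.println(timed.hasTimestamp()); // true
        System.out.println(timed.getTimestamp()); // 42
    }
}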

Example 91 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From the class CoBroadcastWithNonKeyedOperatorTest, method testScaleUp:

@Test
public void testScaleUp() throws Exception {
    final Set<String> keysToRegister = new HashSet<>();
    keysToRegister.add("test1");
    keysToRegister.add("test2");
    keysToRegister.add("test3");
    final OperatorSubtaskState mergedSnapshot;
    try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 = getInitializedTestHarness(new TestFunctionWithOutput(keysToRegister), 10, 2, 0, STATE_DESCRIPTOR);
        TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 = getInitializedTestHarness(new TestFunctionWithOutput(keysToRegister), 10, 2, 1, STATE_DESCRIPTOR)) {
        // make sure all operators have the same state
        testHarness1.processElement2(new StreamRecord<>(3));
        testHarness2.processElement2(new StreamRecord<>(3));
        mergedSnapshot = AbstractStreamOperatorTestHarness.repackageState(testHarness1.snapshot(0L, 0L), testHarness2.snapshot(0L, 0L));
    }
    final Set<String> expected = new HashSet<>(3);
    expected.add("test1=3");
    expected.add("test2=3");
    expected.add("test3=3");
    final OperatorSubtaskState initState1 = repartitionInitState(mergedSnapshot, 10, 2, 3, 0);
    final OperatorSubtaskState initState2 = repartitionInitState(mergedSnapshot, 10, 2, 3, 1);
    final OperatorSubtaskState initState3 = repartitionInitState(mergedSnapshot, 10, 2, 3, 2);
    try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 = getInitializedTestHarness(new TestFunctionWithOutput(keysToRegister), 10, 3, 0, initState1, STATE_DESCRIPTOR);
        TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 = getInitializedTestHarness(new TestFunctionWithOutput(keysToRegister), 10, 3, 1, initState2, STATE_DESCRIPTOR);
        TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness3 = getInitializedTestHarness(new TestFunctionWithOutput(keysToRegister), 10, 3, 2, initState3, STATE_DESCRIPTOR)) {
        testHarness1.processElement1(new StreamRecord<>("trigger"));
        testHarness2.processElement1(new StreamRecord<>("trigger"));
        testHarness3.processElement1(new StreamRecord<>("trigger"));
        Queue<?> output1 = testHarness1.getOutput();
        Queue<?> output2 = testHarness2.getOutput();
        Queue<?> output3 = testHarness3.getOutput();
        Assert.assertEquals(expected.size(), output1.size());
        for (Object o : output1) {
            StreamRecord<String> rec = (StreamRecord<String>) o;
            Assert.assertTrue(expected.contains(rec.getValue()));
        }
        Assert.assertEquals(expected.size(), output2.size());
        for (Object o : output2) {
            StreamRecord<String> rec = (StreamRecord<String>) o;
            Assert.assertTrue(expected.contains(rec.getValue()));
        }
        Assert.assertEquals(expected.size(), output3.size());
        for (Object o : output3) {
            StreamRecord<String> rec = (StreamRecord<String>) o;
            Assert.assertTrue(expected.contains(rec.getValue()));
        }
    }
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState), HashSet (java.util.HashSet), Test (org.junit.Test)
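The TestFunctionWithOutput helper used above lives in the Flink test sources and is not reproduced on this page. A plausible minimal equivalent, written only against the public BroadcastProcessFunction API, would store every registered key under the broadcast value and dump the broadcast state as "key=value" strings when the "trigger" element arrives, which is what the test1=3 expectations check. This is an assumption about the helper's behavior, not the actual Flink code:

import java.util.Map;
import java.util.Set;

import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

// Hypothetical stand-in for TestFunctionWithOutput and STATE_DESCRIPTOR.
class TestFunctionWithOutputSketch extends BroadcastProcessFunction<String, Integer, String> {

    static final MapStateDescriptor<String, Integer> STATE_DESCRIPTOR =
            new MapStateDescriptor<>(
                    "broadcast-state", BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);

    private final Set<String> keysToRegister;

    TestFunctionWithOutputSketch(Set<String> keysToRegister) {
        this.keysToRegister = keysToRegister;
    }

    @Override
    public void processBroadcastElement(Integer value, Context ctx, Collector<String> out) throws Exception {
        // every parallel instance sees the broadcast element and stores the same entries
        for (String key : keysToRegister) {
            ctx.getBroadcastState(STATE_DESCRIPTOR).put(key, value);
        }
    }

    @Override
    public void processElement(String value, ReadOnlyContext ctx, Collector<String> out) throws Exception {
        // the "trigger" element reads back the (restored) broadcast state and emits it
        for (Map.Entry<String, Integer> entry : ctx.getBroadcastState(STATE_DESCRIPTOR).immutableEntries()) {
            out.collect(entry.getKey() + "=" + entry.getValue());
        }
    }
}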

Example 92 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From the class StreamElementQueueTest, method testPut:

@Test
public void testPut() {
    StreamElementQueue<Integer> queue = createStreamElementQueue(2);
    Watermark watermark = new Watermark(0L);
    StreamRecord<Integer> streamRecord = new StreamRecord<>(42, 1L);
    // add two elements to reach capacity
    assertTrue(queue.tryPut(watermark).isPresent());
    assertTrue(queue.tryPut(streamRecord).isPresent());
    assertEquals(2, queue.size());
    // queue full, cannot add new element
    assertFalse(queue.tryPut(new Watermark(2L)).isPresent());
    // check if expected values are returned (for checkpointing)
    assertEquals(Arrays.asList(watermark, streamRecord), queue.values());
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
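Because the queue holds both watermarks and records, anything that iterates queue.values() (the checkpointing path mentioned in the comment above) has to tell the two StreamElement kinds apart. Below is a minimal sketch using only the public StreamElement type checks; the class name StreamElementInspection is illustrative:

import java.util.Arrays;
import java.util.List;

import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.StreamElement;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

public class StreamElementInspection {
    public static void main(String[] args) {
        // the same mix of elements that queue.values() returns in the test above
        List<StreamElement> values = Arrays.asList(new Watermark(0L), new StreamRecord<>(42, 1L));
        for (StreamElement element : values) {
            if (element.isWatermark()) {
                System.out.println("watermark @ " + element.asWatermark().getTimestamp());
            } else if (element.isRecord()) {
                System.out.println("record: " + element.<Integer>asRecord().getValue());
            }
        }
    }
}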

Example 93 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From the class CoBroadcastWithKeyedOperatorTest, method testScaleUp:

@Test
public void testScaleUp() throws Exception {
    final Set<String> keysToRegister = new HashSet<>();
    keysToRegister.add("test1");
    keysToRegister.add("test2");
    keysToRegister.add("test3");
    final OperatorSubtaskState mergedSnapshot;
    try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 = getInitializedTestHarness(BasicTypeInfo.STRING_TYPE_INFO, new IdentityKeySelector<>(), new TestFunctionWithOutput(keysToRegister), 10, 2, 0);
        TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 = getInitializedTestHarness(BasicTypeInfo.STRING_TYPE_INFO, new IdentityKeySelector<>(), new TestFunctionWithOutput(keysToRegister), 10, 2, 1)) {
        // make sure all operators have the same state
        testHarness1.processElement2(new StreamRecord<>(3));
        testHarness2.processElement2(new StreamRecord<>(3));
        mergedSnapshot = AbstractStreamOperatorTestHarness.repackageState(testHarness1.snapshot(0L, 0L), testHarness2.snapshot(0L, 0L));
    }
    final Set<String> expected = new HashSet<>(3);
    expected.add("test1=3");
    expected.add("test2=3");
    expected.add("test3=3");
    OperatorSubtaskState operatorSubtaskState1 = repartitionInitState(mergedSnapshot, 10, 2, 3, 0);
    OperatorSubtaskState operatorSubtaskState2 = repartitionInitState(mergedSnapshot, 10, 2, 3, 1);
    OperatorSubtaskState operatorSubtaskState3 = repartitionInitState(mergedSnapshot, 10, 2, 3, 2);
    try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 = getInitializedTestHarness(BasicTypeInfo.STRING_TYPE_INFO, new IdentityKeySelector<>(), new TestFunctionWithOutput(keysToRegister), 10, 3, 0, operatorSubtaskState1);
        TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 = getInitializedTestHarness(BasicTypeInfo.STRING_TYPE_INFO, new IdentityKeySelector<>(), new TestFunctionWithOutput(keysToRegister), 10, 3, 1, operatorSubtaskState2);
        TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness3 = getInitializedTestHarness(BasicTypeInfo.STRING_TYPE_INFO, new IdentityKeySelector<>(), new TestFunctionWithOutput(keysToRegister), 10, 3, 2, operatorSubtaskState3)) {
        testHarness1.processElement1(new StreamRecord<>("trigger"));
        testHarness2.processElement1(new StreamRecord<>("trigger"));
        testHarness3.processElement1(new StreamRecord<>("trigger"));
        Queue<?> output1 = testHarness1.getOutput();
        Queue<?> output2 = testHarness2.getOutput();
        Queue<?> output3 = testHarness3.getOutput();
        assertEquals(expected.size(), output1.size());
        for (Object o : output1) {
            StreamRecord<String> rec = (StreamRecord<String>) o;
            assertTrue(expected.contains(rec.getValue()));
        }
        assertEquals(expected.size(), output2.size());
        for (Object o : output2) {
            StreamRecord<String> rec = (StreamRecord<String>) o;
            assertTrue(expected.contains(rec.getValue()));
        }
        assertEquals(expected.size(), output3.size());
        for (Object o : output3) {
            StreamRecord<String> rec = (StreamRecord<String>) o;
            assertTrue(expected.contains(rec.getValue()));
        }
    }
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState), HashSet (java.util.HashSet), Test (org.junit.Test)
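The keyed variant differs from Example 91 mainly in supplying a key type and a key selector. The IdentityKeySelector helper is not shown on this page; presumably it just returns its input as the key, roughly:

import org.apache.flink.api.java.functions.KeySelector;

// Hypothetical reconstruction of the IdentityKeySelector referenced above: the element is its own key.
class IdentityKeySelector<T> implements KeySelector<T, T> {
    @Override
    public T getKey(T value) {
        return value;
    }
}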

Example 94 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From the class BatchMultipleInputStreamOperatorTest, method testProcess:

@Test
public void testProcess() throws Exception {
    TestingBatchMultipleInputStreamOperator op = createMultipleInputStreamOperator();
    List<StreamElement> outputData = op.getOutputData();
    TestingTwoInputStreamOperator joinOp2 = (TestingTwoInputStreamOperator) op.getTailWrapper().getStreamOperator();
    TableOperatorWrapper<?> joinWrapper1 = op.getTailWrapper().getInputWrappers().get(0);
    TestingTwoInputStreamOperator joinOp1 = (TestingTwoInputStreamOperator) joinWrapper1.getStreamOperator();
    TableOperatorWrapper<?> aggWrapper1 = joinWrapper1.getInputWrappers().get(0);
    TestingOneInputStreamOperator aggOp1 = (TestingOneInputStreamOperator) aggWrapper1.getStreamOperator();
    TableOperatorWrapper<?> aggWrapper2 = joinWrapper1.getInputWrappers().get(1);
    TestingOneInputStreamOperator aggOp2 = (TestingOneInputStreamOperator) aggWrapper2.getStreamOperator();
    List<Input> inputs = op.getInputs();
    assertEquals(3, inputs.size());
    Input input1 = inputs.get(0);
    Input input2 = inputs.get(1);
    Input input3 = inputs.get(2);
    assertTrue(input1 instanceof OneInput);
    assertTrue(input2 instanceof OneInput);
    assertTrue(input3 instanceof SecondInputOfTwoInput);
    assertNull(joinOp2.getCurrentElement1());
    assertNull(joinOp2.getCurrentElement2());
    assertNull(joinOp1.getCurrentElement1());
    assertNull(joinOp1.getCurrentElement2());
    assertNull(aggOp1.getCurrentElement());
    assertNull(aggOp2.getCurrentElement());
    assertTrue(outputData.isEmpty());
    // process first input (input id is 3)
    StreamRecord<RowData> element1 = new StreamRecord<>(GenericRowData.of(StringData.fromString("123")), 456);
    input3.processElement(element1);
    assertEquals(element1, joinOp2.getCurrentElement2());
    assertNull(joinOp2.getCurrentElement1());
    assertTrue(outputData.isEmpty());
    // finish first input
    assertTrue(joinOp2.getEndInputs().isEmpty());
    op.endInput(3);
    assertTrue(outputData.isEmpty());
    assertEquals(Collections.singletonList(2), joinOp2.getEndInputs());
    // process second input (input id is 1)
    StreamRecord<RowData> element2 = new StreamRecord<>(GenericRowData.of(StringData.fromString("124")), 457);
    input1.processElement(element2);
    assertEquals(element2, aggOp1.getCurrentElement());
    assertNull(joinOp1.getCurrentElement1());
    assertNull(joinOp2.getCurrentElement1());
    assertTrue(outputData.isEmpty());
    // finish second input
    assertTrue(joinOp1.getEndInputs().isEmpty());
    op.endInput(1);
    assertEquals(Collections.singletonList(1), joinOp1.getEndInputs());
    assertEquals(Collections.singletonList(2), joinOp2.getEndInputs());
    assertEquals(element2, joinOp1.getCurrentElement1());
    assertTrue(outputData.isEmpty());
    // process third input (input id is 2)
    StreamRecord<RowData> element3 = new StreamRecord<>(GenericRowData.of(StringData.fromString("125")), 458);
    input2.processElement(element3);
    assertEquals(element3, aggOp2.getCurrentElement());
    assertNull(joinOp1.getCurrentElement2());
    assertNull(joinOp2.getCurrentElement1());
    assertTrue(outputData.isEmpty());
    // finish third input
    assertEquals(Collections.singletonList(1), joinOp1.getEndInputs());
    op.endInput(2);
    assertEquals(Arrays.asList(1, 2), joinOp1.getEndInputs());
    assertEquals(Arrays.asList(2, 1), joinOp2.getEndInputs());
    assertEquals(element3, joinOp1.getCurrentElement2());
    assertEquals(3, outputData.size());
}
Also used: SecondInputOfTwoInput (org.apache.flink.table.runtime.operators.multipleinput.input.SecondInputOfTwoInput), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), StreamElement (org.apache.flink.streaming.runtime.streamrecord.StreamElement), OneInput (org.apache.flink.table.runtime.operators.multipleinput.input.OneInput), Input (org.apache.flink.streaming.api.operators.Input), GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), Test (org.junit.Test)
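The elements pushed into the multiple-input operator are StreamRecords wrapping single-column RowData values. For readers unfamiliar with the table runtime's data classes, here is a minimal sketch of building such an element and reading the field back, using only the public GenericRowData, StringData, and StreamRecord API; the class name is illustrative:

import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;

public class RowDataRecordSketch {
    public static void main(String[] args) {
        // a one-column row holding the string "123", wrapped in a record with timestamp 456
        RowData row = GenericRowData.of(StringData.fromString("123"));
        StreamRecord<RowData> element = new StreamRecord<>(row, 456L);

        // read the single string column back out of the record
        String field = element.getValue().getString(0).toString();
        System.out.println(field + " @ " + element.getTimestamp()); // 123 @ 456
    }
}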

Example 95 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From the class WindowOperatorTest, method testSideOutputDueToLatenessSessionZeroLatenessPurgingTrigger:

@Test
public void testSideOutputDueToLatenessSessionZeroLatenessPurgingTrigger() throws Exception {
    final int gapSize = 3;
    final long lateness = 0;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(EventTimeSessionWindows.withGap(Time.seconds(gapSize)), new TimeWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()), PurgingTrigger.of(EventTimeTrigger.create()), lateness, lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be dropped because the session we're adding to has a maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // this is side output as late
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    // this is also side output as late (we test that they are not accidentally merged)
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10100));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 10100));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 14500L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted("SideOutput was not correct.", sideExpected, (Iterable) sideActual, new Tuple2ResultSortComparator());
    testHarness.close();
}
Also used: ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow), TypeHint (org.apache.flink.api.common.typeinfo.TypeHint), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Tuple3 (org.apache.flink.api.java.tuple.Tuple3), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
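The harness above drives WindowOperator directly. At the DataStream API level, the same combination of event-time session windows with a 3-second gap, zero allowed lateness, and a late-data side output is wired up roughly as sketched below; the stream, tag, and reduce function are illustrative and not taken from the test:

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;

public class LateSessionSideOutputSketch {
    public static SingleOutputStreamOperator<Tuple2<String, Integer>> apply(
            DataStream<Tuple2<String, Integer>> input,
            OutputTag<Tuple2<String, Integer>> lateTag) {

        SingleOutputStreamOperator<Tuple2<String, Integer>> summed =
                input.keyBy(value -> value.f0)
                        .window(EventTimeSessionWindows.withGap(Time.seconds(3)))
                        .allowedLateness(Time.seconds(0))
                        .sideOutputLateData(lateTag)
                        .reduce((a, b) -> new Tuple2<>(a.f0, a.f1 + b.f1));

        // late elements (like the records at 10000 and 10100 above) are routed to the side output:
        // DataStream<Tuple2<String, Integer>> late = summed.getSideOutput(lateTag);
        return summed;
    }
}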

Aggregations

StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 270
Test (org.junit.Test): 212
ArrayList (java.util.ArrayList): 156
List (java.util.List): 151
Event (org.apache.flink.cep.Event): 136
SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition): 61
SubEvent (org.apache.flink.cep.SubEvent): 47
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 45
NFATestHarness (org.apache.flink.cep.utils.NFATestHarness): 39
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 36
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 34
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 31
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 31
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer): 30
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 28
HashMap (java.util.HashMap): 23
Map (java.util.Map): 23
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 23
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 20
KeySelector (org.apache.flink.api.java.functions.KeySelector): 20