
Example 81 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache: class StreamArrowPythonGroupWindowAggregateFunctionOperatorTest, method testFinishBundleTriggeredOnCheckpoint.

@Test
public void testFinishBundleTriggeredOnCheckpoint() throws Exception {
    Configuration conf = new Configuration();
    conf.setInteger(PythonOptions.MAX_BUNDLE_SIZE, 10);
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = getTestHarness(conf);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c2", 0L, 0L), initialTime + 1));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c4", 1L, 6000L), initialTime + 2));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c6", 2L, 10000L), initialTime + 3));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c2", "c8", 3L, 0L), initialTime + 4));
    testHarness.processWatermark(new Watermark(10000L));
    // the checkpoint (prepareSnapshotPreBarrier) triggers finishBundle and flushes the pending results
    testHarness.prepareSnapshotPreBarrier(0L);
    expectedOutput.add(new Watermark(10000L));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", 0L, TimestampData.fromEpochMillis(-5000L), TimestampData.fromEpochMillis(5000L))));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c2", 3L, TimestampData.fromEpochMillis(-5000L), TimestampData.fromEpochMillis(5000L))));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c2", 3L, TimestampData.fromEpochMillis(0L), TimestampData.fromEpochMillis(10000L))));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", 0L, TimestampData.fromEpochMillis(0L), TimestampData.fromEpochMillis(10000L))));
    assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.processWatermark(20000L);
    testHarness.close();
    expectedOutput.add(new Watermark(20000L));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", 1L, TimestampData.fromEpochMillis(5000L), TimestampData.fromEpochMillis(15000L))));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", 2L, TimestampData.fromEpochMillis(10000L), TimestampData.fromEpochMillis(20000L))));
    assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Also used: RowData (org.apache.flink.table.data.RowData), Configuration (org.apache.flink.configuration.Configuration), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
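The newBinaryRow helper used above is defined in the test's base class and is not shown in this snippet. As a hedged illustration only, a helper with the same call shape (two string fields followed by two long fields, with the leading boolean assumed to select insert vs. retract semantics) could be written with BinaryRowData and BinaryRowWriter roughly like this:

import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;
import org.apache.flink.types.RowKind;

// Hypothetical sketch of a newBinaryRow(accumulate, f0, f1, f2, f3) helper; the real helper may differ.
public static BinaryRowData newBinaryRow(boolean accumulate, String f0, String f1, long f2, long f3) {
    BinaryRowData row = new BinaryRowData(4);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeString(0, StringData.fromString(f0));
    writer.writeString(1, StringData.fromString(f1));
    writer.writeLong(2, f2);
    writer.writeLong(3, f3);
    // complete() finalizes the binary layout and points the row at the written memory segment
    writer.complete();
    // assumption: the boolean flag maps to the row kind (INSERT for accumulate, DELETE for retract)
    row.setRowKind(accumulate ? RowKind.INSERT : RowKind.DELETE);
    return row;
}

BinaryRowData keeps its fields in binary memory segments rather than as Java objects, which is the row format the table runtime generally passes between operators.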

Example 82 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache: class PythonStreamGroupAggregateOperatorTest, method testStateCleanupTimer.

@Test
public void testStateCleanupTimer() throws Exception {
    Configuration conf = new Configuration();
    conf.setString("table.exec.state.ttl", "100");
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = getTestHarness(conf);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.setProcessingTime(0L);
    testHarness.processElement(new StreamRecord<>(newRow(true, "c1", 0L), initialTime + 1));
    testHarness.setProcessingTime(500L);
    testHarness.processElement(new StreamRecord<>(newRow(true, "c2", 1L), initialTime + 2));
    testHarness.setProcessingTime(599L);
    testHarness.processElement(new StreamRecord<>(newRow(true, "c2", 2L), initialTime + 3));
    // advancing processing time to 1000 fires the state cleanup timers registered for both keys
    testHarness.setProcessingTime(1000L);
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", 0L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "state_cleanup_triggered: c1", 100L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c2", 1L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c2", 2L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "state_cleanup_triggered: c2", 699L)));
    assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.close();
}
Also used: RowData (org.apache.flink.table.data.RowData), GenericRowData (org.apache.flink.table.data.GenericRowData), Configuration (org.apache.flink.configuration.Configuration), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), Test (org.junit.Test)
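The newRow helper here also comes from the test base class. A minimal sketch assuming a GenericRowData-backed helper, with the boolean flag again assumed to map to the row kind:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.types.RowKind;

// Hypothetical sketch of newRow(accumulate, key, value) for the two-field rows used above.
public static GenericRowData newRow(boolean accumulate, String key, long value) {
    return GenericRowData.ofKind(
            accumulate ? RowKind.INSERT : RowKind.DELETE,
            StringData.fromString(key),
            value);
}

With "table.exec.state.ttl" set to 100 (100 ms of idle state retention), the cleanup markers in the expected output line up with last access plus TTL: 100 for c1 (last seen at processing time 0) and 699 for c2 (last seen at 599), both fired once processing time reaches 1000.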

Example 83 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache: class StreamArrowPythonRowTimeBoundedRangeOperatorTest, method testFinishBundleTriggeredByCount.

@Test
public void testFinishBundleTriggeredByCount() throws Exception {
    Configuration conf = new Configuration();
    conf.setInteger(PythonOptions.MAX_BUNDLE_SIZE, 4);
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = getTestHarness(conf);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c2", 0L, 1L), initialTime + 1));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c4", 1L, 1L), initialTime + 2));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c6", 2L, 10L), initialTime + 3));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c2", "c8", 3L, 2L), initialTime + 3));
    assertOutputEquals("FinishBundle should not be triggered.", expectedOutput, testHarness.getOutput());
    testHarness.processWatermark(new Watermark(1000L));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c2", 0L, 1L, 0L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c4", 1L, 1L, 0L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c2", "c8", 3L, 2L, 3L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c6", 2L, 10L, 2L)));
    expectedOutput.add(new Watermark(1000L));
    assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.close();
}
Also used: RowData (org.apache.flink.table.data.RowData), Configuration (org.apache.flink.configuration.Configuration), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
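assertOutputEquals is likewise a base-class helper. A plausible sketch, assuming it simply delegates to Flink's TestHarnessUtil (the delegation is an assumption, not something this snippet confirms):

import java.util.Queue;
import org.apache.flink.streaming.util.TestHarnessUtil;

// Hypothetical wrapper: compares expected and actual harness output element by element,
// covering both StreamRecords and Watermarks.
public static void assertOutputEquals(String message, Queue<Object> expected, Queue<Object> actual) {
    TestHarnessUtil.assertOutputEquals(message, expected, actual);
}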

Example 84 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache: class StreamArrowPythonRowTimeBoundedRangeOperatorTest, method testOverWindowAggregateFunction.

@Test
public void testOverWindowAggregateFunction() throws Exception {
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = getTestHarness(new Configuration());
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c2", 0L, 1L), initialTime + 1));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c4", 1L, 1L), initialTime + 2));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c6", 2L, 10L), initialTime + 3));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c2", "c8", 3L, 2L), initialTime + 3));
    testHarness.processWatermark(Long.MAX_VALUE);
    testHarness.close();
    expectedOutput.add(new Watermark(Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c2", 0L, 1L, 0L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c4", 1L, 1L, 0L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c2", "c8", 3L, 2L, 3L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c6", 2L, 10L, 2L)));
    assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Also used: RowData (org.apache.flink.table.data.RowData), Configuration (org.apache.flink.configuration.Configuration), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
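Each expected row above is the original input row with a single aggregate value appended. In Flink's table runtime that shape is typically produced with JoinedRowData, which concatenates two rows without copying their fields; a small sketch (illustration only, not the operator's actual emit path):

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.utils.JoinedRowData;

// Builds a row shaped like newRow(true, "c1", "c2", 0L, 1L, 0L): four input fields plus one aggregate.
public static RowData appendAggregate() {
    RowData input = GenericRowData.of(
            StringData.fromString("c1"), StringData.fromString("c2"), 0L, 1L);
    RowData agg = GenericRowData.of(0L);
    // the joined row exposes the fields of 'input' first, then the fields of 'agg'
    return new JoinedRowData(input, agg);
}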

Example 85 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache: class StreamArrowPythonRowTimeBoundedRangeOperatorTest, method testFinishBundleTriggeredByTime.

@Test
public void testFinishBundleTriggeredByTime() throws Exception {
    Configuration conf = new Configuration();
    conf.setInteger(PythonOptions.MAX_BUNDLE_SIZE, 10); // count limit high enough that it is never hit here
    conf.setLong(PythonOptions.MAX_BUNDLE_TIME_MILLS, 1000L); // the 1s time limit is what flushes the bundle
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = getTestHarness(conf);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c2", 0L, 1L), initialTime + 1));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c4", 1L, 1L), initialTime + 2));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c1", "c6", 2L, 10L), initialTime + 3));
    testHarness.processElement(new StreamRecord<>(newBinaryRow(true, "c2", "c8", 3L, 2L), initialTime + 3));
    testHarness.processWatermark(new Watermark(10000L));
    expectedOutput.add(new Watermark(10000L));
    assertOutputEquals("FinishBundle should not be triggered.", expectedOutput, testHarness.getOutput());
    // reaching the 1s bundle timeout in processing time finishes the bundle and emits the results
    testHarness.setProcessingTime(1000L);
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c2", 0L, 1L, 0L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c4", 1L, 1L, 0L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c2", "c8", 3L, 2L, 3L)));
    expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c6", 2L, 10L, 2L)));
    assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.close();
}
Also used: RowData (org.apache.flink.table.data.RowData), Configuration (org.apache.flink.configuration.Configuration), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
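Taken together with Example 83, this test exercises the two bundle-flush conditions of the Python operators: a bundle is finished when either the configured element count (MAX_BUNDLE_SIZE) is reached or the configured time (MAX_BUNDLE_TIME_MILLS) has elapsed. The sketch below is a simplified model of that rule for illustration; the real operator checks the time limit from a periodically scheduled processing-time callback rather than an inline check.

// Simplified model of the bundle-flush rule exercised by Examples 83 and 85 (not Flink's actual code).
public static boolean shouldFinishBundle(
        int elementsInBundle,
        long bundleStartTimeMillis,
        long currentProcessingTimeMillis,
        int maxBundleSize,
        long maxBundleTimeMills) {
    boolean countLimitReached = elementsInBundle >= maxBundleSize;
    boolean timeLimitReached =
            currentProcessingTimeMillis - bundleStartTimeMillis >= maxBundleTimeMills;
    return countLimitReached || timeLimitReached;
}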

Aggregations

RowData (org.apache.flink.table.data.RowData): 602
Test (org.junit.Test): 201
GenericRowData (org.apache.flink.table.data.GenericRowData): 178
ArrayList (java.util.ArrayList): 109
RowType (org.apache.flink.table.types.logical.RowType): 105
JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData): 90
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 84
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 72
Transformation (org.apache.flink.api.dag.Transformation): 70
Configuration (org.apache.flink.configuration.Configuration): 68
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 67
List (java.util.List): 65
ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge): 54
DataType (org.apache.flink.table.types.DataType): 52
Map (java.util.Map): 42
LogicalType (org.apache.flink.table.types.logical.LogicalType): 41
TableException (org.apache.flink.table.api.TableException): 34
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 33
RowDataKeySelector (org.apache.flink.table.runtime.keyselector.RowDataKeySelector): 32
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState): 31