Example 51 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

Class AbstractPythonStreamAggregateOperator, method processElement:

@Override
public void processElement(StreamRecord<RowData> element) throws Exception {
    RowData value = element.getValue();
    processElementInternal(value);
    elementCount++;
    // Finish the current Python bundle early once the configured element count is reached.
    checkInvokeFinishBundleByCount();
    emitResults();
}
Also used: RowData (org.apache.flink.table.data.RowData)
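
For context, the following standalone sketch (not taken from the Flink sources) shows how a StreamRecord<RowData> like the one handled above can be built, e.g. in a test harness. The two-field row layout and the commented-out operator call are assumptions; the operator would be an already-configured subclass of AbstractPythonStreamAggregateOperator.

import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;

public class ProcessElementSketch {
    public static void main(String[] args) throws Exception {
        // Build a two-field row (key, value) with the generic in-memory RowData implementation.
        RowData row = GenericRowData.of(StringData.fromString("user-1"), 42L);
        // Wrap it in a StreamRecord, as the runtime does before calling processElement.
        StreamRecord<RowData> record = new StreamRecord<>(row);
        // operator.processElement(record);  // hypothetical, already-configured operator instance
        System.out.println("arity = " + record.getValue().getArity());
    }
}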

Example 52 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

Class BatchArrowPythonGroupWindowAggregateFunctionOperator, method emitResult:

@Override
@SuppressWarnings("ConstantConditions")
public void emitResult(Tuple2<byte[], Integer> resultTuple) throws Exception {
    byte[] udafResult = resultTuple.f0;
    int length = resultTuple.f1;
    bais.setBuffer(udafResult, 0, length);
    // Load the Arrow batch produced by the Python UDAF; each result row is paired with a queued key/window.
    int rowCount = arrowSerializer.load();
    for (int i = 0; i < rowCount; i++) {
        Tuple2<RowData, TimeWindow> input = inputKeyAndWindow.poll();
        RowData key = input.f0;
        TimeWindow window = input.f1;
        setWindowProperty(window);
        windowAggResult.replace(key, arrowSerializer.read(i));
        rowDataWrapper.collect(reuseJoinedRow.replace(windowAggResult, windowProperty));
    }
    arrowSerializer.resetReader();
}
Also used: RowData (org.apache.flink.table.data.RowData), BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), GenericRowData (org.apache.flink.table.data.GenericRowData), JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData), TimeWindow (org.apache.flink.table.runtime.operators.window.TimeWindow)
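
The join of grouping key, aggregate result, and window properties above is done with a reusable JoinedRowData, which stitches two rows together without copying fields. Below is a minimal standalone sketch of that pattern; the single-field key, the long-typed aggregate, and the two window-property timestamps are assumptions, not the operator's actual schema.

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.data.utils.JoinedRowData;

public class JoinedRowSketch {
    public static void main(String[] args) {
        RowData key = GenericRowData.of(StringData.fromString("item-7"));
        RowData aggResult = GenericRowData.of(123L);                 // e.g. a COUNT result
        RowData windowProps = GenericRowData.of(                     // window_start, window_end
                TimestampData.fromEpochMillis(0L),
                TimestampData.fromEpochMillis(60_000L));
        // First join key + aggregate, then join with the window columns, mirroring
        // windowAggResult.replace(...) and reuseJoinedRow.replace(...) in emitResult above.
        RowData keyAndAgg = new JoinedRowData().replace(key, aggResult);
        RowData output = new JoinedRowData().replace(keyAndAgg, windowProps);
        System.out.println("output arity = " + output.getArity());   // 1 + 1 + 2 = 4
    }
}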

Example 53 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

Class AbstractStreamArrowPythonBoundedRangeOperator, method triggerWindowProcess:

private void triggerWindowProcess(long upperLimit, List<RowData> inputs) throws Exception {
    // Rows with a timestamp below upperLimit - lowerBoundary fall outside the bounded range.
    long lowerLimit = upperLimit - lowerBoundary;
    if (inputs != null) {
        Iterator<Map.Entry<Long, List<RowData>>> iter = inputState.iterator();
        while (iter.hasNext()) {
            Map.Entry<Long, List<RowData>> entry = iter.next();
            long dataTs = entry.getKey();
            if (dataTs >= lowerLimit) {
                if (dataTs <= upperLimit) {
                    List<RowData> dataList = entry.getValue();
                    for (RowData data : dataList) {
                        arrowSerializer.write(getFunctionInput(data));
                        currentBatchCount++;
                    }
                }
            } else {
                // The timestamp is below the lower bound, so these rows can never be needed again.
                iter.remove();
            }
        }
        inputData.add(inputs);
        invokeCurrentBatch();
    }
}
Also used: RowData (org.apache.flink.table.data.RowData), List (java.util.List), LinkedList (java.util.LinkedList), Map (java.util.Map)
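
Below is a minimal standalone sketch of the retention logic above. A plain TreeMap stands in for the operator's keyed inputState, and the boundary and timestamps are made-up values: rows with a timestamp below upperLimit - lowerBoundary are pruned, rows inside the range are collected into the current batch.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;

public class BoundedRangeSketch {
    public static void main(String[] args) {
        long lowerBoundary = 10_000L;                  // 10-second range
        long upperLimit = 60_000L;
        long lowerLimit = upperLimit - lowerBoundary;  // 50_000

        Map<Long, List<RowData>> state = new TreeMap<>();
        state.put(40_000L, Collections.<RowData>singletonList(GenericRowData.of(1L))); // expired, removed
        state.put(55_000L, Collections.<RowData>singletonList(GenericRowData.of(2L))); // inside [50_000, 60_000]
        state.put(70_000L, Collections.<RowData>singletonList(GenericRowData.of(3L))); // future, kept but not emitted

        List<RowData> batch = new ArrayList<>();
        Iterator<Map.Entry<Long, List<RowData>>> iter = state.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<Long, List<RowData>> entry = iter.next();
            long dataTs = entry.getKey();
            if (dataTs >= lowerLimit) {
                if (dataTs <= upperLimit) {
                    batch.addAll(entry.getValue());    // would be written to the Arrow batch
                }
            } else {
                iter.remove();                         // same pruning as iter.remove() above
            }
        }
        System.out.println("batched rows = " + batch.size() + ", retained keys = " + state.keySet());
    }
}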

Example 54 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

Class PythonScalarFunctionOperator, method emitResult:

@Override
@SuppressWarnings("ConstantConditions")
public void emitResult(Tuple2<byte[], Integer> resultTuple) throws IOException {
    byte[] rawUdfResult = resultTuple.f0;
    int length = resultTuple.f1;
    RowData input = forwardedInputQueue.poll();
    reuseJoinedRow.setRowKind(input.getRowKind());
    bais.setBuffer(rawUdfResult, 0, length);
    RowData udfResult = udfOutputTypeSerializer.deserialize(baisWrapper);
    rowDataWrapper.collect(reuseJoinedRow.replace(input, udfResult));
}
Also used: RowData (org.apache.flink.table.data.RowData)
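
A minimal standalone sketch (not the operator's code) of the RowKind forwarding shown above: the output row reuses the input row's changelog kind, so INSERT/UPDATE/DELETE semantics survive the join of the forwarded input with the deserialized UDF result. The concrete field values are assumptions.

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.utils.JoinedRowData;
import org.apache.flink.types.RowKind;

public class RowKindForwardingSketch {
    public static void main(String[] args) {
        GenericRowData input = GenericRowData.of(7L);
        input.setRowKind(RowKind.UPDATE_AFTER);        // pretend the upstream emitted an update

        RowData udfResult = GenericRowData.of(49L);    // e.g. the result of a Python square() UDF

        JoinedRowData reuseJoinedRow = new JoinedRowData();
        reuseJoinedRow.setRowKind(input.getRowKind()); // same step as in emitResult above
        RowData output = reuseJoinedRow.replace(input, udfResult);

        System.out.println(output.getRowKind() + ", arity = " + output.getArity());
    }
}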

Example 55 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

Class ArrowPythonScalarFunctionOperator, method emitResult:

@Override
@SuppressWarnings("ConstantConditions")
public void emitResult(Tuple2<byte[], Integer> resultTuple) throws Exception {
    byte[] udfResult = resultTuple.f0;
    int length = resultTuple.f1;
    bais.setBuffer(udfResult, 0, length);
    int rowCount = arrowSerializer.load();
    for (int i = 0; i < rowCount; i++) {
        RowData input = forwardedInputQueue.poll();
        reuseJoinedRow.setRowKind(input.getRowKind());
        rowDataWrapper.collect(reuseJoinedRow.replace(input, arrowSerializer.read(i)));
    }
    arrowSerializer.resetReader();
}
Also used: RowData (org.apache.flink.table.data.RowData)
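
Because the Arrow batch preserves input order, the loop above joins the i-th deserialized result row with the i-th row polled from forwardedInputQueue. A minimal standalone sketch of that pairing, with a plain ArrayDeque standing in for the queue and hard-coded rows as assumptions:

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.utils.JoinedRowData;

public class ArrowPairingSketch {
    public static void main(String[] args) {
        Deque<RowData> forwardedInputQueue = new ArrayDeque<>();
        forwardedInputQueue.add(GenericRowData.of(1L));
        forwardedInputQueue.add(GenericRowData.of(2L));

        // Stand-ins for the rows read back from the Arrow batch (arrowSerializer.read(i)).
        List<RowData> udfResults = Arrays.asList(GenericRowData.of(10L), GenericRowData.of(20L));

        JoinedRowData reuseJoinedRow = new JoinedRowData();
        for (RowData udfResult : udfResults) {
            RowData input = forwardedInputQueue.poll();
            reuseJoinedRow.setRowKind(input.getRowKind());
            System.out.println("emit arity = " + reuseJoinedRow.replace(input, udfResult).getArity());
        }
    }
}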

Aggregations

RowData (org.apache.flink.table.data.RowData): 602
Test (org.junit.Test): 201
GenericRowData (org.apache.flink.table.data.GenericRowData): 178
ArrayList (java.util.ArrayList): 109
RowType (org.apache.flink.table.types.logical.RowType): 105
JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData): 90
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 84
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 72
Transformation (org.apache.flink.api.dag.Transformation): 70
Configuration (org.apache.flink.configuration.Configuration): 68
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 67
List (java.util.List): 65
ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge): 54
DataType (org.apache.flink.table.types.DataType): 52
Map (java.util.Map): 42
LogicalType (org.apache.flink.table.types.logical.LogicalType): 41
TableException (org.apache.flink.table.api.TableException): 34
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 33
RowDataKeySelector (org.apache.flink.table.runtime.keyselector.RowDataKeySelector): 32
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState): 31