Usage example of org.apache.flink.table.data.RowData in the Apache Flink project:
method processElement of class AbstractPythonStreamAggregateOperator.
@Override
public void processElement(StreamRecord<RowData> element) throws Exception {
    // Forward the incoming row to the Python UDF bridge, then account for it
    // in the current bundle and flush any results that are already available.
    processElementInternal(element.getValue());
    elementCount++;
    // May trigger finishBundle() once the configured bundle size is reached.
    checkInvokeFinishBundleByCount();
    emitResults();
}
Usage example of org.apache.flink.table.data.RowData in the Apache Flink project:
method emitResult of class BatchArrowPythonGroupWindowAggregateFunctionOperator.
@Override
@SuppressWarnings("ConstantConditions")
public void emitResult(Tuple2<byte[], Integer> resultTuple) throws Exception {
    // Deserialize the Arrow-encoded aggregate results produced by the Python
    // worker and join each result row with its buffered key and time window.
    bais.setBuffer(resultTuple.f0, 0, resultTuple.f1);
    int resultCount = arrowSerializer.load();
    int index = 0;
    while (index < resultCount) {
        // Results arrive in the same order the (key, window) pairs were queued.
        Tuple2<RowData, TimeWindow> keyAndWindow = inputKeyAndWindow.poll();
        setWindowProperty(keyAndWindow.f1);
        windowAggResult.replace(keyAndWindow.f0, arrowSerializer.read(index));
        rowDataWrapper.collect(reuseJoinedRow.replace(windowAggResult, windowProperty));
        index++;
    }
    // Release the reader so the serializer can accept the next batch.
    arrowSerializer.resetReader();
}
Usage example of org.apache.flink.table.data.RowData in the Apache Flink project:
method triggerWindowProcess of class AbstractStreamArrowPythonBoundedRangeOperator.
/**
 * Processes the range window ending at {@code upperLimit}: writes every buffered
 * row whose timestamp falls in [upperLimit - lowerBoundary, upperLimit] to the
 * Arrow serializer, evicts expired state entries, and invokes the current batch.
 *
 * @param upperLimit upper (inclusive) timestamp bound of the window
 * @param inputs the rows that triggered this window; when null, nothing is done
 */
private void triggerWindowProcess(long upperLimit, List<RowData> inputs) throws Exception {
    if (inputs == null) {
        return;
    }
    long lowerLimit = upperLimit - lowerBoundary;
    Iterator<Map.Entry<Long, List<RowData>>> stateEntries = inputState.iterator();
    while (stateEntries.hasNext()) {
        Map.Entry<Long, List<RowData>> entry = stateEntries.next();
        long timestamp = entry.getKey();
        if (timestamp < lowerLimit) {
            // Entry fell out of the window range; drop it from state.
            stateEntries.remove();
        } else if (timestamp <= upperLimit) {
            // Entry lies inside [lowerLimit, upperLimit]; feed its rows to Python.
            for (RowData row : entry.getValue()) {
                arrowSerializer.write(getFunctionInput(row));
                currentBatchCount++;
            }
        }
    }
    inputData.add(inputs);
    invokeCurrentBatch();
}
Usage example of org.apache.flink.table.data.RowData in the Apache Flink project:
method emitResult of class PythonScalarFunctionOperator.
@Override
@SuppressWarnings("ConstantConditions")
public void emitResult(Tuple2<byte[], Integer> resultTuple) throws IOException {
    // Pair the deserialized UDF result with the forwarded input row that
    // produced it (queued in arrival order) and emit the joined row downstream.
    RowData forwardedInput = forwardedInputQueue.poll();
    bais.setBuffer(resultTuple.f0, 0, resultTuple.f1);
    RowData udfOutput = udfOutputTypeSerializer.deserialize(baisWrapper);
    // Propagate the original changelog row kind onto the joined output.
    reuseJoinedRow.setRowKind(forwardedInput.getRowKind());
    rowDataWrapper.collect(reuseJoinedRow.replace(forwardedInput, udfOutput));
}
Usage example of org.apache.flink.table.data.RowData in the Apache Flink project:
method emitResult of class ArrowPythonScalarFunctionOperator.
@Override
@SuppressWarnings("ConstantConditions")
public void emitResult(Tuple2<byte[], Integer> resultTuple) throws Exception {
    // Load the Arrow-encoded UDF results, then join each result row with the
    // corresponding forwarded input row (queued in arrival order).
    bais.setBuffer(resultTuple.f0, 0, resultTuple.f1);
    int resultCount = arrowSerializer.load();
    for (int row = 0; row < resultCount; ++row) {
        RowData forwardedInput = forwardedInputQueue.poll();
        // Keep the original changelog row kind on the joined output.
        reuseJoinedRow.setRowKind(forwardedInput.getRowKind());
        rowDataWrapper.collect(
                reuseJoinedRow.replace(forwardedInput, arrowSerializer.read(row)));
    }
    // Release the reader so the serializer can accept the next batch.
    arrowSerializer.resetReader();
}
Aggregations