Example usage of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the Apache Flink project,
taken from the open method of the class BatchArrowPythonGroupWindowAggregateFunctionOperator.
@Override
public void open() throws Exception {
    super.open();
    // Serializer used to copy/forward the incoming rows.
    forwardedInputSerializer = new RowDataSerializer(inputType);
    // Buffered (key, window) pairs awaiting emission.
    inputKeyAndWindow = new LinkedList<>();
    // Reusable rows: one for the named window properties, one joining
    // the aggregate result with those properties.
    windowProperty = new GenericRowData(namedProperties.length);
    windowAggResult = new JoinedRowData();
    // Assigns buffered input rows to their windows.
    windowsGrouping =
            new HeapWindowsGrouping(
                    maxLimitSize, windowSize, slideSize, inputTimeFieldIndex, false);
}
Example usage of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the Apache Flink project,
taken from the open method of the class BatchArrowPythonOverWindowAggregateFunctionOperator.
@Override
public void open() throws Exception {
    super.open();
    // Serializer used to copy/forward the incoming rows.
    forwardedInputSerializer = new RowDataSerializer(inputType);
    lastKeyDataStartPos = 0;
    // Byte buffer (plus a view wrapper) holding window boundaries together
    // with the serialized input data.
    windowBoundaryWithDataBaos = new ByteArrayOutputStreamWithPos();
    windowBoundaryWithDataWrapper = new DataOutputViewStreamWrapper(windowBoundaryWithDataBaos);
    // Track only those range windows that have a bounded preceding or
    // bounded following boundary; unbounded windows need no boundary lists.
    boundedRangeWindowIndex = new ArrayList<>();
    boundedRangeWindowBoundaries = new ArrayList<>(lowerBoundary.length);
    for (int windowIdx = 0; windowIdx < lowerBoundary.length; windowIdx++) {
        boolean hasBoundedEdge =
                lowerBoundary[windowIdx] != Long.MIN_VALUE
                        || upperBoundary[windowIdx] != Long.MAX_VALUE;
        if (isRangeWindows[windowIdx] && hasBoundedEdge) {
            boundedRangeWindowIndex.add(windowIdx);
            boundedRangeWindowBoundaries.add(new ArrayList<>());
        }
    }
}
Example usage of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the Apache Flink project,
taken from the init method of the class ArrowSourceFunctionTest.
@BeforeClass
public static void init() {
    fieldTypes.add(new VarCharType());

    // Derive a row type with generated field names "f0", "f1", ...
    // from the registered logical field types.
    List<RowType.RowField> fields = new ArrayList<>();
    int fieldIndex = 0;
    for (LogicalType fieldType : fieldTypes) {
        fields.add(new RowType.RowField("f" + fieldIndex, fieldType));
        fieldIndex++;
    }
    rowType = new RowType(fields);
    dataType = TypeConversions.fromLogicalToDataType(rowType);

    serializer = new RowDataSerializer(fieldTypes.toArray(new LogicalType[0]));
    // Child allocator with no reservation and an unbounded limit.
    allocator = ArrowUtils.getRootAllocator().newChildAllocator("stdout", 0, Long.MAX_VALUE);
}
Example usage of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the Apache Flink project,
taken from the getTestHarness method of the class AbstractPythonStreamAggregateOperatorTest.
protected OneInputStreamOperatorTestHarness getTestHarness(Configuration config) throws Exception {
    OneInputStreamOperator operator = getTestOperator(config);
    RowType outputType = getOutputType();
    // Keyed harness: key by the grouping columns of the input type,
    // single parallel subtask (maxParallelism=1, parallelism=1, subtask 0).
    KeyedOneInputStreamOperatorTestHarness harness =
            new KeyedOneInputStreamOperatorTestHarness(
                    operator,
                    KeySelectorUtil.getRowDataSelector(
                            getGrouping(), InternalTypeInfo.of(getInputType())),
                    InternalTypeInfo.of(getKeyType()),
                    1,
                    1,
                    0);
    // Reserve half of the operator's managed memory for the Python use case.
    harness.getStreamConfig()
            .setManagedMemoryFractionOperatorOfUseCase(ManagedMemoryUseCase.PYTHON, 0.5);
    harness.setup(new RowDataSerializer(outputType));
    return harness;
}
Example usage of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the Apache Flink project,
taken from the testNestedRowWithBinaryRowEquals method of the class BinaryRowDataTest.
@Test
public void testNestedRowWithBinaryRowEquals() {
    // Expected inner row, written directly field-by-field.
    BinaryRowData expectedInnerRow = new BinaryRowData(2);
    {
        BinaryRowWriter innerWriter = new BinaryRowWriter(expectedInnerRow);
        innerWriter.writeString(0, StringData.fromString("Test"));
        innerWriter.writeDouble(1, 12.345);
        innerWriter.complete();
    }

    // Outer row holding an int plus the same inner row, this time written
    // through the row serializer (exercising the nested-row write path).
    BinaryRowData outerRow = new BinaryRowData(2);
    {
        BinaryRowWriter outerWriter = new BinaryRowWriter(outerRow);
        outerWriter.writeInt(0, 42);
        LogicalType innerType =
                DataTypes.ROW(
                                DataTypes.FIELD("f0", DataTypes.STRING()),
                                DataTypes.FIELD("f1", DataTypes.DOUBLE()))
                        .getLogicalType();
        RowDataSerializer innerSerializer =
                (RowDataSerializer) (TypeSerializer<?>) InternalSerializers.create(innerType);
        outerWriter.writeRow(
                1, GenericRowData.of(StringData.fromString("Test"), 12.345), innerSerializer);
        outerWriter.complete();
    }

    // Equality must be symmetric between the directly-written and the
    // serializer-written representation of the nested row.
    assertEquals(expectedInnerRow, outerRow.getRow(1, 2));
    assertEquals(outerRow.getRow(1, 2), expectedInnerRow);
}
Aggregations