Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.
The class PythonTableFunctionOperator, method open().
@Override
@SuppressWarnings("unchecked")
public void open() throws Exception {
    super.open();
    rowDataWrapper = new StreamRecordRowDataWrappingCollector(output);
    reuseJoinedRow = new JoinedRowData();
    udtfInputProjection =
            udtfInputGeneratedProjection.newInstance(Thread.currentThread().getContextClassLoader());
    forwardedInputSerializer = new RowDataSerializer(inputType);
    udtfInputTypeSerializer = PythonTypeUtils.toInternalSerializer(udfInputType);
    udtfOutputTypeSerializer = PythonTypeUtils.toInternalSerializer(udfOutputType);
    input = null;
    hasJoined = false;
    isFinishResult = true;
}
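For orientation, a minimal sketch of what a serializer like the forwardedInputSerializer above provides: a RowDataSerializer built from a RowType can round-trip any RowData through a binary buffer. This sketch is not from the Flink sources; the two-field schema is an assumption for illustration.

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class RowDataSerializerRoundTrip {
    public static void main(String[] args) throws Exception {
        // Hypothetical schema ROW<INT, STRING>, for illustration only.
        RowType rowType = RowType.of(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
        RowDataSerializer serializer = new RowDataSerializer(rowType);

        RowData row = GenericRowData.of(1, StringData.fromString("hello"));

        // Serialize into an in-memory buffer and read the row back out.
        DataOutputSerializer out = new DataOutputSerializer(64);
        serializer.serialize(row, out);
        DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
        RowData copy = serializer.deserialize(in);

        System.out.println(copy.getInt(0) + ", " + copy.getString(1)); // 1, hello
    }
}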
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.
The class ArrowReaderWriterTest, method getTestData().
@Override
public RowData[] getTestData() {
    RowData row1 = StreamRecordUtils.row(
            (byte) 1, (short) 2, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            new GenericArrayData(new StringData[] {
                    StringData.fromString("hello"), StringData.fromString("中文"), null }),
            GenericRowData.of(1, StringData.fromString("hello"),
                    new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                    TimestampData.fromEpochMillis(3600000),
                    GenericRowData.of(1, StringData.fromString("hello"))));
    BinaryRowData row2 = StreamRecordUtils.binaryrow(
            (byte) 1, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(new GenericArrayData(new String[] { null, null, null }),
                    new ArrayDataSerializer(new VarCharType())),
            Tuple2.of(GenericRowData.of(1, null,
                            new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                            null, GenericRowData.of(1, StringData.fromString("hello"))),
                    new RowDataSerializer(rowFieldType)));
    RowData row3 = StreamRecordUtils.row(
            null, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            new GenericArrayData(new String[] { null, null, null }),
            GenericRowData.of(1, null,
                    new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                    null, null));
    BinaryRowData row4 = StreamRecordUtils.binaryrow(
            (byte) 1, null, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(new GenericArrayData(new StringData[] {
                            StringData.fromString("hello"), StringData.fromString("中文"), null }),
                    new ArrayDataSerializer(new VarCharType())),
            Tuple2.of(GenericRowData.of(1, null,
                            new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                            null, null),
                    new RowDataSerializer(rowFieldType)));
    RowData row5 = StreamRecordUtils.row(new Object[fieldTypes.size()]);
    BinaryRowData row6 = StreamRecordUtils.binaryrow(new Object[fieldTypes.size()]);
    return new RowData[] { row1, row2, row3, row4, row5, row6 };
}
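The Tuple2 arguments above pair a nested value with the serializer StreamRecordUtils.binaryrow needs to write it into the compact binary format. A minimal standalone sketch of that conversion in isolation, assuming a hypothetical nested type ROW<INT, STRING>:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class ToBinaryRowSketch {
    public static void main(String[] args) {
        // Assumed nested type ROW<INT, STRING>; illustrative only.
        RowType nestedType = RowType.of(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
        RowDataSerializer serializer = new RowDataSerializer(nestedType);

        // toBinaryRow materializes any RowData into a BinaryRowData; note that
        // the returned instance may be reused by later calls on the same serializer.
        BinaryRowData binary =
                serializer.toBinaryRow(GenericRowData.of(1, StringData.fromString("hello")));
        System.out.println(binary.getString(1)); // hello
    }
}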
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.
The class AbstractBatchArrowPythonAggregateFunctionOperatorTest, method getTestHarness().
public OneInputStreamOperatorTestHarness<RowData, RowData> getTestHarness(Configuration config) throws Exception {
    RowType inputType = getInputType();
    RowType outputType = getOutputType();
    AbstractArrowPythonAggregateFunctionOperator operator = getTestOperator(
            config,
            new PythonFunctionInfo[] {
                    new PythonFunctionInfo(
                            PythonScalarFunctionOperatorTestBase.DummyPythonFunction.INSTANCE,
                            new Integer[] { 0 }) },
            inputType,
            outputType,
            new int[] { 0 },
            new int[] { 2 });
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
            new OneInputStreamOperatorTestHarness<>(operator);
    testHarness.getStreamConfig().setManagedMemoryFractionOperatorOfUseCase(ManagedMemoryUseCase.PYTHON, 0.5);
    testHarness.setup(new RowDataSerializer(outputType));
    return testHarness;
}
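Passing new RowDataSerializer(outputType) to setup(...) tells the harness how to serialize the operator's output records. A hypothetical test method in the same class might drive the harness as sketched below; the method name and row values are placeholders, and the element must match the arity and field types of getInputType():

@Test
public void testHarnessEmitsResults() throws Exception {
    // Hypothetical usage; assumes the JUnit and Flink imports used elsewhere in this class.
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
            getTestHarness(new Configuration());
    testHarness.open();
    // Illustrative input row; shape must agree with getInputType().
    testHarness.processElement(
            new StreamRecord<>(GenericRowData.of(1L, StringData.fromString("a"), 0L), 0L));
    testHarness.close(); // closing flushes results buffered on the Python side
    assertFalse(testHarness.getOutput().isEmpty());
}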
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.
The class AbstractStreamArrowPythonAggregateFunctionOperatorTest, method getTestHarness().
public OneInputStreamOperatorTestHarness<RowData, RowData> getTestHarness(Configuration config) throws Exception {
    RowType inputType = getInputType();
    RowType outputType = getOutputType();
    AbstractArrowPythonAggregateFunctionOperator operator = getTestOperator(
            config,
            new PythonFunctionInfo[] {
                    new PythonFunctionInfo(
                            PythonScalarFunctionOperatorTestBase.DummyPythonFunction.INSTANCE,
                            new Integer[] { 0 }) },
            inputType,
            outputType,
            new int[] { 0 },
            new int[] { 2 });
    int[] grouping = new int[] { 0 };
    RowDataKeySelector keySelector =
            KeySelectorUtil.getRowDataSelector(grouping, InternalTypeInfo.of(getInputType()));
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, keySelector.getProducedType());
    testHarness.getStreamConfig().setManagedMemoryFractionOperatorOfUseCase(ManagedMemoryUseCase.PYTHON, 0.5);
    testHarness.setup(new RowDataSerializer(outputType));
    return testHarness;
}
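The keyed variant additionally needs a key selector so the harness routes records to keyed state. A self-contained sketch of building the same kind of selector on its own; the schema ROW<INT, STRING> is an assumption for illustration:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.planner.plan.utils.KeySelectorUtil;
import org.apache.flink.table.runtime.keyselector.RowDataKeySelector;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class KeySelectorSketch {
    public static void main(String[] args) throws Exception {
        // Assumed schema ROW<INT, STRING>; group on field 0 as in the harness above.
        RowType inputType = RowType.of(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
        RowDataKeySelector keySelector =
                KeySelectorUtil.getRowDataSelector(new int[] { 0 }, InternalTypeInfo.of(inputType));
        // The selector projects the grouping fields into a binary RowData key.
        RowData key = keySelector.getKey(GenericRowData.of(42, StringData.fromString("x")));
        System.out.println(key.getInt(0)); // 42
    }
}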
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.
The class BaseMaterializedResultTest, method createInternalBinaryRowDataConverter().
static Function<Row, BinaryRowData> createInternalBinaryRowDataConverter(DataType dataType) {
    DataStructureConverter<Object, Object> converter = DataStructureConverters.getConverter(dataType);
    RowDataSerializer serializer = new RowDataSerializer((RowType) dataType.getLogicalType());
    return row -> serializer.toBinaryRow((RowData) converter.toInternalOrNull(row)).copy();
}
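A hypothetical call site for this helper, assuming it is in scope; the DataTypes schema and row values are illustrative. The trailing copy() in the converter matters because RowDataSerializer.toBinaryRow may reuse an internal BinaryRowData instance, so each result must own its own memory.

// Hypothetical usage; schema and values are placeholders.
DataType dataType = DataTypes.ROW(
        DataTypes.FIELD("id", DataTypes.INT()),
        DataTypes.FIELD("name", DataTypes.STRING()));
Function<Row, BinaryRowData> toBinary = createInternalBinaryRowDataConverter(dataType);
BinaryRowData binary = toBinary.apply(Row.of(1, "hello"));
// binary is a defensive copy, safe to hold across further conversions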