Example 16 with RowDataSerializer

Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.

From the class BatchArrowPythonGroupWindowAggregateFunctionOperator, method open:

@Override
public void open() throws Exception {
    super.open();
    inputKeyAndWindow = new LinkedList<>();
    windowProperty = new GenericRowData(namedProperties.length);
    windowAggResult = new JoinedRowData();
    windowsGrouping = new HeapWindowsGrouping(maxLimitSize, windowSize, slideSize, inputTimeFieldIndex, false);
    forwardedInputSerializer = new RowDataSerializer(inputType);
}
Also used: JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData), GenericRowData (org.apache.flink.table.data.GenericRowData), HeapWindowsGrouping (org.apache.flink.table.runtime.operators.window.grouping.HeapWindowsGrouping), RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer)
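Example 16 allocates a GenericRowData for the window properties and a JoinedRowData (windowAggResult) that presents two rows as one combined row without copying fields. A minimal, self-contained sketch of that joining pattern (class name and field values are hypothetical, not taken from the operator):

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.data.utils.JoinedRowData;

public class JoinedRowSketch {
    public static void main(String[] args) {
        // Hypothetical aggregate result (one BIGINT field).
        GenericRowData aggResult = GenericRowData.of(100L);

        // Hypothetical window properties (window start / window end).
        GenericRowData windowProperty = new GenericRowData(2);
        windowProperty.setField(0, TimestampData.fromEpochMillis(0L));
        windowProperty.setField(1, TimestampData.fromEpochMillis(60_000L));

        // JoinedRowData exposes the two rows side by side as a single RowData.
        RowData joined = new JoinedRowData().replace(aggResult, windowProperty);
        System.out.println(joined.getArity()); // 3: one aggregate field plus two window properties
    }
}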

Example 17 with RowDataSerializer

Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.

From the class BatchArrowPythonOverWindowAggregateFunctionOperator, method open:

@Override
public void open() throws Exception {
    super.open();
    forwardedInputSerializer = new RowDataSerializer(inputType);
    this.lastKeyDataStartPos = 0;
    windowBoundaryWithDataBaos = new ByteArrayOutputStreamWithPos();
    windowBoundaryWithDataWrapper = new DataOutputViewStreamWrapper(windowBoundaryWithDataBaos);
    boundedRangeWindowBoundaries = new ArrayList<>(lowerBoundary.length);
    boundedRangeWindowIndex = new ArrayList<>();
    for (int i = 0; i < lowerBoundary.length; i++) {
        // range window with bounded preceding or bounded following
        if (isRangeWindows[i] && (lowerBoundary[i] != Long.MIN_VALUE || upperBoundary[i] != Long.MAX_VALUE)) {
            boundedRangeWindowIndex.add(i);
            boundedRangeWindowBoundaries.add(new ArrayList<>());
        }
    }
}
Also used: DataOutputViewStreamWrapper (org.apache.flink.core.memory.DataOutputViewStreamWrapper), ByteArrayOutputStreamWithPos (org.apache.flink.core.memory.ByteArrayOutputStreamWithPos), RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer)
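Example 17 pairs a ByteArrayOutputStreamWithPos with a DataOutputViewStreamWrapper so that window boundaries can be buffered together with the data. A minimal, self-contained sketch of that buffering pattern (class name and the written boundary values are hypothetical):

import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class BoundaryBufferSketch {
    public static void main(String[] args) throws Exception {
        // A positionable byte buffer plus a DataOutputView over it, as in the open() method above.
        ByteArrayOutputStreamWithPos baos = new ByteArrayOutputStreamWithPos();
        DataOutputViewStreamWrapper wrapper = new DataOutputViewStreamWrapper(baos);

        // Write two hypothetical window boundaries into the buffer.
        wrapper.writeLong(0L);
        wrapper.writeLong(60_000L);

        System.out.println("bytes written: " + baos.getPosition());
    }
}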

Example 18 with RowDataSerializer

Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.

From the class ArrowSourceFunctionTest, method init:

@BeforeClass
public static void init() {
    fieldTypes.add(new VarCharType());
    List<RowType.RowField> rowFields = new ArrayList<>();
    for (int i = 0; i < fieldTypes.size(); i++) {
        rowFields.add(new RowType.RowField("f" + i, fieldTypes.get(i)));
    }
    rowType = new RowType(rowFields);
    dataType = TypeConversions.fromLogicalToDataType(rowType);
    serializer = new RowDataSerializer(fieldTypes.toArray(new LogicalType[0]));
    allocator = ArrowUtils.getRootAllocator().newChildAllocator("stdout", 0, Long.MAX_VALUE);
}
Also used: ArrayList (java.util.ArrayList), RowType (org.apache.flink.table.types.logical.RowType), VarCharType (org.apache.flink.table.types.logical.VarCharType), RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer), BeforeClass (org.junit.BeforeClass)
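The init() above assembles the RowType field by field before creating the serializer. For comparison, a minimal sketch (class name hypothetical) of the same setup using the RowType.of convenience factory together with the two RowDataSerializer constructors that appear across these examples (a RowType in Examples 16, 17 and 19, individual field types in Example 18):

import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.utils.TypeConversions;

public class SerializerSetupSketch {
    public static void main(String[] args) {
        // RowType.of assigns the default field names f0, f1, ... that the loop above builds by hand.
        RowType rowType = RowType.of(new VarCharType());
        DataType dataType = TypeConversions.fromLogicalToDataType(rowType);

        // Either constructor yields a serializer for the same single-VARCHAR row.
        RowDataSerializer fromRowType = new RowDataSerializer(rowType);
        RowDataSerializer fromFieldTypes = new RowDataSerializer(new VarCharType());

        System.out.println(dataType + " | " + fromRowType + " | " + fromFieldTypes);
    }
}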

Example 19 with RowDataSerializer

Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.

From the class AbstractPythonStreamAggregateOperatorTest, method getTestHarness:

protected OneInputStreamOperatorTestHarness getTestHarness(Configuration config) throws Exception {
    RowType outputType = getOutputType();
    OneInputStreamOperator operator = getTestOperator(config);
    KeyedOneInputStreamOperatorTestHarness testHarness =
            new KeyedOneInputStreamOperatorTestHarness(
                    operator,
                    KeySelectorUtil.getRowDataSelector(getGrouping(), InternalTypeInfo.of(getInputType())),
                    InternalTypeInfo.of(getKeyType()),
                    1,
                    1,
                    0);
    testHarness.getStreamConfig().setManagedMemoryFractionOperatorOfUseCase(ManagedMemoryUseCase.PYTHON, 0.5);
    testHarness.setup(new RowDataSerializer(outputType));
    return testHarness;
}
Also used: OneInputStreamOperator (org.apache.flink.streaming.api.operators.OneInputStreamOperator), RowType (org.apache.flink.table.types.logical.RowType), KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness), RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer)

Example 20 with RowDataSerializer

Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in project flink by apache.

From the class BinaryRowDataTest, method testNestedRowWithBinaryRowEquals:

@Test
public void testNestedRowWithBinaryRowEquals() {
    BinaryRowData nestedBinaryRow = new BinaryRowData(2);
    {
        BinaryRowWriter writer = new BinaryRowWriter(nestedBinaryRow);
        writer.writeInt(0, 42);
        LogicalType innerType = DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.STRING()), DataTypes.FIELD("f1", DataTypes.DOUBLE())).getLogicalType();
        RowDataSerializer innerSerializer = (RowDataSerializer) (TypeSerializer<?>) InternalSerializers.create(innerType);
        writer.writeRow(1, GenericRowData.of(StringData.fromString("Test"), 12.345), innerSerializer);
        writer.complete();
    }
    BinaryRowData innerBinaryRow = new BinaryRowData(2);
    {
        BinaryRowWriter writer = new BinaryRowWriter(innerBinaryRow);
        writer.writeString(0, StringData.fromString("Test"));
        writer.writeDouble(1, 12.345);
        writer.complete();
    }
    assertEquals(innerBinaryRow, nestedBinaryRow.getRow(1, 2));
    assertEquals(nestedBinaryRow.getRow(1, 2), innerBinaryRow);
}
Also used: BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter), TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer), LogicalType (org.apache.flink.table.types.logical.LogicalType), RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer), BinaryRowDataSerializer (org.apache.flink.table.runtime.typeutils.BinaryRowDataSerializer), Test (org.junit.Test)
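Across these examples a RowDataSerializer is built from a RowType or from individual field types and then used to copy, write, or binarize rows. The following end-to-end sketch is hypothetical glue code, assuming the varargs constructor from Example 18, the serializer's toBinaryRow helper, and Flink's DataOutputSerializer / DataInputDeserializer memory views:

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.VarCharType;

public class RowDataSerializerRoundTrip {
    public static void main(String[] args) throws Exception {
        // Serializer for a two-field row: (INT, VARCHAR).
        RowDataSerializer serializer =
                new RowDataSerializer(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));

        RowData row = GenericRowData.of(42, StringData.fromString("Test"));

        // Compact the generic row into the binary layout, the same representation
        // the BinaryRowWriter-based test in Example 20 compares against.
        BinaryRowData binaryRow = serializer.toBinaryRow(row);

        // Serialize to bytes and read the row back.
        DataOutputSerializer out = new DataOutputSerializer(64);
        serializer.serialize(binaryRow, out);
        DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
        RowData copy = serializer.deserialize(in);

        System.out.println(copy.getInt(0) + ", " + copy.getString(1)); // 42, Test
    }
}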

Aggregations

RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer): 27
RowData (org.apache.flink.table.data.RowData): 12
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 11
RowType (org.apache.flink.table.types.logical.RowType): 11
BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter): 6
RowDataKeySelector (org.apache.flink.table.runtime.keyselector.RowDataKeySelector): 6
ZoneId (java.time.ZoneId): 5
Transformation (org.apache.flink.api.dag.Transformation): 5
GenericRowData (org.apache.flink.table.data.GenericRowData): 5
ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge): 5
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 4
JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData): 3
AggregateInfoList (org.apache.flink.table.planner.plan.utils.AggregateInfoList): 3
PerKeyStateDataViewStore (org.apache.flink.table.runtime.dataview.PerKeyStateDataViewStore): 3
IntType (org.apache.flink.table.types.logical.IntType): 3
LogicalType (org.apache.flink.table.types.logical.LogicalType): 3
Test (org.junit.Test): 3
ArrayList (java.util.ArrayList): 2
List (java.util.List): 2
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 2