Example 76 with GenericRowData

Use of org.apache.flink.table.data.GenericRowData in project flink by apache.

Class InputConversionOperator, method processElement:

@Override
public void processElement(StreamRecord<E> element) throws Exception {
    final E externalRecord = element.getValue();
    final Object internalRecord;
    try {
        internalRecord = converter.toInternal(externalRecord);
    } catch (Exception e) {
        throw new FlinkRuntimeException(String.format("Error during input conversion from external DataStream API to " + "internal Table API data structures. Make sure that the " + "provided data types that configure the converters are " + "correctly declared in the schema. Affected record:\n%s", externalRecord), e);
    }
    final RowData payloadRowData;
    if (requiresWrapping) {
        final GenericRowData wrapped = new GenericRowData(RowKind.INSERT, 1);
        wrapped.setField(0, internalRecord);
        payloadRowData = wrapped;
    } else {
        // top-level records must not be null; null records are skipped
        if (internalRecord == null) {
            return;
        }
        payloadRowData = (RowData) internalRecord;
    }
    final RowKind kind = payloadRowData.getRowKind();
    if (isInsertOnly && kind != RowKind.INSERT) {
        throw new FlinkRuntimeException(String.format("Error during input conversion. Conversion expects insert-only " + "records but DataStream API record contains: %s", kind));
    }
    if (!produceRowtimeMetadata) {
        output.collect(outRecord.replace(payloadRowData));
        return;
    }
    if (!element.hasTimestamp()) {
        throw new FlinkRuntimeException("Could not find timestamp in DataStream API record. " + "Make sure that timestamps have been assigned before and " + "the event-time characteristic is enabled.");
    }
    final GenericRowData rowtimeRowData = new GenericRowData(1);
    rowtimeRowData.setField(0, TimestampData.fromEpochMillis(element.getTimestamp()));
    final JoinedRowData joinedRowData = new JoinedRowData(kind, payloadRowData, rowtimeRowData);
    output.collect(outRecord.replace(joinedRowData));
}
Also used : RowData(org.apache.flink.table.data.RowData) GenericRowData(org.apache.flink.table.data.GenericRowData) JoinedRowData(org.apache.flink.table.data.utils.JoinedRowData) RowKind(org.apache.flink.types.RowKind) FlinkRuntimeException(org.apache.flink.util.FlinkRuntimeException)
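
As a side note, the wrapping branch above can be reduced to a small standalone helper. The following is only an illustrative sketch, not Flink source; the class name WrapAsRowSketch, the method wrapAsRow, and the sample StringData payload are made up for the example.

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.types.RowKind;

public class WrapAsRowSketch {

    // Hypothetical helper mirroring the requiresWrapping branch: put a single
    // converted value into a one-field insert-only row.
    static RowData wrapAsRow(Object internalRecord) {
        final GenericRowData wrapped = new GenericRowData(RowKind.INSERT, 1);
        wrapped.setField(0, internalRecord);
        return wrapped;
    }

    public static void main(String[] args) {
        // e.g. an atomic STRING payload becomes a row of arity 1 with kind INSERT
        RowData row = wrapAsRow(StringData.fromString("hello"));
        System.out.println(row.getArity() + " " + row.getRowKind());
    }
}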

Example 77 with GenericRowData

Use of org.apache.flink.table.data.GenericRowData in project flink by apache.

Class WindowTableFunctionOperator, method open:

@Override
public void open() throws Exception {
    super.open();
    this.collector = new TimestampedCollector<>(output);
    collector.eraseTimestamp();
    outRow = new JoinedRowData();
    windowProperties = new GenericRowData(3);
}
Also used : JoinedRowData(org.apache.flink.table.data.utils.JoinedRowData) GenericRowData(org.apache.flink.table.data.GenericRowData)
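
The three-field windowProperties row created in open() typically carries the window start, window end, and window rowtime attributes, which are appended to each input row via the reused JoinedRowData. The sketch below only illustrates how those data structures fit together; it is not the operator's actual per-element code, the method name appendWindowProperties is made up, and the rowtime being windowEnd - 1 ms is an assumption.

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.data.utils.JoinedRowData;

public class WindowPropertiesSketch {

    // Hypothetical: fill window start, window end and a rowtime (assumed to be
    // windowEnd - 1 ms here) and append them to the incoming row.
    static RowData appendWindowProperties(RowData input, long windowStart, long windowEnd) {
        final GenericRowData windowProperties = new GenericRowData(3);
        windowProperties.setField(0, TimestampData.fromEpochMillis(windowStart));
        windowProperties.setField(1, TimestampData.fromEpochMillis(windowEnd));
        windowProperties.setField(2, TimestampData.fromEpochMillis(windowEnd - 1));
        return new JoinedRowData().replace(input, windowProperties);
    }
}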

Example 78 with GenericRowData

Use of org.apache.flink.table.data.GenericRowData in project flink by apache.

Class SumHashAggTestOperator, method endInput:

public void endInput() throws Exception {
    StreamRecord<RowData> outElement = new StreamRecord<>(null);
    JoinedRowData hashAggOutput = new JoinedRowData();
    GenericRowData aggValueOutput = new GenericRowData(1);
    if (sorter == null) {
        // no spilling, output by iterating aggregate map.
        KeyValueIterator<BinaryRowData, BinaryRowData> iter = aggregateMap.getEntryIterator(false);
        while (iter.advanceNext()) {
            // set result and output
            aggValueOutput.setField(0, iter.getValue().isNullAt(0) ? null : iter.getValue().getLong(0));
            hashAggOutput.replace(iter.getKey(), aggValueOutput);
            getOutput().collect(outElement.replace(hashAggOutput));
        }
    } else {
        // spill last part of input's aggregation output buffer
        sorter.sortAndSpill(aggregateMap.getRecordAreaMemorySegments(), aggregateMap.getNumElements(), new BytesHashMapSpillMemorySegmentPool(aggregateMap.getBucketAreaMemorySegments()));
        // only release non-data memory in advance.
        aggregateMap.free(true);
        // fall back to sort based aggregation
        BinaryRowData lastKey = null;
        JoinedRowData fallbackInput = new JoinedRowData();
        boolean aggSumIsNull = false;
        long aggSum = -1;
        // free hash map memory, but not release back to memory manager
        MutableObjectIterator<Tuple2<BinaryRowData, BinaryRowData>> iterator = sorter.getKVIterator();
        Tuple2<BinaryRowData, BinaryRowData> kv;
        while ((kv = iterator.next()) != null) {
            BinaryRowData key = kv.f0;
            BinaryRowData value = kv.f1;
            // prepare input
            fallbackInput.replace(key, value);
            if (lastKey == null) {
                // found first key group
                lastKey = key.copy();
                aggSumIsNull = true;
                aggSum = -1L;
            } else if (key.getSizeInBytes() != lastKey.getSizeInBytes() || !(BinaryRowDataUtil.byteArrayEquals(key.getSegments()[0].getArray(), lastKey.getSegments()[0].getArray(), key.getSizeInBytes()))) {
                // output current group aggregate result
                aggValueOutput.setField(0, aggSumIsNull ? null : aggSum);
                hashAggOutput.replace(lastKey, aggValueOutput);
                getOutput().collect(outElement.replace(hashAggOutput));
                // found new group
                lastKey = key.copy();
                aggSumIsNull = true;
                aggSum = -1L;
            }
            if (!fallbackInput.isNullAt(1)) {
                long sumInput = fallbackInput.getLong(1);
                if (aggSumIsNull) {
                    aggSum = sumInput;
                } else {
                    aggSum = aggSum + sumInput;
                }
                aggSumIsNull = false;
            }
        }
        // output last key group aggregate result
        aggValueOutput.setField(0, aggSumIsNull ? null : aggSum);
        hashAggOutput.replace(lastKey, aggValueOutput);
        getOutput().collect(outElement.replace(hashAggOutput));
    }
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) JoinedRowData(org.apache.flink.table.data.utils.JoinedRowData) Tuple2(org.apache.flink.api.java.tuple.Tuple2)
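
Worth noting in this example is the object-reuse pattern: a single GenericRowData holds the current aggregate value and a single JoinedRowData glues key and value together, so no row objects are allocated per emitted group. A stripped-down sketch of that emission step, with illustrative names that are not from the Flink sources:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.utils.JoinedRowData;

public class EmitGroupSketch {

    // Hypothetical helper: write the (possibly null) sum into the reused value row
    // and join it with the group key; both row objects are reused across calls.
    static RowData emitGroup(
            JoinedRowData reusedOutput,
            GenericRowData reusedAggValue,
            RowData key,
            Long sumOrNull) {
        reusedAggValue.setField(0, sumOrNull);
        return reusedOutput.replace(key, reusedAggValue);
    }
}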

Example 79 with GenericRowData

Use of org.apache.flink.table.data.GenericRowData in project flink by apache.

Class RowRowConverter, method toInternal:

@Override
public RowData toInternal(Row external) {
    final int length = fieldConverters.length;
    final GenericRowData genericRow = new GenericRowData(external.getKind(), length);
    final Set<String> fieldNames = external.getFieldNames(false);
    // position-based field access
    if (fieldNames == null) {
        for (int pos = 0; pos < length; pos++) {
            final Object value = external.getField(pos);
            genericRow.setField(pos, fieldConverters[pos].toInternalOrNull(value));
        }
    } else {
        // name-based field access
        for (String fieldName : fieldNames) {
            final Integer targetPos = positionByName.get(fieldName);
            if (targetPos == null) {
                throw new IllegalArgumentException(String.format("Unknown field name '%s' for mapping to a row position. " + "Available names are: %s", fieldName, positionByName.keySet()));
            }
            final Object value = external.getField(fieldName);
            genericRow.setField(targetPos, fieldConverters[targetPos].toInternalOrNull(value));
        }
    }
    return genericRow;
}
Also used : GenericRowData(org.apache.flink.table.data.GenericRowData)
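
For reference, a RowRowConverter like the one whose toInternal(...) is shown above is normally created from a row DataType and opened before use. A minimal usage sketch with made-up field names and types:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.conversion.RowRowConverter;
import org.apache.flink.types.Row;

public class RowRowConverterSketch {

    public static void main(String[] args) {
        // Converter for a hypothetical ROW<id INT, name STRING> type.
        RowRowConverter converter = RowRowConverter.create(
                DataTypes.ROW(
                        DataTypes.FIELD("id", DataTypes.INT()),
                        DataTypes.FIELD("name", DataTypes.STRING())));
        converter.open(Thread.currentThread().getContextClassLoader());

        // toInternal(...) maps the external Row onto a GenericRowData-backed RowData.
        RowData internal = converter.toInternal(Row.of(1, "flink"));
        System.out.println(internal.getInt(0) + " " + internal.getString(1));
    }
}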

Example 80 with GenericRowData

Use of org.apache.flink.table.data.GenericRowData in project flink by apache.

Class RowDataSerializer, method copyRowData:

@SuppressWarnings("unchecked")
private RowData copyRowData(RowData from, RowData reuse) {
    GenericRowData ret;
    if (reuse instanceof GenericRowData) {
        ret = (GenericRowData) reuse;
    } else {
        ret = new GenericRowData(from.getArity());
    }
    ret.setRowKind(from.getRowKind());
    for (int i = 0; i < from.getArity(); i++) {
        if (!from.isNullAt(i)) {
            ret.setField(i, fieldSerializers[i].copy((fieldGetters[i].getFieldOrNull(from))));
        } else {
            ret.setField(i, null);
        }
    }
    return ret;
}
Also used : GenericRowData(org.apache.flink.table.data.GenericRowData)
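
The fieldGetters used above are typically created per field position via RowData.createFieldGetter. The sketch below shows a simplified copy that relies only on those getters and therefore shares (rather than deep-copies) nested mutable values, unlike the serializer-based copy above; the class and method names are made up.

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.logical.LogicalType;

public class ShallowCopySketch {

    // Hypothetical: copy field references into a fresh GenericRowData, preserving
    // the row kind and null fields, using position-based field getters.
    static GenericRowData shallowCopy(RowData from, LogicalType[] fieldTypes) {
        final GenericRowData copy = new GenericRowData(from.getArity());
        copy.setRowKind(from.getRowKind());
        for (int i = 0; i < from.getArity(); i++) {
            final RowData.FieldGetter getter = RowData.createFieldGetter(fieldTypes[i], i);
            copy.setField(i, getter.getFieldOrNull(from));
        }
        return copy;
    }
}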

Aggregations (usage counts)

GenericRowData (org.apache.flink.table.data.GenericRowData): 94
RowData (org.apache.flink.table.data.RowData): 32
JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData): 16
Test (org.junit.Test): 14
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 13
RowType (org.apache.flink.table.types.logical.RowType): 13
IOException (java.io.IOException): 11
ArrayList (java.util.ArrayList): 11
IntType (org.apache.flink.table.types.logical.IntType): 11
List (java.util.List): 9
LogicalType (org.apache.flink.table.types.logical.LogicalType): 9
GenericArrayData (org.apache.flink.table.data.GenericArrayData): 6
StringData (org.apache.flink.table.data.StringData): 6
Arrays (java.util.Arrays): 5
HashMap (java.util.HashMap): 5
OutputStream (java.io.OutputStream): 4
PrintStream (java.io.PrintStream): 4
Collections (java.util.Collections): 4
Random (java.util.Random): 4
Consumer (java.util.function.Consumer): 4