Use of org.apache.flink.table.data.GenericRowData in project flink by apache.
The class InputConversionOperator, method processElement.
@Override
public void processElement(StreamRecord<E> element) throws Exception {
    final E externalRecord = element.getValue();

    final Object internalRecord;
    try {
        internalRecord = converter.toInternal(externalRecord);
    } catch (Exception e) {
        throw new FlinkRuntimeException(
                String.format(
                        "Error during input conversion from external DataStream API to "
                                + "internal Table API data structures. Make sure that the "
                                + "provided data types that configure the converters are "
                                + "correctly declared in the schema. Affected record:\n%s",
                        externalRecord),
                e);
    }

    final RowData payloadRowData;
    if (requiresWrapping) {
        final GenericRowData wrapped = new GenericRowData(RowKind.INSERT, 1);
        wrapped.setField(0, internalRecord);
        payloadRowData = wrapped;
    } else {
        // top-level records must not be null; null records are skipped
        if (internalRecord == null) {
            return;
        }
        payloadRowData = (RowData) internalRecord;
    }

    final RowKind kind = payloadRowData.getRowKind();
    if (isInsertOnly && kind != RowKind.INSERT) {
        throw new FlinkRuntimeException(
                String.format(
                        "Error during input conversion. Conversion expects insert-only "
                                + "records but DataStream API record contains: %s",
                        kind));
    }

    if (!produceRowtimeMetadata) {
        output.collect(outRecord.replace(payloadRowData));
        return;
    }

    if (!element.hasTimestamp()) {
        throw new FlinkRuntimeException(
                "Could not find timestamp in DataStream API record. "
                        + "Make sure that timestamps have been assigned before and "
                        + "the event-time characteristic is enabled.");
    }

    final GenericRowData rowtimeRowData = new GenericRowData(1);
    rowtimeRowData.setField(0, TimestampData.fromEpochMillis(element.getTimestamp()));

    final JoinedRowData joinedRowData = new JoinedRowData(kind, payloadRowData, rowtimeRowData);
    output.collect(outRecord.replace(joinedRowData));
}
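As a standalone illustration of the requiresWrapping branch above, the following sketch wraps a hypothetical atomic value into a single-column insert-only row; the payload value is made up, but GenericRowData(RowKind, int) and StringData.fromString are the real Flink APIs used by the operator:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.types.RowKind;

public class WrappingSketch {
    public static void main(String[] args) {
        // hypothetical atomic value coming from the DataStream side
        Object internalRecord = StringData.fromString("payload");

        // wrap it into a single-column insert-only row, as the operator does
        GenericRowData wrapped = new GenericRowData(RowKind.INSERT, 1);
        wrapped.setField(0, internalRecord);

        RowData payloadRowData = wrapped;
        System.out.println(payloadRowData.getString(0)); // payload
    }
}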
Use of org.apache.flink.table.data.GenericRowData in project flink by apache.
The class WindowTableFunctionOperator, method open.
@Override
public void open() throws Exception {
    super.open();
    this.collector = new TimestampedCollector<>(output);
    collector.eraseTimestamp();
    outRow = new JoinedRowData();
    // reused row for the three window attribute columns (window_start, window_end, window_time)
    windowProperties = new GenericRowData(3);
}
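For context, a minimal sketch of how such a three-field properties row might be populated per record, assuming the three columns are window_start, window_end, and window_time; the millisecond values and the "last timestamp of the window" convention are illustrative:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.TimestampData;

public class WindowPropertiesSketch {
    public static void main(String[] args) {
        // hypothetical window bounds in epoch milliseconds
        long windowStart = 0L;
        long windowEnd = 5_000L;

        GenericRowData windowProperties = new GenericRowData(3);
        windowProperties.setField(0, TimestampData.fromEpochMillis(windowStart));
        windowProperties.setField(1, TimestampData.fromEpochMillis(windowEnd));
        // window_time is commonly the last timestamp that still belongs to the window
        windowProperties.setField(2, TimestampData.fromEpochMillis(windowEnd - 1));

        // read back with millisecond precision (3)
        System.out.println(windowProperties.getTimestamp(2, 3));
    }
}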
Use of org.apache.flink.table.data.GenericRowData in project flink by apache.
The class SumHashAggTestOperator, method endInput.
public void endInput() throws Exception {
    StreamRecord<RowData> outElement = new StreamRecord<>(null);
    JoinedRowData hashAggOutput = new JoinedRowData();
    GenericRowData aggValueOutput = new GenericRowData(1);

    if (sorter == null) {
        // no spilling, output by iterating the aggregate map
        KeyValueIterator<BinaryRowData, BinaryRowData> iter =
                aggregateMap.getEntryIterator(false);
        while (iter.advanceNext()) {
            // set result and output
            aggValueOutput.setField(
                    0, iter.getValue().isNullAt(0) ? null : iter.getValue().getLong(0));
            hashAggOutput.replace(iter.getKey(), aggValueOutput);
            getOutput().collect(outElement.replace(hashAggOutput));
        }
    } else {
        // spill the last part of the input's aggregation output buffer
        sorter.sortAndSpill(
                aggregateMap.getRecordAreaMemorySegments(),
                aggregateMap.getNumElements(),
                new BytesHashMapSpillMemorySegmentPool(
                        aggregateMap.getBucketAreaMemorySegments()));
        // only release non-data memory in advance
        aggregateMap.free(true);

        // fall back to sort-based aggregation
        BinaryRowData lastKey = null;
        JoinedRowData fallbackInput = new JoinedRowData();
        boolean aggSumIsNull = false;
        long aggSum = -1;

        // free hash map memory, but do not release it back to the memory manager
        MutableObjectIterator<Tuple2<BinaryRowData, BinaryRowData>> iterator =
                sorter.getKVIterator();
        Tuple2<BinaryRowData, BinaryRowData> kv;
        while ((kv = iterator.next()) != null) {
            BinaryRowData key = kv.f0;
            BinaryRowData value = kv.f1;
            // prepare input
            fallbackInput.replace(key, value);
            if (lastKey == null) {
                // found the first key group
                lastKey = key.copy();
                aggSumIsNull = true;
                aggSum = -1L;
            } else if (key.getSizeInBytes() != lastKey.getSizeInBytes()
                    || !(BinaryRowDataUtil.byteArrayEquals(
                            key.getSegments()[0].getArray(),
                            lastKey.getSegments()[0].getArray(),
                            key.getSizeInBytes()))) {
                // output the aggregate result of the current group
                aggValueOutput.setField(0, aggSumIsNull ? null : aggSum);
                hashAggOutput.replace(lastKey, aggValueOutput);
                getOutput().collect(outElement.replace(hashAggOutput));
                // found a new group
                lastKey = key.copy();
                aggSumIsNull = true;
                aggSum = -1L;
            }
            if (!fallbackInput.isNullAt(1)) {
                long sumInput = fallbackInput.getLong(1);
                if (aggSumIsNull) {
                    aggSum = sumInput;
                } else {
                    aggSum = aggSum + sumInput;
                }
                aggSumIsNull = false;
            }
        }

        // output the aggregate result of the last key group
        aggValueOutput.setField(0, aggSumIsNull ? null : aggSum);
        hashAggOutput.replace(lastKey, aggValueOutput);
        getOutput().collect(outElement.replace(hashAggOutput));
    }
}
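Both output paths above stitch the grouping key and the aggregate value together with JoinedRowData instead of copying fields into a single row. A minimal sketch of that pattern with a made-up key and sum; the JoinedRowData import path reflects recent Flink versions and may differ in older ones:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.utils.JoinedRowData;

public class HashAggOutputSketch {
    public static void main(String[] args) {
        // hypothetical grouping key and aggregate value
        GenericRowData key = new GenericRowData(1);
        key.setField(0, StringData.fromString("group-a"));
        GenericRowData aggValue = new GenericRowData(1);
        aggValue.setField(0, 42L); // a null field would mean "no non-null inputs"

        // JoinedRowData presents the two rows as one, without copying either
        RowData output = new JoinedRowData(key, aggValue);
        System.out.println(output.getString(0)); // group-a
        System.out.println(output.getLong(1));   // 42
    }
}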
Use of org.apache.flink.table.data.GenericRowData in project flink by apache.
The class RowRowConverter, method toInternal.
@Override
public RowData toInternal(Row external) {
    final int length = fieldConverters.length;
    final GenericRowData genericRow = new GenericRowData(external.getKind(), length);
    final Set<String> fieldNames = external.getFieldNames(false);

    // position-based field access
    if (fieldNames == null) {
        for (int pos = 0; pos < length; pos++) {
            final Object value = external.getField(pos);
            genericRow.setField(pos, fieldConverters[pos].toInternalOrNull(value));
        }
    }
    // name-based field access
    else {
        for (String fieldName : fieldNames) {
            final Integer targetPos = positionByName.get(fieldName);
            if (targetPos == null) {
                throw new IllegalArgumentException(
                        String.format(
                                "Unknown field name '%s' for mapping to a row position. "
                                        + "Available names are: %s",
                                fieldName, positionByName.keySet()));
            }
            final Object value = external.getField(fieldName);
            genericRow.setField(targetPos, fieldConverters[targetPos].toInternalOrNull(value));
        }
    }
    return genericRow;
}
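A minimal, hypothetical usage sketch of this converter; the two-column schema and values are invented, while RowRowConverter.create and open are the public API for obtaining and initializing the converter:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.conversion.RowRowConverter;
import org.apache.flink.types.Row;

public class RowRowConverterSketch {
    public static void main(String[] args) {
        // hypothetical two-column schema
        RowRowConverter converter =
                RowRowConverter.create(
                        DataTypes.ROW(
                                DataTypes.FIELD("name", DataTypes.STRING()),
                                DataTypes.FIELD("score", DataTypes.INT())));
        converter.open(Thread.currentThread().getContextClassLoader());

        // position-based Row; a named Row would take the name-based branch above
        RowData internal = converter.toInternal(Row.of("alice", 1));
        System.out.println(internal.getString(0)); // alice
        System.out.println(internal.getInt(1));    // 1
    }
}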
Use of org.apache.flink.table.data.GenericRowData in project flink by apache.
The class RowDataSerializer, method copyRowData.
@SuppressWarnings("unchecked")
private RowData copyRowData(RowData from, RowData reuse) {
    GenericRowData ret;
    if (reuse instanceof GenericRowData) {
        ret = (GenericRowData) reuse;
    } else {
        ret = new GenericRowData(from.getArity());
    }
    ret.setRowKind(from.getRowKind());
    for (int i = 0; i < from.getArity(); i++) {
        if (!from.isNullAt(i)) {
            ret.setField(i, fieldSerializers[i].copy(fieldGetters[i].getFieldOrNull(from)));
        } else {
            ret.setField(i, null);
        }
    }
    return ret;
}
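The fieldGetters used above are typically created per column with RowData.createFieldGetter, which resolves the type-specific accessor (getString, getLong, ...) once instead of branching on every access. A small self-contained sketch; the column types and values are illustrative:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;

public class FieldGetterSketch {
    public static void main(String[] args) {
        GenericRowData row = new GenericRowData(2);
        row.setField(0, StringData.fromString("a"));
        row.setField(1, null);

        // one getter per column, bound to that column's logical type and position
        RowData.FieldGetter getter0 =
                RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0);
        RowData.FieldGetter getter1 =
                RowData.createFieldGetter(DataTypes.BIGINT().getLogicalType(), 1);

        System.out.println(getter0.getFieldOrNull(row)); // a
        System.out.println(getter1.getFieldOrNull(row)); // null
    }
}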