Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the project flink by apache.
The class StreamRecordUtils, method binaryrow().
/**
 * Receives an object array and generates a {@link BinaryRowData} based on the array.
 *
 * <p>Supported element types: {@code null}, {@code Byte}, {@code Short}, {@code Integer},
 * {@code String}, {@code Double}, {@code Float}, {@code Long}, {@code Boolean}, {@code byte[]},
 * {@link DecimalData}, {@link TimestampData}, and {@code Tuple2} pairs of
 * ({@link TimestampData}, precision), ({@link ArrayData}, {@link ArrayDataSerializer}) or
 * ({@link RowData}, {@link RowDataSerializer}) for nested structures.
 *
 * @param fields input object array; a {@code null} element produces a null field
 * @return generated BinaryRowData
 * @throws RuntimeException if an element has an unsupported type
 */
public static BinaryRowData binaryrow(Object... fields) {
    BinaryRowData row = new BinaryRowData(fields.length);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    for (int j = 0; j < fields.length; j++) {
        Object value = fields[j];
        if (value == null) {
            writer.setNullAt(j);
        } else if (value instanceof Byte) {
            writer.writeByte(j, (Byte) value);
        } else if (value instanceof Short) {
            writer.writeShort(j, (Short) value);
        } else if (value instanceof Integer) {
            writer.writeInt(j, (Integer) value);
        } else if (value instanceof String) {
            writer.writeString(j, StringData.fromString((String) value));
        } else if (value instanceof Double) {
            writer.writeDouble(j, (Double) value);
        } else if (value instanceof Float) {
            writer.writeFloat(j, (Float) value);
        } else if (value instanceof Long) {
            writer.writeLong(j, (Long) value);
        } else if (value instanceof Boolean) {
            writer.writeBoolean(j, (Boolean) value);
        } else if (value instanceof byte[]) {
            writer.writeBinary(j, (byte[]) value);
        } else if (value instanceof DecimalData) {
            DecimalData decimal = (DecimalData) value;
            writer.writeDecimal(j, decimal, decimal.precision());
        } else if (value instanceof TimestampData) {
            // A bare TimestampData is written with precision 3 (milliseconds); use the
            // Tuple2<TimestampData, Integer> form below to specify a different precision.
            TimestampData timestamp = (TimestampData) value;
            writer.writeTimestamp(j, timestamp, 3);
        } else if (value instanceof Tuple2 && ((Tuple2) value).f0 instanceof TimestampData) {
            // Tuple2<TimestampData, Integer>: timestamp with an explicit precision.
            TimestampData timestamp = (TimestampData) ((Tuple2) value).f0;
            writer.writeTimestamp(j, timestamp, (int) ((Tuple2) value).f1);
        } else if (value instanceof Tuple2 && ((Tuple2) value).f0 instanceof ArrayData) {
            // Tuple2<ArrayData, ArrayDataSerializer>: nested array with its serializer.
            ArrayData array = (ArrayData) ((Tuple2) value).f0;
            ArrayDataSerializer serializer = (ArrayDataSerializer) ((Tuple2) value).f1;
            writer.writeArray(j, array, serializer);
        } else if (value instanceof Tuple2 && ((Tuple2) value).f0 instanceof RowData) {
            // Tuple2<RowData, RowDataSerializer>: nested row with its serializer.
            RowData rowData = ((RowData) ((Tuple2) value).f0);
            RowDataSerializer serializer = (RowDataSerializer) ((Tuple2) value).f1;
            writer.writeRow(j, rowData, serializer);
        } else {
            // Name the field index and the offending class so a failing test is easy to
            // diagnose (the previous bare "Not support yet!" message gave no context).
            throw new RuntimeException(
                    "Type is not supported yet for field "
                            + j
                            + ": "
                            + value.getClass().getName());
        }
    }
    writer.complete();
    return row;
}
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the project flink by apache.
The class TableAssertionTest, method testAssertRowDataWithConversion().
@Test
void testAssertRowDataWithConversion() {
    // Row type under test: INT, STRING, ARRAY<BOOLEAN NOT NULL>.
    DataType rowDataType =
            ROW(FIELD("a", INT()), FIELD("b", STRING()), FIELD("c", ARRAY(BOOLEAN().notNull())));

    // Same logical row in three physical representations: generic, binary, and external.
    GenericRowData genericRow =
            GenericRowData.of(
                    10,
                    StringData.fromString("my string"),
                    new GenericArrayData(new boolean[] { true, false }));
    RowDataSerializer serializer = new RowDataSerializer((RowType) rowDataType.getLogicalType());
    BinaryRowData binaryRow = serializer.toBinaryRow(genericRow);
    Row externalRow = Row.of(10, "my string", new Boolean[] { true, false });

    // Test equality with RowData
    assertThat(binaryRow).asGeneric(rowDataType).isEqualTo(genericRow).isEqualTo(binaryRow.copy());
    assertThatRows(binaryRow).asGeneric(rowDataType).containsOnly(genericRow).containsOnly(binaryRow);

    // Test equality with Row
    assertThat(binaryRow).asRow(rowDataType).isEqualTo(externalRow);
    assertThatRows(binaryRow).asRows(rowDataType).containsOnly(externalRow);
}
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the project flink by apache.
The class StreamExecLocalWindowAggregate, method translateToPlanInternal().
/**
 * Translates this local window aggregate node into a one-input {@code Transformation}.
 *
 * <p>The local aggregation runs before the shuffle: it buffers input records per grouping key
 * in an in-memory window buffer and emits pre-aggregated results for the downstream global
 * window aggregate to consume.
 */
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
// This node has exactly one input edge.
final ExecEdge inputEdge = getInputEdges().get(0);
final Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner);
final RowType inputRowType = (RowType) inputEdge.getOutputType();
// Time zone used to shift window boundaries, derived from the time attribute type and the
// session's configured local time zone.
final ZoneId shiftTimeZone = TimeWindowUtil.getShiftTimeZone(windowing.getTimeAttributeType(), config.getLocalTimeZone());
// Assigns each input record to a window slice according to the windowing strategy.
final SliceAssigner sliceAssigner = createSliceAssigner(windowing, shiftTimeZone);
// isStateBackendDataViews = false: presumably because the local aggregate is a stateless,
// in-memory pre-aggregation rather than a keyed-state operator — TODO confirm.
final AggregateInfoList aggInfoList = AggregateUtil.deriveStreamWindowAggregateInfoList(inputRowType, JavaScalaConversionUtil.toScala(Arrays.asList(aggCalls)), windowing.getWindow(), // isStateBackendDataViews
false);
// Code-generated aggregation handler; the Long namespace type comes from the generated
// function's signature (presumably the slice end — confirm against createAggsHandler).
final GeneratedNamespaceAggsHandleFunction<Long> generatedAggsHandler = createAggsHandler(sliceAssigner, aggInfoList, config, planner.getRelBuilder(), inputRowType.getChildren(), shiftTimeZone);
// Key selector over the grouping columns; its serializers back the window buffer below.
final RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector(grouping, InternalTypeInfo.of(inputRowType));
PagedTypeSerializer<RowData> keySer = (PagedTypeSerializer<RowData>) selector.getProducedType().toSerializer();
AbstractRowDataSerializer<RowData> valueSer = new RowDataSerializer(inputRowType);
// Buffer factory combining buffered records per key via the generated aggs handler.
WindowBuffer.LocalFactory bufferFactory = new RecordsWindowBuffer.LocalFactory(keySer, valueSer, new LocalAggCombiner.Factory(generatedAggsHandler));
final OneInputStreamOperator<RowData, RowData> localAggOperator = new LocalSlicingWindowAggOperator(selector, sliceAssigner, bufferFactory, shiftTimeZone);
return ExecNodeUtil.createOneInputTransformation(inputTransform, createTransformationMeta(LOCAL_WINDOW_AGGREGATE_TRANSFORMATION, config), SimpleOperatorFactory.of(localAggOperator), InternalTypeInfo.of(getOutputType()), inputTransform.getParallelism(), // use less memory here to let the chained head operator can have more memory
WINDOW_AGG_MEMORY_RATIO / 2);
}
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the project flink by apache.
The class StreamExecWindowDeduplicate, method translateToPlanInternal().
/**
 * Translates this window deduplication node into a one-input {@code Transformation}.
 *
 * <p>Only event-time (rowtime) deduplication over a {@code WindowAttachedWindowingStrategy}
 * is supported; the operator reads the window end directly from an input column.
 */
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
// validate window strategy
if (!windowing.isRowtime()) {
throw new TableException("Processing time Window Deduplication is not supported yet.");
}
// The window end must be materialized as an input column, so only the attached strategy
// can supply its index.
int windowEndIndex;
if (windowing instanceof WindowAttachedWindowingStrategy) {
windowEndIndex = ((WindowAttachedWindowingStrategy) windowing).getWindowEnd();
} else {
throw new UnsupportedOperationException(windowing.getClass().getName() + " is not supported yet.");
}
// This node has exactly one input edge.
ExecEdge inputEdge = getInputEdges().get(0);
Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner);
// Time zone used to shift window boundaries, derived from the time attribute type and the
// session's configured local time zone.
ZoneId shiftTimeZone = TimeWindowUtil.getShiftTimeZone(windowing.getTimeAttributeType(), config.getLocalTimeZone());
RowType inputType = (RowType) inputEdge.getOutputType();
// Key selector over the deduplication partition keys; reused below as the state key selector.
RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector(partitionKeys, InternalTypeInfo.of(inputType));
// Builder-assembled operator: orderKey serves as the rowtime index for ordering rows
// within a window; keepLastRow decides whether the first or last row survives.
OneInputStreamOperator<RowData, RowData> operator = RowTimeWindowDeduplicateOperatorBuilder.builder().inputSerializer(new RowDataSerializer(inputType)).shiftTimeZone(shiftTimeZone).keySerializer((PagedTypeSerializer<RowData>) selector.getProducedType().toSerializer()).keepLastRow(keepLastRow).rowtimeIndex(orderKey).windowEndIndex(windowEndIndex).build();
OneInputTransformation<RowData, RowData> transform = ExecNodeUtil.createOneInputTransformation(inputTransform, createTransformationMeta(WINDOW_DEDUPLICATE_TRANSFORMATION, config), SimpleOperatorFactory.of(operator), InternalTypeInfo.of(getOutputType()), inputTransform.getParallelism(), WINDOW_RANK_MEMORY_RATIO);
// set KeyType and Selector for state
transform.setStateKeySelector(selector);
transform.setStateKeyType(selector.getProducedType());
return transform;
}
Use of org.apache.flink.table.runtime.typeutils.RowDataSerializer in the project flink by apache.
The class StreamExecWindowRank, method translateToPlanInternal().
/**
 * Translates this window rank (Window TopN) node into a one-input {@code Transformation}.
 *
 * <p>Supported configuration is restricted: ROW_NUMBER only, event-time (rowtime) windows
 * only, an attached windowing strategy (window end available as an input column), and a
 * constant rank range.
 */
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
// validate rank type
switch(rankType) {
case ROW_NUMBER:
break;
case RANK:
throw new TableException("RANK() function is not supported on Window TopN currently, only ROW_NUMBER() is supported.");
case DENSE_RANK:
throw new TableException("DENSE_RANK() function is not supported on Window TopN currently, only ROW_NUMBER() is supported.");
default:
throw new TableException(String.format("%s() function is not supported on Window TopN currently, only ROW_NUMBER() is supported.", rankType));
}
// validate window strategy
if (!windowing.isRowtime()) {
throw new TableException("Processing time Window TopN is not supported yet.");
}
// The window end must be materialized as an input column, so only the attached strategy
// can supply its index.
int windowEndIndex;
if (windowing instanceof WindowAttachedWindowingStrategy) {
windowEndIndex = ((WindowAttachedWindowingStrategy) windowing).getWindowEnd();
} else {
throw new UnsupportedOperationException(windowing.getClass().getName() + " is not supported yet.");
}
// This node has exactly one input edge.
ExecEdge inputEdge = getInputEdges().get(0);
RowType inputType = (RowType) inputEdge.getOutputType();
// validate rank range
ConstantRankRange constantRankRange;
if (rankRange instanceof ConstantRankRange) {
constantRankRange = (ConstantRankRange) rankRange;
} else {
throw new TableException(String.format("Rank strategy %s is not supported on window rank currently.", rankRange.toString(inputType.getFieldNames())));
}
Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner);
InternalTypeInfo<RowData> inputRowTypeInfo = InternalTypeInfo.of(inputType);
// Selector projecting the sort columns out of the full input row.
int[] sortFields = sortSpec.getFieldIndices();
RowDataKeySelector sortKeySelector = KeySelectorUtil.getRowDataSelector(sortFields, inputRowTypeInfo);
// Rebuild the sort spec with indices 0..n-1: the generated comparator operates on the
// projected sort-key row (see RowType.of(...) below), not on the full input row, so the
// original field indices must be rebased while order/null directions are kept.
SortSpec.SortSpecBuilder builder = SortSpec.builder();
IntStream.range(0, sortFields.length).forEach(idx -> builder.addField(idx, sortSpec.getFieldSpec(idx).getIsAscendingOrder(), sortSpec.getFieldSpec(idx).getNullIsLast()));
SortSpec sortSpecInSortKey = builder.build();
// Time zone used to shift window boundaries, derived from the time attribute type and the
// session's configured local time zone.
ZoneId shiftTimeZone = TimeWindowUtil.getShiftTimeZone(windowing.getTimeAttributeType(), config.getLocalTimeZone());
// Code-generated comparator over the projected sort-key row type.
GeneratedRecordComparator sortKeyComparator = ComparatorCodeGenerator.gen(config.getTableConfig(), "StreamExecSortComparator", RowType.of(sortSpec.getFieldTypes(inputType)), sortSpecInSortKey);
// Key selector over the partition columns; reused below as the state key selector.
RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector(partitionSpec.getFieldIndices(), inputRowTypeInfo);
OneInputStreamOperator<RowData, RowData> operator = WindowRankOperatorBuilder.builder().inputSerializer(new RowDataSerializer(inputType)).shiftTimeZone(shiftTimeZone).keySerializer((PagedTypeSerializer<RowData>) selector.getProducedType().toSerializer()).sortKeySelector(sortKeySelector).sortKeyComparator(sortKeyComparator).outputRankNumber(outputRankNumber).rankStart(constantRankRange.getRankStart()).rankEnd(constantRankRange.getRankEnd()).windowEndIndex(windowEndIndex).build();
OneInputTransformation<RowData, RowData> transform = ExecNodeUtil.createOneInputTransformation(inputTransform, createTransformationMeta(WINDOW_RANK_TRANSFORMATION, config), SimpleOperatorFactory.of(operator), InternalTypeInfo.of(getOutputType()), inputTransform.getParallelism(), WINDOW_RANK_MEMORY_RATIO);
// set KeyType and Selector for state
transform.setStateKeySelector(selector);
transform.setStateKeyType(selector.getProducedType());
return transform;
}
Aggregations