Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
From class KeySelectorUtil, method getRowDataSelector.
/**
 * Builds a {@link RowDataKeySelector} that extracts the given key fields from a DataStream
 * whose type is {@link InternalTypeInfo} of {@link RowData}.
 *
 * @param keyFields indices of the key fields within the input row
 * @param rowType type of the DataStream to extract keys from
 * @return a selector extracting the key fields; a shared empty selector if no fields are given
 */
public static RowDataKeySelector getRowDataSelector(int[] keyFields, InternalTypeInfo<RowData> rowType) {
    if (keyFields.length == 0) {
        // No key fields: use the shared stateless empty selector.
        return EmptyRowDataKeySelector.INSTANCE;
    }
    final LogicalType[] allFieldTypes = rowType.toRowFieldTypes();
    final LogicalType[] selectedTypes = new LogicalType[keyFields.length];
    for (int pos = 0; pos < keyFields.length; pos++) {
        selectedTypes[pos] = allFieldTypes[keyFields[pos]];
    }
    // Do not provide field names for the result key type, because we may have
    // duplicate key fields and the field names may conflict.
    final RowType keyType = RowType.of(selectedTypes);
    final GeneratedProjection projection =
            ProjectionCodeGenerator.generateProjection(
                    CodeGeneratorContext.apply(new TableConfig()),
                    "KeyProjection",
                    rowType.toRowType(),
                    keyType,
                    keyFields);
    return new BinaryRowDataKeySelector(InternalTypeInfo.of(keyType), projection);
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
From class RawFormatFactory, method createEncodingFormat.
@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    // Resolve the options up front so the anonymous format captures plain values.
    final String charset = validateAndGetCharsetName(formatOptions);
    final boolean bigEndian = isBigEndian(formatOptions);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            // The raw format serializes exactly one physical field; validate and extract it.
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            final LogicalType singleFieldType = validateAndExtractSingleField(rowType);
            return new RawFormatSerializationSchema(singleFieldType, charset, bigEndian);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            // Only INSERT rows are produced by this format.
            return ChangelogMode.insertOnly();
        }
    };
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
From class CommonExecSink, method getFieldInfoForLengthEnforcer.
/**
 * Returns a List of {@link ConstraintEnforcer.FieldInfo}, each containing the info needed to
 * determine whether a string or binary value needs trimming and/or padding.
 *
 * @param physicalType physical row type of the sink
 * @param enforcerType selects CHAR/VARCHAR or BINARY/VARBINARY length enforcement
 * @return info for every field whose declared length may require trimming and/or padding
 * @throws IllegalArgumentException if {@code enforcerType} is not CHAR or BINARY
 */
private List<ConstraintEnforcer.FieldInfo> getFieldInfoForLengthEnforcer(RowType physicalType, LengthEnforcerType enforcerType) {
    final LogicalTypeRoot staticType;
    final LogicalTypeRoot variableType;
    final int maxLength;
    switch (enforcerType) {
        case CHAR:
            staticType = LogicalTypeRoot.CHAR;
            variableType = LogicalTypeRoot.VARCHAR;
            maxLength = CharType.MAX_LENGTH;
            break;
        case BINARY:
            staticType = LogicalTypeRoot.BINARY;
            variableType = LogicalTypeRoot.VARBINARY;
            maxLength = BinaryType.MAX_LENGTH;
            break;
        default:
            // Fail fast instead of NPE-ing later on a null type root in type.is(...).
            throw new IllegalArgumentException(
                    "Unsupported length enforcer type: " + enforcerType);
    }
    final List<ConstraintEnforcer.FieldInfo> fieldsAndLengths = new ArrayList<>();
    for (int i = 0; i < physicalType.getFieldCount(); i++) {
        final LogicalType type = physicalType.getTypeAt(i);
        final boolean isStatic = type.is(staticType);
        // Should trim and possibly pad: any matching type declared shorter than the maximum.
        if ((isStatic || type.is(variableType))
                && LogicalTypeChecks.getLength(type) < maxLength) {
            fieldsAndLengths.add(
                    new ConstraintEnforcer.FieldInfo(
                            i, LogicalTypeChecks.getLength(type), isStatic));
        } else if (isStatic) {
            // Static type at maximum length: should pad only (null length = no trimming).
            fieldsAndLengths.add(new ConstraintEnforcer.FieldInfo(i, null, isStatic));
        }
    }
    return fieldsAndLengths;
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
From class LogicalTypeJsonDeserializer, method deserializeDistinctTypeFromPlan.
// Rebuilds a DISTINCT type from its serialized plan representation: deserializes the
// nested source type, then applies the optional description if present in the node.
private static LogicalType deserializeDistinctTypeFromPlan(ObjectIdentifier identifier, JsonNode logicalTypeNode, SerdeContext serdeContext) {
    final LogicalType sourceType =
            deserialize(logicalTypeNode.get(FIELD_NAME_SOURCE_TYPE), serdeContext);
    final DistinctType.Builder builder = DistinctType.newBuilder(identifier, sourceType);
    // get(...) returns null only when the field is absent, mirroring has(...).
    final JsonNode descriptionNode = logicalTypeNode.get(FIELD_NAME_FIELD_DESCRIPTION);
    if (descriptionNode != null) {
        builder.description(descriptionNode.asText());
    }
    return builder.build();
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
From class RelDataTypeJsonSerializer, method serialize.
@Override
public void serialize(RelDataType relDataType, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException {
    final SerdeContext serdeContext = SerdeContext.get(serializerProvider);
    final DataTypeFactory dataTypeFactory =
            serdeContext.getFlinkContext().getCatalogManager().getDataTypeFactory();
    // Conversion to LogicalType also ensures that Calcite's type system is materialized,
    // so data types like DECIMAL will receive a concrete precision and scale (not
    // unspecified anymore).
    final LogicalType logicalType =
            LogicalRelDataTypeConverter.toLogicalType(relDataType, dataTypeFactory);
    serializerProvider.defaultSerializeValue(logicalType, jsonGenerator);
}
Aggregations