Use of org.apache.flink.table.types.logical.LocalZonedTimestampType in project flink by apache.
The following code is from the class BinaryWriter, method write.
// --------------------------------------------------------------------------------------------
/**
* @deprecated Use {@link #createValueSetter(LogicalType)} for avoiding logical types during
* runtime.
*/
@Deprecated
static void write(
        BinaryWriter writer, int pos, Object o, LogicalType type, TypeSerializer<?> serializer) {
    switch (type.getTypeRoot()) {
        case BOOLEAN:
            writer.writeBoolean(pos, (boolean) o);
            break;
        case TINYINT:
            writer.writeByte(pos, (byte) o);
            break;
        case SMALLINT:
            writer.writeShort(pos, (short) o);
            break;
        case INTEGER:
        case DATE:
        case TIME_WITHOUT_TIME_ZONE:
        case INTERVAL_YEAR_MONTH:
            writer.writeInt(pos, (int) o);
            break;
        case BIGINT:
        case INTERVAL_DAY_TIME:
            writer.writeLong(pos, (long) o);
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) type;
            writer.writeTimestamp(pos, (TimestampData) o, timestampType.getPrecision());
            break;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType lzTs = (LocalZonedTimestampType) type;
            writer.writeTimestamp(pos, (TimestampData) o, lzTs.getPrecision());
            break;
        case FLOAT:
            writer.writeFloat(pos, (float) o);
            break;
        case DOUBLE:
            writer.writeDouble(pos, (double) o);
            break;
        case CHAR:
        case VARCHAR:
            writer.writeString(pos, (StringData) o);
            break;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            writer.writeDecimal(pos, (DecimalData) o, decimalType.getPrecision());
            break;
        case ARRAY:
            writer.writeArray(pos, (ArrayData) o, (ArrayDataSerializer) serializer);
            break;
        case MAP:
        case MULTISET:
            writer.writeMap(pos, (MapData) o, (MapDataSerializer) serializer);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            writer.writeRow(pos, (RowData) o, (RowDataSerializer) serializer);
            break;
        case RAW:
            writer.writeRawValue(pos, (RawValueData<?>) o, (RawValueDataSerializer<?>) serializer);
            break;
        case BINARY:
        case VARBINARY:
            writer.writeBinary(pos, (byte[]) o);
            break;
        default:
            throw new UnsupportedOperationException("Not support type: " + type);
    }
}
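For context, here is a minimal sketch of how this deprecated write path could be exercised for a TIMESTAMP_WITH_LOCAL_TIME_ZONE column. The writer and data classes (BinaryRowData, BinaryRowWriter, TimestampData) and their packages are assumed from recent Flink table-runtime versions, and passing null as the serializer relies on the fact that the timestamp branch above never touches it. As the javadoc notes, new runtime code should prefer BinaryWriter.createValueSetter(LogicalType) instead of this method.

import java.time.Instant;

import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;
import org.apache.flink.table.data.writer.BinaryWriter;
import org.apache.flink.table.types.logical.LocalZonedTimestampType;

public class BinaryWriterTimestampSketch {
    public static void main(String[] args) {
        // One-field row declared as TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).
        LocalZonedTimestampType type = new LocalZonedTimestampType(3);
        TimestampData value = TimestampData.fromInstant(Instant.parse("2021-01-01T00:00:00Z"));

        BinaryRowData row = new BinaryRowData(1);
        BinaryRowWriter writer = new BinaryRowWriter(row);

        // The deprecated static helper dispatches on the logical type root; for the
        // timestamp branches the serializer argument is unused, hence null here.
        BinaryWriter.write(writer, 0, value, type, null);
        writer.complete();

        // Read the value back with the same precision it was written with.
        System.out.println(row.getTimestamp(0, type.getPrecision()));
    }
}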
Use of org.apache.flink.table.types.logical.LocalZonedTimestampType in project flink by apache.
The following code is from the class TypeInfoDataTypeConverter, method fromDataTypeToTypeInfo.
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        final TypeInformation<?> foundTypeInfo =
                primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch (logicalType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) logicalType;
            int precision = timestampType.getPrecision();
            if (timestampType.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precision)
                        : (clazz == LocalDateTime.class
                                ? ((3 == precision)
                                        ? Types.LOCAL_DATE_TIME
                                        : new LegacyLocalDateTimeTypeInfo(precision))
                                : ((3 == precision)
                                        ? Types.SQL_TIMESTAMP
                                        : new LegacyTimestampTypeInfo(precision)));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
            int precisionLzTs = lzTs.getPrecision();
            if (lzTs.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precisionLzTs)
                        : (clazz == Instant.class
                                ? ((3 == precisionLzTs)
                                        ? Types.INSTANT
                                        : new LegacyInstantTypeInfo(precisionLzTs))
                                : TypeConversions.fromDataTypeToLegacyInfo(dataType));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            return clazz == DecimalData.class
                    ? new DecimalDataTypeInfo(decimalType.getPrecision(), decimalType.getScale())
                    : new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == StringData.class
                    ? StringDataTypeInfo.INSTANCE
                    : BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            if (dataType instanceof CollectionDataType
                    && !isPrimitive(
                            ((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(
                        fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(
                    fromDataTypeToTypeInfo(mapType.getKeyDataType()),
                    fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(
                    fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
                return InternalTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(
                        dataType.getChildren().stream()
                                .map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
                                .toArray(TypeInformation[]::new),
                        logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case RAW:
            if (logicalType instanceof RawType) {
                return ExternalTypeInfo.of(dataType);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
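To illustrate the TIMESTAMP_WITH_LOCAL_TIME_ZONE branch, a small sketch of calling the converter is shown below. It assumes the converter quoted here is the one in org.apache.flink.table.runtime.types (the legacy converter of the Blink table runtime); the expected results in the comments follow directly from the branch above: the default conversion class of DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE is java.time.Instant, so precision 3 maps to Types.INSTANT, while bridging to the internal TimestampData class yields a TimestampDataTypeInfo with the declared precision.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter;

public class LocalZonedTimestampTypeInfoSketch {
    public static void main(String[] args) {
        // Default conversion class java.time.Instant, precision 3 -> Types.INSTANT.
        TypeInformation<?> external =
                TypeInfoDataTypeConverter.fromDataTypeToTypeInfo(
                        DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3));

        // Bridged to the internal TimestampData class -> TimestampDataTypeInfo(9).
        TypeInformation<?> internal =
                TypeInfoDataTypeConverter.fromDataTypeToTypeInfo(
                        DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(9).bridgedTo(TimestampData.class));

        System.out.println(external);
        System.out.println(internal);
    }
}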