Usage example of org.apache.flink.table.types.logical.TimestampType in the Apache Flink project.
From the class DateTimeUtils, method convertExtract:
/**
 * Extracts the field identified by {@code range} from a timestamp value.
 *
 * <p>The timestamp is first shifted into the session time zone ({@code tz}); date-level
 * fields are then computed from the day number, while sub-second fields additionally use
 * the nanosecond component carried by {@link TimestampData}.
 *
 * @param range the field to extract (e.g. YEAR, MONTH, MICROSECOND)
 * @param ts the timestamp value (epoch milliseconds plus nano-of-millisecond)
 * @param type the logical type of the input; only TimestampType and DateType are supported
 * @param tz the session time zone used to localize the epoch milliseconds
 * @return the extracted field value
 * @throws TableException for types or units that are not supported yet
 */
private static long convertExtract(TimeUnitRange range, TimestampData ts, LogicalType type, TimeZone tz) {
TimeUnit startUnit = range.startUnit;
long millisecond = ts.getMillisecond();
int nanoOfMillisecond = ts.getNanoOfMillisecond();
// Shift epoch millis into the session time zone so field boundaries (day, month, ...)
// are evaluated in local time rather than UTC.
long offset = tz.getOffset(millisecond);
long utcTs = millisecond + offset;
switch(startUnit) {
case MILLENNIUM:
case CENTURY:
case DECADE:
case YEAR:
case QUARTER:
case MONTH:
case DAY:
case DOW:
case DOY:
case ISODOW:
case ISOYEAR:
case WEEK:
// Date-level fields: reduce the timestamp to a day number and delegate.
if (type instanceof TimestampType) {
long d = divide(utcTs, TimeUnit.DAY.multiplier);
return extractFromDate(range, d);
} else if (type instanceof DateType) {
// For DATE inputs the localized millis divided by a day's millis IS the result.
return divide(utcTs, TimeUnit.DAY.multiplier);
} else {
// TODO support it
throw new TableException(type + " is unsupported now.");
}
case EPOCH:
// TODO support it
throw new TableException("EPOCH is unsupported now.");
case MICROSECOND:
if (type instanceof TimestampType) {
// Whole milliseconds within the second, then add the microsecond fraction
// stored in the nano-of-millisecond component.
long millis = divide(mod(utcTs, getFactor(startUnit)), startUnit.multiplier);
int micros = nanoOfMillisecond / 1000;
return millis + micros;
} else {
throw new TableException(type + " is unsupported now.");
}
case NANOSECOND:
if (type instanceof TimestampType) {
// Same as MICROSECOND but keeps full nanosecond resolution.
long millis = divide(mod(utcTs, getFactor(startUnit)), startUnit.multiplier);
return millis + nanoOfMillisecond;
} else {
throw new TableException(type + " is unsupported now.");
}
default:
// Intentional fall-through: remaining units (e.g. HOUR, MINUTE, SECOND) are
// handled by the generic mod/divide computation below.
}
// value within the unit's period, scaled to the unit's resolution.
long res = mod(utcTs, getFactor(startUnit));
res = divide(res, startUnit.multiplier);
return res;
}
Usage example of org.apache.flink.table.types.logical.TimestampType in the Apache Flink project.
From the class TypeInfoDataTypeConverter, method fromDataTypeToTypeInfo:
/**
 * Converts a {@link DataType} into a legacy {@link TypeInformation}.
 *
 * <p>Primitive conversion classes are resolved through a lookup table first; every other
 * type is dispatched on its logical type root. Types without an internal equivalent fall
 * back to {@link TypeConversions#fromDataTypeToLegacyInfo(DataType)}.
 *
 * @param dataType the data type to convert
 * @return the corresponding type information
 */
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    final Class<?> conversionClass = dataType.getConversionClass();
    // Fast path: primitive conversion classes map directly to a cached TypeInformation.
    if (conversionClass.isPrimitive()) {
        final TypeInformation<?> primitiveInfo =
                primitiveDataTypeTypeInfoMap.get(conversionClass.getName());
        if (primitiveInfo != null) {
            return primitiveInfo;
        }
    }
    final LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch (logicalType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE: {
            final TimestampType timestampType = (TimestampType) logicalType;
            // Rowtime/proctime attributes keep their legacy representation.
            if (timestampType.getKind() != TimestampKind.REGULAR) {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
            final int precision = timestampType.getPrecision();
            if (conversionClass == TimestampData.class) {
                return new TimestampDataTypeInfo(precision);
            }
            if (conversionClass == LocalDateTime.class) {
                // Precision 3 matches the classic millisecond-based type info.
                return precision == 3
                        ? Types.LOCAL_DATE_TIME
                        : new LegacyLocalDateTimeTypeInfo(precision);
            }
            return precision == 3 ? Types.SQL_TIMESTAMP : new LegacyTimestampTypeInfo(precision);
        }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE: {
            final LocalZonedTimestampType lzType = (LocalZonedTimestampType) logicalType;
            if (lzType.getKind() != TimestampKind.REGULAR) {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
            final int precision = lzType.getPrecision();
            if (conversionClass == TimestampData.class) {
                return new TimestampDataTypeInfo(precision);
            }
            if (conversionClass == Instant.class) {
                return precision == 3 ? Types.INSTANT : new LegacyInstantTypeInfo(precision);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        }
        case DECIMAL: {
            final DecimalType decimalType = (DecimalType) logicalType;
            final int precision = decimalType.getPrecision();
            final int scale = decimalType.getScale();
            return conversionClass == DecimalData.class
                    ? new DecimalDataTypeInfo(precision, scale)
                    : new BigDecimalTypeInfo(precision, scale);
        }
        case CHAR:
        case VARCHAR: // length/precision is ignored here
            return conversionClass == StringData.class
                    ? StringDataTypeInfo.INSTANCE
                    : BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // length/precision is ignored here
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY: {
            // Non-primitive element types become object arrays; everything else stays legacy.
            if (dataType instanceof CollectionDataType) {
                final DataType elementType =
                        ((CollectionDataType) dataType).getElementDataType();
                if (!isPrimitive(elementType.getLogicalType())) {
                    return ObjectArrayTypeInfo.getInfoFor(fromDataTypeToTypeInfo(elementType));
                }
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        }
        case MAP: {
            final KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(
                    fromDataTypeToTypeInfo(mapType.getKeyDataType()),
                    fromDataTypeToTypeInfo(mapType.getValueDataType()));
        }
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(
                    fromDataTypeToTypeInfo(
                            ((CollectionDataType) dataType).getElementDataType()));
        case ROW: {
            if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
                // Internal row representation.
                return InternalTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            }
            if (Row.class == dataType.getConversionClass()) {
                final RowType rowType = (RowType) logicalType;
                final TypeInformation<?>[] fieldTypes =
                        dataType.getChildren().stream()
                                .map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
                                .toArray(TypeInformation[]::new);
                return new RowTypeInfo(
                        fieldTypes, rowType.getFieldNames().toArray(new String[0]));
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        }
        case RAW:
            if (logicalType instanceof RawType) {
                return ExternalTypeInfo.of(dataType);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
Usage example of org.apache.flink.table.types.logical.TimestampType in the Apache Flink project.
From the class EqualiserCodeGeneratorTest, method testTimestamp:
@Test
public void testTimestamp() {
    // Generate a code-generated equaliser for a single TIMESTAMP field.
    RecordEqualiser equaliser =
            new EqualiserCodeGenerator(new LogicalType[] { new TimestampType() })
                    .generateRecordEqualiser("TIMESTAMP")
                    .newInstance(Thread.currentThread().getContextClassLoader());
    // Packs one TimestampData value into a single-field binary row (precision 9).
    Function<TimestampData, BinaryRowData> toRow = timestamp -> {
        BinaryRowData binaryRow = new BinaryRowData(1);
        BinaryRowWriter rowWriter = new BinaryRowWriter(binaryRow);
        rowWriter.writeTimestamp(0, timestamp, 9);
        rowWriter.complete();
        return binaryRow;
    };
    // Equal millis compare equal; differing millis do not.
    assertBoolean(equaliser, toRow, fromEpochMillis(1024), fromEpochMillis(1024), true);
    assertBoolean(equaliser, toRow, fromEpochMillis(1024), fromEpochMillis(1025), false);
}
Aggregations