Example 26 with TimestampType

Use of org.apache.flink.table.types.logical.TimestampType in the Apache Flink project.

From the class DateTimeUtils, the method convertExtract:

private static long convertExtract(TimeUnitRange range, TimestampData ts, LogicalType type, TimeZone tz) {
    TimeUnit startUnit = range.startUnit;
    long millisecond = ts.getMillisecond();
    int nanoOfMillisecond = ts.getNanoOfMillisecond();
    long offset = tz.getOffset(millisecond);
    long utcTs = millisecond + offset;
    switch(startUnit) {
        case MILLENNIUM:
        case CENTURY:
        case DECADE:
        case YEAR:
        case QUARTER:
        case MONTH:
        case DAY:
        case DOW:
        case DOY:
        case ISODOW:
        case ISOYEAR:
        case WEEK:
            if (type instanceof TimestampType) {
                long d = divide(utcTs, TimeUnit.DAY.multiplier);
                return extractFromDate(range, d);
            } else if (type instanceof DateType) {
                return divide(utcTs, TimeUnit.DAY.multiplier);
            } else {
                // TODO support it
                throw new TableException(type + " is unsupported now.");
            }
        case EPOCH:
            // TODO support it
            throw new TableException("EPOCH is unsupported now.");
        case MICROSECOND:
            if (type instanceof TimestampType) {
                long millis = divide(mod(utcTs, getFactor(startUnit)), startUnit.multiplier);
                int micros = nanoOfMillisecond / 1000;
                return millis + micros;
            } else {
                throw new TableException(type + " is unsupported now.");
            }
        case NANOSECOND:
            if (type instanceof TimestampType) {
                long millis = divide(mod(utcTs, getFactor(startUnit)), startUnit.multiplier);
                return millis + nanoOfMillisecond;
            } else {
                throw new TableException(type + " is unsupported now.");
            }
        default:
            // fall through: the remaining units are handled by the generic mod/divide logic below
    }
    long res = mod(utcTs, getFactor(startUnit));
    res = divide(res, startUnit.multiplier);
    return res;
}
Also used : TableException(org.apache.flink.table.api.TableException) TimestampType(org.apache.flink.table.types.logical.TimestampType) DateType(org.apache.flink.table.types.logical.DateType)
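
The helpers divide, mod, getFactor and extractFromDate are defined elsewhere in DateTimeUtils and are not shown here. The day-granularity branch boils down to shifting the epoch milliseconds by the zone offset and then dividing by the milliseconds per day; a minimal standalone sketch of that idea, using only the JDK (the class and method names below are illustrative, not Flink API):

import java.util.TimeZone;

// Illustrative sketch only; mirrors the day-granularity branch above, not Flink API.
public class ExtractDaySketch {

    // Days since epoch for an epoch-millisecond value, interpreted in the given time zone.
    static long daysSinceEpoch(long epochMillis, TimeZone tz) {
        // Shift the UTC instant into local "wall clock" milliseconds, like utcTs above.
        long localMillis = epochMillis + tz.getOffset(epochMillis);
        // Floor division so instants before 1970-01-01 still map to the correct day.
        return Math.floorDiv(localMillis, 24L * 60 * 60 * 1000);
    }

    public static void main(String[] args) {
        // 2021-01-01T00:30:00Z is still 2020-12-31 in GMT-8, so the two day counts differ by one.
        long millis = 1_609_461_000_000L;
        System.out.println(daysSinceEpoch(millis, TimeZone.getTimeZone("UTC")));       // 18628
        System.out.println(daysSinceEpoch(millis, TimeZone.getTimeZone("GMT-08:00"))); // 18627
    }
}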

Example 27 with TimestampType

Use of org.apache.flink.table.types.logical.TimestampType in the Apache Flink project.

From the class TypeInfoDataTypeConverter, the method fromDataTypeToTypeInfo:

public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch(logicalType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) logicalType;
            int precision = timestampType.getPrecision();
            if (timestampType.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precision)
                        : (clazz == LocalDateTime.class
                                ? (3 == precision
                                        ? Types.LOCAL_DATE_TIME
                                        : new LegacyLocalDateTimeTypeInfo(precision))
                                : (3 == precision
                                        ? Types.SQL_TIMESTAMP
                                        : new LegacyTimestampTypeInfo(precision)));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
            int precisionLzTs = lzTs.getPrecision();
            if (lzTs.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precisionLzTs)
                        : (clazz == Instant.class
                                ? (3 == precisionLzTs
                                        ? Types.INSTANT
                                        : new LegacyInstantTypeInfo(precisionLzTs))
                                : TypeConversions.fromDataTypeToLegacyInfo(dataType));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            return clazz == DecimalData.class ? new DecimalDataTypeInfo(decimalType.getPrecision(), decimalType.getScale()) : new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == StringData.class ? StringDataTypeInfo.INSTANCE : BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            if (dataType instanceof CollectionDataType && !isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(fromDataTypeToTypeInfo(mapType.getKeyDataType()), fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
                return InternalTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(dataType.getChildren().stream().map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo).toArray(TypeInformation[]::new), logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case RAW:
            if (logicalType instanceof RawType) {
                return ExternalTypeInfo.of(dataType);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
Also used : LocalDateTime(java.time.LocalDateTime) TimestampData(org.apache.flink.table.data.TimestampData) DecimalDataTypeInfo(org.apache.flink.table.runtime.typeutils.DecimalDataTypeInfo) CollectionDataType(org.apache.flink.table.types.CollectionDataType) LogicalTypeDataTypeConverter.fromDataTypeToLogicalType(org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType) LogicalType(org.apache.flink.table.types.logical.LogicalType) RowType(org.apache.flink.table.types.logical.RowType) RowTypeInfo(org.apache.flink.api.java.typeutils.RowTypeInfo) LegacyInstantTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyInstantTypeInfo) DecimalData(org.apache.flink.table.data.DecimalData) TimestampType(org.apache.flink.table.types.logical.TimestampType) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) BigDecimalTypeInfo(org.apache.flink.table.runtime.typeutils.BigDecimalTypeInfo) RawType(org.apache.flink.table.types.logical.RawType) TimestampDataTypeInfo(org.apache.flink.table.runtime.typeutils.TimestampDataTypeInfo) Instant(java.time.Instant) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) LegacyLocalDateTimeTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyLocalDateTimeTypeInfo) MapTypeInfo(org.apache.flink.api.java.typeutils.MapTypeInfo) LegacyTimestampTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyTimestampTypeInfo) DecimalType(org.apache.flink.table.types.logical.DecimalType) Row(org.apache.flink.types.Row) StringData(org.apache.flink.table.data.StringData)
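
A minimal usage sketch of the converter, assuming it is the public TypeInfoDataTypeConverter in org.apache.flink.table.runtime.types (consistent with the imports listed above) and that DataTypes.TIMESTAMP and DataTypes.DECIMAL keep their default LocalDateTime and BigDecimal conversion classes:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter;

public class ConverterSketch {
    public static void main(String[] args) {
        // TIMESTAMP(3) with the default LocalDateTime conversion class should take the
        // Types.LOCAL_DATE_TIME branch above.
        TypeInformation<?> ts3 =
                TypeInfoDataTypeConverter.fromDataTypeToTypeInfo(DataTypes.TIMESTAMP(3));

        // A non-default precision falls into the LegacyLocalDateTimeTypeInfo branch instead.
        TypeInformation<?> ts9 =
                TypeInfoDataTypeConverter.fromDataTypeToTypeInfo(DataTypes.TIMESTAMP(9));

        // DECIMAL with the default BigDecimal conversion class maps to BigDecimalTypeInfo(10, 2).
        TypeInformation<?> dec =
                TypeInfoDataTypeConverter.fromDataTypeToTypeInfo(DataTypes.DECIMAL(10, 2));

        System.out.println(ts3 + " / " + ts9 + " / " + dec);
    }
}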

Example 28 with TimestampType

Use of org.apache.flink.table.types.logical.TimestampType in the Apache Flink project.

From the class EqualiserCodeGeneratorTest, the method testTimestamp:

@Test
public void testTimestamp() {
    RecordEqualiser equaliser =
            new EqualiserCodeGenerator(new LogicalType[] { new TimestampType() })
                    .generateRecordEqualiser("TIMESTAMP")
                    .newInstance(Thread.currentThread().getContextClassLoader());
    Function<TimestampData, BinaryRowData> func = o -> {
        BinaryRowData row = new BinaryRowData(1);
        BinaryRowWriter writer = new BinaryRowWriter(row);
        writer.writeTimestamp(0, o, 9);
        writer.complete();
        return row;
    };
    assertBoolean(equaliser, func, fromEpochMillis(1024), fromEpochMillis(1024), true);
    assertBoolean(equaliser, func, fromEpochMillis(1024), fromEpochMillis(1025), false);
}
Also used : Types(org.apache.flink.api.common.typeinfo.Types) TypeInformationRawType(org.apache.flink.table.types.logical.TypeInformationRawType) IntStream(java.util.stream.IntStream) RecordEqualiser(org.apache.flink.table.runtime.generated.RecordEqualiser) TimestampData(org.apache.flink.table.data.TimestampData) Assert.assertTrue(org.junit.Assert.assertTrue) VarCharType(org.apache.flink.table.types.logical.VarCharType) Test(org.junit.Test) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) Function(java.util.function.Function) TimestampData.fromEpochMillis(org.apache.flink.table.data.TimestampData.fromEpochMillis) IntSerializer(org.apache.flink.api.common.typeutils.base.IntSerializer) StringData(org.apache.flink.table.data.StringData) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) TimestampType(org.apache.flink.table.types.logical.TimestampType) RawValueDataSerializer(org.apache.flink.table.runtime.typeutils.RawValueDataSerializer) GenericRowData(org.apache.flink.table.data.GenericRowData) LogicalType(org.apache.flink.table.types.logical.LogicalType) RawValueData(org.apache.flink.table.data.RawValueData) Assert(org.junit.Assert)
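
The helper assertBoolean belongs to the test class and is not shown in this snippet. A plausible sketch of the check it performs, written as it might appear inside the test class and assuming RecordEqualiser exposes equals(RowData, RowData); the signature below is an assumption, not the actual Flink test code:

// Hypothetical helper, for illustration only; the real assertBoolean is not shown above.
private static void assertBoolean(
        RecordEqualiser equaliser,
        Function<TimestampData, BinaryRowData> func,
        TimestampData left,
        TimestampData right,
        boolean expected) {
    // Serialise both timestamps into single-field binary rows and compare them
    // with the generated equaliser.
    Assert.assertEquals(expected, equaliser.equals(func.apply(left), func.apply(right)));
}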

Aggregations

TimestampType (org.apache.flink.table.types.logical.TimestampType) 28
LogicalType (org.apache.flink.table.types.logical.LogicalType) 17
LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType) 13
RowType (org.apache.flink.table.types.logical.RowType) 13
DecimalType (org.apache.flink.table.types.logical.DecimalType) 12
IntType (org.apache.flink.table.types.logical.IntType) 11
VarCharType (org.apache.flink.table.types.logical.VarCharType) 10
ArrayType (org.apache.flink.table.types.logical.ArrayType) 9
SmallIntType (org.apache.flink.table.types.logical.SmallIntType) 9
BigIntType (org.apache.flink.table.types.logical.BigIntType) 8
DateType (org.apache.flink.table.types.logical.DateType) 7
FloatType (org.apache.flink.table.types.logical.FloatType) 7
TinyIntType (org.apache.flink.table.types.logical.TinyIntType) 7
ArrayList (java.util.ArrayList) 6
BooleanType (org.apache.flink.table.types.logical.BooleanType) 6
DoubleType (org.apache.flink.table.types.logical.DoubleType) 6
ZonedTimestampType (org.apache.flink.table.types.logical.ZonedTimestampType) 6
MapType (org.apache.flink.table.types.logical.MapType) 5
Test (org.junit.Test) 5
BigDecimal (java.math.BigDecimal) 4