Search in sources:

Example 6 with DecimalType

Use of org.apache.flink.table.types.logical.DecimalType in the Apache Flink project.

From the class ParquetColumnarRowInputFormatTest, the method testReadingSplit:

/**
 * Reads one split of the parquet test file and verifies every produced row.
 *
 * @param expected expected integer seed per row; a {@code null} entry means all 15 fields of
 *     that row must be null
 * @param path parquet file to read
 * @param splitStart byte offset where the split begins
 * @param splitLength length of the split in bytes
 * @param seekToRow number of rows to skip before verification starts
 * @return the number of rows read from the split
 * @throws IOException if the file cannot be read or the format fails java serialization
 */
private int testReadingSplit(List<Integer> expected, Path path, long splitStart, long splitLength, long seekToRow) throws IOException {
    final LogicalType[] fieldTypes =
            new LogicalType[] {
                new VarCharType(VarCharType.MAX_LENGTH),
                new BooleanType(),
                new TinyIntType(),
                new SmallIntType(),
                new IntType(),
                new BigIntType(),
                new FloatType(),
                new DoubleType(),
                new TimestampType(9),
                new DecimalType(5, 0),
                new DecimalType(15, 0),
                new DecimalType(20, 0),
                new DecimalType(5, 0),
                new DecimalType(15, 0),
                new DecimalType(20, 0)
            };
    final String[] fieldNames = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13", "f14"
    };
    ParquetColumnarRowInputFormat format =
            new ParquetColumnarRowInputFormat(
                    new Configuration(), RowType.of(fieldTypes, fieldNames), null, 500, false, true);
    // The format must survive java serialization — verify that up front.
    try {
        InstantiationUtil.clone(format);
    } catch (ClassNotFoundException e) {
        throw new IOException(e);
    }
    FileStatus fileStatus = path.getFileSystem().getFileStatus(path);
    BulkFormat.Reader<RowData> reader =
            format.restoreReader(
                    EMPTY_CONF,
                    new FileSourceSplit(
                            "id",
                            path,
                            splitStart,
                            splitLength,
                            fileStatus.getModificationTime(),
                            fileStatus.getLen(),
                            new String[0],
                            new CheckpointedPosition(CheckpointedPosition.NO_OFFSET, seekToRow)));
    final AtomicInteger rowCount = new AtomicInteger(0);
    final AtomicReference<RowData> lastSeen = new AtomicReference<>();
    forEachRemaining(reader, row -> {
        if (lastSeen.get() == null) {
            lastSeen.set(row);
        } else {
            // ParquetColumnarRowInputFormat should only have one row instance.
            assertSame(lastSeen.get(), row);
        }
        Integer v = expected.get(rowCount.get());
        if (v == null) {
            // A null seed means every one of the 15 fields is null.
            for (int field = 0; field < 15; field++) {
                assertTrue(row.isNullAt(field));
            }
        } else {
            assertEquals("" + v, row.getString(0).toString());
            assertEquals(v % 2 == 0, row.getBoolean(1));
            assertEquals(v.byteValue(), row.getByte(2));
            assertEquals(v.shortValue(), row.getShort(3));
            assertEquals(v.intValue(), row.getInt(4));
            assertEquals(v.longValue(), row.getLong(5));
            assertEquals(v.floatValue(), row.getFloat(6), 0);
            assertEquals(v.doubleValue(), row.getDouble(7), 0);
            assertEquals(toDateTime(v), row.getTimestamp(8, 9).toLocalDateTime());
            // Fields 9..14 repeat the decimal precisions 5, 15, 20 twice.
            final int[] precisions = {5, 15, 20, 5, 15, 20};
            for (int i = 0; i < precisions.length; i++) {
                assertEquals(
                        BigDecimal.valueOf(v),
                        row.getDecimal(9 + i, precisions[i], 0).toBigDecimal());
            }
        }
        rowCount.incrementAndGet();
    });
    return rowCount.get();
}
Also used : FileStatus(org.apache.flink.core.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) FileSourceSplit(org.apache.flink.connector.file.src.FileSourceSplit) LogicalType(org.apache.flink.table.types.logical.LogicalType) BigIntType(org.apache.flink.table.types.logical.BigIntType) TinyIntType(org.apache.flink.table.types.logical.TinyIntType) IntType(org.apache.flink.table.types.logical.IntType) BigIntType(org.apache.flink.table.types.logical.BigIntType) SmallIntType(org.apache.flink.table.types.logical.SmallIntType) FloatType(org.apache.flink.table.types.logical.FloatType) RowData(org.apache.flink.table.data.RowData) CheckpointedPosition(org.apache.flink.connector.file.src.util.CheckpointedPosition) TimestampType(org.apache.flink.table.types.logical.TimestampType) VarCharType(org.apache.flink.table.types.logical.VarCharType) BooleanType(org.apache.flink.table.types.logical.BooleanType) AtomicReference(java.util.concurrent.atomic.AtomicReference) IOException(java.io.IOException) TinyIntType(org.apache.flink.table.types.logical.TinyIntType) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) SmallIntType(org.apache.flink.table.types.logical.SmallIntType) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) DoubleType(org.apache.flink.table.types.logical.DoubleType) DecimalType(org.apache.flink.table.types.logical.DecimalType) BulkFormat(org.apache.flink.connector.file.src.reader.BulkFormat)

Example 7 with DecimalType

Use of org.apache.flink.table.types.logical.DecimalType in the Apache Flink project.

From the class ParquetRowDataWriter, the method createWriter:

/**
 * Creates a {@code FieldWriter} that writes values of the Flink logical type {@code t} into the
 * given parquet {@code type}.
 *
 * @param t the Flink logical type of the field
 * @param type the corresponding parquet schema type
 * @return a writer matching both type descriptions
 * @throws UnsupportedOperationException if the type combination is not supported
 */
private FieldWriter createWriter(LogicalType t, Type type) {
    if (!type.isPrimitive()) {
        // Complex types: list, map and row are all encoded as parquet groups.
        GroupType groupType = type.asGroupType();
        LogicalTypeAnnotation annotation = type.getLogicalTypeAnnotation();
        if (t instanceof ArrayType
                && annotation instanceof LogicalTypeAnnotation.ListLogicalTypeAnnotation) {
            return new ArrayWriter(((ArrayType) t).getElementType(), groupType);
        }
        if (t instanceof MapType
                && annotation instanceof LogicalTypeAnnotation.MapLogicalTypeAnnotation) {
            MapType map = (MapType) t;
            return new MapWriter(map.getKeyType(), map.getValueType(), groupType);
        }
        if (t instanceof RowType && type instanceof GroupType) {
            return new RowWriter((RowType) t, groupType);
        }
        throw new UnsupportedOperationException("Unsupported type: " + type);
    }
    // Primitive types dispatch purely on the Flink type root.
    switch (t.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            return new StringWriter();
        case BOOLEAN:
            return new BooleanWriter();
        case BINARY:
        case VARBINARY:
            return new BinaryWriter();
        case DECIMAL:
            DecimalType decimal = (DecimalType) t;
            return createDecimalWriter(decimal.getPrecision(), decimal.getScale());
        case TINYINT:
            return new ByteWriter();
        case SMALLINT:
            return new ShortWriter();
        case DATE:
        case TIME_WITHOUT_TIME_ZONE:
        case INTEGER:
            // Dates and times are stored as 32-bit integers in parquet.
            return new IntWriter();
        case BIGINT:
            return new LongWriter();
        case FLOAT:
            return new FloatWriter();
        case DOUBLE:
            return new DoubleWriter();
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestamp = (TimestampType) t;
            return new TimestampWriter(timestamp.getPrecision());
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType localZonedTimestamp = (LocalZonedTimestampType) t;
            return new TimestampWriter(localZonedTimestamp.getPrecision());
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
Also used : LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) RowType(org.apache.flink.table.types.logical.RowType) MapType(org.apache.flink.table.types.logical.MapType) ArrayType(org.apache.flink.table.types.logical.ArrayType) GroupType(org.apache.parquet.schema.GroupType) LogicalTypeAnnotation(org.apache.parquet.schema.LogicalTypeAnnotation) DecimalType(org.apache.flink.table.types.logical.DecimalType) TimestampType(org.apache.flink.table.types.logical.TimestampType) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType)

Example 8 with DecimalType

Use of org.apache.flink.table.types.logical.DecimalType in the Apache Flink project.

From the class DecimalDivideTypeStrategy, the method inferType:

/**
 * Infers the result type of a decimal division.
 *
 * <p>Legacy types are passed through unchanged until they are dropped; non-decimal
 * computations yield an empty result so another strategy can apply.
 *
 * @param callContext call with exactly two arguments: dividend and divisor
 * @return the inferred decimal result type, or empty if this strategy does not apply
 */
@Override
public Optional<DataType> inferType(CallContext callContext) {
    final List<DataType> args = callContext.getArgumentDataTypes();
    final DataType dividendDataType = args.get(0);
    final DataType divisorDataType = args.get(1);
    final LogicalType dividend = dividendDataType.getLogicalType();
    final LogicalType divisor = divisorDataType.getLogicalType();
    // a hack to make legacy types possible until we drop them
    if (dividend instanceof LegacyTypeInformationType) {
        return Optional.of(dividendDataType);
    }
    if (divisor instanceof LegacyTypeInformationType) {
        return Optional.of(divisorDataType);
    }
    if (!isDecimalComputation(dividend, divisor)) {
        return Optional.empty();
    }
    final DecimalType result =
            LogicalTypeMerging.findDivisionDecimalType(
                    getPrecision(dividend), getScale(dividend),
                    getPrecision(divisor), getScale(divisor));
    return Optional.of(fromLogicalToDataType(result));
}
Also used : DataType(org.apache.flink.table.types.DataType) TypeConversions.fromLogicalToDataType(org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType) DecimalType(org.apache.flink.table.types.logical.DecimalType) LegacyTypeInformationType(org.apache.flink.table.types.logical.LegacyTypeInformationType)

Example 9 with DecimalType

Use of org.apache.flink.table.types.logical.DecimalType in the Apache Flink project.

From the class DecimalPlusTypeStrategy, the method inferType:

/**
 * Infers the result type of a decimal addition.
 *
 * <p>Legacy types are passed through unchanged until they are dropped; non-decimal
 * computations yield an empty result so another strategy can apply.
 *
 * @param callContext call with exactly two arguments: the two addends
 * @return the inferred decimal result type, or empty if this strategy does not apply
 */
@Override
public Optional<DataType> inferType(CallContext callContext) {
    final List<DataType> args = callContext.getArgumentDataTypes();
    final DataType firstDataType = args.get(0);
    final DataType secondDataType = args.get(1);
    final LogicalType addend1 = firstDataType.getLogicalType();
    final LogicalType addend2 = secondDataType.getLogicalType();
    // a hack to make legacy types possible until we drop them
    if (addend1 instanceof LegacyTypeInformationType) {
        return Optional.of(firstDataType);
    }
    if (addend2 instanceof LegacyTypeInformationType) {
        return Optional.of(secondDataType);
    }
    if (!isDecimalComputation(addend1, addend2)) {
        return Optional.empty();
    }
    final DecimalType result =
            LogicalTypeMerging.findAdditionDecimalType(
                    getPrecision(addend1), getScale(addend1),
                    getPrecision(addend2), getScale(addend2));
    return Optional.of(fromLogicalToDataType(result));
}
Also used : DataType(org.apache.flink.table.types.DataType) TypeConversions.fromLogicalToDataType(org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType) DecimalType(org.apache.flink.table.types.logical.DecimalType) LegacyTypeInformationType(org.apache.flink.table.types.logical.LegacyTypeInformationType)

Example 10 with DecimalType

Use of org.apache.flink.table.types.logical.DecimalType in the Apache Flink project.

From the class LogicalTypeMerging, the method createCommonExactNumericType:

/**
 * Finds the common type of two EXACT_NUMERIC types.
 *
 * <p>For non-decimal (integer family) types the one with the larger precision wins. When a
 * DECIMAL is involved, the common type is a DECIMAL with precision (p), scale (s) and number of
 * whole digits (d) determined by:
 *
 * <pre>
 *   d = max(p1 - s1, p2 - s2)
 *   s <= max(s1, s2)
 *   p = s + d
 * </pre>
 *
 * capped at {@code DecimalType.MAX_PRECISION}.
 *
 * @param resultType the common type accumulated so far
 * @param type the next type to merge in
 * @return the merged EXACT_NUMERIC type
 */
private static LogicalType createCommonExactNumericType(LogicalType resultType, LogicalType type) {
    // Identical EXACT_NUMERIC types merge to themselves.
    if (type.equals(resultType)) {
        return resultType;
    }
    final boolean decimalInvolved =
            resultType.getTypeRoot() == DECIMAL || type.getTypeRoot() == DECIMAL;
    if (!decimalInvolved) {
        // For the integer family, type root order mirrors precision order,
        // so the type with the larger precision subsumes the other.
        return getPrecision(type) > getPrecision(resultType) ? type : resultType;
    }
    final int leftPrecision = getPrecision(resultType);
    final int rightPrecision = getPrecision(type);
    final int leftScale = getScale(resultType);
    final int rightScale = getScale(type);
    final int maxPrecision = DecimalType.MAX_PRECISION;
    // Whole digits first, then give the remaining room to the scale.
    int wholeDigits = Math.max(leftPrecision - leftScale, rightPrecision - rightScale);
    wholeDigits = Math.min(wholeDigits, maxPrecision);
    int scale = Math.max(leftScale, rightScale);
    scale = Math.min(scale, maxPrecision - wholeDigits);
    return new DecimalType(wholeDigits + scale, scale);
}
Also used : DecimalType(org.apache.flink.table.types.logical.DecimalType) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot)

Aggregations

DecimalType (org.apache.flink.table.types.logical.DecimalType)27 LogicalType (org.apache.flink.table.types.logical.LogicalType)14 RowType (org.apache.flink.table.types.logical.RowType)12 TimestampType (org.apache.flink.table.types.logical.TimestampType)12 ArrayType (org.apache.flink.table.types.logical.ArrayType)8 BigIntType (org.apache.flink.table.types.logical.BigIntType)8 IntType (org.apache.flink.table.types.logical.IntType)8 BigDecimal (java.math.BigDecimal)7 LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType)7 SmallIntType (org.apache.flink.table.types.logical.SmallIntType)7 TinyIntType (org.apache.flink.table.types.logical.TinyIntType)7 VarBinaryType (org.apache.flink.table.types.logical.VarBinaryType)7 VarCharType (org.apache.flink.table.types.logical.VarCharType)7 DataType (org.apache.flink.table.types.DataType)6 BooleanType (org.apache.flink.table.types.logical.BooleanType)6 DoubleType (org.apache.flink.table.types.logical.DoubleType)6 FloatType (org.apache.flink.table.types.logical.FloatType)6 LegacyTypeInformationType (org.apache.flink.table.types.logical.LegacyTypeInformationType)6 ArrayList (java.util.ArrayList)5 DateType (org.apache.flink.table.types.logical.DateType)5