
Example 1 with DecimalData

Use of org.apache.flink.table.data.DecimalData in project flink by apache.

From class DecimalDataRandomGeneratorTest, method testMinMax:

@Test
public void testMinMax() {
    for (int precision = 1; precision <= 38; precision++) {
        for (int scale = 0; scale <= precision; scale++) {
            BigDecimal min = BigDecimal.valueOf(-10.0);
            BigDecimal max = BigDecimal.valueOf(10.0);
            DecimalDataRandomGenerator gen = new DecimalDataRandomGenerator(precision, scale, min.doubleValue(), max.doubleValue());
            DecimalData result = gen.next();
            Assert.assertNotNull("Null value for DECIMAL(" + precision + "," + scale + ")", result);
            Assert.assertThat("value must be greater than or equal to min", result.toBigDecimal(), greaterThanOrEqualTo(min));
            Assert.assertThat("value must be less than or equal to max", result.toBigDecimal(), lessThanOrEqualTo(max));
        }
    }
}
Also used: DecimalData(org.apache.flink.table.data.DecimalData) BigDecimal(java.math.BigDecimal) Test(org.junit.Test)
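
For orientation, a minimal sketch of driving the generator outside the test harness, using only the constructor and next() call exercised above; the concrete precision, scale, and bounds here are illustrative:

// A minimal usage sketch, assuming DecimalDataRandomGenerator and its
// (precision, scale, min, max) constructor as shown in the test above.
DecimalDataRandomGenerator gen = new DecimalDataRandomGenerator(10, 2, -10.0, 10.0);
DecimalData value = gen.next();
// toBigDecimal() is the same accessor the assertions above inspect.
System.out.println(value.toBigDecimal());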

Example 2 with DecimalData

Use of org.apache.flink.table.data.DecimalData in project flink by apache.

From class ExpressionConverter, method extractValue:

/**
 * Extracts a value from a literal, including planner-specific instances such as
 * {@link DecimalData}.
 */
@SuppressWarnings("unchecked")
public static <T> T extractValue(ValueLiteralExpression literal, Class<T> clazz) {
    final Optional<Object> possibleObject = literal.getValueAs(Object.class);
    if (!possibleObject.isPresent()) {
        throw new TableException("Invalid literal.");
    }
    final Object object = possibleObject.get();
    if (clazz.equals(BigDecimal.class)) {
        final Optional<BigDecimal> possibleDecimal = literal.getValueAs(BigDecimal.class);
        if (possibleDecimal.isPresent()) {
            return (T) possibleDecimal.get();
        }
        if (object instanceof DecimalData) {
            return (T) ((DecimalData) object).toBigDecimal();
        }
    }
    return literal.getValueAs(clazz).orElseThrow(() -> new TableException("Unsupported literal class: " + clazz));
}
Also used: DecimalData(org.apache.flink.table.data.DecimalData) TableException(org.apache.flink.table.api.TableException) BigDecimal(java.math.BigDecimal)
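
A short, hypothetical call site for extractValue. Only the method itself comes from the example; constructing the literal through ValueLiteralExpression's public constructor is an assumption about the surrounding API:

// A minimal sketch: extract a BigDecimal from a literal, which also
// unwraps a planner-internal DecimalData if that is what the literal holds.
ValueLiteralExpression literal = new ValueLiteralExpression(new BigDecimal("12.34")); // hypothetical construction
BigDecimal value = ExpressionConverter.extractValue(literal, BigDecimal.class);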

Example 3 with DecimalData

Use of org.apache.flink.table.data.DecimalData in project flink by apache.

From class RowDataToAvroConverters, method createConverter:

// --------------------------------------------------------------------------------
// IMPORTANT! We use anonymous classes instead of lambdas for a reason here. It is
// necessary because the maven shade plugin cannot relocate classes in
// SerializedLambdas (MSHADE-260). On the other hand we want to relocate Avro for
// sql-client uber jars.
// --------------------------------------------------------------------------------
/**
 * Creates a runtime converter according to the given logical type that converts objects of
 * Flink Table & SQL internal data structures to corresponding Avro data structures.
 */
public static RowDataToAvroConverter createConverter(LogicalType type) {
    final RowDataToAvroConverter converter;
    switch(type.getTypeRoot()) {
        case NULL:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return null;
                }
            };
            break;
        case TINYINT:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Byte) object).intValue();
                }
            };
            break;
        case SMALLINT:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Short) object).intValue();
                }
            };
            break;
        case BOOLEAN: // boolean
        case INTEGER: // int
        case INTERVAL_YEAR_MONTH: // long
        case BIGINT: // long
        case INTERVAL_DAY_TIME: // long
        case FLOAT: // float
        case DOUBLE: // double
        case TIME_WITHOUT_TIME_ZONE: // int
        case DATE: // int
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return object;
                }
            };
            break;
        case CHAR:
        case VARCHAR:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return new Utf8(object.toString());
                }
            };
            break;
        case BINARY:
        case VARBINARY:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap((byte[]) object);
                }
            };
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((TimestampData) object).toInstant().toEpochMilli();
                }
            };
            break;
        case DECIMAL:
            converter = new RowDataToAvroConverter() {

                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap(((DecimalData) object).toUnscaledBytes());
                }
            };
            break;
        case ARRAY:
            converter = createArrayConverter((ArrayType) type);
            break;
        case ROW:
            converter = createRowConverter((RowType) type);
            break;
        case MAP:
        case MULTISET:
            converter = createMapConverter(type);
            break;
        case RAW:
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
    // wrap into nullable converter
    return new RowDataToAvroConverter() {

        private static final long serialVersionUID = 1L;

        @Override
        public Object convert(Schema schema, Object object) {
            if (object == null) {
                return null;
            }
            // get actual schema if it is a nullable schema
            Schema actualSchema;
            if (schema.getType() == Schema.Type.UNION) {
                List<Schema> types = schema.getTypes();
                int size = types.size();
                if (size == 2 && types.get(1).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(0);
                } else if (size == 2 && types.get(0).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(1);
                } else {
                    throw new IllegalArgumentException("The Avro schema is not a nullable type: " + schema.toString());
                }
            } else {
                actualSchema = schema;
            }
            return converter.convert(actualSchema, object);
        }
    };
}
Also used: TimestampData(org.apache.flink.table.data.TimestampData) Schema(org.apache.avro.Schema) RowType(org.apache.flink.table.types.logical.RowType) DecimalData(org.apache.flink.table.data.DecimalData) ArrayType(org.apache.flink.table.types.logical.ArrayType) Utf8(org.apache.avro.util.Utf8)
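
A hedged usage sketch for the DECIMAL branch: the returned converter is the nullable wrapper, so it can be handed a plain (non-union) Avro schema directly. Schema.create and DecimalData.fromBigDecimal are standard API; treating RowDataToAvroConverter as a nested interface of RowDataToAvroConverters is an assumption here, the precision and scale are illustrative, and the Avro logical-type decimal metadata is omitted for brevity:

// A minimal sketch: convert Flink's internal DecimalData to the
// ByteBuffer of unscaled bytes produced by the DECIMAL case above.
RowDataToAvroConverters.RowDataToAvroConverter converter =
        RowDataToAvroConverters.createConverter(new DecimalType(10, 2));
Schema bytesSchema = Schema.create(Schema.Type.BYTES);
DecimalData decimal = DecimalData.fromBigDecimal(new BigDecimal("12.34"), 10, 2);
ByteBuffer encoded = (ByteBuffer) converter.convert(bytesSchema, decimal);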

Example 4 with DecimalData

Use of org.apache.flink.table.data.DecimalData in project flink by apache.

From class ParquetSplitReaderUtil, method createVectorFromConstant:

public static ColumnVector createVectorFromConstant(LogicalType type, Object value, int batchSize) {
    switch(type.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
        case BINARY:
        case VARBINARY:
            HeapBytesVector bsv = new HeapBytesVector(batchSize);
            if (value == null) {
                bsv.fillWithNulls();
            } else {
                bsv.fill(value instanceof byte[] ? (byte[]) value : value.toString().getBytes(StandardCharsets.UTF_8));
            }
            return bsv;
        case BOOLEAN:
            HeapBooleanVector bv = new HeapBooleanVector(batchSize);
            if (value == null) {
                bv.fillWithNulls();
            } else {
                bv.fill((boolean) value);
            }
            return bv;
        case TINYINT:
            HeapByteVector byteVector = new HeapByteVector(batchSize);
            if (value == null) {
                byteVector.fillWithNulls();
            } else {
                byteVector.fill(((Number) value).byteValue());
            }
            return byteVector;
        case SMALLINT:
            HeapShortVector sv = new HeapShortVector(batchSize);
            if (value == null) {
                sv.fillWithNulls();
            } else {
                sv.fill(((Number) value).shortValue());
            }
            return sv;
        case INTEGER:
            HeapIntVector iv = new HeapIntVector(batchSize);
            if (value == null) {
                iv.fillWithNulls();
            } else {
                iv.fill(((Number) value).intValue());
            }
            return iv;
        case BIGINT:
            HeapLongVector lv = new HeapLongVector(batchSize);
            if (value == null) {
                lv.fillWithNulls();
            } else {
                lv.fill(((Number) value).longValue());
            }
            return lv;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            int precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            DecimalData decimal = value == null ? null : Preconditions.checkNotNull(DecimalData.fromBigDecimal((BigDecimal) value, precision, scale));
            ColumnVector internalVector;
            if (ParquetSchemaConverter.is32BitDecimal(precision)) {
                internalVector = createVectorFromConstant(new IntType(), decimal == null ? null : (int) decimal.toUnscaledLong(), batchSize);
            } else if (ParquetSchemaConverter.is64BitDecimal(precision)) {
                internalVector = createVectorFromConstant(new BigIntType(), decimal == null ? null : decimal.toUnscaledLong(), batchSize);
            } else {
                internalVector = createVectorFromConstant(new VarBinaryType(), decimal == null ? null : decimal.toUnscaledBytes(), batchSize);
            }
            return new ParquetDecimalVector(internalVector);
        case FLOAT:
            HeapFloatVector fv = new HeapFloatVector(batchSize);
            if (value == null) {
                fv.fillWithNulls();
            } else {
                fv.fill(((Number) value).floatValue());
            }
            return fv;
        case DOUBLE:
            HeapDoubleVector dv = new HeapDoubleVector(batchSize);
            if (value == null) {
                dv.fillWithNulls();
            } else {
                dv.fill(((Number) value).doubleValue());
            }
            return dv;
        case DATE:
            if (value instanceof LocalDate) {
                value = Date.valueOf((LocalDate) value);
            }
            return createVectorFromConstant(new IntType(), value == null ? null : toInternal((Date) value), batchSize);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            HeapTimestampVector tv = new HeapTimestampVector(batchSize);
            if (value == null) {
                tv.fillWithNulls();
            } else {
                tv.fill(TimestampData.fromLocalDateTime((LocalDateTime) value));
            }
            return tv;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
Also used: HeapShortVector(org.apache.flink.table.data.columnar.vector.heap.HeapShortVector) HeapLongVector(org.apache.flink.table.data.columnar.vector.heap.HeapLongVector) LocalDateTime(java.time.LocalDateTime) VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) HeapByteVector(org.apache.flink.table.data.columnar.vector.heap.HeapByteVector) HeapDoubleVector(org.apache.flink.table.data.columnar.vector.heap.HeapDoubleVector) HeapTimestampVector(org.apache.flink.table.data.columnar.vector.heap.HeapTimestampVector) HeapBytesVector(org.apache.flink.table.data.columnar.vector.heap.HeapBytesVector) HeapIntVector(org.apache.flink.table.data.columnar.vector.heap.HeapIntVector) BigIntType(org.apache.flink.table.types.logical.BigIntType) LocalDate(java.time.LocalDate) HeapBooleanVector(org.apache.flink.table.data.columnar.vector.heap.HeapBooleanVector) ColumnVector(org.apache.flink.table.data.columnar.vector.ColumnVector) WritableColumnVector(org.apache.flink.table.data.columnar.vector.writable.WritableColumnVector) IntType(org.apache.flink.table.types.logical.IntType) DecimalData(org.apache.flink.table.data.DecimalData) DecimalType(org.apache.flink.table.types.logical.DecimalType) HeapFloatVector(org.apache.flink.table.data.columnar.vector.heap.HeapFloatVector)
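
A brief sketch of a call, using only the signature shown above: a batch-sized column where every slot holds the same DECIMAL(6,2) constant. With precision 6 the is32BitDecimal branch applies, so the constant is stored as an int of unscaled digits wrapped in a ParquetDecimalVector:

// A minimal sketch: one constant repeated across a 1024-slot vector;
// passing null as the value would fill the vector with nulls instead.
ColumnVector vector = ParquetSplitReaderUtil.createVectorFromConstant(
        new DecimalType(6, 2), new BigDecimal("12.34"), 1024);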

Example 5 with DecimalData

Use of org.apache.flink.table.data.DecimalData in project flink by apache.

From class DecimalDataRandomGeneratorTest, method testGenerateDecimalValues:

@Test
public void testGenerateDecimalValues() {
    for (int precision = 1; precision <= 38; precision++) {
        for (int scale = 0; scale <= precision; scale++) {
            DecimalDataRandomGenerator gen = new DecimalDataRandomGenerator(precision, scale, Double.MIN_VALUE, Double.MAX_VALUE);
            DecimalData value = gen.next();
            Assert.assertNotNull("Null value for DECIMAL(" + precision + "," + scale + ")", value);
            String strRepr = String.valueOf(value);
            if (strRepr.charAt(0) == '-') {
                // drop the negative sign
                strRepr = strRepr.substring(1);
            }
            if (scale != precision) {
                // need to account for decimal . and potential leading zeros
                Assert.assertThat("Wrong length for DECIMAL(" + precision + "," + scale + ") = " + strRepr, strRepr.length(), lessThanOrEqualTo(precision + 1));
            } else {
                // need to account for decimal . and potential leading zeros
                Assert.assertThat("Wrong length for DECIMAL(" + precision + "," + scale + ") = " + strRepr, strRepr.length(), lessThanOrEqualTo(precision + 2));
            }
            if (scale != 0) {
                String decimalPart = strRepr.split("\\.")[1];
                Assert.assertThat("Wrong length for DECIMAL(" + precision + "," + scale + ") = " + strRepr, decimalPart.length(), equalTo(scale));
            }
        }
    }
}
Also used: DecimalData(org.apache.flink.table.data.DecimalData) Test(org.junit.Test)
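
The precision + 1 and precision + 2 bounds above follow from how the decimal string renders: one character for the decimal point, plus a leading zero when every digit is fractional (scale == precision). A plain-Java illustration of the two cases, with values chosen here for demonstration:

// DECIMAL(3,2): 3 digits plus '.' gives at most precision + 1 characters.
System.out.println(new BigDecimal("1.23").toPlainString()); // "1.23" -> 4 chars
// DECIMAL(3,3): all digits fractional, so "0." is prepended -> precision + 2 characters.
System.out.println(new BigDecimal("0.123").toPlainString()); // "0.123" -> 5 chars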

Aggregations

DecimalData (org.apache.flink.table.data.DecimalData) 8
BigDecimal (java.math.BigDecimal) 3
Test (org.junit.Test) 3
TimestampData (org.apache.flink.table.data.TimestampData) 2
LocalDate (java.time.LocalDate) 1
LocalDateTime (java.time.LocalDateTime) 1
Arrays (java.util.Arrays) 1
Random (java.util.Random) 1
Schema (org.apache.avro.Schema) 1
Utf8 (org.apache.avro.util.Utf8) 1
Tuple2 (org.apache.flink.api.java.tuple.Tuple2) 1
MemorySegment (org.apache.flink.core.memory.MemorySegment) 1
MemorySegmentFactory (org.apache.flink.core.memory.MemorySegmentFactory) 1
TableException (org.apache.flink.table.api.TableException) 1
ArrayData (org.apache.flink.table.data.ArrayData) 1
GenericRowData (org.apache.flink.table.data.GenericRowData) 1
RowData (org.apache.flink.table.data.RowData) 1
StringData (org.apache.flink.table.data.StringData) 1
StringData.fromString (org.apache.flink.table.data.StringData.fromString) 1
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData) 1