Use of org.apache.flink.table.data.DecimalData in project flink by apache.
From the class DecimalDataRandomGeneratorTest, method testMinMax.
@Test
public void testMinMax() {
    for (int precision = 1; precision <= 38; precision++) {
        for (int scale = 0; scale <= precision; scale++) {
            BigDecimal min = BigDecimal.valueOf(-10.0);
            BigDecimal max = BigDecimal.valueOf(10.0);
            DecimalDataRandomGenerator gen = new DecimalDataRandomGenerator(precision, scale, min.doubleValue(), max.doubleValue());
            DecimalData result = gen.next();
            Assert.assertNotNull("Null value for DECIMAL(" + precision + "," + scale + ")", result);
            Assert.assertThat("value must be greater than or equal to min", result.toBigDecimal(), greaterThanOrEqualTo(min));
            Assert.assertThat("value must be less than or equal to max", result.toBigDecimal(), lessThanOrEqualTo(max));
        }
    }
}
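A hedged sketch of what a single generator call yields (the precision, scale, bounds, and sample value below are illustrative, not taken from the test):

// Sketch only: uses the same constructor and next() contract exercised above.
DecimalDataRandomGenerator gen = new DecimalDataRandomGenerator(10, 2, -10.0, 10.0);
DecimalData d = gen.next();            // a DECIMAL(10,2) value, e.g. 3.47
BigDecimal bounded = d.toBigDecimal(); // expected to fall within [-10.0, 10.0], as asserted above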
Use of org.apache.flink.table.data.DecimalData in project flink by apache.
From the class ExpressionConverter, method extractValue.
/**
 * Extracts a value from a literal, including planner-specific instances such as
 * {@link DecimalData}.
 */
@SuppressWarnings("unchecked")
public static <T> T extractValue(ValueLiteralExpression literal, Class<T> clazz) {
    final Optional<Object> possibleObject = literal.getValueAs(Object.class);
    if (!possibleObject.isPresent()) {
        throw new TableException("Invalid literal.");
    }
    final Object object = possibleObject.get();
    if (clazz.equals(BigDecimal.class)) {
        final Optional<BigDecimal> possibleDecimal = literal.getValueAs(BigDecimal.class);
        if (possibleDecimal.isPresent()) {
            return (T) possibleDecimal.get();
        }
        if (object instanceof DecimalData) {
            return (T) ((DecimalData) object).toBigDecimal();
        }
    }
    return literal.getValueAs(clazz).orElseThrow(() -> new TableException("Unsupported literal class: " + clazz));
}
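A hedged usage sketch (the literal value is hypothetical, and the single-argument ValueLiteralExpression constructor is an assumed way to build the literal): for a BigDecimal target class, the method returns the decimal even when the planner stored it internally as DecimalData.

// Illustrative only; not a verbatim usage from the Flink code base.
ValueLiteralExpression literal = new ValueLiteralExpression(new BigDecimal("12.34"));
BigDecimal extracted = ExpressionConverter.extractValue(literal, BigDecimal.class);
// extracted equals new BigDecimal("12.34")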
Use of org.apache.flink.table.data.DecimalData in project flink by apache.
From the class RowDataToAvroConverters, method createConverter.
// --------------------------------------------------------------------------------
// IMPORTANT! We use anonymous classes instead of lambdas for a reason here. It is
// necessary because the maven shade plugin cannot relocate classes in
// SerializedLambdas (MSHADE-260). On the other hand we want to relocate Avro for
// sql-client uber jars.
// --------------------------------------------------------------------------------
/**
* Creates a runtime converter according to the given logical type that converts objects of
* Flink Table & SQL internal data structures to corresponding Avro data structures.
*/
public static RowDataToAvroConverter createConverter(LogicalType type) {
    final RowDataToAvroConverter converter;
    switch (type.getTypeRoot()) {
        case NULL:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return null;
                }
            };
            break;
        case TINYINT:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Byte) object).intValue();
                }
            };
            break;
        case SMALLINT:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Short) object).intValue();
                }
            };
            break;
        case BOOLEAN: // boolean
        case INTEGER: // int
        case INTERVAL_YEAR_MONTH: // long
        case BIGINT: // long
        case INTERVAL_DAY_TIME: // long
        case FLOAT: // float
        case DOUBLE: // double
        case TIME_WITHOUT_TIME_ZONE: // int
        case DATE: // int
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return object;
                }
            };
            break;
        case CHAR:
        case VARCHAR:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return new Utf8(object.toString());
                }
            };
            break;
        case BINARY:
        case VARBINARY:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap((byte[]) object);
                }
            };
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return ((TimestampData) object).toInstant().toEpochMilli();
                }
            };
            break;
        case DECIMAL:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap(((DecimalData) object).toUnscaledBytes());
                }
            };
            break;
        case ARRAY:
            converter = createArrayConverter((ArrayType) type);
            break;
        case ROW:
            converter = createRowConverter((RowType) type);
            break;
        case MAP:
        case MULTISET:
            converter = createMapConverter(type);
            break;
        case RAW:
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
    // wrap into nullable converter
    return new RowDataToAvroConverter() {
        private static final long serialVersionUID = 1L;
        @Override
        public Object convert(Schema schema, Object object) {
            if (object == null) {
                return null;
            }
            // get actual schema if it is a nullable schema
            Schema actualSchema;
            if (schema.getType() == Schema.Type.UNION) {
                List<Schema> types = schema.getTypes();
                int size = types.size();
                if (size == 2 && types.get(1).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(0);
                } else if (size == 2 && types.get(0).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(1);
                } else {
                    throw new IllegalArgumentException("The Avro schema is not a nullable type: " + schema.toString());
                }
            } else {
                actualSchema = schema;
            }
            return converter.convert(actualSchema, object);
        }
    };
}
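A hedged sketch of the DECIMAL branch in use (the schema derivation via AvroSchemaConverter.convertToSchema and the concrete values are assumptions for illustration): the converter emits the unscaled bytes of the DecimalData wrapped in a ByteBuffer, after the outer wrapper has unwrapped a nullable union schema if present.

// Sketch only; assumes a matching Avro schema derived from the logical type.
LogicalType decimalType = new DecimalType(10, 2);
Schema avroSchema = AvroSchemaConverter.convertToSchema(decimalType);
RowDataToAvroConverters.RowDataToAvroConverter converter = RowDataToAvroConverters.createConverter(decimalType);
DecimalData decimal = DecimalData.fromBigDecimal(new BigDecimal("12.34"), 10, 2);
ByteBuffer encoded = (ByteBuffer) converter.convert(avroSchema, decimal); // bytes of the unscaled value 1234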
Use of org.apache.flink.table.data.DecimalData in project flink by apache.
From the class ParquetSplitReaderUtil, method createVectorFromConstant.
public static ColumnVector createVectorFromConstant(LogicalType type, Object value, int batchSize) {
    switch (type.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
        case BINARY:
        case VARBINARY:
            HeapBytesVector bsv = new HeapBytesVector(batchSize);
            if (value == null) {
                bsv.fillWithNulls();
            } else {
                bsv.fill(value instanceof byte[] ? (byte[]) value : value.toString().getBytes(StandardCharsets.UTF_8));
            }
            return bsv;
        case BOOLEAN:
            HeapBooleanVector bv = new HeapBooleanVector(batchSize);
            if (value == null) {
                bv.fillWithNulls();
            } else {
                bv.fill((boolean) value);
            }
            return bv;
        case TINYINT:
            HeapByteVector byteVector = new HeapByteVector(batchSize);
            if (value == null) {
                byteVector.fillWithNulls();
            } else {
                byteVector.fill(((Number) value).byteValue());
            }
            return byteVector;
        case SMALLINT:
            HeapShortVector sv = new HeapShortVector(batchSize);
            if (value == null) {
                sv.fillWithNulls();
            } else {
                sv.fill(((Number) value).shortValue());
            }
            return sv;
        case INTEGER:
            HeapIntVector iv = new HeapIntVector(batchSize);
            if (value == null) {
                iv.fillWithNulls();
            } else {
                iv.fill(((Number) value).intValue());
            }
            return iv;
        case BIGINT:
            HeapLongVector lv = new HeapLongVector(batchSize);
            if (value == null) {
                lv.fillWithNulls();
            } else {
                lv.fill(((Number) value).longValue());
            }
            return lv;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            int precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            DecimalData decimal = value == null ? null : Preconditions.checkNotNull(DecimalData.fromBigDecimal((BigDecimal) value, precision, scale));
            ColumnVector internalVector;
            if (ParquetSchemaConverter.is32BitDecimal(precision)) {
                internalVector = createVectorFromConstant(new IntType(), decimal == null ? null : (int) decimal.toUnscaledLong(), batchSize);
            } else if (ParquetSchemaConverter.is64BitDecimal(precision)) {
                internalVector = createVectorFromConstant(new BigIntType(), decimal == null ? null : decimal.toUnscaledLong(), batchSize);
            } else {
                internalVector = createVectorFromConstant(new VarBinaryType(), decimal == null ? null : decimal.toUnscaledBytes(), batchSize);
            }
            return new ParquetDecimalVector(internalVector);
        case FLOAT:
            HeapFloatVector fv = new HeapFloatVector(batchSize);
            if (value == null) {
                fv.fillWithNulls();
            } else {
                fv.fill(((Number) value).floatValue());
            }
            return fv;
        case DOUBLE:
            HeapDoubleVector dv = new HeapDoubleVector(batchSize);
            if (value == null) {
                dv.fillWithNulls();
            } else {
                dv.fill(((Number) value).doubleValue());
            }
            return dv;
        case DATE:
            if (value instanceof LocalDate) {
                value = Date.valueOf((LocalDate) value);
            }
            return createVectorFromConstant(new IntType(), value == null ? null : toInternal((Date) value), batchSize);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            HeapTimestampVector tv = new HeapTimestampVector(batchSize);
            if (value == null) {
                tv.fillWithNulls();
            } else {
                tv.fill(TimestampData.fromLocalDateTime((LocalDateTime) value));
            }
            return tv;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
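A hedged usage sketch for the DECIMAL branch (the type, value, and batch size are illustrative, and the 32-bit path is assumed to apply to such a small precision): a low-precision decimal constant ends up in an int-backed vector holding its unscaled value.

// Illustrative only; not a verbatim usage from the Flink code base.
LogicalType type = new DecimalType(5, 2);
ColumnVector vector = ParquetSplitReaderUtil.createVectorFromConstant(type, new BigDecimal("123.45"), 1024);
// expected: a ParquetDecimalVector wrapping a HeapIntVector filled with the unscaled value 12345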
Use of org.apache.flink.table.data.DecimalData in project flink by apache.
From the class DecimalDataRandomGeneratorTest, method testGenerateDecimalValues.
@Test
public void testGenerateDecimalValues() {
    for (int precision = 1; precision <= 38; precision++) {
        for (int scale = 0; scale <= precision; scale++) {
            DecimalDataRandomGenerator gen = new DecimalDataRandomGenerator(precision, scale, Double.MIN_VALUE, Double.MAX_VALUE);
            DecimalData value = gen.next();
            Assert.assertNotNull("Null value for DECIMAL(" + precision + "," + scale + ")", value);
            String strRepr = String.valueOf(value);
            if (strRepr.charAt(0) == '-') {
                // drop the negative sign
                strRepr = strRepr.substring(1);
            }
            if (scale != precision) {
                // need to account for the decimal point
                Assert.assertThat("Wrong length for DECIMAL(" + precision + "," + scale + ") = " + strRepr, strRepr.length(), lessThanOrEqualTo(precision + 1));
            } else {
                // need to account for the decimal point and the leading zero
                Assert.assertThat("Wrong length for DECIMAL(" + precision + "," + scale + ") = " + strRepr, strRepr.length(), lessThanOrEqualTo(precision + 2));
            }
            if (scale != 0) {
                String decimalPart = strRepr.split("\\.")[1];
                Assert.assertThat("Wrong length for DECIMAL(" + precision + "," + scale + ") = " + strRepr, decimalPart.length(), equalTo(scale));
            }
        }
    }
}
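A short worked example of the length bounds asserted above (the values are illustrative, and String.valueOf is assumed to produce the plain decimal representation, which is what the test relies on): when scale < precision only the decimal point adds a character, while scale == precision also forces a leading zero.

// DECIMAL(4,2): "12.34"  has 5 characters, i.e. precision + 1 (the decimal point)
// DECIMAL(4,4): "0.1234" has 6 characters, i.e. precision + 2 (leading zero plus decimal point)
String s1 = String.valueOf(DecimalData.fromBigDecimal(new BigDecimal("12.34"), 4, 2));
String s2 = String.valueOf(DecimalData.fromBigDecimal(new BigDecimal("0.1234"), 4, 4));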