
Example 16 with SqlDecimal

Use of io.prestosql.spi.type.SqlDecimal in project hetu-core by openlookeng.

From the class TestExpressionCompiler, method assertExecute:

private void assertExecute(List<String> expressions, BigDecimal decimal) {
    Type type = getDecimalType(decimal);
    SqlDecimal value = decimal == null ? null : new SqlDecimal(decimal.unscaledValue(), decimal.precision(), decimal.scale());
    for (String expression : expressions) {
        assertExecute(expression, type, value);
    }
}
Also used : Type(io.prestosql.spi.type.Type) DecimalType.createDecimalType(io.prestosql.spi.type.DecimalType.createDecimalType) ArrayType(io.prestosql.spi.type.ArrayType) VarcharType.createVarcharType(io.prestosql.spi.type.VarcharType.createVarcharType) VarcharType.createUnboundedVarcharType(io.prestosql.spi.type.VarcharType.createUnboundedVarcharType) StructuralTestUtil.mapType(io.prestosql.util.StructuralTestUtil.mapType) VarcharType(io.prestosql.spi.type.VarcharType) SqlDecimal(io.prestosql.spi.type.SqlDecimal)
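
For context, a minimal standalone sketch (hypothetical class name, not part of the test) of the BigDecimal-to-SqlDecimal mapping the helper above relies on: the unscaled value, precision, and scale of the BigDecimal carry over directly into the SqlDecimal constructor.

import io.prestosql.spi.type.SqlDecimal;
import java.math.BigDecimal;

public class SqlDecimalConversionSketch {
    public static void main(String[] args) {
        BigDecimal decimal = new BigDecimal("123.45");
        // unscaled value 12345, precision 5, scale 2 -> corresponds to decimal(5,2)
        SqlDecimal value = new SqlDecimal(decimal.unscaledValue(), decimal.precision(), decimal.scale());
        // round-trips back to the original BigDecimal
        System.out.println(value.toBigDecimal()); // 123.45
    }
}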

Example 17 with SqlDecimal

Use of io.prestosql.spi.type.SqlDecimal in project hetu-core by openlookeng.

From the class StructuralTestUtil, method appendToBlockBuilder:

public static void appendToBlockBuilder(Type type, Object element, BlockBuilder blockBuilder) {
    Class<?> javaType = type.getJavaType();
    if (element == null) {
        blockBuilder.appendNull();
    } else if (type.getTypeSignature().getBase().equals(StandardTypes.ARRAY) && element instanceof Iterable<?>) {
        BlockBuilder subBlockBuilder = blockBuilder.beginBlockEntry();
        for (Object subElement : (Iterable<?>) element) {
            appendToBlockBuilder(type.getTypeParameters().get(0), subElement, subBlockBuilder);
        }
        blockBuilder.closeEntry();
    } else if (type.getTypeSignature().getBase().equals(StandardTypes.ROW) && element instanceof Iterable<?>) {
        BlockBuilder subBlockBuilder = blockBuilder.beginBlockEntry();
        int field = 0;
        for (Object subElement : (Iterable<?>) element) {
            appendToBlockBuilder(type.getTypeParameters().get(field), subElement, subBlockBuilder);
            field++;
        }
        blockBuilder.closeEntry();
    } else if (type.getTypeSignature().getBase().equals(StandardTypes.MAP) && element instanceof Map<?, ?>) {
        BlockBuilder subBlockBuilder = blockBuilder.beginBlockEntry();
        for (Map.Entry<?, ?> entry : ((Map<?, ?>) element).entrySet()) {
            appendToBlockBuilder(type.getTypeParameters().get(0), entry.getKey(), subBlockBuilder);
            appendToBlockBuilder(type.getTypeParameters().get(1), entry.getValue(), subBlockBuilder);
        }
        blockBuilder.closeEntry();
    } else if (javaType == boolean.class) {
        type.writeBoolean(blockBuilder, (Boolean) element);
    } else if (javaType == long.class) {
        if (element instanceof SqlDecimal) {
            type.writeLong(blockBuilder, ((SqlDecimal) element).getUnscaledValue().longValue());
        } else if (REAL.equals(type)) {
            type.writeLong(blockBuilder, floatToRawIntBits(((Number) element).floatValue()));
        } else {
            type.writeLong(blockBuilder, ((Number) element).longValue());
        }
    } else if (javaType == double.class) {
        type.writeDouble(blockBuilder, ((Number) element).doubleValue());
    } else if (javaType == Slice.class) {
        if (element instanceof String) {
            type.writeSlice(blockBuilder, Slices.utf8Slice(element.toString()));
        } else if (element instanceof byte[]) {
            type.writeSlice(blockBuilder, Slices.wrappedBuffer((byte[]) element));
        } else if (element instanceof SqlDecimal) {
            type.writeSlice(blockBuilder, Decimals.encodeUnscaledValue(((SqlDecimal) element).getUnscaledValue()));
        } else {
            type.writeSlice(blockBuilder, (Slice) element);
        }
    } else {
        type.writeObject(blockBuilder, element);
    }
}
Also used : Slice(io.airlift.slice.Slice) SqlDecimal(io.prestosql.spi.type.SqlDecimal) Map(java.util.Map) BlockBuilder(io.prestosql.spi.block.BlockBuilder)
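
As a rough usage sketch (the helper name is hypothetical, and it assumes the SPI's Type.createBlockBuilder(null, expectedEntries) factory): for a short decimal such as decimal(10,2) the Java stack type is long, so the SqlDecimal branch under javaType == long.class above writes the unscaled value.

private static Block buildShortDecimalBlock() {
    // decimal(10,2): precision <= 18, so appendToBlockBuilder writes the
    // unscaled value via type.writeLong(...)
    DecimalType type = DecimalType.createDecimalType(10, 2);
    BlockBuilder blockBuilder = type.createBlockBuilder(null, 2);
    appendToBlockBuilder(type, new SqlDecimal(BigInteger.valueOf(12345), 10, 2), blockBuilder); // 123.45
    appendToBlockBuilder(type, null, blockBuilder);                                             // null entry -> appendNull()
    return blockBuilder.build();
}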

Example 18 with SqlDecimal

Use of io.prestosql.spi.type.SqlDecimal in project hetu-core by openlookeng.

From the class OrcTester, method preprocessWriteValueHive:

private static Object preprocessWriteValueHive(Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP)) {
        return Timestamp.ofEpochMilli(((SqlTimestamp) value).getMillis());
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueHive(elementType, element)).collect(toList());
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
Also used : OrcUtil(org.apache.hadoop.hive.ql.io.orc.OrcUtil) DateTimeZone(org.joda.time.DateTimeZone) LZ4(io.prestosql.orc.metadata.CompressionKind.LZ4) PrimitiveObjectInspectorFactory.javaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector) Text(org.apache.hadoop.io.Text) PrimitiveObjectInspectorFactory.javaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector) Writable(org.apache.hadoop.io.Writable) PrimitiveObjectInspectorFactory.javaTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector) Date(org.apache.hadoop.hive.common.type.Date) PrimitiveObjectInspectorFactory.javaDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector) ZSTD(io.prestosql.orc.metadata.CompressionKind.ZSTD) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) OrcStruct(org.apache.hadoop.hive.ql.io.orc.OrcStruct) BigDecimal(java.math.BigDecimal) Arrays.asList(java.util.Arrays.asList) Slices(io.airlift.slice.Slices) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) RowFieldName(io.prestosql.spi.type.RowFieldName) BigInteger(java.math.BigInteger) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Assert.assertFalse(org.testng.Assert.assertFalse) IntWritable(org.apache.hadoop.io.IntWritable) PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector) PrimitiveObjectInspectorFactory.javaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector) PrimitiveObjectInspectorFactory.javaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) Set(java.util.Set) VarbinaryType(io.prestosql.spi.type.VarbinaryType) READ_ALL_COLUMNS(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS) TIMESTAMP(io.prestosql.spi.type.TimestampType.TIMESTAMP) ReaderOptions(org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions) Metadata(io.prestosql.metadata.Metadata) StandardCharsets(java.nio.charset.StandardCharsets) NONE(io.prestosql.orc.metadata.CompressionKind.NONE) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) FileAssert.fail(org.testng.FileAssert.fail) CompressionKind(io.prestosql.orc.metadata.CompressionKind) PrimitiveObjectInspectorFactory.javaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) JavaHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveCharObjectInspector) Iterables(com.google.common.collect.Iterables) Slice(io.airlift.slice.Slice) StandardTypes(io.prestosql.spi.type.StandardTypes) MEGABYTE(io.airlift.units.DataSize.Unit.MEGABYTE) CharType(io.prestosql.spi.type.CharType) DataSize.succinctBytes(io.airlift.units.DataSize.succinctBytes) 
TypeSignatureParameter(io.prestosql.spi.type.TypeSignatureParameter) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) Decimals.rescale(io.prestosql.spi.type.Decimals.rescale) ArrayList(java.util.ArrayList) Lists(com.google.common.collect.Lists) VARCHAR(io.prestosql.spi.type.VarcharType.VARCHAR) SNAPPY(io.prestosql.orc.metadata.CompressionKind.SNAPPY) PrimitiveObjectInspectorFactory.javaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector) BOTH(io.prestosql.orc.OrcWriteValidation.OrcWriteValidationMode.BOTH) DOUBLE(io.prestosql.spi.type.DoubleType.DOUBLE) Properties(java.util.Properties) AbstractIterator(com.google.common.collect.AbstractIterator) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) File(java.io.File) SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) VARBINARY(io.prestosql.spi.type.VarbinaryType.VARBINARY) DateTimeTestingUtils.sqlTimestampOf(io.prestosql.testing.DateTimeTestingUtils.sqlTimestampOf) Chars.truncateToLengthAndTrimSpaces(io.prestosql.spi.type.Chars.truncateToLengthAndTrimSpaces) FloatWritable(org.apache.hadoop.io.FloatWritable) VarcharType(io.prestosql.spi.type.VarcharType) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) OrcFile(org.apache.hadoop.hive.ql.io.orc.OrcFile) DecimalType(io.prestosql.spi.type.DecimalType) LongWritable(org.apache.hadoop.io.LongWritable) OrcSerde(org.apache.hadoop.hive.ql.io.orc.OrcSerde) ORC_12(io.prestosql.orc.OrcTester.Format.ORC_12) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) ORC_11(io.prestosql.orc.OrcTester.Format.ORC_11) OrcConf(org.apache.orc.OrcConf) RecordReader(org.apache.hadoop.hive.ql.io.orc.RecordReader) Path(org.apache.hadoop.fs.Path) BOOLEAN(io.prestosql.spi.type.BooleanType.BOOLEAN) Reader(org.apache.hadoop.hive.ql.io.orc.Reader) Type(io.prestosql.spi.type.Type) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) BIGINT(io.prestosql.spi.type.BigintType.BIGINT) ImmutableSet(com.google.common.collect.ImmutableSet) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) ImmutableMap(com.google.common.collect.ImmutableMap) BlockBuilder(io.prestosql.spi.block.BlockBuilder) MetadataManager.createTestMetadataManager(io.prestosql.metadata.MetadataManager.createTestMetadataManager) TINYINT(io.prestosql.spi.type.TinyintType.TINYINT) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) DataSize(io.airlift.units.DataSize) List(java.util.List) ZLIB(io.prestosql.orc.metadata.CompressionKind.ZLIB) Entry(java.util.Map.Entry) Varchars.truncateToLength(io.prestosql.spi.type.Varchars.truncateToLength) Optional(java.util.Optional) READ_COLUMN_IDS_CONF_STR(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR) OrcOutputFormat(org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat) AggregatedMemoryContext.newSimpleAggregatedMemoryContext(io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext) IntStream(java.util.stream.IntStream) 
TypeInfoFactory.getCharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo) SqlTimestamp(io.prestosql.spi.type.SqlTimestamp) Assert.assertNull(org.testng.Assert.assertNull) Logger(io.airlift.log.Logger) SESSION(io.prestosql.testing.TestingConnectorSession.SESSION) PrimitiveObjectInspectorFactory.javaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector) SqlDate(io.prestosql.spi.type.SqlDate) Assert.assertEquals(org.testng.Assert.assertEquals) Decimals(io.prestosql.spi.type.Decimals) HashMap(java.util.HashMap) INTEGER(io.prestosql.spi.type.IntegerType.INTEGER) SqlDecimal(io.prestosql.spi.type.SqlDecimal) DoubleWritable(org.apache.hadoop.io.DoubleWritable) Function(java.util.function.Function) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector) SqlVarbinary(io.prestosql.spi.type.SqlVarbinary) HashSet(java.util.HashSet) NamedTypeSignature(io.prestosql.spi.type.NamedTypeSignature) MAX_BATCH_SIZE(io.prestosql.orc.OrcReader.MAX_BATCH_SIZE) ImmutableList(com.google.common.collect.ImmutableList) ByteWritable(org.apache.hadoop.io.ByteWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DATE(io.prestosql.spi.type.DateType.DATE) REAL(io.prestosql.spi.type.RealType.REAL) ObjectInspectorFactory.getStandardMapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardMapObjectInspector) Block(io.prestosql.spi.block.Block) Iterator(java.util.Iterator) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Iterators.advance(com.google.common.collect.Iterators.advance) Page(io.prestosql.spi.Page) ObjectInspectorFactory.getStandardListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardListObjectInspector) JobConf(org.apache.hadoop.mapred.JobConf) Collectors.toList(java.util.stream.Collectors.toList) SMALLINT(io.prestosql.spi.type.SmallintType.SMALLINT) Serializer(org.apache.hadoop.hive.serde2.Serializer) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) TestingOrcPredicate.createOrcPredicate(io.prestosql.orc.TestingOrcPredicate.createOrcPredicate) Assert.assertTrue(org.testng.Assert.assertTrue) PrimitiveObjectInspectorFactory.javaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)
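
For the DecimalType branch above, a minimal illustration (the values and variable names are made up) of how a Presto-side SqlDecimal becomes a Hive-side HiveDecimal through its BigDecimal representation:

// decimal(5,2) value 123.45 on the Presto side
SqlDecimal prestoValue = new SqlDecimal(new BigInteger("12345"), 5, 2);
// the same conversion used by preprocessWriteValueHive for DecimalType columns
HiveDecimal hiveValue = HiveDecimal.create(prestoValue.toBigDecimal());
// hiveValue.bigDecimalValue() yields 123.45, ready for the Hive ORC writer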

Example 19 with SqlDecimal

Use of io.prestosql.spi.type.SqlDecimal in project hetu-core by openlookeng.

From the class AbstractTestOrcReader, method decimalSequence:

private static List<SqlDecimal> decimalSequence(String start, String step, int items, int precision, int scale) {
    BigInteger decimalStep = new BigInteger(step);
    List<SqlDecimal> values = new ArrayList<>();
    BigInteger nextValue = new BigInteger(start);
    for (int i = 0; i < items; i++) {
        values.add(new SqlDecimal(nextValue, precision, scale));
        nextValue = nextValue.add(decimalStep);
    }
    return values;
}
Also used : ArrayList(java.util.ArrayList) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) BigInteger(java.math.BigInteger) SqlDecimal(io.prestosql.spi.type.SqlDecimal)
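
For example, decimalSequence("-30", "1", 5, 2, 1) would yield the unscaled values -30 through -26, i.e. the decimal(2,1) values -3.0, -2.9, -2.8, -2.7 and -2.6:

List<SqlDecimal> values = decimalSequence("-30", "1", 5, 2, 1);
// values.get(0).toBigDecimal() -> -3.0  (unscaled -30 at scale 1)
// values.get(4).toBigDecimal() -> -2.6  (unscaled -26 at scale 1)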

Example 20 with SqlDecimal

Use of io.prestosql.spi.type.SqlDecimal in project hetu-core by openlookeng.

From the class AbstractTestOrcReader, method testDecimalSequence:

@Test
public void testDecimalSequence() throws Exception {
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_2, decimalSequence("-30", "1", 60, 2, 1));
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_4, decimalSequence("-3000", "1", 60_00, 4, 2));
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_8, decimalSequence("-3000000", "100", 60_000, 8, 4));
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_17, decimalSequence("-30000000000", "1000000", 60_000, 17, 8));
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_18, decimalSequence("-30000000000", "1000000", 60_000, 18, 8));
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_38, decimalSequence("-3000000000000000000", "100000000000000", 60_000, 38, 16));
    Random random = new Random(0);
    List<SqlDecimal> values = new ArrayList<>();
    values.add(new SqlDecimal(new BigInteger(Strings.repeat("9", 18)), DECIMAL_TYPE_PRECISION_18.getPrecision(), DECIMAL_TYPE_PRECISION_18.getScale()));
    values.add(new SqlDecimal(new BigInteger("-" + Strings.repeat("9", 18)), DECIMAL_TYPE_PRECISION_18.getPrecision(), DECIMAL_TYPE_PRECISION_18.getScale()));
    BigInteger nextValue = BigInteger.ONE;
    for (int i = 0; i < 59; i++) {
        values.add(new SqlDecimal(nextValue, DECIMAL_TYPE_PRECISION_18.getPrecision(), DECIMAL_TYPE_PRECISION_18.getScale()));
        values.add(new SqlDecimal(nextValue.negate(), DECIMAL_TYPE_PRECISION_18.getPrecision(), DECIMAL_TYPE_PRECISION_18.getScale()));
        nextValue = nextValue.multiply(BigInteger.valueOf(2));
    }
    for (int i = 0; i < 100_000; ++i) {
        BigInteger value = new BigInteger(59, random);
        if (random.nextBoolean()) {
            value = value.negate();
        }
        values.add(new SqlDecimal(value, DECIMAL_TYPE_PRECISION_18.getPrecision(), DECIMAL_TYPE_PRECISION_18.getScale()));
    }
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_18, values);
    random = new Random(0);
    values = new ArrayList<>();
    values.add(new SqlDecimal(new BigInteger(Strings.repeat("9", 38)), DECIMAL_TYPE_PRECISION_38.getPrecision(), DECIMAL_TYPE_PRECISION_38.getScale()));
    values.add(new SqlDecimal(new BigInteger("-" + Strings.repeat("9", 38)), DECIMAL_TYPE_PRECISION_38.getPrecision(), DECIMAL_TYPE_PRECISION_38.getScale()));
    nextValue = BigInteger.ONE;
    for (int i = 0; i < 127; i++) {
        values.add(new SqlDecimal(nextValue, 38, 16));
        values.add(new SqlDecimal(nextValue.negate(), 38, 16));
        nextValue = nextValue.multiply(BigInteger.valueOf(2));
    }
    for (int i = 0; i < 100_000; ++i) {
        BigInteger value = new BigInteger(126, random);
        if (random.nextBoolean()) {
            value = value.negate();
        }
        values.add(new SqlDecimal(value, DECIMAL_TYPE_PRECISION_38.getPrecision(), DECIMAL_TYPE_PRECISION_38.getScale()));
    }
    tester.testRoundTrip(DECIMAL_TYPE_PRECISION_38, values);
}
Also used : Random(java.util.Random) ArrayList(java.util.ArrayList) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) BigInteger(java.math.BigInteger) SqlDecimal(io.prestosql.spi.type.SqlDecimal) Test(org.testng.annotations.Test)
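
One step worth spelling out: new BigInteger(59, random) and new BigInteger(126, random) are bounded by 2^59 and 2^126, which fit within 18 and 38 decimal digits respectively, so every random value stays inside the tested decimal precisions. A quick sanity check:

// 2^59 ~ 5.8e17 < 10^18 and 2^126 ~ 8.5e37 < 10^38
System.out.println(BigInteger.ONE.shiftLeft(59).compareTo(BigInteger.TEN.pow(18)) < 0);   // true
System.out.println(BigInteger.ONE.shiftLeft(126).compareTo(BigInteger.TEN.pow(38)) < 0);  // true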

Aggregations

SqlDecimal (io.prestosql.spi.type.SqlDecimal): 24 usages
BigInteger (java.math.BigInteger): 13 usages
ImmutableList (com.google.common.collect.ImmutableList): 12 usages
ArrayList (java.util.ArrayList): 11 usages
SqlVarbinary (io.prestosql.spi.type.SqlVarbinary): 9 usages
DecimalType (io.prestosql.spi.type.DecimalType): 8 usages
Test (org.testng.annotations.Test): 8 usages
Slice (io.airlift.slice.Slice): 7 usages
BlockBuilder (io.prestosql.spi.block.BlockBuilder): 7 usages
SqlDate (io.prestosql.spi.type.SqlDate): 7 usages
SqlTimestamp (io.prestosql.spi.type.SqlTimestamp): 7 usages
Type (io.prestosql.spi.type.Type): 7 usages
Lists.newArrayList (com.google.common.collect.Lists.newArrayList): 6 usages
Map (java.util.Map): 6 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 5 usages
ArrayType (io.prestosql.spi.type.ArrayType): 5 usages
VarcharType (io.prestosql.spi.type.VarcharType): 5 usages
List (java.util.List): 5 usages
Collectors.toList (java.util.stream.Collectors.toList): 5 usages
BigDecimal (java.math.BigDecimal): 4 usages