Example 11 with DecimalType

Use of io.prestosql.spi.type.DecimalType in project hetu-core by openlookeng.

The class RcFileTester, method preprocessWriteValueOld.

private static Object preprocessWriteValueOld(Format format, Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type.getTypeSignature().getBase().equals(ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueOld(format, elementType, element)).collect(toList());
    }
    if (type.getTypeSignature().getBase().equals(MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type.getTypeSignature().getBase().equals(ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
Also used : SnappyCodec(org.apache.hadoop.io.compress.SnappyCodec) DateTimeZone(org.joda.time.DateTimeZone) PrimitiveObjectInspectorFactory.javaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector) Text(org.apache.hadoop.io.Text) PrimitiveObjectInspectorFactory.javaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector) Writable(org.apache.hadoop.io.Writable) PrimitiveObjectInspectorFactory.javaTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector) Date(org.apache.hadoop.hive.common.type.Date) PrimitiveObjectInspectorFactory.javaDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector) FileSplit(org.apache.hadoop.mapred.FileSplit) RCFileInputFormat(org.apache.hadoop.hive.ql.io.RCFileInputFormat) Files.createTempDirectory(java.nio.file.Files.createTempDirectory) Slices(io.airlift.slice.Slices) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) RowType(io.prestosql.spi.type.RowType) NONE(io.prestosql.rcfile.RcFileTester.Compression.NONE) BigInteger(java.math.BigInteger) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Assert.assertFalse(org.testng.Assert.assertFalse) LazyBinaryArray(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryArray) IntWritable(org.apache.hadoop.io.IntWritable) SERIALIZATION_LIB(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB) PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector) BytesRefArrayWritable(org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable) META_TABLE_COLUMN_TYPES(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES) PrimitiveObjectInspectorFactory.javaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector) LazyMap(org.apache.hadoop.hive.serde2.lazy.LazyMap) PrimitiveObjectInspectorFactory.javaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) LazyArray(org.apache.hadoop.hive.serde2.lazy.LazyArray) Set(java.util.Set) READ_ALL_COLUMNS(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS) TIMESTAMP(io.prestosql.spi.type.TimestampType.TIMESTAMP) Metadata(io.prestosql.metadata.Metadata) UncheckedIOException(java.io.UncheckedIOException) LzoCodec(com.hadoop.compression.lzo.LzoCodec) BooleanWritable(org.apache.hadoop.io.BooleanWritable) RecordReader(org.apache.hadoop.mapred.RecordReader) PrimitiveObjectInspectorFactory.javaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) Lz4Codec(org.apache.hadoop.io.compress.Lz4Codec) Iterables(com.google.common.collect.Iterables) Slice(io.airlift.slice.Slice) MEGABYTE(io.airlift.units.DataSize.Unit.MEGABYTE) StructObject(org.apache.hadoop.hive.serde2.StructObject) Functions.constant(com.google.common.base.Functions.constant) TypeSignatureParameter(io.prestosql.spi.type.TypeSignatureParameter) 
META_TABLE_COLUMNS(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS) Decimals.rescale(io.prestosql.spi.type.Decimals.rescale) ArrayList(java.util.ArrayList) MapType(io.prestosql.spi.type.MapType) Lists(com.google.common.collect.Lists) ALLOW_INSECURE(com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE) VARCHAR(io.prestosql.spi.type.VarcharType.VARCHAR) PrimitiveObjectInspectorFactory.javaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) DOUBLE(io.prestosql.spi.type.DoubleType.DOUBLE) ROW(io.prestosql.spi.type.StandardTypes.ROW) LinkedHashSet(java.util.LinkedHashSet) HadoopNative(io.prestosql.hadoop.HadoopNative) Properties(java.util.Properties) RcFileDecoderUtils.findFirstSyncPosition(io.prestosql.rcfile.RcFileDecoderUtils.findFirstSyncPosition) AbstractIterator(com.google.common.collect.AbstractIterator) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) File(java.io.File) NULL(org.apache.hadoop.mapred.Reporter.NULL) SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) VARBINARY(io.prestosql.spi.type.VarbinaryType.VARBINARY) DateTimeTestingUtils.sqlTimestampOf(io.prestosql.testing.DateTimeTestingUtils.sqlTimestampOf) SIZE_OF_LONG(io.airlift.slice.SizeOf.SIZE_OF_LONG) Deserializer(org.apache.hadoop.hive.serde2.Deserializer) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) FloatWritable(org.apache.hadoop.io.FloatWritable) VarcharType(io.prestosql.spi.type.VarcharType) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) Iterables.transform(com.google.common.collect.Iterables.transform) LazyBinaryColumnarSerDe(org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe) MoreFiles.deleteRecursively(com.google.common.io.MoreFiles.deleteRecursively) DecimalType(io.prestosql.spi.type.DecimalType) GzipCodec(org.apache.hadoop.io.compress.GzipCodec) LongWritable(org.apache.hadoop.io.LongWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) InputFormat(org.apache.hadoop.mapred.InputFormat) Path(org.apache.hadoop.fs.Path) BOOLEAN(io.prestosql.spi.type.BooleanType.BOOLEAN) KILOBYTE(io.airlift.units.DataSize.Unit.KILOBYTE) Type(io.prestosql.spi.type.Type) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) BIGINT(io.prestosql.spi.type.BigintType.BIGINT) SIZE_OF_INT(io.airlift.slice.SizeOf.SIZE_OF_INT) ARRAY(io.prestosql.spi.type.StandardTypes.ARRAY) ImmutableSet(com.google.common.collect.ImmutableSet) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) ImmutableMap(com.google.common.collect.ImmutableMap) BlockBuilder(io.prestosql.spi.block.BlockBuilder) MetadataManager.createTestMetadataManager(io.prestosql.metadata.MetadataManager.createTestMetadataManager) Collections.nCopies(java.util.Collections.nCopies) ArrayType(io.prestosql.spi.type.ArrayType) RCFileOutputFormat(org.apache.hadoop.hive.ql.io.RCFileOutputFormat) TINYINT(io.prestosql.spi.type.TinyintType.TINYINT) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ColumnarSerDe(org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe) 
Objects(java.util.Objects) DataSize(io.airlift.units.DataSize) List(java.util.List) PRESTO_RCFILE_WRITER_VERSION_METADATA_KEY(io.prestosql.rcfile.RcFileWriter.PRESTO_RCFILE_WRITER_VERSION_METADATA_KEY) ZLIB(io.prestosql.rcfile.RcFileTester.Compression.ZLIB) Entry(java.util.Map.Entry) Optional(java.util.Optional) READ_COLUMN_IDS_CONF_STR(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR) SqlTimestamp(io.prestosql.spi.type.SqlTimestamp) LazyPrimitive(org.apache.hadoop.hive.serde2.lazy.LazyPrimitive) Assert.assertNull(org.testng.Assert.assertNull) SESSION(io.prestosql.testing.TestingConnectorSession.SESSION) TextRcFileEncoding(io.prestosql.rcfile.text.TextRcFileEncoding) LazyBinaryMap(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap) PrimitiveObjectInspectorFactory.javaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector) SqlDate(io.prestosql.spi.type.SqlDate) Assert.assertEquals(org.testng.Assert.assertEquals) Decimals(io.prestosql.spi.type.Decimals) HashMap(java.util.HashMap) INTEGER(io.prestosql.spi.type.IntegerType.INTEGER) SqlDecimal(io.prestosql.spi.type.SqlDecimal) DoubleWritable(org.apache.hadoop.io.DoubleWritable) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector) SqlVarbinary(io.prestosql.spi.type.SqlVarbinary) OutputStreamSliceOutput(io.airlift.slice.OutputStreamSliceOutput) COMPRESS_CODEC(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS_CODEC) PRESTO_RCFILE_WRITER_VERSION(io.prestosql.rcfile.RcFileWriter.PRESTO_RCFILE_WRITER_VERSION) ImmutableList(com.google.common.collect.ImmutableList) ByteWritable(org.apache.hadoop.io.ByteWritable) MAP(io.prestosql.spi.type.StandardTypes.MAP) BytesWritable(org.apache.hadoop.io.BytesWritable) DATE(io.prestosql.spi.type.DateType.DATE) REAL(io.prestosql.spi.type.RealType.REAL) Math.toIntExact(java.lang.Math.toIntExact) Block(io.prestosql.spi.block.Block) Iterator(java.util.Iterator) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Iterators.advance(com.google.common.collect.Iterators.advance) Page(io.prestosql.spi.Page) FileInputStream(java.io.FileInputStream) BinaryRcFileEncoding(io.prestosql.rcfile.binary.BinaryRcFileEncoding) BZIP2(io.prestosql.rcfile.RcFileTester.Compression.BZIP2) LZ4(io.prestosql.rcfile.RcFileTester.Compression.LZ4) JobConf(org.apache.hadoop.mapred.JobConf) BZip2Codec(org.apache.hadoop.io.compress.BZip2Codec) Collectors.toList(java.util.stream.Collectors.toList) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) SMALLINT(io.prestosql.spi.type.SmallintType.SMALLINT) Serializer(org.apache.hadoop.hive.serde2.Serializer) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Closeable(java.io.Closeable) Assert.assertTrue(org.testng.Assert.assertTrue) SNAPPY(io.prestosql.rcfile.RcFileTester.Compression.SNAPPY) PrimitiveObjectInspectorFactory.javaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) Collections(java.util.Collections) BYTE(io.airlift.units.DataSize.Unit.BYTE) InputStream(java.io.InputStream)
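The DecimalType branch above converts the test's SqlDecimal into Hive's HiveDecimal. One detail worth knowing: HiveDecimal.create normalizes its input, so the stored scale can differ from the Presto DecimalType scale, which is why the reader side in Example 12 rescales. A minimal standalone sketch, assuming only the Hive common-type library:

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class HiveDecimalScaleSketch {
    public static void main(String[] args) {
        // HiveDecimal normalizes the value it is given; trailing zeros are
        // trimmed, so a value written as DECIMAL(10, 4) can come back with
        // scale 2. The reader in Example 12 compensates with a rescale.
        HiveDecimal hiveDecimal = HiveDecimal.create(new BigDecimal("123.4500"));
        System.out.println(hiveDecimal);         // 123.45
        System.out.println(hiveDecimal.scale()); // 2, not the original 4
    }
}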

Example 12 with DecimalType

Use of io.prestosql.spi.type.DecimalType in project hetu-core by openlookeng.

The class RcFileTester, method decodeRecordReaderValue.

private static Object decodeRecordReaderValue(Format format, Type type, Object inputActualValue) {
    Object actualValue = inputActualValue;
    if (actualValue instanceof LazyPrimitive) {
        actualValue = ((LazyPrimitive<?, ?>) actualValue).getWritableObject();
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritableV2) {
        actualValue = new SqlDate(((DateWritableV2) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // the writable messes with the scale, so rescale the value to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritableV2) {
        long millis = ((TimestampWritableV2) actualValue).getTimestamp().toEpochMilli();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertUTCToLocal(millis);
        }
        actualValue = sqlTimestampOf(millis);
    } else if (actualValue instanceof StructObject) {
        StructObject structObject = (StructObject) actualValue;
        actualValue = decodeRecordReaderStruct(format, type, structObject.getFieldsAsList());
    } else if (actualValue instanceof LazyBinaryArray) {
        actualValue = decodeRecordReaderList(format, type, ((LazyBinaryArray) actualValue).getList());
    } else if (actualValue instanceof LazyBinaryMap) {
        actualValue = decodeRecordReaderMap(format, type, ((LazyBinaryMap) actualValue).getMap());
    } else if (actualValue instanceof LazyArray) {
        actualValue = decodeRecordReaderList(format, type, ((LazyArray) actualValue).getList());
    } else if (actualValue instanceof LazyMap) {
        actualValue = decodeRecordReaderMap(format, type, ((LazyMap) actualValue).getMap());
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(format, type, ((List<?>) actualValue));
    }
    return actualValue;
}
Also used : SqlVarbinary(io.prestosql.spi.type.SqlVarbinary) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LazyBinaryArray(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryArray) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) LazyPrimitive(org.apache.hadoop.hive.serde2.lazy.LazyPrimitive) StructObject(org.apache.hadoop.hive.serde2.StructObject) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) LazyMap(org.apache.hadoop.hive.serde2.lazy.LazyMap) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) BytesWritable(org.apache.hadoop.io.BytesWritable) SqlDecimal(io.prestosql.spi.type.SqlDecimal) Text(org.apache.hadoop.io.Text) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) SqlDate(io.prestosql.spi.type.SqlDate) DecimalType(io.prestosql.spi.type.DecimalType) BigInteger(java.math.BigInteger) LazyArray(org.apache.hadoop.hive.serde2.lazy.LazyArray) LazyBinaryMap(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap)
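The HiveDecimalWritable branch rescales the unscaled value from the writable's scale to the Presto type's scale before wrapping it in SqlDecimal. The rescale itself is just a power-of-ten shift of the unscaled value; below is a minimal sketch of the equivalent arithmetic for the usual toScale >= fromScale case (the SPI's Decimals.rescale is the real entry point):

import java.math.BigInteger;

public class RescaleSketch {
    // Equivalent arithmetic to rescaling an unscaled decimal value from
    // fromScale to toScale when toScale >= fromScale: shifting by
    // 10^(toScale - fromScale) preserves the numeric value.
    static BigInteger rescale(BigInteger unscaled, int fromScale, int toScale) {
        return unscaled.multiply(BigInteger.TEN.pow(toScale - fromScale));
    }

    public static void main(String[] args) {
        // 123.45 at scale 2 has unscaled value 12345; the same number at the
        // Presto type's scale 4 needs unscaled value 1234500.
        System.out.println(rescale(BigInteger.valueOf(12345), 2, 4)); // 1234500
    }
}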

Example 13 with DecimalType

Use of io.prestosql.spi.type.DecimalType in project hetu-core by openlookeng.

The class StatisticsUtils, method setMinMax.

private static void setMinMax(ConnectorSession session, Type type, Block min, Block max, ColumnStatisticsData.Builder builder) {
    if (type.equals(BIGINT) || type.equals(INTEGER) || type.equals(SMALLINT) || type.equals(TINYINT)) {
        OptionalLong minVal = getIntegerValue(session, type, min);
        OptionalLong maxVal = getIntegerValue(session, type, max);
        if (minVal.isPresent() && maxVal.isPresent()) {
            builder.setMin(minVal.getAsLong());
            builder.setMax(maxVal.getAsLong());
        }
    } else if (type.equals(DOUBLE) || type.equals(REAL)) {
        OptionalDouble minVal = getDoubleValue(session, type, min);
        OptionalDouble maxVal = getDoubleValue(session, type, max);
        if (minVal.isPresent() && maxVal.isPresent()) {
            builder.setMin(minVal.getAsDouble());
            builder.setMax(maxVal.getAsDouble());
        }
    } else if (type.equals(DATE)) {
        Optional<LocalDate> minVal = getDateValue(session, type, min);
        Optional<LocalDate> maxVal = getDateValue(session, type, max);
        if (minVal.isPresent() && maxVal.isPresent()) {
            builder.setMin(minVal.get().toEpochDay());
            builder.setMax(maxVal.get().toEpochDay());
        }
    } else if (type.equals(TIMESTAMP)) {
        OptionalLong minVal = getTimestampValue(min);
        OptionalLong maxVal = getTimestampValue(max);
        if (minVal.isPresent() && maxVal.isPresent()) {
            builder.setMin(minVal.getAsLong());
            builder.setMax(maxVal.getAsLong());
        }
    } else if (type instanceof DecimalType) {
        Optional<BigDecimal> minVal = getDecimalValue(session, type, min);
        Optional<BigDecimal> maxVal = getDecimalValue(session, type, max);
        if (minVal.isPresent() && maxVal.isPresent()) {
            builder.setMin(minVal.get().doubleValue());
            builder.setMax(maxVal.get().doubleValue());
        }
    } else {
        throw new IllegalArgumentException("Unexpected type: " + type);
    }
}
Also used : OptionalLong(java.util.OptionalLong) DecimalType(io.prestosql.spi.type.DecimalType) LocalDate(java.time.LocalDate) OptionalDouble(java.util.OptionalDouble) BigDecimal(java.math.BigDecimal)
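For decimals, the min/max statistics are narrowed to double via BigDecimal.doubleValue(), so high-precision values are only stored approximately. A small illustration (the value here is arbitrary):

import java.math.BigDecimal;

public class DecimalStatsNarrowingSketch {
    public static void main(String[] args) {
        // double holds roughly 15-17 significant decimal digits, so a
        // DECIMAL(25, 5) min/max loses its low-order digits in the stats.
        BigDecimal exact = new BigDecimal("12345678901234567890.12345");
        System.out.println(exact.doubleValue()); // 1.2345678901234567E19
    }
}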

Example 14 with DecimalType

Use of io.prestosql.spi.type.DecimalType in project hetu-core by openlookeng.

The class H2QueryRunner, method rowMapper.

private static RowMapper<MaterializedRow> rowMapper(List<? extends Type> types) {
    return new RowMapper<MaterializedRow>() {

        @Override
        public MaterializedRow map(ResultSet resultSet, StatementContext context) throws SQLException {
            int count = resultSet.getMetaData().getColumnCount();
            checkArgument(types.size() == count, "expected types count (%s) does not match actual column count (%s)", types.size(), count);
            List<Object> row = new ArrayList<>(count);
            for (int i = 1; i <= count; i++) {
                Type type = types.get(i - 1);
                if (BOOLEAN.equals(type)) {
                    boolean booleanValue = resultSet.getBoolean(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(booleanValue);
                    }
                } else if (TINYINT.equals(type)) {
                    byte byteValue = resultSet.getByte(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(byteValue);
                    }
                } else if (SMALLINT.equals(type)) {
                    short shortValue = resultSet.getShort(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(shortValue);
                    }
                } else if (INTEGER.equals(type)) {
                    int intValue = resultSet.getInt(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(intValue);
                    }
                } else if (BIGINT.equals(type)) {
                    long longValue = resultSet.getLong(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(longValue);
                    }
                } else if (REAL.equals(type)) {
                    float floatValue = resultSet.getFloat(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(floatValue);
                    }
                } else if (DOUBLE.equals(type)) {
                    double doubleValue = resultSet.getDouble(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(doubleValue);
                    }
                } else if (JSON.equals(type)) {
                    String stringValue = resultSet.getString(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(jsonParse(utf8Slice(stringValue)).toStringUtf8());
                    }
                } else if (isVarcharType(type)) {
                    String stringValue = resultSet.getString(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(stringValue);
                    }
                } else if (isCharType(type)) {
                    String stringValue = resultSet.getString(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(padEnd(stringValue, ((CharType) type).getLength(), ' '));
                    }
                } else if (DATE.equals(type)) {
                    // resultSet.getDate(i) doesn't work if JVM's zone skipped day being retrieved (e.g. 2011-12-30 and Pacific/Apia zone)
                    LocalDate dateValue = resultSet.getObject(i, LocalDate.class);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(dateValue);
                    }
                } else if (TIME.equals(type)) {
                    // resultSet.getTime(i) doesn't work if JVM's zone had forward offset change during 1970-01-01 (e.g. America/Hermosillo zone)
                    LocalTime timeValue = resultSet.getObject(i, LocalTime.class);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(timeValue);
                    }
                } else if (TIME_WITH_TIME_ZONE.equals(type)) {
                    throw new UnsupportedOperationException("H2 does not support TIME WITH TIME ZONE");
                } else if (TIMESTAMP.equals(type)) {
                    // resultSet.getTimestamp(i) doesn't work if JVM's zone had forward offset at the date/time being retrieved
                    LocalDateTime timestampValue;
                    try {
                        timestampValue = resultSet.getObject(i, LocalDateTime.class);
                    } catch (SQLException first) {
                        // H2 cannot convert DATE to LocalDateTime in their JDBC driver (even though it can convert to java.sql.Timestamp), we need to do this manually
                        try {
                            timestampValue = Optional.ofNullable(resultSet.getObject(i, LocalDate.class)).map(LocalDate::atStartOfDay).orElse(null);
                        } catch (RuntimeException e) {
                            first.addSuppressed(e);
                            throw first;
                        }
                    }
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(timestampValue);
                    }
                } else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
                    // This means H2 is unsuitable for testing TIMESTAMP WITH TIME ZONE-bearing queries. Those need to be tested manually.
                    throw new UnsupportedOperationException();
                } else if (UNKNOWN.equals(type)) {
                    Object objectValue = resultSet.getObject(i);
                    checkState(resultSet.wasNull(), "Expected a null value, but got %s", objectValue);
                    row.add(null);
                } else if (type instanceof DecimalType) {
                    DecimalType decimalType = (DecimalType) type;
                    BigDecimal decimalValue = resultSet.getBigDecimal(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(decimalValue.setScale(decimalType.getScale(), BigDecimal.ROUND_HALF_UP).round(new MathContext(decimalType.getPrecision())));
                    }
                } else if (type instanceof ArrayType) {
                    Array array = resultSet.getArray(i);
                    if (resultSet.wasNull()) {
                        row.add(null);
                    } else {
                        row.add(newArrayList((Object[]) array.getArray()));
                    }
                } else {
                    throw new AssertionError("unhandled type: " + type);
                }
            }
            return new MaterializedRow(MaterializedResult.DEFAULT_PRECISION, row);
        }
    };
}
Also used : LocalDateTime(java.time.LocalDateTime) SQLException(java.sql.SQLException) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) ArrayList(java.util.ArrayList) LocalDate(java.time.LocalDate) StatementContext(org.jdbi.v3.core.statement.StatementContext) ArrayType(io.prestosql.spi.type.ArrayType) ResultSet(java.sql.ResultSet) RowMapper(org.jdbi.v3.core.mapper.RowMapper) LocalTime(java.time.LocalTime) BigDecimal(java.math.BigDecimal) MathContext(java.math.MathContext) Array(java.sql.Array) Varchars.isVarcharType(io.prestosql.spi.type.Varchars.isVarcharType) DecimalType(io.prestosql.spi.type.DecimalType) Type(io.prestosql.spi.type.Type) Chars.isCharType(io.prestosql.spi.type.Chars.isCharType) CharType(io.prestosql.spi.type.CharType) VarcharType(io.prestosql.spi.type.VarcharType) MaterializedRow(io.prestosql.testing.MaterializedRow)
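In the DecimalType branch, the retrieved BigDecimal is first forced to the type's scale and then capped at the type's precision. A minimal sketch of that normalization for a hypothetical DECIMAL(5, 2) column; it uses RoundingMode.HALF_UP, the non-deprecated equivalent of the BigDecimal.ROUND_HALF_UP constant in the code above:

import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;

public class DecimalRowNormalizationSketch {
    public static void main(String[] args) {
        BigDecimal raw = new BigDecimal("123.456");
        BigDecimal normalized = raw
                .setScale(2, RoundingMode.HALF_UP)  // 123.46 (type scale 2)
                .round(new MathContext(5));         // 123.46 (precision 5 suffices)
        System.out.println(normalized);
    }
}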

Example 15 with DecimalType

Use of io.prestosql.spi.type.DecimalType in project hetu-core by openlookeng.

The class DecimalSliceStreamReader, method getSlice.

private Slice getSlice(Object value, Type type) {
    if (type instanceof DecimalType) {
        DecimalType actual = (DecimalType) type;
        BigDecimal bigDecimalValue = (BigDecimal) value;
        if (isShortDecimal(type)) {
            return utf8Slice(value.toString());
        } else {
            if (bigDecimalValue.scale() > actual.getScale()) {
                BigInteger unscaledDecimal = rescale(bigDecimalValue.unscaledValue(), bigDecimalValue.scale(), bigDecimalValue.scale());
                Slice decimalSlice = Decimals.encodeUnscaledValue(unscaledDecimal);
                return utf8Slice(Decimals.toString(decimalSlice, actual.getScale()));
            } else {
                BigInteger unscaledDecimal = rescale(bigDecimalValue.unscaledValue(), bigDecimalValue.scale(), actual.getScale());
                Slice decimalSlice = Decimals.encodeUnscaledValue(unscaledDecimal);
                return utf8Slice(Decimals.toString(decimalSlice, actual.getScale()));
            }
        }
    } else {
        return utf8Slice(value.toString());
    }
}
Also used : Slice(io.airlift.slice.Slice) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) DecimalType(io.prestosql.spi.type.DecimalType) BigInteger(java.math.BigInteger) BigDecimal(java.math.BigDecimal)
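For long decimals, getSlice rescales the unscaled value up to the type's scale before formatting it back to text. Note that in the first long-decimal branch the value is rescaled from bigDecimalValue.scale() to the same scale, which leaves the unscaled value unchanged. A worked example of the rescale-then-format path for a value of scale 1 rendered as a hypothetical DECIMAL(20, 3):

import java.math.BigDecimal;
import java.math.BigInteger;

public class DecimalToSliceSketch {
    public static void main(String[] args) {
        BigInteger unscaled = BigInteger.valueOf(15); // 1.5 at scale 1
        // Shift to the target scale 3: 15 * 10^(3 - 1) = 1500.
        BigInteger rescaled = unscaled.multiply(BigInteger.TEN.pow(3 - 1));
        // Formatting the rescaled unscaled value at scale 3 yields "1.500".
        System.out.println(new BigDecimal(rescaled, 3).toPlainString());
    }
}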

Aggregations

DecimalType (io.prestosql.spi.type.DecimalType): 90
VarcharType (io.prestosql.spi.type.VarcharType): 58
CharType (io.prestosql.spi.type.CharType): 45
Type (io.prestosql.spi.type.Type): 43
PrestoException (io.prestosql.spi.PrestoException): 30
Slice (io.airlift.slice.Slice): 22
BigDecimal (java.math.BigDecimal): 21
BigInteger (java.math.BigInteger): 18
Block (io.prestosql.spi.block.Block): 15
ArrayType (io.prestosql.spi.type.ArrayType): 15
TimestampType (io.prestosql.spi.type.TimestampType): 15
VarbinaryType (io.prestosql.spi.type.VarbinaryType): 15
ArrayList (java.util.ArrayList): 14
DateType (io.prestosql.spi.type.DateType): 13
DoubleType (io.prestosql.spi.type.DoubleType): 13
RealType (io.prestosql.spi.type.RealType): 13
RowType (io.prestosql.spi.type.RowType): 12
Slices.utf8Slice (io.airlift.slice.Slices.utf8Slice): 11
BigintType (io.prestosql.spi.type.BigintType): 11
BooleanType (io.prestosql.spi.type.BooleanType): 11