
Example 1 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

Class AbstractTestHive, method assertGetRecords:

protected void assertGetRecords(HiveStorageFormat hiveStorageFormat, ConnectorTableMetadata tableMetadata, HiveSplit hiveSplit, ConnectorPageSource pageSource, List<? extends ColumnHandle> columnHandles) throws IOException {
    try {
        MaterializedResult result = materializeSourceDataStream(newSession(), pageSource, getTypes(columnHandles));
        assertPageSourceType(pageSource, hiveStorageFormat);
        ImmutableMap<String, Integer> columnIndex = indexColumns(tableMetadata);
        long rowNumber = 0;
        long completedBytes = 0;
        for (MaterializedRow row : result) {
            try {
                assertValueTypes(row, tableMetadata.getColumns());
            } catch (RuntimeException e) {
                throw new RuntimeException("row " + rowNumber, e);
            }
            rowNumber++;
            Integer index;
            Object value;
            // STRING
            index = columnIndex.get("t_string");
            value = row.getField(index);
            if (rowNumber % 19 == 0) {
                assertNull(value);
            } else if (rowNumber % 19 == 1) {
                assertEquals(value, "");
            } else {
                assertEquals(value, "test");
            }
            // NUMBERS
            assertEquals(row.getField(columnIndex.get("t_tinyint")), (byte) (1 + rowNumber));
            assertEquals(row.getField(columnIndex.get("t_smallint")), (short) (2 + rowNumber));
            assertEquals(row.getField(columnIndex.get("t_int")), (int) (3 + rowNumber));
            index = columnIndex.get("t_bigint");
            if ((rowNumber % 13) == 0) {
                assertNull(row.getField(index));
            } else {
                assertEquals(row.getField(index), 4 + rowNumber);
            }
            assertEquals((Float) row.getField(columnIndex.get("t_float")), 5.1f + rowNumber, 0.001);
            assertEquals(row.getField(columnIndex.get("t_double")), 6.2 + rowNumber);
            // BOOLEAN
            index = columnIndex.get("t_boolean");
            if ((rowNumber % 3) == 2) {
                assertNull(row.getField(index));
            } else {
                assertEquals(row.getField(index), (rowNumber % 3) != 0);
            }
            // TIMESTAMP
            index = columnIndex.get("t_timestamp");
            if (index != null) {
                if ((rowNumber % 17) == 0) {
                    assertNull(row.getField(index));
                } else {
                    SqlTimestamp expected = sqlTimestampOf(3, 2011, 5, 6, 7, 8, 9, 123);
                    assertEquals(row.getField(index), expected);
                }
            }
            // BINARY
            index = columnIndex.get("t_binary");
            if (index != null) {
                if ((rowNumber % 23) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertEquals(row.getField(index), new SqlVarbinary("test binary".getBytes(UTF_8)));
                }
            }
            // DATE
            index = columnIndex.get("t_date");
            if (index != null) {
                if ((rowNumber % 37) == 0) {
                    assertNull(row.getField(index));
                } else {
                    SqlDate expected = new SqlDate(toIntExact(MILLISECONDS.toDays(new DateTime(2013, 8, 9, 0, 0, 0, UTC).getMillis())));
                    assertEquals(row.getField(index), expected);
                }
            }
            // VARCHAR(50)
            index = columnIndex.get("t_varchar");
            if (index != null) {
                value = row.getField(index);
                if (rowNumber % 39 == 0) {
                    assertNull(value);
                } else if (rowNumber % 39 == 1) {
                    // RCBINARY reads empty VARCHAR as null
                    if (hiveStorageFormat == RCBINARY) {
                        assertNull(value);
                    } else {
                        assertEquals(value, "");
                    }
                } else {
                    assertEquals(value, "test varchar");
                }
            }
            // CHAR(25)
            index = columnIndex.get("t_char");
            if (index != null) {
                value = row.getField(index);
                if ((rowNumber % 41) == 0) {
                    assertNull(value);
                } else {
                    assertEquals(value, (rowNumber % 41) == 1 ? "                         " : "test char                ");
                }
            }
            // MAP<STRING, STRING>
            index = columnIndex.get("t_map");
            if (index != null) {
                if ((rowNumber % 27) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertEquals(row.getField(index), ImmutableMap.of("test key", "test value"));
                }
            }
            // ARRAY<STRING>
            index = columnIndex.get("t_array_string");
            if (index != null) {
                if ((rowNumber % 29) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertEquals(row.getField(index), ImmutableList.of("abc", "xyz", "data"));
                }
            }
            // ARRAY<TIMESTAMP>
            index = columnIndex.get("t_array_timestamp");
            if (index != null) {
                if ((rowNumber % 43) == 0) {
                    assertNull(row.getField(index));
                } else {
                    SqlTimestamp expected = sqlTimestampOf(3, LocalDateTime.of(2011, 5, 6, 7, 8, 9, 123_000_000));
                    assertEquals(row.getField(index), ImmutableList.of(expected));
                }
            }
            // ARRAY<STRUCT<s_string: STRING, s_double:DOUBLE>>
            index = columnIndex.get("t_array_struct");
            if (index != null) {
                if ((rowNumber % 31) == 0) {
                    assertNull(row.getField(index));
                } else {
                    List<Object> expected1 = ImmutableList.of("test abc", 0.1);
                    List<Object> expected2 = ImmutableList.of("test xyz", 0.2);
                    assertEquals(row.getField(index), ImmutableList.of(expected1, expected2));
                }
            }
            // STRUCT<s_string: STRING, s_double:DOUBLE>
            index = columnIndex.get("t_struct");
            if (index != null) {
                if ((rowNumber % 31) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertTrue(row.getField(index) instanceof List);
                    List<?> values = (List<?>) row.getField(index);
                    assertEquals(values.size(), 2);
                    assertEquals(values.get(0), "test abc");
                    assertEquals(values.get(1), 0.1);
                }
            }
            // MAP<INT, ARRAY<STRUCT<s_string: STRING, s_double:DOUBLE>>>
            index = columnIndex.get("t_complex");
            if (index != null) {
                if ((rowNumber % 33) == 0) {
                    assertNull(row.getField(index));
                } else {
                    List<Object> expected1 = ImmutableList.of("test abc", 0.1);
                    List<Object> expected2 = ImmutableList.of("test xyz", 0.2);
                    assertEquals(row.getField(index), ImmutableMap.of(1, ImmutableList.of(expected1, expected2)));
                }
            }
            // NEW COLUMN
            assertNull(row.getField(columnIndex.get("new_column")));
            long newCompletedBytes = pageSource.getCompletedBytes();
            assertTrue(newCompletedBytes >= completedBytes);
            // some formats (e.g., Parquet) over-read the data by a bit
            assertLessThanOrEqual(newCompletedBytes, hiveSplit.getLength() + (100 * 1024));
            completedBytes = newCompletedBytes;
        }
        assertLessThanOrEqual(completedBytes, hiveSplit.getLength() + (100 * 1024));
        assertEquals(rowNumber, 100);
    } finally {
        pageSource.close();
    }
}
Also used : SqlVarbinary(io.trino.spi.type.SqlVarbinary) SqlTimestamp(io.trino.spi.type.SqlTimestamp) LocalDateTime(java.time.LocalDateTime) DateTime(org.joda.time.DateTime) SqlDate(io.trino.spi.type.SqlDate) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) List(java.util.List) MaterializedResult(io.trino.testing.MaterializedResult) MaterializedRow(io.trino.testing.MaterializedRow)
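For context, a minimal sketch (not part of the Trino sources) of how the expected millisecond-precision value asserted above can be constructed; it assumes the two test-helper overloads of io.trino.testing.DateTimeTestingUtils.sqlTimestampOf behave as they do in the snippets on this page.

import static io.trino.testing.DateTimeTestingUtils.sqlTimestampOf;

import io.trino.spi.type.SqlTimestamp;
import java.time.LocalDateTime;

public class SqlTimestampExpectedValueSketch {
    public static void main(String[] args) {
        // 2011-05-06 07:08:09.123 with precision 3 (milliseconds), as asserted above
        SqlTimestamp fromFields = sqlTimestampOf(3, 2011, 5, 6, 7, 8, 9, 123);
        SqlTimestamp fromLocalDateTime = sqlTimestampOf(3, LocalDateTime.of(2011, 5, 6, 7, 8, 9, 123_000_000));
        // Both overloads describe the same instant, so the epoch micros should match
        System.out.println(fromFields.getEpochMicros() == fromLocalDateTime.getEpochMicros());
    }
}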

Example 2 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

Class OrcTester, method writeValue:

private static void writeValue(Type type, BlockBuilder blockBuilder, Object value) {
    if (value == null) {
        blockBuilder.appendNull();
    } else {
        if (BOOLEAN.equals(type)) {
            type.writeBoolean(blockBuilder, (Boolean) value);
        } else if (TINYINT.equals(type) || SMALLINT.equals(type) || INTEGER.equals(type) || BIGINT.equals(type)) {
            type.writeLong(blockBuilder, ((Number) value).longValue());
        } else if (Decimals.isShortDecimal(type)) {
            type.writeLong(blockBuilder, ((SqlDecimal) value).toBigDecimal().unscaledValue().longValue());
        } else if (Decimals.isLongDecimal(type)) {
            type.writeObject(blockBuilder, Int128.valueOf(((SqlDecimal) value).toBigDecimal().unscaledValue()));
        } else if (DOUBLE.equals(type)) {
            type.writeDouble(blockBuilder, ((Number) value).doubleValue());
        } else if (REAL.equals(type)) {
            float floatValue = ((Number) value).floatValue();
            type.writeLong(blockBuilder, Float.floatToIntBits(floatValue));
        } else if (type instanceof VarcharType) {
            Slice slice = truncateToLength(utf8Slice((String) value), type);
            type.writeSlice(blockBuilder, slice);
        } else if (type instanceof CharType) {
            Slice slice = truncateToLengthAndTrimSpaces(utf8Slice((String) value), type);
            type.writeSlice(blockBuilder, slice);
        } else if (VARBINARY.equals(type)) {
            type.writeSlice(blockBuilder, Slices.wrappedBuffer(((SqlVarbinary) value).getBytes()));
        } else if (DATE.equals(type)) {
            long days = ((SqlDate) value).getDays();
            type.writeLong(blockBuilder, days);
        } else if (TIMESTAMP_MILLIS.equals(type)) {
            type.writeLong(blockBuilder, ((SqlTimestamp) value).getEpochMicros());
        } else if (TIMESTAMP_MICROS.equals(type)) {
            long micros = ((SqlTimestamp) value).getEpochMicros();
            type.writeLong(blockBuilder, micros);
        } else if (TIMESTAMP_NANOS.equals(type)) {
            SqlTimestamp ts = (SqlTimestamp) value;
            type.writeObject(blockBuilder, new LongTimestamp(ts.getEpochMicros(), ts.getPicosOfMicros()));
        } else if (TIMESTAMP_TZ_MILLIS.equals(type)) {
            long millis = ((SqlTimestampWithTimeZone) value).getEpochMillis();
            type.writeLong(blockBuilder, packDateTimeWithZone(millis, UTC_KEY));
        } else if (TIMESTAMP_TZ_MICROS.equals(type) || TIMESTAMP_TZ_NANOS.equals(type)) {
            SqlTimestampWithTimeZone ts = (SqlTimestampWithTimeZone) value;
            type.writeObject(blockBuilder, LongTimestampWithTimeZone.fromEpochMillisAndFraction(ts.getEpochMillis(), ts.getPicosOfMilli(), UTC_KEY));
        } else {
            if (type instanceof ArrayType) {
                List<?> array = (List<?>) value;
                Type elementType = type.getTypeParameters().get(0);
                BlockBuilder arrayBlockBuilder = blockBuilder.beginBlockEntry();
                for (Object elementValue : array) {
                    writeValue(elementType, arrayBlockBuilder, elementValue);
                }
                blockBuilder.closeEntry();
            } else if (type instanceof MapType) {
                Map<?, ?> map = (Map<?, ?>) value;
                Type keyType = type.getTypeParameters().get(0);
                Type valueType = type.getTypeParameters().get(1);
                BlockBuilder mapBlockBuilder = blockBuilder.beginBlockEntry();
                for (Entry<?, ?> entry : map.entrySet()) {
                    writeValue(keyType, mapBlockBuilder, entry.getKey());
                    writeValue(valueType, mapBlockBuilder, entry.getValue());
                }
                blockBuilder.closeEntry();
            } else if (type instanceof RowType) {
                List<?> array = (List<?>) value;
                List<Type> fieldTypes = type.getTypeParameters();
                BlockBuilder rowBlockBuilder = blockBuilder.beginBlockEntry();
                for (int fieldId = 0; fieldId < fieldTypes.size(); fieldId++) {
                    Type fieldType = fieldTypes.get(fieldId);
                    writeValue(fieldType, rowBlockBuilder, array.get(fieldId));
                }
                blockBuilder.closeEntry();
            } else {
                throw new IllegalArgumentException("Unsupported type " + type);
            }
        }
    }
}
Also used : LongTimestamp(io.trino.spi.type.LongTimestamp) VarcharType(io.trino.spi.type.VarcharType) SqlVarbinary(io.trino.spi.type.SqlVarbinary) RowType(io.trino.spi.type.RowType) SqlDecimal(io.trino.spi.type.SqlDecimal) SqlTimestamp(io.trino.spi.type.SqlTimestamp) MapType(io.trino.spi.type.MapType) ArrayType(io.trino.spi.type.ArrayType) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) OrcType(io.trino.orc.metadata.OrcType) MapType(io.trino.spi.type.MapType) VarbinaryType(io.trino.spi.type.VarbinaryType) CharType(io.trino.spi.type.CharType) RowType(io.trino.spi.type.RowType) ArrayType(io.trino.spi.type.ArrayType) DecimalType(io.trino.spi.type.DecimalType) Type(io.trino.spi.type.Type) VarcharType(io.trino.spi.type.VarcharType) Entry(java.util.Map.Entry) SqlTimestampWithTimeZone(io.trino.spi.type.SqlTimestampWithTimeZone) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) Slice(io.airlift.slice.Slice) Arrays.asList(java.util.Arrays.asList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) CharType(io.trino.spi.type.CharType) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) BlockBuilder(io.trino.spi.block.BlockBuilder)
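The timestamp branches above split a value between Trino's short and long runtime representations. A minimal sketch of that split, assuming SqlTimestamp.newInstance and the LongTimestamp constructor used in the snippet:

import io.trino.spi.type.LongTimestamp;
import io.trino.spi.type.SqlTimestamp;

public class TimestampRepresentationSketch {
    public static void main(String[] args) {
        // 2011-05-06 07:08:09.123456789 UTC, expressed as epoch micros plus picos-of-micro
        long epochMicros = 1_304_665_689_123_456L;
        int picosOfMicro = 789_000;
        SqlTimestamp nanos = SqlTimestamp.newInstance(9, epochMicros, picosOfMicro);
        // Precision up to 6 fits in a single long of epoch micros (the writeLong calls above)
        long shortEncoding = nanos.getEpochMicros();
        // Precision 7-9 also needs the fractional remainder (the writeObject call above)
        LongTimestamp longEncoding = new LongTimestamp(nanos.getEpochMicros(), nanos.getPicosOfMicros());
        System.out.println(shortEncoding + " micros + " + picosOfMicro + " picos");
    }
}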

Example 3 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

Class RcFileTester, method preprocessWriteValueOld:

private static Object preprocessWriteValueOld(Format format, Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueOld(format, elementType, element)).collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
Also used : SnappyCodec(org.apache.hadoop.io.compress.SnappyCodec) PRESTO_RCFILE_WRITER_VERSION_METADATA_KEY(io.trino.rcfile.RcFileWriter.PRESTO_RCFILE_WRITER_VERSION_METADATA_KEY) DateTimeZone(org.joda.time.DateTimeZone) PrimitiveObjectInspectorFactory.javaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector) Text(org.apache.hadoop.io.Text) PrimitiveObjectInspectorFactory.javaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector) Writable(org.apache.hadoop.io.Writable) PrimitiveObjectInspectorFactory.javaTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector) Date(org.apache.hadoop.hive.common.type.Date) PrimitiveObjectInspectorFactory.javaDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector) Decimals.rescale(io.trino.spi.type.Decimals.rescale) FileSplit(org.apache.hadoop.mapred.FileSplit) RcFileDecoderUtils.findFirstSyncPosition(io.trino.rcfile.RcFileDecoderUtils.findFirstSyncPosition) RCFileInputFormat(org.apache.hadoop.hive.ql.io.RCFileInputFormat) Files.createTempDirectory(java.nio.file.Files.createTempDirectory) Slices(io.airlift.slice.Slices) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) BigInteger(java.math.BigInteger) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Assert.assertFalse(org.testng.Assert.assertFalse) LazyBinaryArray(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryArray) IntWritable(org.apache.hadoop.io.IntWritable) SMALLINT(io.trino.spi.type.SmallintType.SMALLINT) SERIALIZATION_LIB(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB) PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector) BytesRefArrayWritable(org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable) META_TABLE_COLUMN_TYPES(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES) PrimitiveObjectInspectorFactory.javaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector) LazyMap(org.apache.hadoop.hive.serde2.lazy.LazyMap) PrimitiveObjectInspectorFactory.javaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) LazyArray(org.apache.hadoop.hive.serde2.lazy.LazyArray) Set(java.util.Set) READ_ALL_COLUMNS(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS) MICROSECONDS_PER_MILLISECOND(io.trino.type.DateTimes.MICROSECONDS_PER_MILLISECOND) UncheckedIOException(java.io.UncheckedIOException) BooleanWritable(org.apache.hadoop.io.BooleanWritable) RecordReader(org.apache.hadoop.mapred.RecordReader) TypeSignatureParameter(io.trino.spi.type.TypeSignatureParameter) DATE(io.trino.spi.type.DateType.DATE) REAL(io.trino.spi.type.RealType.REAL) PrimitiveObjectInspectorFactory.javaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) Lz4Codec(org.apache.hadoop.io.compress.Lz4Codec) Iterables(com.google.common.collect.Iterables) Slice(io.airlift.slice.Slice) 
TIMESTAMP_MILLIS(io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS) MEGABYTE(io.airlift.units.DataSize.Unit.MEGABYTE) StructObject(org.apache.hadoop.hive.serde2.StructObject) Page(io.trino.spi.Page) SqlDecimal(io.trino.spi.type.SqlDecimal) Functions.constant(com.google.common.base.Functions.constant) BOOLEAN(io.trino.spi.type.BooleanType.BOOLEAN) META_TABLE_COLUMNS(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS) ArrayList(java.util.ArrayList) NONE(io.trino.rcfile.RcFileTester.Compression.NONE) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) Lists(com.google.common.collect.Lists) ALLOW_INSECURE(com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE) BZIP2(io.trino.rcfile.RcFileTester.Compression.BZIP2) PrimitiveObjectInspectorFactory.javaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) VARBINARY(io.trino.spi.type.VarbinaryType.VARBINARY) HadoopNative(io.trino.hadoop.HadoopNative) LinkedHashSet(java.util.LinkedHashSet) Int128(io.trino.spi.type.Int128) Properties(java.util.Properties) MapType(io.trino.spi.type.MapType) AbstractIterator(com.google.common.collect.AbstractIterator) TESTING_TYPE_MANAGER(io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) File(java.io.File) NULL(org.apache.hadoop.mapred.Reporter.NULL) SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) SqlVarbinary(io.trino.spi.type.SqlVarbinary) SIZE_OF_LONG(io.airlift.slice.SizeOf.SIZE_OF_LONG) Deserializer(org.apache.hadoop.hive.serde2.Deserializer) TINYINT(io.trino.spi.type.TinyintType.TINYINT) BlockBuilder(io.trino.spi.block.BlockBuilder) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) FloatWritable(org.apache.hadoop.io.FloatWritable) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) BinaryRcFileEncoding(io.trino.rcfile.binary.BinaryRcFileEncoding) DateTimeTestingUtils.sqlTimestampOf(io.trino.testing.DateTimeTestingUtils.sqlTimestampOf) Iterables.transform(com.google.common.collect.Iterables.transform) LazyBinaryColumnarSerDe(org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe) MoreFiles.deleteRecursively(com.google.common.io.MoreFiles.deleteRecursively) GzipCodec(org.apache.hadoop.io.compress.GzipCodec) LongWritable(org.apache.hadoop.io.LongWritable) SNAPPY(io.trino.rcfile.RcFileTester.Compression.SNAPPY) TextRcFileEncoding(io.trino.rcfile.text.TextRcFileEncoding) SqlTimestamp(io.trino.spi.type.SqlTimestamp) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) Block(io.trino.spi.block.Block) PRESTO_RCFILE_WRITER_VERSION(io.trino.rcfile.RcFileWriter.PRESTO_RCFILE_WRITER_VERSION) InputFormat(org.apache.hadoop.mapred.InputFormat) Path(org.apache.hadoop.fs.Path) KILOBYTE(io.airlift.units.DataSize.Unit.KILOBYTE) INTEGER(io.trino.spi.type.IntegerType.INTEGER) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) RowType(io.trino.spi.type.RowType) SIZE_OF_INT(io.airlift.slice.SizeOf.SIZE_OF_INT) ImmutableSet(com.google.common.collect.ImmutableSet) 
DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) ImmutableMap(com.google.common.collect.ImmutableMap) Collections.nCopies(java.util.Collections.nCopies) RCFileOutputFormat(org.apache.hadoop.hive.ql.io.RCFileOutputFormat) SESSION(io.trino.testing.TestingConnectorSession.SESSION) ArrayType(io.trino.spi.type.ArrayType) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) SqlDate(io.trino.spi.type.SqlDate) ColumnarSerDe(org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe) Objects(java.util.Objects) DataSize(io.airlift.units.DataSize) List(java.util.List) BIGINT(io.trino.spi.type.BigintType.BIGINT) Decimals(io.trino.spi.type.Decimals) Entry(java.util.Map.Entry) LZ4(io.trino.rcfile.RcFileTester.Compression.LZ4) Optional(java.util.Optional) READ_COLUMN_IDS_CONF_STR(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR) DecimalType(io.trino.spi.type.DecimalType) MAP(io.trino.spi.type.StandardTypes.MAP) LazyPrimitive(org.apache.hadoop.hive.serde2.lazy.LazyPrimitive) Assert.assertNull(org.testng.Assert.assertNull) LazyBinaryMap(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap) PrimitiveObjectInspectorFactory.javaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector) Type(io.trino.spi.type.Type) Assert.assertEquals(org.testng.Assert.assertEquals) HashMap(java.util.HashMap) DoubleWritable(org.apache.hadoop.io.DoubleWritable) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector) VarcharType(io.trino.spi.type.VarcharType) OutputStreamSliceOutput(io.airlift.slice.OutputStreamSliceOutput) COMPRESS_CODEC(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS_CODEC) ImmutableList(com.google.common.collect.ImmutableList) ByteWritable(org.apache.hadoop.io.ByteWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Math.toIntExact(java.lang.Math.toIntExact) Iterator(java.util.Iterator) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Iterators.advance(com.google.common.collect.Iterators.advance) FileInputStream(java.io.FileInputStream) JobConf(org.apache.hadoop.mapred.JobConf) BZip2Codec(org.apache.hadoop.io.compress.BZip2Codec) Collectors.toList(java.util.stream.Collectors.toList) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) Serializer(org.apache.hadoop.hive.serde2.Serializer) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Closeable(java.io.Closeable) Assert.assertTrue(org.testng.Assert.assertTrue) PrimitiveObjectInspectorFactory.javaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) Collections(java.util.Collections) InputStream(java.io.InputStream) ZLIB(io.trino.rcfile.RcFileTester.Compression.ZLIB) VarcharType(io.trino.spi.type.VarcharType) HashMap(java.util.HashMap) SqlVarbinary(io.trino.spi.type.SqlVarbinary) ArrayList(java.util.ArrayList) RowType(io.trino.spi.type.RowType) SqlTimestamp(io.trino.spi.type.SqlTimestamp) MapType(io.trino.spi.type.MapType) ArrayType(io.trino.spi.type.ArrayType) MapType(io.trino.spi.type.MapType) RowType(io.trino.spi.type.RowType) ArrayType(io.trino.spi.type.ArrayType) DecimalType(io.trino.spi.type.DecimalType) Type(io.trino.spi.type.Type) 
VarcharType(io.trino.spi.type.VarcharType) DecimalType(io.trino.spi.type.DecimalType) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) StructObject(org.apache.hadoop.hive.serde2.StructObject) Map(java.util.Map) LazyMap(org.apache.hadoop.hive.serde2.lazy.LazyMap) ImmutableMap(com.google.common.collect.ImmutableMap) LazyBinaryMap(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap) HashMap(java.util.HashMap)
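A minimal sketch (not Trino code; the time zone chosen here is hypothetical, while RcFileTester defines its own HIVE_STORAGE_TIME_ZONE constant) of the local-to-UTC shift applied above for the BINARY format before handing the value to Hive's Timestamp:

import org.apache.hadoop.hive.common.type.Timestamp;
import org.joda.time.DateTimeZone;

public class HiveTimestampShiftSketch {
    // hypothetical storage zone for illustration only
    private static final DateTimeZone HIVE_STORAGE_TIME_ZONE = DateTimeZone.forID("America/New_York");

    public static void main(String[] args) {
        long millis = 1_304_665_689_123L; // 2011-05-06 07:08:09.123 UTC
        // Interpret the millis as wall-clock time in the storage zone and shift them to UTC,
        // mirroring the Format.BINARY branch above
        long shifted = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        System.out.println(Timestamp.ofEpochMilli(shifted));
    }
}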

Example 4 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

Class RcFileTester, method writeValue:

private static void writeValue(Type type, BlockBuilder blockBuilder, Object value) {
    if (value == null) {
        blockBuilder.appendNull();
    } else {
        if (BOOLEAN.equals(type)) {
            type.writeBoolean(blockBuilder, (Boolean) value);
        } else if (TINYINT.equals(type)) {
            type.writeLong(blockBuilder, ((Number) value).longValue());
        } else if (SMALLINT.equals(type)) {
            type.writeLong(blockBuilder, ((Number) value).longValue());
        } else if (INTEGER.equals(type)) {
            type.writeLong(blockBuilder, ((Number) value).longValue());
        } else if (BIGINT.equals(type)) {
            type.writeLong(blockBuilder, ((Number) value).longValue());
        } else if (Decimals.isShortDecimal(type)) {
            type.writeLong(blockBuilder, ((SqlDecimal) value).toBigDecimal().unscaledValue().longValue());
        } else if (Decimals.isLongDecimal(type)) {
            type.writeObject(blockBuilder, Int128.valueOf(((SqlDecimal) value).toBigDecimal().unscaledValue()));
        } else if (REAL.equals(type)) {
            type.writeLong(blockBuilder, Float.floatToIntBits((Float) value));
        } else if (DOUBLE.equals(type)) {
            type.writeDouble(blockBuilder, ((Number) value).doubleValue());
        } else if (VARCHAR.equals(type)) {
            type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
        } else if (VARBINARY.equals(type)) {
            type.writeSlice(blockBuilder, Slices.wrappedBuffer(((SqlVarbinary) value).getBytes()));
        } else if (DATE.equals(type)) {
            long days = ((SqlDate) value).getDays();
            type.writeLong(blockBuilder, days);
        } else if (TIMESTAMP_MILLIS.equals(type)) {
            long millis = ((SqlTimestamp) value).getMillis();
            type.writeLong(blockBuilder, millis * MICROSECONDS_PER_MILLISECOND);
        } else {
            if (type instanceof ArrayType) {
                List<?> array = (List<?>) value;
                Type elementType = type.getTypeParameters().get(0);
                BlockBuilder arrayBlockBuilder = blockBuilder.beginBlockEntry();
                for (Object elementValue : array) {
                    writeValue(elementType, arrayBlockBuilder, elementValue);
                }
                blockBuilder.closeEntry();
            } else if (type instanceof MapType) {
                Map<?, ?> map = (Map<?, ?>) value;
                Type keyType = type.getTypeParameters().get(0);
                Type valueType = type.getTypeParameters().get(1);
                BlockBuilder mapBlockBuilder = blockBuilder.beginBlockEntry();
                for (Entry<?, ?> entry : map.entrySet()) {
                    writeValue(keyType, mapBlockBuilder, entry.getKey());
                    writeValue(valueType, mapBlockBuilder, entry.getValue());
                }
                blockBuilder.closeEntry();
            } else if (type instanceof RowType) {
                List<?> array = (List<?>) value;
                List<Type> fieldTypes = type.getTypeParameters();
                BlockBuilder rowBlockBuilder = blockBuilder.beginBlockEntry();
                for (int fieldId = 0; fieldId < fieldTypes.size(); fieldId++) {
                    Type fieldType = fieldTypes.get(fieldId);
                    writeValue(fieldType, rowBlockBuilder, array.get(fieldId));
                }
                blockBuilder.closeEntry();
            } else {
                throw new IllegalArgumentException("Unsupported type " + type);
            }
        }
    }
}
Also used : SqlVarbinary(io.trino.spi.type.SqlVarbinary) RowType(io.trino.spi.type.RowType) SqlDecimal(io.trino.spi.type.SqlDecimal) SqlTimestamp(io.trino.spi.type.SqlTimestamp) MapType(io.trino.spi.type.MapType) ArrayType(io.trino.spi.type.ArrayType) MapType(io.trino.spi.type.MapType) RowType(io.trino.spi.type.RowType) ArrayType(io.trino.spi.type.ArrayType) DecimalType(io.trino.spi.type.DecimalType) Type(io.trino.spi.type.Type) VarcharType(io.trino.spi.type.VarcharType) Entry(java.util.Map.Entry) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) StructObject(org.apache.hadoop.hive.serde2.StructObject) Map(java.util.Map) LazyMap(org.apache.hadoop.hive.serde2.lazy.LazyMap) ImmutableMap(com.google.common.collect.ImmutableMap) LazyBinaryMap(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap) HashMap(java.util.HashMap) BlockBuilder(io.trino.spi.block.BlockBuilder)
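A minimal sketch (standard Trino SPI calls, not part of RcFileTester) of why the SqlTimestamp millis above are scaled by MICROSECONDS_PER_MILLISECOND: a short TIMESTAMP(3) block stores epoch micros.

import static io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS;

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;

public class TimestampMillisBlockSketch {
    private static final long MICROSECONDS_PER_MILLISECOND = 1_000;

    public static void main(String[] args) {
        long millis = 1_304_665_689_123L; // 2011-05-06 07:08:09.123 UTC
        BlockBuilder builder = TIMESTAMP_MILLIS.createBlockBuilder(null, 1);
        // The block stores epoch micros, so milliseconds must be scaled up before writing
        TIMESTAMP_MILLIS.writeLong(builder, millis * MICROSECONDS_PER_MILLISECOND);
        Block block = builder.build();
        System.out.println(TIMESTAMP_MILLIS.getLong(block, 0) == millis * MICROSECONDS_PER_MILLISECOND);
    }
}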

Example 5 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

Class AbstractRowEncoder, method appendColumnValue:

@Override
public void appendColumnValue(Block block, int position) {
    checkArgument(currentColumnIndex < columnHandles.size(), format("currentColumnIndex '%d' is greater than number of columns '%d'", currentColumnIndex, columnHandles.size()));
    Type type = columnHandles.get(currentColumnIndex).getType();
    if (block.isNull(position)) {
        appendNullValue();
    } else if (type == BOOLEAN) {
        appendBoolean(type.getBoolean(block, position));
    } else if (type == BIGINT) {
        appendLong(type.getLong(block, position));
    } else if (type == INTEGER) {
        appendInt(toIntExact(type.getLong(block, position)));
    } else if (type == SMALLINT) {
        appendShort(Shorts.checkedCast(type.getLong(block, position)));
    } else if (type == TINYINT) {
        appendByte(SignedBytes.checkedCast(type.getLong(block, position)));
    } else if (type == DOUBLE) {
        appendDouble(type.getDouble(block, position));
    } else if (type == REAL) {
        appendFloat(intBitsToFloat(toIntExact(type.getLong(block, position))));
    } else if (type instanceof VarcharType) {
        appendString(type.getSlice(block, position).toStringUtf8());
    } else if (type instanceof VarbinaryType) {
        appendByteBuffer(type.getSlice(block, position).toByteBuffer());
    } else if (type == DATE) {
        appendSqlDate((SqlDate) type.getObjectValue(session, block, position));
    } else if (type instanceof TimeType) {
        appendSqlTime((SqlTime) type.getObjectValue(session, block, position));
    } else if (type instanceof TimeWithTimeZoneType) {
        appendSqlTimeWithTimeZone((SqlTimeWithTimeZone) type.getObjectValue(session, block, position));
    } else if (type instanceof TimestampType) {
        appendSqlTimestamp((SqlTimestamp) type.getObjectValue(session, block, position));
    } else if (type instanceof TimestampWithTimeZoneType) {
        appendSqlTimestampWithTimeZone((SqlTimestampWithTimeZone) type.getObjectValue(session, block, position));
    } else if (type instanceof ArrayType) {
        appendArray((List<Object>) type.getObjectValue(session, block, position));
    } else if (type instanceof MapType) {
        appendMap((Map<Object, Object>) type.getObjectValue(session, block, position));
    } else if (type instanceof RowType) {
        appendRow((List<Object>) type.getObjectValue(session, block, position));
    } else {
        throw new UnsupportedOperationException(format("Unsupported type '%s' for column '%s'", type, columnHandles.get(currentColumnIndex).getName()));
    }
    currentColumnIndex++;
}
Also used : VarcharType(io.trino.spi.type.VarcharType) SqlTime(io.trino.spi.type.SqlTime) TimeWithTimeZoneType(io.trino.spi.type.TimeWithTimeZoneType) RowType(io.trino.spi.type.RowType) SqlTimestamp(io.trino.spi.type.SqlTimestamp) MapType(io.trino.spi.type.MapType) TimeType(io.trino.spi.type.TimeType) ArrayType(io.trino.spi.type.ArrayType) TimeType(io.trino.spi.type.TimeType) Type(io.trino.spi.type.Type) TimestampType(io.trino.spi.type.TimestampType) VarcharType(io.trino.spi.type.VarcharType) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) RowType(io.trino.spi.type.RowType) MapType(io.trino.spi.type.MapType) ArrayType(io.trino.spi.type.ArrayType) VarbinaryType(io.trino.spi.type.VarbinaryType) TimeWithTimeZoneType(io.trino.spi.type.TimeWithTimeZoneType) VarbinaryType(io.trino.spi.type.VarbinaryType) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) TimestampType(io.trino.spi.type.TimestampType) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List)
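A minimal sketch (assuming the test session constant io.trino.testing.TestingConnectorSession.SESSION) of why the cast to SqlTimestamp above is safe: getObjectValue on a timestamp type materializes the stored value as a SqlTimestamp.

import static io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS;
import static io.trino.testing.TestingConnectorSession.SESSION;

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.SqlTimestamp;

public class GetObjectValueSketch {
    public static void main(String[] args) {
        BlockBuilder builder = TIMESTAMP_MILLIS.createBlockBuilder(null, 1);
        TIMESTAMP_MILLIS.writeLong(builder, 1_304_665_689_123_000L); // epoch micros
        Block block = builder.build();
        // getObjectValue returns the SPI value object for the type, here a SqlTimestamp
        SqlTimestamp value = (SqlTimestamp) TIMESTAMP_MILLIS.getObjectValue(SESSION, block, 0);
        System.out.println(value.getEpochMicros());
    }
}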

Aggregations

SqlTimestamp (io.trino.spi.type.SqlTimestamp): 8
ImmutableList (com.google.common.collect.ImmutableList): 7
ArrayType (io.trino.spi.type.ArrayType): 6
RowType (io.trino.spi.type.RowType): 6
SqlVarbinary (io.trino.spi.type.SqlVarbinary): 6
Type (io.trino.spi.type.Type): 6
VarcharType (io.trino.spi.type.VarcharType): 6
BlockBuilder (io.trino.spi.block.BlockBuilder): 5
DecimalType (io.trino.spi.type.DecimalType): 5
SqlDecimal (io.trino.spi.type.SqlDecimal): 5
List (java.util.List): 5
ImmutableMap (com.google.common.collect.ImmutableMap): 4
Slice (io.airlift.slice.Slice): 4
MapType (io.trino.spi.type.MapType): 4
SqlDate (io.trino.spi.type.SqlDate): 4
Collectors.toList (java.util.stream.Collectors.toList): 4
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 3
Lists.newArrayList (com.google.common.collect.Lists.newArrayList): 3
ArrayList (java.util.ArrayList): 3
HashMap (java.util.HashMap): 3