
Example 6 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

The class OrcTester, method preprocessWriteValueHive.

private static Object preprocessWriteValueHive(Type type, Object value) {
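    // Recursively maps a Trino test value to the Java object form that Hive's ORC serializer expects; nulls pass through.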
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS) || type.equals(TIMESTAMP_MICROS) || type.equals(TIMESTAMP_NANOS)) {
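        // All three timestamp precisions arrive as SqlTimestamp; split its UTC LocalDateTime into epoch seconds plus nanos for Hive.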
        LocalDateTime dateTime = ((SqlTimestamp) value).toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }
    if (type.equals(TIMESTAMP_TZ_MILLIS) || type.equals(TIMESTAMP_TZ_MICROS) || type.equals(TIMESTAMP_TZ_NANOS)) {
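        // Hive's Timestamp holds at most nanosecond precision, so round the picosecond remainder to nanos.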
        SqlTimestampWithTimeZone timestamp = (SqlTimestampWithTimeZone) value;
        int nanosOfMilli = roundDiv(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND);
        return Timestamp.ofEpochMilli(timestamp.getEpochMillis(), nanosOfMilli);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueHive(elementType, element)).collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
Also used : LocalDateTime(java.time.LocalDateTime) OrcUtil(org.apache.hadoop.hive.ql.io.orc.OrcUtil) DateTimeZone(org.joda.time.DateTimeZone) NamedTypeSignature(io.trino.spi.type.NamedTypeSignature) PrimitiveObjectInspectorFactory.javaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector) Text(org.apache.hadoop.io.Text) TIMESTAMP_TZ_NANOS(io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS) PrimitiveObjectInspectorFactory.javaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector) Writable(org.apache.hadoop.io.Writable) PrimitiveObjectInspectorFactory.javaTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector) Date(org.apache.hadoop.hive.common.type.Date) NANOSECONDS_PER_MICROSECOND(io.trino.spi.type.Timestamps.NANOSECONDS_PER_MICROSECOND) PrimitiveObjectInspectorFactory.javaDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector) LongTimestampWithTimeZone(io.trino.spi.type.LongTimestampWithTimeZone) NONE(io.trino.orc.metadata.CompressionKind.NONE) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) OrcStruct(org.apache.hadoop.hive.ql.io.orc.OrcStruct) Decimals.rescale(io.trino.spi.type.Decimals.rescale) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) Arrays.asList(java.util.Arrays.asList) Slices(io.airlift.slice.Slices) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) BigInteger(java.math.BigInteger) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ZoneOffset(java.time.ZoneOffset) Assert.assertFalse(org.testng.Assert.assertFalse) IntWritable(org.apache.hadoop.io.IntWritable) SMALLINT(io.trino.spi.type.SmallintType.SMALLINT) PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector) PrimitiveObjectInspectorFactory.javaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector) PICOSECONDS_PER_MICROSECOND(io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND) PrimitiveObjectInspectorFactory.javaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) UTC_KEY(io.trino.spi.type.TimeZoneKey.UTC_KEY) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) DateTimeEncoding.packDateTimeWithZone(io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone) Set(java.util.Set) READ_ALL_COLUMNS(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS) ReaderOptions(org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) TypeSignatureParameter(io.trino.spi.type.TypeSignatureParameter) DATE(io.trino.spi.type.DateType.DATE) REAL(io.trino.spi.type.RealType.REAL) PrimitiveObjectInspectorFactory.javaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) 
JavaHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveCharObjectInspector) Iterables(com.google.common.collect.Iterables) Slice(io.airlift.slice.Slice) TIMESTAMP_MILLIS(io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS) MEGABYTE(io.airlift.units.DataSize.Unit.MEGABYTE) Page(io.trino.spi.Page) SqlDecimal(io.trino.spi.type.SqlDecimal) DataSize.succinctBytes(io.airlift.units.DataSize.succinctBytes) BOOLEAN(io.trino.spi.type.BooleanType.BOOLEAN) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) PrimitiveObjectInspectorFactory.javaTimestampTZObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampTZObjectInspector) ArrayList(java.util.ArrayList) TIMESTAMP_TZ_MILLIS(io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MILLIS) Lists(com.google.common.collect.Lists) PrimitiveObjectInspectorFactory.javaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector) VARBINARY(io.trino.spi.type.VarbinaryType.VARBINARY) BOTH(io.trino.orc.OrcWriteValidation.OrcWriteValidationMode.BOTH) OrcType(io.trino.orc.metadata.OrcType) Int128(io.trino.spi.type.Int128) TIMESTAMP_TZ_MICROS(io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS) Properties(java.util.Properties) MapType(io.trino.spi.type.MapType) AbstractIterator(com.google.common.collect.AbstractIterator) TESTING_TYPE_MANAGER(io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER) StandardTypes(io.trino.spi.type.StandardTypes) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) CompressionKind(io.trino.orc.metadata.CompressionKind) File(java.io.File) ZLIB(io.trino.orc.metadata.CompressionKind.ZLIB) SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) MAX_BATCH_SIZE(io.trino.orc.OrcReader.MAX_BATCH_SIZE) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) TIMESTAMP_MICROS(io.trino.spi.type.TimestampType.TIMESTAMP_MICROS) SqlVarbinary(io.trino.spi.type.SqlVarbinary) VarbinaryType(io.trino.spi.type.VarbinaryType) Varchars.truncateToLength(io.trino.spi.type.Varchars.truncateToLength) CharType(io.trino.spi.type.CharType) TINYINT(io.trino.spi.type.TinyintType.TINYINT) BlockBuilder(io.trino.spi.block.BlockBuilder) FloatWritable(org.apache.hadoop.io.FloatWritable) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) OrcFile(org.apache.hadoop.hive.ql.io.orc.OrcFile) DateTimeTestingUtils.sqlTimestampOf(io.trino.testing.DateTimeTestingUtils.sqlTimestampOf) TestingOrcPredicate.createOrcPredicate(io.trino.orc.TestingOrcPredicate.createOrcPredicate) LongWritable(org.apache.hadoop.io.LongWritable) Timestamps.roundDiv(io.trino.spi.type.Timestamps.roundDiv) OrcSerde(org.apache.hadoop.hive.ql.io.orc.OrcSerde) SqlTimestamp(io.trino.spi.type.SqlTimestamp) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) Block(io.trino.spi.block.Block) OrcConf(org.apache.orc.OrcConf) RecordReader(org.apache.hadoop.hive.ql.io.orc.RecordReader) Path(org.apache.hadoop.fs.Path) Reader(org.apache.hadoop.hive.ql.io.orc.Reader) INTEGER(io.trino.spi.type.IntegerType.INTEGER) 
ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) RowType(io.trino.spi.type.RowType) ImmutableSet(com.google.common.collect.ImmutableSet) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) ImmutableMap(com.google.common.collect.ImmutableMap) SESSION(io.trino.testing.TestingConnectorSession.SESSION) MICROSECONDS_PER_SECOND(io.trino.spi.type.Timestamps.MICROSECONDS_PER_SECOND) ArrayType(io.trino.spi.type.ArrayType) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) SqlDate(io.trino.spi.type.SqlDate) Objects(java.util.Objects) TIMESTAMP_NANOS(io.trino.spi.type.TimestampType.TIMESTAMP_NANOS) SqlTimestampWithTimeZone(io.trino.spi.type.SqlTimestampWithTimeZone) DataSize(io.airlift.units.DataSize) List(java.util.List) BIGINT(io.trino.spi.type.BigintType.BIGINT) Decimals(io.trino.spi.type.Decimals) Entry(java.util.Map.Entry) Optional(java.util.Optional) READ_COLUMN_IDS_CONF_STR(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR) OrcOutputFormat(org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat) DecimalType(io.trino.spi.type.DecimalType) IntStream(java.util.stream.IntStream) ORC_11(io.trino.orc.OrcTester.Format.ORC_11) TypeInfoFactory.getCharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo) Assert.assertNull(org.testng.Assert.assertNull) AggregatedMemoryContext.newSimpleAggregatedMemoryContext(io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext) PrimitiveObjectInspectorFactory.javaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector) PICOSECONDS_PER_NANOSECOND(io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND) Type(io.trino.spi.type.Type) Assert.assertEquals(org.testng.Assert.assertEquals) HashMap(java.util.HashMap) DoubleWritable(org.apache.hadoop.io.DoubleWritable) ZSTD(io.trino.orc.metadata.CompressionKind.ZSTD) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector) VarcharType(io.trino.spi.type.VarcharType) ImmutableList(com.google.common.collect.ImmutableList) Chars.truncateToLengthAndTrimSpaces(io.trino.spi.type.Chars.truncateToLengthAndTrimSpaces) ByteWritable(org.apache.hadoop.io.ByteWritable) RowFieldName(io.trino.spi.type.RowFieldName) BytesWritable(org.apache.hadoop.io.BytesWritable) SNAPPY(io.trino.orc.metadata.CompressionKind.SNAPPY) ObjectInspectorFactory.getStandardMapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardMapObjectInspector) ORC_12(io.trino.orc.OrcTester.Format.ORC_12) Iterator(java.util.Iterator) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Iterators.advance(com.google.common.collect.Iterators.advance) LongTimestamp(io.trino.spi.type.LongTimestamp) NANOSECONDS_PER_MILLISECOND(io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND) ObjectInspectorFactory.getStandardListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardListObjectInspector) JobConf(org.apache.hadoop.mapred.JobConf) Collectors.toList(java.util.stream.Collectors.toList) Serializer(org.apache.hadoop.hive.serde2.Serializer) LZ4(io.trino.orc.metadata.CompressionKind.LZ4) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Assert.assertTrue(org.testng.Assert.assertTrue) 
PrimitiveObjectInspectorFactory.javaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)
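
The TIMESTAMP_* branch above splits a SqlTimestamp's LocalDateTime into whole UTC epoch seconds plus a nanosecond remainder before handing both to Hive's Timestamp.ofEpochSecond. A minimal, JDK-only sketch of that decomposition (the class name and sample value are illustrative, not part of OrcTester):

import java.time.LocalDateTime;
import java.time.ZoneOffset;

public class TimestampSplitSketch {
    public static void main(String[] args) {
        // Same split as the TIMESTAMP_* branch: whole seconds since the epoch,
        // plus the sub-second part expressed in nanoseconds (0..999_999_999).
        LocalDateTime dateTime = LocalDateTime.of(2020, 1, 1, 12, 30, 15, 123_456_789);
        long epochSecond = dateTime.toEpochSecond(ZoneOffset.UTC); // 1577881815
        int nanoOfSecond = dateTime.getNano();                     // 123456789
        System.out.printf("epochSecond=%d nanoOfSecond=%d%n", epochSecond, nanoOfSecond);
    }
}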

Example 7 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

The class TestTimestamp, method testTimestampBackedByInt64.

@Test
public void testTimestampBackedByInt64() throws Exception {
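    // Writes an INT64 Parquet column annotated as TIMESTAMP_MILLIS, then reads it back both as TIMESTAMP_MILLIS and as plain BIGINT.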
    MessageType parquetSchema = parseMessageType("message hive_timestamp { optional int64 test (TIMESTAMP_MILLIS); }");
    ContiguousSet<Long> epochMillisValues = ContiguousSet.create(Range.closedOpen((long) -1_000, (long) 1_000), DiscreteDomain.longs());
    ImmutableList.Builder<SqlTimestamp> timestampsMillis = ImmutableList.builder();
    ImmutableList.Builder<Long> bigints = ImmutableList.builder();
    for (long value : epochMillisValues) {
        timestampsMillis.add(SqlTimestamp.fromMillis(3, value));
        bigints.add(value);
    }
    List<ObjectInspector> objectInspectors = singletonList(javaLongObjectInspector);
    List<String> columnNames = ImmutableList.of("test");
    ConnectorSession session = getHiveSession(new HiveConfig());
    try (ParquetTester.TempFile tempFile = new ParquetTester.TempFile("test", "parquet")) {
        JobConf jobConf = new JobConf();
        jobConf.setEnum(WRITER_VERSION, PARQUET_1_0);
        ParquetTester.writeParquetColumn(
                jobConf,
                tempFile.getFile(),
                CompressionCodecName.SNAPPY,
                ParquetTester.createTableProperties(columnNames, objectInspectors),
                getStandardStructObjectInspector(columnNames, objectInspectors),
                new Iterator<?>[] { epochMillisValues.iterator() },
                Optional.of(parquetSchema),
                false);
        testReadingAs(TIMESTAMP_MILLIS, session, tempFile, columnNames, timestampsMillis.build());
        testReadingAs(BIGINT, session, tempFile, columnNames, bigints.build());
    }
}
Also used : PrimitiveObjectInspectorFactory.javaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) ImmutableList(com.google.common.collect.ImmutableList) SqlTimestamp(io.trino.spi.type.SqlTimestamp) HiveConfig(io.trino.plugin.hive.HiveConfig) ConnectorSession(io.trino.spi.connector.ConnectorSession) JobConf(org.apache.hadoop.mapred.JobConf) MessageTypeParser.parseMessageType(org.apache.parquet.schema.MessageTypeParser.parseMessageType) MessageType(org.apache.parquet.schema.MessageType) Test(org.testng.annotations.Test)
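
Range.closedOpen((long) -1_000, (long) 1_000) above includes the lower bound and excludes the upper one, so the test covers 2000 epoch-millis values, including negative (pre-1970) timestamps. A small standalone sketch of those bounds (assuming only Guava on the classpath; the class name is illustrative):

import com.google.common.collect.ContiguousSet;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.Range;

public class EpochMillisRangeSketch {
    public static void main(String[] args) {
        // closedOpen: lower bound included, upper bound excluded.
        ContiguousSet<Long> epochMillis =
                ContiguousSet.create(Range.closedOpen(-1_000L, 1_000L), DiscreteDomain.longs());
        System.out.println(epochMillis.size());  // 2000
        System.out.println(epochMillis.first()); // -1000
        System.out.println(epochMillis.last());  // 999
    }
}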

Example 8 with SqlTimestamp

Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.

The class AbstractTestHiveFileFormats, method checkPageSource.

protected void checkPageSource(ConnectorPageSource pageSource, List<TestColumn> testColumns, List<Type> types, int rowCount) throws IOException {
    try {
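        // Materialize all rows, then compare each column with type-specific handling:
        // epsilon comparison for float/double, conversions for date, timestamp, char, and binary.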
        MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, types);
        assertEquals(result.getMaterializedRows().size(), rowCount);
        for (MaterializedRow row : result) {
            for (int i = 0, testColumnsSize = testColumns.size(); i < testColumnsSize; i++) {
                TestColumn testColumn = testColumns.get(i);
                Type type = types.get(i);
                Object actualValue = row.getField(i);
                Object expectedValue = testColumn.getExpectedValue();
                if (expectedValue instanceof Slice) {
                    expectedValue = ((Slice) expectedValue).toStringUtf8();
                }
                if (actualValue == null || expectedValue == null) {
                    assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("float")) {
                    assertEquals((float) actualValue, (float) expectedValue, EPSILON, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("double")) {
                    assertEquals((double) actualValue, (double) expectedValue, EPSILON, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("date")) {
                    SqlDate expectedDate = new SqlDate(((Long) expectedValue).intValue());
                    assertEquals(actualValue, expectedDate, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("int") || testColumn.getObjectInspector().getTypeName().equals("smallint") || testColumn.getObjectInspector().getTypeName().equals("tinyint")) {
                    assertEquals(actualValue, expectedValue);
                } else if (testColumn.getObjectInspector().getTypeName().equals("timestamp")) {
                    SqlTimestamp expectedTimestamp = sqlTimestampOf(floorDiv((Long) expectedValue, MICROSECONDS_PER_MILLISECOND));
                    assertEquals(actualValue, expectedTimestamp, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().startsWith("char")) {
                    assertEquals(actualValue, padSpaces((String) expectedValue, (CharType) type), "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getCategory() == Category.PRIMITIVE) {
                    if (expectedValue instanceof Slice) {
                        expectedValue = ((Slice) expectedValue).toStringUtf8();
                    }
                    if (actualValue instanceof Slice) {
                        actualValue = ((Slice) actualValue).toStringUtf8();
                    }
                    if (actualValue instanceof SqlVarbinary) {
                        actualValue = new String(((SqlVarbinary) actualValue).getBytes(), UTF_8);
                    }
                    if (actualValue instanceof SqlDecimal) {
                        actualValue = new BigDecimal(actualValue.toString());
                    }
                    assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                } else {
                    BlockBuilder builder = type.createBlockBuilder(null, 1);
                    type.writeObject(builder, expectedValue);
                    expectedValue = type.getObjectValue(SESSION, builder.build(), 0);
                    assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                }
            }
        }
    } finally {
        pageSource.close();
    }
}
Also used : SqlVarbinary(io.trino.spi.type.SqlVarbinary) SqlTimestamp(io.trino.spi.type.SqlTimestamp) SqlDecimal(io.trino.spi.type.SqlDecimal) BigDecimal(java.math.BigDecimal) DateType(io.trino.spi.type.DateType) CharType.createCharType(io.trino.spi.type.CharType.createCharType) TimestampType(io.trino.spi.type.TimestampType) HiveTestUtils.mapType(io.trino.plugin.hive.HiveTestUtils.mapType) CharType(io.trino.spi.type.CharType) VarcharType.createVarcharType(io.trino.spi.type.VarcharType.createVarcharType) RowType(io.trino.spi.type.RowType) ArrayType(io.trino.spi.type.ArrayType) DecimalType(io.trino.spi.type.DecimalType) Type(io.trino.spi.type.Type) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) VarcharType(io.trino.spi.type.VarcharType) HiveUtil.isStructuralType(io.trino.plugin.hive.util.HiveUtil.isStructuralType) Slice(io.airlift.slice.Slice) SqlDate(io.trino.spi.type.SqlDate) SerDeUtils.serializeObject(io.trino.plugin.hive.util.SerDeUtils.serializeObject) MaterializedResult(io.trino.testing.MaterializedResult) MaterializedRow(io.trino.testing.MaterializedRow) BlockBuilder(io.trino.spi.block.BlockBuilder)
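
The "timestamp" branch above converts the expected value from microseconds to milliseconds with floorDiv rather than plain division, so negative (pre-epoch) values round toward negative infinity instead of toward zero. A JDK-only sketch (the constant mirrors Trino's MICROSECONDS_PER_MILLISECOND; the class name is illustrative):

public class FloorDivSketch {
    private static final long MICROSECONDS_PER_MILLISECOND = 1_000;

    public static void main(String[] args) {
        // Plain division truncates toward zero; floorDiv rounds toward negative infinity.
        System.out.println(-1L / MICROSECONDS_PER_MILLISECOND);                      // 0
        System.out.println(Math.floorDiv(-1L, MICROSECONDS_PER_MILLISECOND));        // -1
        System.out.println(Math.floorDiv(1_234_567L, MICROSECONDS_PER_MILLISECOND)); // 1234
    }
}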

Aggregations

SqlTimestamp (io.trino.spi.type.SqlTimestamp): 8
ImmutableList (com.google.common.collect.ImmutableList): 7
ArrayType (io.trino.spi.type.ArrayType): 6
RowType (io.trino.spi.type.RowType): 6
SqlVarbinary (io.trino.spi.type.SqlVarbinary): 6
Type (io.trino.spi.type.Type): 6
VarcharType (io.trino.spi.type.VarcharType): 6
BlockBuilder (io.trino.spi.block.BlockBuilder): 5
DecimalType (io.trino.spi.type.DecimalType): 5
SqlDecimal (io.trino.spi.type.SqlDecimal): 5
List (java.util.List): 5
ImmutableMap (com.google.common.collect.ImmutableMap): 4
Slice (io.airlift.slice.Slice): 4
MapType (io.trino.spi.type.MapType): 4
SqlDate (io.trino.spi.type.SqlDate): 4
Collectors.toList (java.util.stream.Collectors.toList): 4
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 3
Lists.newArrayList (com.google.common.collect.Lists.newArrayList): 3
ArrayList (java.util.ArrayList): 3
HashMap (java.util.HashMap): 3