
Example 51 with RowType

Use of io.trino.spi.type.RowType in project trino by trinodb.

The class TestTypeConversions, method testConvertTwoLevelsRecordField.

@Test
public void testConvertTwoLevelsRecordField() {
    Field field = Field.of("rec", LegacySQLTypeName.RECORD,
            Field.of("sub_rec", LegacySQLTypeName.RECORD,
                    Field.of("sub_sub_s", LegacySQLTypeName.STRING),
                    Field.of("sub_sub_i", LegacySQLTypeName.INTEGER)),
            Field.of("sub_s", LegacySQLTypeName.STRING),
            Field.of("sub_i", LegacySQLTypeName.INTEGER));
    ColumnMetadata metadata = Conversions.toColumnMetadata(field);
    RowType targetType = RowType.rowType(
            RowType.field("sub_rec", RowType.rowType(
                    RowType.field("sub_sub_s", VarcharType.VARCHAR),
                    RowType.field("sub_sub_i", BigintType.BIGINT))),
            RowType.field("sub_s", VarcharType.VARCHAR),
            RowType.field("sub_i", BigintType.BIGINT));
    assertThat(metadata.getType()).isEqualTo(targetType);
}
Also used : Field(com.google.cloud.bigquery.Field) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) RowType(io.trino.spi.type.RowType) Test(org.testng.annotations.Test)
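
A minimal standalone sketch of the same type-construction API used for targetType above; the class name NestedRowTypeSketch is illustrative only, while RowType.rowType, RowType.field, VarcharType.VARCHAR and BigintType.BIGINT are the factories the test itself relies on.

import io.trino.spi.type.BigintType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.VarcharType;

public class NestedRowTypeSketch {
    public static void main(String[] args) {
        // Inner row: row(sub_sub_s varchar, sub_sub_i bigint)
        RowType inner = RowType.rowType(
                RowType.field("sub_sub_s", VarcharType.VARCHAR),
                RowType.field("sub_sub_i", BigintType.BIGINT));
        // Outer row nests the inner row next to two scalar fields, mirroring the BigQuery RECORD above.
        RowType outer = RowType.rowType(
                RowType.field("sub_rec", inner),
                RowType.field("sub_s", VarcharType.VARCHAR),
                RowType.field("sub_i", BigintType.BIGINT));
        // Prints the SQL display form, e.g. row(sub_rec row(sub_sub_s varchar, sub_sub_i bigint), sub_s varchar, sub_i bigint)
        System.out.println(outer.getDisplayName());
    }
}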

Example 52 with RowType

Use of io.trino.spi.type.RowType in project trino by trinodb.

The class TestTypeConversions, method testConvertOneLevelRecordField.

@Test
public void testConvertOneLevelRecordField() {
    Field field = Field.of("rec", LegacySQLTypeName.RECORD, Field.of("sub_s", LegacySQLTypeName.STRING), Field.of("sub_i", LegacySQLTypeName.INTEGER));
    ColumnMetadata metadata = Conversions.toColumnMetadata(field);
    RowType targetType = RowType.rowType(RowType.field("sub_s", VarcharType.VARCHAR), RowType.field("sub_i", BigintType.BIGINT));
    assertThat(metadata.getType()).isEqualTo(targetType);
}
Also used : Field(com.google.cloud.bigquery.Field) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) RowType(io.trino.spi.type.RowType) Test(org.testng.annotations.Test)
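
Where the test only compares whole types, the fields of a RowType can also be inspected one by one; the following is a hedged sketch (class name is illustrative) using RowType.getFields(), whose elements expose an optional field name and the field's Trino type.

import io.trino.spi.type.BigintType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.VarcharType;

public class RowTypeFieldsSketch {
    public static void main(String[] args) {
        RowType rowType = RowType.rowType(
                RowType.field("sub_s", VarcharType.VARCHAR),
                RowType.field("sub_i", BigintType.BIGINT));
        // Each RowType.Field carries an Optional<String> name and the field's Trino type.
        for (RowType.Field field : rowType.getFields()) {
            System.out.println(field.getName().orElse("<anonymous>") + " : " + field.getType().getDisplayName());
        }
    }
}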

Example 53 with RowType

Use of io.trino.spi.type.RowType in project trino by trinodb.

The class GeoFunctions, method geometryNearestPoints.

@SqlNullable
@Description("Return the closest points on the two geometries")
@ScalarFunction("geometry_nearest_points")
@SqlType("row(" + GEOMETRY_TYPE_NAME + "," + GEOMETRY_TYPE_NAME + ")")
public static Block geometryNearestPoints(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) {
    Geometry leftGeometry = JtsGeometrySerde.deserialize(left);
    Geometry rightGeometry = JtsGeometrySerde.deserialize(right);
    if (leftGeometry.isEmpty() || rightGeometry.isEmpty()) {
        return null;
    }
    RowType rowType = RowType.anonymous(ImmutableList.of(GEOMETRY, GEOMETRY));
    PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(rowType));
    GeometryFactory geometryFactory = leftGeometry.getFactory();
    Coordinate[] nearestCoordinates = DistanceOp.nearestPoints(leftGeometry, rightGeometry);
    BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(0);
    BlockBuilder entryBlockBuilder = blockBuilder.beginBlockEntry();
    GEOMETRY.writeSlice(entryBlockBuilder, JtsGeometrySerde.serialize(geometryFactory.createPoint(nearestCoordinates[0])));
    GEOMETRY.writeSlice(entryBlockBuilder, JtsGeometrySerde.serialize(geometryFactory.createPoint(nearestCoordinates[1])));
    blockBuilder.closeEntry();
    pageBuilder.declarePosition();
    return rowType.getObject(blockBuilder, blockBuilder.getPositionCount() - 1);
}
Also used : OGCGeometry(com.esri.core.geometry.ogc.OGCGeometry) OGCGeometry.createFromEsriGeometry(com.esri.core.geometry.ogc.OGCGeometry.createFromEsriGeometry) GeometryUtils.jsonFromJtsGeometry(io.trino.geospatial.GeometryUtils.jsonFromJtsGeometry) Geometry(org.locationtech.jts.geom.Geometry) MultiVertexGeometry(com.esri.core.geometry.MultiVertexGeometry) GeometryFactory(org.locationtech.jts.geom.GeometryFactory) Coordinate(org.locationtech.jts.geom.Coordinate) RowType(io.trino.spi.type.RowType) PageBuilder(io.trino.spi.PageBuilder) BlockBuilder(io.trino.spi.block.BlockBuilder) SqlNullable(io.trino.spi.function.SqlNullable) ScalarFunction(io.trino.spi.function.ScalarFunction) Description(io.trino.spi.function.Description) SqlType(io.trino.spi.function.SqlType)
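
The row result above is produced by the begin-entry/close-entry pattern on a BlockBuilder. Below is a minimal sketch of that same pattern with assumed BIGINT fields instead of GEOMETRY (class and method names are illustrative): open a row entry, write one value per field, close the entry, then read the row back with RowType.getObject as the function does.

import com.google.common.collect.ImmutableList;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.RowType;
import static io.trino.spi.type.BigintType.BIGINT;

public class RowBlockSketch {
    public static Block buildRow(long first, long second) {
        RowType rowType = RowType.anonymous(ImmutableList.of(BIGINT, BIGINT));
        BlockBuilder blockBuilder = rowType.createBlockBuilder(null, 1);
        // Open one row entry and write each field in declaration order.
        BlockBuilder entryBuilder = blockBuilder.beginBlockEntry();
        BIGINT.writeLong(entryBuilder, first);
        BIGINT.writeLong(entryBuilder, second);
        blockBuilder.closeEntry();
        // getObject returns the row at the given position as a Block over its fields.
        return rowType.getObject(blockBuilder, blockBuilder.getPositionCount() - 1);
    }
}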

Example 54 with RowType

Use of io.trino.spi.type.RowType in project trino by trinodb.

The class ZipFunction, method zip.

@UsedByGeneratedCode
public static Block zip(List<Type> types, Block... arrays) {
    int biggestCardinality = 0;
    for (Block array : arrays) {
        biggestCardinality = Math.max(biggestCardinality, array.getPositionCount());
    }
    RowType rowType = RowType.anonymous(types);
    BlockBuilder outputBuilder = rowType.createBlockBuilder(null, biggestCardinality);
    for (int outputPosition = 0; outputPosition < biggestCardinality; outputPosition++) {
        BlockBuilder rowBuilder = outputBuilder.beginBlockEntry();
        for (int fieldIndex = 0; fieldIndex < arrays.length; fieldIndex++) {
            if (arrays[fieldIndex].getPositionCount() <= outputPosition) {
                rowBuilder.appendNull();
            } else {
                types.get(fieldIndex).appendTo(arrays[fieldIndex], outputPosition, rowBuilder);
            }
        }
        outputBuilder.closeEntry();
    }
    return outputBuilder.build();
}
Also used : Block(io.trino.spi.block.Block) RowType(io.trino.spi.type.RowType) BlockBuilder(io.trino.spi.block.BlockBuilder) UsedByGeneratedCode(io.trino.annotation.UsedByGeneratedCode)
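
A hedged usage sketch of the padding behaviour: because the loop appends a null whenever an input array is shorter than the longest one, zipping a two-element array with a one-element array yields two rows, the second holding a null in its second field. The driver below is illustrative and assumes the ZipFunction class shown above is importable; the Block and BIGINT calls are standard SPI.

import com.google.common.collect.ImmutableList;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import static io.trino.spi.type.BigintType.BIGINT;

public class ZipSketch {
    public static void main(String[] args) {
        BlockBuilder left = BIGINT.createBlockBuilder(null, 2);
        BIGINT.writeLong(left, 1);
        BIGINT.writeLong(left, 2);
        BlockBuilder right = BIGINT.createBlockBuilder(null, 1);
        BIGINT.writeLong(right, 10);
        // Two positions come back: row(1, 10) and row(2, null).
        Block zipped = ZipFunction.zip(ImmutableList.of(BIGINT, BIGINT), left.build(), right.build());
        System.out.println(zipped.getPositionCount());
    }
}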

Example 55 with RowType

Use of io.trino.spi.type.RowType in project trino by trinodb.

The class OrcTester, method preprocessWriteValueHive.

private static Object preprocessWriteValueHive(Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS) || type.equals(TIMESTAMP_MICROS) || type.equals(TIMESTAMP_NANOS)) {
        LocalDateTime dateTime = ((SqlTimestamp) value).toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }
    if (type.equals(TIMESTAMP_TZ_MILLIS) || type.equals(TIMESTAMP_TZ_MICROS) || type.equals(TIMESTAMP_TZ_NANOS)) {
        SqlTimestampWithTimeZone timestamp = (SqlTimestampWithTimeZone) value;
        int nanosOfMilli = roundDiv(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND);
        return Timestamp.ofEpochMilli(timestamp.getEpochMillis(), nanosOfMilli);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueHive(elementType, element)).collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
Also used : LocalDateTime(java.time.LocalDateTime) OrcUtil(org.apache.hadoop.hive.ql.io.orc.OrcUtil) DateTimeZone(org.joda.time.DateTimeZone) NamedTypeSignature(io.trino.spi.type.NamedTypeSignature) PrimitiveObjectInspectorFactory.javaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector) Text(org.apache.hadoop.io.Text) TIMESTAMP_TZ_NANOS(io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS) PrimitiveObjectInspectorFactory.javaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector) Writable(org.apache.hadoop.io.Writable) PrimitiveObjectInspectorFactory.javaTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector) Date(org.apache.hadoop.hive.common.type.Date) NANOSECONDS_PER_MICROSECOND(io.trino.spi.type.Timestamps.NANOSECONDS_PER_MICROSECOND) PrimitiveObjectInspectorFactory.javaDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector) LongTimestampWithTimeZone(io.trino.spi.type.LongTimestampWithTimeZone) NONE(io.trino.orc.metadata.CompressionKind.NONE) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) OrcStruct(org.apache.hadoop.hive.ql.io.orc.OrcStruct) Decimals.rescale(io.trino.spi.type.Decimals.rescale) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) Arrays.asList(java.util.Arrays.asList) Slices(io.airlift.slice.Slices) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) BigInteger(java.math.BigInteger) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ZoneOffset(java.time.ZoneOffset) Assert.assertFalse(org.testng.Assert.assertFalse) IntWritable(org.apache.hadoop.io.IntWritable) SMALLINT(io.trino.spi.type.SmallintType.SMALLINT) PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector) PrimitiveObjectInspectorFactory.javaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector) PICOSECONDS_PER_MICROSECOND(io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND) PrimitiveObjectInspectorFactory.javaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) UTC_KEY(io.trino.spi.type.TimeZoneKey.UTC_KEY) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) DateTimeEncoding.packDateTimeWithZone(io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone) Set(java.util.Set) READ_ALL_COLUMNS(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS) ReaderOptions(org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) TypeSignatureParameter(io.trino.spi.type.TypeSignatureParameter) DATE(io.trino.spi.type.DateType.DATE) REAL(io.trino.spi.type.RealType.REAL) PrimitiveObjectInspectorFactory.javaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) 
JavaHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveCharObjectInspector) Iterables(com.google.common.collect.Iterables) Slice(io.airlift.slice.Slice) TIMESTAMP_MILLIS(io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS) MEGABYTE(io.airlift.units.DataSize.Unit.MEGABYTE) LocalDateTime(java.time.LocalDateTime) Page(io.trino.spi.Page) SqlDecimal(io.trino.spi.type.SqlDecimal) DataSize.succinctBytes(io.airlift.units.DataSize.succinctBytes) BOOLEAN(io.trino.spi.type.BooleanType.BOOLEAN) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) PrimitiveObjectInspectorFactory.javaTimestampTZObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampTZObjectInspector) ArrayList(java.util.ArrayList) TIMESTAMP_TZ_MILLIS(io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MILLIS) Lists(com.google.common.collect.Lists) PrimitiveObjectInspectorFactory.javaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector) VARBINARY(io.trino.spi.type.VarbinaryType.VARBINARY) BOTH(io.trino.orc.OrcWriteValidation.OrcWriteValidationMode.BOTH) OrcType(io.trino.orc.metadata.OrcType) Int128(io.trino.spi.type.Int128) TIMESTAMP_TZ_MICROS(io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS) Properties(java.util.Properties) MapType(io.trino.spi.type.MapType) AbstractIterator(com.google.common.collect.AbstractIterator) TESTING_TYPE_MANAGER(io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER) StandardTypes(io.trino.spi.type.StandardTypes) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) CompressionKind(io.trino.orc.metadata.CompressionKind) File(java.io.File) ZLIB(io.trino.orc.metadata.CompressionKind.ZLIB) SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) MAX_BATCH_SIZE(io.trino.orc.OrcReader.MAX_BATCH_SIZE) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) TIMESTAMP_MICROS(io.trino.spi.type.TimestampType.TIMESTAMP_MICROS) SqlVarbinary(io.trino.spi.type.SqlVarbinary) VarbinaryType(io.trino.spi.type.VarbinaryType) Varchars.truncateToLength(io.trino.spi.type.Varchars.truncateToLength) CharType(io.trino.spi.type.CharType) TINYINT(io.trino.spi.type.TinyintType.TINYINT) BlockBuilder(io.trino.spi.block.BlockBuilder) FloatWritable(org.apache.hadoop.io.FloatWritable) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) OrcFile(org.apache.hadoop.hive.ql.io.orc.OrcFile) DateTimeTestingUtils.sqlTimestampOf(io.trino.testing.DateTimeTestingUtils.sqlTimestampOf) TestingOrcPredicate.createOrcPredicate(io.trino.orc.TestingOrcPredicate.createOrcPredicate) LongWritable(org.apache.hadoop.io.LongWritable) Timestamps.roundDiv(io.trino.spi.type.Timestamps.roundDiv) OrcSerde(org.apache.hadoop.hive.ql.io.orc.OrcSerde) SqlTimestamp(io.trino.spi.type.SqlTimestamp) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) Block(io.trino.spi.block.Block) OrcConf(org.apache.orc.OrcConf) RecordReader(org.apache.hadoop.hive.ql.io.orc.RecordReader) Path(org.apache.hadoop.fs.Path) Reader(org.apache.hadoop.hive.ql.io.orc.Reader) INTEGER(io.trino.spi.type.IntegerType.INTEGER) 
ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) RowType(io.trino.spi.type.RowType) ImmutableSet(com.google.common.collect.ImmutableSet) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) ImmutableMap(com.google.common.collect.ImmutableMap) SESSION(io.trino.testing.TestingConnectorSession.SESSION) MICROSECONDS_PER_SECOND(io.trino.spi.type.Timestamps.MICROSECONDS_PER_SECOND) ArrayType(io.trino.spi.type.ArrayType) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) SqlDate(io.trino.spi.type.SqlDate) Objects(java.util.Objects) TIMESTAMP_NANOS(io.trino.spi.type.TimestampType.TIMESTAMP_NANOS) SqlTimestampWithTimeZone(io.trino.spi.type.SqlTimestampWithTimeZone) DataSize(io.airlift.units.DataSize) List(java.util.List) BIGINT(io.trino.spi.type.BigintType.BIGINT) Decimals(io.trino.spi.type.Decimals) Entry(java.util.Map.Entry) Optional(java.util.Optional) READ_COLUMN_IDS_CONF_STR(org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR) OrcOutputFormat(org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat) DecimalType(io.trino.spi.type.DecimalType) IntStream(java.util.stream.IntStream) ORC_11(io.trino.orc.OrcTester.Format.ORC_11) TypeInfoFactory.getCharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo) Assert.assertNull(org.testng.Assert.assertNull) AggregatedMemoryContext.newSimpleAggregatedMemoryContext(io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext) PrimitiveObjectInspectorFactory.javaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector) PICOSECONDS_PER_NANOSECOND(io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND) Type(io.trino.spi.type.Type) Assert.assertEquals(org.testng.Assert.assertEquals) HashMap(java.util.HashMap) DoubleWritable(org.apache.hadoop.io.DoubleWritable) ZSTD(io.trino.orc.metadata.CompressionKind.ZSTD) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector) VarcharType(io.trino.spi.type.VarcharType) ImmutableList(com.google.common.collect.ImmutableList) Chars.truncateToLengthAndTrimSpaces(io.trino.spi.type.Chars.truncateToLengthAndTrimSpaces) ByteWritable(org.apache.hadoop.io.ByteWritable) RowFieldName(io.trino.spi.type.RowFieldName) BytesWritable(org.apache.hadoop.io.BytesWritable) SNAPPY(io.trino.orc.metadata.CompressionKind.SNAPPY) ObjectInspectorFactory.getStandardMapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardMapObjectInspector) ORC_12(io.trino.orc.OrcTester.Format.ORC_12) Iterator(java.util.Iterator) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Iterators.advance(com.google.common.collect.Iterators.advance) LongTimestamp(io.trino.spi.type.LongTimestamp) NANOSECONDS_PER_MILLISECOND(io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND) ObjectInspectorFactory.getStandardListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardListObjectInspector) JobConf(org.apache.hadoop.mapred.JobConf) Collectors.toList(java.util.stream.Collectors.toList) Serializer(org.apache.hadoop.hive.serde2.Serializer) LZ4(io.trino.orc.metadata.CompressionKind.LZ4) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Assert.assertTrue(org.testng.Assert.assertTrue) 
PrimitiveObjectInspectorFactory.javaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)
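
For the RowType branch above, the recursion maps a Trino row value (a List of field values) onto a Hive struct (also a List), converting each field with its matching type. The standalone helper below restates just that branch; convertRow and its BiFunction parameter are hypothetical stand-ins for the private preprocessWriteValueHive, while RowType.getTypeParameters() is the same call the original uses.

import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;

public class RowValueConversionSketch {
    // Hypothetical stand-in limited to the struct case of preprocessWriteValueHive.
    static List<Object> convertRow(RowType rowType, List<?> fieldValues, BiFunction<Type, Object, Object> convertField) {
        List<Type> fieldTypes = rowType.getTypeParameters();
        List<Object> struct = new ArrayList<>();
        // Convert each field value with its declared field type, preserving field order.
        for (int i = 0; i < fieldValues.size(); i++) {
            struct.add(convertField.apply(fieldTypes.get(i), fieldValues.get(i)));
        }
        return struct;
    }
}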

Aggregations

RowType (io.trino.spi.type.RowType): 90 usages
ArrayType (io.trino.spi.type.ArrayType): 51 usages
MapType (io.trino.spi.type.MapType): 42 usages
Type (io.trino.spi.type.Type): 42 usages
ImmutableList (com.google.common.collect.ImmutableList): 30 usages
VarcharType (io.trino.spi.type.VarcharType): 28 usages
BlockBuilder (io.trino.spi.block.BlockBuilder): 26 usages
DecimalType (io.trino.spi.type.DecimalType): 21 usages
Test (org.testng.annotations.Test): 21 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 20 usages
Block (io.trino.spi.block.Block): 20 usages
List (java.util.List): 20 usages
Map (java.util.Map): 18 usages
ArrayList (java.util.ArrayList): 17 usages
Optional (java.util.Optional): 17 usages
CharType (io.trino.spi.type.CharType): 16 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 15 usages
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 12 usages
TimestampWithTimeZoneType (io.trino.spi.type.TimestampWithTimeZoneType): 12 usages
HashMap (java.util.HashMap): 12 usages