Use of io.trino.spi.type.TypeSignatureParameter in project trino by trinodb.
The class SignatureBinder, method checkNoLiteralVariableUsageAcrossTypes.
private static void checkNoLiteralVariableUsageAcrossTypes(TypeSignature typeSignature, Map<String, TypeSignature> existingUsages)
{
    List<TypeSignatureParameter> variables = typeSignature.getParameters().stream()
            .filter(TypeSignatureParameter::isVariable)
            .collect(toList());
    for (TypeSignatureParameter variable : variables) {
        TypeSignature existing = existingUsages.get(variable.getVariable());
        if (existing != null && !existing.equals(typeSignature)) {
            throw new UnsupportedOperationException("Literal parameters may not be shared across different types");
        }
        existingUsages.put(variable.getVariable(), typeSignature);
    }
    for (TypeSignatureParameter parameter : typeSignature.getParameters()) {
        if (parameter.isLongLiteral() || parameter.isVariable()) {
            continue;
        }
        checkNoLiteralVariableUsageAcrossTypes(parameter.getTypeSignatureOrNamedTypeSignature().get(), existingUsages);
    }
}
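For illustration only (not from the Trino sources), a minimal sketch of the kind of signature this guard rejects, assuming the TypeSignatureParameter.typeVariable factory: the same literal variable "x" parameterizing two different base types.

import io.trino.spi.type.TypeSignature;
import io.trino.spi.type.TypeSignatureParameter;

public class LiteralVariableReuseSketch
{
    public static void main(String[] args)
    {
        // char(x) and varchar(x) both parameterize a base type with the literal variable "x".
        TypeSignature charOfX = new TypeSignature("char", TypeSignatureParameter.typeVariable("x"));
        TypeSignature varcharOfX = new TypeSignature("varchar", TypeSignatureParameter.typeVariable("x"));
        // If a declared function signature contained both, the check above would record
        // "x" -> char(x) for the first occurrence and then throw UnsupportedOperationException
        // for the second, because a literal variable may not be shared across different types.
        System.out.println(charOfX + " and " + varcharOfX);
    }
}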
Use of io.trino.spi.type.TypeSignatureParameter in project trino by trinodb.
The class HiveTypeTranslator, method toTypeSignature.
public static TypeSignature toTypeSignature(TypeInfo typeInfo, HiveTimestampPrecision timestampPrecision)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            Type primitiveType = fromPrimitiveType((PrimitiveTypeInfo) typeInfo, timestampPrecision);
            if (primitiveType == null) {
                break;
            }
            return primitiveType.getTypeSignature();
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            return mapType(
                    toTypeSignature(mapTypeInfo.getMapKeyTypeInfo(), timestampPrecision),
                    toTypeSignature(mapTypeInfo.getMapValueTypeInfo(), timestampPrecision));
        case LIST:
            ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
            TypeSignature elementType = toTypeSignature(listTypeInfo.getListElementTypeInfo(), timestampPrecision);
            return arrayType(typeParameter(elementType));
        case STRUCT:
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            List<TypeInfo> fieldTypes = structTypeInfo.getAllStructFieldTypeInfos();
            List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
            if (fieldTypes.size() != fieldNames.size()) {
                throw new TrinoException(HiveErrorCode.HIVE_INVALID_METADATA, format("Invalid Hive struct type: %s", typeInfo));
            }
            return rowType(Streams.zip(
                    // TODO: This is a hack. Trino engine should be able to handle identifiers in a case insensitive way where necessary.
                    fieldNames.stream().map(s -> s.toLowerCase(Locale.US)),
                    fieldTypes.stream().map(type -> toTypeSignature(type, timestampPrecision)),
                    TypeSignatureParameter::namedField)
                    .collect(Collectors.toList()));
        case UNION:
            // Use a row type to represent a union type in Hive for reading
            UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
            List<TypeInfo> unionObjectTypes = unionTypeInfo.getAllUnionObjectTypeInfos();
            ImmutableList.Builder<TypeSignatureParameter> typeSignatures = ImmutableList.builder();
            typeSignatures.add(namedField("tag", TINYINT.getTypeSignature()));
            for (int i = 0; i < unionObjectTypes.size(); i++) {
                TypeInfo unionObjectType = unionObjectTypes.get(i);
                typeSignatures.add(namedField("field" + i, toTypeSignature(unionObjectType, timestampPrecision)));
            }
            return rowType(typeSignatures.build());
    }
    throw new TrinoException(NOT_SUPPORTED, format("Unsupported Hive type: %s", typeInfo));
}
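A hypothetical usage sketch (not from the Trino sources), assuming the trino-hive plugin's HiveTypeTranslator and HiveTimestampPrecision classes plus Hive's TypeInfoUtils are on the classpath; the exact package locations and printed output are assumptions.

import io.trino.plugin.hive.HiveTimestampPrecision;
import io.trino.spi.type.TypeSignature;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

import static io.trino.plugin.hive.HiveTypeTranslator.toTypeSignature;

public class ToTypeSignatureSketch
{
    public static void main(String[] args)
    {
        // Parse a Hive type string into a TypeInfo, then translate it to a Trino TypeSignature.
        TypeInfo hiveType = TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,tags:array<string>>");
        TypeSignature signature = toTypeSignature(hiveType, HiveTimestampPrecision.MILLISECONDS);
        // Expected to print something like row(id integer, tags array(varchar)).
        System.out.println(signature);
    }
}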
Use of io.trino.spi.type.TypeSignatureParameter in project trino by trinodb.
The class HiveTypeTranslator, method toTypeInfo.
public static TypeInfo toTypeInfo(Type type)
{
    requireNonNull(type, "type is null");
    if (BOOLEAN.equals(type)) {
        return HIVE_BOOLEAN.getTypeInfo();
    }
    if (BIGINT.equals(type)) {
        return HIVE_LONG.getTypeInfo();
    }
    if (INTEGER.equals(type)) {
        return HIVE_INT.getTypeInfo();
    }
    if (SMALLINT.equals(type)) {
        return HIVE_SHORT.getTypeInfo();
    }
    if (TINYINT.equals(type)) {
        return HIVE_BYTE.getTypeInfo();
    }
    if (REAL.equals(type)) {
        return HIVE_FLOAT.getTypeInfo();
    }
    if (DOUBLE.equals(type)) {
        return HIVE_DOUBLE.getTypeInfo();
    }
    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        if (varcharType.isUnbounded()) {
            return HIVE_STRING.getTypeInfo();
        }
        if (varcharType.getBoundedLength() <= HiveVarchar.MAX_VARCHAR_LENGTH) {
            return getVarcharTypeInfo(varcharType.getBoundedLength());
        }
        throw new TrinoException(NOT_SUPPORTED, format("Unsupported Hive type: %s. Supported VARCHAR types: VARCHAR(<=%d), VARCHAR.", type, HiveVarchar.MAX_VARCHAR_LENGTH));
    }
    if (type instanceof CharType) {
        CharType charType = (CharType) type;
        int charLength = charType.getLength();
        if (charLength <= HiveChar.MAX_CHAR_LENGTH) {
            return getCharTypeInfo(charLength);
        }
        throw new TrinoException(NOT_SUPPORTED, format("Unsupported Hive type: %s. Supported CHAR types: CHAR(<=%d).", type, HiveChar.MAX_CHAR_LENGTH));
    }
    if (VARBINARY.equals(type)) {
        return HIVE_BINARY.getTypeInfo();
    }
    if (DATE.equals(type)) {
        return HIVE_DATE.getTypeInfo();
    }
    if (type instanceof TimestampType) {
        return HIVE_TIMESTAMP.getTypeInfo();
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
    }
    if (isArrayType(type)) {
        TypeInfo elementType = toTypeInfo(type.getTypeParameters().get(0));
        return getListTypeInfo(elementType);
    }
    if (isMapType(type)) {
        TypeInfo keyType = toTypeInfo(type.getTypeParameters().get(0));
        TypeInfo valueType = toTypeInfo(type.getTypeParameters().get(1));
        return getMapTypeInfo(keyType, valueType);
    }
    if (isRowType(type)) {
        ImmutableList.Builder<String> fieldNames = ImmutableList.builder();
        for (TypeSignatureParameter parameter : type.getTypeSignature().getParameters()) {
            if (!parameter.isNamedTypeSignature()) {
                throw new IllegalArgumentException(format("Expected all parameters to be named type, but got %s", parameter));
            }
            NamedTypeSignature namedTypeSignature = parameter.getNamedTypeSignature();
            if (namedTypeSignature.getName().isEmpty()) {
                throw new TrinoException(NOT_SUPPORTED, format("Anonymous row type is not supported in Hive. Please give each field a name: %s", type));
            }
            fieldNames.add(namedTypeSignature.getName().get());
        }
        return getStructTypeInfo(fieldNames.build(), type.getTypeParameters().stream()
                .map(HiveTypeTranslator::toTypeInfo)
                .collect(toImmutableList()));
    }
    throw new TrinoException(NOT_SUPPORTED, format("Unsupported Hive type: %s", type));
}
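And the reverse direction, sketched with the Trino SPI type factories (usage illustration only, not from the Trino sources; the static import location and the printed Hive type name are assumptions).

import io.trino.spi.type.RowType;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

import java.util.List;

import static io.trino.plugin.hive.HiveTypeTranslator.toTypeInfo;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class ToTypeInfoSketch
{
    public static void main(String[] args)
    {
        // A named Trino row type maps to a Hive struct; every field must carry a name,
        // otherwise toTypeInfo throws the "Anonymous row type" error above.
        RowType rowType = RowType.from(List.of(
                RowType.field("id", INTEGER),
                RowType.field("name", VARCHAR)));
        TypeInfo hiveType = toTypeInfo(rowType);
        // Expected to print struct<id:int,name:string>.
        System.out.println(hiveType.getTypeName());
    }
}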
Use of io.trino.spi.type.TypeSignatureParameter in project trino by trinodb.
The class OrcType, method toOrcType.
private static List<OrcType> toOrcType(int nextFieldTypeIndex, Type type)
{
    if (BOOLEAN.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.BOOLEAN));
    }
    if (TINYINT.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.BYTE));
    }
    if (SMALLINT.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.SHORT));
    }
    if (INTEGER.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.INT));
    }
    if (BIGINT.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.LONG));
    }
    if (DOUBLE.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.DOUBLE));
    }
    if (REAL.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.FLOAT));
    }
    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        if (varcharType.isUnbounded()) {
            return ImmutableList.of(new OrcType(OrcTypeKind.STRING));
        }
        return ImmutableList.of(new OrcType(OrcTypeKind.VARCHAR, varcharType.getBoundedLength()));
    }
    if (type instanceof CharType) {
        return ImmutableList.of(new OrcType(OrcTypeKind.CHAR, ((CharType) type).getLength()));
    }
    if (VARBINARY.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.BINARY));
    }
    if (DATE.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.DATE));
    }
    if (TIMESTAMP_MILLIS.equals(type) || TIMESTAMP_MICROS.equals(type) || TIMESTAMP_NANOS.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.TIMESTAMP));
    }
    if (TIMESTAMP_TZ_MILLIS.equals(type) || TIMESTAMP_TZ_MICROS.equals(type) || TIMESTAMP_TZ_NANOS.equals(type)) {
        return ImmutableList.of(new OrcType(OrcTypeKind.TIMESTAMP_INSTANT));
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return ImmutableList.of(new OrcType(OrcTypeKind.DECIMAL, decimalType.getPrecision(), decimalType.getScale()));
    }
    if (type instanceof ArrayType) {
        return createOrcArrayType(nextFieldTypeIndex, type.getTypeParameters().get(0));
    }
    if (type instanceof MapType) {
        return createOrcMapType(nextFieldTypeIndex, type.getTypeParameters().get(0), type.getTypeParameters().get(1));
    }
    if (type instanceof RowType) {
        List<String> fieldNames = new ArrayList<>();
        for (int i = 0; i < type.getTypeSignature().getParameters().size(); i++) {
            TypeSignatureParameter parameter = type.getTypeSignature().getParameters().get(i);
            fieldNames.add(parameter.getNamedTypeSignature().getName().orElse("field" + i));
        }
        List<Type> fieldTypes = type.getTypeParameters();
        return createOrcRowType(nextFieldTypeIndex, fieldNames, fieldTypes);
    }
    throw new TrinoException(NOT_SUPPORTED, format("Unsupported Hive type: %s", type));
}
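The "field" + i fallback for unnamed row fields can be seen directly with an anonymous Trino row type. A minimal sketch using only the Trino SPI (class and variable names are illustrative):

import io.trino.spi.type.RowType;
import io.trino.spi.type.TypeSignatureParameter;

import java.util.List;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class AnonymousRowFieldNamesSketch
{
    public static void main(String[] args)
    {
        // Anonymous row fields carry no names in their type signature parameters,
        // so the ORC mapping above falls back to field0, field1, ... as struct field names.
        RowType anonymous = RowType.anonymous(List.of(BIGINT, VARCHAR));
        List<TypeSignatureParameter> parameters = anonymous.getTypeSignature().getParameters();
        for (int i = 0; i < parameters.size(); i++) {
            String name = parameters.get(i).getNamedTypeSignature().getName().orElse("field" + i);
            System.out.println(name); // prints field0, then field1
        }
    }
}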
Use of io.trino.spi.type.TypeSignatureParameter in project trino by trinodb.
The class ParquetReader, method readStruct.
private ColumnChunk readStruct(GroupField field)
        throws IOException
{
    List<TypeSignatureParameter> fields = field.getType().getTypeSignature().getParameters();
    Block[] blocks = new Block[fields.size()];
    ColumnChunk columnChunk = null;
    List<Optional<Field>> parameters = field.getChildren();
    for (int i = 0; i < fields.size(); i++) {
        Optional<Field> parameter = parameters.get(i);
        if (parameter.isPresent()) {
            columnChunk = readColumnChunk(parameter.get());
            blocks[i] = columnChunk.getBlock();
        }
    }
    for (int i = 0; i < fields.size(); i++) {
        if (blocks[i] == null) {
            blocks[i] = RunLengthEncodedBlock.create(field.getType().getTypeParameters().get(i), null, columnChunk.getBlock().getPositionCount());
        }
    }
    BooleanList structIsNull = StructColumnReader.calculateStructOffsets(field, columnChunk.getDefinitionLevels(), columnChunk.getRepetitionLevels());
    boolean[] structIsNullVector = structIsNull.toBooleanArray();
    Block rowBlock = RowBlock.fromFieldBlocks(structIsNullVector.length, Optional.of(structIsNullVector), blocks);
    return new ColumnChunk(rowBlock, columnChunk.getDefinitionLevels(), columnChunk.getRepetitionLevels());
}
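A minimal sketch of the null-filling step, using the Trino SPI block APIs with the same RunLengthEncodedBlock.create and RowBlock.fromFieldBlocks calls as above; the three-row struct and its field types are made up for illustration.

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.RowBlock;
import io.trino.spi.block.RunLengthEncodedBlock;

import java.util.Optional;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class MissingStructFieldSketch
{
    public static void main(String[] args)
    {
        int positions = 3;
        // First field (bigint) was read from the file.
        BlockBuilder idBuilder = BIGINT.createBlockBuilder(null, positions);
        for (long value = 1; value <= positions; value++) {
            BIGINT.writeLong(idBuilder, value);
        }
        Block idBlock = idBuilder.build();
        // Second field (varchar) has no column chunk, so substitute an all-null
        // run-length-encoded block, mirroring what readStruct does above.
        Block missingBlock = RunLengthEncodedBlock.create(VARCHAR, null, positions);
        // Assemble the struct; no top-level row is null in this sketch.
        Block rowBlock = RowBlock.fromFieldBlocks(positions, Optional.empty(), new Block[] {idBlock, missingBlock});
        System.out.println(rowBlock.getPositionCount()); // prints 3
    }
}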