Use of io.trino.spi.type.VarcharType in project trino by trinodb.
The class HiveWriteUtils, method getJavaObjectInspector.
public static ObjectInspector getJavaObjectInspector(Type type)
{
    if (type.equals(BOOLEAN)) {
        return javaBooleanObjectInspector;
    }
    if (type.equals(BIGINT)) {
        return javaLongObjectInspector;
    }
    if (type.equals(INTEGER)) {
        return javaIntObjectInspector;
    }
    if (type.equals(SMALLINT)) {
        return javaShortObjectInspector;
    }
    if (type.equals(TINYINT)) {
        return javaByteObjectInspector;
    }
    if (type.equals(REAL)) {
        return javaFloatObjectInspector;
    }
    if (type.equals(DOUBLE)) {
        return javaDoubleObjectInspector;
    }
    if (type instanceof VarcharType) {
        return writableStringObjectInspector;
    }
    if (type instanceof CharType) {
        return writableHiveCharObjectInspector;
    }
    if (type.equals(VARBINARY)) {
        return javaByteArrayObjectInspector;
    }
    if (type.equals(DATE)) {
        return javaDateObjectInspector;
    }
    if (type instanceof TimestampType) {
        return javaTimestampObjectInspector;
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return getPrimitiveJavaObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
    }
    if (isArrayType(type)) {
        return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0)));
    }
    if (isMapType(type)) {
        ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0));
        ObjectInspector valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1));
        return ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector);
    }
    if (isRowType(type)) {
        return ObjectInspectorFactory.getStandardStructObjectInspector(
                type.getTypeSignature().getParameters().stream()
                        .map(parameter -> parameter.getNamedTypeSignature().getName().get())
                        .collect(toImmutableList()),
                type.getTypeParameters().stream()
                        .map(HiveWriteUtils::getJavaObjectInspector)
                        .collect(toImmutableList()));
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
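For orientation, here is a minimal caller sketch. It is not part of the Trino sources; the HiveWriteUtils import path and the printed inspector name are assumptions, but it shows how a nested array(varchar) type is resolved by recursing into the VarcharType branch.

import io.trino.plugin.hive.util.HiveWriteUtils; // assumed package for HiveWriteUtils
import io.trino.spi.type.ArrayType;
import io.trino.spi.type.VarcharType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

public class InspectorExample
{
    public static void main(String[] args)
    {
        // array(varchar) yields a standard list inspector wrapping the string inspector
        ObjectInspector inspector = HiveWriteUtils.getJavaObjectInspector(new ArrayType(VarcharType.VARCHAR));
        System.out.println(inspector.getTypeName()); // expected to print something like "array<string>"
    }
}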
Use of io.trino.spi.type.VarcharType in project trino by trinodb.
The class HiveWriteUtils, method getRowColumnInspector.
public static ObjectInspector getRowColumnInspector(Type type)
{
    if (type.equals(BOOLEAN)) {
        return writableBooleanObjectInspector;
    }
    if (type.equals(BIGINT)) {
        return writableLongObjectInspector;
    }
    if (type.equals(INTEGER)) {
        return writableIntObjectInspector;
    }
    if (type.equals(SMALLINT)) {
        return writableShortObjectInspector;
    }
    if (type.equals(TINYINT)) {
        return writableByteObjectInspector;
    }
    if (type.equals(REAL)) {
        return writableFloatObjectInspector;
    }
    if (type.equals(DOUBLE)) {
        return writableDoubleObjectInspector;
    }
    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        if (varcharType.isUnbounded()) {
            // Values for such columns must be stored as STRING in Hive
            return writableStringObjectInspector;
        }
        if (varcharType.getBoundedLength() <= HiveVarchar.MAX_VARCHAR_LENGTH) {
            // VARCHAR columns with the length less than or equal to 65535 are supported natively by Hive
            return getPrimitiveWritableObjectInspector(getVarcharTypeInfo(varcharType.getBoundedLength()));
        }
    }
    if (type instanceof CharType) {
        CharType charType = (CharType) type;
        int charLength = charType.getLength();
        return getPrimitiveWritableObjectInspector(getCharTypeInfo(charLength));
    }
    if (type.equals(VARBINARY)) {
        return writableBinaryObjectInspector;
    }
    if (type.equals(DATE)) {
        return writableDateObjectInspector;
    }
    if (type instanceof TimestampType) {
        return writableTimestampObjectInspector;
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return getPrimitiveWritableObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
    }
    if (isArrayType(type) || isMapType(type) || isRowType(type)) {
        return getJavaObjectInspector(type);
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
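The VarcharType branch is the interesting one here: an unbounded varchar is stored as Hive STRING, a bounded varchar within Hive's limit becomes a native Hive VARCHAR, and a longer bound falls through to the exception at the end. A minimal sketch of those cases (assuming the method is called directly; variable names are illustrative):

ObjectInspector unbounded = HiveWriteUtils.getRowColumnInspector(VarcharType.VARCHAR);              // Hive STRING inspector
ObjectInspector bounded = HiveWriteUtils.getRowColumnInspector(VarcharType.createVarcharType(100)); // Hive VARCHAR(100) inspector
// A bound above HiveVarchar.MAX_VARCHAR_LENGTH (65535) matches no branch and ends in
// the IllegalArgumentException at the bottom of the method.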
Use of io.trino.spi.type.VarcharType in project trino by trinodb.
The class HiveCoercionPolicy, method canCoerce.
private boolean canCoerce(HiveType fromHiveType, HiveType toHiveType)
{
    Type fromType = typeManager.getType(fromHiveType.getTypeSignature());
    Type toType = typeManager.getType(toHiveType.getTypeSignature());
    if (fromType instanceof VarcharType) {
        return toType instanceof VarcharType || toHiveType.equals(HIVE_BYTE) || toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (toType instanceof VarcharType) {
        return fromHiveType.equals(HIVE_BYTE) || fromHiveType.equals(HIVE_SHORT) || fromHiveType.equals(HIVE_INT) || fromHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_BYTE)) {
        return toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_SHORT)) {
        return toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_INT)) {
        return toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_FLOAT)) {
        return toHiveType.equals(HIVE_DOUBLE) || toType instanceof DecimalType;
    }
    if (fromHiveType.equals(HIVE_DOUBLE)) {
        return toHiveType.equals(HIVE_FLOAT) || toType instanceof DecimalType;
    }
    if (fromType instanceof DecimalType) {
        return toType instanceof DecimalType || toHiveType.equals(HIVE_FLOAT) || toHiveType.equals(HIVE_DOUBLE);
    }
    return canCoerceForList(fromHiveType, toHiveType) || canCoerceForMap(fromHiveType, toHiveType) || canCoerceForStruct(fromHiveType, toHiveType);
}
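The method encodes a widening-only coercion matrix. The sketch below is hypothetical: it assumes a callable entry point on HiveCoercionPolicy that wraps this private method, plus a TypeManager obtained from the connector context; the expected results follow directly from the branches above.

// Hypothetical usage; canCoerce here stands for a public wrapper around the private method above.
static void coercionExamples(TypeManager typeManager)
{
    boolean intToBigint = HiveCoercionPolicy.canCoerce(typeManager, HiveType.HIVE_INT, HiveType.HIVE_LONG);   // true: integers widen
    boolean bigintToInt = HiveCoercionPolicy.canCoerce(typeManager, HiveType.HIVE_LONG, HiveType.HIVE_INT);   // false: narrowing is rejected
    boolean intToString = HiveCoercionPolicy.canCoerce(typeManager, HiveType.HIVE_INT, HiveType.HIVE_STRING); // true: integral types coerce to varchar
}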
Use of io.trino.spi.type.VarcharType in project trino by trinodb.
The class HiveUtil, method getPrefilledColumnValue.
public static NullableValue getPrefilledColumnValue(
        HiveColumnHandle columnHandle,
        HivePartitionKey partitionKey,
        Path path,
        OptionalInt bucketNumber,
        long fileSize,
        long fileModifiedTime,
        String partitionName)
{
    String columnValue;
    if (partitionKey != null) {
        columnValue = partitionKey.getValue();
    } else if (isPathColumnHandle(columnHandle)) {
        columnValue = path.toString();
    } else if (isBucketColumnHandle(columnHandle)) {
        columnValue = String.valueOf(bucketNumber.getAsInt());
    } else if (isFileSizeColumnHandle(columnHandle)) {
        columnValue = String.valueOf(fileSize);
    } else if (isFileModifiedTimeColumnHandle(columnHandle)) {
        columnValue = HIVE_TIMESTAMP_PARSER.print(fileModifiedTime);
    } else if (isPartitionColumnHandle(columnHandle)) {
        columnValue = partitionName;
    } else {
        throw new TrinoException(NOT_SUPPORTED, "unsupported hidden column: " + columnHandle);
    }

    byte[] bytes = columnValue.getBytes(UTF_8);
    String name = columnHandle.getName();
    Type type = columnHandle.getType();
    if (isHiveNull(bytes)) {
        return NullableValue.asNull(type);
    } else if (type.equals(BOOLEAN)) {
        return NullableValue.of(type, booleanPartitionKey(columnValue, name));
    } else if (type.equals(BIGINT)) {
        return NullableValue.of(type, bigintPartitionKey(columnValue, name));
    } else if (type.equals(INTEGER)) {
        return NullableValue.of(type, integerPartitionKey(columnValue, name));
    } else if (type.equals(SMALLINT)) {
        return NullableValue.of(type, smallintPartitionKey(columnValue, name));
    } else if (type.equals(TINYINT)) {
        return NullableValue.of(type, tinyintPartitionKey(columnValue, name));
    } else if (type.equals(REAL)) {
        return NullableValue.of(type, floatPartitionKey(columnValue, name));
    } else if (type.equals(DOUBLE)) {
        return NullableValue.of(type, doublePartitionKey(columnValue, name));
    } else if (type instanceof VarcharType) {
        return NullableValue.of(type, varcharPartitionKey(columnValue, name, type));
    } else if (type instanceof CharType) {
        return NullableValue.of(type, charPartitionKey(columnValue, name, type));
    } else if (type.equals(DATE)) {
        return NullableValue.of(type, datePartitionKey(columnValue, name));
    } else if (type.equals(TIMESTAMP_MILLIS)) {
        return NullableValue.of(type, timestampPartitionKey(columnValue, name));
    } else if (type.equals(TIMESTAMP_TZ_MILLIS)) {
        // used for $file_modified_time
        return NullableValue.of(type, packDateTimeWithZone(floorDiv(timestampPartitionKey(columnValue, name), MICROSECONDS_PER_MILLISECOND), DateTimeZone.getDefault().getID()));
    } else if (isShortDecimal(type)) {
        return NullableValue.of(type, shortDecimalPartitionKey(columnValue, (DecimalType) type, name));
    } else if (isLongDecimal(type)) {
        return NullableValue.of(type, longDecimalPartitionKey(columnValue, (DecimalType) type, name));
    } else if (type.equals(VarbinaryType.VARBINARY)) {
        return NullableValue.of(type, utf8Slice(columnValue));
    }
    throw new TrinoException(NOT_SUPPORTED, format("Unsupported column type %s for prefilled column: %s", type.getDisplayName(), name));
}
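As a rough illustration of the partition-key path (the column handle below is hypothetical and its construction is elided), a varchar partition column takes the VarcharType branch, so the raw partition value is turned into a Slice-backed NullableValue:

// Hypothetical call site; dsColumnHandle stands in for a HiveColumnHandle of varchar type for column "ds".
NullableValue value = getPrefilledColumnValue(
        dsColumnHandle,
        new HivePartitionKey("ds", "2024-01-01"), // a non-null partition key wins, so the remaining arguments are ignored
        null,                                     // path: only consulted for the $path hidden column
        OptionalInt.empty(),                      // bucket number: only for $bucket
        0,                                        // file size: only for $file_size
        0,                                        // file modified time: only for $file_modified_time
        "ds=2024-01-01");                         // partition name: only for $partition
// value carries the Slice produced by varcharPartitionKey("2024-01-01", "ds", type)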
Use of io.trino.spi.type.VarcharType in project trino by trinodb.
The class OrcTester, method preprocessWriteValueHive.
private static Object preprocessWriteValueHive(Type type, Object value)
{
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS) || type.equals(TIMESTAMP_MICROS) || type.equals(TIMESTAMP_NANOS)) {
        LocalDateTime dateTime = ((SqlTimestamp) value).toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }
    if (type.equals(TIMESTAMP_TZ_MILLIS) || type.equals(TIMESTAMP_TZ_MICROS) || type.equals(TIMESTAMP_TZ_NANOS)) {
        SqlTimestampWithTimeZone timestamp = (SqlTimestampWithTimeZone) value;
        int nanosOfMilli = roundDiv(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND);
        return Timestamp.ofEpochMilli(timestamp.getEpochMillis(), nanosOfMilli);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueHive(elementType, element))
                .collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
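The helper is private to OrcTester, so the following lines are only an illustrative sketch of the per-type conversions it performs, written as if it were called from a test in the same class:

Object hiveByte = preprocessWriteValueHive(TINYINT, 7);                            // -> (byte) 7
Object hiveChar = preprocessWriteValueHive(createCharType(5), "abc");              // -> new HiveChar("abc", 5)
Object hiveList = preprocessWriteValueHive(new ArrayType(INTEGER), List.of(1, 2)); // -> [1, 2], elements converted recursively
Object hiveDate = preprocessWriteValueHive(DATE, new SqlDate(19000));              // -> Date.ofEpochDay(19000)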