use of io.trino.spi.type.VarcharType in project trino by trinodb.
From the class PrometheusRecordCursor, the method writeObject:
private static void writeObject(BlockBuilder builder, Type type, Object obj) {
    if (type instanceof ArrayType) {
        Type elementType = ((ArrayType) type).getElementType();
        BlockBuilder arrayBuilder = builder.beginBlockEntry();
        for (Object item : (List<?>) obj) {
            writeObject(arrayBuilder, elementType, item);
        }
        builder.closeEntry();
    } else if (type instanceof MapType) {
        MapType mapType = (MapType) type;
        BlockBuilder mapBlockBuilder = builder.beginBlockEntry();
        for (Map.Entry<?, ?> entry : ((Map<?, ?>) obj).entrySet()) {
            writeObject(mapBlockBuilder, mapType.getKeyType(), entry.getKey());
            writeObject(mapBlockBuilder, mapType.getValueType(), entry.getValue());
        }
        builder.closeEntry();
    } else {
        if (BOOLEAN.equals(type) || TINYINT.equals(type) || SMALLINT.equals(type) || INTEGER.equals(type) || BIGINT.equals(type) || DOUBLE.equals(type) || type instanceof VarcharType) {
            TypeUtils.writeNativeValue(type, builder, obj);
        }
    }
}
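A minimal sketch of exercising this writer from inside the same class. The harness below is hypothetical, not part of PrometheusRecordCursor, and it assumes TypeUtils.writeNativeValue accepts plain String values for VARCHAR (which is how the Prometheus cursor supplies label values):

// hypothetical harness: build a single array(varchar) position from a plain Java list
ArrayType arrayType = new ArrayType(VarcharType.VARCHAR);
BlockBuilder blockBuilder = arrayType.createBlockBuilder(null, 1);
writeObject(blockBuilder, arrayType, List.of("up", "node_cpu_seconds_total"));
Block block = blockBuilder.build();
// block now contains one position holding two VARCHAR elements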
use of io.trino.spi.type.VarcharType in project trino by trinodb.
From the class Row, the method nativeContainerToOrcValue:
private static Object nativeContainerToOrcValue(Type type, Object nativeValue) {
    if (nativeValue == null) {
        return null;
    }
    if (type instanceof DecimalType) {
        BigInteger unscaledValue;
        DecimalType decimalType = (DecimalType) type;
        if (decimalType.isShort()) {
            unscaledValue = BigInteger.valueOf((long) nativeValue);
        } else {
            unscaledValue = ((Int128) nativeValue).toBigInteger();
        }
        return HiveDecimal.create(unscaledValue, decimalType.getScale());
    }
    if (type.getJavaType() == boolean.class) {
        return nativeValue;
    }
    if (type.getJavaType() == long.class) {
        return nativeValue;
    }
    if (type.getJavaType() == double.class) {
        return nativeValue;
    }
    if (type.getJavaType() == Slice.class) {
        Slice slice = (Slice) nativeValue;
        return type instanceof VarcharType ? slice.toStringUtf8() : slice.getBytes();
    }
    if (isArrayType(type)) {
        Block arrayBlock = (Block) nativeValue;
        Type elementType = type.getTypeParameters().get(0);
        List<Object> list = new ArrayList<>();
        for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
            list.add(nativeContainerToOrcValue(elementType, getNativeContainerValue(elementType, arrayBlock, i)));
        }
        return list;
    }
    if (isMapType(type)) {
        Block mapBlock = (Block) nativeValue;
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> map = new HashMap<>();
        for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
            Object key = nativeContainerToOrcValue(keyType, getNativeContainerValue(keyType, mapBlock, i));
            Object value = nativeContainerToOrcValue(valueType, getNativeContainerValue(valueType, mapBlock, i + 1));
            map.put(key, value);
        }
        return map;
    }
    throw new TrinoException(GENERIC_INTERNAL_ERROR, "Unimplemented type: " + type);
}
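As a rough illustration of the VarcharType branch, called from within the Row class where this private method lives (the values below are made up for illustration):

// hypothetical illustration: a VARCHAR slice becomes a Java String, while other Slice-backed types become byte[]
Object varcharValue = nativeContainerToOrcValue(VarcharType.VARCHAR, Slices.utf8Slice("nation"));       // the String "nation"
Object varbinaryValue = nativeContainerToOrcValue(VarbinaryType.VARBINARY, Slices.utf8Slice("nation")); // a byte[] copy of the same bytes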
use of io.trino.spi.type.VarcharType in project trino by trinodb.
From the class ShardStats, the method doComputeColumnStats:
private static ColumnStats doComputeColumnStats(OrcReader orcReader, long columnId, Type type, TypeManager typeManager) throws IOException {
    OrcColumn column = getColumn(orcReader.getRootColumn().getNestedColumns(), columnId);
    Type columnType = toOrcFileType(type, typeManager);
    OrcRecordReader reader = orcReader.createRecordReader(
            ImmutableList.of(column),
            ImmutableList.of(columnType),
            OrcPredicate.TRUE,
            UTC,
            newSimpleAggregatedMemoryContext(),
            INITIAL_BATCH_SIZE,
            exception -> new TrinoException(RAPTOR_ERROR, "Error reading column: " + columnId, exception));
    if (type.equals(BooleanType.BOOLEAN)) {
        return indexBoolean(reader, columnId);
    }
    if (type.equals(BigintType.BIGINT) || type.equals(DateType.DATE) || type.equals(TimestampType.TIMESTAMP_MILLIS)) {
        return indexLong(type, reader, columnId);
    }
    if (type.equals(DoubleType.DOUBLE)) {
        return indexDouble(reader, columnId);
    }
    if (type instanceof VarcharType) {
        return indexString(type, reader, columnId);
    }
    return null;
}
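The string branch checks instanceof rather than equality because bounded varchar(n) and unbounded VARCHAR are distinct VarcharType instances. A small standalone snippet (illustrative only) shows the difference:

// illustrative only: both are VarcharType instances, but they are not equal to each other
Type bounded = VarcharType.createVarcharType(25);
Type unbounded = VarcharType.VARCHAR;
System.out.println(bounded instanceof VarcharType);   // true
System.out.println(unbounded instanceof VarcharType); // true
System.out.println(bounded.equals(unbounded));        // false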
use of io.trino.spi.type.VarcharType in project trino by trinodb.
From the class H2QueryRunner, the method rowMapper:
private static RowMapper<MaterializedRow> rowMapper(List<? extends Type> types) {
    return (resultSet, context) -> {
        int count = resultSet.getMetaData().getColumnCount();
        checkArgument(types.size() == count, "expected types count (%s) does not match actual column count (%s)", types.size(), count);
        List<Object> row = new ArrayList<>(count);
        for (int i = 1; i <= count; i++) {
            Type type = types.get(i - 1);
            if (BOOLEAN.equals(type)) {
                boolean booleanValue = resultSet.getBoolean(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(booleanValue);
                }
            } else if (TINYINT.equals(type)) {
                byte byteValue = resultSet.getByte(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(byteValue);
                }
            } else if (SMALLINT.equals(type)) {
                short shortValue = resultSet.getShort(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(shortValue);
                }
            } else if (INTEGER.equals(type)) {
                int intValue = resultSet.getInt(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(intValue);
                }
            } else if (BIGINT.equals(type)) {
                long longValue = resultSet.getLong(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(longValue);
                }
            } else if (REAL.equals(type)) {
                float floatValue = resultSet.getFloat(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(floatValue);
                }
            } else if (DOUBLE.equals(type)) {
                double doubleValue = resultSet.getDouble(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(doubleValue);
                }
            } else if (JSON.equals(type)) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(jsonParse(utf8Slice(stringValue)).toStringUtf8());
                }
            } else if (type instanceof VarcharType) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(stringValue);
                }
            } else if (type instanceof CharType) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(padSpaces(stringValue, (CharType) type));
                }
            } else if (VARBINARY.equals(type)) {
                byte[] bytes = resultSet.getBytes(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(bytes);
                }
            } else if (DATE.equals(type)) {
                // resultSet.getDate(i) doesn't work if JVM's zone skipped day being retrieved (e.g. 2011-12-30 and Pacific/Apia zone)
                LocalDate dateValue = resultSet.getObject(i, LocalDate.class);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(dateValue);
                }
            } else if (type instanceof TimeType) {
                // resultSet.getTime(i) doesn't work if JVM's zone had forward offset change during 1970-01-01 (e.g. America/Hermosillo zone)
                LocalTime timeValue = resultSet.getObject(i, LocalTime.class);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(timeValue);
                }
            } else if (TIME_WITH_TIME_ZONE.equals(type)) {
                throw new UnsupportedOperationException("H2 does not support TIME WITH TIME ZONE");
            } else if (type instanceof TimestampType) {
                // resultSet.getTimestamp(i) doesn't work if JVM's zone had forward offset at the date/time being retrieved
                LocalDateTime timestampValue;
                try {
                    timestampValue = resultSet.getObject(i, LocalDateTime.class);
                } catch (SQLException first) {
                    // H2 cannot convert DATE to LocalDateTime in their JDBC driver (even though it can convert to java.sql.Timestamp), we need to do this manually
                    try {
                        timestampValue = Optional.ofNullable(resultSet.getObject(i, LocalDate.class)).map(LocalDate::atStartOfDay).orElse(null);
                    } catch (RuntimeException e) {
                        first.addSuppressed(e);
                        throw first;
                    }
                }
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(timestampValue);
                }
            } else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
                // This means H2 is unsuitable for testing TIMESTAMP WITH TIME ZONE-bearing queries. Those need to be tested manually.
                throw new UnsupportedOperationException();
            } else if (UUID.equals(type)) {
                java.util.UUID value = (java.util.UUID) resultSet.getObject(i);
                row.add(value);
            } else if (UNKNOWN.equals(type)) {
                Object objectValue = resultSet.getObject(i);
                checkState(resultSet.wasNull(), "Expected a null value, but got %s", objectValue);
                row.add(null);
            } else if (type instanceof DecimalType) {
                DecimalType decimalType = (DecimalType) type;
                BigDecimal decimalValue = resultSet.getBigDecimal(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(decimalValue.setScale(decimalType.getScale(), BigDecimal.ROUND_HALF_UP).round(new MathContext(decimalType.getPrecision())));
                }
            } else if (type instanceof ArrayType) {
                Array array = resultSet.getArray(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(newArrayList((Object[]) array.getArray()));
                }
            } else {
                throw new AssertionError("unhandled type: " + type);
            }
        }
        return new MaterializedRow(MaterializedResult.DEFAULT_PRECISION, row);
    };
}
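A sketch of how this mapper can be applied through JDBI (the handle and query below are assumptions for illustration, not taken from H2QueryRunner):

// hypothetical usage: given a JDBI Handle named handle
List<MaterializedRow> rows = handle
        .createQuery("SELECT name, nationkey FROM nation")
        .map(rowMapper(List.of(VarcharType.VARCHAR, BigintType.BIGINT)))
        .list();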
use of io.trino.spi.type.VarcharType in project trino by trinodb.
From the class H2QueryRunner, the method insertRows:
private static void insertRows(ConnectorTableMetadata tableMetadata, Handle handle, RecordSet data) {
    List<ColumnMetadata> columns = tableMetadata.getColumns().stream().filter(columnMetadata -> !columnMetadata.isHidden()).collect(toImmutableList());
    String vars = Joiner.on(',').join(nCopies(columns.size(), "?"));
    String sql = format("INSERT INTO %s VALUES (%s)", tableMetadata.getTable().getTableName(), vars);
    RecordCursor cursor = data.cursor();
    while (true) {
        // insert 1000 rows at a time
        PreparedBatch batch = handle.prepareBatch(sql);
        for (int row = 0; row < 1000; row++) {
            if (!cursor.advanceNextPosition()) {
                if (batch.size() > 0) {
                    batch.execute();
                }
                return;
            }
            for (int column = 0; column < columns.size(); column++) {
                Type type = columns.get(column).getType();
                if (BOOLEAN.equals(type)) {
                    batch.bind(column, cursor.getBoolean(column));
                } else if (BIGINT.equals(type)) {
                    batch.bind(column, cursor.getLong(column));
                } else if (INTEGER.equals(type)) {
                    batch.bind(column, toIntExact(cursor.getLong(column)));
                } else if (DOUBLE.equals(type)) {
                    batch.bind(column, cursor.getDouble(column));
                } else if (type instanceof VarcharType) {
                    batch.bind(column, cursor.getSlice(column).toStringUtf8());
                } else if (DATE.equals(type)) {
                    long millisUtc = TimeUnit.DAYS.toMillis(cursor.getLong(column));
                    // H2 expects dates to be millis at midnight in the JVM timezone
                    long localMillis = DateTimeZone.UTC.getMillisKeepLocal(DateTimeZone.getDefault(), millisUtc);
                    batch.bind(column, new Date(localMillis));
                } else {
                    throw new IllegalArgumentException("Unsupported type " + type);
                }
            }
            batch.add();
        }
        batch.execute();
    }
}
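A minimal way to drive insertRows, sketched here rather than taken from the class itself; it assumes io.trino.spi.connector.InMemoryRecordSet is available and accepts plain String values for VARCHAR columns, and that a ConnectorTableMetadata named tableMetadata and a JDBI Handle named handle already exist:

// hypothetical harness: one varchar column and one bigint column inserted through the method above
RecordSet data = InMemoryRecordSet.builder(List.of(VarcharType.VARCHAR, BigintType.BIGINT))
        .addRow("ALGERIA", 0L)
        .addRow("ARGENTINA", 1L)
        .build();
insertRows(tableMetadata, handle, data);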