Example 6 with DATE

Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.

From class H2QueryRunner, method rowMapper:

private static RowMapper<MaterializedRow> rowMapper(List<? extends Type> types) {
    return (resultSet, context) -> {
        int count = resultSet.getMetaData().getColumnCount();
        checkArgument(types.size() == count, "expected types count (%s) does not match actual column count (%s)", types.size(), count);
        List<Object> row = new ArrayList<>(count);
        for (int i = 1; i <= count; i++) {
            Type type = types.get(i - 1);
            if (BOOLEAN.equals(type)) {
                boolean booleanValue = resultSet.getBoolean(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(booleanValue);
                }
            } else if (TINYINT.equals(type)) {
                byte byteValue = resultSet.getByte(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(byteValue);
                }
            } else if (SMALLINT.equals(type)) {
                short shortValue = resultSet.getShort(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(shortValue);
                }
            } else if (INTEGER.equals(type)) {
                int intValue = resultSet.getInt(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(intValue);
                }
            } else if (BIGINT.equals(type)) {
                long longValue = resultSet.getLong(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(longValue);
                }
            } else if (REAL.equals(type)) {
                float floatValue = resultSet.getFloat(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(floatValue);
                }
            } else if (DOUBLE.equals(type)) {
                double doubleValue = resultSet.getDouble(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(doubleValue);
                }
            } else if (JSON.equals(type)) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(jsonParse(utf8Slice(stringValue)).toStringUtf8());
                }
            } else if (type instanceof VarcharType) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(stringValue);
                }
            } else if (type instanceof CharType) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(padSpaces(stringValue, (CharType) type));
                }
            } else if (VARBINARY.equals(type)) {
                byte[] bytes = resultSet.getBytes(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(bytes);
                }
            } else if (DATE.equals(type)) {
                // resultSet.getDate(i) doesn't work if the JVM's zone skipped the day being retrieved (e.g. 2011-12-30 in the Pacific/Apia zone)
                LocalDate dateValue = resultSet.getObject(i, LocalDate.class);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(dateValue);
                }
            } else if (type instanceof TimeType) {
                // resultSet.getTime(i) doesn't work if the JVM's zone had a forward offset change during 1970-01-01 (e.g. the America/Hermosillo zone)
                LocalTime timeValue = resultSet.getObject(i, LocalTime.class);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(timeValue);
                }
            } else if (TIME_WITH_TIME_ZONE.equals(type)) {
                throw new UnsupportedOperationException("H2 does not support TIME WITH TIME ZONE");
            } else if (type instanceof TimestampType) {
                // resultSet.getTimestamp(i) doesn't work if the JVM's zone had a forward offset change at the date/time being retrieved
                LocalDateTime timestampValue;
                try {
                    timestampValue = resultSet.getObject(i, LocalDateTime.class);
                } catch (SQLException first) {
                    // H2 cannot convert DATE to LocalDateTime in its JDBC driver (even though it can convert to java.sql.Timestamp), so we need to do this manually
                    try {
                        timestampValue = Optional.ofNullable(resultSet.getObject(i, LocalDate.class)).map(LocalDate::atStartOfDay).orElse(null);
                    } catch (RuntimeException e) {
                        first.addSuppressed(e);
                        throw first;
                    }
                }
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(timestampValue);
                }
            } else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
                // This means H2 is unsuitable for testing TIMESTAMP WITH TIME ZONE-bearing queries. Those need to be tested manually.
                throw new UnsupportedOperationException();
            } else if (UUID.equals(type)) {
                java.util.UUID value = (java.util.UUID) resultSet.getObject(i);
                row.add(value);
            } else if (UNKNOWN.equals(type)) {
                Object objectValue = resultSet.getObject(i);
                checkState(resultSet.wasNull(), "Expected a null value, but got %s", objectValue);
                row.add(null);
            } else if (type instanceof DecimalType) {
                DecimalType decimalType = (DecimalType) type;
                BigDecimal decimalValue = resultSet.getBigDecimal(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(decimalValue.setScale(decimalType.getScale(), BigDecimal.ROUND_HALF_UP).round(new MathContext(decimalType.getPrecision())));
                }
            } else if (type instanceof ArrayType) {
                Array array = resultSet.getArray(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(newArrayList((Object[]) array.getArray()));
                }
            } else {
                throw new AssertionError("unhandled type: " + type);
            }
        }
        return new MaterializedRow(MaterializedResult.DEFAULT_PRECISION, row);
    };
}
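
A minimal usage sketch (not part of the Trino sources) showing how a mapper like the one above can be wired into Jdbi. The query, column types, and method name here are hypothetical, and the snippet assumes it sits inside H2QueryRunner, since rowMapper is private:

// Map an H2 result set into MaterializedRows using the rowMapper above.
// The type list must line up with the columns selected by the query.
private static List<MaterializedRow> selectOrders(Jdbi jdbi) {
    return jdbi.withHandle(handle -> handle
            .createQuery("SELECT orderkey, orderdate FROM orders")
            .map(rowMapper(ImmutableList.of(BIGINT, DATE)))
            .list());
}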

Example 7 with DATE

Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.

From class H2QueryRunner, method insertRows:

private static void insertRows(ConnectorTableMetadata tableMetadata, Handle handle, RecordSet data) {
    List<ColumnMetadata> columns = tableMetadata.getColumns().stream().filter(columnMetadata -> !columnMetadata.isHidden()).collect(toImmutableList());
    String vars = Joiner.on(',').join(nCopies(columns.size(), "?"));
    String sql = format("INSERT INTO %s VALUES (%s)", tableMetadata.getTable().getTableName(), vars);
    RecordCursor cursor = data.cursor();
    while (true) {
        // insert 1000 rows at a time
        PreparedBatch batch = handle.prepareBatch(sql);
        for (int row = 0; row < 1000; row++) {
            if (!cursor.advanceNextPosition()) {
                if (batch.size() > 0) {
                    batch.execute();
                }
                return;
            }
            for (int column = 0; column < columns.size(); column++) {
                Type type = columns.get(column).getType();
                if (BOOLEAN.equals(type)) {
                    batch.bind(column, cursor.getBoolean(column));
                } else if (BIGINT.equals(type)) {
                    batch.bind(column, cursor.getLong(column));
                } else if (INTEGER.equals(type)) {
                    batch.bind(column, toIntExact(cursor.getLong(column)));
                } else if (DOUBLE.equals(type)) {
                    batch.bind(column, cursor.getDouble(column));
                } else if (type instanceof VarcharType) {
                    batch.bind(column, cursor.getSlice(column).toStringUtf8());
                } else if (DATE.equals(type)) {
                    long millisUtc = TimeUnit.DAYS.toMillis(cursor.getLong(column));
                    // H2 expects dates to be millis at midnight in the JVM timezone
                    long localMillis = DateTimeZone.UTC.getMillisKeepLocal(DateTimeZone.getDefault(), millisUtc);
                    batch.bind(column, new Date(localMillis));
                } else {
                    throw new IllegalArgumentException("Unsupported type " + type);
                }
            }
            batch.add();
        }
        batch.execute();
    }
}
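
In the DATE branch above, cursor.getLong(column) is the date as days since the 1970-01-01 epoch; the Joda-Time call then shifts UTC midnight to the JVM-zone midnight that H2 expects for java.sql.Date. A hedged sketch of an equivalent conversion using java.time (an illustration, not the code Trino actually uses):

// epochDays is what cursor.getLong(column) returns for a DATE column
long epochDays = cursor.getLong(column);
LocalDate localDate = LocalDate.ofEpochDay(epochDays);
// java.sql.Date.valueOf interprets the LocalDate in the JVM default time zone,
// producing the same local-midnight instant as getMillisKeepLocal above
batch.bind(column, java.sql.Date.valueOf(localDate));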

Example 8 with DATE

Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.

From class PostgreSqlClient, method toColumnMapping:

@Override
public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle) {
    String jdbcTypeName = typeHandle.getJdbcTypeName().orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
    Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
    if (mapping.isPresent()) {
        return mapping;
    }
    switch(jdbcTypeName) {
        case "money":
            return Optional.of(moneyColumnMapping());
        case "uuid":
            return Optional.of(uuidColumnMapping());
        case "jsonb":
        case "json":
            return Optional.of(jsonColumnMapping());
        case "timestamptz":
            // PostgreSQL's "timestamp with time zone" is reported as Types.TIMESTAMP rather than Types.TIMESTAMP_WITH_TIMEZONE
            int decimalDigits = typeHandle.getRequiredDecimalDigits();
            return Optional.of(timestampWithTimeZoneColumnMapping(decimalDigits));
        case "hstore":
            return Optional.of(hstoreColumnMapping(session));
    }
    switch(typeHandle.getJdbcType()) {
        case Types.BIT:
            return Optional.of(booleanColumnMapping());
        case Types.SMALLINT:
            return Optional.of(smallintColumnMapping());
        case Types.INTEGER:
            return Optional.of(integerColumnMapping());
        case Types.BIGINT:
            return Optional.of(bigintColumnMapping());
        case Types.REAL:
            return Optional.of(realColumnMapping());
        case Types.DOUBLE:
            return Optional.of(doubleColumnMapping());
        case Types.NUMERIC:
            {
                int columnSize = typeHandle.getRequiredColumnSize();
                int precision;
                int decimalDigits = typeHandle.getDecimalDigits().orElse(0);
                if (getDecimalRounding(session) == ALLOW_OVERFLOW) {
                    if (columnSize == PRECISION_OF_UNSPECIFIED_DECIMAL) {
                        // decimal type with unspecified scale (up to 131072 digits before the decimal point; up to 16383 digits after the decimal point)
                        return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, getDecimalDefaultScale(session)), getDecimalRoundingMode(session)));
                    }
                    precision = columnSize;
                    if (precision > Decimals.MAX_PRECISION) {
                        int scale = min(decimalDigits, getDecimalDefaultScale(session));
                        return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale), getDecimalRoundingMode(session)));
                    }
                }
                // Map decimal(p, -s) (negative scale) to decimal(p+s, 0).
                precision = columnSize + max(-decimalDigits, 0);
                if (columnSize == PRECISION_OF_UNSPECIFIED_DECIMAL || precision > Decimals.MAX_PRECISION) {
                    break;
                }
                return Optional.of(decimalColumnMapping(createDecimalType(precision, max(decimalDigits, 0)), UNNECESSARY));
            }
        case Types.CHAR:
            return Optional.of(charColumnMapping(typeHandle.getRequiredColumnSize()));
        case Types.VARCHAR:
            if (!jdbcTypeName.equals("varchar")) {
                // This can be e.g. an ENUM
                return Optional.of(typedVarcharColumnMapping(jdbcTypeName));
            }
            return Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize()));
        case Types.BINARY:
            return Optional.of(varbinaryColumnMapping());
        case Types.DATE:
            return Optional.of(ColumnMapping.longMapping(DATE, (resultSet, index) -> LocalDate.parse(resultSet.getString(index), DATE_FORMATTER).toEpochDay(), dateWriteFunctionUsingLocalDate()));
        case Types.TIME:
            return Optional.of(timeColumnMapping(typeHandle.getRequiredDecimalDigits()));
        case Types.TIMESTAMP:
            TimestampType timestampType = createTimestampType(typeHandle.getRequiredDecimalDigits());
            return Optional.of(ColumnMapping.longMapping(timestampType, timestampReadFunction(timestampType), PostgreSqlClient::shortTimestampWriteFunction));
        case Types.ARRAY:
            Optional<ColumnMapping> columnMapping = arrayToTrinoType(session, connection, typeHandle);
            if (columnMapping.isPresent()) {
                return columnMapping;
            }
            break;
    }
    if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
        return mapToUnboundedVarchar(typeHandle);
    }
    return Optional.empty();
}
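
In the Types.DATE branch above, the driver value is read back as text, parsed to a LocalDate, and stored by Trino as a long count of days since the epoch. A small illustration of that read path, using the standard ISO formatter as a stand-in for the client's DATE_FORMATTER (an assumption made here for brevity):

// Parse a date string the way the DATE read function above does and
// convert it to the epoch-day long that Trino stores for DATE values.
DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_LOCAL_DATE;
long epochDay = LocalDate.parse("2017-07-01", dateFormatter).toEpochDay();
// epochDay == 17348, i.e. 17348 days after 1970-01-01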

Example 9 with DATE

Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.

From class OrcTester, method preprocessWriteValueHive:

private static Object preprocessWriteValueHive(Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS) || type.equals(TIMESTAMP_MICROS) || type.equals(TIMESTAMP_NANOS)) {
        LocalDateTime dateTime = ((SqlTimestamp) value).toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }
    if (type.equals(TIMESTAMP_TZ_MILLIS) || type.equals(TIMESTAMP_TZ_MICROS) || type.equals(TIMESTAMP_TZ_NANOS)) {
        SqlTimestampWithTimeZone timestamp = (SqlTimestampWithTimeZone) value;
        int nanosOfMilli = roundDiv(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND);
        return Timestamp.ofEpochMilli(timestamp.getEpochMillis(), nanosOfMilli);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueHive(elementType, element)).collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
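
For the DATE case above, Trino's SqlDate and Hive's Date (org.apache.hadoop.hive.common.type.Date) both model a date as days since the 1970-01-01 epoch, so the conversion is a straight copy of that day count. A hedged round-trip sketch, purely illustrative:

// SqlDate wraps an int day count; Hive's Date can be built from the same count.
SqlDate trinoDate = new SqlDate((int) LocalDate.of(2020, 2, 29).toEpochDay());
Date hiveDate = Date.ofEpochDay(trinoDate.getDays());
// hiveDate.toEpochDay() == trinoDate.getDays() == 18321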

Example 10 with DATE

Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.

From class TestHivePageSink, method writeTestFile:

private static long writeTestFile(HiveConfig config, HiveMetastore metastore, String outputPath) {
    HiveTransactionHandle transaction = new HiveTransactionHandle(false);
    HiveWriterStats stats = new HiveWriterStats();
    ConnectorPageSink pageSink = createPageSink(transaction, config, metastore, new Path("file:///" + outputPath), stats);
    List<LineItemColumn> columns = getTestColumns();
    List<Type> columnTypes = columns.stream().map(LineItemColumn::getType).map(TestHivePageSink::getHiveType).map(hiveType -> hiveType.getType(TESTING_TYPE_MANAGER)).collect(toList());
    PageBuilder pageBuilder = new PageBuilder(columnTypes);
    int rows = 0;
    for (LineItem lineItem : new LineItemGenerator(0.01, 1, 1)) {
        rows++;
        if (rows >= NUM_ROWS) {
            break;
        }
        pageBuilder.declarePosition();
        for (int i = 0; i < columns.size(); i++) {
            LineItemColumn column = columns.get(i);
            BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
            switch(column.getType().getBase()) {
                case IDENTIFIER:
                    BIGINT.writeLong(blockBuilder, column.getIdentifier(lineItem));
                    break;
                case INTEGER:
                    INTEGER.writeLong(blockBuilder, column.getInteger(lineItem));
                    break;
                case DATE:
                    DATE.writeLong(blockBuilder, column.getDate(lineItem));
                    break;
                case DOUBLE:
                    DOUBLE.writeDouble(blockBuilder, column.getDouble(lineItem));
                    break;
                case VARCHAR:
                    createUnboundedVarcharType().writeSlice(blockBuilder, Slices.utf8Slice(column.getString(lineItem)));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported type " + column.getType());
            }
        }
    }
    Page page = pageBuilder.build();
    pageSink.appendPage(page);
    getFutureValue(pageSink.finish());
    File outputDir = new File(outputPath);
    List<File> files = ImmutableList.copyOf(outputDir.listFiles((dir, name) -> !name.endsWith(".crc")));
    File outputFile = getOnlyElement(files);
    long length = outputFile.length();
    ConnectorPageSource pageSource = createPageSource(transaction, config, outputFile);
    List<Page> pages = new ArrayList<>();
    while (!pageSource.isFinished()) {
        Page nextPage = pageSource.getNextPage();
        if (nextPage != null) {
            pages.add(nextPage.getLoadedPage());
        }
    }
    MaterializedResult expectedResults = toMaterializedResult(getHiveSession(config), columnTypes, ImmutableList.of(page));
    MaterializedResult results = toMaterializedResult(getHiveSession(config), columnTypes, pages);
    assertEquals(results, expectedResults);
    assertEquals(round(stats.getInputPageSizeInBytes().getAllTime().getMax()), page.getRetainedSizeInBytes());
    return length;
}
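
In the DATE case of the switch above, DATE.writeLong stores the line item's date as days since the epoch, because DATE is a long-backed Trino type. A hedged standalone sketch of writing and reading one DATE value through the Type SPI (illustrative, not part of the test):

// Build a one-position DATE block and write an epoch-day value into it.
BlockBuilder dateBuilder = DATE.createBlockBuilder(null, 1);
DATE.writeLong(dateBuilder, LocalDate.of(1998, 12, 1).toEpochDay());
Block dateBlock = dateBuilder.build();
// DATE.getLong(dateBlock, 0) returns the same epoch-day value written above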
