
Example 6 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From class DeltaLakeMetadata, method dropSchema:

@Override
public void dropSchema(ConnectorSession session, String schemaName) {
    Optional<Path> location = metastore.getDatabase(schemaName)
            .orElseThrow(() -> new SchemaNotFoundException(schemaName))
            .getLocation()
            .map(Path::new);
    // If we see files in the schema location, don't delete it.
    // If we see no files or can't see the location at all, use fallback.
    boolean deleteData = location.map(path -> {
        // don't catch errors here
        HdfsContext context = new HdfsContext(session);
        try (FileSystem fs = hdfsEnvironment.getFileSystem(context, path)) {
            return !fs.listLocatedStatus(path).hasNext();
        } catch (IOException | RuntimeException e) {
            LOG.warn(e, "Could not check schema directory '%s'", path);
            return deleteSchemaLocationsFallback;
        }
    }).orElse(deleteSchemaLocationsFallback);
    metastore.dropDatabase(schemaName, deleteData);
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) IOException(java.io.IOException) Optional(java.util.Optional) ConnectorSession(io.trino.spi.connector.ConnectorSession) Database(io.trino.plugin.hive.metastore.Database) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
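
The chained form above packs a three-way rule into one expression: an empty location means delete the data, a non-empty one means keep it, and an unreadable or missing location falls back to a configured default. Below is a minimal, self-contained sketch of the same rule, using java.nio.file in place of Hadoop's FileSystem purely for illustration; every name here is hypothetical, not Trino code.

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;

public class DropSchemaSketch {
    // Delete the data only when the location is verifiably empty; if the
    // location is missing or cannot be inspected, use the configured fallback.
    static boolean shouldDeleteData(Optional<Path> location, boolean fallback) {
        return location.map(path -> {
            try (DirectoryStream<Path> entries = Files.newDirectoryStream(path)) {
                return !entries.iterator().hasNext();
            } catch (IOException | RuntimeException e) {
                return fallback;
            }
        }).orElse(fallback);
    }

    public static void main(String[] args) throws IOException {
        Path empty = Files.createTempDirectory("schema");
        System.out.println(shouldDeleteData(Optional.of(empty), false)); // true: location is empty
        System.out.println(shouldDeleteData(Optional.empty(), true)); // true: no location, fallback wins
    }
}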

Example 7 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From class DeltaLakeMetadata, method getSchemaProperties:

@Override
public Map<String, Object> getSchemaProperties(ConnectorSession session, CatalogSchemaName schemaName) {
    String schema = schemaName.getSchemaName();
    checkState(!schema.equals("information_schema") && !schema.equals("sys"), "Schema is not accessible: %s", schemaName);
    Optional<Database> db = metastore.getDatabase(schema);
    return db.map(DeltaLakeSchemaProperties::fromDatabase).orElseThrow(() -> new SchemaNotFoundException(schema));
}
Also used : Database(io.trino.plugin.hive.metastore.Database) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
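
Every example in this set reduces to the same move: resolve an Optional from the metastore and translate absence into SchemaNotFoundException at the connector boundary. Here is a stand-alone sketch of that pattern; the Map-backed metastore and the exception stub are assumptions for illustration, not the Trino API.

import java.util.Map;
import java.util.Optional;

public class SchemaLookupSketch {
    // Stand-in for io.trino.spi.connector.SchemaNotFoundException.
    static class SchemaNotFoundException extends RuntimeException {
        SchemaNotFoundException(String schemaName) {
            super("Schema '" + schemaName + "' not found");
        }
    }

    private static final Map<String, String> SCHEMA_LOCATIONS = Map.of("sales", "s3://bucket/sales");

    static String schemaLocation(String schemaName) {
        // map/orElseThrow mirrors the getDatabase(...).orElseThrow(...) calls above
        return Optional.ofNullable(SCHEMA_LOCATIONS.get(schemaName))
                .orElseThrow(() -> new SchemaNotFoundException(schemaName));
    }

    public static void main(String[] args) {
        System.out.println(schemaLocation("sales")); // s3://bucket/sales
        System.out.println(schemaLocation("missing")); // throws SchemaNotFoundException
    }
}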

Example 8 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From class PhoenixClient, method beginCreateTable:

@Override
public JdbcOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) {
    SchemaTableName schemaTableName = tableMetadata.getTable();
    String schema = schemaTableName.getSchemaName();
    String table = schemaTableName.getTableName();
    if (!getSchemaNames(session).contains(schema)) {
        throw new SchemaNotFoundException(schema);
    }
    try (Connection connection = connectionFactory.openConnection(session)) {
        ConnectorIdentity identity = session.getIdentity();
        // Map Trino identifiers to remote (Phoenix) schema and table names,
        // then apply Phoenix schema-name handling (e.g. the default schema).
        schema = getIdentifierMapping().toRemoteSchemaName(identity, connection, schema);
        table = getIdentifierMapping().toRemoteTableName(identity, connection, schema, table);
        schema = toPhoenixSchemaName(schema);
        LinkedList<ColumnMetadata> tableColumns = new LinkedList<>(tableMetadata.getColumns());
        Map<String, Object> tableProperties = tableMetadata.getProperties();
        Optional<Boolean> immutableRows = PhoenixTableProperties.getImmutableRows(tableProperties);
        String immutable = immutableRows.isPresent() && immutableRows.get() ? "IMMUTABLE" : "";
        ImmutableList.Builder<String> columnNames = ImmutableList.builder();
        ImmutableList.Builder<Type> columnTypes = ImmutableList.builder();
        ImmutableList.Builder<String> columnList = ImmutableList.builder();
        Set<ColumnMetadata> rowkeyColumns = tableColumns.stream().filter(col -> isPrimaryKey(col, tableProperties)).collect(toSet());
        ImmutableList.Builder<String> pkNames = ImmutableList.builder();
        Optional<String> rowkeyColumn = Optional.empty();
        if (rowkeyColumns.isEmpty()) {
            // Add a rowkey when not specified in DDL
            columnList.add(ROWKEY + " bigint not null");
            pkNames.add(ROWKEY);
            execute(session, format("CREATE SEQUENCE %s", getEscapedTableName(schema, table + "_sequence")));
            rowkeyColumn = Optional.of(ROWKEY);
        }
        for (ColumnMetadata column : tableColumns) {
            String columnName = getIdentifierMapping().toRemoteColumnName(connection, column.getName());
            columnNames.add(columnName);
            columnTypes.add(column.getType());
            String typeStatement = toWriteMapping(session, column.getType()).getDataType();
            if (rowkeyColumns.contains(column)) {
                typeStatement += " not null";
                pkNames.add(columnName);
            }
            columnList.add(format("%s %s", getEscapedArgument(columnName), typeStatement));
        }
        // Translate Trino table properties into Phoenix/HBase table options.
        ImmutableList.Builder<String> tableOptions = ImmutableList.builder();
        PhoenixTableProperties.getSaltBuckets(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.SALT_BUCKETS + "=" + value));
        PhoenixTableProperties.getSplitOn(tableProperties).ifPresent(value -> tableOptions.add("SPLIT ON (" + value.replace('"', '\'') + ")"));
        PhoenixTableProperties.getDisableWal(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DISABLE_WAL + "=" + value));
        PhoenixTableProperties.getDefaultColumnFamily(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DEFAULT_COLUMN_FAMILY + "=" + value));
        PhoenixTableProperties.getBloomfilter(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.BLOOMFILTER + "='" + value + "'"));
        PhoenixTableProperties.getVersions(tableProperties).ifPresent(value -> tableOptions.add(HConstants.VERSIONS + "=" + value));
        PhoenixTableProperties.getMinVersions(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.MIN_VERSIONS + "=" + value));
        PhoenixTableProperties.getCompression(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.COMPRESSION + "='" + value + "'"));
        PhoenixTableProperties.getTimeToLive(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.TTL + "=" + value));
        PhoenixTableProperties.getDataBlockEncoding(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.DATA_BLOCK_ENCODING + "='" + value + "'"));
        String sql = format("CREATE %s TABLE %s (%s , CONSTRAINT PK PRIMARY KEY (%s)) %s", immutable, getEscapedTableName(schema, table), join(", ", columnList.build()), join(", ", pkNames.build()), join(", ", tableOptions.build()));
        execute(session, sql);
        return new PhoenixOutputTableHandle(schema, table, columnNames.build(), columnTypes.build(), Optional.empty(), rowkeyColumn);
    } catch (SQLException e) {
        if (e.getErrorCode() == SQLExceptionCode.TABLE_ALREADY_EXIST.getErrorCode()) {
            throw new TrinoException(ALREADY_EXISTS, "Phoenix table already exists", e);
        }
        throw new TrinoException(PHOENIX_METADATA_ERROR, "Error creating Phoenix table", e);
    }
}
Also used : ColumnMetadata(io.trino.spi.connector.ColumnMetadata) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) ConnectorSession(io.trino.spi.connector.ConnectorSession) ConnectorIdentity(io.trino.spi.security.ConnectorIdentity) SchemaTableName(io.trino.spi.connector.SchemaTableName) JdbcOutputTableHandle(io.trino.plugin.jdbc.JdbcOutputTableHandle) Connection(java.sql.Connection) SQLException(java.sql.SQLException) SQLExceptionCode(org.apache.phoenix.exception.SQLExceptionCode) ALREADY_EXISTS(io.trino.spi.StandardErrorCode.ALREADY_EXISTS) PHOENIX_METADATA_ERROR(io.trino.plugin.phoenix5.PhoenixErrorCode.PHOENIX_METADATA_ERROR) TableProperty(org.apache.phoenix.schema.TableProperty) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HConstants(org.apache.hadoop.hbase.HConstants) ImmutableList(com.google.common.collect.ImmutableList) LinkedList(java.util.LinkedList) Map(java.util.Map) Optional(java.util.Optional) Set(java.util.Set) Type(io.trino.spi.type.Type) TrinoException(io.trino.spi.TrinoException) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
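
The method's last step is plain string assembly, which is easier to read with concrete values. The following self-contained sketch feeds hypothetical column, key, and option lists through the same format call and prints the resulting Phoenix DDL; the table name and every option value are invented for illustration.

import java.util.List;

import static java.lang.String.format;
import static java.lang.String.join;

public class PhoenixDdlSketch {
    public static void main(String[] args) {
        String immutable = "IMMUTABLE"; // empty string when immutable rows are not requested
        String escapedTableName = "\"my_schema\".\"orders\"";
        List<String> columnList = List.of(
                "\"ROWKEY\" bigint not null", // synthesized rowkey, as in the no-primary-key branch
                "\"customer\" varchar",
                "\"total\" double");
        List<String> pkNames = List.of("\"ROWKEY\"");
        List<String> tableOptions = List.of("SALT_BUCKETS=16", "DISABLE_WAL=true");

        // Same format string as beginCreateTable above
        String sql = format("CREATE %s TABLE %s (%s , CONSTRAINT PK PRIMARY KEY (%s)) %s",
                immutable, escapedTableName, join(", ", columnList), join(", ", pkNames), join(", ", tableOptions));
        System.out.println(sql);
        // CREATE IMMUTABLE TABLE "my_schema"."orders" ("ROWKEY" bigint not null, "customer" varchar,
        // "total" double , CONSTRAINT PK PRIMARY KEY ("ROWKEY")) SALT_BUCKETS=16, DISABLE_WAL=true
    }
}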

Example 9 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From class BridgingHiveMetastore, method setDatabaseOwner:

@Override
public void setDatabaseOwner(String databaseName, HivePrincipal principal) {
    Database database = fromMetastoreApiDatabase(delegate.getDatabase(databaseName)
            .orElseThrow(() -> new SchemaNotFoundException(databaseName)));
    Database newDatabase = Database.builder(database)
            .setOwnerName(Optional.of(principal.getName()))
            .setOwnerType(Optional.of(principal.getType()))
            .build();
    delegate.alterDatabase(identity, databaseName, toMetastoreApiDatabase(newDatabase));
}
Also used : ThriftMetastoreUtil.fromMetastoreApiDatabase(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.fromMetastoreApiDatabase) ThriftMetastoreUtil.toMetastoreApiDatabase(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiDatabase) Database(io.trino.plugin.hive.metastore.Database) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
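
The owner change is a copy-and-modify over an immutable value object: read, rebuild with new owner fields, write back. A stripped-down sketch of that shape (Java 16+ for records); the record and its method are stand-ins, not the real metastore Database builder.

import java.util.Optional;

public class OwnerChangeSketch {
    // Tiny stand-in for the immutable metastore Database value object.
    record Database(String name, Optional<String> ownerName, Optional<String> ownerType) {
        // Same copy-and-modify shape as Database.builder(db).setOwnerName(...).setOwnerType(...).build()
        Database withOwner(String owner, String type) {
            return new Database(name, Optional.of(owner), Optional.of(type));
        }
    }

    public static void main(String[] args) {
        Database db = new Database("sales", Optional.empty(), Optional.empty());
        Database updated = db.withOwner("alice", "USER");
        System.out.println(updated); // Database[name=sales, ownerName=Optional[alice], ownerType=Optional[USER]]
    }
}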

Example 10 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From class BridgingHiveMetastore, method renameDatabase:

@Override
public void renameDatabase(String databaseName, String newDatabaseName) {
    org.apache.hadoop.hive.metastore.api.Database database = delegate.getDatabase(databaseName)
            .orElseThrow(() -> new SchemaNotFoundException(databaseName));
    database.setName(newDatabaseName);
    delegate.alterDatabase(identity, databaseName, database);
    // Some Hive metastore versions accept the alter but silently keep the old
    // name; re-read under the old name to detect that and fail loudly.
    delegate.getDatabase(databaseName).ifPresent(newDatabase -> {
        if (newDatabase.getName().equals(databaseName)) {
            throw new TrinoException(NOT_SUPPORTED, "Hive metastore does not support renaming schemas");
        }
    });
}
Also used : TrinoException(io.trino.spi.TrinoException) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
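
The re-read after alterDatabase is a verify-after-write guard: the metastore may accept the alter yet keep the old name. Below is a toy sketch of that failure mode and the guard; the in-memory delegate standing in for the thrift metastore is an assumption for illustration.

import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

public class RenameVerificationSketch {
    // In-memory stand-in for the thrift metastore delegate.
    private static final Map<String, String> DATABASES = new ConcurrentHashMap<>(Map.of("sales", "hdfs://warehouse/sales"));

    static Optional<String> getDatabase(String name) {
        return Optional.ofNullable(DATABASES.get(name));
    }

    // Models a metastore that accepts the call but silently ignores the name change.
    static void alterDatabase(String name, String newName) {
        // intentionally a no-op
    }

    static void renameDatabase(String from, String to) {
        getDatabase(from).orElseThrow(() -> new IllegalStateException("Schema not found: " + from));
        alterDatabase(from, to);
        // Verify-after-write: if the old name still resolves, the rename did not happen.
        if (getDatabase(from).isPresent()) {
            throw new UnsupportedOperationException("Metastore does not support renaming schemas");
        }
    }

    public static void main(String[] args) {
        renameDatabase("sales", "sales_v2"); // throws UnsupportedOperationException
    }
}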

Aggregations

Each entry lists a symbol and its number of usages across the indexed examples:

SchemaNotFoundException (io.trino.spi.connector.SchemaNotFoundException): 19
TrinoException (io.trino.spi.TrinoException): 13
SchemaTableName (io.trino.spi.connector.SchemaTableName): 10
Database (io.trino.plugin.hive.metastore.Database): 8
ImmutableList (com.google.common.collect.ImmutableList): 7
ImmutableMap (com.google.common.collect.ImmutableMap): 7
ImmutableSet (com.google.common.collect.ImmutableSet): 6
HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext): 6
NOT_SUPPORTED (io.trino.spi.StandardErrorCode.NOT_SUPPORTED): 6
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 6
ConnectorSession (io.trino.spi.connector.ConnectorSession): 6
IOException (java.io.IOException): 6
List (java.util.List): 6
Map (java.util.Map): 6
Optional (java.util.Optional): 6
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument): 5
Verify.verify (com.google.common.base.Verify.verify): 5
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 5
ConnectorTableMetadata (io.trino.spi.connector.ConnectorTableMetadata): 5
Preconditions.checkState (com.google.common.base.Preconditions.checkState): 4