Example 1 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From the class SemiTransactionalHiveMetastore, method dropDatabase:

public synchronized void dropDatabase(ConnectorSession session, String schemaName) {
    Optional<Path> location = delegate.getDatabase(schemaName)
            .orElseThrow(() -> new SchemaNotFoundException(schemaName))
            .getLocation()
            .map(Path::new);
    setExclusive((delegate, hdfsEnvironment) -> {
        // If we see files in the schema location, don't delete it.
        // If we see no files, request deletion.
        // If we fail to check the schema location, behave according to fallback.
        boolean deleteData = location.map(path -> {
            HdfsContext context = new HdfsContext(session);
            try (FileSystem fs = hdfsEnvironment.getFileSystem(context, path)) {
                return !fs.listLocatedStatus(path).hasNext();
            } catch (IOException | RuntimeException e) {
                log.warn(e, "Could not check schema directory '%s'", path);
                return deleteSchemaLocationsFallback;
            }
        }).orElse(deleteSchemaLocationsFallback);
        delegate.dropDatabase(schemaName, deleteData);
    });
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) HdfsEnvironment(io.trino.plugin.hive.HdfsEnvironment) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) ConnectorSession(io.trino.spi.connector.ConnectorSession) IOException(java.io.IOException) Optional(java.util.Optional) Logger(io.airlift.log.Logger) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
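
The SchemaNotFoundException above is thrown eagerly, before setExclusive queues any filesystem work, so a caller can turn it into "drop if exists" semantics cheaply. A minimal hedged sketch of such a wrapper follows; the helper itself is illustrative and not part of Trino, and the ConnectorSession is assumed to come from the surrounding connector code.

import io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SchemaNotFoundException;

// Illustrative helper: returns false instead of propagating the missing-schema error.
static boolean dropDatabaseIfExists(SemiTransactionalHiveMetastore metastore, ConnectorSession session, String schemaName)
{
    try {
        metastore.dropDatabase(session, schemaName);
        return true;
    }
    catch (SchemaNotFoundException e) {
        // Thrown by the getDatabase(schemaName).orElseThrow(...) lookup shown above.
        return false;
    }
}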

Example 2 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From the class GlueHiveMetastore, method renameDatabase:

@Override
public void renameDatabase(String databaseName, String newDatabaseName) {
    try {
        Database database = getDatabase(databaseName).orElseThrow(() -> new SchemaNotFoundException(databaseName));
        DatabaseInput renamedDatabase = GlueInputConverter.convertDatabase(database).withName(newDatabaseName);
        stats.getUpdateDatabase().call(() -> glueClient.updateDatabase(new UpdateDatabaseRequest()
                .withCatalogId(catalogId)
                .withName(databaseName)
                .withDatabaseInput(renamedDatabase)));
    } catch (AmazonServiceException e) {
        throw new TrinoException(HIVE_METASTORE_ERROR, e);
    }
}
Also used : UpdateDatabaseRequest(com.amazonaws.services.glue.model.UpdateDatabaseRequest) Database(io.trino.plugin.hive.metastore.Database) AmazonServiceException(com.amazonaws.AmazonServiceException) TrinoException(io.trino.spi.TrinoException) DatabaseInput(com.amazonaws.services.glue.model.DatabaseInput) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
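
Every Glue call in this class repeats the AmazonServiceException-to-TrinoException translation seen in the catch block above. Below is a hedged sketch of that pattern factored into a standalone helper; the helper is illustrative and not part of GlueHiveMetastore.

import com.amazonaws.AmazonServiceException;
import io.trino.spi.TrinoException;

import java.util.concurrent.Callable;

import static io.trino.plugin.hive.HiveErrorCode.HIVE_METASTORE_ERROR;

// Illustrative helper: run a Glue call and surface AWS service failures as
// TrinoException(HIVE_METASTORE_ERROR, ...), matching the catch block above.
static <T> T translateGlueErrors(Callable<T> glueCall)
{
    try {
        return glueCall.call();
    }
    catch (AmazonServiceException e) {
        throw new TrinoException(HIVE_METASTORE_ERROR, e);
    }
    catch (Exception e) {
        // Callable.call() is declared to throw Exception; rewrap anything unexpected.
        throw new RuntimeException(e);
    }
}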

Example 3 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From the class GlueHiveMetastore, method createTable:

@Override
public void createTable(Table table, PrincipalPrivileges principalPrivileges) {
    try {
        TableInput input = GlueInputConverter.convertTable(table);
        stats.getCreateTable().call(() -> glueClient.createTable(new CreateTableRequest()
                .withCatalogId(catalogId)
                .withDatabaseName(table.getDatabaseName())
                .withTableInput(input)));
    } catch (AlreadyExistsException e) {
        throw new TableAlreadyExistsException(new SchemaTableName(table.getDatabaseName(), table.getTableName()));
    } catch (EntityNotFoundException e) {
        throw new SchemaNotFoundException(table.getDatabaseName());
    } catch (AmazonServiceException e) {
        throw new TrinoException(HIVE_METASTORE_ERROR, e);
    }
}
Also used : TableInput(com.amazonaws.services.glue.model.TableInput) TableAlreadyExistsException(io.trino.plugin.hive.TableAlreadyExistsException) AlreadyExistsException(com.amazonaws.services.glue.model.AlreadyExistsException) SchemaAlreadyExistsException(io.trino.plugin.hive.SchemaAlreadyExistsException) AmazonServiceException(com.amazonaws.AmazonServiceException) TrinoException(io.trino.spi.TrinoException) EntityNotFoundException(com.amazonaws.services.glue.model.EntityNotFoundException) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException) CreateTableRequest(com.amazonaws.services.glue.model.CreateTableRequest) SchemaTableName(io.trino.spi.connector.SchemaTableName)
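
A hedged test-style sketch of the behavior encoded by the catch blocks above, using AssertJ's assertThatThrownBy (statically imported from org.assertj.core.api.Assertions). The metastore, table, and privileges variables are assumed to be prepared by the surrounding test setup and are not shown; the test itself is illustrative and not from the Trino repository.

@Test
public void testCreateTableInMissingSchema()
{
    // Creating a table in a database that does not exist in Glue should surface
    // SchemaNotFoundException, per the EntityNotFoundException mapping above.
    assertThatThrownBy(() -> metastore.createTable(table, privileges))
            .isInstanceOf(SchemaNotFoundException.class);
}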

Example 4 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From the class InMemoryThriftMetastore, method alterDatabase:

@Override
public synchronized void alterDatabase(HiveIdentity identity, String databaseName, Database newDatabase) {
    String newDatabaseName = newDatabase.getName();
    if (databaseName.equals(newDatabaseName)) {
        if (databases.replace(databaseName, newDatabase) == null) {
            throw new SchemaNotFoundException(databaseName);
        }
        return;
    }
    Database database = databases.get(databaseName);
    if (database == null) {
        throw new SchemaNotFoundException(databaseName);
    }
    if (databases.putIfAbsent(newDatabaseName, database) != null) {
        throw new SchemaAlreadyExistsException(newDatabaseName);
    }
    databases.remove(databaseName);
    rewriteKeys(relations, name -> new SchemaTableName(newDatabaseName, name.getTableName()));
    rewriteKeys(views, name -> new SchemaTableName(newDatabaseName, name.getTableName()));
    rewriteKeys(partitions, name -> name.withSchemaName(newDatabaseName));
    rewriteKeys(tablePrivileges, name -> name.withDatabase(newDatabaseName));
}
Also used : SchemaAlreadyExistsException(io.trino.plugin.hive.SchemaAlreadyExistsException) Database(org.apache.hadoop.hive.metastore.api.Database) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException) SchemaTableName(io.trino.spi.connector.SchemaTableName)
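
A hedged usage sketch of the three outcomes above (successful rename, missing source schema, name collision). The HiveIdentity and metastore values are assumed to come from the surrounding code, and the wrapper itself is illustrative rather than part of Trino.

import io.trino.plugin.hive.SchemaAlreadyExistsException;
import io.trino.plugin.hive.authentication.HiveIdentity;
import io.trino.plugin.hive.metastore.thrift.InMemoryThriftMetastore;
import io.trino.spi.connector.SchemaNotFoundException;
import org.apache.hadoop.hive.metastore.api.Database;

// Illustrative wrapper around the alterDatabase shown above.
static void renameSchema(InMemoryThriftMetastore metastore, HiveIdentity identity, Database current, String newName)
{
    Database renamed = current.deepCopy();
    renamed.setName(newName);
    try {
        metastore.alterDatabase(identity, current.getName(), renamed);
    }
    catch (SchemaNotFoundException e) {
        // The source database was dropped concurrently.
    }
    catch (SchemaAlreadyExistsException e) {
        // Another database already uses newName.
    }
}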

Example 5 with SchemaNotFoundException

Use of io.trino.spi.connector.SchemaNotFoundException in project trino by trinodb.

From the class BigQueryMetadata, method dropSchema:

@Override
public void dropSchema(ConnectorSession session, String schemaName) {
    BigQueryClient client = bigQueryClientFactory.create(session);
    String remoteSchemaName = client.toRemoteDataset(getProjectId(client), schemaName)
            .map(RemoteDatabaseObject::getOnlyRemoteName)
            .orElseThrow(() -> new SchemaNotFoundException(schemaName));
    client.dropSchema(DatasetId.of(remoteSchemaName));
}
Also used : SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException)
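
Because of the orElseThrow above, dropping a nonexistent schema fails with SchemaNotFoundException. Below is a hedged sketch of an existence probe built from the same lookup, written as if it were an extra helper inside BigQueryMetadata so that bigQueryClientFactory and getProjectId are in scope; the helper is illustrative and not part of the Trino source.

// Illustrative helper, assumed to live inside BigQueryMetadata.
public boolean schemaExists(ConnectorSession session, String schemaName)
{
    BigQueryClient client = bigQueryClientFactory.create(session);
    return client.toRemoteDataset(getProjectId(client), schemaName).isPresent();
}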

Aggregations

SchemaNotFoundException (io.trino.spi.connector.SchemaNotFoundException): 19
TrinoException (io.trino.spi.TrinoException): 13
SchemaTableName (io.trino.spi.connector.SchemaTableName): 10
Database (io.trino.plugin.hive.metastore.Database): 8
ImmutableList (com.google.common.collect.ImmutableList): 7
ImmutableMap (com.google.common.collect.ImmutableMap): 7
ImmutableSet (com.google.common.collect.ImmutableSet): 6
HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext): 6
NOT_SUPPORTED (io.trino.spi.StandardErrorCode.NOT_SUPPORTED): 6
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 6
ConnectorSession (io.trino.spi.connector.ConnectorSession): 6
IOException (java.io.IOException): 6
List (java.util.List): 6
Map (java.util.Map): 6
Optional (java.util.Optional): 6
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument): 5
Verify.verify (com.google.common.base.Verify.verify): 5
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 5
ConnectorTableMetadata (io.trino.spi.connector.ConnectorTableMetadata): 5
Preconditions.checkState (com.google.common.base.Preconditions.checkState): 4