Example 61 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class CassandraSession, method getTable.

public CassandraTable getTable(SchemaTableName schemaTableName) throws TableNotFoundException {
    KeyspaceMetadata keyspace = getKeyspaceByCaseInsensitiveName(schemaTableName.getSchemaName());
    AbstractTableMetadata tableMeta = getTableMetadata(keyspace, schemaTableName.getTableName());
    List<String> columnNames = new ArrayList<>();
    List<ColumnMetadata> columns = tableMeta.getColumns();
    checkColumnNames(columns);
    for (ColumnMetadata columnMetadata : columns) {
        columnNames.add(columnMetadata.getName());
    }
    // check if there is a comment to establish column ordering
    String comment = tableMeta.getOptions().getComment();
    Set<String> hiddenColumns = ImmutableSet.of();
    if (comment != null && comment.startsWith(PRESTO_COMMENT_METADATA)) {
        String columnOrderingString = comment.substring(PRESTO_COMMENT_METADATA.length());
        // column ordering
        List<ExtraColumnMetadata> extras = extraColumnMetadataCodec.fromJson(columnOrderingString);
        List<String> explicitColumnOrder = new ArrayList<>(ImmutableList.copyOf(transform(extras, ExtraColumnMetadata::getName)));
        hiddenColumns = extras.stream().filter(ExtraColumnMetadata::isHidden).map(ExtraColumnMetadata::getName).collect(toImmutableSet());
        // add columns not in the comment to the ordering
        List<String> remaining = columnNames.stream().filter(name -> !explicitColumnOrder.contains(name)).collect(toList());
        explicitColumnOrder.addAll(remaining);
        // sort the actual columns names using the explicit column order (this allows for missing columns)
        columnNames = Ordering.explicit(explicitColumnOrder).sortedCopy(columnNames);
    }
    ImmutableList.Builder<CassandraColumnHandle> columnHandles = ImmutableList.builder();
    // add primary keys first
    Set<String> primaryKeySet = new HashSet<>();
    for (ColumnMetadata columnMeta : tableMeta.getPartitionKey()) {
        primaryKeySet.add(columnMeta.getName());
        boolean hidden = hiddenColumns.contains(columnMeta.getName());
        CassandraColumnHandle columnHandle = buildColumnHandle(tableMeta, columnMeta, true, false, columnNames.indexOf(columnMeta.getName()), hidden)
                .orElseThrow(() -> new TrinoException(NOT_SUPPORTED, "Unsupported partition key type: " + columnMeta.getType().getName()));
        columnHandles.add(columnHandle);
    }
    // add clustering columns
    for (ColumnMetadata columnMeta : tableMeta.getClusteringColumns()) {
        primaryKeySet.add(columnMeta.getName());
        boolean hidden = hiddenColumns.contains(columnMeta.getName());
        Optional<CassandraColumnHandle> columnHandle = buildColumnHandle(tableMeta, columnMeta, false, true, columnNames.indexOf(columnMeta.getName()), hidden);
        columnHandle.ifPresent(columnHandles::add);
    }
    // add other columns
    for (ColumnMetadata columnMeta : columns) {
        if (!primaryKeySet.contains(columnMeta.getName())) {
            boolean hidden = hiddenColumns.contains(columnMeta.getName());
            Optional<CassandraColumnHandle> columnHandle = buildColumnHandle(tableMeta, columnMeta, false, false, columnNames.indexOf(columnMeta.getName()), hidden);
            columnHandle.ifPresent(columnHandles::add);
        }
    }
    List<CassandraColumnHandle> sortedColumnHandles = columnHandles.build().stream()
            .sorted(comparing(CassandraColumnHandle::getOrdinalPosition))
            .collect(toList());
    CassandraTableHandle tableHandle = new CassandraTableHandle(tableMeta.getKeyspace().getName(), tableMeta.getName());
    return new CassandraTable(tableHandle, sortedColumnHandles);
}
Also used : CassandraType.isFullySupported(io.trino.plugin.cassandra.CassandraType.isFullySupported) QueryBuilder(com.datastax.driver.core.querybuilder.QueryBuilder) Iterables.transform(com.google.common.collect.Iterables.transform) CassandraType.toCassandraType(io.trino.plugin.cassandra.CassandraType.toCassandraType) RegularStatement(com.datastax.driver.core.RegularStatement) Clause(com.datastax.driver.core.querybuilder.Clause) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException) ByteBuffer(java.nio.ByteBuffer) Duration(io.airlift.units.Duration) ReconnectionPolicy(com.datastax.driver.core.policies.ReconnectionPolicy) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Session(com.datastax.driver.core.Session) Map(java.util.Map) VersionNumber(com.datastax.driver.core.VersionNumber) ENGLISH(java.util.Locale.ENGLISH) CassandraCqlUtils.validSchemaName(io.trino.plugin.cassandra.util.CassandraCqlUtils.validSchemaName) TableMetadata(com.datastax.driver.core.TableMetadata) ImmutableSet(com.google.common.collect.ImmutableSet) ColumnMetadata(com.datastax.driver.core.ColumnMetadata) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Set(java.util.Set) TrinoException(io.trino.spi.TrinoException) NoHostAvailableException(com.datastax.driver.core.exceptions.NoHostAvailableException) Sets(com.google.common.collect.Sets) SchemaTableName(io.trino.spi.connector.SchemaTableName) String.format(java.lang.String.format) Collectors.joining(java.util.stream.Collectors.joining) Preconditions.checkState(com.google.common.base.Preconditions.checkState) ProtocolVersion(com.datastax.driver.core.ProtocolVersion) List(java.util.List) Stream(java.util.stream.Stream) Cluster(com.datastax.driver.core.Cluster) Host(com.datastax.driver.core.Host) Optional(java.util.Optional) Select(com.datastax.driver.core.querybuilder.Select) Statement(com.datastax.driver.core.Statement) JsonCodec(io.airlift.json.JsonCodec) TokenRange(com.datastax.driver.core.TokenRange) Logger(io.airlift.log.Logger) NullableValue(io.trino.spi.predicate.NullableValue) Row(com.datastax.driver.core.Row) HashMap(java.util.HashMap) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) PRESTO_COMMENT_METADATA(io.trino.plugin.cassandra.CassandraMetadata.PRESTO_COMMENT_METADATA) AbstractTableMetadata(com.datastax.driver.core.AbstractTableMetadata) PreparedStatement(com.datastax.driver.core.PreparedStatement) HashSet(java.util.HashSet) CassandraCqlUtils.selectDistinctFrom(io.trino.plugin.cassandra.util.CassandraCqlUtils.selectDistinctFrom) ResultSet(com.datastax.driver.core.ResultSet) ImmutableList(com.google.common.collect.ImmutableList) Objects.requireNonNull(java.util.Objects.requireNonNull) ColumnHandle(io.trino.spi.connector.ColumnHandle) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) Comparator.comparing(java.util.Comparator.comparing) Suppliers.memoize(com.google.common.base.Suppliers.memoize) QueryBuilder.eq(com.datastax.driver.core.querybuilder.QueryBuilder.eq) TupleDomain(io.trino.spi.predicate.TupleDomain) IndexMetadata(com.datastax.driver.core.IndexMetadata) CassandraCqlUtils(io.trino.plugin.cassandra.util.CassandraCqlUtils) Collectors.toList(java.util.stream.Collectors.toList) MaterializedViewMetadata(com.datastax.driver.core.MaterializedViewMetadata) KeyspaceMetadata(com.datastax.driver.core.KeyspaceMetadata) Ordering(com.google.common.collect.Ordering) 
QueryBuilder.select(com.datastax.driver.core.querybuilder.QueryBuilder.select) DataType(com.datastax.driver.core.DataType) CASSANDRA_VERSION_ERROR(io.trino.plugin.cassandra.CassandraErrorCode.CASSANDRA_VERSION_ERROR) ReconnectionSchedule(com.datastax.driver.core.policies.ReconnectionPolicy.ReconnectionSchedule)
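
The TableNotFoundException in this example is raised inside the lookup helpers referenced above (getKeyspaceByCaseInsensitiveName and getTableMetadata), which are not part of the snippet. A minimal sketch of what such a helper could look like, assuming the driver 3.x KeyspaceMetadata API; the real helper likely also covers materialized views (note the MaterializedViewMetadata import above):

private static AbstractTableMetadata getTableMetadata(KeyspaceMetadata keyspace, String caseInsensitiveName) {
    // Hypothetical sketch: scan the keyspace case-insensitively and translate a miss
    // into Trino's TableNotFoundException
    for (TableMetadata candidate : keyspace.getTables()) {
        if (candidate.getName().equalsIgnoreCase(caseInsensitiveName)) {
            return candidate;
        }
    }
    throw new TableNotFoundException(new SchemaTableName(keyspace.getName(), caseInsensitiveName));
}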

Example 62 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class ViewReaderUtil, method coralTableRedirectionResolver.

private static CoralTableRedirectionResolver coralTableRedirectionResolver(ConnectorSession session, BiFunction<ConnectorSession, SchemaTableName, Optional<CatalogSchemaTableName>> tableRedirectionResolver, MetadataProvider metadataProvider) {
    return schemaTableName -> tableRedirectionResolver.apply(session, schemaTableName).map(target -> {
        ConnectorTableSchema tableSchema = metadataProvider.getRelationMetadata(session, target)
                .orElseThrow(() -> new TableNotFoundException(
                        target.getSchemaTableName(),
                        format("%s is redirected to %s, but that relation cannot be found", schemaTableName, target)));
        List<Column> columns = tableSchema.getColumns().stream()
                .filter(columnSchema -> !columnSchema.isHidden())
                .map(columnSchema -> new Column(columnSchema.getName(), toHiveType(columnSchema.getType()), Optional.empty()))
                .collect(toImmutableList());
        Table table = Table.builder()
                .setDatabaseName(schemaTableName.getSchemaName())
                .setTableName(schemaTableName.getTableName())
                .setTableType(EXTERNAL_TABLE.name())
                .setDataColumns(columns)
                .withStorage(storage -> storage.setStorageFormat(fromHiveStorageFormat(TEXTFILE)))
                .setOwner(Optional.empty())
                .build();
        return toMetastoreApiTable(table);
    });
}
Also used : BiFunction(java.util.function.BiFunction) EXTERNAL_TABLE(org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE) CoralSemiTransactionalHiveMSCAdapter(io.trino.plugin.hive.metastore.CoralSemiTransactionalHiveMSCAdapter) HiveMetastoreClient(com.linkedin.coral.common.HiveMetastoreClient) StorageFormat.fromHiveStorageFormat(io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat) ObjectMapperProvider(io.airlift.json.ObjectMapperProvider) MetadataProvider(io.trino.spi.connector.MetadataProvider) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Column(io.trino.plugin.hive.metastore.Column) Verify.verify(com.google.common.base.Verify.verify) Locale(java.util.Locale) SemiTransactionalHiveMetastore(io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore) Map(java.util.Map) Objects.requireNonNull(java.util.Objects.requireNonNull) ViewColumn(io.trino.spi.connector.ConnectorViewDefinition.ViewColumn) TEXTFILE(io.trino.plugin.hive.HiveStorageFormat.TEXTFILE) ConnectorViewDefinition(io.trino.spi.connector.ConnectorViewDefinition) VIRTUAL_VIEW(org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW) HiveType.toHiveType(io.trino.plugin.hive.HiveType.toHiveType) TABLE_COMMENT(io.trino.plugin.hive.HiveMetadata.TABLE_COMMENT) HiveSessionProperties.isLegacyHiveViewTranslation(io.trino.plugin.hive.HiveSessionProperties.isLegacyHiveViewTranslation) RelDataType(org.apache.calcite.rel.type.RelDataType) HIVE_VIEW_TRANSLATION_ERROR(io.trino.plugin.hive.HiveErrorCode.HIVE_VIEW_TRANSLATION_ERROR) HiveToRelConverter(com.linkedin.coral.hive.hive2rel.HiveToRelConverter) Table(io.trino.plugin.hive.metastore.Table) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) TrinoException(io.trino.spi.TrinoException) ConnectorSession(io.trino.spi.connector.ConnectorSession) RelNode(org.apache.calcite.rel.RelNode) CatalogName(io.trino.plugin.base.CatalogName) SchemaTableName(io.trino.spi.connector.SchemaTableName) String.format(java.lang.String.format) Collectors.joining(java.util.stream.Collectors.joining) ConnectorTableSchema(io.trino.spi.connector.ConnectorTableSchema) ThriftMetastoreUtil.toMetastoreApiTable(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiTable) HiveUtil.checkCondition(io.trino.plugin.hive.util.HiveUtil.checkCondition) Base64(java.util.Base64) List(java.util.List) RelToTrinoConverter(com.linkedin.coral.trino.rel2trino.RelToTrinoConverter) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) JsonCodecFactory(io.airlift.json.JsonCodecFactory) TableType(org.apache.hadoop.hive.metastore.TableType) HIVE_INVALID_VIEW_DATA(io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_VIEW_DATA) Optional(java.util.Optional) TypeManager(io.trino.spi.type.TypeManager) JsonCodec(io.airlift.json.JsonCodec) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Table(io.trino.plugin.hive.metastore.Table) ThriftMetastoreUtil.toMetastoreApiTable(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiTable) Column(io.trino.plugin.hive.metastore.Column) ViewColumn(io.trino.spi.connector.ConnectorViewDefinition.ViewColumn) ConnectorTableSchema(io.trino.spi.connector.ConnectorTableSchema)
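
Two constructor shapes of TableNotFoundException appear across these examples: the single-argument form (see Example 63 below) and the (SchemaTableName, String) form used here to attach a human-readable reason. A small illustration with placeholder names and values:

// Placeholder identifiers purely for illustration
SchemaTableName source = new SchemaTableName("default", "orders_view");
CatalogSchemaTableName target = new CatalogSchemaTableName("iceberg", "analytics", "orders");
TableNotFoundException plain = new TableNotFoundException(source);
TableNotFoundException withReason = new TableNotFoundException(
        target.getSchemaTableName(),
        format("%s is redirected to %s, but that relation cannot be found", source, target));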

Example 63 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class RegisterPartitionProcedure, method doRegisterPartition.

private void doRegisterPartition(ConnectorSession session, ConnectorAccessControl accessControl, String schemaName, String tableName, List<String> partitionColumn, List<String> partitionValues, String location) {
    if (!allowRegisterPartition) {
        throw new TrinoException(PERMISSION_DENIED, "register_partition procedure is disabled");
    }
    SemiTransactionalHiveMetastore metastore = hiveMetadataFactory.create(session.getIdentity(), true).getMetastore();
    HdfsContext hdfsContext = new HdfsContext(session);
    SchemaTableName schemaTableName = new SchemaTableName(schemaName, tableName);
    Table table = metastore.getTable(schemaName, tableName).orElseThrow(() -> new TableNotFoundException(schemaTableName));
    accessControl.checkCanInsertIntoTable(null, schemaTableName);
    checkIsPartitionedTable(table);
    checkPartitionColumns(table, partitionColumn);
    Optional<Partition> partition = metastore.unsafeGetRawHiveMetastoreClosure().getPartition(schemaName, tableName, partitionValues);
    if (partition.isPresent()) {
        String partitionName = FileUtils.makePartName(partitionColumn, partitionValues);
        throw new TrinoException(ALREADY_EXISTS, format("Partition [%s] is already registered with location %s", partitionName, partition.get().getStorage().getLocation()));
    }
    Path partitionLocation;
    if (location == null) {
        partitionLocation = new Path(table.getStorage().getLocation(), FileUtils.makePartName(partitionColumn, partitionValues));
    } else {
        partitionLocation = new Path(location);
    }
    if (!HiveWriteUtils.pathExists(hdfsContext, hdfsEnvironment, partitionLocation)) {
        throw new TrinoException(INVALID_PROCEDURE_ARGUMENT, "Partition location does not exist: " + partitionLocation);
    }
    metastore.addPartition(
            session,
            table.getDatabaseName(),
            table.getTableName(),
            buildPartitionObject(session, table, partitionValues, partitionLocation),
            partitionLocation,
            // no need for failed attempts cleanup
            Optional.empty(),
            PartitionStatistics.empty(),
            false);
    metastore.commit();
}
Also used : Path(org.apache.hadoop.fs.Path) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Partition(io.trino.plugin.hive.metastore.Partition) Procedures.checkIsPartitionedTable(io.trino.plugin.hive.procedure.Procedures.checkIsPartitionedTable) Table(io.trino.plugin.hive.metastore.Table) SemiTransactionalHiveMetastore(io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore) TrinoException(io.trino.spi.TrinoException) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) SchemaTableName(io.trino.spi.connector.SchemaTableName)
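
checkIsPartitionedTable and checkPartitionColumns come from the Procedures helper class and are not shown here. A sketch of what the partitioned-table check might look like, assuming the metastore Table model used above; the message text and error code choice are assumptions:

private static void checkIsPartitionedTable(Table table) {
    // Hypothetical sketch: registering a partition only makes sense for a partitioned table
    if (table.getPartitionColumns().isEmpty()) {
        throw new TrinoException(INVALID_PROCEDURE_ARGUMENT,
                "Table is not partitioned: " + table.getDatabaseName() + "." + table.getTableName());
    }
}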

Example 64 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class SqlStandardAccessControlMetadata, method listTablePrivileges.

@Override
public List<GrantInfo> listTablePrivileges(ConnectorSession session, List<SchemaTableName> tableNames) {
    Set<HivePrincipal> principals = ThriftMetastoreUtil.listEnabledPrincipals(session.getIdentity(), metastore::listRoleGrants).collect(toImmutableSet());
    boolean isAdminRoleSet = hasAdminRole(principals);
    ImmutableList.Builder<GrantInfo> result = ImmutableList.builder();
    for (SchemaTableName tableName : tableNames) {
        try {
            result.addAll(buildGrants(principals, isAdminRoleSet, tableName));
        } catch (TableNotFoundException e) {
        // table disappeared during listing operation
        } catch (HiveViewNotSupportedException e) {
        // table is an unsupported hive view but shouldn't fail listTablePrivileges.
        }
    }
    return result.build();
}
Also used : TableNotFoundException(io.trino.spi.connector.TableNotFoundException) HivePrincipal(io.trino.plugin.hive.metastore.HivePrincipal) ImmutableList(com.google.common.collect.ImmutableList) GrantInfo(io.trino.spi.security.GrantInfo) SchemaTableName(io.trino.spi.connector.SchemaTableName) HiveViewNotSupportedException(io.trino.plugin.hive.HiveViewNotSupportedException)
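
hasAdminRole is a small predicate over the enabled principals. A plausible sketch, assuming the conventional Hive admin role name "admin":

private static boolean hasAdminRole(Set<HivePrincipal> principals) {
    // Assumption: "admin" is the role that permits listing every table's grants
    return principals.stream()
            .anyMatch(principal -> principal.getName().equalsIgnoreCase("admin"));
}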

Example 65 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class DefaultJdbcMetadata, method listTableColumns.

@Override
public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix) {
    ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> columns = ImmutableMap.builder();
    List<SchemaTableName> tables = prefix.toOptionalSchemaTableName()
            .<List<SchemaTableName>>map(ImmutableList::of)
            .orElseGet(() -> listTables(session, prefix.getSchema()));
    for (SchemaTableName tableName : tables) {
        try {
            jdbcClient.getTableHandle(session, tableName)
                    .ifPresent(tableHandle -> columns.put(tableName, getTableMetadata(session, tableHandle).getColumns()));
        } catch (TableNotFoundException | AccessDeniedException e) {
        // table disappeared during listing operation or user is not allowed to access it
        // these exceptions are ignored because listTableColumns is used for metadata queries (SELECT FROM information_schema)
        }
    }
    return columns.buildOrThrow();
}
Also used : TableNotFoundException(io.trino.spi.connector.TableNotFoundException) AccessDeniedException(io.trino.spi.security.AccessDeniedException) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) SchemaTableName(io.trino.spi.connector.SchemaTableName) ImmutableMap(com.google.common.collect.ImmutableMap) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap)
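
By contrast, a point lookup in the same class would let the exception propagate instead of swallowing it. A hypothetical helper (name and wiring are assumptions) showing the other side of the pattern:

private ConnectorTableMetadata getRequiredTableMetadata(ConnectorSession session, SchemaTableName tableName) {
    // Hypothetical: a direct lookup surfaces TableNotFoundException to the caller,
    // whereas listTableColumns above deliberately ignores it during bulk listing
    JdbcTableHandle handle = jdbcClient.getTableHandle(session, tableName)
            .orElseThrow(() -> new TableNotFoundException(tableName));
    return getTableMetadata(session, handle);
}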

Aggregations

TableNotFoundException (io.trino.spi.connector.TableNotFoundException): 84
SchemaTableName (io.trino.spi.connector.SchemaTableName): 65
TrinoException (io.trino.spi.TrinoException): 39
Table (io.trino.plugin.hive.metastore.Table): 33
ImmutableMap (com.google.common.collect.ImmutableMap): 27
ImmutableList (com.google.common.collect.ImmutableList): 26
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 26
List (java.util.List): 25
Optional (java.util.Optional): 24
HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext): 22
Path (org.apache.hadoop.fs.Path): 22
ColumnHandle (io.trino.spi.connector.ColumnHandle): 21
Map (java.util.Map): 21
Objects.requireNonNull (java.util.Objects.requireNonNull): 20
ConnectorSession (io.trino.spi.connector.ConnectorSession): 19
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 18
TupleDomain (io.trino.spi.predicate.TupleDomain): 18
Set (java.util.Set): 18
ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap): 17
CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName): 17