Example 11 with ConnectorIdentity

use of io.trino.spi.security.ConnectorIdentity in project trino by trinodb.

the class IcebergPageSourceProvider method createOrcPageSource.

private static ReaderPageSource createOrcPageSource(
        HdfsEnvironment hdfsEnvironment,
        ConnectorIdentity identity,
        Configuration configuration,
        Path path,
        long start,
        long length,
        long fileSize,
        List<IcebergColumnHandle> columns,
        TupleDomain<IcebergColumnHandle> effectivePredicate,
        OrcReaderOptions options,
        FileFormatDataSourceStats stats,
        TypeManager typeManager,
        Optional<NameMapping> nameMapping) {
    OrcDataSource orcDataSource = null;
    try {
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(identity, path, configuration);
        FSDataInputStream inputStream = hdfsEnvironment.doAs(identity, () -> fileSystem.open(path));
        orcDataSource = new HdfsOrcDataSource(new OrcDataSourceId(path.toString()), fileSize, options, inputStream, stats);
        OrcReader reader = OrcReader.createOrcReader(orcDataSource, options).orElseThrow(() -> new TrinoException(ICEBERG_BAD_DATA, "ORC file is zero length"));
        List<OrcColumn> fileColumns = reader.getRootColumn().getNestedColumns();
        if (nameMapping.isPresent() && !hasIds(reader.getRootColumn())) {
            fileColumns = fileColumns.stream()
                    .map(orcColumn -> setMissingFieldIds(orcColumn, nameMapping.get(), ImmutableList.of(orcColumn.getColumnName())))
                    .collect(toImmutableList());
        }
        Map<Integer, OrcColumn> fileColumnsByIcebergId = mapIdsToOrcFileColumns(fileColumns);
        TupleDomainOrcPredicateBuilder predicateBuilder = TupleDomainOrcPredicate.builder().setBloomFiltersEnabled(options.isBloomFiltersEnabled());
        Map<IcebergColumnHandle, Domain> effectivePredicateDomains = effectivePredicate.getDomains().orElseThrow(() -> new IllegalArgumentException("Effective predicate is none"));
        Optional<ReaderColumns> columnProjections = projectColumns(columns);
        Map<Integer, List<List<Integer>>> projectionsByFieldId = columns.stream()
                .collect(groupingBy(
                        column -> column.getBaseColumnIdentity().getId(),
                        mapping(IcebergColumnHandle::getPath, toUnmodifiableList())));
        List<IcebergColumnHandle> readColumns = columnProjections
                .map(readerColumns -> (List<IcebergColumnHandle>) readerColumns.get().stream()
                        .map(IcebergColumnHandle.class::cast)
                        .collect(toImmutableList()))
                .orElse(columns);
        List<OrcColumn> fileReadColumns = new ArrayList<>(readColumns.size());
        List<Type> fileReadTypes = new ArrayList<>(readColumns.size());
        List<ProjectedLayout> projectedLayouts = new ArrayList<>(readColumns.size());
        List<ColumnAdaptation> columnAdaptations = new ArrayList<>(readColumns.size());
        for (IcebergColumnHandle column : readColumns) {
            verify(column.isBaseColumn(), "Column projections must be based on a root column");
            OrcColumn orcColumn = fileColumnsByIcebergId.get(column.getId());
            if (orcColumn != null) {
                Type readType = getOrcReadType(column.getType(), typeManager);
                if (column.getType() == UUID && !"UUID".equals(orcColumn.getAttributes().get(ICEBERG_BINARY_TYPE))) {
                    throw new TrinoException(ICEBERG_BAD_DATA, format("Expected ORC column for UUID data to be annotated with %s=UUID: %s", ICEBERG_BINARY_TYPE, orcColumn));
                }
                List<List<Integer>> fieldIdProjections = projectionsByFieldId.get(column.getId());
                ProjectedLayout projectedLayout = IcebergOrcProjectedLayout.createProjectedLayout(orcColumn, fieldIdProjections);
                int sourceIndex = fileReadColumns.size();
                columnAdaptations.add(ColumnAdaptation.sourceColumn(sourceIndex));
                fileReadColumns.add(orcColumn);
                fileReadTypes.add(readType);
                projectedLayouts.add(projectedLayout);
                for (Map.Entry<IcebergColumnHandle, Domain> domainEntry : effectivePredicateDomains.entrySet()) {
                    IcebergColumnHandle predicateColumn = domainEntry.getKey();
                    OrcColumn predicateOrcColumn = fileColumnsByIcebergId.get(predicateColumn.getId());
                    if (predicateOrcColumn != null && column.getColumnIdentity().equals(predicateColumn.getBaseColumnIdentity())) {
                        predicateBuilder.addColumn(predicateOrcColumn.getColumnId(), domainEntry.getValue());
                    }
                }
            } else {
                columnAdaptations.add(ColumnAdaptation.nullColumn(column.getType()));
            }
        }
        AggregatedMemoryContext memoryUsage = newSimpleAggregatedMemoryContext();
        OrcDataSourceId orcDataSourceId = orcDataSource.getId();
        OrcRecordReader recordReader = reader.createRecordReader(
                fileReadColumns,
                fileReadTypes,
                projectedLayouts,
                predicateBuilder.build(),
                start,
                length,
                UTC,
                memoryUsage,
                INITIAL_BATCH_SIZE,
                exception -> handleException(orcDataSourceId, exception),
                new IdBasedFieldMapperFactory(readColumns));
        return new ReaderPageSource(
                new OrcPageSource(recordReader, columnAdaptations, orcDataSource, Optional.empty(), Optional.empty(), memoryUsage, stats),
                columnProjections);
    } catch (Exception e) {
        if (orcDataSource != null) {
            try {
                orcDataSource.close();
            } catch (IOException ignored) {
            }
        }
        if (e instanceof TrinoException) {
            throw (TrinoException) e;
        }
        String message = format("Error opening Iceberg split %s (offset=%s, length=%s): %s", path, start, length, e.getMessage());
        if (e instanceof BlockMissingException) {
            throw new TrinoException(ICEBERG_MISSING_DATA, message, e);
        }
        throw new TrinoException(ICEBERG_CANNOT_OPEN_SPLIT, message, e);
    }
}
Also used : FileSystem(org.apache.hadoop.fs.FileSystem) Maps.uniqueIndex(com.google.common.collect.Maps.uniqueIndex) ORC_ICEBERG_ID_KEY(io.trino.plugin.iceberg.TypeConverter.ORC_ICEBERG_ID_KEY) ColumnAdaptation(io.trino.plugin.hive.orc.OrcPageSource.ColumnAdaptation) FileStatus(org.apache.hadoop.fs.FileStatus) BlockMissingException(org.apache.hadoop.hdfs.BlockMissingException) ParquetDataSourceId(io.trino.parquet.ParquetDataSourceId) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) ICEBERG_CANNOT_OPEN_SPLIT(io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_CANNOT_OPEN_SPLIT) UUID(io.trino.spi.type.UuidType.UUID) ENGLISH(java.util.Locale.ENGLISH) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) FileFormatDataSourceStats(io.trino.plugin.hive.FileFormatDataSourceStats) HdfsEnvironment(io.trino.plugin.hive.HdfsEnvironment) ConnectorIdentity(io.trino.spi.security.ConnectorIdentity) ICEBERG_FILESYSTEM_ERROR(io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_FILESYSTEM_ERROR) Domain(io.trino.spi.predicate.Domain) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ReaderColumns(io.trino.plugin.hive.ReaderColumns) Set(java.util.Set) ReaderPageSource(io.trino.plugin.hive.ReaderPageSource) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) OrcReaderConfig(io.trino.plugin.hive.orc.OrcReaderConfig) ColumnDescriptor(org.apache.parquet.column.ColumnDescriptor) BlockMetaData(org.apache.parquet.hadoop.metadata.BlockMetaData) ColumnIO(org.apache.parquet.io.ColumnIO) IcebergSessionProperties.getOrcTinyStripeThreshold(io.trino.plugin.iceberg.IcebergSessionProperties.getOrcTinyStripeThreshold) ParquetReaderConfig(io.trino.plugin.hive.parquet.ParquetReaderConfig) ParquetCorruptionException(io.trino.parquet.ParquetCorruptionException) MappedField(org.apache.iceberg.mapping.MappedField) Collectors.groupingBy(java.util.stream.Collectors.groupingBy) IcebergSessionProperties.isOrcNestedLazy(io.trino.plugin.iceberg.IcebergSessionProperties.isOrcNestedLazy) IcebergSessionProperties.getOrcMaxBufferSize(io.trino.plugin.iceberg.IcebergSessionProperties.getOrcMaxBufferSize) HdfsParquetDataSource(io.trino.plugin.hive.parquet.HdfsParquetDataSource) ArrayList(java.util.ArrayList) Collectors.toUnmodifiableList(java.util.stream.Collectors.toUnmodifiableList) TupleDomainOrcPredicateBuilder(io.trino.orc.TupleDomainOrcPredicate.TupleDomainOrcPredicateBuilder) OrcPageSource(io.trino.plugin.hive.orc.OrcPageSource) IcebergSessionProperties.getOrcMaxMergeDistance(io.trino.plugin.iceberg.IcebergSessionProperties.getOrcMaxMergeDistance) ICEBERG_MISSING_DATA(io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_MISSING_DATA) AggregatedMemoryContext(io.trino.memory.context.AggregatedMemoryContext) ColumnHandle(io.trino.spi.connector.ColumnHandle) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) VARBINARY(io.trino.spi.type.VarbinaryType.VARBINARY) MappedFields(org.apache.iceberg.mapping.MappedFields) RichColumnDescriptor(io.trino.parquet.RichColumnDescriptor) OrcType(io.trino.orc.metadata.OrcType) Predicate(io.trino.parquet.predicate.Predicate) IcebergSessionProperties.isUseFileSizeFromMetadata(io.trino.plugin.iceberg.IcebergSessionProperties.isUseFileSizeFromMetadata) MapType(io.trino.spi.type.MapType) 
PredicateUtils.predicateMatches(io.trino.parquet.predicate.PredicateUtils.predicateMatches) ConnectorSplit(io.trino.spi.connector.ConnectorSplit) StandardTypes(io.trino.spi.type.StandardTypes) NameMappingParser(org.apache.iceberg.mapping.NameMappingParser) IcebergSessionProperties.getOrcLazyReadSmallRanges(io.trino.plugin.iceberg.IcebergSessionProperties.getOrcLazyReadSmallRanges) IcebergSessionProperties.getParquetMaxReadBlockSize(io.trino.plugin.iceberg.IcebergSessionProperties.getParquetMaxReadBlockSize) IOException(java.io.IOException) ConnectorSession(io.trino.spi.connector.ConnectorSession) UTC(org.joda.time.DateTimeZone.UTC) Field(io.trino.parquet.Field) Traverser(com.google.common.graph.Traverser) ParquetPageSource(io.trino.plugin.hive.parquet.ParquetPageSource) ProjectedLayout(io.trino.orc.OrcReader.ProjectedLayout) FileMetaData(org.apache.parquet.hadoop.metadata.FileMetaData) IcebergSessionProperties.getOrcStreamBufferSize(io.trino.plugin.iceberg.IcebergSessionProperties.getOrcStreamBufferSize) ParquetMetadata(org.apache.parquet.hadoop.metadata.ParquetMetadata) ParquetSchemaUtil(org.apache.iceberg.parquet.ParquetSchemaUtil) OrcColumn(io.trino.orc.OrcColumn) PredicateUtils.buildPredicate(io.trino.parquet.predicate.PredicateUtils.buildPredicate) MetadataReader(io.trino.parquet.reader.MetadataReader) ICEBERG_DOMAIN_COMPACTION_THRESHOLD(io.trino.plugin.iceberg.IcebergSplitManager.ICEBERG_DOMAIN_COMPACTION_THRESHOLD) OrcRecordReader(io.trino.orc.OrcRecordReader) NameMapping(org.apache.iceberg.mapping.NameMapping) Path(org.apache.hadoop.fs.Path) OrcDataSource(io.trino.orc.OrcDataSource) ReaderProjectionsAdapter(io.trino.plugin.hive.ReaderProjectionsAdapter) RowType(io.trino.spi.type.RowType) ICEBERG_BAD_DATA(io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_BAD_DATA) ImmutableMap(com.google.common.collect.ImmutableMap) INITIAL_BATCH_SIZE(io.trino.orc.OrcReader.INITIAL_BATCH_SIZE) ParquetReader(io.trino.parquet.reader.ParquetReader) FieldContext(io.trino.plugin.iceberg.IcebergParquetColumnIOConverter.FieldContext) ICEBERG_CURSOR_ERROR(io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_CURSOR_ERROR) TrinoException(io.trino.spi.TrinoException) ArrayType(io.trino.spi.type.ArrayType) ParquetTypeUtils.getColumnIO(io.trino.parquet.ParquetTypeUtils.getColumnIO) Collectors(java.util.stream.Collectors) String.format(java.lang.String.format) Preconditions.checkState(com.google.common.base.Preconditions.checkState) Objects(java.util.Objects) OrcDataSourceId(io.trino.orc.OrcDataSourceId) MessageType(org.apache.parquet.schema.MessageType) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) List(java.util.List) DynamicFilter(io.trino.spi.connector.DynamicFilter) Optional(java.util.Optional) IcebergSessionProperties.getOrcMaxReadBlockSize(io.trino.plugin.iceberg.IcebergSessionProperties.getOrcMaxReadBlockSize) MessageColumnIO(org.apache.parquet.io.MessageColumnIO) AggregatedMemoryContext.newSimpleAggregatedMemoryContext(io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext) Type(io.trino.spi.type.Type) HashMap(java.util.HashMap) TupleDomainOrcPredicate(io.trino.orc.TupleDomainOrcPredicate) Function(java.util.function.Function) Inject(javax.inject.Inject) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) OrcReaderOptions(io.trino.orc.OrcReaderOptions) Objects.requireNonNull(java.util.Objects.requireNonNull) Collectors.mapping(java.util.stream.Collectors.mapping) 
IcebergSessionProperties.isOrcBloomFiltersEnabled(io.trino.plugin.iceberg.IcebergSessionProperties.isOrcBloomFiltersEnabled) HdfsOrcDataSource(io.trino.plugin.hive.orc.HdfsOrcDataSource) ParquetReaderOptions(io.trino.parquet.ParquetReaderOptions) OrcReader(io.trino.orc.OrcReader) ConnectorPageSourceProvider(io.trino.spi.connector.ConnectorPageSourceProvider) ICEBERG_BINARY_TYPE(io.trino.plugin.iceberg.TypeConverter.ICEBERG_BINARY_TYPE) TupleDomain(io.trino.spi.predicate.TupleDomain) OrcReader.fullyProjectedLayout(io.trino.orc.OrcReader.fullyProjectedLayout) OrcCorruptionException(io.trino.orc.OrcCorruptionException) Collectors.toList(java.util.stream.Collectors.toList) ParquetTypeUtils.getDescriptors(io.trino.parquet.ParquetTypeUtils.getDescriptors) ParquetDataSource(io.trino.parquet.ParquetDataSource) TypeManager(io.trino.spi.type.TypeManager) ConnectorTransactionHandle(io.trino.spi.connector.ConnectorTransactionHandle)
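
The pattern worth noting in this example is that every filesystem call is routed through the ConnectorIdentity: getFileSystem binds the FileSystem to the identity, and doAs runs the actual I/O as that end user (relevant under impersonation or Kerberos). A minimal sketch of the same pattern, reusing only the HdfsEnvironment calls shown above; fileLength is a hypothetical helper, not part of the Trino source:

private static long fileLength(HdfsEnvironment hdfsEnvironment, ConnectorSession session, Configuration configuration, Path path)
        throws IOException
{
    ConnectorIdentity identity = session.getIdentity();
    FileSystem fileSystem = hdfsEnvironment.getFileSystem(identity, path, configuration);
    // doAs executes the action as the user carried by the identity,
    // not as the Trino server principal
    return hdfsEnvironment.doAs(identity, () -> fileSystem.getFileStatus(path).getLen());
}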

Example 12 with ConnectorIdentity

use of io.trino.spi.security.ConnectorIdentity in project trino by trinodb.

the class FileBasedAccessControl method checkCanCreateViewWithSelectFromColumns.

@Override
public void checkCanCreateViewWithSelectFromColumns(ConnectorSecurityContext context, SchemaTableName tableName, Set<String> columnNames) {
    if (INFORMATION_SCHEMA_NAME.equals(tableName.getSchemaName())) {
        return;
    }
    ConnectorIdentity identity = context.getIdentity();
    TableAccessControlRule rule = tableRules.stream()
            .filter(tableRule -> tableRule.matches(identity.getUser(), identity.getEnabledSystemRoles(), identity.getGroups(), tableName))
            .findFirst()
            .orElse(null);
    if (rule == null || !rule.canSelectColumns(columnNames)) {
        denySelectTable(tableName.toString());
    }
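    // denySelectTable always throws AccessDeniedException, so rule is non-null past this point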
    if (!rule.getPrivileges().contains(GRANT_SELECT)) {
        denyCreateViewWithSelect(tableName.toString(), context.getIdentity());
    }
}
Also used : ConnectorIdentity(io.trino.spi.security.ConnectorIdentity)
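
The lookup above is first-match-wins: the first rule whose user, enabled-role, and group patterns match the identity decides the outcome, and no matching rule means access is denied. A minimal sketch of that lookup factored into a helper; findFirstMatchingRule is hypothetical, but it uses only the calls that appear above:

private Optional<TableAccessControlRule> findFirstMatchingRule(ConnectorIdentity identity, SchemaTableName tableName)
{
    return tableRules.stream()
            // the first rule matching the user, its enabled system roles, and its groups wins
            .filter(rule -> rule.matches(identity.getUser(), identity.getEnabledSystemRoles(), identity.getGroups(), tableName))
            .findFirst();
}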

Example 13 with ConnectorIdentity

use of io.trino.spi.security.ConnectorIdentity in project trino by trinodb.

the class SqlStandardAccessControl method listApplicableTablePrivileges.

private Stream<HivePrivilegeInfo> listApplicableTablePrivileges(ConnectorSecurityContext context, String databaseName, String tableName, ConnectorIdentity identity) {
    String user = identity.getUser();
    HivePrincipal userPrincipal = new HivePrincipal(USER, user);
    Stream<HivePrincipal> principals = Stream.concat(
            Stream.of(userPrincipal),
            listApplicableRoles(userPrincipal, hivePrincipal -> metastore.listRoleGrants(context, hivePrincipal))
                    .map(role -> new HivePrincipal(ROLE, role.getRoleName())));
    return listTablePrivileges(context, databaseName, tableName, principals);
}
Also used : AccessDeniedException.denyAddColumn(io.trino.spi.security.AccessDeniedException.denyAddColumn) DEFAULT_DATABASE_NAME(io.trino.plugin.hive.metastore.Database.DEFAULT_DATABASE_NAME) SchemaRoutineName(io.trino.spi.connector.SchemaRoutineName) AccessDeniedException.denySetCatalogSessionProperty(io.trino.spi.security.AccessDeniedException.denySetCatalogSessionProperty) AccessDeniedException.denyDropTable(io.trino.spi.security.AccessDeniedException.denyDropTable) AccessDeniedException.denySetTableProperties(io.trino.spi.security.AccessDeniedException.denySetTableProperties) USER(io.trino.spi.security.PrincipalType.USER) AccessDeniedException.denySetMaterializedViewProperties(io.trino.spi.security.AccessDeniedException.denySetMaterializedViewProperties) Database(io.trino.plugin.hive.metastore.Database) AccessDeniedException.denyInsertTable(io.trino.spi.security.AccessDeniedException.denyInsertTable) ThriftMetastoreUtil.listEnabledPrincipals(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.listEnabledPrincipals) AccessDeniedException.denyExecuteTableProcedure(io.trino.spi.security.AccessDeniedException.denyExecuteTableProcedure) AccessDeniedException.denyShowCreateTable(io.trino.spi.security.AccessDeniedException.denyShowCreateTable) AccessDeniedException.denyRevokeTablePrivilege(io.trino.spi.security.AccessDeniedException.denyRevokeTablePrivilege) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) AccessDeniedException.denyUpdateTableColumns(io.trino.spi.security.AccessDeniedException.denyUpdateTableColumns) Map(java.util.Map) AccessDeniedException.denyCreateSchema(io.trino.spi.security.AccessDeniedException.denyCreateSchema) AccessDeniedException.denyCreateMaterializedView(io.trino.spi.security.AccessDeniedException.denyCreateMaterializedView) AccessDeniedException.denyCreateTable(io.trino.spi.security.AccessDeniedException.denyCreateTable) AccessDeniedException.denyDeleteTable(io.trino.spi.security.AccessDeniedException.denyDeleteTable) AccessDeniedException.denyDropView(io.trino.spi.security.AccessDeniedException.denyDropView) AccessDeniedException.denyRenameSchema(io.trino.spi.security.AccessDeniedException.denyRenameSchema) AccessDeniedException.denyShowRoles(io.trino.spi.security.AccessDeniedException.denyShowRoles) Collectors.toSet(java.util.stream.Collectors.toSet) AccessDeniedException.denyShowColumns(io.trino.spi.security.AccessDeniedException.denyShowColumns) UPDATE(io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege.UPDATE) INSERT(io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege.INSERT) AccessDeniedException.denyRenameMaterializedView(io.trino.spi.security.AccessDeniedException.denyRenameMaterializedView) ImmutableSet(com.google.common.collect.ImmutableSet) ConnectorIdentity(io.trino.spi.security.ConnectorIdentity) AccessDeniedException.denySetTableAuthorization(io.trino.spi.security.AccessDeniedException.denySetTableAuthorization) AccessDeniedException.denyDropSchema(io.trino.spi.security.AccessDeniedException.denyDropSchema) AccessDeniedException.denyTruncateTable(io.trino.spi.security.AccessDeniedException.denyTruncateTable) ViewExpression(io.trino.spi.security.ViewExpression) ConnectorAccessControl(io.trino.spi.connector.ConnectorAccessControl) Set(java.util.Set) TrinoException(io.trino.spi.TrinoException) SchemaTableName(io.trino.spi.connector.SchemaTableName) AccessDeniedException.denySetRole(io.trino.spi.security.AccessDeniedException.denySetRole) 
AccessDeniedException.denyShowCreateSchema(io.trino.spi.security.AccessDeniedException.denyShowCreateSchema) Stream(java.util.stream.Stream) TrinoPrincipal(io.trino.spi.security.TrinoPrincipal) OWNERSHIP(io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege.OWNERSHIP) AccessDeniedException.denyRefreshMaterializedView(io.trino.spi.security.AccessDeniedException.denyRefreshMaterializedView) AccessDeniedException.denyCreateRole(io.trino.spi.security.AccessDeniedException.denyCreateRole) Optional(java.util.Optional) HivePrivilegeInfo(io.trino.plugin.hive.metastore.HivePrivilegeInfo) HivePrincipal(io.trino.plugin.hive.metastore.HivePrincipal) AccessDeniedException(io.trino.spi.security.AccessDeniedException) HivePrivilegeInfo.toHivePrivilege(io.trino.plugin.hive.metastore.HivePrivilegeInfo.toHivePrivilege) AccessDeniedException.denyDropColumn(io.trino.spi.security.AccessDeniedException.denyDropColumn) Type(io.trino.spi.type.Type) ThriftMetastoreUtil.isRoleApplicable(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.isRoleApplicable) AccessDeniedException.denyDropRole(io.trino.spi.security.AccessDeniedException.denyDropRole) AccessDeniedException.denySetViewAuthorization(io.trino.spi.security.AccessDeniedException.denySetViewAuthorization) Inject(javax.inject.Inject) AccessDeniedException.denyCommentColumn(io.trino.spi.security.AccessDeniedException.denyCommentColumn) AccessDeniedException.denySetSchemaAuthorization(io.trino.spi.security.AccessDeniedException.denySetSchemaAuthorization) AccessDeniedException.denyCreateViewWithSelect(io.trino.spi.security.AccessDeniedException.denyCreateViewWithSelect) AccessDeniedException.denyDropMaterializedView(io.trino.spi.security.AccessDeniedException.denyDropMaterializedView) Objects.requireNonNull(java.util.Objects.requireNonNull) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) AccessDeniedException.denyRevokeRoles(io.trino.spi.security.AccessDeniedException.denyRevokeRoles) Privilege(io.trino.spi.security.Privilege) AccessDeniedException.denyRenameTable(io.trino.spi.security.AccessDeniedException.denyRenameTable) AccessDeniedException.denyShowRoleAuthorizationDescriptors(io.trino.spi.security.AccessDeniedException.denyShowRoleAuthorizationDescriptors) ConnectorSecurityContext(io.trino.spi.connector.ConnectorSecurityContext) AccessDeniedException.denySelectTable(io.trino.spi.security.AccessDeniedException.denySelectTable) AccessDeniedException.denyCreateView(io.trino.spi.security.AccessDeniedException.denyCreateView) DELETE(io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege.DELETE) AccessDeniedException.denyCommentTable(io.trino.spi.security.AccessDeniedException.denyCommentTable) CatalogName(io.trino.plugin.base.CatalogName) ROLE(io.trino.spi.security.PrincipalType.ROLE) RoleGrant(io.trino.spi.security.RoleGrant) SELECT(io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege.SELECT) AccessDeniedException.denyRenameColumn(io.trino.spi.security.AccessDeniedException.denyRenameColumn) AccessDeniedException.denyGrantRoles(io.trino.spi.security.AccessDeniedException.denyGrantRoles) ThriftMetastoreUtil.listApplicableRoles(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.listApplicableRoles) ThriftMetastoreUtil.isRoleEnabled(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.isRoleEnabled) AccessDeniedException.denyRenameView(io.trino.spi.security.AccessDeniedException.denyRenameView) 
AccessDeniedException.denyGrantTablePrivilege(io.trino.spi.security.AccessDeniedException.denyGrantTablePrivilege) HivePrivilege(io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege)
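
A typical consumer of this stream simply checks for a required privilege among the applicable ones. A minimal sketch, assuming HivePrivilegeInfo exposes its privilege via getHivePrivilege(); hasTablePrivilege is a hypothetical helper:

private boolean hasTablePrivilege(ConnectorSecurityContext context, String databaseName, String tableName, HivePrivilege required)
{
    // the identity's own grants plus grants of every applicable role
    return listApplicableTablePrivileges(context, databaseName, tableName, context.getIdentity())
            .anyMatch(privilegeInfo -> privilegeInfo.getHivePrivilege() == required);
}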

Example 14 with ConnectorIdentity

use of io.trino.spi.security.ConnectorIdentity in project trino by trinodb.

the class BaseJdbcClient method beginInsertTable.

@Override
public JdbcOutputTableHandle beginInsertTable(ConnectorSession session, JdbcTableHandle tableHandle, List<JdbcColumnHandle> columns) {
    SchemaTableName schemaTableName = tableHandle.asPlainTable().getSchemaTableName();
    ConnectorIdentity identity = session.getIdentity();
    try (Connection connection = connectionFactory.openConnection(session)) {
        String remoteSchema = identifierMapping.toRemoteSchemaName(identity, connection, schemaTableName.getSchemaName());
        String remoteTable = identifierMapping.toRemoteTableName(identity, connection, remoteSchema, schemaTableName.getTableName());
        String catalog = connection.getCatalog();
        ImmutableList.Builder<String> columnNames = ImmutableList.builder();
        ImmutableList.Builder<Type> columnTypes = ImmutableList.builder();
        ImmutableList.Builder<JdbcTypeHandle> jdbcColumnTypes = ImmutableList.builder();
        for (JdbcColumnHandle column : columns) {
            columnNames.add(column.getColumnName());
            columnTypes.add(column.getColumnType());
            jdbcColumnTypes.add(column.getJdbcTypeHandle());
        }
        if (isNonTransactionalInsert(session)) {
            return new JdbcOutputTableHandle(catalog, remoteSchema, remoteTable, columnNames.build(), columnTypes.build(), Optional.of(jdbcColumnTypes.build()), remoteTable);
        }
        String remoteTemporaryTableName = identifierMapping.toRemoteTableName(identity, connection, remoteSchema, generateTemporaryTableName());
        copyTableSchema(connection, catalog, remoteSchema, remoteTable, remoteTemporaryTableName, columnNames.build());
        return new JdbcOutputTableHandle(catalog, remoteSchema, remoteTable, columnNames.build(), columnTypes.build(), Optional.of(jdbcColumnTypes.build()), remoteTemporaryTableName);
    } catch (SQLException e) {
        throw new TrinoException(JDBC_ERROR, e);
    }
}
Also used : SQLException(java.sql.SQLException) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ImmutableList(com.google.common.collect.ImmutableList) Connection(java.sql.Connection) ConnectorIdentity(io.trino.spi.security.ConnectorIdentity) SchemaTableName(io.trino.spi.connector.SchemaTableName) Type(io.trino.spi.type.Type) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) VarcharType(io.trino.spi.type.VarcharType) JoinType(io.trino.spi.connector.JoinType) CharType(io.trino.spi.type.CharType) TrinoException(io.trino.spi.TrinoException)
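
Note the two paths above: a non-transactional insert writes directly into the target table (the handle's temporary table name equals remoteTable), while the default path stages rows in a schema-copy under a generated temporary name, so they can be copied into the target when the insert is finished. The ConnectorIdentity matters because remote identifier resolution can be user-specific. A minimal sketch of that resolution, factored into a hypothetical helper that reuses only the identifierMapping calls shown above:

private String resolveRemoteTableName(ConnectorSession session, Connection connection, SchemaTableName table)
{
    ConnectorIdentity identity = session.getIdentity();
    // schema is resolved first, then the table name within that remote schema
    String remoteSchema = identifierMapping.toRemoteSchemaName(identity, connection, table.getSchemaName());
    return identifierMapping.toRemoteTableName(identity, connection, remoteSchema, table.getTableName());
}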

Example 15 with ConnectorIdentity

use of io.trino.spi.security.ConnectorIdentity in project trino by trinodb.

the class BaseJdbcClient method renameTable.

protected void renameTable(ConnectorSession session, String catalogName, String remoteSchemaName, String remoteTableName, SchemaTableName newTable) {
    try (Connection connection = connectionFactory.openConnection(session)) {
        String newSchemaName = newTable.getSchemaName();
        String newTableName = newTable.getTableName();
        ConnectorIdentity identity = session.getIdentity();
        String newRemoteSchemaName = identifierMapping.toRemoteSchemaName(identity, connection, newSchemaName);
        String newRemoteTableName = identifierMapping.toRemoteTableName(identity, connection, newRemoteSchemaName, newTableName);
        String sql = format("ALTER TABLE %s RENAME TO %s", quoted(catalogName, remoteSchemaName, remoteTableName), quoted(catalogName, newRemoteSchemaName, newRemoteTableName));
        execute(connection, sql);
    } catch (SQLException e) {
        throw new TrinoException(JDBC_ERROR, e);
    }
}
Also used : SQLException(java.sql.SQLException) Connection(java.sql.Connection) ConnectorIdentity(io.trino.spi.security.ConnectorIdentity) TrinoException(io.trino.spi.TrinoException)
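
For illustration, assuming the default double-quote identifier quoting in quoted(...) and hypothetical names, the generated statement has the form:

ALTER TABLE "mydb"."old_schema"."old_table" RENAME TO "mydb"."new_schema"."new_table"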

Aggregations

Types most often used together with ConnectorIdentity across these examples (usage counts):

ConnectorIdentity (io.trino.spi.security.ConnectorIdentity) 21
TrinoException (io.trino.spi.TrinoException) 16
Type (io.trino.spi.type.Type) 12
Map (java.util.Map) 11
Optional (java.util.Optional) 11
ImmutableList (com.google.common.collect.ImmutableList) 10
Connection (java.sql.Connection) 10
SchemaTableName (io.trino.spi.connector.SchemaTableName) 9
List (java.util.List) 9
Set (java.util.Set) 9
Objects.requireNonNull (java.util.Objects.requireNonNull) 8
Inject (javax.inject.Inject) 8
ImmutableMap (com.google.common.collect.ImmutableMap) 7
ImmutableSet (com.google.common.collect.ImmutableSet) 7
ImmutableSet.toImmutableSet (com.google.common.collect.ImmutableSet.toImmutableSet) 7
NOT_SUPPORTED (io.trino.spi.StandardErrorCode.NOT_SUPPORTED) 7
String.format (java.lang.String.format) 7
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument) 6
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList) 6
ConnectorSession (io.trino.spi.connector.ConnectorSession) 6