
Example 1 with INVALID_PROCEDURE_ARGUMENT

Use of io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT in project trino by trinodb.

From the class SyncPartitionMetadataProcedure, method doSyncPartitionMetadata:

private void doSyncPartitionMetadata(ConnectorSession session, ConnectorAccessControl accessControl, String schemaName, String tableName, String mode, boolean caseSensitive) {
    SyncMode syncMode = toSyncMode(mode);
    HdfsContext hdfsContext = new HdfsContext(session);
    SemiTransactionalHiveMetastore metastore = hiveMetadataFactory.create(session.getIdentity(), true).getMetastore();
    SchemaTableName schemaTableName = new SchemaTableName(schemaName, tableName);
    Table table = metastore.getTable(schemaName, tableName).orElseThrow(() -> new TableNotFoundException(schemaTableName));
    if (table.getPartitionColumns().isEmpty()) {
        throw new TrinoException(INVALID_PROCEDURE_ARGUMENT, "Table is not partitioned: " + schemaTableName);
    }
    if (syncMode == SyncMode.ADD || syncMode == SyncMode.FULL) {
        accessControl.checkCanInsertIntoTable(null, new SchemaTableName(schemaName, tableName));
    }
    if (syncMode == SyncMode.DROP || syncMode == SyncMode.FULL) {
        accessControl.checkCanDeleteFromTable(null, new SchemaTableName(schemaName, tableName));
    }
    Path tableLocation = new Path(table.getStorage().getLocation());
    Set<String> partitionsToAdd;
    Set<String> partitionsToDrop;
    try {
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(hdfsContext, tableLocation);
        List<String> partitionsInMetastore = metastore.getPartitionNames(schemaName, tableName).orElseThrow(() -> new TableNotFoundException(schemaTableName));
        List<String> partitionsInFileSystem = listDirectory(
                fileSystem,
                fileSystem.getFileStatus(tableLocation),
                table.getPartitionColumns(),
                table.getPartitionColumns().size(),
                caseSensitive).stream()
                .map(fileStatus -> fileStatus.getPath().toUri())
                .map(uri -> tableLocation.toUri().relativize(uri).getPath())
                .collect(toImmutableList());
        // partitions in file system but not in metastore
        partitionsToAdd = difference(partitionsInFileSystem, partitionsInMetastore);
        // partitions in metastore but not in file system
        partitionsToDrop = difference(partitionsInMetastore, partitionsInFileSystem);
    } catch (IOException e) {
        throw new TrinoException(HIVE_FILESYSTEM_ERROR, e);
    }
    syncPartitions(partitionsToAdd, partitionsToDrop, syncMode, metastore, session, table);
}
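
For context, a procedure body like this is exposed to the engine through the io.trino.spi.procedure.Procedure SPI, which pairs a schema-qualified name and a list of declared arguments with a MethodHandle bound to the implementation. The sketch below is a simplified, hypothetical registration (class name, argument names, and the default value are illustrative, not copied from the real SyncPartitionMetadataProcedure):

// Hypothetical sketch of registering the procedure body above via the Trino SPI.
// Argument names and defaults are illustrative; the real class differs in detail.
import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.VarcharType.VARCHAR;

import com.google.common.collect.ImmutableList;
import io.trino.spi.connector.ConnectorAccessControl;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.procedure.Procedure;
import io.trino.spi.procedure.Procedure.Argument;
import java.lang.invoke.MethodHandle;
import javax.inject.Provider;

public class SyncPartitionMetadataProcedureSketch
        implements Provider<Procedure>
{
    // Bind a public entry point; it would delegate to the private doSyncPartitionMetadata shown above
    private static final MethodHandle SYNC_PARTITION_METADATA = methodHandle(
            SyncPartitionMetadataProcedureSketch.class,
            "syncPartitionMetadata",
            ConnectorSession.class,
            ConnectorAccessControl.class,
            String.class,
            String.class,
            String.class,
            boolean.class);

    @Override
    public Procedure get()
    {
        return new Procedure(
                "system",
                "sync_partition_metadata",
                ImmutableList.of(
                        new Argument("SCHEMA_NAME", VARCHAR),
                        new Argument("TABLE_NAME", VARCHAR),
                        new Argument("MODE", VARCHAR),
                        // optional argument with a default value
                        new Argument("CASE_SENSITIVE", BOOLEAN, false, Boolean.TRUE)),
                SYNC_PARTITION_METADATA.bindTo(this));
    }

    public void syncPartitionMetadata(ConnectorSession session, ConnectorAccessControl accessControl,
            String schemaName, String tableName, String mode, boolean caseSensitive)
    {
        // delegates to the doSyncPartitionMetadata body shown above (omitted in this sketch)
    }
}

Once registered, the procedure is invoked with CALL, and the engine routes the call through CallTask, as shown in Example 2.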

Example 2 with INVALID_PROCEDURE_ARGUMENT

Use of io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT in project trino by trinodb.

From the class CallTask, method execute:

@Override
public ListenableFuture<Void> execute(Call call, QueryStateMachine stateMachine, List<Expression> parameters, WarningCollector warningCollector) {
    if (!transactionManager.isAutoCommit(stateMachine.getSession().getRequiredTransactionId())) {
        throw new TrinoException(NOT_SUPPORTED, "Procedures cannot be called within a transaction (use autocommit mode)");
    }
    Session session = stateMachine.getSession();
    QualifiedObjectName procedureName = createQualifiedObjectName(session, call, call.getName());
    CatalogName catalogName = plannerContext.getMetadata().getCatalogHandle(stateMachine.getSession(), procedureName.getCatalogName())
            .orElseThrow(() -> semanticException(CATALOG_NOT_FOUND, call, "Catalog '%s' does not exist", procedureName.getCatalogName()));
    Procedure procedure = procedureRegistry.resolve(catalogName, procedureName.asSchemaTableName());
    // map declared argument names to positions
    Map<String, Integer> positions = new HashMap<>();
    for (int i = 0; i < procedure.getArguments().size(); i++) {
        positions.put(procedure.getArguments().get(i).getName(), i);
    }
    // per specification, do not allow mixing argument types
    Predicate<CallArgument> hasName = argument -> argument.getName().isPresent();
    boolean anyNamed = call.getArguments().stream().anyMatch(hasName);
    boolean allNamed = call.getArguments().stream().allMatch(hasName);
    if (anyNamed && !allNamed) {
        throw semanticException(INVALID_ARGUMENTS, call, "Named and positional arguments cannot be mixed");
    }
    // get the argument names in call order
    Map<String, CallArgument> names = new LinkedHashMap<>();
    for (int i = 0; i < call.getArguments().size(); i++) {
        CallArgument argument = call.getArguments().get(i);
        if (argument.getName().isPresent()) {
            String name = argument.getName().get().getCanonicalValue();
            if (names.put(name, argument) != null) {
                throw semanticException(INVALID_ARGUMENTS, argument, "Duplicate procedure argument: %s", name);
            }
            if (!positions.containsKey(name)) {
                throw semanticException(INVALID_ARGUMENTS, argument, "Unknown argument name: %s", name);
            }
        } else if (i < procedure.getArguments().size()) {
            names.put(procedure.getArguments().get(i).getName(), argument);
        } else {
            throw semanticException(INVALID_ARGUMENTS, call, "Too many arguments for procedure");
        }
    }
    procedure.getArguments().stream()
            .filter(Argument::isRequired)
            .filter(argument -> !names.containsKey(argument.getName()))
            .map(Argument::getName)
            .findFirst()
            .ifPresent(argument -> {
                throw semanticException(INVALID_ARGUMENTS, call, "Required procedure argument '%s' is missing", argument);
            });
    // get argument values
    Object[] values = new Object[procedure.getArguments().size()];
    Map<NodeRef<Parameter>, Expression> parameterLookup = parameterExtractor(call, parameters);
    for (Entry<String, CallArgument> entry : names.entrySet()) {
        CallArgument callArgument = entry.getValue();
        int index = positions.get(entry.getKey());
        Argument argument = procedure.getArguments().get(index);
        Expression expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(parameterLookup), callArgument.getValue());
        Type type = argument.getType();
        Object value = evaluateConstantExpression(expression, type, plannerContext, session, accessControl, parameterLookup);
        values[index] = toTypeObjectValue(session, type, value);
    }
    // fill values with optional arguments defaults
    for (int i = 0; i < procedure.getArguments().size(); i++) {
        Argument argument = procedure.getArguments().get(i);
        if (!names.containsKey(argument.getName())) {
            verify(argument.isOptional());
            values[i] = toTypeObjectValue(session, argument.getType(), argument.getDefaultValue());
        }
    }
    // validate arguments
    MethodType methodType = procedure.getMethodHandle().type();
    for (int i = 0; i < procedure.getArguments().size(); i++) {
        if ((values[i] == null) && methodType.parameterType(i).isPrimitive()) {
            String name = procedure.getArguments().get(i).getName();
            throw new TrinoException(INVALID_PROCEDURE_ARGUMENT, "Procedure argument cannot be null: " + name);
        }
    }
    // insert session argument
    List<Object> arguments = new ArrayList<>();
    Iterator<Object> valuesIterator = asList(values).iterator();
    for (Class<?> type : methodType.parameterList()) {
        if (ConnectorSession.class.equals(type)) {
            arguments.add(session.toConnectorSession(catalogName));
        } else if (ConnectorAccessControl.class.equals(type)) {
            arguments.add(new InjectedConnectorAccessControl(accessControl, session.toSecurityContext(), catalogName.getCatalogName()));
        } else {
            arguments.add(valuesIterator.next());
        }
    }
    accessControl.checkCanExecuteProcedure(session.toSecurityContext(), procedureName);
    stateMachine.setRoutines(ImmutableList.of(new RoutineInfo(procedureName.getObjectName(), session.getUser())));
    try {
        procedure.getMethodHandle().invokeWithArguments(arguments);
    } catch (Throwable t) {
        if (t instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        throwIfInstanceOf(t, TrinoException.class);
        throw new TrinoException(PROCEDURE_CALL_FAILED, t);
    }
    return immediateVoidFuture();
}
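
The INVALID_PROCEDURE_ARGUMENT check near the end ("Procedure argument cannot be null") exists because invokeWithArguments cannot pass null where the bound method declares a primitive parameter. A minimal standalone sketch of the same idea, using only the JDK (the method signature, argument names, and exception type are made up for illustration; the real code throws TrinoException):

import java.lang.invoke.MethodType;
import java.util.Arrays;
import java.util.List;

public class NullArgumentCheckSketch
{
    public static void main(String[] args)
    {
        // Signature of a hypothetical procedure body: void body(String schemaName, boolean caseSensitive)
        MethodType methodType = MethodType.methodType(void.class, String.class, boolean.class);
        List<String> argumentNames = Arrays.asList("schema_name", "case_sensitive");
        Object[] values = {"web", null};

        for (int i = 0; i < values.length; i++) {
            // null cannot be unboxed into a primitive parameter such as boolean,
            // so it has to be rejected before MethodHandle.invokeWithArguments is attempted
            if (values[i] == null && methodType.parameterType(i).isPrimitive()) {
                throw new IllegalArgumentException("Procedure argument cannot be null: " + argumentNames.get(i));
            }
        }
    }
}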

Example 3 with INVALID_PROCEDURE_ARGUMENT

Use of io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT in project trino by trinodb.

From the class DropStatsProcedure, method doDropStats:

private void doDropStats(ConnectorSession session, ConnectorAccessControl accessControl, String schema, String table, List<?> partitionValues) {
    TransactionalMetadata hiveMetadata = hiveMetadataFactory.create(session.getIdentity(), true);
    HiveTableHandle handle = (HiveTableHandle) hiveMetadata.getTableHandle(session, new SchemaTableName(schema, table));
    if (handle == null) {
        throw new TrinoException(INVALID_PROCEDURE_ARGUMENT, format("Table '%s' does not exist", new SchemaTableName(schema, table)));
    }
    accessControl.checkCanInsertIntoTable(null, new SchemaTableName(schema, table));
    Map<String, ColumnHandle> columns = hiveMetadata.getColumnHandles(session, handle);
    List<String> partitionColumns = columns.values().stream()
            .map(HiveColumnHandle.class::cast)
            .filter(HiveColumnHandle::isPartitionKey)
            .map(HiveColumnHandle::getName)
            .collect(toImmutableList());
    HiveMetastoreClosure metastore = hiveMetadata.getMetastore().unsafeGetRawHiveMetastoreClosure();
    if (partitionValues != null) {
        // drop stats for specified partitions
        List<List<String>> partitionStringValues = partitionValues.stream()
                .map(DropStatsProcedure::validateParameterType)
                .collect(toImmutableList());
        validatePartitions(partitionStringValues, partitionColumns);
        partitionStringValues.forEach(values -> metastore.updatePartitionStatistics(
                schema, table, makePartName(partitionColumns, values), stats -> PartitionStatistics.empty()));
    } else {
        // no partition specified, so drop stats for the entire table
        if (partitionColumns.isEmpty()) {
            // for non-partitioned tables, just wipe table stats
            metastore.updateTableStatistics(schema, table, NO_ACID_TRANSACTION, stats -> PartitionStatistics.empty());
        } else {
            // the table is partitioned; remove stats for every partition
            metastore.getPartitionNamesByFilter(handle.getSchemaName(), handle.getTableName(), partitionColumns, TupleDomain.all())
                    .ifPresent(partitions -> partitions.forEach(partitionName ->
                            metastore.updatePartitionStatistics(schema, table, partitionName, stats -> PartitionStatistics.empty())));
        }
    }
    hiveMetadata.commit();
}
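
When dropping stats for specific partitions, makePartName joins the partition columns and values into a Hive-style partition name such as ds=2016-08-09/country=US, which then keys the updatePartitionStatistics call. The helper below is only an illustrative stand-in for org.apache.hadoop.hive.metastore.utils.FileUtils.makePartName; it shows the shape of the output but skips the escaping the real utility performs:

import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;

public class PartitionNameSketch
{
    // Illustrative stand-in for FileUtils.makePartName: joins column=value pairs with '/'
    static String makePartitionName(List<String> columns, List<String> values)
    {
        if (columns.size() != values.size()) {
            throw new IllegalArgumentException("Partition value count does not match partition column count");
        }
        StringJoiner joiner = new StringJoiner("/");
        for (int i = 0; i < columns.size(); i++) {
            joiner.add(columns.get(i) + "=" + values.get(i));
        }
        return joiner.toString();
    }

    public static void main(String[] args)
    {
        // e.g. the partition key used when wiping stats for a single partition
        System.out.println(makePartitionName(Arrays.asList("ds", "country"), Arrays.asList("2016-08-09", "US")));
        // prints: ds=2016-08-09/country=US
    }
}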

Example 4 with INVALID_PROCEDURE_ARGUMENT

Use of io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT in project trino by trinodb.

From the class TableProceduresPropertyManager, method getProperties:

public Map<String, Object> getProperties(CatalogName catalog, String procedureName, Map<String, Expression> sqlPropertyValues, Session session, PlannerContext plannerContext, AccessControl accessControl, Map<NodeRef<Parameter>, Expression> parameters) {
    Map<String, PropertyMetadata<?>> supportedProperties = connectorProperties.get(new Key(catalog, procedureName));
    if (supportedProperties == null) {
        throw new TrinoException(NOT_FOUND, format("Catalog '%s' table procedure '%s' property not found", catalog, procedureName));
    }
    Map<String, Optional<Object>> propertyValues = evaluateProperties(
            sqlPropertyValues.entrySet().stream()
                    .map(entry -> new Property(new Identifier(entry.getKey()), entry.getValue()))
                    .collect(toImmutableList()),
            session,
            plannerContext,
            accessControl,
            parameters,
            true,
            supportedProperties,
            INVALID_PROCEDURE_ARGUMENT,
            format("catalog '%s' table procedure '%s' property", catalog, procedureName));
    return propertyValues.entrySet().stream()
            .filter(entry -> entry.getValue().isPresent())
            .collect(toImmutableMap(Entry::getKey, entry -> entry.getValue().orElseThrow()));
}
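
The supportedProperties consulted here come from the connector's table procedure property declarations, expressed as io.trino.spi.session.PropertyMetadata; when a supplied property fails evaluation, evaluateProperties reports it with the INVALID_PROCEDURE_ARGUMENT error code passed in above. A hedged sketch of such declarations (property names, descriptions, and defaults are invented for illustration):

// Hypothetical sketch: the kind of PropertyMetadata declarations that end up in
// supportedProperties above. Names and defaults are illustrative only.
import static io.trino.spi.session.PropertyMetadata.booleanProperty;
import static io.trino.spi.session.PropertyMetadata.stringProperty;

import com.google.common.collect.ImmutableList;
import io.trino.spi.session.PropertyMetadata;
import java.util.List;

public class TableProcedurePropertiesSketch
{
    public static List<PropertyMetadata<?>> exampleProperties()
    {
        return ImmutableList.of(
                // property values are validated against these declarations;
                // violations surface through evaluateProperties as INVALID_PROCEDURE_ARGUMENT
                stringProperty("file_size_threshold", "Only act on files smaller than this", "100MB", false),
                booleanProperty("dry_run", "Validate inputs without executing", false, false));
    }
}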

Aggregations

INVALID_PROCEDURE_ARGUMENT (io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT): 4
TrinoException (io.trino.spi.TrinoException): 4
List (java.util.List): 4
Objects.requireNonNull (java.util.Objects.requireNonNull): 4
ImmutableList (com.google.common.collect.ImmutableList): 3
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 3
ConnectorAccessControl (io.trino.spi.connector.ConnectorAccessControl): 3
ConnectorSession (io.trino.spi.connector.ConnectorSession): 3
Procedure (io.trino.spi.procedure.Procedure): 3
Argument (io.trino.spi.procedure.Procedure.Argument): 3
Session (io.trino.Session): 2
CatalogName (io.trino.connector.CatalogName): 2
PartitionStatistics (io.trino.plugin.hive.PartitionStatistics): 2
TransactionalMetadataFactory (io.trino.plugin.hive.TransactionalMetadataFactory): 2
AccessControl (io.trino.security.AccessControl): 2
String.format (java.lang.String.format): 2
Map (java.util.Map): 2
Inject (javax.inject.Inject): 2
Preconditions.checkState (com.google.common.base.Preconditions.checkState): 1
Throwables.throwIfInstanceOf (com.google.common.base.Throwables.throwIfInstanceOf): 1