Use of io.trino.spi.procedure.Procedure in project trino by trinodb.
From the class InternalHiveConnectorFactory, method createConnector:
public static Connector createConnector(String catalogName, Map<String, String> config, ConnectorContext context, Module module, Optional<HiveMetastore> metastore, Optional<CachingDirectoryLister> cachingDirectoryLister)
{
    requireNonNull(config, "config is null");
    // Run all wiring inside the plugin classloader so connector classes do not leak into the engine
    ClassLoader classLoader = InternalHiveConnectorFactory.class.getClassLoader();
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new CatalogNameModule(catalogName),
                new EventModule(),
                new MBeanModule(),
                new ConnectorObjectNameGeneratorModule(catalogName, "io.trino.plugin.hive", "trino.plugin.hive"),
                new JsonModule(),
                new TypeDeserializerModule(context.getTypeManager()),
                new HiveModule(),
                new CachingDirectoryListerModule(cachingDirectoryLister),
                new HiveHdfsModule(),
                new HiveS3Module(),
                new HiveGcsModule(),
                new HiveAzureModule(),
                conditionalModule(RubixEnabledConfig.class, RubixEnabledConfig::isCacheEnabled, new RubixModule()),
                new HiveMetastoreModule(metastore),
                new HiveSecurityModule(),
                new HdfsAuthenticationModule(),
                new HiveProcedureModule(),
                new MBeanServerModule(),
                binder -> {
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder());
                    binder.bind(MetadataProvider.class).toInstance(context.getMetadataProvider());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                },
                binder -> newSetBinder(binder, EventListener.class),
                binder -> bindSessionPropertiesProvider(binder, HiveSessionProperties.class),
                module);

        Injector injector = app.doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        Set<SessionPropertiesProvider> sessionPropertiesProviders = injector.getInstance(Key.get(new TypeLiteral<Set<SessionPropertiesProvider>>() {}));
        HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        HiveMaterializedViewPropertiesProvider hiveMaterializedViewPropertiesProvider = injector.getInstance(HiveMaterializedViewPropertiesProvider.class);
        // Procedures (CALL targets) and table procedures (ALTER TABLE ... EXECUTE targets)
        // arrive through Guice set bindings, populated here by HiveProcedureModule
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        Set<TableProcedureMetadata> tableProcedures = injector.getInstance(Key.get(new TypeLiteral<Set<TableProcedureMetadata>>() {}));
        Set<EventListener> eventListeners = injector.getInstance(Key.get(new TypeLiteral<Set<EventListener>>() {})).stream()
                .map(listener -> new ClassLoaderSafeEventListener(listener, classLoader))
                .collect(toImmutableSet());
        Set<SystemTableProvider> systemTableProviders = injector.getInstance(Key.get(new TypeLiteral<Set<SystemTableProvider>>() {}));
        Optional<ConnectorAccessControl> hiveAccessControl = injector.getInstance(Key.get(new TypeLiteral<Optional<ConnectorAccessControl>>() {}))
                .map(accessControl -> new SystemTableAwareAccessControl(accessControl, systemTableProviders))
                .map(accessControl -> new ClassLoaderSafeConnectorAccessControl(accessControl, classLoader));

        return new HiveConnector(
                lifeCycleManager,
                transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                procedures,
                tableProcedures,
                eventListeners,
                sessionPropertiesProviders,
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                hiveMaterializedViewPropertiesProvider.getMaterializedViewProperties(),
                hiveAccessControl,
                injector.getInstance(HiveConfig.class).isSingleStatementWritesOnly(),
                classLoader);
    }
}
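
The Set<Procedure> pulled from the injector above is populated through Guice set bindings contributed by modules such as HiveProcedureModule. Below is a minimal sketch of that pattern, assuming only the standard Guice Multibinder API and the Procedure constructor shape visible in the TestCallTask example further down; the ExampleProcedureModule and ExampleProcedureProvider names and the example() body are hypothetical, not Trino code.

import static com.google.inject.multibindings.Multibinder.newSetBinder;

import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.Provider;
import com.google.inject.Scopes;
import io.trino.spi.procedure.Procedure;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;

// Hypothetical module: contributes one Procedure to the Set<Procedure>
// that createConnector() later pulls out of the injector
public class ExampleProcedureModule
        implements Module
{
    @Override
    public void configure(Binder binder)
    {
        newSetBinder(binder, Procedure.class)
                .addBinding()
                .toProvider(ExampleProcedureProvider.class)
                .in(Scopes.SINGLETON);
    }
}

class ExampleProcedureProvider
        implements Provider<Procedure>
{
    private static final MethodHandle EXAMPLE;

    static {
        try {
            // The MethodHandle that CallTask will eventually invoke
            EXAMPLE = MethodHandles.lookup()
                    .unreflect(ExampleProcedureProvider.class.getMethod("example"));
        }
        catch (ReflectiveOperationException e) {
            throw new AssertionError(e);
        }
    }

    public static void example()
    {
        // procedure body (hypothetical)
    }

    @Override
    public Procedure get()
    {
        // (schema, name, arguments, methodHandle) -- the same constructor shape
        // as in the TestCallTask example below
        return new Procedure("system", "example", ImmutableList.of(), EXAMPLE);
    }
}

Any module passed to the Bootstrap above can contribute bindings this way; the factory only ever sees the aggregated set.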
Use of io.trino.spi.procedure.Procedure in project trino by trinodb.
From the class CallTask, method execute:
@Override
public ListenableFuture<Void> execute(Call call, QueryStateMachine stateMachine, List<Expression> parameters, WarningCollector warningCollector)
{
    if (!transactionManager.isAutoCommit(stateMachine.getSession().getRequiredTransactionId())) {
        throw new TrinoException(NOT_SUPPORTED, "Procedures cannot be called within a transaction (use autocommit mode)");
    }
    Session session = stateMachine.getSession();
    QualifiedObjectName procedureName = createQualifiedObjectName(session, call, call.getName());
    CatalogName catalogName = plannerContext.getMetadata().getCatalogHandle(stateMachine.getSession(), procedureName.getCatalogName())
            .orElseThrow(() -> semanticException(CATALOG_NOT_FOUND, call, "Catalog '%s' does not exist", procedureName.getCatalogName()));
    Procedure procedure = procedureRegistry.resolve(catalogName, procedureName.asSchemaTableName());

    // map declared argument names to positions
    Map<String, Integer> positions = new HashMap<>();
    for (int i = 0; i < procedure.getArguments().size(); i++) {
        positions.put(procedure.getArguments().get(i).getName(), i);
    }

    // per specification, do not allow mixing argument types
    Predicate<CallArgument> hasName = argument -> argument.getName().isPresent();
    boolean anyNamed = call.getArguments().stream().anyMatch(hasName);
    boolean allNamed = call.getArguments().stream().allMatch(hasName);
    if (anyNamed && !allNamed) {
        throw semanticException(INVALID_ARGUMENTS, call, "Named and positional arguments cannot be mixed");
    }

    // get the argument names in call order
    Map<String, CallArgument> names = new LinkedHashMap<>();
    for (int i = 0; i < call.getArguments().size(); i++) {
        CallArgument argument = call.getArguments().get(i);
        if (argument.getName().isPresent()) {
            String name = argument.getName().get().getCanonicalValue();
            if (names.put(name, argument) != null) {
                throw semanticException(INVALID_ARGUMENTS, argument, "Duplicate procedure argument: %s", name);
            }
            if (!positions.containsKey(name)) {
                throw semanticException(INVALID_ARGUMENTS, argument, "Unknown argument name: %s", name);
            }
        }
        else if (i < procedure.getArguments().size()) {
            names.put(procedure.getArguments().get(i).getName(), argument);
        }
        else {
            throw semanticException(INVALID_ARGUMENTS, call, "Too many arguments for procedure");
        }
    }

    procedure.getArguments().stream()
            .filter(Argument::isRequired)
            .filter(argument -> !names.containsKey(argument.getName()))
            .map(Argument::getName)
            .findFirst()
            .ifPresent(argument -> {
                throw semanticException(INVALID_ARGUMENTS, call, "Required procedure argument '%s' is missing", argument);
            });

    // get argument values
    Object[] values = new Object[procedure.getArguments().size()];
    Map<NodeRef<Parameter>, Expression> parameterLookup = parameterExtractor(call, parameters);
    for (Entry<String, CallArgument> entry : names.entrySet()) {
        CallArgument callArgument = entry.getValue();
        int index = positions.get(entry.getKey());
        Argument argument = procedure.getArguments().get(index);
        Expression expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(parameterLookup), callArgument.getValue());
        Type type = argument.getType();
        Object value = evaluateConstantExpression(expression, type, plannerContext, session, accessControl, parameterLookup);
        values[index] = toTypeObjectValue(session, type, value);
    }

    // fill values with optional arguments defaults
    for (int i = 0; i < procedure.getArguments().size(); i++) {
        Argument argument = procedure.getArguments().get(i);
        if (!names.containsKey(argument.getName())) {
            verify(argument.isOptional());
            values[i] = toTypeObjectValue(session, argument.getType(), argument.getDefaultValue());
        }
    }

    // validate arguments
    MethodType methodType = procedure.getMethodHandle().type();
    for (int i = 0; i < procedure.getArguments().size(); i++) {
        if ((values[i] == null) && methodType.parameterType(i).isPrimitive()) {
            String name = procedure.getArguments().get(i).getName();
            throw new TrinoException(INVALID_PROCEDURE_ARGUMENT, "Procedure argument cannot be null: " + name);
        }
    }

    // insert session argument
    List<Object> arguments = new ArrayList<>();
    Iterator<Object> valuesIterator = asList(values).iterator();
    for (Class<?> type : methodType.parameterList()) {
        if (ConnectorSession.class.equals(type)) {
            arguments.add(session.toConnectorSession(catalogName));
        }
        else if (ConnectorAccessControl.class.equals(type)) {
            arguments.add(new InjectedConnectorAccessControl(accessControl, session.toSecurityContext(), catalogName.getCatalogName()));
        }
        else {
            arguments.add(valuesIterator.next());
        }
    }

    accessControl.checkCanExecuteProcedure(session.toSecurityContext(), procedureName);
    stateMachine.setRoutines(ImmutableList.of(new RoutineInfo(procedureName.getObjectName(), session.getUser())));

    try {
        procedure.getMethodHandle().invokeWithArguments(arguments);
    }
    catch (Throwable t) {
        if (t instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        throwIfInstanceOf(t, TrinoException.class);
        throw new TrinoException(PROCEDURE_CALL_FAILED, t);
    }
    return immediateVoidFuture();
}
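
Note how the loop over methodType.parameterList() near the end splices in ConnectorSession and ConnectorAccessControl by parameter type, while every other parameter is filled from the evaluated argument values in positional order. A compatible target method therefore typically declares the injected parameters first, followed by the declared arguments. A minimal sketch of such a target, assuming a single VARCHAR argument; SyncExampleProcedure, syncExample, and schemaName are hypothetical names:

import io.trino.spi.connector.ConnectorSession;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

public final class SyncExampleProcedure
{
    private SyncExampleProcedure() {}

    // ConnectorSession is matched by type and injected by CallTask;
    // schemaName corresponds to the single declared procedure argument
    public static void syncExample(ConnectorSession session, String schemaName)
    {
        // procedure body (hypothetical)
    }

    public static MethodHandle handle()
            throws ReflectiveOperationException
    {
        return MethodHandles.lookup().findStatic(
                SyncExampleProcedure.class,
                "syncExample",
                MethodType.methodType(void.class, ConnectorSession.class, String.class));
    }
}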
Use of io.trino.spi.procedure.Procedure in project trino by trinodb.
From the class TestCallTask, method executeCallTask:
private void executeCallTask(MethodHandle methodHandle, Function<TransactionManager, AccessControl> accessControlProvider)
{
    TransactionManager transactionManager = queryRunner.getTransactionManager();
    ProcedureRegistry procedureRegistry = createProcedureRegistry(new Procedure("test", "testing_procedure", ImmutableList.of(), methodHandle));
    AccessControl accessControl = accessControlProvider.apply(transactionManager);
    PlannerContext plannerContext = plannerContextBuilder()
            .withTransactionManager(transactionManager)
            .build();
    new CallTask(transactionManager, plannerContext, accessControl, procedureRegistry)
            .execute(
                    new Call(QualifiedName.of("testing_procedure"), ImmutableList.of()),
                    stateMachine(transactionManager, plannerContext.getMetadata(), accessControl),
                    ImmutableList.of(),
                    WarningCollector.NOOP);
}
Use of io.trino.spi.procedure.Procedure in project trino by trinodb.
From the class InternalDeltaLakeConnectorFactory, method createConnector:
@VisibleForTesting
public static Connector createConnector(String catalogName, Map<String, String> config, ConnectorContext context, Module extraModule)
{
    ClassLoader classLoader = InternalDeltaLakeConnectorFactory.class.getClassLoader();
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new JsonModule(),
                new MBeanServerModule(),
                new HiveHdfsModule(),
                new HiveS3Module(),
                new HiveAzureModule(),
                new HdfsAuthenticationModule(),
                new CatalogNameModule(catalogName),
                new DeltaLakeMetastoreModule(),
                new DeltaLakeModule(),
                binder -> {
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(CatalogName.class).toInstance(new CatalogName(catalogName));
                    newSetBinder(binder, EventListener.class);
                },
                binder -> bindSessionPropertiesProvider(binder, DeltaLakeSessionProperties.class),
                extraModule);

        Injector injector = app.doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider connectorPageSink = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        Set<SessionPropertiesProvider> sessionPropertiesProviders = injector.getInstance(Key.get(new TypeLiteral<Set<SessionPropertiesProvider>>() {}));
        DeltaLakeTableProperties deltaLakeTableProperties = injector.getInstance(DeltaLakeTableProperties.class);
        DeltaLakeAnalyzeProperties deltaLakeAnalyzeProperties = injector.getInstance(DeltaLakeAnalyzeProperties.class);
        DeltaLakeTransactionManager transactionManager = injector.getInstance(DeltaLakeTransactionManager.class);
        Set<EventListener> eventListeners = injector.getInstance(Key.get(new TypeLiteral<Set<EventListener>>() {})).stream()
                .map(listener -> new ClassLoaderSafeEventListener(listener, classLoader))
                .collect(toImmutableSet());
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        Set<TableProcedureMetadata> tableProcedures = injector.getInstance(Key.get(new TypeLiteral<Set<TableProcedureMetadata>>() {}));

        return new DeltaLakeConnector(
                lifeCycleManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(connectorPageSink, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(),
                procedures,
                tableProcedures,
                sessionPropertiesProviders,
                DeltaLakeSchemaProperties.SCHEMA_PROPERTIES,
                deltaLakeTableProperties.getTableProperties(),
                deltaLakeAnalyzeProperties.getAnalyzeProperties(),
                eventListeners,
                transactionManager);
    }
}
Use of io.trino.spi.procedure.Procedure in project trino by trinodb.
From the class InternalIcebergConnectorFactory, method createConnector:
public static Connector createConnector(String catalogName, Map<String, String> config, ConnectorContext context, Module module, Optional<HiveMetastore> metastore, Optional<FileIoProvider> fileIoProvider)
{
    ClassLoader classLoader = InternalIcebergConnectorFactory.class.getClassLoader();
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new ConnectorObjectNameGeneratorModule(catalogName, "io.trino.plugin.iceberg", "trino.plugin.iceberg"),
                new JsonModule(),
                new IcebergModule(),
                new IcebergSecurityModule(),
                new IcebergCatalogModule(metastore),
                new HiveHdfsModule(),
                new HiveS3Module(),
                new HiveGcsModule(),
                new HiveAzureModule(),
                new HdfsAuthenticationModule(),
                new MBeanServerModule(),
                fileIoProvider
                        .<Module>map(provider -> binder -> binder.bind(FileIoProvider.class).toInstance(provider))
                        .orElse(binder -> binder.bind(FileIoProvider.class).to(HdfsFileIoProvider.class).in(SINGLETON)),
                binder -> {
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(CatalogName.class).toInstance(new CatalogName(catalogName));
                },
                module);

        Injector injector = app.doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        IcebergTransactionManager transactionManager = injector.getInstance(IcebergTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        Set<SessionPropertiesProvider> sessionPropertiesProviders = injector.getInstance(Key.get(new TypeLiteral<Set<SessionPropertiesProvider>>() {}));
        IcebergTableProperties icebergTableProperties = injector.getInstance(IcebergTableProperties.class);
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        Set<TableProcedureMetadata> tableProcedures = injector.getInstance(Key.get(new TypeLiteral<Set<TableProcedureMetadata>>() {}));
        Optional<ConnectorAccessControl> accessControl = injector.getInstance(Key.get(new TypeLiteral<Optional<ConnectorAccessControl>>() {}));

        return new IcebergConnector(
                lifeCycleManager,
                transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                sessionPropertiesProviders,
                IcebergSchemaProperties.SCHEMA_PROPERTIES,
                icebergTableProperties.getTableProperties(),
                accessControl,
                procedures,
                tableProcedures);
    }
}
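
Table procedures, retrieved here as Set<TableProcedureMetadata>, differ from CALL procedures: they carry no MethodHandle and are invoked through ALTER TABLE ... EXECUTE, with execution routed through the connector's metadata. A hedged sketch of a provider contributing one, loosely modeled on the shape of Iceberg's OPTIMIZE; the ExampleTableProcedure class and EXAMPLE_MAINTENANCE procedure are hypothetical:

import com.google.common.collect.ImmutableList;
import com.google.inject.Provider;
import io.trino.spi.connector.TableProcedureMetadata;

import static io.trino.spi.connector.TableProcedureExecutionMode.distributedWithFilteringAndRepartitioning;

// Hypothetical provider, registered the same way as CALL procedures:
// newSetBinder(binder, TableProcedureMetadata.class).addBinding().toProvider(...)
public class ExampleTableProcedure
        implements Provider<TableProcedureMetadata>
{
    @Override
    public TableProcedureMetadata get()
    {
        // (name, execution mode, procedure properties); no properties here
        return new TableProcedureMetadata(
                "EXAMPLE_MAINTENANCE",
                distributedWithFilteringAndRepartitioning(),
                ImmutableList.of());
    }
}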