Use of io.trino.plugin.hive.s3.HiveS3Module in project presto by prestodb.
The class DeltaConnectorFactory, method create:
@Override
public Connector create(String catalogName, Map<String, String> requiredConfig, ConnectorContext context)
{
    requireNonNull(requiredConfig, "requiredConfig is null");
    try {
        Bootstrap app = new Bootstrap(
                new JsonModule(),
                new DeltaModule(catalogName, context.getTypeManager()),
                new HiveS3Module(catalogName),
                new HiveGcsModule(),
                new HiveAuthenticationModule(),
                new HiveMetastoreModule(catalogName, Optional.empty()),
                binder -> binder.bind(RowExpressionService.class).toInstance(context.getRowExpressionService()));

        Injector injector = app.doNotInitializeLogging().setRequiredConfigurationProperties(requiredConfig).initialize();
        return injector.getInstance(DeltaConnector.class);
    }
    catch (Exception exception) {
        throwIfUnchecked(exception);
        throw new RuntimeException(exception);
    }
}
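For context, HiveS3Module is what contributes the hive.s3.* configuration bindings to the Bootstrap above, so the map passed to setRequiredConfigurationProperties can carry S3 credentials alongside metastore settings. A minimal sketch of driving the factory, assuming a factory instance and an engine-supplied ConnectorContext (both placeholders here, as are the property values):

// Sketch only: `factory` and `context` are assumed; in practice the engine
// builds this map from the catalog's properties file.
Map<String, String> config = ImmutableMap.of(
        "hive.metastore.uri", "thrift://localhost:9083",
        "hive.s3.aws-access-key", "<access-key>",
        "hive.s3.aws-secret-key", "<secret-key>");
Connector connector = factory.create("delta", config, context);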
Use of io.trino.plugin.hive.s3.HiveS3Module in project presto by prestodb.
The class HiveConnectorFactory, method create:
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(config, "config is null");
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new JsonModule(),
                new SmileModule(),
                new HiveClientModule(catalogName),
                new HiveS3Module(catalogName),
                new HiveGcsModule(),
                new HiveMetastoreModule(catalogName, metastore),
                new HiveSecurityModule(),
                new HiveAuthenticationModule(),
                new HiveProcedureModule(),
                new CachingModule(),
                binder -> {
                    MBeanServer platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
                    binder.bind(MBeanServer.class).toInstance(new RebindSafeMBeanServer(platformMBeanServer));
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                    binder.bind(StandardFunctionResolution.class).toInstance(context.getStandardFunctionResolution());
                    binder.bind(FunctionMetadataManager.class).toInstance(context.getFunctionMetadataManager());
                    binder.bind(RowExpressionService.class).toInstance(context.getRowExpressionService());
                    binder.bind(FilterStatsCalculatorService.class).toInstance(context.getFilterStatsCalculatorService());
                    binder.bind(BlockEncodingSerde.class).toInstance(context.getBlockEncodingSerde());
                });

        Injector injector = app.doNotInitializeLogging().setRequiredConfigurationProperties(config).quiet().initialize();

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class);
        HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        ConnectorAccessControl accessControl = new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class));
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        ConnectorPlanOptimizerProvider planOptimizerProvider = injector.getInstance(ConnectorPlanOptimizerProvider.class);
        ConnectorMetadataUpdaterProvider metadataUpdaterProvider = injector.getInstance(ConnectorMetadataUpdaterProvider.class);

        return new HiveConnector(
                lifeCycleManager, metadataFactory, transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(), procedures,
                hiveSessionProperties.getSessionProperties(),
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                accessControl, planOptimizerProvider, metadataUpdaterProvider, classLoader);
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
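Every SPI service returned from this method is wrapped in a ClassLoaderSafe* decorator for the same reason the body runs inside a try-with-resources ThreadContextClassLoader: connector code must always execute with the plugin's class loader as the thread context class loader. A self-contained sketch of that delegation pattern (hypothetical helper, not the actual Presto wrapper classes):

import java.util.concurrent.Callable;

// Hypothetical helper showing the pattern behind the ClassLoaderSafe* wrappers:
// install the plugin class loader for the duration of the call, then restore
// the previous one even if the delegate throws.
final class WithPluginClassLoader
{
    static <T> T call(ClassLoader pluginClassLoader, Callable<T> delegate)
            throws Exception
    {
        Thread thread = Thread.currentThread();
        ClassLoader previous = thread.getContextClassLoader();
        thread.setContextClassLoader(pluginClassLoader);
        try {
            return delegate.call();
        }
        finally {
            thread.setContextClassLoader(previous);
        }
    }
}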
Use of io.trino.plugin.hive.s3.HiveS3Module in project presto by prestodb.
The class InternalIcebergConnectorFactory, method createConnector:
public static Connector createConnector(String catalogName, Map<String, String> config, ConnectorContext context, Optional<ExtendedHiveMetastore> metastore)
{
    ClassLoader classLoader = InternalIcebergConnectorFactory.class.getClassLoader();
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new JsonModule(),
                new IcebergModule(catalogName),
                new IcebergMetastoreModule(),
                new HiveS3Module(catalogName),
                new HiveAuthenticationModule(),
                new HiveMetastoreModule(catalogName, metastore),
                new CachingModule(),
                binder -> {
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(StandardFunctionResolution.class).toInstance(context.getStandardFunctionResolution());
                    binder.bind(RowExpressionService.class).toInstance(context.getRowExpressionService());
                });

        Injector injector = app.doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        IcebergTransactionManager transactionManager = injector.getInstance(IcebergTransactionManager.class);
        IcebergMetadataFactory metadataFactory = injector.getInstance(IcebergMetadataFactory.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        IcebergSessionProperties icebergSessionProperties = injector.getInstance(IcebergSessionProperties.class);
        IcebergTableProperties icebergTableProperties = injector.getInstance(IcebergTableProperties.class);
        Set<Procedure> procedures = injector.getInstance((Key<Set<Procedure>>) Key.get(Types.setOf(Procedure.class)));
        ConnectorPlanOptimizer planOptimizer = injector.getInstance(IcebergPlanOptimizer.class);

        return new IcebergConnector(
                lifeCycleManager, transactionManager, metadataFactory,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(),
                icebergSessionProperties.getSessionProperties(),
                IcebergSchemaProperties.SCHEMA_PROPERTIES,
                icebergTableProperties.getTableProperties(),
                new AllowAllAccessControl(), procedures, planOptimizer);
    }
}
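Note that this factory fetches the procedure set with Key.get(Types.setOf(Procedure.class)), where the Hive factory above uses an anonymous TypeLiteral subclass; the two are equivalent ways to look up a Guice multibinding. A standalone sketch in plain Guice, with String standing in for Procedure:

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.util.Types;
import java.util.Set;

public class SetBindingDemo
{
    public static void main(String[] args)
    {
        Injector injector = Guice.createInjector(binder -> {
            // Multibinding: several modules can contribute elements to one Set<String>
            Multibinder<String> procedures = Multibinder.newSetBinder(binder, String.class);
            procedures.addBinding().toInstance("analyze");
            procedures.addBinding().toInstance("register_table");
        });

        // Lookup via anonymous TypeLiteral subclass, as in HiveConnectorFactory above
        Set<String> viaTypeLiteral = injector.getInstance(Key.get(new TypeLiteral<Set<String>>() {}));

        // Lookup via Types.setOf, as in the Iceberg factory above
        @SuppressWarnings("unchecked")
        Set<String> viaTypes = (Set<String>) injector.getInstance(Key.get(Types.setOf(String.class)));

        System.out.println(viaTypeLiteral.equals(viaTypes)); // prints: true
    }
}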
Use of io.trino.plugin.hive.s3.HiveS3Module in project carbondata by apache.
The class InternalCarbonDataConnectorFactory, method createConnector:
public static Connector createConnector(String catalogName, Map<String, String> config, ConnectorContext context, Module module, Optional<HiveMetastore> metastore)
{
    requireNonNull(config, "config is null");
    ClassLoader classLoader = InternalCarbonDataConnectorFactory.class.getClassLoader();
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new ConnectorObjectNameGeneratorModule(catalogName, "io.prestosql.plugin.carbondata", "presto.plugin.carbondata"),
                new JsonModule(),
                new CarbondataModule(),
                new HiveS3Module(),
                new HiveGcsModule(),
                new HiveAzureModule(),
                installModuleIf(RubixEnabledConfig.class, RubixEnabledConfig::isCacheEnabled, new RubixModule()),
                new HiveMetastoreModule(metastore),
                new HiveSecurityModule(catalogName),
                new HiveAuthenticationModule(),
                new HiveProcedureModule(),
                new MBeanServerModule(),
                binder -> {
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                    binder.bind(CatalogName.class).toInstance(new CatalogName(catalogName));
                    configBinder(binder).bindConfig(CarbonTableConfig.class);
                },
                binder -> newSetBinder(binder, EventListener.class),
                module);

        Injector injector = app.strictConfig().doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();

        if (injector.getInstance(RubixEnabledConfig.class).isCacheEnabled()) {
            // RubixInitializer needs ConfigurationInitializers, hence kept outside RubixModule
            RubixInitializer rubixInitializer = injector.getInstance(RubixInitializer.class);
            rubixInitializer.initializeRubix(context.getNodeManager());
        }

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class);
        HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        ConnectorAccessControl accessControl = new ClassLoaderSafeConnectorAccessControl(new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class)), classLoader);
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        Set<SystemTable> systemTables = injector.getInstance(Key.get(new TypeLiteral<Set<SystemTable>>() {}));
        Set<EventListener> eventListeners = injector.getInstance(Key.get(new TypeLiteral<Set<EventListener>>() {})).stream()
                .map(listener -> new ClassLoaderSafeEventListener(listener, classLoader))
                .collect(toImmutableSet());

        return new CarbonDataConnector(
                lifeCycleManager, metadataFactory, transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                systemTables, procedures, eventListeners,
                hiveSessionProperties.getSessionProperties(),
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                accessControl, classLoader);
    }
}
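The installModuleIf(...) line installs RubixModule only when the catalog configuration enables Rubix caching; the initializer itself then runs after the injector is built, for the reason given in the in-code comment. A minimal sketch of the conditional-installation idea in plain Guice (hypothetical helper; Airlift's installModuleIf is the config-driven equivalent):

import com.google.inject.Module;

// Hypothetical stand-in for Airlift's installModuleIf: return the real module
// when the flag is set, otherwise a module that binds nothing.
final class ConditionalInstall
{
    static Module installIf(boolean condition, Module module)
    {
        if (condition) {
            return module;
        }
        return binder -> {}; // no-op module
    }

    // Usage sketch: new Bootstrap(installIf(cacheEnabled, new RubixModule()), ...)
}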
Use of io.trino.plugin.hive.s3.HiveS3Module in project carbondata by apache.
The class CarbondataConnectorFactory, method create:
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(config, "config is null");
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new JsonModule(),
                new CarbondataModule(catalogName),
                new HiveS3Module(catalogName),
                new HiveMetastoreModule(catalogName, Optional.empty()),
                new HiveSecurityModule(),
                new HiveAuthenticationModule(),
                new HiveProcedureModule(),
                binder -> {
                    MBeanServer platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
                    binder.bind(MBeanServer.class).toInstance(new RebindSafeMBeanServer(platformMBeanServer));
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                    configBinder(binder).bindConfig(CarbonTableConfig.class);
                });

        Injector injector = app.strictConfig().doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();
        setCarbonEnum();

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class);
        HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        ConnectorAccessControl accessControl = new PartitionsAwareAccessControl(injector.getInstance(ConnectorAccessControl.class));
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));

        return new HiveConnector(
                lifeCycleManager, metadataFactory, transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(), procedures,
                hiveSessionProperties.getSessionProperties(),
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                accessControl, classLoader);
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
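Note that create returns a HiveConnector rather than a CarbonData-specific connector: the plugin reuses the Hive runtime wholesale and only swaps in CarbondataModule and CarbonTableConfig. For completeness, a hedged sketch of how a factory like this typically reaches the engine through the Plugin SPI (the plugin class name and no-arg constructor are assumptions):

// Hypothetical registration sketch: the engine loads the plugin, enumerates
// its connector factories, and calls create(...) once for each catalog whose
// connector.name matches the factory's name.
public class CarbondataPlugin implements Plugin
{
    @Override
    public Iterable<ConnectorFactory> getConnectorFactories()
    {
        return ImmutableList.of(new CarbondataConnectorFactory()); // constructor args assumed
    }
}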