Use of io.prestosql.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider in project hetu-core by openlookeng.
The create method of the CarbondataConnectorFactory class.
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(config, "config is null");
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new ConnectorObjectNameGeneratorModule(catalogName),
                new JsonModule(),
                new CarbondataModule(catalogName),
                new HiveS3Module(),
                new HiveGcsModule(),
                new HiveMetastoreModule(Optional.ofNullable(null)),
                new HiveSecurityModule(),
                new HiveAuthenticationModule(),
                new HiveProcedureModule(),
                new MBeanServerModule(),
                binder -> {
                    // Services supplied by the engine through ConnectorContext are bound as ready-made instances.
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                    binder.bind(HiveCatalogName.class).toInstance(new HiveCatalogName(catalogName));
                    configBinder(binder).bindConfig(CarbondataTableConfig.class);
                });
        Injector injector = app.strictConfig().doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();
        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class);
        CarbondataTableProperties hiveTableProperties = injector.getInstance(CarbondataTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        ConnectorAccessControl accessControl = new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class));
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        // Wrap the injector-provided services so every SPI call runs with the connector's classloader.
        return new HiveConnector(
                lifeCycleManager,
                metadataFactory,
                transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(),
                procedures,
                hiveSessionProperties.getSessionProperties(),
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                accessControl,
                classLoader);
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
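Each factory above wraps the injector-provided providers in ClassLoaderSafe* decorators before handing them to the connector. The wrapper's internals are not shown on this page, but the pattern it applies is a plain delegating proxy that installs the connector's classloader around each call. The sketch below illustrates that pattern with a hypothetical FooProvider interface and a simplified stand-in for ThreadContextClassLoader; the names are illustrative, not the actual SPI code.

import java.io.Closeable;

// Hypothetical provider interface standing in for ConnectorNodePartitioningProvider.
interface FooProvider
{
    String bucketFor(String key);
}

// Simplified stand-in for a ThreadContextClassLoader-style helper: swaps the thread
// context classloader on construction and restores the previous one on close().
class TemporaryContextClassLoader
        implements Closeable
{
    private final ClassLoader original = Thread.currentThread().getContextClassLoader();

    TemporaryContextClassLoader(ClassLoader newClassLoader)
    {
        Thread.currentThread().setContextClassLoader(newClassLoader);
    }

    @Override
    public void close()
    {
        Thread.currentThread().setContextClassLoader(original);
    }
}

// The delegation pattern: every call on the wrapper runs with the connector's classloader installed.
class ClassLoaderSafeFooProvider
        implements FooProvider
{
    private final FooProvider delegate;
    private final ClassLoader classLoader;

    ClassLoaderSafeFooProvider(FooProvider delegate, ClassLoader classLoader)
    {
        this.delegate = delegate;
        this.classLoader = classLoader;
    }

    @Override
    public String bucketFor(String key)
    {
        try (TemporaryContextClassLoader ignored = new TemporaryContextClassLoader(classLoader)) {
            return delegate.bucketFor(key);
        }
    }
}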
Use of io.prestosql.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider in project hetu-core by openlookeng.
The create method of the HiveConnectorFactory class.
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(config, "config is null");
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new ConnectorObjectNameGeneratorModule(catalogName),
                new JsonModule(),
                new HiveModule(),
                new HiveS3Module(),
                new HiveGcsModule(),
                new HiveMetastoreModule(metastore),
                new HiveSecurityModule(),
                new HiveAuthenticationModule(),
                new HiveProcedureModule(),
                new MBeanServerModule(),
                binder -> {
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                    binder.bind(HiveCatalogName.class).toInstance(new HiveCatalogName(catalogName));
                    binder.bind(IndexClient.class).toInstance(context.getIndexClient());
                });
        Injector injector = app.strictConfig().doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();
        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class);
        HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        ConnectorAccessControl accessControl = new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class));
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        return new HiveConnector(
                lifeCycleManager,
                metadataFactory,
                transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(),
                procedures,
                hiveSessionProperties.getSessionProperties(),
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                accessControl,
                classLoader);
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
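Both hetu-core factories follow the same shape: services handed over by the engine through ConnectorContext are bound as pre-built instances in an inline lambda module, while everything else is constructed by the injector from the listed modules and the catalog's configuration properties. Here is a minimal sketch of that toInstance idiom using plain Guice and hypothetical service types (ClockService and SystemClockService are stand-ins, not SPI classes).

import com.google.inject.Guice;
import com.google.inject.Injector;

// Hypothetical stand-in for an engine-provided service such as NodeManager or TypeManager.
interface ClockService
{
    long now();
}

class SystemClockService
        implements ClockService
{
    @Override
    public long now()
    {
        return System.currentTimeMillis();
    }
}

public class ToInstanceBindingSketch
{
    public static void main(String[] args)
    {
        // The "context" hands us a ready-made instance; we bind it rather than letting Guice
        // construct it, mirroring binder.bind(NodeManager.class).toInstance(context.getNodeManager()).
        ClockService providedByContext = new SystemClockService();

        Injector injector = Guice.createInjector(
                binder -> binder.bind(ClockService.class).toInstance(providedByContext));

        // Downstream components resolve exactly the instance the engine supplied.
        ClockService resolved = injector.getInstance(ClockService.class);
        System.out.println(resolved.now());
    }
}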
Use of io.prestosql.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider in project boostkit-bigdata by kunpengcompute.
The create method of the HiveConnectorFactory class.
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(config, "config is null");
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new ConnectorObjectNameGeneratorModule(catalogName),
                new JsonModule(),
                new HiveModule(),
                new HiveS3Module(),
                new HiveGcsModule(),
                new HiveMetastoreModule(metastore),
                new HiveSecurityModule(),
                new HiveAuthenticationModule(),
                new HiveProcedureModule(),
                new MBeanServerModule(),
                binder -> {
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                    binder.bind(HiveCatalogName.class).toInstance(new HiveCatalogName(catalogName));
                    binder.bind(IndexClient.class).toInstance(context.getIndexClient());
                    binder.bind(StandardFunctionResolution.class).toInstance(context.getStandardFunctionResolution());
                    binder.bind(FunctionMetadataManager.class).toInstance(context.getFunctionMetadataManager());
                    binder.bind(FilterStatsCalculatorService.class).toInstance(context.getFilterStatsCalculatorService());
                    binder.bind(RowExpressionService.class).toInstance(context.getRowExpressionService());
                });
        Injector injector = app.strictConfig().doNotInitializeLogging().setRequiredConfigurationProperties(config).initialize();
        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class);
        HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        ConnectorAccessControl accessControl = new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class));
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        ConnectorPlanOptimizerProvider planOptimizerProvider = injector.getInstance(ConnectorPlanOptimizerProvider.class);
        // Register the OmniData node manager and start polling node states before returning the connector.
        OmniDataNodeManager nodeManagerInstance = injector.getInstance(OmniDataNodeManager.class);
        HivePushdownUtil.setOmniDataNodeManager(nodeManagerInstance);
        nodeManagerInstance.startPollingNodeStates();
        return new HiveConnector(
                lifeCycleManager,
                metadataFactory,
                transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(),
                procedures,
                hiveSessionProperties.getSessionProperties(),
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                accessControl,
                planOptimizerProvider,
                classLoader);
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
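All three snippets retrieve the registered procedures with Key.get(new TypeLiteral<Set<Procedure>>() {}), which is how Guice addresses a parameterized binding such as a multibound set. A small sketch of that retrieval follows, assuming a hypothetical Task interface in place of Procedure and contributing elements through Guice's Multibinder; the real HiveProcedureModule's internals are not shown on this page.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;
import java.util.Set;

// Hypothetical stand-in for Procedure.
interface Task
{
    String name();
}

class CompactTask
        implements Task
{
    @Override
    public String name()
    {
        return "compact";
    }
}

public class MultibindingSketch
{
    public static void main(String[] args)
    {
        Injector injector = Guice.createInjector(new AbstractModule()
        {
            @Override
            protected void configure()
            {
                // Contribute elements to a Set<Task> binding, as a procedure module might.
                Multibinder<Task> tasks = Multibinder.newSetBinder(binder(), Task.class);
                tasks.addBinding().to(CompactTask.class);
            }
        });

        // Set<Task> is a parameterized type, so it must be addressed through a TypeLiteral.
        Set<Task> tasks = injector.getInstance(Key.get(new TypeLiteral<Set<Task>>() {}));
        tasks.forEach(task -> System.out.println(task.name()));
    }
}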