Use of com.facebook.airlift.json.JsonModule in project presto by prestodb.
The class TestSystemTableHandle, method startUp.
@BeforeMethod
public void startUp()
{
    Injector injector = Guice.createInjector(new JsonModule(), new HandleJsonModule());
    objectMapper = injector.getInstance(ObjectMapper.class);
}
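JsonModule contributes the airlift-configured ObjectMapper binding that the test resolves from the injector, while HandleJsonModule registers the serializers needed for Presto's polymorphic handle types. The following is a minimal sketch, outside the Presto test itself, of what that ObjectMapper binding provides; the Point value class is invented for illustration, and the explicit @JsonCreator/@JsonProperty annotations reflect the explicit-annotation style the airlift mapper expects.

import com.facebook.airlift.json.JsonModule;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.Guice;
import com.google.inject.Injector;

public final class JsonModuleRoundTripSketch
{
    // Invented value class; Presto's real handles follow the same explicit-annotation style.
    public static class Point
    {
        private final int x;
        private final int y;

        @JsonCreator
        public Point(@JsonProperty("x") int x, @JsonProperty("y") int y)
        {
            this.x = x;
            this.y = y;
        }

        @JsonProperty
        public int getX()
        {
            return x;
        }

        @JsonProperty
        public int getY()
        {
            return y;
        }
    }

    public static void main(String[] args)
            throws Exception
    {
        // Same resolution path as the test: JsonModule contributes the ObjectMapper binding.
        Injector injector = Guice.createInjector(new JsonModule());
        ObjectMapper objectMapper = injector.getInstance(ObjectMapper.class);

        // Round-trip the value object through JSON and back.
        String json = objectMapper.writeValueAsString(new Point(1, 2));
        Point copy = objectMapper.readValue(json, Point.class);
        System.out.println(json + " -> (" + copy.getX() + ", " + copy.getY() + ")");
    }
}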
Use of com.facebook.airlift.json.JsonModule in project presto by prestodb.
The class BigQueryConnectorFactory, method create.
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(config, "config is null");

    try {
        Bootstrap app = new Bootstrap(
                new JsonModule(),
                new BigQueryConnectorModule(context.getNodeManager()),
                binder -> {
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                });

        Injector injector = app
                .noStrictConfig()
                .doNotInitializeLogging()
                .setRequiredConfigurationProperties(config)
                .initialize();

        return injector.getInstance(BigQueryConnector.class);
    }
    catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}
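For context, a factory like this only runs once the engine loads it from a plugin. The sketch below shows the conventional wiring; the ExampleBigQueryPlugin class and the assumption that BigQueryConnectorFactory has a no-argument constructor are illustrative, not quoted from the BigQuery connector source.

import com.facebook.presto.spi.Plugin;
import com.facebook.presto.spi.connector.ConnectorFactory;
import com.google.common.collect.ImmutableList;

public class ExampleBigQueryPlugin
        implements Plugin
{
    @Override
    public Iterable<ConnectorFactory> getConnectorFactories()
    {
        // The engine discovers plugins via ServiceLoader and later calls
        // factory.create(catalogName, config, context) once per configured catalog.
        // Constructor arguments, if the real factory needs any, are omitted here.
        return ImmutableList.of(new BigQueryConnectorFactory());
    }
}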
Use of com.facebook.airlift.json.JsonModule in project presto by prestodb.
The class ElasticsearchConnectorFactory, method create.
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(config, "config is null");

    try {
        Bootstrap app = new Bootstrap(
                new JsonModule(),
                new ElasticsearchConnectorModule(),
                binder -> {
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                });

        Injector injector = app
                .doNotInitializeLogging()
                .setRequiredConfigurationProperties(config)
                .initialize();

        return injector.getInstance(ElasticsearchConnector.class);
    }
    catch (Exception e) {
        throw new RuntimeException(e);
    }
}
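The config map handed to setRequiredConfigurationProperties is what airlift's configuration support binds onto @Config setters inside the connector module. The fragment below is a hedged illustration of that mechanism using an invented ExampleSearchConfig class and property name; it is not the Elasticsearch connector's actual config class.

import com.facebook.airlift.configuration.Config;
import com.google.inject.Binder;
import com.google.inject.Module;

import static com.facebook.airlift.configuration.ConfigBinder.configBinder;

public class ExampleSearchModule
        implements Module
{
    @Override
    public void configure(Binder binder)
    {
        // Each matching property from the catalog config map is applied to a @Config setter.
        configBinder(binder).bindConfig(ExampleSearchConfig.class);
    }

    public static class ExampleSearchConfig
    {
        private String host = "localhost";

        public String getHost()
        {
            return host;
        }

        @Config("example-search.host")
        public ExampleSearchConfig setHost(String host)
        {
            this.host = host;
            return this;
        }
    }
}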
Use of com.facebook.airlift.json.JsonModule in project presto by prestodb.
The class HiveConnectorFactory, method create.
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(config, "config is null");

    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        Bootstrap app = new Bootstrap(
                new EventModule(),
                new MBeanModule(),
                new JsonModule(),
                new SmileModule(),
                new HiveClientModule(catalogName),
                new HiveS3Module(catalogName),
                new HiveGcsModule(),
                new HiveMetastoreModule(catalogName, metastore),
                new HiveSecurityModule(),
                new HiveAuthenticationModule(),
                new HiveProcedureModule(),
                new CachingModule(),
                binder -> {
                    MBeanServer platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
                    binder.bind(MBeanServer.class).toInstance(new RebindSafeMBeanServer(platformMBeanServer));
                    binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
                    binder.bind(PageSorter.class).toInstance(context.getPageSorter());
                    binder.bind(StandardFunctionResolution.class).toInstance(context.getStandardFunctionResolution());
                    binder.bind(FunctionMetadataManager.class).toInstance(context.getFunctionMetadataManager());
                    binder.bind(RowExpressionService.class).toInstance(context.getRowExpressionService());
                    binder.bind(FilterStatsCalculatorService.class).toInstance(context.getFilterStatsCalculatorService());
                    binder.bind(BlockEncodingSerde.class).toInstance(context.getBlockEncodingSerde());
                });

        Injector injector = app.doNotInitializeLogging().setRequiredConfigurationProperties(config).quiet().initialize();

        LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
        HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class);
        HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class);
        ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class);
        ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class);
        ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class);
        ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class);
        HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class);
        HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class);
        HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class);
        ConnectorAccessControl accessControl = new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class));
        Set<Procedure> procedures = injector.getInstance(Key.get(new TypeLiteral<Set<Procedure>>() {}));
        ConnectorPlanOptimizerProvider planOptimizerProvider = injector.getInstance(ConnectorPlanOptimizerProvider.class);
        ConnectorMetadataUpdaterProvider metadataUpdaterProvider = injector.getInstance(ConnectorMetadataUpdaterProvider.class);

        return new HiveConnector(
                lifeCycleManager,
                metadataFactory,
                transactionManager,
                new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader),
                new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader),
                new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader),
                new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader),
                ImmutableSet.of(),
                procedures,
                hiveSessionProperties.getSessionProperties(),
                HiveSchemaProperties.SCHEMA_PROPERTIES,
                hiveTableProperties.getTableProperties(),
                hiveAnalyzeProperties.getAnalyzeProperties(),
                accessControl,
                planOptimizerProvider,
                metadataUpdaterProvider,
                classLoader);
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
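JsonModule earns its place in this long module list because other modules bind JsonCodec instances on top of its ObjectMapper, and connector components inject those codecs to serialize value objects such as splits and partition updates. The sketch below uses invented ExamplePayload and ExampleReader names to show the pattern; it is not taken from HiveClientModule.

import com.facebook.airlift.json.JsonCodec;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.inject.Binder;
import com.google.inject.Inject;
import com.google.inject.Module;

import static com.facebook.airlift.json.JsonCodecBinder.jsonCodecBinder;

public class ExampleCodecModule
        implements Module
{
    @Override
    public void configure(Binder binder)
    {
        // Requires JsonModule in the same Bootstrap so the ObjectMapper binding exists.
        jsonCodecBinder(binder).bindJsonCodec(ExamplePayload.class);
        binder.bind(ExampleReader.class);
    }

    public static class ExamplePayload
    {
        private final String path;

        @JsonCreator
        public ExamplePayload(@JsonProperty("path") String path)
        {
            this.path = path;
        }

        @JsonProperty
        public String getPath()
        {
            return path;
        }
    }

    public static class ExampleReader
    {
        private final JsonCodec<ExamplePayload> codec;

        @Inject
        public ExampleReader(JsonCodec<ExamplePayload> codec)
        {
            this.codec = codec;
        }

        public String serialize(ExamplePayload payload)
        {
            // The codec delegates to the JsonModule-provided ObjectMapper.
            return codec.toJson(payload);
        }
    }
}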
Use of com.facebook.airlift.json.JsonModule in project presto by prestodb.
The class KafkaConnectorFactory, method create.
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(config, "config is null");

    try {
        Bootstrap app = new Bootstrap(
                extension,
                new JsonModule(),
                new KafkaConnectorModule(),
                binder -> {
                    binder.bind(KafkaConnectorId.class).toInstance(new KafkaConnectorId(catalogName));
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                });

        Injector injector = app
                .doNotInitializeLogging()
                .setRequiredConfigurationProperties(config)
                .initialize();

        return injector.getInstance(KafkaConnector.class);
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
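As a usage sketch, the engine ultimately calls create with the catalog name and the flattened properties from the catalog file; the helper below imitates that call. The helper method, the property values, and the import paths following the presto-spi connector package layout are assumptions for illustration, and the property keys mirror the Kafka connector documentation rather than engine source.

import com.facebook.presto.spi.connector.Connector;
import com.facebook.presto.spi.connector.ConnectorContext;
import com.google.common.collect.ImmutableMap;

import java.util.Map;

public final class KafkaCatalogCreationSketch
{
    private KafkaCatalogCreationSketch() {}

    // Hypothetical helper: the engine normally performs this step when it loads
    // etc/catalog/kafka.properties; factory and context would come from the plugin manager.
    public static Connector createKafkaCatalog(KafkaConnectorFactory factory, ConnectorContext context)
    {
        Map<String, String> config = ImmutableMap.of(
                "kafka.nodes", "localhost:9092",
                "kafka.table-names", "orders,customers");
        return factory.create("kafka", config, context);
    }
}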