Use of io.cdap.cdap.logging.guice.KafkaLogAppenderModule in project cdap by cdapio.
The class UpgradeTool, method createInjector.
@VisibleForTesting
Injector createInjector() {
  return Guice.createInjector(
      new ConfigModule(cConf, hConf),
      RemoteAuthenticatorModules.getDefaultModule(),
      new DFSLocationModule(),
      new ZKClientModule(),
      new ZKDiscoveryModule(),
      new MessagingClientModule(),
      Modules.override(new DataSetsModules().getDistributedModules()).with(new AbstractModule() {
        @Override
        protected void configure() {
          bind(DatasetFramework.class).to(InMemoryDatasetFramework.class).in(Scopes.SINGLETON);
          // the DataSetsModules().getDistributedModules() binds to RemoteDatasetFramework so override that to
          // the same InMemoryDatasetFramework
          bind(DatasetFramework.class)
              .annotatedWith(Names.named(DataSetsModules.BASE_DATASET_FRAMEWORK))
              .to(DatasetFramework.class);
          bind(DatasetDefinitionRegistryFactory.class)
              .to(DefaultDatasetDefinitionRegistryFactory.class)
              .in(Scopes.SINGLETON);
          // CDAP-5954 Upgrade tool does not need to record lineage and metadata changes for now.
          bind(LineageWriter.class).to(NoOpLineageWriter.class);
          bind(FieldLineageWriter.class).to(NoOpLineageWriter.class);
        }
      }),
      new TwillModule(),
      new ExploreClientModule(),
      new ProgramRunnerRuntimeModule().getDistributedModules(),
      new SystemDatasetRuntimeModule().getDistributedModules(),
      new KafkaClientModule(),
      new IOModule(),
      CoreSecurityRuntimeModule.getDistributedModule(cConf),
      new AuthenticationContextModules().getMasterModule(),
      new AuthorizationModule(),
      new AuthorizationEnforcementModule().getMasterModule(),
      new SecureStoreServerModule(),
      new DataFabricModules(UpgradeTool.class.getName()).getDistributedModules(),
      new AppFabricServiceRuntimeModule(cConf).getDistributedModules(),
      new KafkaLogAppenderModule(),
      // the DataFabricDistributedModule needs MetricsCollectionService binding
      new AbstractModule() {
        @Override
        protected void configure() {
          // Since Upgrade tool does not do anything with Metrics we just bind it to no-op implementations
          bind(MetricsCollectionService.class).toInstance(new NoOpMetricsCollectionService());
          bind(MetricsSystemClient.class).toInstance(new NoOpMetricsSystemClient());
        }

        @Provides
        @Singleton
        @Named("datasetInstanceManager")
        @SuppressWarnings("unused")
        public DatasetInstanceManager getDatasetInstanceManager(TransactionRunner transactionRunner) {
          return new DatasetInstanceManager(transactionRunner);
        }

        // This is needed because the LocalApplicationManager
        // expects a dsframework injection named datasetMDS
        @Provides
        @Singleton
        @Named("datasetMDS")
        @SuppressWarnings("unused")
        public DatasetFramework getInDsFramework(DatasetFramework dsFramework) {
          return dsFramework;
        }
      },
      new AbstractModule() {
        @Override
        protected void configure() {
          // TODO (CDAP-14677): find a better way to inject metadata publisher
          bind(MetadataServiceClient.class).to(NoOpMetadataServiceClient.class);
        }
      });
}
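The notable pattern above is Guice's Modules.override(...).with(...): the bindings from the distributed DataSetsModules are kept, but the DatasetFramework binding is swapped for InMemoryDatasetFramework. Below is a minimal, self-contained sketch of the same override technique using only core Guice; the Storage, DistributedStorage, and InMemoryStorage names are hypothetical stand-ins, not CDAP classes.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.util.Modules;

public class OverrideSketch {
  // Hypothetical service interface and implementations, for illustration only.
  interface Storage {}
  static class DistributedStorage implements Storage {}
  static class InMemoryStorage implements Storage {}

  static class ProductionModule extends AbstractModule {
    @Override
    protected void configure() {
      bind(Storage.class).to(DistributedStorage.class);
    }
  }

  public static void main(String[] args) {
    // Modules.override keeps every binding from ProductionModule except those re-declared
    // in the overriding module, mirroring how the upgrade tool replaces the distributed
    // DatasetFramework binding with an in-memory one.
    Injector injector = Guice.createInjector(
        Modules.override(new ProductionModule()).with(new AbstractModule() {
          @Override
          protected void configure() {
            bind(Storage.class).to(InMemoryStorage.class);
          }
        }));
    // Prints: InMemoryStorage
    System.out.println(injector.getInstance(Storage.class).getClass().getSimpleName());
  }
}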
Use of io.cdap.cdap.logging.guice.KafkaLogAppenderModule in project cdap by cdapio.
The class SystemWorkerTwillRunnable, method createInjector.
@VisibleForTesting
static Injector createInjector(CConfiguration cConf, Configuration hConf) {
  List<Module> modules = new ArrayList<>();
  CoreSecurityModule coreSecurityModule = CoreSecurityRuntimeModule.getDistributedModule(cConf);
  modules.add(new ConfigModule(cConf, hConf));
  modules.add(new LocalLocationModule());
  modules.add(new IOModule());
  modules.add(new AuthenticationContextModules().getMasterWorkerModule());
  modules.add(coreSecurityModule);
  modules.add(new MessagingClientModule());
  modules.add(new SystemAppModule());
  modules.add(new MetricsClientRuntimeModule().getDistributedModules());

  // If MasterEnvironment is not available, assuming it is the old hadoop stack with ZK, Kafka
  MasterEnvironment masterEnv = MasterEnvironments.getMasterEnvironment();
  if (masterEnv == null) {
    modules.add(new ZKClientModule());
    modules.add(new ZKDiscoveryModule());
    modules.add(new KafkaClientModule());
    modules.add(new KafkaLogAppenderModule());
  } else {
    modules.add(new AbstractModule() {
      @Override
      protected void configure() {
        bind(DiscoveryService.class)
            .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceSupplier()));
        bind(DiscoveryServiceClient.class)
            .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceClientSupplier()));
      }
    });
    modules.add(new RemoteLogAppenderModule());
    if (coreSecurityModule.requiresZKClient()) {
      modules.add(new ZKClientModule());
    }
  }
  return Guice.createInjector(modules);
}
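In the MasterEnvironment branch, the discovery services are bound to providers backed by suppliers obtained from the environment via SupplierProviderBridge. Assuming that class is essentially an adapter from java.util.function.Supplier to Guice's Provider (CDAP's real implementation may differ), a minimal sketch looks like this:

import com.google.inject.Provider;
import java.util.function.Supplier;

// Minimal sketch of a Supplier-to-Provider adapter; illustrative only,
// not CDAP's actual SupplierProviderBridge implementation.
public class SupplierProviderBridgeSketch<T> implements Provider<T> {
  private final Supplier<T> supplier;

  public SupplierProviderBridgeSketch(Supplier<T> supplier) {
    this.supplier = supplier;
  }

  @Override
  public T get() {
    // Fetch the current instance from the environment-provided supplier on every injection request.
    return supplier.get();
  }
}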
Use of io.cdap.cdap.logging.guice.KafkaLogAppenderModule in project cdap by cdapio.
The class DistributedProgramContainerModule, method addOnPremiseModules.
private void addOnPremiseModules(List<Module> modules) {
  CoreSecurityModule coreSecurityModule = CoreSecurityRuntimeModule.getDistributedModule(cConf);
  modules.add(new AuthenticationContextModules().getMasterModule());
  modules.add(coreSecurityModule);

  // If MasterEnvironment is not available, assuming it is the old hadoop stack with ZK, Kafka
  MasterEnvironment masterEnv = MasterEnvironments.getMasterEnvironment();
  if (masterEnv == null) {
    modules.add(new ZKClientModule());
    modules.add(new ZKDiscoveryModule());
    modules.add(new KafkaClientModule());
    modules.add(new KafkaLogAppenderModule());
    return;
  }
  if (coreSecurityModule.requiresZKClient()) {
    modules.add(new ZKClientModule());
  }
  modules.add(new AbstractModule() {
    @Override
    protected void configure() {
      bind(DiscoveryService.class)
          .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceSupplier()));
      bind(DiscoveryServiceClient.class)
          .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceClientSupplier()));
      bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
    }
  });
  modules.add(new RemoteLogAppenderModule());
}
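Unlike the previous examples, addOnPremiseModules only appends to a caller-supplied module list; the enclosing DistributedProgramContainerModule presumably assembles the rest of the list and creates the injector elsewhere. A hypothetical caller, shown only to illustrate how such a helper slots into module assembly, might look like:

// Hypothetical assembly, for illustration only; the real container module builds a much larger list.
List<Module> modules = new ArrayList<>();
modules.add(new ConfigModule(cConf, hConf));
addOnPremiseModules(modules);  // appends either the ZK/Kafka modules or the MasterEnvironment-backed bindings
Injector injector = Guice.createInjector(modules);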
Use of io.cdap.cdap.logging.guice.KafkaLogAppenderModule in project cdap by caskdata.
The class ArtifactLocalizerTwillRunnable, method createInjector.
@VisibleForTesting
static Injector createInjector(CConfiguration cConf, Configuration hConf) {
  List<Module> modules = new ArrayList<>();
  CoreSecurityModule coreSecurityModule = CoreSecurityRuntimeModule.getDistributedModule(cConf);
  modules.add(new ConfigModule(cConf, hConf));
  modules.add(new IOModule());
  modules.add(RemoteAuthenticatorModules.getDefaultModule());
  modules.add(new AuthenticationContextModules().getMasterModule());
  modules.add(coreSecurityModule);

  // If MasterEnvironment is not available, assuming it is the old hadoop stack with ZK, Kafka
  MasterEnvironment masterEnv = MasterEnvironments.getMasterEnvironment();
  if (masterEnv == null) {
    modules.add(new ZKClientModule());
    modules.add(new ZKDiscoveryModule());
    modules.add(new KafkaClientModule());
    modules.add(new KafkaLogAppenderModule());
  } else {
    modules.add(new AbstractModule() {
      @Override
      protected void configure() {
        bind(DiscoveryService.class)
            .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceSupplier()));
        bind(DiscoveryServiceClient.class)
            .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceClientSupplier()));
      }
    });
    modules.add(new RemoteLogAppenderModule());
    modules.add(new LocalLocationModule());
    if (coreSecurityModule.requiresZKClient()) {
      modules.add(new ZKClientModule());
    }
  }
  modules.add(new DistributedArtifactManagerModule());
  return Guice.createInjector(modules);
}
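All of the runnables above repeat the same branch on MasterEnvironments.getMasterEnvironment() to choose between KafkaLogAppenderModule (old ZK/Kafka stack) and RemoteLogAppenderModule (MasterEnvironment-based stack). A hypothetical helper that factors out this shared selection, written only to make the recurring pattern explicit, could look like the sketch below; it is not part of CDAP.

// Hypothetical helper capturing the recurring log-appender/discovery selection; illustrative only.
// masterEnv may be null when no MasterEnvironment is available.
static void addLogAppenderModules(List<Module> modules, CoreSecurityModule coreSecurityModule,
    MasterEnvironment masterEnv) {
  if (masterEnv == null) {
    // Old Hadoop stack: ZK-based discovery plus the Kafka-backed log appender.
    modules.add(new ZKClientModule());
    modules.add(new ZKDiscoveryModule());
    modules.add(new KafkaClientModule());
    modules.add(new KafkaLogAppenderModule());
    return;
  }
  // MasterEnvironment available: discovery comes from the environment, logs go through the remote appender.
  modules.add(new AbstractModule() {
    @Override
    protected void configure() {
      bind(DiscoveryService.class)
          .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceSupplier()));
      bind(DiscoveryServiceClient.class)
          .toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceClientSupplier()));
    }
  });
  modules.add(new RemoteLogAppenderModule());
  if (coreSecurityModule.requiresZKClient()) {
    modules.add(new ZKClientModule());
  }
}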