Use of io.cdap.cdap.common.guice.ZKClientModule in project cdap by cdapio.
The class StandaloneMain, method createPersistentModules.
private static List<Module> createPersistentModules(CConfiguration cConf, Configuration hConf) {
cConf.setInt(Constants.Master.MAX_INSTANCES, 1);
cConf.setIfUnset(Constants.CFG_DATA_LEVELDB_DIR, Constants.DEFAULT_DATA_LEVELDB_DIR);
cConf.set(Constants.CFG_DATA_INMEMORY_PERSISTENCE, Constants.InMemoryPersistenceType.LEVELDB.name());
// configure all services except for router and auth to bind to 127.0.0.1
String localhost = InetAddress.getLoopbackAddress().getHostAddress();
cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, localhost);
cConf.set(Constants.MessagingSystem.HTTP_SERVER_BIND_ADDRESS, localhost);
cConf.set(Constants.Transaction.Container.ADDRESS, localhost);
cConf.set(Constants.Dataset.Executor.ADDRESS, localhost);
cConf.set(Constants.Metrics.ADDRESS, localhost);
cConf.set(Constants.MetricsProcessor.BIND_ADDRESS, localhost);
cConf.set(Constants.LogSaver.ADDRESS, localhost);
cConf.set(Constants.LogQuery.ADDRESS, localhost);
cConf.set(Constants.Explore.SERVER_ADDRESS, localhost);
cConf.set(Constants.Metadata.SERVICE_BIND_ADDRESS, localhost);
cConf.set(Constants.Preview.ADDRESS, localhost);
cConf.set(Constants.SupportBundle.SERVICE_BIND_ADDRESS, localhost);
return ImmutableList.of(
    new ConfigModule(cConf, hConf), RemoteAuthenticatorModules.getDefaultModule(), new IOModule(),
    new ZKClientModule(), new KafkaClientModule(), new MetricsHandlerModule(),
    new LogQueryRuntimeModule().getStandaloneModules(), new InMemoryDiscoveryModule(), new LocalLocationModule(),
    new ProgramRunnerRuntimeModule().getStandaloneModules(),
    new DataFabricModules(StandaloneMain.class.getName()).getStandaloneModules(),
    new DataSetsModules().getStandaloneModules(), new DataSetServiceModules().getStandaloneModules(),
    new MetricsClientRuntimeModule().getStandaloneModules(), new LocalLogAppenderModule(),
    new LogReaderRuntimeModules().getStandaloneModules(), new RouterModules().getStandaloneModules(),
    new CoreSecurityRuntimeModule().getStandaloneModules(), new ExternalAuthenticationModule(),
    new SecureStoreServerModule(), new ExploreRuntimeModule().getStandaloneModules(), new ExploreClientModule(),
    new MetadataServiceModule(), new MetadataReaderWriterModules().getStandaloneModules(), new AuditModule(),
    new AuthenticationContextModules().getMasterModule(), new AuthorizationModule(),
    new AuthorizationEnforcementModule().getStandaloneModules(),
    new PreviewConfigModule(cConf, new Configuration(), SConfiguration.create()), new PreviewManagerModule(false),
    new PreviewRunnerManagerModule().getStandaloneModules(), new MessagingServerRuntimeModule().getStandaloneModules(),
    new AppFabricServiceRuntimeModule(cConf).getStandaloneModules(), new MonitorHandlerModule(false),
    new RuntimeServerModule(), new OperationalStatsModule(), new MetricsWriterModule(),
    new SupportBundleServiceModule(), new AbstractModule() {
@Override
protected void configure() {
// Needed by MonitorHandlerModule
bind(TwillRunner.class).to(NoopTwillRunnerService.class);
}
});
}
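For orientation, here is a minimal, hypothetical usage sketch (not part of the CDAP sources; variable names are illustrative) showing how a module list like the one above is typically consumed and how the ZKClientService bound by ZKClientModule would be started:
// Hypothetical sketch: build an injector from the persistent modules and start the
// ZooKeeper client (org.apache.twill.zookeeper.ZKClientService) that ZKClientModule binds.
List<Module> modules = createPersistentModules(cConf, hConf);
Injector injector = Guice.createInjector(modules);
ZKClientService zkClient = injector.getInstance(ZKClientService.class);
zkClient.startAndWait(); // startAsync().awaitRunning() with newer Guava Service APIs
try {
  // ... start and use the other services obtained from the injector ...
} finally {
  zkClient.stopAndWait(); // stopAsync().awaitTerminated() with newer Guava
}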
Use of io.cdap.cdap.common.guice.ZKClientModule in project cdap by cdapio.
The class LocalLogAppenderResilientTest, method testResilientLogging.
@Test
public void testResilientLogging() throws Exception {
Configuration hConf = new Configuration();
CConfiguration cConf = CConfiguration.create();
File datasetDir = new File(tmpFolder.newFolder(), "datasetUser");
// noinspection ResultOfMethodCallIgnored
datasetDir.mkdirs();
cConf.set(Constants.Dataset.Manager.OUTPUT_DIR, datasetDir.getAbsolutePath());
cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, "localhost");
cConf.set(Constants.Dataset.Executor.ADDRESS, "localhost");
cConf.setInt(Constants.Dataset.Executor.PORT, Networks.getRandomPort());
cConf.set(Constants.CFG_LOCAL_DATA_DIR, tmpFolder.newFolder().getAbsolutePath());
Injector injector = Guice.createInjector(
    new ConfigModule(cConf, hConf), RemoteAuthenticatorModules.getNoOpModule(), new IOModule(),
    new ZKClientModule(), new KafkaClientModule(), new InMemoryDiscoveryModule(),
    new NonCustomLocationUnitTestModule(), new DataFabricModules().getInMemoryModules(),
    new DataSetsModules().getStandaloneModules(), new DataSetServiceModules().getInMemoryModules(),
    new TransactionMetricsModule(), new ExploreClientModule(), new LocalLogAppenderModule(),
    new NamespaceAdminTestModule(), new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(), new AbstractModule() {
@Override
protected void configure() {
bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
bind(OwnerAdmin.class).to(NoOpOwnerAdmin.class);
bind(MetadataServiceClient.class).to(NoOpMetadataServiceClient.class);
}
});
TransactionManager txManager = injector.getInstance(TransactionManager.class);
txManager.startAndWait();
StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
DatasetOpExecutorService opExecutorService = injector.getInstance(DatasetOpExecutorService.class);
opExecutorService.startAndWait();
// Start the logging before starting the service.
LoggingContextAccessor.setLoggingContext(new WorkerLoggingContext("TRL_ACCT_1", "APP_1", "WORKER_1", "RUN", "INSTANCE"));
String logBaseDir = "trl-log/log_files_" + new Random(System.currentTimeMillis()).nextLong();
cConf.set(LoggingConfiguration.LOG_BASE_DIR, logBaseDir);
cConf.setInt(LoggingConfiguration.LOG_MAX_FILE_SIZE_BYTES, 20 * 1024);
final LogAppender appender = injector.getInstance(LocalLogAppender.class);
new LogAppenderInitializer(appender).initialize("TestResilientLogging");
int failureMsgCount = 3;
final CountDownLatch failureLatch = new CountDownLatch(failureMsgCount);
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
loggerContext.getStatusManager().add(new StatusListener() {
@Override
public void addStatusEvent(Status status) {
if (status.getLevel() != Status.ERROR || status.getOrigin() != appender) {
return;
}
Throwable cause = status.getThrowable();
if (cause != null) {
Throwable rootCause = Throwables.getRootCause(cause);
if (rootCause instanceof ServiceUnavailableException) {
String serviceName = ((ServiceUnavailableException) rootCause).getServiceName();
if (Constants.Service.DATASET_MANAGER.equals(serviceName)) {
failureLatch.countDown();
}
}
}
}
});
Logger logger = LoggerFactory.getLogger("TestResilientLogging");
for (int i = 0; i < failureMsgCount; ++i) {
Exception e1 = new Exception("Test Exception1");
Exception e2 = new Exception("Test Exception2", e1);
logger.warn("Test log message " + i + " {} {}", "arg1", "arg2", e2);
}
// Wait for the three append failures to happen.
// The wait time has to be > 3 seconds because DatasetServiceClient has a 1-second timeout on discovery.
failureLatch.await(5, TimeUnit.SECONDS);
// Start dataset service, wait for it to be discoverable
DatasetService dsService = injector.getInstance(DatasetService.class);
dsService.startAndWait();
final CountDownLatch startLatch = new CountDownLatch(1);
DiscoveryServiceClient discoveryClient = injector.getInstance(DiscoveryServiceClient.class);
discoveryClient.discover(Constants.Service.DATASET_MANAGER).watchChanges(new ServiceDiscovered.ChangeListener() {
@Override
public void onChange(ServiceDiscovered serviceDiscovered) {
if (!Iterables.isEmpty(serviceDiscovered)) {
startLatch.countDown();
}
}
}, Threads.SAME_THREAD_EXECUTOR);
startLatch.await(5, TimeUnit.SECONDS);
// Do some more logging after the service is started.
for (int i = 5; i < 10; ++i) {
Exception e1 = new Exception("Test Exception1");
Exception e2 = new Exception("Test Exception2", e1);
logger.warn("Test log message " + i + " {} {}", "arg1", "arg2", e2);
}
appender.stop();
// Verify - we should have at least 5 events.
LoggingContext loggingContext = new WorkerLoggingContext("TRL_ACCT_1", "APP_1", "WORKER_1", "RUN", "INSTANCE");
FileLogReader logTail = injector.getInstance(FileLogReader.class);
LoggingTester.LogCallback logCallback1 = new LoggingTester.LogCallback();
logTail.getLogPrev(loggingContext, ReadRange.LATEST, 10, Filter.EMPTY_FILTER, logCallback1);
List<LogEvent> allEvents = logCallback1.getEvents();
Assert.assertTrue(allEvents.toString(), allEvents.size() >= 5);
// Finally - stop all services
Services.chainStop(dsService, opExecutorService, txManager);
}
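The resilience being exercised follows a simple shape: appends that fail while the dataset service is not yet discoverable surface a ServiceUnavailableException and succeed once the service comes up. A rough, hypothetical sketch of that retry shape (not the actual LocalLogAppender code):
// Hypothetical retry helper: keep attempting an operation until the backing
// service becomes available or the retry budget is exhausted.
void runWithRetry(Runnable operation, int maxAttempts) throws InterruptedException {
  for (int attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      operation.run();
      return;
    } catch (ServiceUnavailableException e) {
      // e.g. Constants.Service.DATASET_MANAGER not discoverable yet; back off and retry.
      TimeUnit.SECONDS.sleep(1);
    }
  }
  throw new IllegalStateException("Service still unavailable after " + maxAttempts + " attempts");
}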
Use of io.cdap.cdap.common.guice.ZKClientModule in project cdap by cdapio.
The class LeaderElectionMessagingServiceTest, method createInjector.
private Injector createInjector(int instanceId) {
CConfiguration cConf = CConfiguration.copy(LeaderElectionMessagingServiceTest.cConf);
cConf.setInt(Constants.MessagingSystem.CONTAINER_INSTANCE_ID, instanceId);
return Guice.createInjector(new ConfigModule(cConf), new ZKClientModule(), new ZKDiscoveryModule(), new AuthorizationEnforcementModule().getNoOpModules(), new DFSLocationModule(), new AbstractModule() {
@Override
protected void configure() {
// Bindings to services for testing only
bind(MetricsCollectionService.class).to(NoOpMetricsCollectionService.class);
// Use the same in-memory client across all injectors.
bind(NamespaceQueryAdmin.class).toInstance(namespaceQueryAdmin);
}
}, new PrivateModule() {
@Override
protected void configure() {
// This is very similar to the bindings in distributed mode, except we bind to LevelDB instead of HBase.
// Also, the LevelDB table factory has to be a single instance since the unit test runs in the same process.
bind(TableFactory.class).annotatedWith(Names.named(CachingTableFactory.DELEGATE_TABLE_FACTORY)).toInstance(levelDBTableFactory);
// The cache must be in singleton scope
bind(MessageTableCacheProvider.class).to(DefaultMessageTableCacheProvider.class).in(Scopes.SINGLETON);
bind(TableFactory.class).to(CachingTableFactory.class);
// Bind http handlers
MessagingServerRuntimeModule.bindHandlers(binder(), Constants.MessagingSystem.HANDLER_BINDING_NAME);
bind(MessagingService.class).to(LeaderElectionMessagingService.class).in(Scopes.SINGLETON);
expose(MessagingService.class);
}
});
}
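A hypothetical follow-up (variable names are illustrative, and the cast assumes the Guava Service API used elsewhere in CDAP): the ZKClientService bound by ZKClientModule must be running before the messaging service can take part in leader election:
// Hypothetical sketch: start ZK, then the messaging service; a second injector built with
// createInjector(1) yields another instance that competes for leadership over ZooKeeper.
Injector injector = createInjector(0);
ZKClientService zkClient = injector.getInstance(ZKClientService.class);
zkClient.startAndWait(); // startAsync().awaitRunning() with newer Guava
MessagingService messagingService = injector.getInstance(MessagingService.class);
if (messagingService instanceof Service) {
  ((Service) messagingService).startAndWait();
}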
Use of io.cdap.cdap.common.guice.ZKClientModule in project cdap by cdapio.
The class SystemWorkerTwillRunnable, method createInjector.
@VisibleForTesting
static Injector createInjector(CConfiguration cConf, Configuration hConf, SConfiguration sConf) {
List<Module> modules = new ArrayList<>();
ExecutorService cleanupExecutorService = Executors.newFixedThreadPool(cConf.getInt(Constants.SystemWorker.CLEANUP_THREADS));
CoreSecurityModule coreSecurityModule = new FileBasedCoreSecurityModule() {
@Override
protected void bindKeyManager(Binder binder) {
super.bindKeyManager(binder);
expose(KeyManager.class);
}
};
modules.add(new RemoteTwillModule());
modules.add(new ConfigModule(cConf, hConf, sConf));
modules.add(RemoteAuthenticatorModules.getDefaultModule());
modules.add(new IOModule());
modules.add(new AuthenticationContextModules().getMasterModule());
modules.add(coreSecurityModule);
modules.add(new MetricsClientRuntimeModule().getDistributedModules());
modules.addAll(Arrays.asList(
// In K8s, there won't be HBase, and cdap-site should be set to use the SQL store for StructuredTable.
new DataSetServiceModules().getStandaloneModules(), new AbstractModule() {
@Override
protected void configure() {
bind(ExecutorService.class).annotatedWith(Names.named(Constants.SystemWorker.CLEANUP_EXECUTOR_SERVICE_BINDING)).toInstance(cleanupExecutorService);
}
},
// The DataSets modules are only needed to satisfy dependency injection.
new DataSetsModules().getStandaloneModules(), new MessagingClientModule(), new ExploreClientModule(),
new AuthorizationModule(), new AuthorizationEnforcementModule().getDistributedModules(),
Modules.override(new AppFabricServiceRuntimeModule(cConf).getDistributedModules()).with(new AbstractModule() {
@Override
protected void configure() {
bind(StorageProviderNamespaceAdmin.class).to(LocalStorageProviderNamespaceAdmin.class);
}
}, new DistributedArtifactManagerModule()), Modules.override(new ProgramRunnerRuntimeModule().getDistributedModules(true)).with(new AbstractModule() {
@Override
protected void configure() {
bind(RemoteExecutionTwillRunnerService.class).to(FireAndForgetTwillRunnerService.class).in(Scopes.SINGLETON);
}
}), new SecureStoreClientModule(), new AbstractModule() {
@Override
protected void configure() {
install(new StorageModule());
install(new TransactionExecutorModule());
bind(TransactionSystemClientService.class).to(DelegatingTransactionSystemClientService.class);
bind(TransactionSystemClient.class).to(ConstantTransactionSystemClient.class);
}
}, new DFSLocationModule(), new AbstractModule() {
@Override
protected void configure() {
bind(MetadataPublisher.class).to(MessagingMetadataPublisher.class);
bind(MetadataServiceClient.class).to(DefaultMetadataServiceClient.class);
}
}));
// If MasterEnvironment is not available, assume it is the old Hadoop stack with ZK and Kafka.
MasterEnvironment masterEnv = MasterEnvironments.getMasterEnvironment();
if (masterEnv == null) {
modules.add(new ZKClientModule());
modules.add(new ZKDiscoveryModule());
modules.add(new KafkaClientModule());
modules.add(new KafkaLogAppenderModule());
} else {
modules.add(new AbstractModule() {
@Override
protected void configure() {
bind(DiscoveryService.class).toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceSupplier()));
bind(DiscoveryServiceClient.class).toProvider(new SupplierProviderBridge<>(masterEnv.getDiscoveryServiceClientSupplier()));
}
});
modules.add(new RemoteLogAppenderModule());
if (coreSecurityModule.requiresZKClient()) {
modules.add(new ZKClientModule());
}
}
return Guice.createInjector(modules);
}
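A hypothetical continuation: since ZKClientModule is only installed on some of the branches above, the caller would start the ZooKeeper client only when it is actually bound. A sketch of the most common branch (no MasterEnvironment present):
// Hypothetical sketch: start the ZK client only when the no-MasterEnvironment branch
// (or a core security module that requires ZK) installed ZKClientModule.
Injector injector = createInjector(cConf, hConf, sConf);
if (MasterEnvironments.getMasterEnvironment() == null) {
  ZKClientService zkClient = injector.getInstance(ZKClientService.class);
  zkClient.startAndWait(); // startAsync().awaitRunning() with newer Guava
}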
Use of io.cdap.cdap.common.guice.ZKClientModule in project cdap by cdapio.
The class HBaseMetricsTableTest, method setup.
@BeforeClass
public static void setup() throws Exception {
CConfiguration cConf = CConfiguration.create();
cConf.set(Constants.CFG_HDFS_USER, System.getProperty("user.name"));
Injector injector = Guice.createInjector(
    new DataFabricModules().getDistributedModules(), new ConfigModule(cConf, TEST_HBASE.getConfiguration()),
    new ZKClientModule(), new ZKDiscoveryModule(), new TransactionMetricsModule(), new DFSLocationModule(),
    new NamespaceAdminTestModule(), new SystemDatasetRuntimeModule().getDistributedModules(),
    new DataSetsModules().getInMemoryModules(), new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(), new AuthenticationContextModules().getNoOpModule(),
    new AbstractModule() {
@Override
protected void configure() {
bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
}
});
dsFramework = injector.getInstance(DatasetFramework.class);
tableUtil = injector.getInstance(HBaseTableUtil.class);
ddlExecutor = new HBaseDDLExecutorFactory(cConf, TEST_HBASE.getHBaseAdmin().getConfiguration()).get();
ddlExecutor.createNamespaceIfNotExists(tableUtil.getHBaseNamespace(NamespaceId.SYSTEM));
}