Use of co.cask.cdap.common.guice.IOModule in project cdap by caskdata.
Class JobQueueDebugger, method createInjector.
private static Injector createInjector() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  if (cConf.getBoolean(Constants.Security.Authorization.ENABLED)) {
    System.out.println(String.format("Disabling authorization for %s.", JobQueueDebugger.class.getSimpleName()));
    cConf.setBoolean(Constants.Security.Authorization.ENABLED, false);
  }
  // Note: login has to happen before any objects that need Kerberos credentials are instantiated.
  SecurityUtil.loginForMasterService(cConf);
  return Guice.createInjector(
    new ConfigModule(cConf, HBaseConfiguration.create()), new IOModule(), new ZKClientModule(),
    new LocationRuntimeModule().getDistributedModules(), new DiscoveryRuntimeModule().getDistributedModules(),
    new ViewAdminModules().getDistributedModules(), new StreamAdminModules().getDistributedModules(),
    new NotificationFeedClientModule(), new TwillModule(), new ExploreClientModule(),
    new DataFabricModules().getDistributedModules(), new ServiceStoreModules().getDistributedModules(),
    new DataSetsModules().getDistributedModules(), new AppFabricServiceRuntimeModule().getDistributedModules(),
    new ProgramRunnerRuntimeModule().getDistributedModules(), new SystemDatasetRuntimeModule().getDistributedModules(),
    new NotificationServiceRuntimeModule().getDistributedModules(), new MetricsClientRuntimeModule().getDistributedModules(),
    new MetricsStoreModule(), new KafkaClientModule(), new NamespaceStoreModule().getDistributedModules(),
    new AuthorizationModule(), new AuthorizationEnforcementModule().getMasterModule(),
    new SecureStoreModules().getDistributedModules(), new MessagingClientModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(HBaseTableUtil.class).toProvider(HBaseTableUtilFactory.class);
        bind(Store.class).annotatedWith(Names.named("defaultStore")).to(DefaultStore.class).in(Singleton.class);
        // This is needed because the LocalApplicationManager
        // expects a dsframework injection named datasetMDS
        bind(DatasetFramework.class).annotatedWith(Names.named("datasetMDS")).to(DatasetFramework.class).in(Singleton.class);
      }
    });
}
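A brief usage sketch follows. It is hypothetical and not part of JobQueueDebugger; it only illustrates how the bindings configured above (the named Store and DatasetFramework) could be retrieved from the returned injector with standard Guice lookups.

// Hypothetical sketch: retrieving the bindings configured in createInjector() above.
// Key and Names come from com.google.inject; which bindings to look up is an assumption.
Injector injector = createInjector();
Store defaultStore = injector.getInstance(Key.get(Store.class, Names.named("defaultStore")));
DatasetFramework datasetMDS = injector.getInstance(Key.get(DatasetFramework.class, Names.named("datasetMDS")));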
Use of co.cask.cdap.common.guice.IOModule in project cdap by caskdata.
Class TestFileBasedTokenManager, method testFileBasedKey.
/**
 * Test that two token managers can share a key that is written to a file.
 * @throws Exception
 */
@Test
public void testFileBasedKey() throws Exception {
  // Create two token managers that point to the same path.
  CConfiguration cConf = CConfiguration.create();
  cConf.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().getAbsolutePath());
  TokenManager tokenManager = Guice.createInjector(
    new IOModule(), new ConfigModule(cConf), new FileBasedSecurityModule(),
    new DiscoveryRuntimeModule().getInMemoryModules()).getInstance(TokenManager.class);
  tokenManager.startAndWait();
  TokenManager tokenManager2 = Guice.createInjector(
    new IOModule(), new ConfigModule(cConf), new FileBasedSecurityModule(),
    new DiscoveryRuntimeModule().getInMemoryModules()).getInstance(TokenManager.class);
  tokenManager2.startAndWait();
  Assert.assertNotSame("ERROR: Both token managers refer to the same object.", tokenManager, tokenManager2);
  String user = "testuser";
  long now = System.currentTimeMillis();
  List<String> groups = Lists.newArrayList("users", "admins");
  AccessTokenIdentifier identifier = new AccessTokenIdentifier(user, groups, now, now + TOKEN_DURATION);
  AccessToken token = tokenManager.signIdentifier(identifier);
  // Since both tokenManagers have the same key, they must both be able to validate the secret.
  tokenManager.validateSecret(token);
  tokenManager2.validateSecret(token);
}
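The two injector definitions in this test are identical; a small helper such as the sketch below could remove the duplication. The helper name newFileBasedTokenManager is hypothetical; the modules are exactly the ones used in the test.

// Hypothetical helper: builds a TokenManager from the same modules as the test above.
private static TokenManager newFileBasedTokenManager(CConfiguration cConf) {
  return Guice.createInjector(new IOModule(), new ConfigModule(cConf), new FileBasedSecurityModule(),
                              new DiscoveryRuntimeModule().getInMemoryModules())
    .getInstance(TokenManager.class);
}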
Use of co.cask.cdap.common.guice.IOModule in project cdap by caskdata.
Class LocalLogAppenderResilientTest, method testResilientLogging.
@Test
public void testResilientLogging() throws Exception {
  Configuration hConf = new Configuration();
  CConfiguration cConf = CConfiguration.create();
  File datasetDir = new File(tmpFolder.newFolder(), "datasetUser");
  //noinspection ResultOfMethodCallIgnored
  datasetDir.mkdirs();
  cConf.set(Constants.Dataset.Manager.OUTPUT_DIR, datasetDir.getAbsolutePath());
  cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, "localhost");
  cConf.set(Constants.Dataset.Executor.ADDRESS, "localhost");
  cConf.setInt(Constants.Dataset.Executor.PORT, Networks.getRandomPort());
  cConf.set(Constants.CFG_LOCAL_DATA_DIR, tmpFolder.newFolder().getAbsolutePath());
  Injector injector = Guice.createInjector(
    new ConfigModule(cConf, hConf), new IOModule(), new ZKClientModule(), new KafkaClientModule(),
    new DiscoveryRuntimeModule().getInMemoryModules(), new NonCustomLocationUnitTestModule().getModule(),
    new DataFabricModules().getInMemoryModules(), new DataSetsModules().getStandaloneModules(),
    new DataSetServiceModules().getInMemoryModules(), new TransactionMetricsModule(), new ExploreClientModule(),
    new LoggingModules().getInMemoryModules(), new NamespaceClientRuntimeModule().getInMemoryModules(),
    new AuthorizationTestModule(), new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
        bind(OwnerAdmin.class).to(NoOpOwnerAdmin.class);
      }
    });
  TransactionManager txManager = injector.getInstance(TransactionManager.class);
  txManager.startAndWait();
  DatasetOpExecutorService opExecutorService = injector.getInstance(DatasetOpExecutorService.class);
  opExecutorService.startAndWait();
  // Start the logging before starting the service.
  LoggingContextAccessor.setLoggingContext(new FlowletLoggingContext("TRL_ACCT_1", "APP_1", "FLOW_1", "FLOWLET_1", "RUN", "INSTANCE"));
  String logBaseDir = "trl-log/log_files_" + new Random(System.currentTimeMillis()).nextLong();
  cConf.set(LoggingConfiguration.LOG_BASE_DIR, logBaseDir);
  cConf.setInt(LoggingConfiguration.LOG_MAX_FILE_SIZE_BYTES, 20 * 1024);
  final LogAppender appender = injector.getInstance(LocalLogAppender.class);
  new LogAppenderInitializer(appender).initialize("TestResilientLogging");
  int failureMsgCount = 3;
  final CountDownLatch failureLatch = new CountDownLatch(failureMsgCount);
  LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
  loggerContext.getStatusManager().add(new StatusListener() {
    @Override
    public void addStatusEvent(Status status) {
      if (status.getLevel() != Status.ERROR || status.getOrigin() != appender) {
        return;
      }
      Throwable cause = status.getThrowable();
      if (cause != null) {
        Throwable rootCause = Throwables.getRootCause(cause);
        if (rootCause instanceof ServiceUnavailableException) {
          String serviceName = ((ServiceUnavailableException) rootCause).getServiceName();
          if (Constants.Service.DATASET_MANAGER.equals(serviceName)) {
            failureLatch.countDown();
          }
        }
      }
    }
  });
  Logger logger = LoggerFactory.getLogger("TestResilientLogging");
  for (int i = 0; i < failureMsgCount; ++i) {
    Exception e1 = new Exception("Test Exception1");
    Exception e2 = new Exception("Test Exception2", e1);
    logger.warn("Test log message " + i + " {} {}", "arg1", "arg2", e2);
  }
  // Wait for the three append failures to happen.
  // The wait time has to be > 3 seconds because DatasetServiceClient has a 1-second timeout on discovery.
  failureLatch.await(5, TimeUnit.SECONDS);
  // Start the dataset service and wait for it to become discoverable.
  DatasetService dsService = injector.getInstance(DatasetService.class);
  dsService.startAndWait();
  final CountDownLatch startLatch = new CountDownLatch(1);
  DiscoveryServiceClient discoveryClient = injector.getInstance(DiscoveryServiceClient.class);
  discoveryClient.discover(Constants.Service.DATASET_MANAGER).watchChanges(new ServiceDiscovered.ChangeListener() {
    @Override
    public void onChange(ServiceDiscovered serviceDiscovered) {
      if (!Iterables.isEmpty(serviceDiscovered)) {
        startLatch.countDown();
      }
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  startLatch.await(5, TimeUnit.SECONDS);
  // Do some more logging after the service is started.
  for (int i = 5; i < 10; ++i) {
    Exception e1 = new Exception("Test Exception1");
    Exception e2 = new Exception("Test Exception2", e1);
    logger.warn("Test log message " + i + " {} {}", "arg1", "arg2", e2);
  }
  appender.stop();
  // Verify that we have at least 5 events.
  LoggingContext loggingContext = new FlowletLoggingContext("TRL_ACCT_1", "APP_1", "FLOW_1", "", "RUN", "INSTANCE");
  FileLogReader logTail = injector.getInstance(FileLogReader.class);
  LoggingTester.LogCallback logCallback1 = new LoggingTester.LogCallback();
  logTail.getLogPrev(loggingContext, ReadRange.LATEST, 10, Filter.EMPTY_FILTER, logCallback1);
  List<LogEvent> allEvents = logCallback1.getEvents();
  Assert.assertTrue(allEvents.toString(), allEvents.size() >= 5);
  // Finally, stop all services.
  Services.chainStop(dsService, opExecutorService, txManager);
}
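The discovery-wait idiom in the middle of this test could be factored into a reusable helper. The sketch below is hypothetical (the method name waitForService is an assumption) and uses only the APIs already shown in the test.

// Hypothetical helper: blocks until the named service is discoverable or the timeout expires.
private static boolean waitForService(DiscoveryServiceClient discoveryClient, String serviceName,
                                      long timeout, TimeUnit unit) throws InterruptedException {
  final CountDownLatch latch = new CountDownLatch(1);
  discoveryClient.discover(serviceName).watchChanges(new ServiceDiscovered.ChangeListener() {
    @Override
    public void onChange(ServiceDiscovered serviceDiscovered) {
      if (!Iterables.isEmpty(serviceDiscovered)) {
        latch.countDown();
      }
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  return latch.await(timeout, unit);
}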
Use of co.cask.cdap.common.guice.IOModule in project cdap by caskdata.
Class StandaloneMain, method createPersistentModules.
private static List<Module> createPersistentModules(CConfiguration cConf, Configuration hConf) {
  cConf.setIfUnset(Constants.CFG_DATA_LEVELDB_DIR, Constants.DEFAULT_DATA_LEVELDB_DIR);
  cConf.set(Constants.CFG_DATA_INMEMORY_PERSISTENCE, Constants.InMemoryPersistenceType.LEVELDB.name());
  // configure all services except for router and auth to bind to 127.0.0.1
  String localhost = InetAddress.getLoopbackAddress().getHostAddress();
  cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, localhost);
  cConf.set(Constants.Transaction.Container.ADDRESS, localhost);
  cConf.set(Constants.Dataset.Executor.ADDRESS, localhost);
  cConf.set(Constants.Stream.ADDRESS, localhost);
  cConf.set(Constants.Metrics.ADDRESS, localhost);
  cConf.set(Constants.Metrics.SERVER_ADDRESS, localhost);
  cConf.set(Constants.MetricsProcessor.ADDRESS, localhost);
  cConf.set(Constants.LogSaver.ADDRESS, localhost);
  cConf.set(Constants.Explore.SERVER_ADDRESS, localhost);
  cConf.set(Constants.Metadata.SERVICE_BIND_ADDRESS, localhost);
  cConf.set(Constants.Preview.ADDRESS, localhost);
  return ImmutableList.of(
    new ConfigModule(cConf, hConf), new IOModule(), new ZKClientModule(), new KafkaClientModule(),
    new MetricsHandlerModule(), new DiscoveryRuntimeModule().getStandaloneModules(),
    new LocationRuntimeModule().getStandaloneModules(), new ProgramRunnerRuntimeModule().getStandaloneModules(),
    new DataFabricModules(StandaloneMain.class.getName()).getStandaloneModules(),
    new DataSetsModules().getStandaloneModules(), new DataSetServiceModules().getStandaloneModules(),
    new MetricsClientRuntimeModule().getStandaloneModules(), new LoggingModules().getStandaloneModules(),
    new LogReaderRuntimeModules().getStandaloneModules(), new RouterModules().getStandaloneModules(),
    new SecurityModules().getStandaloneModules(), new SecureStoreModules().getStandaloneModules(),
    new StreamServiceRuntimeModule().getStandaloneModules(), new ExploreRuntimeModule().getStandaloneModules(),
    new ServiceStoreModules().getStandaloneModules(), new ExploreClientModule(),
    new NotificationFeedServiceRuntimeModule().getStandaloneModules(),
    new NotificationServiceRuntimeModule().getStandaloneModules(), new ViewAdminModules().getStandaloneModules(),
    new StreamAdminModules().getStandaloneModules(), new NamespaceStoreModule().getStandaloneModules(),
    new MetadataServiceModule(), new RemoteSystemOperationsServiceModule(), new AuditModule().getStandaloneModules(),
    new AuthorizationModule(), new AuthorizationEnforcementModule().getStandaloneModules(), new PreviewHttpModule(),
    new MessagingServerRuntimeModule().getStandaloneModules(),
    new AppFabricServiceRuntimeModule().getStandaloneModules(), new OperationalStatsModule());
}
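A hypothetical usage sketch: the returned list can be handed directly to Guice, for example as below. How StandaloneMain actually consumes the list is not shown in this snippet.

// Hypothetical sketch: turning the persistent modules into an injector.
List<Module> modules = createPersistentModules(cConf, hConf);
Injector injector = Guice.createInjector(modules);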
Use of co.cask.cdap.common.guice.IOModule in project cdap by caskdata.
Class DefaultPreviewManager, method createPreviewInjector.
/**
 * Creates an injector for the given application id.
 */
@VisibleForTesting
Injector createPreviewInjector(ApplicationId applicationId) throws IOException {
  CConfiguration previewCConf = CConfiguration.copy(cConf);
  java.nio.file.Path previewDirPath = Paths.get(cConf.get(Constants.CFG_LOCAL_DATA_DIR), "preview").toAbsolutePath();
  Files.createDirectories(previewDirPath);
  java.nio.file.Path previewDir = Files.createDirectories(
    Paths.get(previewDirPath.toAbsolutePath().toString(), applicationId.getApplication()));
  previewCConf.set(Constants.CFG_LOCAL_DATA_DIR, previewDir.toString());
  Configuration previewHConf = new Configuration(hConf);
  previewHConf.set(Constants.CFG_LOCAL_DATA_DIR, previewDir.toString());
  previewCConf.setIfUnset(Constants.CFG_DATA_LEVELDB_DIR, previewDir.toString());
  previewCConf.setBoolean(Constants.Explore.EXPLORE_ENABLED, false);
  return Guice.createInjector(
    new ConfigModule(previewCConf, previewHConf), new IOModule(),
    new AuthenticationContextModules().getMasterModule(), new SecurityModules().getStandaloneModules(),
    new PreviewSecureStoreModule(secureStore), new PreviewStreamAdminModule(streamAdmin),
    new PreviewDiscoveryRuntimeModule(discoveryService), new LocationRuntimeModule().getStandaloneModules(),
    new ConfigStoreModule().getStandaloneModule(),
    new PreviewRunnerModule(artifactRepository, artifactStore, authorizerInstantiator, authorizationEnforcer,
                            privilegesManager, streamCoordinatorClient, preferencesStore),
    new ProgramRunnerRuntimeModule().getStandaloneModules(),
    new PreviewDataModules().getDataFabricModule(transactionManager),
    new PreviewDataModules().getDataSetsModule(datasetFramework),
    new DataSetServiceModules().getStandaloneModules(), new MetricsClientRuntimeModule().getStandaloneModules(),
    new LoggingModules().getStandaloneModules(), new NamespaceStoreModule().getStandaloneModules(),
    new MessagingServerRuntimeModule().getInMemoryModules(),
    new AbstractModule() {
      @Override
      protected void configure() {
        // Bind system datasets defined in app-fabric.
        // This has to be done here as a public binding, instead of inside PreviewRunnerModule,
        // because Guice 3 does not support exporting a multi-binder from a private module.
        MapBinder<String, DatasetModule> datasetModuleBinder = MapBinder.newMapBinder(
          binder(), String.class, DatasetModule.class, Constants.Dataset.Manager.DefaultDatasetModules.class);
        datasetModuleBinder.addBinding("app-fabric").toInstance(new AppFabricDatasetModule());
      }

      @Provides
      @Named(Constants.Service.MASTER_SERVICES_BIND_ADDRESS)
      @SuppressWarnings("unused")
      public InetAddress providesHostname(CConfiguration cConf) {
        String address = cConf.get(Constants.Preview.ADDRESS);
        return Networks.resolve(address, new InetSocketAddress("localhost", 0).getAddress());
      }
    });
}
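A hypothetical usage sketch: the @Provides method above binds the preview address under the master-services name, so a caller of createPreviewInjector could look it up with a standard Guice key, as below.

// Hypothetical sketch: retrieving the named InetAddress provided above.
Injector previewInjector = createPreviewInjector(applicationId);
InetAddress bindAddress = previewInjector.getInstance(
  Key.get(InetAddress.class, Names.named(Constants.Service.MASTER_SERVICES_BIND_ADDRESS)));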