use of io.cdap.cdap.api.security.store.SecureStoreManager in project cdap by caskdata.
the class MapReduceTaskContextProvider method createCacheLoader.
/**
* Creates a {@link CacheLoader} for the task context cache.
*/
private CacheLoader<ContextCacheKey, BasicMapReduceTaskContext> createCacheLoader(final Injector injector) {
  DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  SecureStore secureStore = injector.getInstance(SecureStore.class);
  SecureStoreManager secureStoreManager = injector.getInstance(SecureStoreManager.class);
  MessagingService messagingService = injector.getInstance(MessagingService.class);
  // Multiple instances of BasicMapReduceTaskContext can share the same program.
  AtomicReference<Program> programRef = new AtomicReference<>();
  MetadataReader metadataReader = injector.getInstance(MetadataReader.class);
  MetadataPublisher metadataPublisher = injector.getInstance(MetadataPublisher.class);
  FieldLineageWriter fieldLineageWriter = injector.getInstance(FieldLineageWriter.class);
  RemoteClientFactory remoteClientFactory = injector.getInstance(RemoteClientFactory.class);
  return new CacheLoader<ContextCacheKey, BasicMapReduceTaskContext>() {
    @Override
    public BasicMapReduceTaskContext load(ContextCacheKey key) throws Exception {
      TaskAttemptID taskAttemptId = key.getTaskAttemptID();
      // taskAttemptId can be null if used from an org.apache.hadoop.mapreduce.Partitioner or
      // from an org.apache.hadoop.io.RawComparator, in which case we can get the JobId from the conf. Note that
      // the JobId isn't in the conf for the OutputCommitter#setupJob method, in which case we use the taskAttemptId.
      Path txFile = MainOutputCommitter.getTxFile(key.getConfiguration(),
                                                  taskAttemptId != null ? taskAttemptId.getJobID() : null);
      FileSystem fs = txFile.getFileSystem(key.getConfiguration());
      Transaction transaction = null;
      if (fs.exists(txFile)) {
        try (FSDataInputStream txFileInputStream = fs.open(txFile)) {
          transaction = new TransactionCodec().decode(ByteStreams.toByteArray(txFileInputStream));
        }
      }
      MapReduceContextConfig contextConfig = new MapReduceContextConfig(key.getConfiguration());
      MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(key.getConfiguration());
      Program program = programRef.get();
      if (program == null) {
        // Creation of the program is relatively cheap, so just create it and do a compare-and-set.
        programRef.compareAndSet(null, createProgram(contextConfig, classLoader.getProgramClassLoader()));
        program = programRef.get();
      }
      WorkflowProgramInfo workflowInfo = contextConfig.getWorkflowProgramInfo();
      DatasetFramework programDatasetFramework = workflowInfo == null
        ? datasetFramework
        : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo,
                                                                   program.getApplicationSpecification());
      // Setup dataset framework context, if required
      if (programDatasetFramework instanceof ProgramContextAware) {
        ProgramRunId programRunId = program.getId().run(ProgramRunners.getRunId(contextConfig.getProgramOptions()));
        ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programRunId));
      }
      MapReduceSpecification spec = program.getApplicationSpecification().getMapReduce().get(program.getName());
      MetricsCollectionService metricsCollectionService = null;
      MapReduceMetrics.TaskType taskType = null;
      String taskId = null;
      ProgramOptions options = contextConfig.getProgramOptions();
      // taskAttemptId is null when called from a Partitioner or a RawComparator (see above)
      if (taskAttemptId != null) {
        taskId = taskAttemptId.getTaskID().toString();
        if (MapReduceMetrics.TaskType.hasType(taskAttemptId.getTaskType())) {
          taskType = MapReduceMetrics.TaskType.from(taskAttemptId.getTaskType());
          // Only mapper and reducer tasks need the metrics collection service.
          metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
          options = new SimpleProgramOptions(options.getProgramId(), options.getArguments(),
                                             new BasicArguments(RuntimeArguments.extractScope(
                                               "task", taskType.toString().toLowerCase(),
                                               contextConfig.getProgramOptions().getUserArguments().asMap())),
                                             options.isDebug());
        }
      }
      CConfiguration cConf = injector.getInstance(CConfiguration.class);
      TransactionSystemClient txClient = injector.getInstance(TransactionSystemClient.class);
      NamespaceQueryAdmin namespaceQueryAdmin = injector.getInstance(NamespaceQueryAdmin.class);
      // accessEnforcer, authenticationContext and mapReduceClassLoader come from the enclosing
      // MapReduceTaskContextProvider scope and are not shown in this excerpt.
      return new BasicMapReduceTaskContext(program, options, cConf, taskType, taskId, spec,
                                           workflowInfo, discoveryServiceClient, metricsCollectionService,
                                           txClient, transaction, programDatasetFramework,
                                           classLoader.getPluginInstantiator(),
                                           contextConfig.getLocalizedResources(), secureStore,
                                           secureStoreManager, accessEnforcer, authenticationContext,
                                           messagingService, mapReduceClassLoader, metadataReader,
                                           metadataPublisher, namespaceQueryAdmin, fieldLineageWriter,
                                           remoteClientFactory);
    }
  };
}
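The javadoc above only covers the loader; the caching itself is done by whatever wraps it, typically a Guava LoadingCache. The sketch below shows one plausible wiring under that assumption; the class name, eviction policy, and the exact way the provider builds its cache are illustrative rather than taken from the CDAP source, and ContextCacheKey and BasicMapReduceTaskContext are the CDAP classes from the snippet above.

// Illustrative only: wrap the CacheLoader in a Guava LoadingCache so that repeated lookups
// for the same ContextCacheKey reuse a single BasicMapReduceTaskContext.
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

final class TaskContextCacheSketch {
  private final LoadingCache<ContextCacheKey, BasicMapReduceTaskContext> taskContexts;

  TaskContextCacheSketch(CacheLoader<ContextCacheKey, BasicMapReduceTaskContext> loader) {
    this.taskContexts = CacheBuilder.newBuilder()
      // The eviction policy is an assumption; the real provider manages context lifetimes itself.
      .expireAfterAccess(1, TimeUnit.HOURS)
      .build(loader);
  }

  BasicMapReduceTaskContext getContext(ContextCacheKey key) throws ExecutionException {
    // Each distinct key is loaded once through load(ContextCacheKey), then served from the cache.
    return taskContexts.get(key);
  }
}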
use of io.cdap.cdap.api.security.store.SecureStoreManager in project cdap by caskdata.
the class TestBase method initialize.
@BeforeClass
public static void initialize() throws Exception {
  if (nestedStartCount++ > 0) {
    return;
  }
  File localDataDir = TMP_FOLDER.newFolder();
  cConf = createCConf(localDataDir);
  CConfiguration previewCConf = createPreviewConf(cConf);
  LevelDBTableService previewLevelDBTableService = new LevelDBTableService();
  previewLevelDBTableService.setConfiguration(previewCConf);
  // enable default services
  File capabilityFolder = new File(localDataDir.toString(), "capability");
  capabilityFolder.mkdir();
  cConf.set(Constants.Capability.CONFIG_DIR, capabilityFolder.getAbsolutePath());
  cConf.setInt(Constants.Capability.AUTO_INSTALL_THREADS, 5);
  org.apache.hadoop.conf.Configuration hConf = new org.apache.hadoop.conf.Configuration();
  hConf.addResource("mapred-site-local.xml");
  hConf.reloadConfiguration();
  hConf.set(Constants.CFG_LOCAL_DATA_DIR, localDataDir.getAbsolutePath());
  hConf.set(Constants.AppFabric.OUTPUT_DIR, cConf.get(Constants.AppFabric.OUTPUT_DIR));
  hConf.set("hadoop.tmp.dir", new File(localDataDir, cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsolutePath());
  // Windows specific requirements
  if (OSDetector.isWindows()) {
    File tmpDir = TMP_FOLDER.newFolder();
    File binDir = new File(tmpDir, "bin");
    Assert.assertTrue(binDir.mkdirs());
    copyTempFile("hadoop.dll", tmpDir);
    copyTempFile("winutils.exe", binDir);
    System.setProperty("hadoop.home.dir", tmpDir.getAbsolutePath());
    System.load(new File(tmpDir, "hadoop.dll").getAbsolutePath());
  }
  injector = Guice.createInjector(
    createDataFabricModule(),
    new TransactionExecutorModule(),
    new DataSetsModules().getStandaloneModules(),
    new DataSetServiceModules().getInMemoryModules(),
    new ConfigModule(cConf, hConf),
    RemoteAuthenticatorModules.getNoOpModule(),
    new IOModule(),
    new LocalLocationModule(),
    new InMemoryDiscoveryModule(),
    new AppFabricServiceRuntimeModule(cConf).getInMemoryModules(),
    new MonitorHandlerModule(false),
    new AuthenticationContextModules().getMasterModule(),
    new AuthorizationModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new ProgramRunnerRuntimeModule().getInMemoryModules(),
    new SecureStoreServerModule(),
    new MetadataReaderWriterModules().getInMemoryModules(),
    new MetadataServiceModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(MetricsManager.class).toProvider(MetricsManagerProvider.class);
      }
    },
    new MetricsClientRuntimeModule().getInMemoryModules(),
    new LocalLogAppenderModule(),
    new LogReaderRuntimeModules().getInMemoryModules(),
    new ExploreRuntimeModule().getInMemoryModules(),
    new ExploreClientModule(),
    new MessagingServerRuntimeModule().getInMemoryModules(),
    new PreviewConfigModule(cConf, new Configuration(), SConfiguration.create()),
    new PreviewManagerModule(false),
    new PreviewRunnerManagerModule().getInMemoryModules(),
    new SupportBundleServiceModule(),
    new MockProvisionerModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        install(new FactoryModuleBuilder()
                  .implement(ApplicationManager.class, DefaultApplicationManager.class)
                  .build(ApplicationManagerFactory.class));
        install(new FactoryModuleBuilder()
                  .implement(ArtifactManager.class, DefaultArtifactManager.class)
                  .build(ArtifactManagerFactory.class));
        bind(TemporaryFolder.class).toInstance(TMP_FOLDER);
        bind(AuthorizationHandler.class).in(Scopes.SINGLETON);
        // Needed by MonitorHandlerModule
        bind(TwillRunner.class).to(NoopTwillRunnerService.class);
        bind(MetadataSubscriberService.class).in(Scopes.SINGLETON);
      }
    });
  messagingService = injector.getInstance(MessagingService.class);
  if (messagingService instanceof Service) {
    ((Service) messagingService).startAndWait();
  }
  txService = injector.getInstance(TransactionManager.class);
  txService.startAndWait();
  metadataSubscriberService = injector.getInstance(MetadataSubscriberService.class);
  metadataStorage = injector.getInstance(MetadataStorage.class);
  metadataAdmin = injector.getInstance(MetadataAdmin.class);
  metadataStorage.createIndex();
  metadataService = injector.getInstance(MetadataService.class);
  metadataService.startAndWait();
  // Define all StructuredTable before starting any services that need StructuredTable
  StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
  dsOpService = injector.getInstance(DatasetOpExecutorService.class);
  dsOpService.startAndWait();
  datasetService = injector.getInstance(DatasetService.class);
  datasetService.startAndWait();
  metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
  metricsCollectionService.startAndWait();
  if (cConf.getBoolean(Constants.Explore.EXPLORE_ENABLED)) {
    exploreExecutorService = injector.getInstance(ExploreExecutorService.class);
    exploreExecutorService.startAndWait();
    // wait for explore service to be discoverable
    DiscoveryServiceClient discoveryService = injector.getInstance(DiscoveryServiceClient.class);
    EndpointStrategy endpointStrategy = new RandomEndpointStrategy(
      () -> discoveryService.discover(Constants.Service.EXPLORE_HTTP_USER_SERVICE));
    Preconditions.checkNotNull(endpointStrategy.pick(5, TimeUnit.SECONDS),
                               "%s service is not up after 5 seconds", Constants.Service.EXPLORE_HTTP_USER_SERVICE);
    exploreClient = injector.getInstance(ExploreClient.class);
  }
  programScheduler = injector.getInstance(Scheduler.class);
  if (programScheduler instanceof Service) {
    ((Service) programScheduler).startAndWait();
  }
  testManager = injector.getInstance(UnitTestManager.class);
  metricsManager = injector.getInstance(MetricsManager.class);
  accessControllerInstantiator = injector.getInstance(AccessControllerInstantiator.class);
  // This is needed so the logged-in user can successfully create the default namespace
  if (cConf.getBoolean(Constants.Security.Authorization.ENABLED)) {
    String user = System.getProperty("user.name");
    SecurityRequestContext.setUserId(user);
    InstanceId instance = new InstanceId(cConf.get(Constants.INSTANCE_NAME));
    Principal principal = new Principal(user, Principal.PrincipalType.USER);
    accessControllerInstantiator.get()
      .grant(Authorizable.fromEntityId(instance), principal, EnumSet.allOf(StandardPermission.class));
    accessControllerInstantiator.get()
      .grant(Authorizable.fromEntityId(NamespaceId.DEFAULT), principal, EnumSet.allOf(StandardPermission.class));
  }
  namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
  if (firstInit) {
    // only create the default namespace on first test. if multiple tests are run in the same JVM,
    // then any time after the first time, the default namespace already exists. That is because
    // the namespaceAdmin.delete(Id.Namespace.DEFAULT) in finish() only clears the default namespace
    // but does not remove it entirely
    namespaceAdmin.create(NamespaceMeta.DEFAULT);
    ProfileService profileService = injector.getInstance(ProfileService.class);
    profileService.saveProfile(ProfileId.NATIVE, Profile.NATIVE);
  }
  secureStore = injector.getInstance(SecureStore.class);
  secureStoreManager = injector.getInstance(SecureStoreManager.class);
  messagingContext = new MultiThreadMessagingContext(messagingService);
  firstInit = false;
  previewHttpServer = injector.getInstance(PreviewHttpServer.class);
  previewHttpServer.startAndWait();
  fieldLineageAdmin = injector.getInstance(FieldLineageAdmin.class);
  lineageAdmin = injector.getInstance(LineageAdmin.class);
  metadataSubscriberService.startAndWait();
  previewRunnerManager = injector.getInstance(PreviewRunnerManager.class);
  if (previewRunnerManager instanceof Service) {
    ((Service) previewRunnerManager).startAndWait();
  }
  appFabricServer = injector.getInstance(AppFabricServer.class);
  appFabricServer.startAndWait();
  preferencesService = injector.getInstance(PreferencesService.class);
  scheduler = injector.getInstance(Scheduler.class);
  if (scheduler instanceof Service) {
    ((Service) scheduler).startAndWait();
  }
  if (scheduler instanceof CoreSchedulerService) {
    ((CoreSchedulerService) scheduler).waitUntilFunctional(10, TimeUnit.SECONDS);
  }
  supportBundleInternalService = injector.getInstance(SupportBundleInternalService.class);
  supportBundleInternalService.startAndWait();
  appFabricHealthCheckService = injector.getInstance(HealthCheckService.class);
  appFabricHealthCheckService.helper(Constants.AppFabricHealthCheck.APP_FABRIC_HEALTH_CHECK_SERVICE,
                                     cConf, Constants.Service.MASTER_SERVICES_BIND_ADDRESS);
  appFabricHealthCheckService.startAndWait();
}
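Every test class that extends TestBase goes through the initialization above exactly once per JVM (guarded by nestedStartCount). A minimal subclass sketch follows; the TestConfiguration class rule is the CDAP test framework's usual way to toggle flags such as Explore before initialize() runs, while the class name, test body, and the getMetricsManager() accessor are assumptions for illustration only.

import io.cdap.cdap.common.conf.Constants;
import io.cdap.cdap.test.TestBase;
import io.cdap.cdap.test.TestConfiguration;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;

// Illustrative subclass: relies on TestBase.initialize() having started messaging, datasets,
// app-fabric, the secure store, and the other services shown above.
public class ExampleTestBaseUsage extends TestBase {

  // Skip the Explore/Hive startup for tests that do not need it (see the EXPLORE_ENABLED check above).
  @ClassRule
  public static final TestConfiguration CONFIG =
    new TestConfiguration(Constants.Explore.EXPLORE_ENABLED, false);

  @Test
  public void servicesAreUp() throws Exception {
    // Accessor name is an assumption; TestBase may expose the MetricsManager differently.
    Assert.assertNotNull(getMetricsManager());
  }
}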
use of io.cdap.cdap.api.security.store.SecureStoreManager in project cdap by caskdata.
the class DefaultSecureStoreServiceTest method setup.
@BeforeClass
public static void setup() throws Exception {
  SConfiguration sConf = SConfiguration.create();
  sConf.set(Constants.Security.Store.FILE_PASSWORD, "secret");
  CConfiguration cConf = createCConf();
  final Injector injector = AppFabricTestHelper.getInjector(cConf, sConf);
  discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  appFabricServer = injector.getInstance(AppFabricServer.class);
  appFabricServer.startAndWait();
  waitForService(Constants.Service.DATASET_MANAGER);
  secureStore = injector.getInstance(SecureStore.class);
  secureStoreManager = injector.getInstance(SecureStoreManager.class);
  accessController = injector.getInstance(AccessControllerInstantiator.class).get();
  // Wait for the default namespace creation
  String user = AuthorizationUtil.getEffectiveMasterUser(cConf);
  accessController.grant(Authorizable.fromEntityId(NamespaceId.DEFAULT),
                         new Principal(user, Principal.PrincipalType.USER),
                         EnumSet.allOf(StandardPermission.class));
  // Starting the Appfabric server will create the default namespace
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return injector.getInstance(NamespaceAdmin.class).exists(NamespaceId.DEFAULT);
    }
  }, 5, TimeUnit.SECONDS);
  accessController.revoke(Authorizable.fromEntityId(NamespaceId.DEFAULT),
                          new Principal(user, Principal.PrincipalType.USER),
                          Collections.singleton(StandardPermission.UPDATE));
}
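The test methods themselves are not part of this excerpt. The sketch below shows the kind of round trip such a method could perform, inside the same class whose setup is shown above, against the file-backed store configured there (note FILE_PASSWORD in setup); the key name, payload, and the assumption that the calling principal holds the permissions granted in setup are illustrative.

// Additional imports assumed for this sketch:
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import org.junit.Assert;
import org.junit.Test;

@Test
public void testSecureKeyRoundTrip() throws Exception {
  String namespace = NamespaceId.DEFAULT.getNamespace();
  // Write a secret through the SecureStoreManager, read it back through the SecureStore, then clean up.
  // The static secureStoreManager and secureStore fields are the ones initialized in setup() above.
  secureStoreManager.put(namespace, "db.password", "s3cr3t", "illustrative secret", Collections.emptyMap());
  byte[] stored = secureStore.get(namespace, "db.password").get();
  Assert.assertEquals("s3cr3t", new String(stored, StandardCharsets.UTF_8));
  secureStoreManager.delete(namespace, "db.password");
}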