Use of co.cask.cdap.internal.app.runtime.artifact.ArtifactRepository in project cdap by caskdata.
From class AuthorizationBootstrapperTest, method setup().
// One-time fixture: builds a security-enabled CConfiguration, packages the in-memory
// authorizer as the authorization extension jar, then creates a Guice injector and
// captures the services under test into static fields. Ordering matters: every cConf
// setting and the on-disk system artifact must be in place BEFORE the injector is built.
@BeforeClass
public static void setup() throws Exception {
CConfiguration cConf = CConfiguration.create();
cConf.set(Constants.CFG_LOCAL_DATA_DIR, TMP_FOLDER.newFolder().getAbsolutePath());
// enable security and authorization, but keep Kerberos off for a local test run
cConf.setBoolean(Constants.Security.ENABLED, true);
cConf.setBoolean(Constants.Security.KERBEROS_ENABLED, false);
cConf.setBoolean(Constants.Security.Authorization.ENABLED, true);
// package InMemoryAuthorizer into a jar and register it as the authorization extension
Location deploymentJar = AppJarHelper.createDeploymentJar(new LocalLocationFactory(TMP_FOLDER.newFolder()), InMemoryAuthorizer.class);
cConf.set(Constants.Security.Authorization.EXTENSION_JAR_PATH, deploymentJar.toURI().getPath());
// make Alice an admin user, so she can create namespaces
cConf.set(Constants.Security.Authorization.ADMIN_USERS, ADMIN_USER.getName());
instanceId = new InstanceId(cConf.get(Constants.INSTANCE_NAME));
// setup a system artifact on disk so SystemArtifactLoader has something to load
File systemArtifactsDir = TMP_FOLDER.newFolder();
cConf.set(Constants.AppFabric.SYSTEM_ARTIFACTS_DIR, systemArtifactsDir.getAbsolutePath());
createSystemArtifact(systemArtifactsDir);
// the injector reads cConf at construction time, so it must be created last
Injector injector = Guice.createInjector(new AppFabricTestModule(cConf));
namespaceQueryAdmin = injector.getInstance(NamespaceQueryAdmin.class);
namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
defaultNamespaceEnsurer = new DefaultNamespaceEnsurer(namespaceAdmin);
discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
txManager = injector.getInstance(TransactionManager.class);
datasetService = injector.getInstance(DatasetService.class);
systemArtifactLoader = injector.getInstance(SystemArtifactLoader.class);
authorizationBootstrapper = injector.getInstance(AuthorizationBootstrapper.class);
artifactRepository = injector.getInstance(ArtifactRepository.class);
dsFramework = injector.getInstance(DatasetFramework.class);
authorizationEnforcer = injector.getInstance(AuthorizationEnforcer.class);
}
Use of co.cask.cdap.internal.app.runtime.artifact.ArtifactRepository in project cdap by caskdata.
From class StandaloneTester, method addSystemArtifact().
/**
 * Registers the given jar as a system artifact in this CDAP instance, for use in tests.
 *
 * @param name name of the artifact
 * @param version version of the artifact
 * @param artifactFile jar file containing the artifact contents
 * @param parentArtifacts ranges of parent artifacts, or {@code null} if the artifact has no parents
 * @throws Exception if adding the artifact fails
 */
public void addSystemArtifact(String name, ArtifactVersion version, File artifactFile, @Nullable Set<ArtifactRange> parentArtifacts) throws Exception {
// system artifacts live in the SYSTEM namespace
ArtifactId systemArtifactId = NamespaceId.SYSTEM.artifact(name, version.getVersion());
ArtifactRepository repository = standaloneMain.getInjector().getInstance(DefaultArtifactRepository.class);
repository.addArtifact(Id.Artifact.fromEntityId(systemArtifactId), artifactFile, parentArtifacts, null);
}
Use of co.cask.cdap.internal.app.runtime.artifact.ArtifactRepository in project cdap by caskdata.
From class ArtifactHttpHandlerTest, method setup().
// One-time fixture: pulls the artifact repository and the system-artifacts directory out
// of the injector, then discovers a metadata service endpoint and builds a client for it.
@BeforeClass
public static void setup() throws IOException {
artifactRepository = getInjector().getInstance(ArtifactRepository.class);
systemArtifactsDir = getInjector().getInstance(CConfiguration.class).get(Constants.AppFabric.SYSTEM_ARTIFACTS_DIR);
// locate a running metadata service instance via service discovery (1s timeout)
ServiceDiscovered discovered = getInjector().getInstance(DiscoveryServiceClient.class).discover(Constants.Service.METADATA_SERVICE);
Discoverable endpoint = new RandomEndpointStrategy(discovered).pick(1, TimeUnit.SECONDS);
Assert.assertNotNull(endpoint);
// connect over loopback using the discovered port
ConnectionConfig connection = ConnectionConfig.builder().setHostname("127.0.0.1").setPort(endpoint.getSocketAddress().getPort()).build();
clientConfig = ClientConfig.builder().setConnectionConfig(connection).build();
metadataClient = new MetadataClient(clientConfig);
}
Use of co.cask.cdap.internal.app.runtime.artifact.ArtifactRepository in project cdap by caskdata.
From class DefaultAppConfigurer, method addSpark().
// Adds a Spark program to the application: picks a configurer, lets the program
// configure itself, then records the resulting specification by name.
// When the Spark runtime's classloader is present, the extended configurer is loaded
// reflectively so this module needs no compile-time dependency on the Spark runtime.
@Override
public void addSpark(Spark spark) {
Preconditions.checkArgument(spark != null, "Spark cannot be null.");
DefaultSparkConfigurer configurer = null;
// It is a bit hacky here to look for the DefaultExtendedSparkConfigurer implementation through the
// SparkRunnerClassloader directly (CDAP-11797)
ClassLoader sparkRunnerClassLoader = ClassLoaders.findByName(spark.getClass().getClassLoader(), "co.cask.cdap.app.runtime.spark.classloader.SparkRunnerClassLoader");
if (sparkRunnerClassLoader != null) {
try {
// reflective equivalent of: new DefaultExtendedSparkConfigurer(spark, deployNamespace, artifactId, artifactRepository, pluginInstantiator)
configurer = (DefaultSparkConfigurer) sparkRunnerClassLoader.loadClass("co.cask.cdap.app.deploy.spark.DefaultExtendedSparkConfigurer").getConstructor(Spark.class, Id.Namespace.class, Id.Artifact.class, ArtifactRepository.class, PluginInstantiator.class).newInstance(spark, deployNamespace, artifactId, artifactRepository, pluginInstantiator);
} catch (Exception e) {
// Ignore it and the configurer will be defaulted to DefaultSparkConfigurer
LOG.trace("No DefaultExtendedSparkConfigurer found. Fallback to DefaultSparkConfigurer.", e);
}
}
if (configurer == null) {
configurer = new DefaultSparkConfigurer(spark, deployNamespace, artifactId, artifactRepository, pluginInstantiator);
}
// let the program populate the configurer, then capture datasets/plugins and the spec
spark.configure(configurer);
addDatasetsAndPlugins(configurer);
SparkSpecification spec = configurer.createSpecification();
sparks.put(spec.getName(), spec);
}
Use of co.cask.cdap.internal.app.runtime.artifact.ArtifactRepository in project cdap by caskdata.
From class PreviewRunnerModule, method configure().
// Guice wiring for a preview run. This appears to be a PrivateModule: bindings are only
// visible to callers when explicitly expose()d — bindings without an expose() are
// internal to the preview injector (presumably intentional; confirm against the class
// declaration, which is outside this view). Several dependencies are bound toInstance(),
// i.e. shared from the parent/standalone injector rather than re-created per preview.
@Override
protected void configure() {
// instances handed in from the enclosing environment, re-exposed to preview consumers
bind(ArtifactRepository.class).toInstance(artifactRepository);
expose(ArtifactRepository.class);
bind(ArtifactStore.class).toInstance(artifactStore);
expose(ArtifactStore.class);
bind(AuthorizerInstantiator.class).toInstance(authorizerInstantiator);
expose(AuthorizerInstantiator.class);
bind(AuthorizationEnforcer.class).toInstance(authorizationEnforcer);
expose(AuthorizationEnforcer.class);
bind(PrivilegesManager.class).toInstance(privilegesManager);
expose(PrivilegesManager.class);
bind(StreamConsumerFactory.class).to(InMemoryStreamConsumerFactory.class).in(Scopes.SINGLETON);
expose(StreamConsumerFactory.class);
bind(StreamCoordinatorClient.class).toInstance(streamCoordinatorClient);
expose(StreamCoordinatorClient.class);
bind(PreferencesStore.class).toInstance(preferencesStore);
// bind explore client to mock.
bind(ExploreClient.class).to(MockExploreClient.class);
expose(ExploreClient.class);
bind(StorageProviderNamespaceAdmin.class).to(LocalStorageProviderNamespaceAdmin.class);
// app deployment pipeline runs synchronously within the preview
bind(PipelineFactory.class).to(SynchronousPipelineFactory.class);
install(new FactoryModuleBuilder().implement(new TypeLiteral<Manager<AppDeploymentInfo, ApplicationWithPrograms>>() {
}, new TypeLiteral<PreviewApplicationManager<AppDeploymentInfo, ApplicationWithPrograms>>() {
}).build(new TypeLiteral<ManagerFactory<AppDeploymentInfo, ApplicationWithPrograms>>() {
}));
bind(Store.class).to(DefaultStore.class);
bind(RouteStore.class).to(LocalRouteStore.class).in(Scopes.SINGLETON);
bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
expose(UGIProvider.class);
// RuntimeStore and Store both resolve to DefaultStore
bind(RuntimeStore.class).to(DefaultStore.class);
expose(RuntimeStore.class);
// we don't delete namespaces in preview as we just delete preview directory when its done
bind(NamespaceResourceDeleter.class).to(NoopNamespaceResourceDeleter.class).in(Scopes.SINGLETON);
bind(NamespaceAdmin.class).to(DefaultNamespaceAdmin.class).in(Scopes.SINGLETON);
bind(NamespaceQueryAdmin.class).to(DefaultNamespaceAdmin.class).in(Scopes.SINGLETON);
expose(NamespaceAdmin.class);
expose(NamespaceQueryAdmin.class);
// the preview runner itself and its supporting stores/factories
bind(PreviewRunner.class).to(DefaultPreviewRunner.class).in(Scopes.SINGLETON);
expose(PreviewRunner.class);
bind(PreviewStore.class).to(DefaultPreviewStore.class).in(Scopes.SINGLETON);
// no real scheduling during preview
bind(Scheduler.class).to(NoOpScheduler.class);
bind(DataTracerFactory.class).to(DefaultDataTracerFactory.class);
expose(DataTracerFactory.class);
bind(OwnerStore.class).to(DefaultOwnerStore.class);
expose(OwnerStore.class);
bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
expose(OwnerAdmin.class);
}
Aggregations