Use of io.cdap.cdap.internal.app.deploy.pipeline.AppSpecInfo in project cdap by caskdata.
The class ConfiguratorTask, method run.
@Override
public void run(RunnableTaskContext context) throws Exception {
  AppDeploymentInfo deploymentInfo = GSON.fromJson(context.getParam(), AppDeploymentInfo.class);
  Injector injector = Guice.createInjector(new ConfigModule(cConf),
                                           RemoteAuthenticatorModules.getDefaultModule(),
                                           new LocalLocationModule(),
                                           new ConfiguratorTaskModule(),
                                           new AuthenticationContextModules().getMasterWorkerModule());
  ConfigResponse result = injector.getInstance(ConfiguratorTaskRunner.class).configure(deploymentInfo);
  AppSpecInfo appSpecInfo = result.getAppSpecInfo();

  // If configuration succeeded and if only system artifacts are involved, no need to restart the task
  if (result.getExitCode() == 0 && appSpecInfo != null
      && NamespaceId.SYSTEM.equals(deploymentInfo.getArtifactId().getNamespaceId())) {
    boolean hasUserPlugins = appSpecInfo.getAppSpec().getPlugins().values().stream()
      .map(Plugin::getArtifactId)
      .map(ArtifactId::getScope)
      .anyMatch(ArtifactScope.USER::equals);
    context.setTerminateOnComplete(hasUserPlugins);
  }
  context.writeResult(GSON.toJson(result).getBytes(StandardCharsets.UTF_8));
}
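The stream over the plugin map is the piece that actually consults the AppSpecInfo: the task is marked for termination only when a USER-scoped plugin is involved, otherwise the worker can be reused. Factored out for readability, the check reads roughly as below (a sketch only; hasUserScopedPlugins is not a method in the CDAP codebase, and the types are the ones already used in run() above).

// Sketch: true if any plugin used by the application was loaded from a USER-scoped artifact.
// ApplicationSpecification, Plugin, ArtifactId and ArtifactScope are the same types used in run() above.
private static boolean hasUserScopedPlugins(ApplicationSpecification appSpec) {
  return appSpec.getPlugins().values().stream()
    .map(Plugin::getArtifactId)            // artifact each plugin comes from
    .map(ArtifactId::getScope)             // SYSTEM or USER
    .anyMatch(ArtifactScope.USER::equals);
}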
Use of io.cdap.cdap.internal.app.deploy.pipeline.AppSpecInfo in project cdap by caskdata.
The class InMemoryConfigurator, method createResponse.
private <T extends Config> ConfigResponse createResponse(Application<T> app, ClassLoader artifactClassLoader) throws Exception {
  // This Gson cannot be static since it is used to deserialize user class.
  // Gson will keep a static map to class, hence will leak the classloader
  Gson gson = new GsonBuilder().registerTypeAdapterFactory(new CaseInsensitiveEnumTypeAdapterFactory()).create();
  // Now, we call configure, which returns application specification.
  DefaultAppConfigurer configurer;
  File tempDir = DirUtils.createTempDir(baseUnpackDir);
  try (PluginInstantiator pluginInstantiator =
         new PluginInstantiator(cConf, app.getClass().getClassLoader(), tempDir)) {
    RuntimeConfigurer runtimeConfigurer = runtimeInfo != null
      ? new DefaultAppRuntimeConfigurer(appNamespace.getId(), remoteClientFactory,
                                        runtimeInfo.getUserArguments(), runtimeInfo.getExistingAppSpec())
      : null;
    configurer = new DefaultAppConfigurer(appNamespace, artifactId, app, configString, pluginFinder,
                                          pluginInstantiator, runtimeConfigurer, runtimeInfo,
                                          featureFlagsProvider);
    T appConfig;
    Type configType = Artifacts.getConfigType(app.getClass());
    if (configString.isEmpty()) {
      // noinspection unchecked
      appConfig = ((Class<T>) configType).newInstance();
    } else {
      try {
        appConfig = gson.fromJson(configString, configType);
      } catch (JsonSyntaxException e) {
        throw new IllegalArgumentException("Invalid JSON configuration was provided. Please check the syntax.", e);
      }
    }
    try {
      ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(
        new CombineClassLoader(null, app.getClass().getClassLoader(), getClass().getClassLoader()));
      try {
        app.configure(configurer, new DefaultApplicationContext<>(appConfig));
      } finally {
        ClassLoaders.setContextClassLoader(oldClassLoader);
      }
    } catch (Throwable t) {
      Throwable rootCause = Throwables.getRootCause(t);
      if (rootCause instanceof ClassNotFoundException) {
        // Heuristic to provide better error message
        String missingClass = rootCause.getMessage();
        // If the missing class has "spark" in the name, try to see if Spark is available
        if (missingClass.startsWith("org.apache.spark.") || missingClass.startsWith("io.cdap.cdap.api.spark.")) {
          // Try to load the SparkContext class, which should be available if Spark is available in the platform
          try {
            artifactClassLoader.loadClass("org.apache.spark.SparkContext");
          } catch (ClassNotFoundException e) {
            // Spark is not available, it is most likely caused by missing Spark in the platform
            throw new IllegalStateException("Missing Spark related class " + missingClass
              + ". It may be caused by unavailability of Spark. "
              + "Please verify environment variable " + Constants.SPARK_HOME + " is set correctly", t);
          }
          // Spark is available, can be caused by incompatible Spark version
          throw new InvalidArtifactException("Missing Spark related class " + missingClass
            + ". Configured to use Spark located at " + System.getenv(Constants.SPARK_HOME)
            + ", which may be incompatible with the one required by the application", t);
        }
        // then the missing class is most likely due to some missing library in the artifact jar
        throw new InvalidArtifactException("Missing class " + missingClass
          + ". It may be caused by missing dependency jar(s) in the artifact jar.", t);
      }
      throw t;
    }
  } finally {
    try {
      DirUtils.deleteDirectoryContents(tempDir);
    } catch (IOException e) {
      LOG.warn("Exception raised when deleting directory {}", tempDir, e);
    }
  }
  ApplicationSpecification specification = configurer.createSpecification(applicationName, applicationVersion);
  AppSpecInfo appSpecInfo = new AppSpecInfo(specification, configurer.getSystemTables(), configurer.getMetadata());
  return new DefaultConfigResponse(0, appSpecInfo);
}
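The part of createResponse that most callers trip over is how the user Config is instantiated: an empty config string falls back to the config class's no-argument constructor, while a non-empty string is parsed with the case-insensitive Gson instance and surfaced as an IllegalArgumentException on bad JSON. In isolation the pattern looks like this (a sketch; instantiateConfig is a hypothetical helper, and the real method resolves the type via Artifacts.getConfigType rather than taking a Class<T> directly).

// Sketch of the config-instantiation fallback shown above, written against an explicit Class<T>.
static <T extends Config> T instantiateConfig(Class<T> configClass, String configString, Gson gson) throws Exception {
  if (configString.isEmpty()) {
    // No configuration supplied: use the config class defaults.
    return configClass.newInstance();
  }
  try {
    return gson.fromJson(configString, configClass);
  } catch (JsonSyntaxException e) {
    throw new IllegalArgumentException("Invalid JSON configuration was provided. Please check the syntax.", e);
  }
}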
Use of io.cdap.cdap.internal.app.deploy.pipeline.AppSpecInfo in project cdap by caskdata.
The class AbstractProgramRuntimeService, method regenerateAppSpec.
/**
 * Regenerates the app spec before the program start
 *
 * @return the regenerated app spec, or null if there is any exception generating the app spec.
 */
@Nullable
private ApplicationSpecification regenerateAppSpec(ArtifactDetail artifactDetail, ProgramId programId,
                                                   ArtifactId artifactId, ApplicationSpecification existingAppSpec,
                                                   ProgramOptions options)
  throws InterruptedException, ExecutionException, TimeoutException {
  ApplicationClass appClass = Iterables.getFirst(artifactDetail.getMeta().getClasses().getApps(), null);
  if (appClass == null) {
    // This should never happen.
    throw new IllegalStateException(String.format("No application class found in artifact '%s' in namespace '%s'.",
                                                  artifactDetail.getDescriptor().getArtifactId(),
                                                  programId.getNamespace()));
  }
  AppDeploymentInfo deploymentInfo = new AppDeploymentInfo(artifactId, artifactDetail.getDescriptor().getLocation(),
                                                           programId.getNamespaceId(), appClass,
                                                           existingAppSpec.getName(), existingAppSpec.getAppVersion(),
                                                           existingAppSpec.getConfiguration(), null, false,
                                                           new AppDeploymentRuntimeInfo(existingAppSpec,
                                                                                        options.getUserArguments().asMap(),
                                                                                        options.getArguments().asMap()));
  Configurator configurator = this.configuratorFactory.create(deploymentInfo);
  ListenableFuture<ConfigResponse> future = configurator.config();
  ConfigResponse response = future.get(120, TimeUnit.SECONDS);
  if (response.getExitCode() == 0) {
    AppSpecInfo appSpecInfo = response.getAppSpecInfo();
    if (appSpecInfo != null && appSpecInfo.getAppSpec() != null) {
      return appSpecInfo.getAppSpec();
    }
  }
  return null;
}
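Because the method returns null both on a non-zero exit code and on a missing AppSpecInfo, the caller has to decide what to do when regeneration fails; a typical, defensive use would fall back to the stored specification, along these lines (a sketch; the fallback is an assumption, not code from AbstractProgramRuntimeService).

// Sketch: prefer the regenerated spec, but keep the existing one if regeneration did not produce a spec.
ApplicationSpecification appSpec = regenerateAppSpec(artifactDetail, programId, artifactId, existingAppSpec, options);
if (appSpec == null) {
  appSpec = existingAppSpec;
}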
Use of io.cdap.cdap.internal.app.deploy.pipeline.AppSpecInfo in project cdap by caskdata.
The class RemoteConfiguratorTest, method testRemoteConfigurator.
@Test
public void testRemoteConfigurator() throws Exception {
  LocationFactory locationFactory = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, AllProgramsApp.class);
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact(AllProgramsApp.class.getSimpleName(), "1.0.0");
  artifacts.put(artifactId,
                new ArtifactDetail(new ArtifactDescriptor(artifactId.getNamespace(), artifactId.toApiArtifactId(), appJar),
                                   new ArtifactMeta(ArtifactClasses.builder().build())));
  AppDeploymentInfo info = new AppDeploymentInfo(artifactId, appJar, NamespaceId.DEFAULT,
                                                 new ApplicationClass(AllProgramsApp.class.getName(), "", null),
                                                 null, null, null);
  Configurator configurator = new RemoteConfigurator(cConf, metricsCollectionService, info, remoteClientFactory);

  // Extract response from the configurator.
  ListenableFuture<ConfigResponse> result = configurator.config();
  ConfigResponse response = result.get(10, TimeUnit.SECONDS);
  Assert.assertNotNull(response);

  AppSpecInfo appSpecInfo = response.getAppSpecInfo();
  if (appSpecInfo == null) {
    throw new IllegalStateException("Failed to deploy application");
  }
  ApplicationSpecification specification = appSpecInfo.getAppSpec();
  Assert.assertNotNull(specification);

  // Simple checks.
  Assert.assertEquals(AllProgramsApp.NAME, specification.getName());
  ApplicationSpecification expectedSpec = Specifications.from(new AllProgramsApp());
  for (ProgramType programType : ProgramType.values()) {
    Assert.assertEquals(expectedSpec.getProgramsByType(programType), specification.getProgramsByType(programType));
  }
  Assert.assertEquals(expectedSpec.getDatasets(), specification.getDatasets());
}
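This test and testAppWithConfig below repeat the same unwrap sequence: call config(), wait on the future, and fail when the response carries no AppSpecInfo. A shared helper would keep the tests focused on their assertions (a sketch; getAppSpecOrFail is not part of the actual test classes).

// Sketch of a shared test helper: run a Configurator and unwrap the ApplicationSpecification, failing fast otherwise.
private static ApplicationSpecification getAppSpecOrFail(Configurator configurator) throws Exception {
  ConfigResponse response = configurator.config().get(10, TimeUnit.SECONDS);
  Assert.assertNotNull(response);
  AppSpecInfo appSpecInfo = response.getAppSpecInfo();
  if (appSpecInfo == null) {
    throw new IllegalStateException("Failed to deploy application");
  }
  return appSpecInfo.getAppSpec();
}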
Use of io.cdap.cdap.internal.app.deploy.pipeline.AppSpecInfo in project cdap by caskdata.
The class ConfiguratorTest, method testAppWithConfig.
@Test
public void testAppWithConfig() throws Exception {
  LocationFactory locationFactory = new LocalLocationFactory(TMP_FOLDER.newFolder());
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, ConfigTestApp.class);
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, ConfigTestApp.class.getSimpleName(), "1.0.0");
  CConfiguration cConf = CConfiguration.create();
  ArtifactRepository baseArtifactRepo = new DefaultArtifactRepository(conf, null, null, null,
                                                                      new DummyProgramRunnerFactory(),
                                                                      new DefaultImpersonator(cConf, null));
  ArtifactRepository artifactRepo = new AuthorizationArtifactRepository(baseArtifactRepo, authEnforcer, authenticationContext);
  PluginFinder pluginFinder = new LocalPluginFinder(artifactRepo);
  ConfigTestApp.ConfigClass config = new ConfigTestApp.ConfigClass("myTable");
  AppDeploymentInfo appDeploymentInfo = new AppDeploymentInfo(artifactId.toEntityId(), appJar, NamespaceId.DEFAULT,
                                                              new ApplicationClass(ConfigTestApp.class.getName(), "", null),
                                                              null, null, new Gson().toJson(config));

  // Create a configurator that is testable. Provide it an application.
  Configurator configurator = new InMemoryConfigurator(conf, pluginFinder, new DefaultImpersonator(cConf, null),
                                                       artifactRepo, null, appDeploymentInfo);
  ListenableFuture<ConfigResponse> result = configurator.config();
  ConfigResponse response = result.get(10, TimeUnit.SECONDS);
  Assert.assertNotNull(response);

  AppSpecInfo appSpecInfo = response.getAppSpecInfo();
  if (appSpecInfo == null) {
    throw new IllegalStateException("Failed to deploy application");
  }
  ApplicationSpecification specification = appSpecInfo.getAppSpec();
  Assert.assertNotNull(specification);
  Assert.assertEquals(1, specification.getDatasets().size());
  Assert.assertTrue(specification.getDatasets().containsKey("myTable"));

  // Create a deployment info without the app configuration
  appDeploymentInfo = new AppDeploymentInfo(artifactId.toEntityId(), appJar, NamespaceId.DEFAULT,
                                            new ApplicationClass(ConfigTestApp.class.getName(), "", null),
                                            null, null, null);
  Configurator configuratorWithoutConfig = new InMemoryConfigurator(conf, pluginFinder,
                                                                    new DefaultImpersonator(cConf, null),
                                                                    artifactRepo, null, appDeploymentInfo);
  result = configuratorWithoutConfig.config();
  response = result.get(10, TimeUnit.SECONDS);
  Assert.assertNotNull(response);

  appSpecInfo = response.getAppSpecInfo();
  if (appSpecInfo == null) {
    throw new IllegalStateException("Failed to deploy application");
  }
  specification = appSpecInfo.getAppSpec();
  Assert.assertNotNull(specification);
  Assert.assertEquals(1, specification.getDatasets().size());
  Assert.assertTrue(specification.getDatasets().containsKey(ConfigTestApp.DEFAULT_TABLE));
  Assert.assertNotNull(specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME));
  ProgramStatusTrigger trigger =
    (ProgramStatusTrigger) specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME).getTrigger();
  Assert.assertEquals(trigger.getProgramId().getProgram(), ConfigTestApp.WORKFLOW_NAME);
}
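The two deployments differ only in the JSON config handed to AppDeploymentInfo, which is why the dataset ends up named "myTable" in the first run and ConfigTestApp.DEFAULT_TABLE in the second. An application with that behaviour would look roughly like the following (a hypothetical sketch, not the actual ConfigTestApp source; it assumes the public AbstractApplication and Config APIs from io.cdap.cdap.api).

import io.cdap.cdap.api.Config;
import io.cdap.cdap.api.app.AbstractApplication;
import io.cdap.cdap.api.dataset.table.Table;

// Hypothetical app whose dataset name comes from its Config, mirroring the behaviour the test asserts.
public class ConfigDrivenApp extends AbstractApplication<ConfigDrivenApp.TableConfig> {

  public static class TableConfig extends Config {
    // Dataset name supplied via the deployment configuration; may be absent when no config is provided.
    String tableName;
  }

  @Override
  public void configure() {
    TableConfig config = getConfig();
    String name = (config == null || config.tableName == null) ? "defaultTable" : config.tableName;
    // Register a Table dataset under the configured (or default) name.
    createDataset(name, Table.class);
  }
}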