Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class SystemMetadataWriterStage, method process.
@Override
public void process(ApplicationWithPrograms input) throws Exception {
  // add system metadata for apps
  ApplicationId appId = input.getApplicationId();
  ApplicationSpecification appSpec = input.getSpecification();
  // only update creation time if this is a new app
  Map<String, String> properties = metadataStore.getProperties(MetadataScope.SYSTEM, appId);
  SystemMetadataWriter appSystemMetadataWriter =
    new AppSystemMetadataWriter(metadataStore, appId, appSpec, !properties.isEmpty());
  appSystemMetadataWriter.write();
  // add system metadata for programs
  writeProgramSystemMetadata(appId, ProgramType.FLOW, appSpec.getFlows().values());
  writeProgramSystemMetadata(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce().values());
  writeProgramSystemMetadata(appId, ProgramType.SERVICE, appSpec.getServices().values());
  writeProgramSystemMetadata(appId, ProgramType.SPARK, appSpec.getSpark().values());
  writeProgramSystemMetadata(appId, ProgramType.WORKER, appSpec.getWorkers().values());
  writeProgramSystemMetadata(appId, ProgramType.WORKFLOW, appSpec.getWorkflows().values());
  // Emit input to the next stage
  emit(input);
}
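The helper writeProgramSystemMetadata called above is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming a ProgramSystemMetadataWriter with a constructor analogous to the AppSystemMetadataWriter used above; the constructor arguments and the existing-properties check are assumptions, not taken verbatim from the CDAP source:

  // Hypothetical sketch only; the actual CDAP helper may differ.
  private void writeProgramSystemMetadata(ApplicationId appId, ProgramType programType,
                                          Iterable<? extends ProgramSpecification> specs) {
    for (ProgramSpecification spec : specs) {
      ProgramId programId = appId.program(programType, spec.getName());
      // Mirror the app-level pattern above: treat the program as pre-existing only if it
      // already has system-scope properties (assumed behavior).
      Map<String, String> properties = metadataStore.getProperties(MetadataScope.SYSTEM, programId);
      SystemMetadataWriter writer =
        new ProgramSystemMetadataWriter(metadataStore, programId, spec, !properties.isEmpty());
      writer.write();
    }
  }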
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class InMemoryConfigurator, method getSpecJson.
private <T extends Config> String getSpecJson(Application<T> app) throws Exception {
  // This Gson instance cannot be static since it is used to deserialize a user class;
  // Gson keeps a static map to the class and would therefore leak the classloader.
  Gson gson = new GsonBuilder().registerTypeAdapterFactory(new CaseInsensitiveEnumTypeAdapterFactory()).create();
  // Call configure(), which builds up the application specification in the configurer.
  DefaultAppConfigurer configurer;
  File tempDir = DirUtils.createTempDir(baseUnpackDir);
  try (PluginInstantiator pluginInstantiator =
         new PluginInstantiator(cConf, app.getClass().getClassLoader(), tempDir)) {
    configurer = new DefaultAppConfigurer(appNamespace, artifactId, app, configString,
                                          artifactRepository, pluginInstantiator);
    T appConfig;
    Type configType = Artifacts.getConfigType(app.getClass());
    if (configString.isEmpty()) {
      //noinspection unchecked
      appConfig = ((Class<T>) configType).newInstance();
    } else {
      try {
        appConfig = gson.fromJson(configString, configType);
      } catch (JsonSyntaxException e) {
        throw new IllegalArgumentException("Invalid JSON configuration was provided. Please check the syntax.", e);
      }
    }
    try {
      ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(
        new CombineClassLoader(null, app.getClass().getClassLoader(), getClass().getClassLoader()));
      try {
        app.configure(configurer, new DefaultApplicationContext<>(appConfig));
      } finally {
        ClassLoaders.setContextClassLoader(oldClassLoader);
      }
    } catch (Throwable t) {
      Throwable rootCause = Throwables.getRootCause(t);
      if (rootCause instanceof ClassNotFoundException) {
        // Heuristic to provide a better error message
        String missingClass = rootCause.getMessage();
        // If the missing class has "spark" in the name, check whether Spark is available
        if (missingClass.startsWith("org.apache.spark.") || missingClass.startsWith("co.cask.cdap.api.spark.")) {
          // Try to load the SparkContext class, which should be available if Spark is available in the platform
          try {
            artifactClassLoader.loadClass("org.apache.spark.SparkContext");
          } catch (ClassNotFoundException e) {
            // Spark is not available; most likely it is missing from the platform
            throw new IllegalStateException("Missing Spark related class " + missingClass
                                              + ". It may be caused by unavailability of Spark. "
                                              + "Please verify environment variable " + Constants.SPARK_HOME
                                              + " is set correctly", t);
          }
          // Spark is available, so the failure may be caused by an incompatible Spark version
          throw new InvalidArtifactException("Missing Spark related class " + missingClass
                                               + ". Configured to use Spark located at "
                                               + System.getenv(Constants.SPARK_HOME)
                                               + ", which may be incompatible with the one required by the application", t);
        }
        // Otherwise the missing class is most likely due to a missing library in the artifact jar
        throw new InvalidArtifactException("Missing class " + missingClass
                                             + ". It may be caused by missing dependency jar(s) in the artifact jar.", t);
      }
      throw t;
    }
  } finally {
    try {
      DirUtils.deleteDirectoryContents(tempDir);
    } catch (IOException e) {
      LOG.warn("Exception raised when deleting directory {}", tempDir, e);
    }
  }
  ApplicationSpecification specification = configurer.createSpecification(applicationName, applicationVersion);
  // TODO: The SchemaGenerator should be injected
  return ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator()).toJson(specification);
}
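The JSON produced above is typically read back into an ApplicationSpecification elsewhere in the deployment flow. A small usage sketch, assuming ApplicationSpecificationAdapter also exposes a create() overload without a SchemaGenerator and a fromJson(String) method; verify both against the CDAP version in use:

  // Sketch: round-trip the spec JSON back into an ApplicationSpecification (assumed API).
  String specJson = getSpecJson(app);
  ApplicationSpecification readBack = ApplicationSpecificationAdapter.create().fromJson(specJson);
  LOG.debug("Configured application {} with {} workflow(s)",
            readBack.getName(), readBack.getWorkflows().size());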
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class ApplicationRegistrationStage, method process.
@Override
public void process(ApplicationWithPrograms input) throws Exception {
  ApplicationSpecification applicationSpecification = input.getSpecification();
  Collection<ApplicationId> allAppVersionsAppIds = store.getAllAppVersionsAppIds(input.getApplicationId());
  boolean ownerAdded = addOwnerIfRequired(input, allAppVersionsAppIds);
  try {
    store.addApplication(input.getApplicationId(), applicationSpecification);
  } catch (Exception e) {
    // if we failed to store the app spec, clean up the owner if it was added in this call
    if (ownerAdded) {
      ownerAdmin.delete(input.getApplicationId());
    }
    // propagate the exception
    throw e;
  }
  registerDatasets(input);
  emit(input);
}
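The addOwnerIfRequired helper referenced above is not shown in this excerpt. A plausible sketch of the compensating-action pattern it supports, assuming the owner principal comes from the deployment input and that OwnerAdmin has an add method mirroring the delete call above; both are assumptions, not verified against the CDAP source:

  // Hypothetical sketch; the real CDAP implementation may differ.
  private boolean addOwnerIfRequired(ApplicationWithPrograms input,
                                     Collection<ApplicationId> allAppVersionsAppIds) throws Exception {
    // Only the first version of an app registers an owner; later versions reuse it (assumed rule).
    if (allAppVersionsAppIds.isEmpty() && input.getOwnerPrincipal() != null) {
      ownerAdmin.add(input.getApplicationId(), input.getOwnerPrincipal());
      // Returning true lets process() delete this owner again if storing the app spec fails.
      return true;
    }
    return false;
  }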
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class ApplicationRegistrationStage, method registerDatasets.
// Register dataset usage, based upon the program specifications.
// Note that worker specifications' datasets are not registered upon app deploy, because the useDataset method of
// WorkerConfigurer is deprecated. Workers' access to datasets is meant to be completely dynamic, and other program
// types are moving in this direction.
// SparkSpecifications are similar in that a Spark program's dataset access is completely dynamic.
private void registerDatasets(ApplicationWithPrograms input) {
  ApplicationSpecification appSpec = input.getSpecification();
  ApplicationId appId = input.getApplicationId();
  NamespaceId namespaceId = appId.getParent();
  for (FlowSpecification flow : appSpec.getFlows().values()) {
    ProgramId programId = appId.flow(flow.getName());
    for (FlowletConnection connection : flow.getConnections()) {
      if (connection.getSourceType().equals(FlowletConnection.Type.STREAM)) {
        usageRegistry.register(programId, namespaceId.stream(connection.getSourceName()));
      }
    }
    for (FlowletDefinition flowlet : flow.getFlowlets().values()) {
      for (String dataset : flowlet.getDatasets()) {
        usageRegistry.register(programId, namespaceId.dataset(dataset));
      }
    }
  }
  for (MapReduceSpecification program : appSpec.getMapReduce().values()) {
    ProgramId programId = appId.mr(program.getName());
    for (String dataset : program.getDataSets()) {
      usageRegistry.register(programId, namespaceId.dataset(dataset));
    }
  }
  for (SparkSpecification sparkSpec : appSpec.getSpark().values()) {
    ProgramId programId = appId.spark(sparkSpec.getName());
    for (String dataset : sparkSpec.getDatasets()) {
      usageRegistry.register(programId, namespaceId.dataset(dataset));
    }
  }
  for (ServiceSpecification serviceSpecification : appSpec.getServices().values()) {
    ProgramId programId = appId.service(serviceSpecification.getName());
    for (HttpServiceHandlerSpecification handlerSpecification : serviceSpecification.getHandlers().values()) {
      for (String dataset : handlerSpecification.getDatasets()) {
        usageRegistry.register(programId, namespaceId.dataset(dataset));
      }
    }
  }
}
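For contrast with the deploy-time registrations above, the "completely dynamic" dataset access mentioned in the comment happens at runtime inside the program itself. A hedged illustration of a worker acquiring a dataset within a transaction; the worker class, the dataset name "events", and the Table type are placeholders chosen for the example:

  // Illustration only: the worker resolves its dataset at runtime, so nothing is recorded by registerDatasets().
  public class EventsWorker extends AbstractWorker {
    @Override
    public void run() {
      try {
        getContext().execute(new TxRunnable() {
          @Override
          public void run(DatasetContext context) throws Exception {
            Table events = context.getDataset("events"); // resolved dynamically, not declared at configure time
            // ... work with the dataset within the transaction ...
          }
        });
      } catch (TransactionFailureException e) {
        // handle or rethrow as appropriate for the worker
        throw new RuntimeException(e);
      }
    }
  }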
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class DistributedMapReduceProgramRunner, method setupLaunchConfig.
@Override
protected void setupLaunchConfig(LaunchConfig launchConfig, Program program, ProgramOptions options,
                                 CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
  // Get the resources for the container that runs the mapred client, which will launch the actual mapred job.
  Map<String, String> clientArgs = RuntimeArguments.extractScope("task", "client", options.getUserArguments().asMap());
  // Add the runnable. Only one instance is needed for the MR driver.
  launchConfig
    .addRunnable(spec.getName(), new MapReduceTwillRunnable(spec.getName()), 1, clientArgs,
                 spec.getDriverResources(), 0)
    .addExtraResources(MapReduceContainerHelper.localizeFramework(hConf, new HashMap<String, LocalizeResource>()))
    .addExtraClasspath(MapReduceContainerHelper.addMapReduceClassPath(hConf, new ArrayList<String>()))
    .addExtraDependencies(YarnClientProtocolProvider.class);
}
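RuntimeArguments.extractScope above is what narrows the user's runtime arguments down to the MR client container. A small hedged illustration of the scoping convention, assuming keys of the form <scope>.<name>.<key>; the specific keys are invented for the example and the exact precedence rules should be checked against the CDAP documentation:

  // Illustration only; key names are made up.
  Map<String, String> userArgs = new HashMap<>();
  userArgs.put("task.client.memory.mb", "1024"); // intended only for the "client" task
  userArgs.put("log.level", "INFO");             // unscoped argument
  Map<String, String> clientArgs = RuntimeArguments.extractScope("task", "client", userArgs);
  // Under the assumed convention, clientArgs sees "memory.mb" -> "1024" with the "task.client." prefix stripped.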