Use of io.cdap.cdap.api.mapreduce.MapReduceSpecification in project cdap (caskdata):
class ApplicationRegistrationStage, method registerDatasets.
/**
 * Records dataset usage for the programs of a deployed application in the usage registry.
 *
 * <p>Registration covers MapReduce programs, Spark programs, and the handlers of every
 * service. Worker specifications are deliberately skipped: the {@code useDataset} method of
 * {@code WorkerConfigurer} is deprecated and workers are meant to access datasets purely
 * dynamically at runtime, a direction other program types are also moving toward.
 *
 * @param input the deployed application whose specification supplies the program/dataset pairs
 */
private void registerDatasets(ApplicationWithPrograms input) {
  ApplicationSpecification spec = input.getSpecification();
  ApplicationId appId = input.getApplicationId();
  NamespaceId namespace = appId.getParent();

  // MapReduce programs declare their datasets directly on the specification.
  for (MapReduceSpecification mrSpec : spec.getMapReduce().values()) {
    ProgramId mrProgram = appId.mr(mrSpec.getName());
    for (String datasetName : mrSpec.getDataSets()) {
      usageRegistry.register(mrProgram, namespace.dataset(datasetName));
    }
  }

  // Spark programs likewise carry a dataset list on their specification.
  for (SparkSpecification sparkSpec : spec.getSpark().values()) {
    ProgramId sparkProgram = appId.spark(sparkSpec.getName());
    for (String datasetName : sparkSpec.getDatasets()) {
      usageRegistry.register(sparkProgram, namespace.dataset(datasetName));
    }
  }

  // Services register the union of datasets declared by each of their HTTP handlers.
  for (ServiceSpecification serviceSpec : spec.getServices().values()) {
    ProgramId serviceProgram = appId.service(serviceSpec.getName());
    for (HttpServiceHandlerSpecification handlerSpec : serviceSpec.getHandlers().values()) {
      for (String datasetName : handlerSpec.getDatasets()) {
        usageRegistry.register(serviceProgram, namespace.dataset(datasetName));
      }
    }
  }
}
Use of io.cdap.cdap.api.mapreduce.MapReduceSpecification in project cdap (caskdata):
class ApplicationSpecificationCodec, method deserialize.
/**
 * Rebuilds an {@link ApplicationSpecification} from its JSON form.
 *
 * <p>Required fields ({@code name}, {@code description}, {@code artifactId}) are read
 * unconditionally; {@code appVersion}, {@code appCDAPVersion} and {@code configuration} are
 * optional and fall back to a default version or {@code null} when absent. All program and
 * dataset maps are delegated to {@code deserializeMap} with their respective value types.
 */
@Override
public ApplicationSpecification deserialize(JsonElement json, Type typeOfT,
    JsonDeserializationContext context) throws JsonParseException {
  JsonObject obj = json.getAsJsonObject();

  // Required scalars.
  String name = obj.get("name").getAsString();
  String description = obj.get("description").getAsString();

  // Optional scalars: default version when missing, null for the rest.
  String appVersion = obj.has("appVersion")
      ? obj.get("appVersion").getAsString()
      : ApplicationId.DEFAULT_VERSION;
  String appCDAPVersion = obj.has("appCDAPVersion")
      ? obj.get("appCDAPVersion").getAsString()
      : null;
  String configuration = obj.has("configuration")
      ? obj.get("configuration").getAsString()
      : null;

  ArtifactId artifactId = context.deserialize(obj.get("artifactId"), ArtifactId.class);

  // Named maps of dataset and program specifications.
  Map<String, String> datasetModules =
      deserializeMap(obj.get("datasetModules"), context, String.class);
  Map<String, DatasetCreationSpec> datasetInstances =
      deserializeMap(obj.get("datasetInstances"), context, DatasetCreationSpec.class);
  Map<String, MapReduceSpecification> mapReduces =
      deserializeMap(obj.get("mapReduces"), context, MapReduceSpecification.class);
  Map<String, SparkSpecification> sparks =
      deserializeMap(obj.get("sparks"), context, SparkSpecification.class);
  Map<String, WorkflowSpecification> workflows =
      deserializeMap(obj.get("workflows"), context, WorkflowSpecification.class);
  Map<String, ServiceSpecification> services =
      deserializeMap(obj.get("services"), context, ServiceSpecification.class);
  Map<String, ScheduleCreationSpec> programSchedules =
      deserializeMap(obj.get("programSchedules"), context, ScheduleCreationSpec.class);
  Map<String, WorkerSpecification> workers =
      deserializeMap(obj.get("workers"), context, WorkerSpecification.class);
  Map<String, Plugin> plugins =
      deserializeMap(obj.get("plugins"), context, Plugin.class);

  return new DefaultApplicationSpecification(name, appVersion, appCDAPVersion, description,
      configuration, artifactId, datasetModules, datasetInstances, mapReduces, sparks,
      workflows, services, programSchedules, workers, plugins);
}
Use of io.cdap.cdap.api.mapreduce.MapReduceSpecification in project cdap (caskdata):
class DefaultAppConfigurer, method addMapReduce.
/**
 * Configures a MapReduce program and registers its specification with this application.
 *
 * <p>The program's own {@code configure} hook is invoked against a fresh
 * {@code DefaultMapReduceConfigurer}; datasets and plugins it declares are then collected
 * before the resulting specification is stored under the program's name.
 *
 * @param mapReduce the MapReduce program to add; must not be {@code null}
 */
@Override
public void addMapReduce(MapReduce mapReduce) {
  // Fail fast on a null program rather than inside the configurer.
  Preconditions.checkArgument(mapReduce != null, "MapReduce cannot be null.");
  DefaultMapReduceConfigurer mrConfigurer = new DefaultMapReduceConfigurer(
      mapReduce, deployNamespace, artifactId, pluginFinder, pluginInstantiator,
      runtimeInfo, getFeatureFlagsProvider());
  mapReduce.configure(mrConfigurer);
  addDatasetsAndPlugins(mrConfigurer);
  MapReduceSpecification mrSpec = mrConfigurer.createSpecification();
  mapReduces.put(mrSpec.getName(), mrSpec);
}
Use of io.cdap.cdap.api.mapreduce.MapReduceSpecification in project cdap (cdapio):
class DefaultStoreTest, method testLoadingProgram.
/**
 * Verifies that a program stored as part of an application can be loaded back and that
 * its MapReduce specification carries the expected name and class name.
 */
@Test
public void testLoadingProgram() throws Exception {
  // Derive the app spec from FooApp and register it in the default namespace.
  ApplicationSpecification spec = Specifications.from(new FooApp());
  ApplicationId app = NamespaceId.DEFAULT.app(spec.getName());
  store.addApplication(app, spec);

  // Load the MapReduce program back out of the store.
  ProgramDescriptor descriptor = store.loadProgram(app.mr("mrJob1"));
  Assert.assertNotNull(descriptor);

  MapReduceSpecification mrSpec = descriptor.getSpecification();
  Assert.assertEquals("mrJob1", mrSpec.getName());
  Assert.assertEquals(FooMapReduceJob.class.getName(), mrSpec.getClassName());
}
Use of io.cdap.cdap.api.mapreduce.MapReduceSpecification in project cdap (cdapio):
class DistributedMapReduceProgramRunner, method setupLaunchConfig.
/**
 * Prepares the launch configuration for a distributed MapReduce run.
 *
 * <p>Adds exactly one runnable — the container hosting the mapred client that submits the
 * actual MapReduce job — sized by the spec's driver resources and the "task"-scoped "client"
 * runtime arguments. For on-premise clusters (or when the remote-runner flag is set) the MR
 * framework resources, classpath and YARN client protocol dependency are also localized.
 */
@Override
protected void setupLaunchConfig(ProgramLaunchConfig launchConfig, Program program,
    ProgramOptions options, CConfiguration cConf, Configuration hConf, File tempDir) {
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  MapReduceSpecification mrSpec = appSpec.getMapReduce().get(program.getName());

  // Runtime arguments scoped to the mapred client container.
  Map<String, String> clientArgs =
      RuntimeArguments.extractScope("task", "client", options.getUserArguments().asMap());

  // Exactly one instance for the MR driver/client.
  launchConfig.addRunnable(mrSpec.getName(), new MapReduceTwillRunnable(mrSpec.getName()),
      1, clientArgs, mrSpec.getDriverResources(), 0);

  if (clusterMode == ClusterMode.ON_PREMISE
      || cConf.getBoolean(Constants.AppFabric.PROGRAM_REMOTE_RUNNER, false)) {
    // Localize the MR framework, extend the classpath, and ship the YARN client provider.
    launchConfig
        .addExtraResources(
            MapReduceContainerHelper.localizeFramework(hConf, new HashMap<>()))
        .addExtraClasspath(
            MapReduceContainerHelper.addMapReduceClassPath(hConf, new ArrayList<>()))
        .addExtraDependencies(YarnClientProtocolProvider.class);
  }
}
Aggregations