Use of co.cask.cdap.api.Resources in project cdap by caskdata.
The class FlowletSpecificationCodec, method serialize:
@Override
public JsonElement serialize(FlowletSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject jsonObj = new JsonObject();
  jsonObj.add("className", new JsonPrimitive(src.getClassName()));
  jsonObj.add("name", new JsonPrimitive(src.getName()));
  jsonObj.add("description", new JsonPrimitive(src.getDescription()));
  jsonObj.add("failurePolicy", new JsonPrimitive(src.getFailurePolicy().name()));
  jsonObj.add("datasets", serializeSet(src.getDataSets(), context, String.class));
  jsonObj.add("properties", serializeMap(src.getProperties(), context, String.class));
  jsonObj.add("resources", context.serialize(src.getResources(), Resources.class));
  return jsonObj;
}
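Not shown in the snippet is how the codec gets attached to a Gson instance. A minimal wiring sketch, assuming FlowletSpecificationCodec can be registered directly on a GsonBuilder and that FlowletSpecification lives in co.cask.cdap.api.flow.flowlet (the actual registration in CDAP may be done elsewhere and differ in detail):

// Wiring sketch (assumption, not taken verbatim from the CDAP sources).
import co.cask.cdap.api.Resources;
import co.cask.cdap.api.flow.flowlet.FlowletSpecification;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

final class FlowletSpecificationCodecWiring {
  static String toJson(FlowletSpecification spec) {
    Gson gson = new GsonBuilder()
      .registerTypeAdapter(FlowletSpecification.class, new FlowletSpecificationCodec())
      .create();
    // The "resources" field is produced by context.serialize(src.getResources(), Resources.class),
    // which falls back to Gson's reflective handling of the Resources class unless an
    // adapter for Resources is also registered.
    return gson.toJson(spec, FlowletSpecification.class);
  }
}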
Use of co.cask.cdap.api.Resources in project cdap by caskdata.
The class SparkSpecificationCodec, method deserialize:
@Override
public SparkSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
  JsonObject jsonObj = json.getAsJsonObject();
  String className = jsonObj.get("className").getAsString();
  String name = jsonObj.get("name").getAsString();
  String description = jsonObj.get("description").getAsString();
  String mainClassName = jsonObj.get("mainClassName").getAsString();
  Set<String> datasets = deserializeSet(jsonObj.get("datasets"), context, String.class);
  Map<String, String> properties = deserializeMap(jsonObj.get("properties"), context, String.class);
  Resources clientResources = deserializeResources(jsonObj, "client", context);
  Resources driverResources = deserializeResources(jsonObj, "driver", context);
  Resources executorResources = deserializeResources(jsonObj, "executor", context);
  return new SparkSpecification(className, name, description, mainClassName,
                                datasets, properties, clientResources, driverResources, executorResources);
}
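The deserializeResources helper used for the client, driver, and executor entries is not part of this snippet. A plausible sketch, assuming it reads an optional "<prefix>Resources" member; the field naming and null handling are assumptions, not the actual CDAP implementation:

// Hypothetical helper; the real CDAP implementation may differ.
private Resources deserializeResources(JsonObject jsonObj, String prefix,
                                        JsonDeserializationContext context) {
  // e.g. prefix "client" -> field "clientResources", mirroring the SparkSpecification getters
  JsonElement element = jsonObj.get(prefix + "Resources");
  if (element == null || element.isJsonNull()) {
    return null;
  }
  return context.deserialize(element, Resources.class);
}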
Use of co.cask.cdap.api.Resources in project cdap by caskdata.
The class WorkerSpecificationCodec, method deserialize:
@Override
public WorkerSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
  JsonObject jsonObj = (JsonObject) json;
  String className = jsonObj.get("className").getAsString();
  String name = jsonObj.get("name").getAsString();
  String description = jsonObj.get("description").getAsString();
  Map<String, String> properties = deserializeMap(jsonObj.get("properties"), context, String.class);
  Resources resources = context.deserialize(jsonObj.get("resources"), Resources.class);
  Set<String> datasets = deserializeSet(jsonObj.get("datasets"), context, String.class);
  int instances = jsonObj.get("instances").getAsInt();
  return new WorkerSpecification(className, name, description, properties, datasets, resources, instances);
}
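A usage sketch for the deserializer, assuming it is registered directly on a GsonBuilder (Gson and GsonBuilder imported from com.google.gson); the field names inside "resources" assume Gson's default reflective mapping of the Resources class and are illustrative only:

// Assumed wiring and illustrative JSON; not taken from the CDAP sources.
Gson gson = new GsonBuilder()
  .registerTypeAdapter(WorkerSpecification.class, new WorkerSpecificationCodec())
  .create();

String json = "{"
  + "\"className\":\"com.example.MyWorker\","
  + "\"name\":\"MyWorker\","
  + "\"description\":\"example worker\","
  + "\"properties\":{\"threshold\":\"10\"},"
  + "\"resources\":{\"memoryMB\":512,\"virtualCores\":1},"  // field names assumed
  + "\"datasets\":[\"events\"],"
  + "\"instances\":2}";

WorkerSpecification spec = gson.fromJson(json, WorkerSpecification.class);
// spec.getResources() now carries the memory and core settings parsed by
// context.deserialize(jsonObj.get("resources"), Resources.class) above.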
Use of co.cask.cdap.api.Resources in project cdap by caskdata.
The class WorkerSpecificationCodec, method serialize:
@Override
public JsonElement serialize(WorkerSpecification spec, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject object = new JsonObject();
  object.addProperty("className", spec.getClassName());
  object.addProperty("name", spec.getName());
  object.addProperty("description", spec.getDescription());
  object.add("properties", serializeMap(spec.getProperties(), context, String.class));
  object.add("resources", context.serialize(spec.getResources(), Resources.class));
  object.add("datasets", serializeSet(spec.getDatasets(), context, String.class));
  object.addProperty("instances", spec.getInstances());
}
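Combining the two WorkerSpecificationCodec methods gives a round trip. A hedged sketch, assuming the WorkerSpecification constructor shown in the deserialize method above is public, that Resources has a (memoryMB, cores) constructor, and using java.util.Collections for the small collections:

// Round-trip sketch (assumptions noted above; not taken from the CDAP sources).
WorkerSpecification original = new WorkerSpecification(
  "com.example.MyWorker", "MyWorker", "example worker",
  Collections.singletonMap("threshold", "10"),   // properties
  Collections.singleton("events"),               // datasets
  new Resources(512, 1),                         // 512 MB, 1 virtual core (assumed constructor)
  2);                                            // instances

Gson gson = new GsonBuilder()
  .registerTypeAdapter(WorkerSpecification.class, new WorkerSpecificationCodec())
  .create();

WorkerSpecification copy =
  gson.fromJson(gson.toJson(original, WorkerSpecification.class), WorkerSpecification.class);
// copy.getResources() should report the same memory and virtual cores as original.getResources().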
Use of co.cask.cdap.api.Resources in project cdap by caskdata.
The class DistributedSparkProgramRunner, method setupLaunchConfig:
@Override
protected void setupLaunchConfig(LaunchConfig launchConfig, Program program, ProgramOptions options, CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
  // Update the container hConf
  hConf.setBoolean(SparkRuntimeContextConfig.HCONF_ATTR_CLUSTER_MODE, true);
  hConf.set("hive.metastore.token.signature", HiveAuthFactory.HS2_CLIENT_TOKEN);
  if (SecurityUtil.isKerberosEnabled(cConf)) {
    // Need to divide the interval by 0.8 because Spark logic has a 0.8 discount on the interval
    // If we don't offset it, it will look for the new credentials too soon
    // Also add 5 seconds to the interval to give master time to push the changes to the Spark client container
    hConf.setLong(SparkRuntimeContextConfig.HCONF_ATTR_CREDENTIALS_UPDATE_INTERVAL_MS,
                  (long) ((secureStoreRenewer.getUpdateInterval() + 5000) / 0.8));
  }
  // Setup the launch config
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  SparkSpecification spec = appSpec.getSpark().get(program.getName());
  Map<String, String> clientArgs = RuntimeArguments.extractScope("task", "client", options.getUserArguments().asMap());
  Resources resources = SystemArguments.getResources(clientArgs, spec.getClientResources());
  // Add runnable. Only one instance for the spark client
  launchConfig.addRunnable(spec.getName(), new SparkTwillRunnable(spec.getName()), resources, 1);
  // Add extra resources, classpath, dependencies, env and setup ClassAcceptor
  Map<String, LocalizeResource> localizeResources = new HashMap<>();
  Map<String, String> extraEnv = new HashMap<>(SparkPackageUtils.getSparkClientEnv());
  SparkPackageUtils.prepareSparkResources(sparkCompat, locationFactory, tempDir, localizeResources, extraEnv);
  // Add the mapreduce resources and path as well for the InputFormat/OutputFormat classes
  MapReduceContainerHelper.localizeFramework(hConf, localizeResources);
  extraEnv.put(Constants.SPARK_COMPAT_ENV, sparkCompat.getCompat());
  launchConfig.addExtraResources(localizeResources)
    .addExtraDependencies(SparkProgramRuntimeProvider.class)
    .addExtraEnv(extraEnv)
    .addExtraClasspath(MapReduceContainerHelper.addMapReduceClassPath(hConf, new ArrayList<String>()))
    .setClassAcceptor(createBundlerClassAcceptor());
}
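To make the credentials-update adjustment in the Kerberos branch concrete, a worked example with an illustrative 60 second renewal interval:

// Illustrative numbers only; the actual interval comes from secureStoreRenewer.getUpdateInterval().
long updateIntervalMs = 60_000L;                                  // credentials renewed every 60s
long configuredMs = (long) ((updateIntervalMs + 5_000) / 0.8);    // = 81_250 ms written to hConf
// Spark applies its 0.8 factor to the configured value, so it looks for new credentials
// roughly every 0.8 * 81_250 ms = 65_000 ms, i.e. about 5 seconds after the renewed
// credentials have been pushed to the Spark client container.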