Use of io.cdap.cdap.api.Resources in project cdap by caskdata.
The class DistributedWorkflowProgramRunner, method findDriverResources.
/**
 * Returns the {@link Resources} requirement for the workflow runnable, deduced from
 * the resource requirements of the Spark or MapReduce drivers that the workflow launches.
 */
private Resources findDriverResources(Collection<WorkflowNode> nodes, Map<String, Resources> runnablesResources) {
  // Find the resource requirements for the workflow based on the nodes' memory requirements
  Resources resources = new Resources();
  for (WorkflowNode node : nodes) {
    switch (node.getType()) {
      case ACTION:
        String programName = ((WorkflowActionNode) node).getProgram().getProgramName();
        Resources runnableResources = runnablesResources.get(programName);
        if (runnableResources != null) {
          resources = maxResources(resources, runnableResources);
        }
        break;
      case FORK:
        Resources forkResources = ((WorkflowForkNode) node).getBranches().stream()
          .map(branch -> findDriverResources(branch, runnablesResources))
          .reduce(this::mergeForkResources)
          .orElse(resources);
        resources = maxResources(resources, forkResources);
        break;
      case CONDITION:
        Resources branchesResources = maxResources(
          findDriverResources(((WorkflowConditionNode) node).getIfBranch(), runnablesResources),
          findDriverResources(((WorkflowConditionNode) node).getElseBranch(), runnablesResources));
        resources = maxResources(resources, branchesResources);
        break;
      default:
        // This shouldn't happen unless a new node type is added
        LOG.warn("Ignoring unsupported Workflow node type {}", node.getType());
    }
  }
  return resources;
}
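The method leans on two helpers, maxResources and mergeForkResources, that are not shown on this page. Below is a minimal sketch of plausible implementations, assuming maxResources takes a component-wise maximum (nodes on the same path run one at a time) while mergeForkResources sums requirements (fork branches run in parallel); the bodies are assumptions, not the project's actual code.

// Hedged sketch: possible implementations of the helpers used above.
// Resources here is io.cdap.cdap.api.Resources (memory in MB plus virtual cores).
private Resources maxResources(Resources r1, Resources r2) {
  // Component-wise maximum: only one of the two nodes runs at any given time.
  return new Resources(Math.max(r1.getMemoryMB(), r2.getMemoryMB()),
                       Math.max(r1.getVirtualCores(), r2.getVirtualCores()));
}

private Resources mergeForkResources(Resources r1, Resources r2) {
  // Fork branches run concurrently, so their requirements add up (assumption).
  return new Resources(r1.getMemoryMB() + r2.getMemoryMB(),
                       r1.getVirtualCores() + r2.getVirtualCores());
}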
Use of io.cdap.cdap.api.Resources in project cdap by caskdata.
The class BatchPhaseSpecTest, method testDescription.
@Test
public void testDescription() throws Exception {
  /*
   * source1 --|
   *           |--> sink.connector
   * source2 --|
   */
  Map<String, String> props = new HashMap<>();
  PluginSpec connectorSpec =
    new PluginSpec(Constants.Connector.PLUGIN_TYPE, "connector", ImmutableMap.<String, String>of(), null);
  ArtifactId artifactId = new ArtifactId("art", new ArtifactVersion("1.0.0"), ArtifactScope.USER);
  PipelinePhase.Builder builder =
    PipelinePhase.builder(ImmutableSet.of(BatchSource.PLUGIN_TYPE, Constants.Connector.PLUGIN_TYPE))
      .addStage(StageSpec.builder("source1",
                                  new PluginSpec(BatchSource.PLUGIN_TYPE, "src", props, artifactId)).build())
      .addStage(StageSpec.builder("source2",
                                  new PluginSpec(BatchSource.PLUGIN_TYPE, "src", props, artifactId))
                  .addInputSchema("a", Schema.recordOf("stuff", Schema.Field.of("x", Schema.of(Schema.Type.INT))))
                  .build())
      .addStage(StageSpec.builder("sink.connector", connectorSpec).build())
      .addConnection("source1", "sink.connector")
      .addConnection("source2", "sink.connector");
  BatchPhaseSpec phaseSpec =
    new BatchPhaseSpec("phase-1", builder.build(), new Resources(), new Resources(), new Resources(),
                       false, false, Collections.<String, String>emptyMap(), 0,
                       Collections.<String, String>emptyMap(), false, null);
  Assert.assertEquals("Sources 'source1', 'source2' to sinks 'sink.connector'.", phaseSpec.getDescription());
}
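The three new Resources() arguments in the BatchPhaseSpec constructor fall back to the class defaults. When explicit sizing is needed, Resources also has a two-argument constructor; a minimal illustration (the values are arbitrary):

// Explicit resource requirements: 1024 MB of memory and 2 virtual cores.
Resources driverResources = new Resources(1024, 2);
int memoryMB = driverResources.getMemoryMB();   // 1024
int cores = driverResources.getVirtualCores();  // 2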
Use of io.cdap.cdap.api.Resources in project cdap by caskdata.
The class PipelineSpecGeneratorTest, method testInputSchemasWithDifferentName.
@Test
public void testInputSchemasWithDifferentName() {
  ETLBatchConfig etlConfig = ETLBatchConfig.builder()
    .addStage(new ETLStage("s1", MOCK_SOURCE))
    .addStage(new ETLStage("s2", MOCK_SOURCE2))
    .addStage(new ETLStage("sink", MOCK_SINK))
    .addConnection("s1", "sink")
    .addConnection("s2", "sink")
    .setNumOfRecordsPreview(100)
    .build();
  Map<String, String> emptyMap = Collections.emptyMap();
  PipelineSpec expected = BatchPipelineSpec.builder()
    .addStage(StageSpec.builder("s1", new PluginSpec(BatchSource.PLUGIN_TYPE, "mocksource", emptyMap, ARTIFACT_ID))
                .addOutput(SCHEMA_A, "sink")
                .build())
    .addStage(StageSpec.builder("s2", new PluginSpec(BatchSource.PLUGIN_TYPE, "mocksource2", emptyMap, ARTIFACT_ID))
                .addOutput(SCHEMA_A2, "sink")
                .build())
    .addStage(StageSpec.builder("sink", new PluginSpec(BatchSink.PLUGIN_TYPE, "mocksink", emptyMap, ARTIFACT_ID))
                .addInputSchemas(ImmutableMap.of("s1", SCHEMA_A, "s2", SCHEMA_A2))
                .setErrorSchema(SCHEMA_A)
                .build())
    .addConnections(etlConfig.getConnections())
    .setResources(etlConfig.getResources())
    .setDriverResources(new Resources(1024, 1))
    .setClientResources(new Resources(1024, 1))
    .setStageLoggingEnabled(etlConfig.isStageLoggingEnabled())
    .setNumOfRecordsPreview(etlConfig.getNumOfRecordsPreview())
    .build();
  PipelineSpec actual = specGenerator.generateSpec(etlConfig);
  Assert.assertEquals(expected, actual);
}
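SCHEMA_A and SCHEMA_A2 are constants defined elsewhere in the test class. A hedged sketch of what such definitions could look like, assuming two record schemas that differ only in record name (the field layout shown is an assumption):

// Hypothetical definitions; the actual constants live in PipelineSpecGeneratorTest.
private static final Schema SCHEMA_A =
  Schema.recordOf("a", Schema.Field.of("a", Schema.of(Schema.Type.STRING)));
private static final Schema SCHEMA_A2 =
  Schema.recordOf("a2", Schema.Field.of("a", Schema.of(Schema.Type.STRING)));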
Use of io.cdap.cdap.api.Resources in project cdap by caskdata.
The class ServiceSpecificationCodec, method decodeOldSpec.
private ServiceSpecification decodeOldSpec(JsonObject json) {
  String className = json.get("classname").getAsString();
  TwillSpecification twillSpec =
    twillSpecificationAdapter.fromJson(json.get("spec").getAsString()).getTwillSpecification();
  Map<String, HttpServiceHandlerSpecification> handlers = Maps.newHashMap();
  RuntimeSpecification handlerSpec = twillSpec.getRunnables().get(twillSpec.getName());
  Map<String, String> configs = handlerSpec.getRunnableSpecification().getConfigs();
  // Get the class names of all handlers. They are stored in the handler runnable spec configs.
  List<String> handlerClasses = GSON.fromJson(configs.get("service.runnable.handlers"),
                                              new TypeToken<List<String>>() { }.getType());
  List<JsonObject> handlerSpecs = GSON.fromJson(configs.get("service.runnable.handler.spec"),
                                                new TypeToken<List<JsonObject>>() { }.getType());
  for (int i = 0; i < handlerClasses.size(); i++) {
    String handlerClass = handlerClasses.get(i);
    JsonObject spec = handlerSpecs.get(i);
    Map<String, String> properties = GSON.fromJson(spec.get("properties"),
                                                   new TypeToken<Map<String, String>>() { }.getType());
    // Reconstruct the HttpServiceSpecification. There is no way to determine the datasets or
    // endpoints, as they are not recorded in the old spec. That's OK, since the spec is only
    // used to load data from MDS during redeploy.
    handlers.put(spec.get("name").getAsString(),
                 new HttpServiceHandlerSpecification(handlerClass,
                                                     spec.get("name").getAsString(),
                                                     spec.get("description").getAsString(),
                                                     properties,
                                                     Collections.emptySet(),
                                                     Collections.emptyList()));
  }
  ResourceSpecification resourceSpec = handlerSpec.getResourceSpecification();
  return new ServiceSpecification(className, twillSpec.getName(), twillSpec.getName(), handlers,
                                  new Resources(resourceSpec.getMemorySize(), resourceSpec.getVirtualCores()),
                                  resourceSpec.getInstances(), Collections.emptyMap());
}
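Since the codec also understands a newer format (written by serialize below), its deserialize method presumably dispatches on the JSON shape. A hedged sketch of that dispatch, assuming the old format is recognizable by its "spec" key; decodeNewSpec is a hypothetical helper, not from the source:

@Override
public ServiceSpecification deserialize(JsonElement json, Type typeOfT,
                                        JsonDeserializationContext context) throws JsonParseException {
  JsonObject jsonObj = json.getAsJsonObject();
  if (jsonObj.has("spec")) {
    // Old format: the whole spec is a serialized TwillSpecification string.
    return decodeOldSpec(jsonObj);
  }
  // New format: read back the fields written by serialize(...) below.
  return decodeNewSpec(jsonObj, context); // hypothetical helper
}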
Use of io.cdap.cdap.api.Resources in project cdap by caskdata.
The class ServiceSpecificationCodec, method serialize.
@Override
public JsonElement serialize(ServiceSpecification spec, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject object = new JsonObject();
  object.addProperty("className", spec.getClassName());
  object.addProperty("name", spec.getName());
  object.addProperty("description", spec.getDescription());
  object.add("plugins", serializeMap(spec.getPlugins(), context, Plugin.class));
  object.add("handlers", serializeMap(spec.getHandlers(), context, HttpServiceHandlerSpecification.class));
  object.add("resources", context.serialize(spec.getResources(), Resources.class));
  object.addProperty("instances", spec.getInstances());
  object.add("properties", serializeMap(spec.getProperties(), context, String.class));
  return object;
}
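A serializer/deserializer pair like this is wired into Gson as a type adapter. A minimal usage sketch, assuming ServiceSpecificationCodec is registered for ServiceSpecification; the registration call is standard Gson, though this exact wiring is an assumption about how the project uses it:

// Register the codec and round-trip a specification through JSON.
// serviceSpec: an existing ServiceSpecification instance (assumed).
Gson gson = new GsonBuilder()
  .registerTypeAdapter(ServiceSpecification.class, new ServiceSpecificationCodec())
  .create();
String json = gson.toJson(serviceSpec, ServiceSpecification.class);  // invokes serialize(...)
ServiceSpecification restored = gson.fromJson(json, ServiceSpecification.class);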