Use of co.cask.cdap.api.flow.FlowletDefinition in project cdap by caskdata.
In class FlowProgramRunner, method createFlowlets:
/**
 * Starts all flowlets in the flow program.
 *
 * @param program Program to run
 * @param options Options for the program run; per-flowlet options are resolved from these
 * @param flowSpec The {@link FlowSpecification}.
 * @return A {@link Table} with row as flowlet id, column as instance id, cell as the {@link ProgramController}
 * for the flowlet.
 */
private Table<String, Integer, ProgramController> createFlowlets(Program program, ProgramOptions options, FlowSpecification flowSpec) {
Table<String, Integer, ProgramController> flowlets = HashBasedTable.create();
try {
for (Map.Entry<String, FlowletDefinition> entry : flowSpec.getFlowlets().entrySet()) {
ProgramOptions flowletOptions = resolveFlowletOptions(options, entry.getKey());
int instanceCount = entry.getValue().getInstances();
for (int instanceId = 0; instanceId < instanceCount; instanceId++) {
flowlets.put(entry.getKey(), instanceId, startFlowlet(program, createFlowletOptions(entry.getKey(), instanceId, instanceCount, flowletOptions)));
}
}
} catch (Throwable t) {
try {
// Need to stop all flowlets that were already started before propagating the failure.
Futures.successfulAsList(Iterables.transform(flowlets.values(), new Function<ProgramController, ListenableFuture<?>>() {
@Override
public ListenableFuture<?> apply(ProgramController controller) {
return controller.stop();
}
})).get();
} catch (Exception e) {
// Restore the interrupt status if the wait for shutdown was interrupted.
if (e instanceof InterruptedException) {
Thread.currentThread().interrupt();
}
// Log the stop failure with its cause; the original failure 't' is still propagated below.
LOG.error("Failed to stop all flowlets on failure.", e);
}
throw Throwables.propagate(t);
}
return flowlets;
}
Use of co.cask.cdap.api.flow.FlowletDefinition in project cdap by caskdata.
In class FlowSpecificationCodec, method deserialize:
@Override
public FlowSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
// Extract the flow's component collections first, then assemble the spec,
// reading the scalar attributes (className/name/description) inline.
JsonObject obj = json.getAsJsonObject();
Map<String, FlowletDefinition> flowletMap = deserializeMap(obj.get("flowlets"), context, FlowletDefinition.class);
List<FlowletConnection> connectionList = deserializeList(obj.get("connections"), context, FlowletConnection.class);
return new DefaultFlowSpecification(obj.get("className").getAsString(),
                                    obj.get("name").getAsString(),
                                    obj.get("description").getAsString(),
                                    flowletMap, connectionList);
}
Use of co.cask.cdap.api.flow.FlowletDefinition in project cdap by caskdata.
In class FlowletProgramRunner, method createSchemaCache:
// Builds the schema cache from every input/output schema declared by any flowlet
// across all flows of the application.
private SchemaCache createSchemaCache(Program program) throws Exception {
ImmutableSet.Builder<Schema> allSchemas = ImmutableSet.builder();
for (FlowSpecification spec : program.getApplicationSpecification().getFlows().values()) {
for (FlowletDefinition flowlet : spec.getFlowlets().values()) {
allSchemas.addAll(Iterables.concat(flowlet.getInputs().values()));
allSchemas.addAll(Iterables.concat(flowlet.getOutputs().values()));
}
}
// Temp fix for ENG-3949: always include the old stream event schema so the type
// projection logic in the decoder can still handle data written with it.
// TODO: Remove it later. The right thing to do is to have schemas history being stored
// to support schema evolution.
allSchemas.add(schemaGenerator.generate(StreamEventData.class));
return new SchemaCache(allSchemas.build(), program.getClassLoader());
}
Use of co.cask.cdap.api.flow.FlowletDefinition in project cdap by caskdata.
In class InMemoryFlowProgramRunner, method createFlowlets:
/**
 * Starts all flowlets in the flow program.
 *
 * @param program Program to run
 * @param options Options for the program run; per-flowlet options are resolved from these
 * @param flowSpec The {@link FlowSpecification}.
 * @return A {@link Table} with row as flowlet id, column as instance id, cell as the {@link ProgramController}
 * for the flowlet.
 */
private Table<String, Integer, ProgramController> createFlowlets(Program program, ProgramOptions options, FlowSpecification flowSpec) {
Table<String, Integer, ProgramController> flowlets = HashBasedTable.create();
try {
for (Map.Entry<String, FlowletDefinition> entry : flowSpec.getFlowlets().entrySet()) {
ProgramOptions flowletOptions = resolveFlowletOptions(options, entry.getKey());
int instanceCount = entry.getValue().getInstances();
for (int instanceId = 0; instanceId < instanceCount; instanceId++) {
flowlets.put(entry.getKey(), instanceId, startFlowlet(program, createFlowletOptions(instanceId, instanceCount, flowletOptions)));
}
}
} catch (Throwable t) {
try {
// Need to stop all flowlets that were already started before propagating the failure.
Futures.successfulAsList(Iterables.transform(flowlets.values(), new Function<ProgramController, ListenableFuture<?>>() {
@Override
public ListenableFuture<?> apply(ProgramController controller) {
return controller.stop();
}
})).get();
} catch (Exception e) {
// Restore the interrupt status if the wait for shutdown was interrupted.
if (e instanceof InterruptedException) {
Thread.currentThread().interrupt();
}
// Log the stop failure with its cause; the original failure 't' is still propagated below.
LOG.error("Failed to stop all flowlets on failure.", e);
}
throw Throwables.propagate(t);
}
return flowlets;
}
Use of co.cask.cdap.api.flow.FlowletDefinition in project cdap by caskdata.
In class DefaultFlowConfigurer, method addFlowlet:
@Override
public void addFlowlet(String name, Flowlet flowlet, int instances) {
// Adds a flowlet to the flow being configured: runs the flowlet's own configure(),
// extracts its input/output types via reflection, and registers the resulting definition.
Preconditions.checkNotNull(flowlet, UserMessages.getMessage(UserErrors.INVALID_FLOWLET_NULL));
DefaultFlowletConfigurer flowletConfigurer = new DefaultFlowletConfigurer(flowlet);
flowlet.configure(flowletConfigurer);
FlowletSpecification flowletSpecification = flowletConfigurer.createSpecification();
Map<String, Set<Type>> inputTypes = new HashMap<>();
Map<String, Set<Type>> outputTypes = new HashMap<>();
// Populate input/output type maps by scanning the flowlet class for emitters and process methods.
Reflections.visit(flowlet, flowlet.getClass(), new OutputEmitterFieldExtractor(outputTypes), new ProcessMethodExtractor(inputTypes));
FlowletDefinition flowletDef = new FlowletDefinition(name, inputTypes, outputTypes, flowletSpecification, instances);
// The effective name comes from the flowlet spec, which may differ from the 'name' argument.
String flowletName = flowletDef.getFlowletSpec().getName();
// Use the lazy varargs overload so the message is only formatted when the check fails
// (consistent with the duplicate-name check below).
Preconditions.checkArgument(instances > 0, UserMessages.getMessage(UserErrors.INVALID_INSTANCES), flowletName, instances);
Preconditions.checkArgument(!flowlets.containsKey(flowletName), UserMessages.getMessage(UserErrors.INVALID_FLOWLET_EXISTS), flowletName);
flowlets.put(flowletName, flowletDef);
// Propagate any streams/datasets the flowlet's configure() declared up to the flow level.
addStreams(flowletConfigurer.getStreams());
addDatasetSpecs(flowletConfigurer.getDatasetSpecs());
addDatasetModules(flowletConfigurer.getDatasetModules());
}
Aggregations