Use of co.cask.cdap.api.workflow.WorkflowContext in project cdap by caskdata.
The class ETLWorkflow, method destroy().
@TransactionPolicy(TransactionControl.EXPLICIT)
@Override
public void destroy() {
  WorkflowContext workflowContext = getContext();
  PipelineRuntime pipelineRuntime = new PipelineRuntime(workflowContext, workflowMetrics);
  // Skip the post actions entirely when the pipeline is running in preview mode.
  if (workflowContext.getDataTracer(PostAction.PLUGIN_TYPE).isEnabled()) {
    return;
  }
  for (Map.Entry<String, PostAction> endingActionEntry : postActions.entrySet()) {
    String name = endingActionEntry.getKey();
    PostAction action = endingActionEntry.getValue();
    StageSpec stageSpec = postActionSpecs.get(name);
    BatchActionContext context = new WorkflowBackedActionContext(workflowContext, pipelineRuntime, stageSpec);
    try {
      action.run(context);
    } catch (Throwable t) {
      // Catch Throwable so that one failing action does not prevent the rest from running.
      LOG.error("Error while running ending action {}.", name, t);
    }
  }
}
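For context, each entry in postActions is a plugin of type PostAction.PLUGIN_TYPE. Below is a minimal sketch of such a plugin, assuming the standard cdap-etl-api contract (the PostAction base class and BatchActionContext); the class name and plugin name are hypothetical.

import co.cask.cdap.api.annotation.Name;
import co.cask.cdap.api.annotation.Plugin;
import co.cask.cdap.etl.api.batch.BatchActionContext;
import co.cask.cdap.etl.api.batch.PostAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical post action that logs the outcome of the pipeline run.
@Plugin(type = PostAction.PLUGIN_TYPE)
@Name("LogOutcome") // hypothetical plugin name
public class LogOutcomePostAction extends PostAction {
  private static final Logger LOG = LoggerFactory.getLogger(LogOutcomePostAction.class);

  @Override
  public void run(BatchActionContext context) {
    // BatchActionContext exposes the workflow outcome, so a post action can react to it.
    if (context.isSuccessful()) {
      LOG.info("Pipeline run succeeded.");
    } else {
      LOG.warn("Pipeline run failed.");
    }
  }
}

Because destroy() above catches Throwable per action, a plugin like this can throw without affecting the other post actions in the loop.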
Use of co.cask.cdap.api.workflow.WorkflowContext in project cdap by caskdata.
The class WorkflowDriver, method executeCondition().
@SuppressWarnings("unchecked")
private void executeCondition(ApplicationSpecification appSpec, final WorkflowConditionNode node,
                              InstantiatorFactory instantiator, ClassLoader classLoader,
                              WorkflowToken token) throws Exception {
  final BasicWorkflowContext context =
    new BasicWorkflowContext(workflowSpec, token, program, programOptions, cConf,
                             metricsCollectionService, datasetFramework, txClient,
                             discoveryServiceClient, nodeStates, pluginInstantiator,
                             secureStore, secureStoreManager, messagingService,
                             node.getConditionSpecification());
  final Iterator<WorkflowNode> iterator;
  Class<?> clz = classLoader.loadClass(node.getPredicateClassName());
  Predicate<WorkflowContext> predicate =
    instantiator.get(TypeToken.of((Class<? extends Predicate<WorkflowContext>>) clz)).create();
  if (!(predicate instanceof Condition)) {
    // A plain Predicate has no lifecycle methods, so it is evaluated directly.
    iterator = predicate.apply(context) ? node.getIfBranch().iterator() : node.getElseBranch().iterator();
  } else {
    final Condition workflowCondition = (Condition) predicate;
    Reflections.visit(workflowCondition, workflowCondition.getClass(),
                      new PropertyFieldSetter(node.getConditionSpecification().getProperties()),
                      new DataSetFieldSetter(context),
                      new MetricsFieldSetter(context.getMetrics()));
    try {
      // AbstractCondition implements a final initialize(context) and requires the subclass to
      // implement initialize(), whereas conditions that directly implement Condition can
      // override initialize(context).
      TransactionControl txControl = workflowCondition instanceof AbstractCondition
        ? Transactions.getTransactionControl(TransactionControl.IMPLICIT, AbstractCondition.class,
                                             workflowCondition, "initialize")
        : Transactions.getTransactionControl(TransactionControl.IMPLICIT, Condition.class,
                                             workflowCondition, "initialize", WorkflowContext.class);
      context.initializeProgram(workflowCondition, txControl, false);
      boolean result = context.execute(() -> workflowCondition.apply(context));
      iterator = result ? node.getIfBranch().iterator() : node.getElseBranch().iterator();
    } finally {
      TransactionControl txControl = Transactions.getTransactionControl(
        TransactionControl.IMPLICIT, Condition.class, workflowCondition, "destroy");
      context.destroyProgram(workflowCondition, txControl, false);
    }
  }
  // If a workflow updates its token at a condition node, the update is normally persisted after
  // the execution of the next node. The call below ensures that even if the workflow fails or
  // crashes right after a condition node, updates from the condition node are also persisted.
  runtimeStore.updateWorkflowToken(workflowRunId, token);
  executeAll(iterator, appSpec, instantiator, classLoader, token);
}
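For the Condition branch above: a condition is a Predicate&lt;WorkflowContext&gt; with lifecycle methods. A rough sketch of one built on AbstractCondition, assuming the co.cask.cdap.api.workflow types used above; the class name and token key are hypothetical, and the assumption is that apply() is the only method a subclass must implement.

import co.cask.cdap.api.workflow.AbstractCondition;
import co.cask.cdap.api.workflow.Value;
import co.cask.cdap.api.workflow.WorkflowContext;

// Hypothetical condition that branches on a counter written to the workflow token
// by an earlier node.
public class NonEmptyOutputCondition extends AbstractCondition {

  @Override
  public boolean apply(WorkflowContext context) {
    // "records.out" is a hypothetical token key; a missing value falls to the else branch.
    Value count = context.getToken().get("records.out");
    return count != null && count.getAsLong() > 0;
  }
}

Because it extends AbstractCondition, the driver resolves the transaction control from the no-arg initialize() rather than initialize(context), as the instanceof check above shows.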
Use of co.cask.cdap.api.workflow.WorkflowContext in project cdap by caskdata.
The class SmartWorkflow, method destroy().
@Override
public void destroy() {
  WorkflowContext workflowContext = getContext();
  PipelineRuntime pipelineRuntime = new PipelineRuntime(workflowContext, workflowMetrics);
  // Execute the post actions only if the pipeline is not running in preview mode.
  if (!workflowContext.getDataTracer(PostAction.PLUGIN_TYPE).isEnabled()) {
    for (Map.Entry<String, PostAction> endingActionEntry : postActions.entrySet()) {
      String name = endingActionEntry.getKey();
      PostAction action = endingActionEntry.getValue();
      StageSpec stageSpec = stageSpecs.get(name);
      BatchActionContext context = new WorkflowBackedActionContext(workflowContext, pipelineRuntime, stageSpec);
      try {
        action.run(context);
      } catch (Throwable t) {
        LOG.error("Error while running post action {}.", name, t);
      }
    }
  }
  // Publish all alerts that the pipeline stages wrote to their alert connector datasets.
  for (Map.Entry<String, AlertPublisher> alertPublisherEntry : alertPublishers.entrySet()) {
    String name = alertPublisherEntry.getKey();
    AlertPublisher alertPublisher = alertPublisherEntry.getValue();
    PartitionedFileSet alertConnector = workflowContext.getDataset(name);
    try (CloseableIterator<Alert> alerts =
           new AlertReader(alertConnector.getPartitions(PartitionFilter.ALWAYS_MATCH))) {
      if (!alerts.hasNext()) {
        continue;
      }
      StageMetrics stageMetrics = new DefaultStageMetrics(workflowMetrics, name);
      StageSpec stageSpec = stageSpecs.get(name);
      AlertPublisherContext alertContext =
        new DefaultAlertPublisherContext(pipelineRuntime, stageSpec, workflowContext, workflowContext.getAdmin());
      alertPublisher.initialize(alertContext);
      TrackedIterator<Alert> trackedIterator =
        new TrackedIterator<>(alerts, stageMetrics, Constants.Metrics.RECORDS_IN);
      alertPublisher.publish(trackedIterator);
    } catch (Exception e) {
      LOG.warn("Stage {} had errors publishing alerts. Alerts may not have been published.", name, e);
    } finally {
      try {
        alertPublisher.destroy();
      } catch (Exception e) {
        LOG.warn("Error destroying alert publisher for stage {}", name, e);
      }
    }
  }
  // Log the final pipeline status.
  ProgramStatus status = getContext().getState().getStatus();
  if (status == ProgramStatus.FAILED) {
    WRAPPERLOGGER.error("Pipeline '{}' failed.", getContext().getApplicationSpecification().getName());
  } else {
    WRAPPERLOGGER.info("Pipeline '{}' {}.", getContext().getApplicationSpecification().getName(),
                       status == ProgramStatus.COMPLETED ? "succeeded" : status.name().toLowerCase());
  }
  MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(pipelineRuntime.getArguments(),
                                                            workflowContext.getLogicalStartTime(),
                                                            workflowContext, workflowContext.getNamespace());
  // Collect the resolved plugin properties for every stage.
  Map<String, Map<String, String>> resolvedProperties = new HashMap<>();
  for (StageSpec spec : stageSpecs.values()) {
    String stageName = spec.getName();
    resolvedProperties.put(stageName, workflowContext.getPluginProperties(stageName, macroEvaluator).getProperties());
  }
  // Add the resolved plugin properties to the workflow token as a JSON string.
  workflowContext.getToken().put(RESOLVED_PLUGIN_PROPERTIES_MAP, GSON.toJson(resolvedProperties));
}
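The alert-publishing loop above drives the full AlertPublisher lifecycle: initialize(context), publish(iterator), then destroy() in the finally block. A rough sketch of a publisher under that contract, assuming the cdap-etl-api types (AlertPublisher, Alert, AlertPublisherContext); the class name and the Alert getters used here are assumptions, not confirmed by the snippet above.

import java.util.Iterator;
import co.cask.cdap.etl.api.Alert;
import co.cask.cdap.etl.api.AlertPublisher;
import co.cask.cdap.etl.api.AlertPublisherContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical publisher that simply logs each alert it receives.
public class LoggingAlertPublisher extends AlertPublisher {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingAlertPublisher.class);

  @Override
  public void initialize(AlertPublisherContext context) {
    // No setup needed for this sketch; a real publisher might open a connection here.
  }

  @Override
  public void publish(Iterator<Alert> alerts) {
    while (alerts.hasNext()) {
      Alert alert = alerts.next();
      // getStageName()/getPayload() are assumed accessors on Alert.
      LOG.info("Alert from stage {}: {}", alert.getStageName(), alert.getPayload());
    }
  }

  @Override
  public void destroy() {
    // Nothing to clean up in this sketch.
  }
}

Note that SmartWorkflow wraps the alert iterator in a TrackedIterator, so records consumed by publish() are counted against the stage's metrics without the publisher doing anything extra.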