Use of io.cdap.cdap.api.plugin.PluginContext in project cdap by cdapio: class StreamingMultiSinkFunction, method createStages().
private Map<String, SubmitterLifecycle<?>> createStages(MacroEvaluator evaluator) throws InstantiationException {
  PluginContext pluginContext = sec.getPluginContext();
  Map<String, SubmitterLifecycle<?>> stages = new HashMap<>();
  for (String stageName : group) {
    SubmitterLifecycle<?> plugin = pluginContext.newPluginInstance(stageName, evaluator);
    stages.put(stageName, plugin);
  }
  return stages;
}
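Distilled, the PluginContext pattern this method relies on is: obtain the context from the Spark execution context, then instantiate each registered stage plugin by name, with macros evaluated at instantiation time. A minimal sketch of a single instantiation, assuming a hypothetical stage name "tableSink" and an already constructed MacroEvaluator:

// Minimal sketch; "tableSink" is a hypothetical stage name registered at configure time.
PluginContext pluginContext = sec.getPluginContext();
SubmitterLifecycle<?> sink = pluginContext.newPluginInstance("tableSink", evaluator);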
Use of io.cdap.cdap.api.plugin.PluginContext in project cdap by cdapio: class StreamingMultiSinkFunction, method call().
@Override
public void call(JavaRDD<RecordInfo<Object>> data, Time batchTime) throws Exception {
  long logicalStartTime = batchTime.milliseconds();
  MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime,
                                                       sec.getSecureStore(), sec.getServiceDiscoverer(),
                                                       sec.getNamespace());
  PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(),
                                                               phaseSpec.isStageLoggingEnabled(),
                                                               phaseSpec.isProcessTimingEnabled());
  SparkBatchSinkFactory sinkFactory = new SparkBatchSinkFactory();
  PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, logicalStartTime);
  Map<String, SubmitterLifecycle<?>> stages = createStages(evaluator);

  // Call prepareRun() on all the stages in the group, in an order that guarantees inputs are
  // prepared before outputs. This matters because plugins can call getArguments().set() in
  // prepareRun(), and downstream stages should be able to read those arguments
  // (a hypothetical illustration of this hand-off follows the method).
  List<String> traversalOrder = new ArrayList<>(group.size());
  for (String stageName : phaseSpec.getPhase().getDag().getTopologicalOrder()) {
    if (group.contains(stageName)) {
      traversalOrder.add(stageName);
    }
  }
  for (String stageName : traversalOrder) {
    SubmitterLifecycle<?> plugin = stages.get(stageName);
    StageSpec stageSpec = phaseSpec.getPhase().getStage(stageName);
    try {
      prepareRun(pipelineRuntime, sinkFactory, stageSpec, plugin);
    } catch (Exception e) {
      LOG.error("Error preparing sink {} for the batch for time {}.", stageName, logicalStartTime, e);
      return;
    }
  }

  // run the actual transforms and sinks in this group
  boolean ranSuccessfully = true;
  try {
    MultiSinkFunction multiSinkFunction = new MultiSinkFunction(sec, phaseSpec, group, collectors);
    Set<String> outputNames = sinkFactory.writeCombinedRDD(data.flatMapToPair(multiSinkFunction), sec, sinkNames);
    sec.execute(new TxRunnable() {
      @Override
      public void run(DatasetContext context) throws Exception {
        for (String outputName : outputNames) {
          ExternalDatasets.registerLineage(sec.getAdmin(), outputName, AccessType.WRITE, null,
                                           () -> context.getDataset(outputName));
        }
      }
    });
  } catch (Exception e) {
    LOG.error("Error writing to sinks {} for the batch for time {}.", sinkNames, logicalStartTime, e);
    ranSuccessfully = false;
  }

  // run onRunFinish() for each sink
  for (String stageName : traversalOrder) {
    SubmitterLifecycle<?> plugin = stages.get(stageName);
    StageSpec stageSpec = phaseSpec.getPhase().getStage(stageName);
    try {
      onRunFinish(pipelineRuntime, sinkFactory, stageSpec, plugin, ranSuccessfully);
    } catch (Exception e) {
      LOG.warn("Unable to execute onRunFinish for sink {}", stageName, e);
    }
  }
}
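The ordering comment is the important part: because inputs are prepared before outputs, an argument set by an upstream stage's prepareRun() is already visible when a downstream stage is prepared. A hypothetical pair of sinks illustrates the hand-off (class names, the argument key, and the value are made up; other required plugin methods are omitted for brevity):

// Hypothetical upstream stage: publishes a value during prepareRun().
public class UpstreamSink extends BatchSink<StructuredRecord, Object, Object> {
  @Override
  public void prepareRun(BatchSinkContext context) {
    context.getArguments().set("output.partition", "2024-01-01");
  }
}

// Hypothetical downstream stage: prepared later in topological order, so the
// argument set above can be read here.
public class DownstreamSink extends BatchSink<StructuredRecord, Object, Object> {
  @Override
  public void prepareRun(BatchSinkContext context) {
    String partition = context.getArguments().get("output.partition");
    // ... use the value when configuring the output
  }
}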
Use of io.cdap.cdap.api.plugin.PluginContext in project cdap by cdapio: class SparkStreamingPipelineRunner, method getSource().
@Override
protected SparkCollection<RecordInfo<Object>> getSource(StageSpec stageSpec, FunctionCache.Factory functionCacheFactory,
                                                        StageStatisticsCollector collector) throws Exception {
  StreamingSource<Object> source;
  if (checkpointsDisabled) {
    PluginFunctionContext pluginFunctionContext = new PluginFunctionContext(stageSpec, sec, collector);
    source = pluginFunctionContext.createPlugin();
  } else {
    // Check for macros in any StreamingSource. If checkpoints are enabled, Spark Streaming will
    // serialize all created InputDStreams in the checkpoint, which means the InputDStream is
    // deserialized directly from the checkpoint instead of instantiated through CDAP. As a result,
    // there is no way to perform macro evaluation on sources when they are loaded from checkpoints.
    // We can work around this in all other pipeline stages by dynamically instantiating the plugin
    // in all DStream functions, but can't for InputDStreams, because the InputDStream constructor
    // adds itself to the context dag. Yay for constructors with global side effects.
    // (A hypothetical illustration of the resulting failure mode follows the method.)
    // TODO: (HYDRATOR-1030) figure out how to do this at configure time instead of run time
    MacroEvaluator macroEvaluator = new ErrorMacroEvaluator(
      "Due to spark limitations, macro evaluation is not allowed in streaming sources when checkpointing is enabled.");
    PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(),
                                                                 spec.isStageLoggingEnabled(),
                                                                 spec.isProcessTimingEnabled());
    source = pluginContext.newPluginInstance(stageSpec.getName(), macroEvaluator);
  }
  DataTracer dataTracer = sec.getDataTracer(stageSpec.getName());
  StreamingContext sourceContext = new DefaultStreamingContext(stageSpec, sec, streamingContext);
  JavaDStream<Object> javaDStream = source.getStream(sourceContext);
  if (dataTracer.isEnabled()) {
    // transform() creates a new function for each RDD, so the preview limit applies to each RDD
    // rather than to the entire DStream.
    javaDStream = javaDStream.transform(new LimitingFunction<>(spec.getNumOfRecordsPreview()));
  }
  JavaDStream<RecordInfo<Object>> outputDStream = javaDStream
    .transform(new CountingTransformFunction<>(stageSpec.getName(), sec.getMetrics(), "records.out", dataTracer))
    .map(new WrapOutputTransformFunction<>(stageSpec.getName()));
  return new DStreamCollection<>(sec, functionCacheFactory, outputDStream);
}
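Given the limitation described in the comments, the strict evaluator turns any macro in a checkpointed streaming source into an immediate, clearly worded failure at instantiation time rather than a silently stale value after recovery. A hypothetical illustration (the stage name is made up):

// Hypothetical failure-mode illustration; "kafkaSource" is a made-up stage name.
try {
  StreamingSource<Object> src = pluginContext.newPluginInstance("kafkaSource", macroEvaluator);
} catch (Exception e) {
  // if the source configuration contains a macro, the ErrorMacroEvaluator rejects the lookup
  // and the user sees the "macro evaluation is not allowed ..." message from above
}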
Use of io.cdap.cdap.api.plugin.PluginContext in project cdap by cdapio: class PipelineAction, method run().
@Override
public void run() throws Exception {
  CustomActionContext context = getContext();
  Map<String, String> properties = context.getSpecification().getProperties();
  BatchPhaseSpec phaseSpec = GSON.fromJson(properties.get(Constants.PIPELINEID), BatchPhaseSpec.class);
  PipelinePhase phase = phaseSpec.getPhase();
  StageSpec stageSpec = phase.iterator().next();
  PluginContext pluginContext = new PipelinePluginContext(context, metrics, phaseSpec.isStageLoggingEnabled(),
                                                          phaseSpec.isProcessTimingEnabled());
  PipelineRuntime pipelineRuntime = new PipelineRuntime(context, metrics);
  Action action = pluginContext.newPluginInstance(stageSpec.getName(),
                                                  new DefaultMacroEvaluator(pipelineRuntime.getArguments(),
                                                                            context.getLogicalStartTime(), context,
                                                                            context, context.getNamespace()));
  ActionContext actionContext = new BasicActionContext(context, pipelineRuntime, stageSpec);
  // the action is skipped when data tracing (i.e., preview) is enabled for this stage
  if (!context.getDataTracer(stageSpec.getName()).isEnabled()) {
    action.run(actionContext);
  }
  WorkflowToken token = context.getWorkflowToken();
  if (token == null) {
    throw new IllegalStateException("WorkflowToken cannot be null when action is executed through Workflow.");
  }
  // copy any arguments the action added into the workflow token so that later workflow nodes can
  // read them (a hypothetical read is sketched after this method)
  for (Map.Entry<String, String> entry : pipelineRuntime.getArguments().getAddedArguments().entrySet()) {
    token.put(entry.getKey(), entry.getValue());
  }
}
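The final loop is what makes arguments added by the action visible to the rest of the workflow: they are copied into the WorkflowToken, where a later node can read them back. A hypothetical read from a downstream custom action (the key name is made up):

// Hypothetical downstream read; "rows.copied" is a made-up key set by an earlier action.
WorkflowToken token = getContext().getWorkflowToken();
Value copied = token.get("rows.copied");
if (copied != null) {
  String rows = copied.toString();
}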
Use of io.cdap.cdap.api.plugin.PluginContext in project cdap by cdapio: class SmartWorkflow, method initialize().
@Override
public void initialize(WorkflowContext context) throws Exception {
  super.initialize(context);
  context.enableFieldLineageConsolidation();
  TriggeringScheduleInfo scheduleInfo = context.getTriggeringScheduleInfo();
  if (scheduleInfo != null) {
    String propertiesMappingString = scheduleInfo.getProperties().get(TRIGGERING_PROPERTIES_MAPPING);
    if (propertiesMappingString != null) {
      TriggeringPropertyMapping propertiesMapping =
        GSON.fromJson(propertiesMappingString, TriggeringPropertyMapping.class);
      updateTokenWithTriggeringProperties(scheduleInfo, propertiesMapping, context.getToken());
    }
  }
  PipelineRuntime pipelineRuntime = new PipelineRuntime(context, workflowMetrics);
  WRAPPERLOGGER.info("Pipeline '{}' is started by user '{}' with arguments {}",
                     context.getApplicationSpecification().getName(),
                     UserGroupInformation.getCurrentUser().getShortUserName(),
                     pipelineRuntime.getArguments().asMap());
  alertPublishers = new HashMap<>();
  postActions = new LinkedHashMap<>();
  spec = GSON.fromJson(context.getWorkflowSpecification().getProperty(Constants.PIPELINE_SPEC_KEY),
                       BatchPipelineSpec.class);
  stageSpecs = new HashMap<>();
  MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(pipelineRuntime.getArguments(),
                                                            context.getLogicalStartTime(), context, context,
                                                            context.getNamespace());
  PluginContext pluginContext = new PipelinePluginContext(context, workflowMetrics,
                                                          spec.isStageLoggingEnabled(),
                                                          spec.isProcessTimingEnabled());
  for (ActionSpec actionSpec : spec.getEndingActions()) {
    String stageName = actionSpec.getName();
    postActions.put(stageName, pluginContext.newPluginInstance(stageName, macroEvaluator));
    stageSpecs.put(stageName, StageSpec.builder(stageName, actionSpec.getPluginSpec())
      .setStageLoggingEnabled(spec.isStageLoggingEnabled())
      .setProcessTimingEnabled(spec.isProcessTimingEnabled())
      .setMaxPreviewRecords(spec.getNumOfRecordsPreview())
      .build());
  }
  for (StageSpec stageSpec : spec.getStages()) {
    String stageName = stageSpec.getName();
    stageSpecs.put(stageName, stageSpec);
    if (AlertPublisher.PLUGIN_TYPE.equals(stageSpec.getPluginType())) {
      AlertPublisher alertPublisher = context.newPluginInstance(stageName, macroEvaluator);
      alertPublishers.put(stageName, alertPublisher);
    }
  }
  WRAPPERLOGGER.info("Pipeline '{}' running", context.getApplicationSpecification().getName());
}