Search in sources:

Example 1 with PluginContext

Use of co.cask.cdap.api.plugin.PluginContext in project cdap by caskdata.

In the class SparkStreamingPipelineRunner, the getSource() method:

@Override
protected SparkCollection<RecordInfo<Object>> getSource(StageSpec stageSpec, StageStatisticsCollector collector) throws Exception {
    StreamingSource<Object> source;
    if (checkpointsDisabled) {
        PluginFunctionContext pluginFunctionContext = new PluginFunctionContext(stageSpec, sec, collector);
        source = pluginFunctionContext.createPlugin();
    } else {
        // check for macros in any StreamingSource. If checkpoints are enabled,
        // SparkStreaming will serialize all InputDStreams created in the checkpoint, which means
        // the InputDStream is deserialized directly from the checkpoint instead of instantiated through CDAP.
        // This means there isn't any way for us to perform macro evaluation on sources when they are loaded from
        // checkpoints. We can work around this in all other pipeline stages by dynamically instantiating the
        // plugin in all DStream functions, but can't for InputDStreams because the InputDStream constructor
        // adds itself to the context dag. Yay for constructors with global side effects.
        // TODO: (HYDRATOR-1030) figure out how to do this at configure time instead of run time
        MacroEvaluator macroEvaluator = new ErrorMacroEvaluator("Due to spark limitations, macro evaluation is not allowed in streaming sources when checkpointing is enabled.");
        PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), spec.isStageLoggingEnabled(), spec.isProcessTimingEnabled());
        source = pluginContext.newPluginInstance(stageSpec.getName(), macroEvaluator);
    }
    DataTracer dataTracer = sec.getDataTracer(stageSpec.getName());
    StreamingContext sourceContext = new DefaultStreamingContext(stageSpec, sec, streamingContext);
    JavaDStream<Object> javaDStream = source.getStream(sourceContext);
    if (dataTracer.isEnabled()) {
        // transform() creates a new function for each RDD, so the limit applies to each RDD individually rather than to the entire DStream.
        javaDStream = javaDStream.transform(new LimitingFunction<>(spec.getNumOfRecordsPreview()));
    }
    JavaDStream<RecordInfo<Object>> outputDStream = javaDStream.transform(new CountingTransformFunction<>(stageSpec.getName(), sec.getMetrics(), "records.out", dataTracer)).map(new WrapOutputTransformFunction<>(stageSpec.getName()));
    return new DStreamCollection<>(sec, outputDStream);
}
Also used: PairDStreamCollection(co.cask.cdap.etl.spark.streaming.PairDStreamCollection) DStreamCollection(co.cask.cdap.etl.spark.streaming.DStreamCollection) StreamingContext(co.cask.cdap.etl.api.streaming.StreamingContext) JavaStreamingContext(org.apache.spark.streaming.api.java.JavaStreamingContext) DefaultStreamingContext(co.cask.cdap.etl.spark.streaming.DefaultStreamingContext) MacroEvaluator(co.cask.cdap.api.macro.MacroEvaluator) SparkPipelinePluginContext(co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext) PluginContext(co.cask.cdap.api.plugin.PluginContext) RecordInfo(co.cask.cdap.etl.common.RecordInfo) CountingTransformFunction(co.cask.cdap.etl.spark.streaming.function.CountingTransformFunction) PluginFunctionContext(co.cask.cdap.etl.spark.function.PluginFunctionContext) DataTracer(co.cask.cdap.api.preview.DataTracer) LimitingFunction(co.cask.cdap.etl.spark.streaming.function.preview.LimitingFunction)
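
The pattern above, wrapping the platform PluginContext in a SparkPipelinePluginContext and then instantiating the stage's plugin by name with a macro evaluator, recurs throughout these examples. A minimal sketch of it as a standalone helper, assuming the JavaSparkExecutionContext (sec) these snippets use; the class and method names here are illustrative, not part of CDAP:

import co.cask.cdap.api.macro.MacroEvaluator;
import co.cask.cdap.api.plugin.PluginContext;
import co.cask.cdap.api.spark.JavaSparkExecutionContext;
import co.cask.cdap.etl.api.streaming.StreamingSource;
import co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext;

final class SourceInstantiator {

    private SourceInstantiator() {
    }

    // Wraps the platform PluginContext so stage metrics, logging, and process
    // timing are handled consistently, then instantiates the plugin registered
    // under stageName, evaluating its macros with the given evaluator.
    static StreamingSource<Object> newSource(JavaSparkExecutionContext sec, String stageName, MacroEvaluator evaluator) throws Exception {
        PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), true, true);
        return pluginContext.newPluginInstance(stageName, evaluator);
    }
}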

Example 2 with PluginContext

Use of co.cask.cdap.api.plugin.PluginContext in project cdap by caskdata.

In the class StreamingBatchSinkFunction, the call() method:

@Override
public Void call(JavaRDD<T> data, Time batchTime) throws Exception {
    if (data.isEmpty()) {
        return null;
    }
    final long logicalStartTime = batchTime.milliseconds();
    MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime, sec.getSecureStore(), sec.getNamespace());
    PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), stageSpec.isStageLoggingEnabled(), stageSpec.isProcessTimingEnabled());
    final SparkBatchSinkFactory sinkFactory = new SparkBatchSinkFactory();
    final String stageName = stageSpec.getName();
    final BatchSink<Object, Object, Object> batchSink = pluginContext.newPluginInstance(stageName, evaluator);
    final PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, logicalStartTime);
    boolean isPrepared = false;
    boolean isDone = false;
    try {
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext datasetContext) throws Exception {
                SparkBatchSinkContext sinkContext = new SparkBatchSinkContext(sinkFactory, sec, datasetContext, pipelineRuntime, stageSpec);
                batchSink.prepareRun(sinkContext);
            }
        });
        isPrepared = true;
        PluginFunctionContext pluginFunctionContext = new PluginFunctionContext(stageSpec, sec, pipelineRuntime.getArguments().asMap(), logicalStartTime, new NoopStageStatisticsCollector());
        PairFlatMapFunc<T, Object, Object> sinkFunction = new BatchSinkFunction<T, Object, Object>(pluginFunctionContext);
        sinkFactory.writeFromRDD(data.flatMapToPair(Compat.convert(sinkFunction)), sec, stageName, Object.class, Object.class);
        isDone = true;
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext datasetContext) throws Exception {
                SparkBatchSinkContext sinkContext = new SparkBatchSinkContext(sinkFactory, sec, datasetContext, pipelineRuntime, stageSpec);
                batchSink.onRunFinish(true, sinkContext);
            }
        });
    } catch (Exception e) {
        LOG.error("Error writing to sink {} for the batch for time {}.", stageName, logicalStartTime, e);
    } finally {
        if (isPrepared && !isDone) {
            sec.execute(new TxRunnable() {

                @Override
                public void run(DatasetContext datasetContext) throws Exception {
                    SparkBatchSinkContext sinkContext = new SparkBatchSinkContext(sinkFactory, sec, datasetContext, pipelineRuntime, stageSpec);
                    batchSink.onRunFinish(false, sinkContext);
                }
            });
        }
    }
    return null;
}
Also used: NoopStageStatisticsCollector(co.cask.cdap.etl.common.NoopStageStatisticsCollector) MacroEvaluator(co.cask.cdap.api.macro.MacroEvaluator) DefaultMacroEvaluator(co.cask.cdap.etl.common.DefaultMacroEvaluator) SparkPipelineRuntime(co.cask.cdap.etl.spark.SparkPipelineRuntime) PipelineRuntime(co.cask.cdap.etl.common.PipelineRuntime) SparkPipelinePluginContext(co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext) PluginContext(co.cask.cdap.api.plugin.PluginContext) SparkBatchSinkContext(co.cask.cdap.etl.spark.batch.SparkBatchSinkContext) BatchSinkFunction(co.cask.cdap.etl.spark.function.BatchSinkFunction) PluginFunctionContext(co.cask.cdap.etl.spark.function.PluginFunctionContext) SparkBatchSinkFactory(co.cask.cdap.etl.spark.batch.SparkBatchSinkFactory) TxRunnable(co.cask.cdap.api.TxRunnable) BasicArguments(co.cask.cdap.etl.common.BasicArguments) DatasetContext(co.cask.cdap.api.data.DatasetContext)
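
Since TxRunnable declares a single run(DatasetContext) method, the anonymous classes above collapse to lambdas under Java 8. A sketch of the prepareRun transaction in that form, using the same types as the snippet; the helper class and method names are hypothetical:

import co.cask.cdap.api.spark.JavaSparkExecutionContext;
import co.cask.cdap.etl.api.batch.BatchSink;
import co.cask.cdap.etl.common.PipelineRuntime;
import co.cask.cdap.etl.spark.batch.SparkBatchSinkContext;
import co.cask.cdap.etl.spark.batch.SparkBatchSinkFactory;
import co.cask.cdap.etl.spec.StageSpec;

final class SinkPreparation {

    private SinkPreparation() {
    }

    // The first sec.execute(...) call above, rewritten with a lambda; the
    // transaction body builds the sink context and delegates to prepareRun.
    static void prepareSink(JavaSparkExecutionContext sec, SparkBatchSinkFactory sinkFactory, PipelineRuntime pipelineRuntime, StageSpec stageSpec, BatchSink<Object, Object, Object> batchSink) throws Exception {
        sec.execute(datasetContext -> {
            SparkBatchSinkContext sinkContext = new SparkBatchSinkContext(sinkFactory, sec, datasetContext, pipelineRuntime, stageSpec);
            batchSink.prepareRun(sinkContext);
        });
    }
}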

Example 3 with PluginContext

Use of co.cask.cdap.api.plugin.PluginContext in project cdap by caskdata.

In the class StreamingSparkSinkFunction, the call() method:

@Override
public Void call(JavaRDD<T> data, Time batchTime) throws Exception {
    if (data.isEmpty()) {
        return null;
    }
    final long logicalStartTime = batchTime.milliseconds();
    MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime, sec.getSecureStore(), sec.getNamespace());
    final PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), stageSpec.isStageLoggingEnabled(), stageSpec.isProcessTimingEnabled());
    final PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, logicalStartTime);
    final String stageName = stageSpec.getName();
    final SparkSink<T> sparkSink = pluginContext.newPluginInstance(stageName, evaluator);
    boolean isPrepared = false;
    boolean isDone = false;
    try {
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext datasetContext) throws Exception {
                SparkPluginContext context = new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
                sparkSink.prepareRun(context);
            }
        });
        isPrepared = true;
        final SparkExecutionPluginContext sparkExecutionPluginContext = new SparkStreamingExecutionContext(sec, JavaSparkContext.fromSparkContext(data.rdd().context()), logicalStartTime, stageSpec);
        final JavaRDD<T> countedRDD = data.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache();
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext context) throws Exception {
                sparkSink.run(sparkExecutionPluginContext, countedRDD);
            }
        });
        isDone = true;
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext datasetContext) throws Exception {
                SparkPluginContext context = new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
                sparkSink.onRunFinish(true, context);
            }
        });
    } catch (Exception e) {
        LOG.error("Error while executing sink {} for the batch for time {}.", stageName, logicalStartTime, e);
    } finally {
        if (isPrepared && !isDone) {
            sec.execute(new TxRunnable() {

                @Override
                public void run(DatasetContext datasetContext) throws Exception {
                    SparkPluginContext context = new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
                    sparkSink.onRunFinish(false, context);
                }
            });
        }
    }
    return null;
}
Also used: MacroEvaluator(co.cask.cdap.api.macro.MacroEvaluator) DefaultMacroEvaluator(co.cask.cdap.etl.common.DefaultMacroEvaluator) PipelineRuntime(co.cask.cdap.etl.common.PipelineRuntime) SparkPipelineRuntime(co.cask.cdap.etl.spark.SparkPipelineRuntime) SparkPipelinePluginContext(co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext) BasicSparkPluginContext(co.cask.cdap.etl.spark.batch.BasicSparkPluginContext) SparkExecutionPluginContext(co.cask.cdap.etl.api.batch.SparkExecutionPluginContext) PluginContext(co.cask.cdap.api.plugin.PluginContext) SparkPluginContext(co.cask.cdap.etl.api.batch.SparkPluginContext) SparkStreamingExecutionContext(co.cask.cdap.etl.spark.streaming.SparkStreamingExecutionContext) CountingFunction(co.cask.cdap.etl.spark.function.CountingFunction) TxRunnable(co.cask.cdap.api.TxRunnable) BasicArguments(co.cask.cdap.etl.common.BasicArguments) DatasetContext(co.cask.cdap.api.data.DatasetContext)
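
Examples 2 and 3 share one lifecycle discipline: prepareRun first, then the actual work, then onRunFinish with the success flag, with the finally block guaranteeing onRunFinish(false, ...) when the work fails after preparation. Distilled as a sketch; the transactional sec.execute wrappers are omitted for brevity, and the class and method names are illustrative:

import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;
import co.cask.cdap.etl.api.batch.SparkPluginContext;
import co.cask.cdap.etl.api.batch.SparkSink;
import org.apache.spark.api.java.JavaRDD;

final class SinkLifecycle {

    private SinkLifecycle() {
    }

    // Never report completion for a run that was never prepared, and always
    // report failure when the run threw after preparation succeeded.
    static <T> void execute(SparkSink<T> sink, SparkPluginContext prepareContext, SparkExecutionPluginContext runContext, JavaRDD<T> input) throws Exception {
        boolean prepared = false;
        boolean done = false;
        try {
            sink.prepareRun(prepareContext);
            prepared = true;
            sink.run(runContext, input);
            done = true;
            sink.onRunFinish(true, prepareContext);
        } finally {
            if (prepared && !done) {
                sink.onRunFinish(false, prepareContext);
            }
        }
    }
}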

Example 4 with PluginContext

Use of co.cask.cdap.api.plugin.PluginContext in project cdap by caskdata.

In the class PipelineCondition, the apply() method:

@Override
public boolean apply(@Nullable WorkflowContext input) {
    if (input == null) {
        // should not happen
        throw new IllegalStateException("WorkflowContext for the Condition cannot be null.");
    }
    Map<String, String> properties = input.getConditionSpecification().getProperties();
    BatchPhaseSpec phaseSpec = GSON.fromJson(properties.get(Constants.PIPELINEID), BatchPhaseSpec.class);
    PipelinePhase phase = phaseSpec.getPhase();
    StageSpec stageSpec = phase.iterator().next();
    PluginContext pluginContext = new PipelinePluginContext(input, metrics, phaseSpec.isStageLoggingEnabled(), phaseSpec.isProcessTimingEnabled());
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(new BasicArguments(input.getToken(), input.getRuntimeArguments()), input.getLogicalStartTime(), input, input.getNamespace());
    try {
        Condition condition = pluginContext.newPluginInstance(stageSpec.getName(), macroEvaluator);
        PipelineRuntime pipelineRuntime = new PipelineRuntime(input, metrics);
        ConditionContext conditionContext = new BasicConditionContext(input, pipelineRuntime, stageSpec);
        boolean result = condition.apply(conditionContext);
        WorkflowToken token = input.getToken();
        if (token == null) {
            throw new IllegalStateException("WorkflowToken cannot be null when Condition is executed through Workflow.");
        }
        for (Map.Entry<String, String> entry : pipelineRuntime.getArguments().getAddedArguments().entrySet()) {
            token.put(entry.getKey(), entry.getValue());
        }
        return result;
    } catch (Exception e) {
        String msg = String.format("Error executing condition '%s' in the pipeline.", stageSpec.getName());
        throw new RuntimeException(msg, e);
    }
}
Also used: Condition(co.cask.cdap.etl.api.condition.Condition) AbstractCondition(co.cask.cdap.api.workflow.AbstractCondition) MacroEvaluator(co.cask.cdap.api.macro.MacroEvaluator) DefaultMacroEvaluator(co.cask.cdap.etl.common.DefaultMacroEvaluator) PipelineRuntime(co.cask.cdap.etl.common.PipelineRuntime) PluginContext(co.cask.cdap.api.plugin.PluginContext) PipelinePluginContext(co.cask.cdap.etl.common.plugin.PipelinePluginContext) WorkflowToken(co.cask.cdap.api.workflow.WorkflowToken) ConditionContext(co.cask.cdap.etl.api.condition.ConditionContext) PipelinePhase(co.cask.cdap.etl.common.PipelinePhase) StageSpec(co.cask.cdap.etl.spec.StageSpec) BatchPhaseSpec(co.cask.cdap.etl.batch.BatchPhaseSpec) BasicArguments(co.cask.cdap.etl.common.BasicArguments) HashMap(java.util.HashMap) Map(java.util.Map)
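
For reference, the plugin this code instantiates can be as small as the sketch below. It assumes Condition is subclassed with a single apply method, as the call above suggests, and that ConditionContext exposes the runtime arguments through getArguments(); the class name and argument key are hypothetical:

import co.cask.cdap.etl.api.condition.Condition;
import co.cask.cdap.etl.api.condition.ConditionContext;

public class FlagCondition extends Condition {

    @Override
    public boolean apply(ConditionContext context) throws Exception {
        // The argument key "branch.enabled" is illustrative; getArguments()
        // is assumed to expose the pipeline's runtime arguments.
        return Boolean.parseBoolean(context.getArguments().get("branch.enabled"));
    }
}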

Example 5 with PluginContext

Use of co.cask.cdap.api.plugin.PluginContext in project cdap by caskdata.

In the class PipelineAction, the run() method:

@Override
public void run() throws Exception {
    CustomActionContext context = getContext();
    Map<String, String> properties = context.getSpecification().getProperties();
    BatchPhaseSpec phaseSpec = GSON.fromJson(properties.get(Constants.PIPELINEID), BatchPhaseSpec.class);
    PipelinePhase phase = phaseSpec.getPhase();
    StageSpec stageSpec = phase.iterator().next();
    PluginContext pluginContext = new PipelinePluginContext(context, metrics, phaseSpec.isStageLoggingEnabled(), phaseSpec.isProcessTimingEnabled());
    PipelineRuntime pipelineRuntime = new PipelineRuntime(context, metrics);
    Action action = pluginContext.newPluginInstance(stageSpec.getName(), new DefaultMacroEvaluator(pipelineRuntime.getArguments(), context.getLogicalStartTime(), context, context.getNamespace()));
    ActionContext actionContext = new BasicActionContext(context, pipelineRuntime, stageSpec);
    if (!context.getDataTracer(stageSpec.getName()).isEnabled()) {
        action.run(actionContext);
    }
    WorkflowToken token = context.getWorkflowToken();
    if (token == null) {
        throw new IllegalStateException("WorkflowToken cannot be null when action is executed through Workflow.");
    }
    for (Map.Entry<String, String> entry : pipelineRuntime.getArguments().getAddedArguments().entrySet()) {
        token.put(entry.getKey(), entry.getValue());
    }
}
Also used: Action(co.cask.cdap.etl.api.action.Action) CustomAction(co.cask.cdap.api.customaction.CustomAction) AbstractCustomAction(co.cask.cdap.api.customaction.AbstractCustomAction) PipelineRuntime(co.cask.cdap.etl.common.PipelineRuntime) PipelinePluginContext(co.cask.cdap.etl.common.plugin.PipelinePluginContext) PluginContext(co.cask.cdap.api.plugin.PluginContext) WorkflowToken(co.cask.cdap.api.workflow.WorkflowToken) CustomActionContext(co.cask.cdap.api.customaction.CustomActionContext) ActionContext(co.cask.cdap.etl.api.action.ActionContext) PipelinePhase(co.cask.cdap.etl.common.PipelinePhase) StageSpec(co.cask.cdap.etl.spec.StageSpec) DefaultMacroEvaluator(co.cask.cdap.etl.common.DefaultMacroEvaluator) BatchPhaseSpec(co.cask.cdap.etl.batch.BatchPhaseSpec) HashMap(java.util.HashMap) Map(java.util.Map)
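
The corresponding plugin shape is equally small. The sketch below assumes ActionContext exposes settable arguments through getArguments(), which is where the added arguments copied into the WorkflowToken above would originate; the class name and argument key are hypothetical:

import co.cask.cdap.etl.api.action.Action;
import co.cask.cdap.etl.api.action.ActionContext;

public class MarkerAction extends Action {

    @Override
    public void run(ActionContext context) throws Exception {
        // Arguments set here surface as "added arguments", which the run()
        // method above copies into the WorkflowToken. The key is illustrative.
        context.getArguments().set("marker.stage.done", "true");
    }
}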

Aggregations

PluginContext (co.cask.cdap.api.plugin.PluginContext): 10
MacroEvaluator (co.cask.cdap.api.macro.MacroEvaluator): 8
DefaultMacroEvaluator (co.cask.cdap.etl.common.DefaultMacroEvaluator): 7
PipelineRuntime (co.cask.cdap.etl.common.PipelineRuntime): 7
SparkPipelinePluginContext (co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext): 6
BasicArguments (co.cask.cdap.etl.common.BasicArguments): 5
StageSpec (co.cask.cdap.etl.spec.StageSpec): 4
HashMap (java.util.HashMap): 4
Map (java.util.Map): 4
DatasetContext (co.cask.cdap.api.data.DatasetContext): 3
WorkflowToken (co.cask.cdap.api.workflow.WorkflowToken): 3
BatchPhaseSpec (co.cask.cdap.etl.batch.BatchPhaseSpec): 3
TxRunnable (co.cask.cdap.api.TxRunnable): 2
DataTracer (co.cask.cdap.api.preview.DataTracer): 2
AlertPublisher (co.cask.cdap.etl.api.AlertPublisher): 2
SparkPluginContext (co.cask.cdap.etl.api.batch.SparkPluginContext): 2
StreamingContext (co.cask.cdap.etl.api.streaming.StreamingContext): 2
PipelinePhase (co.cask.cdap.etl.common.PipelinePhase): 2
PipelinePluginContext (co.cask.cdap.etl.common.plugin.PipelinePluginContext): 2
SparkPipelineRuntime (co.cask.cdap.etl.spark.SparkPipelineRuntime): 2