
Example 1 with SparkExecutionPluginContext

Use of co.cask.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.

From the class RDDCollection, method store:

@Override
public void store(StageInfo stageInfo, SparkSink<T> sink) throws Exception {
    String stageName = stageInfo.getName();
    // Build the execution context for the sink, count every incoming record
    // against the stage's "records.in" metric, then hand the RDD to the sink.
    SparkExecutionPluginContext sparkPluginContext = new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, stageInfo);
    JavaRDD<T> countedRDD = rdd.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache();
    sink.run(sparkPluginContext, countedRDD);
}
Also used: SparkExecutionPluginContext (co.cask.cdap.etl.api.batch.SparkExecutionPluginContext), CountingFunction (co.cask.cdap.etl.spark.function.CountingFunction)
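
For the plugin-side view, here is a minimal sketch of a SparkSink whose run method receives the context built above. The class name and body are hypothetical illustrations; only the overridden SparkSink signatures come from the CDAP API.

import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;
import co.cask.cdap.etl.api.batch.SparkPluginContext;
import co.cask.cdap.etl.api.batch.SparkSink;
import org.apache.spark.api.java.JavaRDD;

// Hypothetical sink: simply materializes and counts the records that
// RDDCollection.store hands to it.
public class CountingConsoleSink<T> extends SparkSink<T> {

    @Override
    public void prepareRun(SparkPluginContext context) throws Exception {
        // no setup needed for this sketch
    }

    @Override
    public void run(SparkExecutionPluginContext context, JavaRDD<T> input) throws Exception {
        System.out.println("sink received " + input.count() + " records");
    }
}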

Example 2 with SparkExecutionPluginContext

Use of co.cask.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.

From the class DStreamCollection, method compute:

@Override
public <U> SparkCollection<U> compute(final StageSpec stageSpec, SparkCompute<T, U> compute) throws Exception {
    final SparkCompute<T, U> wrappedCompute = new DynamicSparkCompute<>(new DynamicDriverContext(stageSpec, sec, new NoopStageStatisticsCollector()), compute);
    // One-time initialization, inside a transaction; the per-batch work is
    // applied later via ComputeTransformFunction.
    Transactionals.execute(sec, new TxRunnable() {

        @Override
        public void run(DatasetContext datasetContext) throws Exception {
            PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
            SparkExecutionPluginContext sparkPluginContext = new BasicSparkExecutionPluginContext(sec, JavaSparkContext.fromSparkContext(stream.context().sparkContext()), datasetContext, pipelineRuntime, stageSpec);
            wrappedCompute.initialize(sparkPluginContext);
        }
    }, Exception.class);
    return wrap(stream.transform(new ComputeTransformFunction<>(sec, stageSpec, wrappedCompute)));
}
Also used: DynamicSparkCompute (co.cask.cdap.etl.spark.streaming.function.DynamicSparkCompute), NoopStageStatisticsCollector (co.cask.cdap.etl.common.NoopStageStatisticsCollector), ComputeTransformFunction (co.cask.cdap.etl.spark.streaming.function.ComputeTransformFunction), SparkPipelineRuntime (co.cask.cdap.etl.spark.SparkPipelineRuntime), PipelineRuntime (co.cask.cdap.etl.common.PipelineRuntime), BasicSparkExecutionPluginContext (co.cask.cdap.etl.spark.batch.BasicSparkExecutionPluginContext), SparkExecutionPluginContext (co.cask.cdap.etl.api.batch.SparkExecutionPluginContext), TxRunnable (co.cask.cdap.api.TxRunnable), DatasetContext (co.cask.cdap.api.data.DatasetContext)
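
Design note: unlike the batch RDDCollection in Example 1, DStreamCollection cannot run the compute immediately. stream.transform registers ComputeTransformFunction to run once per micro-batch, so only the one-time initialize() call happens here, wrapped in Transactionals.execute because the context it receives is backed by a DatasetContext.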

Example 3 with SparkExecutionPluginContext

Use of co.cask.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.

From the class StreamingSparkSinkFunction, method call:

@Override
public Void call(JavaRDD<T> data, Time batchTime) throws Exception {
    if (data.isEmpty()) {
        return null;
    }
    final long logicalStartTime = batchTime.milliseconds();
    MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime, sec.getSecureStore(), sec.getNamespace());
    final PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), stageSpec.isStageLoggingEnabled(), stageSpec.isProcessTimingEnabled());
    final PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, batchTime.milliseconds());
    final String stageName = stageSpec.getName();
    final SparkSink<T> sparkSink = pluginContext.newPluginInstance(stageName, evaluator);
    boolean isPrepared = false;
    boolean isDone = false;
    try {
        // 1. prepareRun in its own transaction.
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext datasetContext) throws Exception {
                SparkPluginContext context = new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
                sparkSink.prepareRun(context);
            }
        });
        isPrepared = true;
        // 2. Count incoming records against "records.in", then run the sink.
        final SparkExecutionPluginContext sparkExecutionPluginContext = new SparkStreamingExecutionContext(sec, JavaSparkContext.fromSparkContext(data.rdd().context()), logicalStartTime, stageSpec);
        final JavaRDD<T> countedRDD = data.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache();
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext context) throws Exception {
                sparkSink.run(sparkExecutionPluginContext, countedRDD);
            }
        });
        isDone = true;
        // 3. Signal success to the sink.
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext datasetContext) throws Exception {
                SparkPluginContext context = new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
                sparkSink.onRunFinish(true, context);
            }
        });
    } catch (Exception e) {
        // Log rather than rethrow so one failed batch does not stop the stream.
        LOG.error("Error while executing sink {} for the batch for time {}.", stageName, logicalStartTime, e);
    } finally {
        // If prepareRun succeeded but run did not complete, signal failure.
        if (isPrepared && !isDone) {
            sec.execute(new TxRunnable() {

                @Override
                public void run(DatasetContext datasetContext) throws Exception {
                    SparkPluginContext context = new BasicSparkPluginContext(null, pipelineRuntime, stageSpec, datasetContext, sec.getAdmin());
                    sparkSink.onRunFinish(false, context);
                }
            });
        }
    }
    return null;
}
Also used: MacroEvaluator (co.cask.cdap.api.macro.MacroEvaluator), DefaultMacroEvaluator (co.cask.cdap.etl.common.DefaultMacroEvaluator), PipelineRuntime (co.cask.cdap.etl.common.PipelineRuntime), SparkPipelineRuntime (co.cask.cdap.etl.spark.SparkPipelineRuntime), SparkPipelinePluginContext (co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext), BasicSparkPluginContext (co.cask.cdap.etl.spark.batch.BasicSparkPluginContext), SparkExecutionPluginContext (co.cask.cdap.etl.api.batch.SparkExecutionPluginContext), PluginContext (co.cask.cdap.api.plugin.PluginContext), SparkPluginContext (co.cask.cdap.etl.api.batch.SparkPluginContext), SparkStreamingExecutionContext (co.cask.cdap.etl.spark.streaming.SparkStreamingExecutionContext), CountingFunction (co.cask.cdap.etl.spark.function.CountingFunction), TxRunnable (co.cask.cdap.api.TxRunnable), BasicArguments (co.cask.cdap.etl.common.BasicArguments), DatasetContext (co.cask.cdap.api.data.DatasetContext)
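
On the plugin side, a sink that needs cleanup can override onRunFinish, which the function above guarantees to call with true after a successful run and with false (from the finally block) when run failed after prepareRun succeeded. A minimal hypothetical sketch, extending the CountingConsoleSink sketched under Example 1:

import co.cask.cdap.etl.api.batch.SparkPluginContext;

// Hypothetical: logs the outcome of each micro-batch.
public class OutcomeLoggingSink<T> extends CountingConsoleSink<T> {

    @Override
    public void onRunFinish(boolean succeeded, SparkPluginContext context) {
        // true  -> the third sec.execute above ran after a successful sink.run
        // false -> the finally block detected a failure between prepareRun and run
        System.out.println("batch finished, succeeded=" + succeeded);
    }
}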

Example 4 with SparkExecutionPluginContext

Use of co.cask.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.

From the class RDDCollection, method compute:

@Override
public <U> SparkCollection<U> compute(StageInfo stageInfo, SparkCompute<T, U> compute) throws Exception {
    String stageName = stageInfo.getName();
    SparkExecutionPluginContext sparkPluginContext = new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, stageInfo);
    compute.initialize(sparkPluginContext);
    // Count records flowing into the stage, transform them, then count what comes out.
    JavaRDD<T> countedInput = rdd.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache();
    return wrap(compute.transform(sparkPluginContext, countedInput).map(new CountingFunction<U>(stageName, sec.getMetrics(), "records.out", sec.getDataTracer(stageName))));
}
Also used: SparkExecutionPluginContext (co.cask.cdap.etl.api.batch.SparkExecutionPluginContext), CountingFunction (co.cask.cdap.etl.spark.function.CountingFunction)
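
For comparison, a minimal SparkCompute that would slot into the initialize/transform calls above. The class name and filter logic are illustrative assumptions; only the overridden SparkCompute signatures come from the CDAP API.

import co.cask.cdap.etl.api.batch.SparkCompute;
import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;
import org.apache.spark.api.java.JavaRDD;

// Hypothetical compute: drops null records and passes everything else through.
public class DropNullsCompute<T> extends SparkCompute<T, T> {

    @Override
    public void initialize(SparkExecutionPluginContext context) throws Exception {
        // called once before transform; a real plugin might load lookup data here
    }

    @Override
    public JavaRDD<T> transform(SparkExecutionPluginContext context, JavaRDD<T> input) throws Exception {
        return input.filter(record -> record != null);
    }
}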

Example 5 with SparkExecutionPluginContext

Use of co.cask.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.

From the class DStreamCollection, method compute:

@Override
public <U> SparkCollection<U> compute(final StageInfo stageInfo, SparkCompute<T, U> compute) throws Exception {
    final SparkCompute<T, U> wrappedCompute = new DynamicSparkCompute<>(new DynamicDriverContext(stageInfo, sec), compute);
    // As in Example 2: initialize inside a transaction, then defer the
    // per-batch transform to ComputeTransformFunction.
    Transactionals.execute(sec, new TxRunnable() {

        @Override
        public void run(DatasetContext datasetContext) throws Exception {
            SparkExecutionPluginContext sparkPluginContext = new BasicSparkExecutionPluginContext(sec, JavaSparkContext.fromSparkContext(stream.context().sparkContext()), datasetContext, stageInfo);
            wrappedCompute.initialize(sparkPluginContext);
        }
    }, Exception.class);
    return wrap(stream.transform(new ComputeTransformFunction<>(sec, stageInfo, wrappedCompute)));
}
Also used: DynamicSparkCompute (co.cask.cdap.etl.spark.streaming.function.DynamicSparkCompute), BasicSparkExecutionPluginContext (co.cask.cdap.etl.spark.batch.BasicSparkExecutionPluginContext), SparkExecutionPluginContext (co.cask.cdap.etl.api.batch.SparkExecutionPluginContext), ComputeTransformFunction (co.cask.cdap.etl.spark.streaming.function.ComputeTransformFunction), TxRunnable (co.cask.cdap.api.TxRunnable), DatasetContext (co.cask.cdap.api.data.DatasetContext), TransactionFailureException (org.apache.tephra.TransactionFailureException)

Aggregations

SparkExecutionPluginContext (co.cask.cdap.etl.api.batch.SparkExecutionPluginContext): 9 usages
CountingFunction (co.cask.cdap.etl.spark.function.CountingFunction): 6 usages
PipelineRuntime (co.cask.cdap.etl.common.PipelineRuntime): 5 usages
SparkPipelineRuntime (co.cask.cdap.etl.spark.SparkPipelineRuntime): 5 usages
TxRunnable (co.cask.cdap.api.TxRunnable): 4 usages
DatasetContext (co.cask.cdap.api.data.DatasetContext): 4 usages
BasicSparkExecutionPluginContext (co.cask.cdap.etl.spark.batch.BasicSparkExecutionPluginContext): 3 usages
SparkStreamingExecutionContext (co.cask.cdap.etl.spark.streaming.SparkStreamingExecutionContext): 2 usages
ComputeTransformFunction (co.cask.cdap.etl.spark.streaming.function.ComputeTransformFunction): 2 usages
DynamicSparkCompute (co.cask.cdap.etl.spark.streaming.function.DynamicSparkCompute): 2 usages
MacroEvaluator (co.cask.cdap.api.macro.MacroEvaluator): 1 usage
PluginContext (co.cask.cdap.api.plugin.PluginContext): 1 usage
JavaSparkExecutionContext (co.cask.cdap.api.spark.JavaSparkExecutionContext): 1 usage
SparkPluginContext (co.cask.cdap.etl.api.batch.SparkPluginContext): 1 usage
BasicArguments (co.cask.cdap.etl.common.BasicArguments): 1 usage
DefaultMacroEvaluator (co.cask.cdap.etl.common.DefaultMacroEvaluator): 1 usage
NoopStageStatisticsCollector (co.cask.cdap.etl.common.NoopStageStatisticsCollector): 1 usage
BasicSparkPluginContext (co.cask.cdap.etl.spark.batch.BasicSparkPluginContext): 1 usage
PluginFunctionContext (co.cask.cdap.etl.spark.function.PluginFunctionContext): 1 usage
SparkPipelinePluginContext (co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext): 1 usage