Use of io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.
Class BaseRDDCollection, method createStoreTask.
@Override
public Runnable createStoreTask(final StageSpec stageSpec, final SparkSink<T> sink) throws Exception {
  return new Runnable() {
    @Override
    public void run() {
      String stageName = stageSpec.getName();
      PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
      SparkExecutionPluginContext sparkPluginContext =
        new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
      JavaRDD<T> countedRDD =
        rdd.map(new CountingFunction<T>(stageName, sec.getMetrics(), Constants.Metrics.RECORDS_IN, null));
      SparkConf sparkConf = jsc.getConf();
      try {
        sink.run(sparkPluginContext, countedRDD);
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
    }
  };
}
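For context, the sink.run(sparkPluginContext, countedRDD) call above hands the execution context and the counted RDD to a user-written SparkSink. Below is a minimal plugin-side sketch of such a sink; only the SparkSink.run(...) signature comes from the CDAP API, while the class name, record type choice, metric name, and the assumption that the context exposes stage metrics via getMetrics() are illustrative.

import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.annotation.Plugin;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext;
import io.cdap.cdap.etl.api.batch.SparkSink;
import org.apache.spark.api.java.JavaRDD;

// Hypothetical example plugin; names and metric keys are invented for illustration.
@Plugin(type = SparkSink.PLUGIN_TYPE)
@Name("ExampleCountingSink")
public class ExampleCountingSink extends SparkSink<StructuredRecord> {

  @Override
  public void run(SparkExecutionPluginContext context, JavaRDD<StructuredRecord> input) throws Exception {
    // Count the incoming records and report the total as a stage-scoped gauge.
    long total = input.count();
    context.getMetrics().gauge("records.total", total);
  }
}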
Use of io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.
Class BaseRDDCollection, method compute.
@Override
public <U> SparkCollection<U> compute(StageSpec stageSpec, SparkCompute<T, U> compute) throws Exception {
  String stageName = stageSpec.getName();
  PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
  SparkExecutionPluginContext sparkPluginContext =
    new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
  compute.initialize(sparkPluginContext);
  JavaRDD<T> countedInput =
    rdd.map(new CountingFunction<T>(stageName, sec.getMetrics(), Constants.Metrics.RECORDS_IN, null));
  SparkConf sparkConf = jsc.getConf();
  return wrap(compute.transform(sparkPluginContext, countedInput)
                .map(new CountingFunction<U>(stageName, sec.getMetrics(), Constants.Metrics.RECORDS_OUT,
                                             sec.getDataTracer(stageName))));
}
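The compute.initialize(...) and compute.transform(...) calls above run a user-provided SparkCompute plugin against the context built here. As a point of reference, a minimal plugin-side sketch is shown below; the initialize and transform signatures come from the CDAP API, while the class name, the "body" field, and the metric name are assumptions for illustration.

import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.annotation.Plugin;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.etl.api.batch.SparkCompute;
import io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext;
import org.apache.spark.api.java.JavaRDD;

// Hypothetical example plugin showing both hooks the framework invokes above.
@Plugin(type = SparkCompute.PLUGIN_TYPE)
@Name("ExampleFilterCompute")
public class ExampleFilterCompute extends SparkCompute<StructuredRecord, StructuredRecord> {

  @Override
  public void initialize(SparkExecutionPluginContext context) throws Exception {
    // Driver-side, one-time setup; invoked by compute.initialize(sparkPluginContext) above.
    context.getMetrics().count("compute.initialized", 1);
  }

  @Override
  public JavaRDD<StructuredRecord> transform(SparkExecutionPluginContext context,
                                             JavaRDD<StructuredRecord> input) throws Exception {
    // Drop records that are missing the hypothetical "body" field.
    return input.filter(record -> record.get("body") != null);
  }
}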
Use of io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.
Class DStreamCollection, method compute.
@Override
public <U> SparkCollection<U> compute(StageSpec stageSpec, SparkCompute<T, U> compute) throws Exception {
  SparkCompute<T, U> wrappedCompute =
    new DynamicSparkCompute<>(new DynamicDriverContext(stageSpec, sec, new NoopStageStatisticsCollector()), compute);
  Transactionals.execute(sec, new TxRunnable() {
    @Override
    public void run(DatasetContext datasetContext) throws Exception {
      PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
      SparkExecutionPluginContext sparkPluginContext =
        new BasicSparkExecutionPluginContext(sec, JavaSparkContext.fromSparkContext(stream.context().sparkContext()),
                                             datasetContext, pipelineRuntime, stageSpec);
      wrappedCompute.initialize(sparkPluginContext);
    }
  }, Exception.class);
  return wrap(stream.transform(new ComputeTransformFunction<>(sec, stageSpec, wrappedCompute)));
}
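Because the framework wraps wrappedCompute.initialize(sparkPluginContext) in a TxRunnable above, a plugin's initialize() runs inside a transaction and can read datasets through the context. The sketch below illustrates that pattern; it assumes the context exposes datasets via getDataset(...) as a DatasetContext does, and the plugin name, dataset name "compute-config", and key "mode" are invented.

import java.nio.charset.StandardCharsets;

import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.annotation.Plugin;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.api.dataset.lib.KeyValueTable;
import io.cdap.cdap.etl.api.batch.SparkCompute;
import io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext;
import org.apache.spark.api.java.JavaRDD;

// Hypothetical example plugin that reads a configuration value during initialize().
@Plugin(type = SparkCompute.PLUGIN_TYPE)
@Name("ExampleLookupCompute")
public class ExampleLookupCompute extends SparkCompute<StructuredRecord, StructuredRecord> {

  private String mode;

  @Override
  public void initialize(SparkExecutionPluginContext context) throws Exception {
    // Runs inside the transaction started by Transactionals.execute(...) above,
    // so this dataset read is transactional.
    KeyValueTable config = context.getDataset("compute-config");
    byte[] value = config.read("mode");
    mode = value == null ? "default" : new String(value, StandardCharsets.UTF_8);
  }

  @Override
  public JavaRDD<StructuredRecord> transform(SparkExecutionPluginContext context,
                                             JavaRDD<StructuredRecord> input) throws Exception {
    // Pass-through; the transactional lookup in initialize() is the point of this sketch.
    return input;
  }
}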
Use of io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.
Class ComputeTransformFunction, method call.
@Override
public JavaRDD<U> call(JavaRDD<T> data, Time batchTime) throws Exception {
  SparkExecutionPluginContext sparkPluginContext =
    new SparkStreamingExecutionContext(sec, JavaSparkContext.fromSparkContext(data.context()),
                                       batchTime.milliseconds(), stageSpec);
  String stageName = stageSpec.getName();
  data = data.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null));
  return compute.transform(sparkPluginContext, data)
    .map(new CountingFunction<U>(stageName, sec.getMetrics(), "records.out", sec.getDataTracer(stageName)));
}
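In this streaming path the SparkStreamingExecutionContext is rebuilt for every micro-batch with batchTime.milliseconds(). Assuming that value is surfaced to plugins as the context's logical start time, a plugin can distinguish batches from inside transform(), as in the sketch below; the plugin name and metric name are invented, and the batch-time assumption is not confirmed by the snippet above.

import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.annotation.Plugin;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.etl.api.batch.SparkCompute;
import io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext;
import org.apache.spark.api.java.JavaRDD;

// Hypothetical example plugin; assumes getLogicalStartTime() reflects the micro-batch time.
@Plugin(type = SparkCompute.PLUGIN_TYPE)
@Name("ExampleBatchTimeCompute")
public class ExampleBatchTimeCompute extends SparkCompute<StructuredRecord, StructuredRecord> {

  @Override
  public JavaRDD<StructuredRecord> transform(SparkExecutionPluginContext context,
                                             JavaRDD<StructuredRecord> input) throws Exception {
    long batchTimeMillis = context.getLogicalStartTime();
    // Record one gauge per micro-batch; "batch.time" is an invented metric name.
    context.getMetrics().gauge("batch.time", batchTimeMillis);
    return input;
  }
}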
Use of io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext in project cdap by caskdata.
Class DynamicSparkCompute, method lazyInit.
// when checkpointing is enabled, and Spark is loading DStream operations from an existing checkpoint,
// delegate will be null and the initialize() method won't have been called. So we need to instantiate
// the delegate and initialize it.
private void lazyInit(final JavaSparkContext jsc) throws Exception {
  if (delegate == null) {
    PluginFunctionContext pluginFunctionContext = dynamicDriverContext.getPluginFunctionContext();
    delegate = pluginFunctionContext.createPlugin();
    final StageSpec stageSpec = pluginFunctionContext.getStageSpec();
    final JavaSparkExecutionContext sec = dynamicDriverContext.getSparkExecutionContext();
    Transactionals.execute(sec, new TxRunnable() {
      @Override
      public void run(DatasetContext datasetContext) throws Exception {
        PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
        SparkExecutionPluginContext sparkPluginContext =
          new BasicSparkExecutionPluginContext(sec, jsc, datasetContext, pipelineRuntime, stageSpec);
        delegate.initialize(sparkPluginContext);
      }
    }, Exception.class);
  }
}
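Because lazyInit() may re-create the delegate and call initialize() again after Spark restores DStream operations from a checkpoint, it is safest for a plugin's initialize() to be idempotent and cheap to re-run. Below is a sketch of that style; the plugin name is invented, and it assumes the context exposes the pipeline's runtime arguments through getRuntimeArguments().

import java.util.Map;

import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.annotation.Plugin;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.etl.api.batch.SparkCompute;
import io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext;
import org.apache.spark.api.java.JavaRDD;

// Hypothetical example plugin, written so a repeated initialize() after checkpoint recovery is harmless.
@Plugin(type = SparkCompute.PLUGIN_TYPE)
@Name("ExampleIdempotentCompute")
public class ExampleIdempotentCompute extends SparkCompute<StructuredRecord, StructuredRecord> {

  private transient Map<String, String> runtimeArgs;

  @Override
  public void initialize(SparkExecutionPluginContext context) throws Exception {
    // Re-reading runtime arguments is side-effect free, so calling this twice is safe.
    if (runtimeArgs == null) {
      runtimeArgs = context.getRuntimeArguments();
    }
  }

  @Override
  public JavaRDD<StructuredRecord> transform(SparkExecutionPluginContext context,
                                             JavaRDD<StructuredRecord> input) throws Exception {
    return input;
  }
}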