Search in sources :

Example 1 with MultiSinkFunction

Use of io.cdap.cdap.etl.spark.function.MultiSinkFunction in the project cdap by caskdata.

From the class BaseRDDCollection, method createMultiStoreTask.

/**
 * Creates a deferred task that writes every record of this collection to the sinks in the
 * given group, then records lineage for each output that was written.
 *
 * @param phaseSpec  specification of the pipeline phase being executed
 * @param group      names of the stages that belong to this sink group
 * @param sinks      names of the sinks to write to
 * @param collectors per-stage statistics collectors, keyed by stage name
 * @return a Runnable that performs the combined write when run
 */
@Override
public Runnable createMultiStoreTask(PhaseSpec phaseSpec, Set<String> group, Set<String> sinks, Map<String, StageStatisticsCollector> collectors) {
    return () -> {
        // MultiSinkFunction tags each record with the name of the sink it should go to;
        // the cast is unchecked because MultiSinkFunction is not parameterized on T.
        @SuppressWarnings("unchecked")
        PairFlatMapFunction<T, String, KeyValue<Object, Object>> tagger =
            (PairFlatMapFunction<T, String, KeyValue<Object, Object>>) new MultiSinkFunction(sec, phaseSpec, group, collectors);
        JavaPairRDD<String, KeyValue<Object, Object>> tagged = rdd.flatMapToPair(tagger);
        // Perform the combined write once, then register lineage for each output name returned.
        for (String written : sinkFactory.writeCombinedRDD(tagged, sec, sinks)) {
            recordLineage(written);
        }
    };
}
Also used : KeyValue(io.cdap.cdap.api.dataset.lib.KeyValue) PairFlatMapFunction(org.apache.spark.api.java.function.PairFlatMapFunction) MultiSinkFunction(io.cdap.cdap.etl.spark.function.MultiSinkFunction)

Example 2 with MultiSinkFunction

Use of io.cdap.cdap.etl.spark.function.MultiSinkFunction in the project cdap by caskdata.

From the class StreamingMultiSinkFunction, method call.

/**
 * Processes one micro-batch: prepares every sink stage in the group, runs the combined
 * transform-and-write, registers lineage for the written outputs, and finally invokes
 * onRunFinish() on each stage regardless of success.
 *
 * <p>Errors during prepareRun() abort the batch (logged, not rethrown); errors during the
 * write are logged and reflected in the {@code succeeded} flag passed to onRunFinish().
 *
 * @param data      the records of this batch
 * @param batchTime the batch time, whose milliseconds value is used as the logical start time
 * @throws Exception if creating the stages fails
 */
@Override
public void call(JavaRDD<RecordInfo<Object>> data, Time batchTime) throws Exception {
    long logicalStartTime = batchTime.milliseconds();
    MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime, sec.getSecureStore(), sec.getServiceDiscoverer(), sec.getNamespace());
    PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), phaseSpec.isStageLoggingEnabled(), phaseSpec.isProcessTimingEnabled());
    SparkBatchSinkFactory sinkFactory = new SparkBatchSinkFactory();
    PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, logicalStartTime);
    Map<String, SubmitterLifecycle<?>> stages = createStages(evaluator);
    // call prepareRun() on all the stages in the group
    // need to call it in an order that guarantees that inputs are called before outputs
    // this is because plugins can call getArguments().set() in the prepareRun() method,
    // which downstream stages should be able to read
    // Fixed: use the diamond operator instead of the raw ArrayList type.
    List<String> traversalOrder = new ArrayList<>(group.size());
    for (String stageName : phaseSpec.getPhase().getDag().getTopologicalOrder()) {
        if (group.contains(stageName)) {
            traversalOrder.add(stageName);
        }
    }
    for (String stageName : traversalOrder) {
        SubmitterLifecycle<?> plugin = stages.get(stageName);
        StageSpec stageSpec = phaseSpec.getPhase().getStage(stageName);
        try {
            prepareRun(pipelineRuntime, sinkFactory, stageSpec, plugin);
        } catch (Exception e) {
            // abort the whole batch if any sink cannot be prepared
            LOG.error("Error preparing sink {} for the batch for time {}.", stageName, logicalStartTime, e);
            return;
        }
    }
    // run the actual transforms and sinks in this group
    boolean ranSuccessfully = true;
    try {
        MultiSinkFunction multiSinkFunction = new MultiSinkFunction(sec, phaseSpec, group, collectors);
        Set<String> outputNames = sinkFactory.writeCombinedRDD(data.flatMapToPair(multiSinkFunction), sec, sinkNames);
        // register WRITE lineage for every output inside a transaction
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext context) throws Exception {
                for (String outputName : outputNames) {
                    ExternalDatasets.registerLineage(sec.getAdmin(), outputName, AccessType.WRITE, null, () -> context.getDataset(outputName));
                }
            }
        });
    } catch (Exception e) {
        // a failed write is recorded so onRunFinish() can be told the batch did not succeed
        LOG.error("Error writing to sinks {} for the batch for time {}.", sinkNames, logicalStartTime, e);
        ranSuccessfully = false;
    }
    // run onRunFinish() for each sink
    for (String stageName : traversalOrder) {
        SubmitterLifecycle<?> plugin = stages.get(stageName);
        StageSpec stageSpec = phaseSpec.getPhase().getStage(stageName);
        try {
            onRunFinish(pipelineRuntime, sinkFactory, stageSpec, plugin, ranSuccessfully);
        } catch (Exception e) {
            // onRunFinish failures are best-effort; log and continue with the remaining sinks
            LOG.warn("Unable to execute onRunFinish for sink {}", stageName, e);
        }
    }
}
Also used : SubmitterLifecycle(io.cdap.cdap.etl.api.SubmitterLifecycle) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator) PipelineRuntime(io.cdap.cdap.etl.common.PipelineRuntime) SparkPipelineRuntime(io.cdap.cdap.etl.spark.SparkPipelineRuntime) SparkPipelinePluginContext(io.cdap.cdap.etl.spark.plugin.SparkPipelinePluginContext) PluginContext(io.cdap.cdap.api.plugin.PluginContext) SparkPipelineRuntime(io.cdap.cdap.etl.spark.SparkPipelineRuntime) ArrayList(java.util.ArrayList) MultiSinkFunction(io.cdap.cdap.etl.spark.function.MultiSinkFunction) TransactionFailureException(org.apache.tephra.TransactionFailureException) SparkPipelinePluginContext(io.cdap.cdap.etl.spark.plugin.SparkPipelinePluginContext) SparkBatchSinkFactory(io.cdap.cdap.etl.spark.batch.SparkBatchSinkFactory) TxRunnable(io.cdap.cdap.api.TxRunnable) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) BasicArguments(io.cdap.cdap.etl.common.BasicArguments) DatasetContext(io.cdap.cdap.api.data.DatasetContext)

Aggregations

MultiSinkFunction (io.cdap.cdap.etl.spark.function.MultiSinkFunction)2 TxRunnable (io.cdap.cdap.api.TxRunnable)1 DatasetContext (io.cdap.cdap.api.data.DatasetContext)1 KeyValue (io.cdap.cdap.api.dataset.lib.KeyValue)1 MacroEvaluator (io.cdap.cdap.api.macro.MacroEvaluator)1 PluginContext (io.cdap.cdap.api.plugin.PluginContext)1 SubmitterLifecycle (io.cdap.cdap.etl.api.SubmitterLifecycle)1 BasicArguments (io.cdap.cdap.etl.common.BasicArguments)1 DefaultMacroEvaluator (io.cdap.cdap.etl.common.DefaultMacroEvaluator)1 PipelineRuntime (io.cdap.cdap.etl.common.PipelineRuntime)1 StageSpec (io.cdap.cdap.etl.proto.v2.spec.StageSpec)1 SparkPipelineRuntime (io.cdap.cdap.etl.spark.SparkPipelineRuntime)1 SparkBatchSinkFactory (io.cdap.cdap.etl.spark.batch.SparkBatchSinkFactory)1 SparkPipelinePluginContext (io.cdap.cdap.etl.spark.plugin.SparkPipelinePluginContext)1 ArrayList (java.util.ArrayList)1 PairFlatMapFunction (org.apache.spark.api.java.function.PairFlatMapFunction)1 TransactionFailureException (org.apache.tephra.TransactionFailureException)1