Example 1 with DatasetContext

use of io.cdap.cdap.api.data.DatasetContext in project cdap by caskdata.

the class NoSQLTransactionals method createTransactional.

/**
 * Creates a {@link Transactional} for an entity table. The regular {@link io.cdap.cdap.api.Transactionals} class cannot be
 * used here due to a cyclic dependency between the dataset service and the NoSQL StructuredTable.
 *
 * @param txClient transaction client
 * @param datasetSupplier supplies the dataset for the entity table
 * @return transactional for the entity table
 */
public static Transactional createTransactional(TransactionSystemClient txClient, TableDatasetSupplier datasetSupplier) {
    return new Transactional() {

        @Override
        public void execute(io.cdap.cdap.api.TxRunnable runnable) throws TransactionFailureException {
            TransactionContext txContext = new TransactionContext(txClient);
            try (EntityTableDatasetContext datasetContext = new EntityTableDatasetContext(txContext, datasetSupplier)) {
                txContext.start();
                finishExecute(txContext, datasetContext, runnable);
            } catch (Exception e) {
                // Propagates TransactionFailureException and unchecked exceptions as-is;
                // any other checked exception is swallowed here.
                Throwables.propagateIfPossible(e, TransactionFailureException.class);
            }
        }

        @Override
        public void execute(int timeout, io.cdap.cdap.api.TxRunnable runnable) throws TransactionFailureException {
            TransactionContext txContext = new TransactionContext(txClient);
            try (EntityTableDatasetContext datasetContext = new EntityTableDatasetContext(txContext, datasetSupplier)) {
                txContext.start(timeout);
                finishExecute(txContext, datasetContext, runnable);
            } catch (Exception e) {
                Throwables.propagateIfPossible(e, TransactionFailureException.class);
            }
        }

        private void finishExecute(TransactionContext txContext, DatasetContext dsContext, io.cdap.cdap.api.TxRunnable runnable) throws TransactionFailureException {
            try {
                runnable.run(dsContext);
            } catch (Exception e) {
                txContext.abort(new TransactionFailureException("Exception raised from TxRunnable.run() " + runnable, e));
            }
            // The call to txContext.abort() above always throws, so this point is
            // reached if and only if runnable.run() returned normally.
            txContext.finish();
        }
    };
}
Also used : TransactionFailureException(org.apache.tephra.TransactionFailureException) TransactionContext(org.apache.tephra.TransactionContext) DatasetContext(io.cdap.cdap.api.data.DatasetContext) Transactional(io.cdap.cdap.api.Transactional)
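
The returned Transactional can then drive a unit of work against the entity table. A minimal usage sketch, not code from the project; the dataset name "entity.registry" is made up, and txClient/datasetSupplier are assumed to be in scope:

Transactional transactional = NoSQLTransactionals.createTransactional(txClient, datasetSupplier);
transactional.execute(context -> {
    // "entity.registry" is a hypothetical name; the real table comes from the TableDatasetSupplier
    Dataset entityTable = context.getDataset("entity.registry");
    // ... reads and writes here run in one transaction; the commit happens
    // when execute() returns without throwing
});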

Example 2 with DatasetContext

use of io.cdap.cdap.api.data.DatasetContext in project cdap by caskdata.

the class HiveExploreStructuredRecordTestRun method start.

@BeforeClass
public static void start() throws Exception {
    initialize(tmpFolder);
    DatasetModuleId moduleId = NAMESPACE_ID.datasetModule("email");
    datasetFramework.addModule(moduleId, new EmailTableDefinition.EmailTableModule());
    datasetFramework.addInstance("email", MY_TABLE, DatasetProperties.EMPTY);
    transactional = Transactions.createTransactional(new MultiThreadDatasetCache(new SystemDatasetInstantiator(datasetFramework), transactionSystemClient, NAMESPACE_ID, Collections.<String, String>emptyMap(), null, null));
    transactional.execute(new TxRunnable() {

        @Override
        public void run(DatasetContext context) throws Exception {
            // Accessing dataset instance to perform data operations
            EmailTableDefinition.EmailTable table = context.getDataset(MY_TABLE.getDataset());
            Assert.assertNotNull(table);
            table.writeEmail("email1", "this is the subject", "this is the body", "sljackson@boss.com");
        }
    });
    datasetFramework.addModule(NAMESPACE_ID.datasetModule("TableWrapper"), new TableWrapperDefinition.Module());
}
Also used : DatasetModuleId(io.cdap.cdap.proto.id.DatasetModuleId) MultiThreadDatasetCache(io.cdap.cdap.data2.dataset2.MultiThreadDatasetCache) SystemDatasetInstantiator(io.cdap.cdap.data.dataset.SystemDatasetInstantiator) TxRunnable(io.cdap.cdap.api.TxRunnable) TableWrapperDefinition(io.cdap.cdap.explore.service.datasets.TableWrapperDefinition) EmailTableDefinition(io.cdap.cdap.explore.service.datasets.EmailTableDefinition) DatasetContext(io.cdap.cdap.api.data.DatasetContext) BeforeClass(org.junit.BeforeClass)
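
Tests that register modules and instances in @BeforeClass usually remove them afterwards. A hedged sketch of a possible @AfterClass counterpart, assuming DatasetFramework offers deleteInstance/deleteModule methods mirroring the add calls above:

@AfterClass
public static void stop() throws Exception {
    // tear down in reverse order of start(); module ids are rebuilt the same way as above
    datasetFramework.deleteModule(NAMESPACE_ID.datasetModule("TableWrapper"));
    datasetFramework.deleteInstance(MY_TABLE);
    datasetFramework.deleteModule(NAMESPACE_ID.datasetModule("email"));
}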

Example 3 with DatasetContext

use of io.cdap.cdap.api.data.DatasetContext in project cdap by caskdata.

the class TxLookupProvider method executeLookup.

@Nullable
private <T, R> R executeLookup(final String table, final Map<String, String> arguments, final Function<Lookup<T>, R> func) {
    final AtomicReference<R> result = new AtomicReference<>();
    Transactionals.execute(tx, new TxRunnable() {

        @Override
        public void run(DatasetContext context) throws Exception {
            Lookup<T> lookup = getLookup(table, context.getDataset(table, arguments));
            result.set(func.apply(lookup));
        }
    });
    return result.get();
}
Also used : TxRunnable(io.cdap.cdap.api.TxRunnable) AtomicReference(java.util.concurrent.atomic.AtomicReference) Lookup(io.cdap.cdap.etl.api.Lookup) DatasetContext(io.cdap.cdap.api.data.DatasetContext) Nullable(javax.annotation.Nullable)
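
A hypothetical call site could look like this; the table name "users" and key "alice" are invented, and Lookup.lookup(String) is assumed to return the value stored under a single key:

String value = executeLookup("users", Collections.<String, String>emptyMap(),
    lookup -> (String) lookup.lookup("alice"));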

Example 4 with DatasetContext

use of io.cdap.cdap.api.data.DatasetContext in project cdap by caskdata.

the class DStreamCollection method compute.

@Override
public <U> SparkCollection<U> compute(StageSpec stageSpec, SparkCompute<T, U> compute) throws Exception {
    SparkCompute<T, U> wrappedCompute = new DynamicSparkCompute<>(new DynamicDriverContext(stageSpec, sec, new NoopStageStatisticsCollector()), compute);
    Transactionals.execute(sec, new TxRunnable() {

        @Override
        public void run(DatasetContext datasetContext) throws Exception {
            PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
            SparkExecutionPluginContext sparkPluginContext = new BasicSparkExecutionPluginContext(sec, JavaSparkContext.fromSparkContext(stream.context().sparkContext()), datasetContext, pipelineRuntime, stageSpec);
            wrappedCompute.initialize(sparkPluginContext);
        }
    }, Exception.class);
    return wrap(stream.transform(new ComputeTransformFunction<>(sec, stageSpec, wrappedCompute)));
}
Also used : DynamicSparkCompute(io.cdap.cdap.etl.spark.streaming.function.DynamicSparkCompute) NoopStageStatisticsCollector(io.cdap.cdap.etl.common.NoopStageStatisticsCollector) ComputeTransformFunction(io.cdap.cdap.etl.spark.streaming.function.ComputeTransformFunction) PipelineRuntime(io.cdap.cdap.etl.common.PipelineRuntime) SparkPipelineRuntime(io.cdap.cdap.etl.spark.SparkPipelineRuntime) BasicSparkExecutionPluginContext(io.cdap.cdap.etl.spark.batch.BasicSparkExecutionPluginContext) SparkExecutionPluginContext(io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext) TxRunnable(io.cdap.cdap.api.TxRunnable) DatasetContext(io.cdap.cdap.api.data.DatasetContext)
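
One detail worth noting: the trailing Exception.class selects the Transactionals.execute overload that rethrows that exception type unwrapped, which is how compute() can simply declare throws Exception. A sketch of the pattern, with riskyInitialize as a hypothetical method:

try {
    // the Class argument makes Transactionals.execute rethrow Exception as-is
    // rather than wrapping it in a RuntimeException
    Transactionals.execute(sec, ctx -> riskyInitialize(ctx), Exception.class);
} catch (Exception e) {
    // the original checked exception arrives here directly
    throw e;
}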

Example 5 with DatasetContext

use of io.cdap.cdap.api.data.DatasetContext in project cdap by caskdata.

the class StreamingMultiSinkFunction method call.

@Override
public void call(JavaRDD<RecordInfo<Object>> data, Time batchTime) throws Exception {
    long logicalStartTime = batchTime.milliseconds();
    MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), logicalStartTime, sec.getSecureStore(), sec.getServiceDiscoverer(), sec.getNamespace());
    PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), phaseSpec.isStageLoggingEnabled(), phaseSpec.isProcessTimingEnabled());
    SparkBatchSinkFactory sinkFactory = new SparkBatchSinkFactory();
    PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, logicalStartTime);
    Map<String, SubmitterLifecycle<?>> stages = createStages(evaluator);
    // call prepareRun() on all the stages in the group
    // need to call it in an order that guarantees that inputs are called before outputs
    // this is because plugins can call getArguments().set() in the prepareRun() method,
    // which downstream stages should be able to read
    List<String> traversalOrder = new ArrayList<>(group.size());
    for (String stageName : phaseSpec.getPhase().getDag().getTopologicalOrder()) {
        if (group.contains(stageName)) {
            traversalOrder.add(stageName);
        }
    }
    for (String stageName : traversalOrder) {
        SubmitterLifecycle<?> plugin = stages.get(stageName);
        StageSpec stageSpec = phaseSpec.getPhase().getStage(stageName);
        try {
            prepareRun(pipelineRuntime, sinkFactory, stageSpec, plugin);
        } catch (Exception e) {
            LOG.error("Error preparing sink {} for the batch for time {}.", stageName, logicalStartTime, e);
            return;
        }
    }
    // run the actual transforms and sinks in this group
    boolean ranSuccessfully = true;
    try {
        MultiSinkFunction multiSinkFunction = new MultiSinkFunction(sec, phaseSpec, group, collectors);
        Set<String> outputNames = sinkFactory.writeCombinedRDD(data.flatMapToPair(multiSinkFunction), sec, sinkNames);
        sec.execute(new TxRunnable() {

            @Override
            public void run(DatasetContext context) throws Exception {
                for (String outputName : outputNames) {
                    ExternalDatasets.registerLineage(sec.getAdmin(), outputName, AccessType.WRITE, null, () -> context.getDataset(outputName));
                }
            }
        });
    } catch (Exception e) {
        LOG.error("Error writing to sinks {} for the batch for time {}.", sinkNames, logicalStartTime, e);
        ranSuccessfully = false;
    }
    // run onRunFinish() for each sink
    for (String stageName : traversalOrder) {
        SubmitterLifecycle<?> plugin = stages.get(stageName);
        StageSpec stageSpec = phaseSpec.getPhase().getStage(stageName);
        try {
            onRunFinish(pipelineRuntime, sinkFactory, stageSpec, plugin, ranSuccessfully);
        } catch (Exception e) {
            LOG.warn("Unable to execute onRunFinish for sink {}", stageName, e);
        }
    }
}
Also used : SubmitterLifecycle(io.cdap.cdap.etl.api.SubmitterLifecycle) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator) PipelineRuntime(io.cdap.cdap.etl.common.PipelineRuntime) SparkPipelineRuntime(io.cdap.cdap.etl.spark.SparkPipelineRuntime) SparkPipelinePluginContext(io.cdap.cdap.etl.spark.plugin.SparkPipelinePluginContext) PluginContext(io.cdap.cdap.api.plugin.PluginContext) ArrayList(java.util.ArrayList) MultiSinkFunction(io.cdap.cdap.etl.spark.function.MultiSinkFunction) TransactionFailureException(org.apache.tephra.TransactionFailureException) SparkBatchSinkFactory(io.cdap.cdap.etl.spark.batch.SparkBatchSinkFactory) TxRunnable(io.cdap.cdap.api.TxRunnable) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) BasicArguments(io.cdap.cdap.etl.common.BasicArguments) DatasetContext(io.cdap.cdap.api.data.DatasetContext)
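
Since TxRunnable has a single abstract method, the lineage registration above can also be written as a lambda. A behavior-equivalent sketch (exception handling stays in the surrounding try block, as in the original):

sec.execute(context -> {
    for (String outputName : outputNames) {
        ExternalDatasets.registerLineage(sec.getAdmin(), outputName, AccessType.WRITE,
                null, () -> context.getDataset(outputName));
    }
});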

Aggregations

DatasetContext (io.cdap.cdap.api.data.DatasetContext): 16 uses
TxRunnable (io.cdap.cdap.api.TxRunnable): 15 uses
PipelineRuntime (io.cdap.cdap.etl.common.PipelineRuntime): 5 uses
SparkPipelineRuntime (io.cdap.cdap.etl.spark.SparkPipelineRuntime): 5 uses
JavaSparkContext (org.apache.spark.api.java.JavaSparkContext): 4 uses
Put (io.cdap.cdap.api.dataset.table.Put): 3 uses
Table (io.cdap.cdap.api.dataset.table.Table): 3 uses
MacroEvaluator (io.cdap.cdap.api.macro.MacroEvaluator): 3 uses
PluginContext (io.cdap.cdap.api.plugin.PluginContext): 3 uses
SparkExecutionPluginContext (io.cdap.cdap.etl.api.batch.SparkExecutionPluginContext): 3 uses
BasicArguments (io.cdap.cdap.etl.common.BasicArguments): 3 uses
DefaultMacroEvaluator (io.cdap.cdap.etl.common.DefaultMacroEvaluator): 3 uses
StageSpec (io.cdap.cdap.etl.proto.v2.spec.StageSpec): 3 uses
SparkPipelinePluginContext (io.cdap.cdap.etl.spark.plugin.SparkPipelinePluginContext): 3 uses
TransactionFailureException (org.apache.tephra.TransactionFailureException): 3 uses
FileSet (io.cdap.cdap.api.dataset.lib.FileSet): 2 uses
NoopStageStatisticsCollector (io.cdap.cdap.etl.common.NoopStageStatisticsCollector): 2 uses
BasicSparkExecutionPluginContext (io.cdap.cdap.etl.spark.batch.BasicSparkExecutionPluginContext): 2 uses
SparkBatchSinkFactory (io.cdap.cdap.etl.spark.batch.SparkBatchSinkFactory): 2 uses
PluginFunctionContext (io.cdap.cdap.etl.spark.function.PluginFunctionContext): 2 uses