Example 6 with DefaultStageMetrics

Use of co.cask.cdap.etl.common.DefaultStageMetrics in project cdap by caskdata.

The class RDDCollection, method publishAlerts:

@Override
public void publishAlerts(StageSpec stageSpec, StageStatisticsCollector collector) throws Exception {
    // Instantiate the alert publisher plugin for this stage and initialize it with the pipeline runtime
    PluginFunctionContext pluginFunctionContext = new PluginFunctionContext(stageSpec, sec, collector);
    AlertPublisher alertPublisher = pluginFunctionContext.createPlugin();
    PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec);
    AlertPublisherContext alertPublisherContext = new DefaultAlertPublisherContext(pipelineRuntime, stageSpec, sec.getMessagingContext(), sec.getAdmin());
    alertPublisher.initialize(alertPublisherContext);
    // Collect the alerts to the driver and wrap the iterator so each consumed alert is counted against this stage's metrics
    StageMetrics stageMetrics = new DefaultStageMetrics(sec.getMetrics(), stageSpec.getName());
    TrackedIterator<Alert> trackedAlerts = new TrackedIterator<>(((JavaRDD<Alert>) rdd).collect().iterator(), stageMetrics, Constants.Metrics.RECORDS_IN);
    alertPublisher.publish(trackedAlerts);
    alertPublisher.destroy();
}
Also used: PluginFunctionContext(co.cask.cdap.etl.spark.function.PluginFunctionContext) AlertPublisher(co.cask.cdap.etl.api.AlertPublisher) SparkPipelineRuntime(co.cask.cdap.etl.spark.SparkPipelineRuntime) PipelineRuntime(co.cask.cdap.etl.common.PipelineRuntime) TrackedIterator(co.cask.cdap.etl.common.TrackedIterator) Alert(co.cask.cdap.etl.api.Alert) DefaultAlertPublisherContext(co.cask.cdap.etl.common.DefaultAlertPublisherContext) AlertPublisherContext(co.cask.cdap.etl.api.AlertPublisherContext) StageMetrics(co.cask.cdap.etl.api.StageMetrics) DefaultStageMetrics(co.cask.cdap.etl.common.DefaultStageMetrics) JavaRDD(org.apache.spark.api.java.JavaRDD)
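
To make the tracking idea concrete, the following is a minimal, self-contained sketch of what a TrackedIterator-style wrapper does: it delegates to an underlying iterator and bumps a counter for every element the alert publisher consumes. The CountingIterator class and the AtomicLong counter below are illustrative stand-ins, not CDAP's actual TrackedIterator or StageMetrics APIs.

import java.util.Arrays;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicLong;

// Illustrative stand-in: counts elements as a consumer pulls them from the wrapped iterator.
public final class CountingIterator<T> implements Iterator<T> {
    private final Iterator<T> delegate;
    private final AtomicLong counter;

    public CountingIterator(Iterator<T> delegate, AtomicLong counter) {
        this.delegate = delegate;
        this.counter = counter;
    }

    @Override
    public boolean hasNext() {
        return delegate.hasNext();
    }

    @Override
    public T next() {
        T element = delegate.next();
        // One increment per record actually consumed, mirroring how the records-in metric is tracked.
        counter.incrementAndGet();
        return element;
    }

    public static void main(String[] args) {
        AtomicLong recordsIn = new AtomicLong();
        Iterator<String> alerts = new CountingIterator<>(Arrays.asList("a", "b", "c").iterator(), recordsIn);
        while (alerts.hasNext()) {
            alerts.next();
        }
        System.out.println("records consumed: " + recordsIn.get()); // prints 3
    }
}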

Example 7 with DefaultStageMetrics

Use of co.cask.cdap.etl.common.DefaultStageMetrics in project cdap by caskdata.

The class StreamingAlertPublishFunction, method call:

@Override
public Void call(JavaRDD<Alert> data, Time batchTime) throws Exception {
    // Evaluate macros against the logical start time of this micro-batch
    MacroEvaluator evaluator = new DefaultMacroEvaluator(new BasicArguments(sec), batchTime.milliseconds(), sec.getSecureStore(), sec.getNamespace());
    PluginContext pluginContext = new SparkPipelinePluginContext(sec.getPluginContext(), sec.getMetrics(), stageSpec.isStageLoggingEnabled(), stageSpec.isProcessTimingEnabled());
    String stageName = stageSpec.getName();
    // Instantiate a fresh alert publisher for this batch and initialize it with the pipeline runtime
    AlertPublisher alertPublisher = pluginContext.newPluginInstance(stageName, evaluator);
    PipelineRuntime pipelineRuntime = new SparkPipelineRuntime(sec, batchTime.milliseconds());
    AlertPublisherContext alertPublisherContext = new DefaultAlertPublisherContext(pipelineRuntime, stageSpec, sec.getMessagingContext(), sec.getAdmin());
    alertPublisher.initialize(alertPublisherContext);
    // Collect the batch's alerts to the driver; the TrackedIterator counts each alert against the stage's metrics
    StageMetrics stageMetrics = new DefaultStageMetrics(sec.getMetrics(), stageName);
    TrackedIterator<Alert> trackedAlerts = new TrackedIterator<>(data.collect().iterator(), stageMetrics, Constants.Metrics.RECORDS_IN);
    alertPublisher.publish(trackedAlerts);
    alertPublisher.destroy();
    return null;
}
Also used: MacroEvaluator(co.cask.cdap.api.macro.MacroEvaluator) DefaultMacroEvaluator(co.cask.cdap.etl.common.DefaultMacroEvaluator) AlertPublisher(co.cask.cdap.etl.api.AlertPublisher) PipelineRuntime(co.cask.cdap.etl.common.PipelineRuntime) SparkPipelineRuntime(co.cask.cdap.etl.spark.SparkPipelineRuntime) SparkPipelinePluginContext(co.cask.cdap.etl.spark.plugin.SparkPipelinePluginContext) PluginContext(co.cask.cdap.api.plugin.PluginContext) TrackedIterator(co.cask.cdap.etl.common.TrackedIterator) Alert(co.cask.cdap.etl.api.Alert) BasicArguments(co.cask.cdap.etl.common.BasicArguments) DefaultAlertPublisherContext(co.cask.cdap.etl.common.DefaultAlertPublisherContext) AlertPublisherContext(co.cask.cdap.etl.api.AlertPublisherContext) StageMetrics(co.cask.cdap.etl.api.StageMetrics) DefaultStageMetrics(co.cask.cdap.etl.common.DefaultStageMetrics)
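
The common thread in these examples is that DefaultStageMetrics scopes metric names to a single stage, so one pipeline-wide metrics collector can be shared by every plugin. Below is a rough, self-contained sketch of that idea; the MetricsSink interface and the exact name-prefixing scheme are assumptions for illustration, not the actual DefaultStageMetrics implementation.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;

// Illustrative stand-ins for a pipeline-wide metrics collector and a stage-scoped view of it.
interface MetricsSink {
    void count(String metricName, long delta);
}

final class StageScopedMetrics implements MetricsSink {
    private final MetricsSink pipelineMetrics;
    private final String stageName;

    StageScopedMetrics(MetricsSink pipelineMetrics, String stageName) {
        this.pipelineMetrics = pipelineMetrics;
        this.stageName = stageName;
    }

    @Override
    public void count(String metricName, long delta) {
        // Qualify the metric with the stage name so two stages never collide.
        pipelineMetrics.count(stageName + "." + metricName, delta);
    }
}

public class StageScopedMetricsDemo {
    public static void main(String[] args) {
        Map<String, LongAdder> store = new ConcurrentHashMap<>();
        MetricsSink pipelineMetrics =
            (name, delta) -> store.computeIfAbsent(name, k -> new LongAdder()).add(delta);

        MetricsSink sourceMetrics = new StageScopedMetrics(pipelineMetrics, "fileSource");
        MetricsSink sinkMetrics = new StageScopedMetrics(pipelineMetrics, "tableSink");
        sourceMetrics.count("records.out", 5);
        sinkMetrics.count("records.in", 5);
        System.out.println(store.keySet()); // fileSource.records.out and tableSink.records.in (order may vary)
    }
}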

Example 8 with DefaultStageMetrics

Use of co.cask.cdap.etl.common.DefaultStageMetrics in project cdap by caskdata.

The class PipelinePluginContext, method wrapPlugin:

private Object wrapPlugin(String pluginId, Object plugin) {
    Caller caller = getCaller(pluginId);
    // Metrics emitted by the plugin are scoped to its stage (plugin) id
    StageMetrics stageMetrics = new DefaultStageMetrics(metrics, pluginId);
    // Only time plugin operations when process timing is enabled for the pipeline
    OperationTimer operationTimer = processTimingEnabled ? new MetricsOperationTimer(stageMetrics) : NoOpOperationTimer.INSTANCE;
    // Wrap each known plugin type in a decorator that delegates through the caller and records timing
    if (plugin instanceof Action) {
        return new WrappedAction((Action) plugin, caller);
    } else if (plugin instanceof BatchSource) {
        return new WrappedBatchSource<>((BatchSource) plugin, caller, operationTimer);
    } else if (plugin instanceof BatchSink) {
        return new WrappedBatchSink<>((BatchSink) plugin, caller, operationTimer);
    } else if (plugin instanceof ErrorTransform) {
        return new WrappedErrorTransform<>((ErrorTransform) plugin, caller, operationTimer);
    } else if (plugin instanceof Transform) {
        return new WrappedTransform<>((Transform) plugin, caller, operationTimer);
    } else if (plugin instanceof BatchAggregator) {
        return new WrappedBatchAggregator<>((BatchAggregator) plugin, caller, operationTimer);
    } else if (plugin instanceof BatchJoiner) {
        return new WrappedBatchJoiner<>((BatchJoiner) plugin, caller, operationTimer);
    } else if (plugin instanceof PostAction) {
        return new WrappedPostAction((PostAction) plugin, caller);
    } else if (plugin instanceof SplitterTransform) {
        return new WrappedSplitterTransform<>((SplitterTransform) plugin, caller, operationTimer);
    }
    return wrapUnknownPlugin(pluginId, plugin, caller);
}
Also used: Action(co.cask.cdap.etl.api.action.Action) PostAction(co.cask.cdap.etl.api.batch.PostAction) BatchSource(co.cask.cdap.etl.api.batch.BatchSource) SplitterTransform(co.cask.cdap.etl.api.SplitterTransform) BatchJoiner(co.cask.cdap.etl.api.batch.BatchJoiner) ErrorTransform(co.cask.cdap.etl.api.ErrorTransform) BatchAggregator(co.cask.cdap.etl.api.batch.BatchAggregator) BatchSink(co.cask.cdap.etl.api.batch.BatchSink) StageMetrics(co.cask.cdap.etl.api.StageMetrics) DefaultStageMetrics(co.cask.cdap.etl.common.DefaultStageMetrics) Transform(co.cask.cdap.etl.api.Transform)
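
wrapPlugin is a plain decorator pattern: each Wrapped* class delegates to the real plugin while the Caller establishes the calling context and the OperationTimer records how long each call took. Below is a minimal sketch of the timing half of that idea; the SimpleTransform interface, TimedTransform wrapper, and the nanosecond reporting callback are simplified stand-ins rather than CDAP's WrappedTransform or MetricsOperationTimer.

import java.util.function.LongConsumer;

// Simplified stand-in for a one-method plugin interface.
interface SimpleTransform<IN, OUT> {
    OUT apply(IN input) throws Exception;
}

// Decorator that times every call to the wrapped plugin and reports the elapsed nanoseconds.
final class TimedTransform<IN, OUT> implements SimpleTransform<IN, OUT> {
    private final SimpleTransform<IN, OUT> delegate;
    private final LongConsumer elapsedNanosReporter;

    TimedTransform(SimpleTransform<IN, OUT> delegate, LongConsumer elapsedNanosReporter) {
        this.delegate = delegate;
        this.elapsedNanosReporter = elapsedNanosReporter;
    }

    @Override
    public OUT apply(IN input) throws Exception {
        long start = System.nanoTime();
        try {
            return delegate.apply(input);
        } finally {
            // Report timing even if the wrapped plugin throws, mirroring the operation-timer idea.
            elapsedNanosReporter.accept(System.nanoTime() - start);
        }
    }

    public static void main(String[] args) throws Exception {
        SimpleTransform<String, Integer> parse = Integer::parseInt;
        SimpleTransform<String, Integer> timed =
            new TimedTransform<>(parse, nanos -> System.out.println("parse took " + nanos + " ns"));
        System.out.println(timed.apply("42"));
    }
}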

Example 9 with DefaultStageMetrics

Use of co.cask.cdap.etl.common.DefaultStageMetrics in project cdap by caskdata.

The class SmartWorkflow, method destroy:

@Override
public void destroy() {
    WorkflowContext workflowContext = getContext();
    PipelineRuntime pipelineRuntime = new PipelineRuntime(workflowContext, workflowMetrics);
    // Execute the post actions only if pipeline is not running in preview mode.
    if (!workflowContext.getDataTracer(PostAction.PLUGIN_TYPE).isEnabled()) {
        for (Map.Entry<String, PostAction> endingActionEntry : postActions.entrySet()) {
            String name = endingActionEntry.getKey();
            PostAction action = endingActionEntry.getValue();
            StageSpec stageSpec = stageSpecs.get(name);
            BatchActionContext context = new WorkflowBackedActionContext(workflowContext, pipelineRuntime, stageSpec);
            try {
                action.run(context);
            } catch (Throwable t) {
                LOG.error("Error while running post action {}.", name, t);
            }
        }
    }
    // publish all alerts
    for (Map.Entry<String, AlertPublisher> alertPublisherEntry : alertPublishers.entrySet()) {
        String name = alertPublisherEntry.getKey();
        AlertPublisher alertPublisher = alertPublisherEntry.getValue();
        PartitionedFileSet alertConnector = workflowContext.getDataset(name);
        try (CloseableIterator<Alert> alerts = new AlertReader(alertConnector.getPartitions(PartitionFilter.ALWAYS_MATCH))) {
            if (!alerts.hasNext()) {
                continue;
            }
            StageMetrics stageMetrics = new DefaultStageMetrics(workflowMetrics, name);
            StageSpec stageSpec = stageSpecs.get(name);
            AlertPublisherContext alertContext = new DefaultAlertPublisherContext(pipelineRuntime, stageSpec, workflowContext, workflowContext.getAdmin());
            alertPublisher.initialize(alertContext);
            TrackedIterator<Alert> trackedIterator = new TrackedIterator<>(alerts, stageMetrics, Constants.Metrics.RECORDS_IN);
            alertPublisher.publish(trackedIterator);
        } catch (Exception e) {
            LOG.warn("Stage {} had errors publishing alerts. Alerts may not have been published.", name, e);
        } finally {
            try {
                alertPublisher.destroy();
            } catch (Exception e) {
                LOG.warn("Error destroying alert publisher for stage {}", name, e);
            }
        }
    }
    ProgramStatus status = getContext().getState().getStatus();
    if (status == ProgramStatus.FAILED) {
        WRAPPERLOGGER.error("Pipeline '{}' failed.", getContext().getApplicationSpecification().getName());
    } else {
        WRAPPERLOGGER.info("Pipeline '{}' {}.", getContext().getApplicationSpecification().getName(), status == ProgramStatus.COMPLETED ? "succeeded" : status.name().toLowerCase());
    }
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(pipelineRuntime.getArguments(), workflowContext.getLogicalStartTime(), workflowContext, workflowContext.getNamespace());
    // Get resolved plugin properties
    Map<String, Map<String, String>> resolvedProperties = new HashMap<>();
    for (StageSpec spec : stageSpecs.values()) {
        String stageName = spec.getName();
        resolvedProperties.put(stageName, workflowContext.getPluginProperties(stageName, macroEvaluator).getProperties());
    }
    // Add resolved plugin properties to workflow token as a JSON String
    workflowContext.getToken().put(RESOLVED_PLUGIN_PROPERTIES_MAP, GSON.toJson(resolvedProperties));
}
Also used: PipelineRuntime(co.cask.cdap.etl.common.PipelineRuntime) MacroEvaluator(co.cask.cdap.api.macro.MacroEvaluator) DefaultMacroEvaluator(co.cask.cdap.etl.common.DefaultMacroEvaluator) BatchActionContext(co.cask.cdap.etl.api.batch.BatchActionContext) WorkflowBackedActionContext(co.cask.cdap.etl.batch.WorkflowBackedActionContext) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) AlertReader(co.cask.cdap.etl.batch.connector.AlertReader) StageSpec(co.cask.cdap.etl.spec.StageSpec) StageMetrics(co.cask.cdap.etl.api.StageMetrics) DefaultStageMetrics(co.cask.cdap.etl.common.DefaultStageMetrics) DefaultAlertPublisherContext(co.cask.cdap.etl.common.DefaultAlertPublisherContext) AlertPublisherContext(co.cask.cdap.etl.api.AlertPublisherContext) AlertPublisher(co.cask.cdap.etl.api.AlertPublisher) TrackedIterator(co.cask.cdap.etl.common.TrackedIterator) WorkflowContext(co.cask.cdap.api.workflow.WorkflowContext) PartitionedFileSet(co.cask.cdap.api.dataset.lib.PartitionedFileSet) DisjointConnectionsException(co.cask.cdap.etl.planner.DisjointConnectionsException) Alert(co.cask.cdap.etl.api.Alert) PostAction(co.cask.cdap.etl.api.batch.PostAction) Map(java.util.Map) ProgramStatus(co.cask.cdap.api.ProgramStatus)
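
The last step of destroy() serializes the resolved plugin properties into the workflow token as a JSON string. The following is a small sketch of that nested-map shape; the stage name "File" and the "path" property are made-up illustration values, and reading the value back is shown with plain Gson rather than a CDAP workflow-token API.

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;

public class ResolvedPropertiesJson {
    private static final Gson GSON = new Gson();
    private static final Type MAP_TYPE = new TypeToken<Map<String, Map<String, String>>>() { }.getType();

    public static void main(String[] args) {
        Map<String, Map<String, String>> resolved = new HashMap<>();
        Map<String, String> stageProps = new HashMap<>();
        stageProps.put("path", "/data/input");
        resolved.put("File", stageProps);

        // Serialize the nested map the same way destroy() writes it into the workflow token
        String json = GSON.toJson(resolved);

        // A consumer can deserialize it back into the same stage -> properties shape
        Map<String, Map<String, String>> roundTrip = GSON.fromJson(json, MAP_TYPE);
        System.out.println(roundTrip.get("File").get("path")); // prints /data/input
    }
}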

Example 10 with DefaultStageMetrics

Use of co.cask.cdap.etl.common.DefaultStageMetrics in project cdap by caskdata.

The class MapReduceTransformExecutorFactory, method getTransformation:

@SuppressWarnings("unchecked")
private <IN, OUT> TrackedTransform<IN, OUT> getTransformation(StageSpec stageSpec) throws Exception {
    DefaultMacroEvaluator macroEvaluator = new DefaultMacroEvaluator(arguments, taskContext.getLogicalStartTime(), taskContext, taskContext.getNamespace());
    String stageName = stageSpec.getName();
    String pluginType = stageSpec.getPluginType();
    StageMetrics stageMetrics = new DefaultStageMetrics(metrics, stageName);
    TaskAttemptContext taskAttemptContext = (TaskAttemptContext) taskContext.getHadoopContext();
    // Per-stage statistics are only collected when the pipeline contains a condition plugin
    StageStatisticsCollector collector = isPipelineContainsCondition ? new MapReduceStageStatisticsCollector(stageName, taskAttemptContext) : new NoopStageStatisticsCollector();
    // Aggregators and joiners emit keys in the map phase and aggregate or merge records in the reduce phase
    if (BatchAggregator.PLUGIN_TYPE.equals(pluginType)) {
        BatchAggregator<?, ?, ?> batchAggregator = pluginInstantiator.newPluginInstance(stageName, macroEvaluator);
        BatchRuntimeContext runtimeContext = createRuntimeContext(stageSpec);
        batchAggregator.initialize(runtimeContext);
        if (isMapPhase) {
            return getTrackedEmitKeyStep(new MapperAggregatorTransformation(batchAggregator, mapOutputKeyClassName, mapOutputValClassName), stageMetrics, taskContext.getDataTracer(stageName), collector);
        } else {
            return getTrackedAggregateStep(new ReducerAggregatorTransformation(batchAggregator, mapOutputKeyClassName, mapOutputValClassName), stageMetrics, taskContext.getDataTracer(stageName), collector);
        }
    } else if (BatchJoiner.PLUGIN_TYPE.equals(pluginType)) {
        BatchJoiner<?, ?, ?> batchJoiner = pluginInstantiator.newPluginInstance(stageName, macroEvaluator);
        BatchJoinerRuntimeContext runtimeContext = createRuntimeContext(stageSpec);
        batchJoiner.initialize(runtimeContext);
        if (isMapPhase) {
            return getTrackedEmitKeyStep(new MapperJoinerTransformation(batchJoiner, mapOutputKeyClassName, mapOutputValClassName), stageMetrics, taskContext.getDataTracer(stageName), collector);
        } else {
            return getTrackedMergeStep(new ReducerJoinerTransformation(batchJoiner, mapOutputKeyClassName, mapOutputValClassName, runtimeContext.getInputSchemas().size()), stageMetrics, taskContext.getDataTracer(stageName), collector);
        }
    }
    Transformation transformation = getInitializedTransformation(stageSpec);
    boolean isLimitingSource = taskContext.getDataTracer(stageName).isEnabled() && BatchSource.PLUGIN_TYPE.equals(pluginType) && isMapPhase;
    transformation = isLimitingSource ? new LimitingTransform(transformation, numberOfRecordsPreview) : transformation;
    // we emit metrics for records into alert publishers when the actual alerts are published,
    // not when we write the alerts to the temporary dataset
    String recordsInMetric = AlertPublisher.PLUGIN_TYPE.equals(pluginType) ? null : Constants.Metrics.RECORDS_IN;
    return new TrackedTransform<>(transformation, stageMetrics, recordsInMetric, Constants.Metrics.RECORDS_OUT, taskContext.getDataTracer(stageName), collector);
}
Also used: BatchJoinerRuntimeContext(co.cask.cdap.etl.api.batch.BatchJoinerRuntimeContext) TrackedTransform(co.cask.cdap.etl.common.TrackedTransform) NoopStageStatisticsCollector(co.cask.cdap.etl.common.NoopStageStatisticsCollector) Transformation(co.cask.cdap.etl.api.Transformation) LimitingTransform(co.cask.cdap.etl.common.preview.LimitingTransform) TaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext) BatchJoiner(co.cask.cdap.etl.api.batch.BatchJoiner) StageStatisticsCollector(co.cask.cdap.etl.common.StageStatisticsCollector) BatchRuntimeContext(co.cask.cdap.etl.api.batch.BatchRuntimeContext) DefaultMacroEvaluator(co.cask.cdap.etl.common.DefaultMacroEvaluator) StageMetrics(co.cask.cdap.etl.api.StageMetrics) DefaultStageMetrics(co.cask.cdap.etl.common.DefaultStageMetrics)
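
The comment in the code explains why recordsInMetric can be null for alert publisher stages: their records-in metric is emitted later, when the alerts are actually published, not when they are written to the temporary dataset. The sketch below shows that idea with a transform wrapper that counts records in and out but skips input counting when no input metric name is given; the Step interface, CountingStep wrapper, and metric names are simplified stand-ins, not the actual TrackedTransform.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Simplified stand-ins for a transformation step and a stage-scoped metrics store.
interface Step<IN, OUT> {
    void transform(IN input, List<OUT> emitter) throws Exception;
}

final class CountingStep<IN, OUT> implements Step<IN, OUT> {
    private final Step<IN, OUT> delegate;
    private final Map<String, Long> metrics;
    private final String inMetric;   // may be null, e.g. for alert publisher stages
    private final String outMetric;

    CountingStep(Step<IN, OUT> delegate, Map<String, Long> metrics, String inMetric, String outMetric) {
        this.delegate = delegate;
        this.metrics = metrics;
        this.inMetric = inMetric;
        this.outMetric = outMetric;
    }

    @Override
    public void transform(IN input, List<OUT> emitter) throws Exception {
        if (inMetric != null) {
            // Only count the input when this stage is supposed to report records-in here.
            metrics.merge(inMetric, 1L, Long::sum);
        }
        int before = emitter.size();
        delegate.transform(input, emitter);
        metrics.merge(outMetric, (long) (emitter.size() - before), Long::sum);
    }

    public static void main(String[] args) throws Exception {
        Map<String, Long> metrics = new HashMap<>();
        Step<String, String> upper = (in, out) -> out.add(in.toUpperCase());
        Step<String, String> tracked = new CountingStep<>(upper, metrics, "records.in", "records.out");
        List<String> emitted = new ArrayList<>();
        tracked.transform("alert", emitted);
        System.out.println(emitted + " " + metrics); // [ALERT] with both counters at 1 (map order may vary)
    }
}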

Aggregations

DefaultStageMetrics (co.cask.cdap.etl.common.DefaultStageMetrics): 10 uses
StageMetrics (co.cask.cdap.etl.api.StageMetrics): 8 uses
DefaultMacroEvaluator (co.cask.cdap.etl.common.DefaultMacroEvaluator): 5 uses
TrackedTransform (co.cask.cdap.etl.common.TrackedTransform): 4 uses
Alert (co.cask.cdap.etl.api.Alert): 3 uses
AlertPublisher (co.cask.cdap.etl.api.AlertPublisher): 3 uses
AlertPublisherContext (co.cask.cdap.etl.api.AlertPublisherContext): 3 uses
Transformation (co.cask.cdap.etl.api.Transformation): 3 uses
BatchJoiner (co.cask.cdap.etl.api.batch.BatchJoiner): 3 uses
DefaultAlertPublisherContext (co.cask.cdap.etl.common.DefaultAlertPublisherContext): 3 uses
PipelineRuntime (co.cask.cdap.etl.common.PipelineRuntime): 3 uses
TrackedIterator (co.cask.cdap.etl.common.TrackedIterator): 3 uses
MacroEvaluator (co.cask.cdap.api.macro.MacroEvaluator): 2 uses
BatchJoinerRuntimeContext (co.cask.cdap.etl.api.batch.BatchJoinerRuntimeContext): 2 uses
BatchRuntimeContext (co.cask.cdap.etl.api.batch.BatchRuntimeContext): 2 uses
PostAction (co.cask.cdap.etl.api.batch.PostAction): 2 uses
DefaultEmitter (co.cask.cdap.etl.common.DefaultEmitter): 2 uses
NoopStageStatisticsCollector (co.cask.cdap.etl.common.NoopStageStatisticsCollector): 2 uses
TransformDetail (co.cask.cdap.etl.common.TransformDetail): 2 uses
TxLookupProvider (co.cask.cdap.etl.common.TxLookupProvider): 2 uses