The following example shows the use of io.cdap.cdap.etl.spark.function.AlertPassFilter in the cdap project by caskdata, taken from the addEmitted method of the SparkPipelineRunner class.
private EmittedRecords.Builder addEmitted(EmittedRecords.Builder builder, PipelinePhase pipelinePhase,
                                          StageSpec stageSpec, SparkCollection<RecordInfo<Object>> stageData,
                                          Dag dag, Set<String> branchers, Set<String> shufflers,
                                          boolean hasErrors, boolean hasAlerts) {
  builder.setRawData(stageData);
  // cache the stage output if it will be consumed by multiple branches or feeds a shuffle
  if (shouldCache(dag, stageSpec.getName(), branchers, shufflers)) {
    stageData = stageData.cache();
  }
  if (hasErrors) {
    // pull the error records out of the stage output
    SparkCollection<ErrorRecord<Object>> errors = stageData.flatMap(stageSpec, new ErrorPassFilter<Object>());
    builder.setErrors(errors);
  }
  if (hasAlerts) {
    // pull the alerts out of the stage output
    SparkCollection<Alert> alerts = stageData.flatMap(stageSpec, new AlertPassFilter());
    builder.setAlerts(alerts);
  }
  if (SplitterTransform.PLUGIN_TYPE.equals(stageSpec.getPluginType())) {
    // set collections for each port, implemented as a filter on the port.
    for (StageSpec.Port portSpec : stageSpec.getOutputPorts().values()) {
      String port = portSpec.getPort();
      SparkCollection<Object> portData = filterPortRecords(stageSpec, stageData, port);
      builder.addPort(port, portData);
    }
  } else {
    // non-splitter stages have a single output collection
    SparkCollection<Object> outputs = filterPortRecords(stageSpec, stageData, null);
    builder.setOutput(outputs);
  }
  return builder;
}
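For reference, AlertPassFilter is the function passed to flatMap above to keep only the alert records emitted by the stage. The sketch below shows how such a pass-through filter could look, assuming Spark 2.x's FlatMapFunction signature and assuming RecordInfo exposes its RecordType and payload; the class body and the CDAP package paths are illustrative assumptions, not a copy of the CDAP source.

import java.util.Collections;
import java.util.Iterator;
import org.apache.spark.api.java.function.FlatMapFunction;
// CDAP types used by the surrounding method; package paths assumed for illustration
import io.cdap.cdap.etl.api.Alert;
import io.cdap.cdap.etl.common.RecordInfo;
import io.cdap.cdap.etl.common.RecordType;

// Minimal sketch (assumed API): emit the Alert payload of records marked as alerts, drop everything else.
public class AlertPassFilter implements FlatMapFunction<RecordInfo<Object>, Alert> {

  @Override
  public Iterator<Alert> call(RecordInfo<Object> recordInfo) throws Exception {
    if (recordInfo.getType() == RecordType.ALERT) {
      // the record wraps an Alert; pass it through on its own
      return Collections.singletonList((Alert) recordInfo.getValue()).iterator();
    }
    // not an alert record; emit nothing
    return Collections.emptyIterator();
  }
}

ErrorPassFilter in the same method plays the analogous role for error records, which is why both are applied through the same flatMap call pattern on the stage output.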