Search in sources :

Example 1 with PortablePipelineResult

use of org.apache.beam.runners.jobsubmission.PortablePipelineResult in project beam by apache.

From the class SparkPipelineRunner, the method run:

/**
 * Translates the given portable pipeline proto and runs it on Spark, in either batch or
 * streaming mode depending on the pipeline options and whether the graph contains unbounded
 * PCollections.
 *
 * @param pipeline the portable pipeline proto to execute
 * @param jobInfo job metadata (id, retrieval token, pipeline options) for this submission
 * @return a {@link PortablePipelineResult} that callers can use to await or cancel the job
 */
@Override
public PortablePipelineResult run(RunnerApi.Pipeline pipeline, JobInfo jobInfo) {
    SparkPortablePipelineTranslator translator;
    boolean isStreaming = pipelineOptions.isStreaming() || hasUnboundedPCollections(pipeline);
    if (isStreaming) {
        translator = new SparkStreamingPortablePipelineTranslator();
    } else {
        translator = new SparkBatchPortablePipelineTranslator();
    }
    // Expand any splittable DoFns within the graph to enable sizing and splitting of bundles.
    Pipeline pipelineWithSdfExpanded = ProtoOverrides.updateTransform(PTransformTranslation.PAR_DO_TRANSFORM_URN, pipeline, SplittableParDoExpander.createSizedReplacement());
    // Don't let the fuser fuse any subcomponents of native transforms.
    Pipeline trimmedPipeline = TrivialNativeTransformExpander.forKnownUrns(pipelineWithSdfExpanded, translator.knownUrns());
    // Fused pipeline proto. If the graph already contains executable stages, skip fusion.
    // TODO: Consider supporting partially-fused graphs.
    RunnerApi.Pipeline fusedPipeline = trimmedPipeline.getComponents().getTransformsMap().values().stream().anyMatch(proto -> ExecutableStage.URN.equals(proto.getSpec().getUrn())) ? trimmedPipeline : GreedyPipelineFuser.fuse(trimmedPipeline).toPipeline();
    prepareFilesToStage(pipelineOptions);
    PortablePipelineResult result;
    final JavaSparkContext jsc = SparkContextFactory.getSparkContext(pipelineOptions);
    final long startTime = Instant.now().getMillis();
    EventLoggingListener eventLoggingListener = startEventLoggingListener(jsc, pipelineOptions, startTime);
    // Initialize accumulators.
    AggregatorsAccumulator.init(pipelineOptions, jsc);
    MetricsEnvironment.setMetricsSupported(true);
    MetricsAccumulator.init(pipelineOptions, jsc);
    final SparkTranslationContext context = translator.createTranslationContext(jsc, pipelineOptions, jobInfo);
    final ExecutorService executorService = Executors.newSingleThreadExecutor();
    // Parameterized logging avoids eager String.format work when INFO is disabled.
    LOG.info("Running job {} on Spark master {}", jobInfo.jobId(), jsc.master());
    if (isStreaming) {
        final JavaStreamingContext jssc = ((SparkStreamingTranslationContext) context).getStreamingContext();
        jssc.addStreamingListener(new JavaStreamingListenerWrapper(new AggregatorsAccumulator.AccumulatorCheckpointingSparkListener()));
        jssc.addStreamingListener(new JavaStreamingListenerWrapper(new MetricsAccumulator.AccumulatorCheckpointingSparkListener()));
        // Register user-defined listeners.
        for (JavaStreamingListener listener : pipelineOptions.as(SparkContextOptions.class).getListeners()) {
            // Fix: pass the class name as a logging argument; the original concatenated it onto
            // the pattern, so the "{}" placeholder was never substituted.
            LOG.info("Registered listener {}.", listener.getClass().getSimpleName());
            jssc.addStreamingListener(new JavaStreamingListenerWrapper(listener));
        }
        // Register Watermarks listener to broadcast the advanced WMs.
        jssc.addStreamingListener(new JavaStreamingListenerWrapper(new GlobalWatermarkHolder.WatermarkAdvancingStreamingListener()));
        jssc.checkpoint(pipelineOptions.getCheckpointDir());
        // Obtain timeout from options.
        Long timeout = pipelineOptions.as(SparkPortableStreamingPipelineOptions.class).getStreamingTimeoutMs();
        final Future<?> submissionFuture = executorService.submit(() -> {
            translator.translate(fusedPipeline, context);
            LOG.info("Job {}: Pipeline translated successfully. Computing outputs", jobInfo.jobId());
            context.computeOutputs();
            jssc.start();
            try {
                jssc.awaitTerminationOrTimeout(timeout);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so code further up the stack can observe it.
                Thread.currentThread().interrupt();
                LOG.warn("Streaming context interrupted, shutting down.", e);
            }
            jssc.stop();
            LOG.info("Job {} finished.", jobInfo.jobId());
        });
        result = new SparkPipelineResult.PortableStreamingMode(submissionFuture, jssc);
    } else {
        final Future<?> submissionFuture = executorService.submit(() -> {
            translator.translate(fusedPipeline, context);
            LOG.info("Job {}: Pipeline translated successfully. Computing outputs", jobInfo.jobId());
            context.computeOutputs();
            LOG.info("Job {} finished.", jobInfo.jobId());
        });
        result = new SparkPipelineResult.PortableBatchMode(submissionFuture, jsc);
    }
    // Stop accepting new tasks; the submitted job keeps running until completion.
    executorService.shutdown();
    result.waitUntilFinish();
    MetricsPusher metricsPusher = new MetricsPusher(MetricsAccumulator.getInstance().value(), pipelineOptions.as(MetricsOptions.class), result);
    metricsPusher.start();
    if (eventLoggingListener != null) {
        eventLoggingListener.onApplicationStart(SparkCompat.buildSparkListenerApplicationStart(jsc, pipelineOptions, startTime, result));
        eventLoggingListener.onApplicationEnd(new SparkListenerApplicationEnd(Instant.now().getMillis()));
        eventLoggingListener.stop();
    }
    return result;
}
Also used : MetricsAccumulator(org.apache.beam.runners.spark.metrics.MetricsAccumulator) ArtifactApi(org.apache.beam.model.jobmanagement.v1.ArtifactApi) LoggerFactory(org.slf4j.LoggerFactory) GreedyPipelineFuser(org.apache.beam.runners.core.construction.graph.GreedyPipelineFuser) PortablePipelineRunner(org.apache.beam.runners.jobsubmission.PortablePipelineRunner) SparkCompat(org.apache.beam.runners.spark.util.SparkCompat) Future(java.util.concurrent.Future) JobInfo(org.apache.beam.runners.fnexecution.provisioning.JobInfo) SparkListenerApplicationEnd(org.apache.spark.scheduler.SparkListenerApplicationEnd) SparkStreamingPortablePipelineTranslator(org.apache.beam.runners.spark.translation.SparkStreamingPortablePipelineTranslator) CmdLineParser(org.kohsuke.args4j.CmdLineParser) PTransformTranslation(org.apache.beam.runners.core.construction.PTransformTranslation) SparkPortablePipelineTranslator(org.apache.beam.runners.spark.translation.SparkPortablePipelineTranslator) Struct(org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.Struct) UUID(java.util.UUID) TrivialNativeTransformExpander(org.apache.beam.runners.core.construction.graph.TrivialNativeTransformExpander) Option(org.kohsuke.args4j.Option) ExecutableStage(org.apache.beam.runners.core.construction.graph.ExecutableStage) Executors(java.util.concurrent.Executors) MetricsPusher(org.apache.beam.runners.core.metrics.MetricsPusher) CmdLineException(org.kohsuke.args4j.CmdLineException) ProtoOverrides(org.apache.beam.runners.core.construction.graph.ProtoOverrides) AggregatorsAccumulator(org.apache.beam.runners.spark.aggregators.AggregatorsAccumulator) JavaStreamingContext(org.apache.spark.streaming.api.java.JavaStreamingContext) JavaSparkContext(org.apache.spark.api.java.JavaSparkContext) PipelineOptionsTranslation(org.apache.beam.runners.core.construction.PipelineOptionsTranslation) PipelineOptionsFactory(org.apache.beam.sdk.options.PipelineOptionsFactory) 
SparkCommon.startEventLoggingListener(org.apache.beam.runners.spark.util.SparkCommon.startEventLoggingListener) SparkBatchPortablePipelineTranslator(org.apache.beam.runners.spark.translation.SparkBatchPortablePipelineTranslator) PortablePipelineResult(org.apache.beam.runners.jobsubmission.PortablePipelineResult) SparkTranslationContext(org.apache.beam.runners.spark.translation.SparkTranslationContext) PipelineTranslatorUtils.hasUnboundedPCollections(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.hasUnboundedPCollections) GlobalWatermarkHolder(org.apache.beam.runners.spark.util.GlobalWatermarkHolder) JavaStreamingListenerWrapper(org.apache.spark.streaming.api.java.JavaStreamingListenerWrapper) ExecutorService(java.util.concurrent.ExecutorService) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) JavaStreamingListener(org.apache.spark.streaming.api.java.JavaStreamingListener) Logger(org.slf4j.Logger) PortablePipelineJarUtils(org.apache.beam.runners.jobsubmission.PortablePipelineJarUtils) SparkStreamingTranslationContext(org.apache.beam.runners.spark.translation.SparkStreamingTranslationContext) SparkContextFactory(org.apache.beam.runners.spark.translation.SparkContextFactory) SplittableParDoExpander(org.apache.beam.runners.core.construction.graph.SplittableParDoExpander) MetricsEnvironment(org.apache.beam.sdk.metrics.MetricsEnvironment) MetricsOptions(org.apache.beam.sdk.metrics.MetricsOptions) Pipeline(org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline) SparkCommonPipelineOptions.prepareFilesToStage(org.apache.beam.runners.spark.SparkCommonPipelineOptions.prepareFilesToStage) Preconditions(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions) Instant(org.joda.time.Instant) Nullable(edu.umd.cs.findbugs.annotations.Nullable) EventLoggingListener(org.apache.spark.scheduler.EventLoggingListener) FileSystems(org.apache.beam.sdk.io.FileSystems) MetricsOptions(org.apache.beam.sdk.metrics.MetricsOptions) 
SparkPortablePipelineTranslator(org.apache.beam.runners.spark.translation.SparkPortablePipelineTranslator) SparkBatchPortablePipelineTranslator(org.apache.beam.runners.spark.translation.SparkBatchPortablePipelineTranslator) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) JavaStreamingContext(org.apache.spark.streaming.api.java.JavaStreamingContext) SparkListenerApplicationEnd(org.apache.spark.scheduler.SparkListenerApplicationEnd) PortablePipelineResult(org.apache.beam.runners.jobsubmission.PortablePipelineResult) JavaSparkContext(org.apache.spark.api.java.JavaSparkContext) MetricsPusher(org.apache.beam.runners.core.metrics.MetricsPusher) SparkCommon.startEventLoggingListener(org.apache.beam.runners.spark.util.SparkCommon.startEventLoggingListener) EventLoggingListener(org.apache.spark.scheduler.EventLoggingListener) SparkTranslationContext(org.apache.beam.runners.spark.translation.SparkTranslationContext) SparkStreamingPortablePipelineTranslator(org.apache.beam.runners.spark.translation.SparkStreamingPortablePipelineTranslator) JavaStreamingListenerWrapper(org.apache.spark.streaming.api.java.JavaStreamingListenerWrapper) Pipeline(org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline) JavaStreamingListener(org.apache.spark.streaming.api.java.JavaStreamingListener) Pipeline(org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline) SparkStreamingTranslationContext(org.apache.beam.runners.spark.translation.SparkStreamingTranslationContext) ExecutorService(java.util.concurrent.ExecutorService)

Example 2 with PortablePipelineResult

use of org.apache.beam.runners.jobsubmission.PortablePipelineResult in project beam by apache.

From the class FlinkPipelineRunner, the method runPipelineWithTranslator:

/**
 * Prepares the portable pipeline proto (SDF expansion, native-transform trimming, fusion) and
 * executes it as a Flink program via the supplied translator.
 *
 * @param pipeline the portable pipeline proto to execute
 * @param jobInfo job metadata used to build the translation context
 * @param translator batch or streaming Flink translator for this pipeline
 * @return a {@link PortablePipelineResult} wrapping the Flink job execution result
 * @throws Exception if translation or Flink job execution fails
 */
private <T extends FlinkPortablePipelineTranslator.TranslationContext> PortablePipelineResult runPipelineWithTranslator(final Pipeline pipeline, JobInfo jobInfo, FlinkPortablePipelineTranslator<T> translator) throws Exception {
    LOG.info("Translating pipeline to Flink program.");
    // Expand any splittable ParDos within the graph to enable sizing and splitting of bundles.
    Pipeline expanded =
        ProtoOverrides.updateTransform(
            PTransformTranslation.PAR_DO_TRANSFORM_URN,
            pipeline,
            SplittableParDoExpander.createSizedReplacement());
    // Keep the fuser away from the internals of native transforms.
    Pipeline trimmed = TrivialNativeTransformExpander.forKnownUrns(expanded, translator.knownUrns());
    // Fuse the graph unless it already contains executable stages.
    // TODO: Consider supporting partially-fused graphs.
    boolean alreadyFused =
        trimmed.getComponents().getTransformsMap().values().stream()
            .anyMatch(transform -> ExecutableStage.URN.equals(transform.getSpec().getUrn()));
    RunnerApi.Pipeline fused = alreadyFused ? trimmed : GreedyPipelineFuser.fuse(trimmed).toPipeline();
    // Translate to a Flink program and run it synchronously.
    T translationContext =
        translator.createTranslationContext(jobInfo, pipelineOptions, confDir, filesToStage);
    FlinkPortablePipelineTranslator.Executor executor = translator.translate(translationContext, fused);
    final JobExecutionResult jobResult = executor.execute(pipelineOptions.getJobName());
    return createPortablePipelineResult(jobResult, pipelineOptions);
}
Also used : ArtifactApi(org.apache.beam.model.jobmanagement.v1.ArtifactApi) LoggerFactory(org.slf4j.LoggerFactory) PipelineOptionsTranslation(org.apache.beam.runners.core.construction.PipelineOptionsTranslation) GreedyPipelineFuser(org.apache.beam.runners.core.construction.graph.GreedyPipelineFuser) PipelineOptionsFactory(org.apache.beam.sdk.options.PipelineOptionsFactory) PortablePipelineRunner(org.apache.beam.runners.jobsubmission.PortablePipelineRunner) Map(java.util.Map) JobInfo(org.apache.beam.runners.fnexecution.provisioning.JobInfo) PortablePipelineResult(org.apache.beam.runners.jobsubmission.PortablePipelineResult) PipelineTranslatorUtils.hasUnboundedPCollections(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.hasUnboundedPCollections) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) Nullable(org.checkerframework.checker.nullness.qual.Nullable) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) CmdLineParser(org.kohsuke.args4j.CmdLineParser) PTransformTranslation(org.apache.beam.runners.core.construction.PTransformTranslation) Logger(org.slf4j.Logger) PortablePipelineJarUtils(org.apache.beam.runners.jobsubmission.PortablePipelineJarUtils) PipelineResources.detectClassPathResourcesToStage(org.apache.beam.runners.core.construction.resources.PipelineResources.detectClassPathResourcesToStage) Struct(org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.Struct) UUID(java.util.UUID) TrivialNativeTransformExpander(org.apache.beam.runners.core.construction.graph.TrivialNativeTransformExpander) Option(org.kohsuke.args4j.Option) ExecutableStage(org.apache.beam.runners.core.construction.graph.ExecutableStage) SplittableParDoExpander(org.apache.beam.runners.core.construction.graph.SplittableParDoExpander) JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) MetricsPusher(org.apache.beam.runners.core.metrics.MetricsPusher) MetricsEnvironment(org.apache.beam.sdk.metrics.MetricsEnvironment) 
MetricsOptions(org.apache.beam.sdk.metrics.MetricsOptions) CmdLineException(org.kohsuke.args4j.CmdLineException) List(java.util.List) Pipeline(org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline) ProtoOverrides(org.apache.beam.runners.core.construction.graph.ProtoOverrides) Preconditions(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions) FileSystems(org.apache.beam.sdk.io.FileSystems) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) Pipeline(org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline) Pipeline(org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline)

Example 3 with PortablePipelineResult

use of org.apache.beam.runners.jobsubmission.PortablePipelineResult in project beam by apache.

From the class SamzaPipelineRunner, the method run:

/**
 * Prepares the portable pipeline proto (SDF expansion, native-transform trimming, fusion) and
 * submits it through a {@link SamzaRunner}. Blocks until completion when running in a local
 * execution environment.
 *
 * @param pipeline the portable pipeline proto to execute
 * @param jobInfo job metadata for this submission
 * @return a {@link PortablePipelineResult} for the submitted Samza job
 */
@Override
public PortablePipelineResult run(final RunnerApi.Pipeline pipeline, JobInfo jobInfo) {
    // Expand any splittable DoFns within the graph to enable sizing and splitting of bundles.
    RunnerApi.Pipeline expanded =
        ProtoOverrides.updateTransform(
            PTransformTranslation.PAR_DO_TRANSFORM_URN,
            pipeline,
            SplittableParDoExpander.createSizedReplacement());
    // Keep the fuser away from the internals of native transforms.
    RunnerApi.Pipeline trimmed =
        TrivialNativeTransformExpander.forKnownUrns(expanded, SamzaPortablePipelineTranslator.knownUrns());
    // Fuse the graph unless it already contains executable stages.
    // TODO: Consider supporting partially-fused graphs.
    boolean hasExecutableStages =
        trimmed.getComponents().getTransformsMap().values().stream()
            .anyMatch(transform -> ExecutableStage.URN.equals(transform.getSpec().getUrn()));
    RunnerApi.Pipeline fused = hasExecutableStages ? trimmed : GreedyPipelineFuser.fuse(trimmed).toPipeline();
    LOG.info("Portable pipeline to run:");
    LOG.info(PipelineDotRenderer.toDotString(fused));
    // The pipeline options coming from the SDK carry an SDK-specific runner that would break
    // serialization, so reset the runner to a valid Java runner before submitting.
    options.setRunner(SamzaRunner.class);
    try {
        final SamzaRunner samzaRunner = SamzaRunner.fromOptions(options);
        final PortablePipelineResult result = samzaRunner.runPortablePipeline(fused, jobInfo);
        final SamzaExecutionEnvironment env = options.getSamzaExecutionEnvironment();
        if (env == SamzaExecutionEnvironment.LOCAL || env == SamzaExecutionEnvironment.STANDALONE) {
            // Make run() sync for local mode
            result.waitUntilFinish();
        }
        return result;
    } catch (Exception e) {
        throw new RuntimeException("Failed to invoke samza job", e);
    }
}
Also used : RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) PTransformTranslation(org.apache.beam.runners.core.construction.PTransformTranslation) PipelineDotRenderer(org.apache.beam.runners.core.construction.renderer.PipelineDotRenderer) Logger(org.slf4j.Logger) LoggerFactory(org.slf4j.LoggerFactory) GreedyPipelineFuser(org.apache.beam.runners.core.construction.graph.GreedyPipelineFuser) TrivialNativeTransformExpander(org.apache.beam.runners.core.construction.graph.TrivialNativeTransformExpander) ExecutableStage(org.apache.beam.runners.core.construction.graph.ExecutableStage) SamzaPortablePipelineTranslator(org.apache.beam.runners.samza.translation.SamzaPortablePipelineTranslator) PortablePipelineRunner(org.apache.beam.runners.jobsubmission.PortablePipelineRunner) SplittableParDoExpander(org.apache.beam.runners.core.construction.graph.SplittableParDoExpander) ProtoOverrides(org.apache.beam.runners.core.construction.graph.ProtoOverrides) JobInfo(org.apache.beam.runners.fnexecution.provisioning.JobInfo) PortablePipelineResult(org.apache.beam.runners.jobsubmission.PortablePipelineResult) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) PortablePipelineResult(org.apache.beam.runners.jobsubmission.PortablePipelineResult)

Example 4 with PortablePipelineResult

use of org.apache.beam.runners.jobsubmission.PortablePipelineResult in project beam by apache.

From the class SamzaRunner, the method runPortablePipeline:

/**
 * Builds the Samza job configuration for the given portable pipeline, launches it as a Samza
 * {@link StreamApplication}, and returns a result handle for the running job.
 *
 * @param pipeline the portable pipeline proto to execute
 * @param jobInfo job metadata passed through to the translation context
 * @return a {@link PortablePipelineResult} wrapping the running Samza application
 */
public PortablePipelineResult runPortablePipeline(RunnerApi.Pipeline pipeline, JobInfo jobInfo) {
    final String dotGraph = PipelineDotRenderer.toDotString(pipeline);
    LOG.info("Portable pipeline to run DOT graph:\n{}", dotGraph);
    // Derive the Samza job configuration from the pipeline and user options.
    final ConfigBuilder builder = new ConfigBuilder(options);
    SamzaPortablePipelineTranslator.createConfig(pipeline, builder, options);
    builder.put(BEAM_DOT_GRAPH, dotGraph);
    final Config config = builder.build();
    options.setConfigOverride(config);
    // Notify the lifecycle listener, if one was registered, before launching.
    if (listener != null) {
        listener.onInit(config, options);
    }
    final SamzaExecutionContext execContext = new SamzaExecutionContext(options);
    final Map<String, MetricsReporterFactory> reporters = getMetricsReporters();
    // The application body: wire the execution context and metrics reporters into the
    // descriptor, then translate the portable pipeline into the Samza application graph.
    final StreamApplication app =
        appDescriptor -> {
            appDescriptor
                .withApplicationContainerContextFactory(execContext.new Factory())
                .withMetricsReporterFactories(reporters);
            SamzaPortablePipelineTranslator.translate(
                pipeline, new PortableTranslationContext(appDescriptor, options, jobInfo));
        };
    final ApplicationRunner appRunner = runSamzaApp(app, config);
    return new SamzaPortablePipelineResult(app, appRunner, execContext, listener, config);
}
Also used : PViewToIdMapper(org.apache.beam.runners.samza.translation.PViewToIdMapper) PortableTranslationContext(org.apache.beam.runners.samza.translation.PortableTranslationContext) ExperimentalOptions(org.apache.beam.sdk.options.ExperimentalOptions) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) PipelineJsonRenderer(org.apache.beam.runners.samza.util.PipelineJsonRenderer) PipelineRunner(org.apache.beam.sdk.PipelineRunner) Map(java.util.Map) SamzaPipelineTranslator(org.apache.beam.runners.samza.translation.SamzaPipelineTranslator) MetricsReporter(org.apache.samza.metrics.MetricsReporter) MetricsReporterFactory(org.apache.samza.metrics.MetricsReporterFactory) JobInfo(org.apache.beam.runners.fnexecution.provisioning.JobInfo) PortablePipelineResult(org.apache.beam.runners.jobsubmission.PortablePipelineResult) Pipeline(org.apache.beam.sdk.Pipeline) Iterators(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterators) ApplicationRunners(org.apache.samza.runtime.ApplicationRunners) ExternalContext(org.apache.samza.context.ExternalContext) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) ApplicationRunner(org.apache.samza.runtime.ApplicationRunner) PipelineDotRenderer(org.apache.beam.runners.core.construction.renderer.PipelineDotRenderer) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) TranslationContext(org.apache.beam.runners.samza.translation.TranslationContext) ServiceLoader(java.util.ServiceLoader) SplittableParDo(org.apache.beam.runners.core.construction.SplittableParDo) SamzaPortablePipelineTranslator(org.apache.beam.runners.samza.translation.SamzaPortablePipelineTranslator) PipelineOptionsValidator(org.apache.beam.sdk.options.PipelineOptionsValidator) MetricsEnvironment(org.apache.beam.sdk.metrics.MetricsEnvironment) PValue(org.apache.beam.sdk.values.PValue) ConfigBuilder(org.apache.beam.runners.samza.translation.ConfigBuilder) 
Config(org.apache.samza.config.Config) SamzaTransformOverrides(org.apache.beam.runners.samza.translation.SamzaTransformOverrides) StreamApplication(org.apache.samza.application.StreamApplication) Collections(java.util.Collections) ApplicationRunner(org.apache.samza.runtime.ApplicationRunner) Config(org.apache.samza.config.Config) StreamApplication(org.apache.samza.application.StreamApplication) MetricsReporterFactory(org.apache.samza.metrics.MetricsReporterFactory) ConfigBuilder(org.apache.beam.runners.samza.translation.ConfigBuilder) LoggerFactory(org.slf4j.LoggerFactory) MetricsReporterFactory(org.apache.samza.metrics.MetricsReporterFactory) PortableTranslationContext(org.apache.beam.runners.samza.translation.PortableTranslationContext)

Aggregations

RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi)4 JobInfo (org.apache.beam.runners.fnexecution.provisioning.JobInfo)4 PortablePipelineResult (org.apache.beam.runners.jobsubmission.PortablePipelineResult)4 PTransformTranslation (org.apache.beam.runners.core.construction.PTransformTranslation)3 ExecutableStage (org.apache.beam.runners.core.construction.graph.ExecutableStage)3 GreedyPipelineFuser (org.apache.beam.runners.core.construction.graph.GreedyPipelineFuser)3 ProtoOverrides (org.apache.beam.runners.core.construction.graph.ProtoOverrides)3 SplittableParDoExpander (org.apache.beam.runners.core.construction.graph.SplittableParDoExpander)3 TrivialNativeTransformExpander (org.apache.beam.runners.core.construction.graph.TrivialNativeTransformExpander)3 PortablePipelineRunner (org.apache.beam.runners.jobsubmission.PortablePipelineRunner)3 MetricsEnvironment (org.apache.beam.sdk.metrics.MetricsEnvironment)3 Logger (org.slf4j.Logger)3 LoggerFactory (org.slf4j.LoggerFactory)3 Map (java.util.Map)2 UUID (java.util.UUID)2 ArtifactApi (org.apache.beam.model.jobmanagement.v1.ArtifactApi)2 Pipeline (org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline)2 PipelineOptionsTranslation (org.apache.beam.runners.core.construction.PipelineOptionsTranslation)2 PipelineDotRenderer (org.apache.beam.runners.core.construction.renderer.PipelineDotRenderer)2 MetricsPusher (org.apache.beam.runners.core.metrics.MetricsPusher)2