Use of org.apache.spark.scheduler.cluster.ExecutorInfo in project beam by apache.
From the class SparkCommon, method startEventLoggingListener:
/**
* Starts an EventLoggingListener to save Beam metrics to Spark's History Server if event
* logging is enabled.
*
* @return The associated EventLoggingListener or null if it could not be started.
*/
@Nullable
public static EventLoggingListener startEventLoggingListener(
    final JavaSparkContext jsc, SparkPipelineOptions pipelineOptions, long startTime) {
  EventLoggingListener eventLoggingListener = null;
  try {
    if (jsc.getConf().getBoolean("spark.eventLog.enabled", false)) {
      eventLoggingListener =
          new EventLoggingListener(
              jsc.getConf().getAppId(),
              scala.Option.apply("1"), // application attempt id
              new URI(jsc.getConf().get("spark.eventLog.dir", null)),
              jsc.getConf(),
              jsc.hadoopConfiguration());
      eventLoggingListener.initializeLogIfNecessary(false, false);
      eventLoggingListener.start();
      scala.collection.immutable.Map<String, String> logUrlMap =
          new scala.collection.immutable.HashMap<>();
      // getAllWithPrefix strips the prefix from matching keys, so sparkMasters[0]._2()
      // holds the value of "spark.master" itself.
      Tuple2<String, String>[] sparkMasters = jsc.getConf().getAllWithPrefix("spark.master");
      Tuple2<String, String>[] sparkExecutors =
          jsc.getConf().getAllWithPrefix("spark.executor.id");
      // Replay a SparkListenerExecutorAdded event for each known executor so the
      // History Server can attribute the logged metrics to it.
      for (Tuple2<String, String> sparkExecutor : sparkExecutors) {
        eventLoggingListener.onExecutorAdded(
            new SparkListenerExecutorAdded(
                startTime,
                sparkExecutor._2(),
                new ExecutorInfo(sparkMasters[0]._2(), 0, logUrlMap)));
      }
      return eventLoggingListener;
    }
  } catch (URISyntaxException e) {
    throw new RuntimeException(
        "The URI syntax in the Spark config \"spark.eventLog.dir\" is not correct", e);
  }
  return eventLoggingListener;
}
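A minimal usage sketch for context, assuming Spark event logging is configured on the driver. The SparkConf keys are standard Spark settings, but the class name, master, app name, and log directory below are illustrative only, not part of the Beam source. The listener writes an in-progress log file, so the caller should stop it when the pipeline finishes to finalize the file for the History Server.

import java.time.Instant;
import org.apache.beam.runners.spark.SparkPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.scheduler.EventLoggingListener;

public class StartEventLoggingExample {
  public static void main(String[] args) {
    // Hypothetical driver-side setup: event logging must be enabled and the
    // log directory must be a valid URI, or the helper returns null.
    SparkConf conf =
        new SparkConf()
            .setMaster("local[2]")
            .setAppName("beam-on-spark")
            .set("spark.eventLog.enabled", "true")
            .set("spark.eventLog.dir", "file:///tmp/spark-events");
    JavaSparkContext jsc = new JavaSparkContext(conf);
    SparkPipelineOptions options = PipelineOptionsFactory.as(SparkPipelineOptions.class);

    EventLoggingListener listener =
        SparkCommon.startEventLoggingListener(jsc, options, Instant.now().toEpochMilli());
    try {
      // ... run the Beam pipeline on this context ...
    } finally {
      if (listener != null) {
        // Closes the log writer and renames the ".inprogress" file so the
        // History Server lists the finished application.
        listener.stop();
      }
      jsc.stop();
    }
  }
}

The executor-replay loop in the helper compensates for the listener being started after the executors have already registered: without the replayed SparkListenerExecutorAdded events, the History Server would have no record of those executors.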