Use of com.microsoft.azure.hdinsight.spark.run.action.SparkBatchJobDisconnectAction in project azure-tools-for-java by Microsoft.
The class SparkBatchJobRunner, method doExecute:
@Nullable
@Override
protected RunContentDescriptor doExecute(@NotNull RunProfileState state,
                                         @NotNull ExecutionEnvironment environment) throws ExecutionException {
    final SparkBatchRemoteRunProfileState submissionState = (SparkBatchRemoteRunProfileState) state;
    final SparkSubmitModel submitModel = submissionState.getSubmitModel();
    final Project project = submitModel.getProject();

    // Prepare the run table console view UI
    final SparkJobLogConsoleView jobOutputView = new SparkJobLogConsoleView(project);

    final String artifactPath = submitModel.getArtifactPath().orElse(null);
    assert artifactPath != null
            : "artifactPath should be checked in LivySparkBatchJobRunConfiguration::checkSubmissionConfigurationBeforeRun";

    // To address issue https://github.com/microsoft/azure-tools-for-java/issues/4021.
    // When the user clicks the rerun button, the legacy ctrlSubject is still in use, although it has
    // already sent its "onComplete" message when the previous job finished. To avoid this, we clone a
    // new Spark batch job instance to re-initialize everything in the object (see the PublishSubject
    // sketch after this method).
    final ISparkBatchJob sparkBatch = submissionState.getSparkBatch().clone();
    final PublishSubject<SparkLogLine> ctrlSubject = (PublishSubject<SparkLogLine>) sparkBatch.getCtrlSubject();
    final SparkBatchJobRemoteProcess remoteProcess = new SparkBatchJobRemoteProcess(
            new IdeaSchedulers(project),
            sparkBatch,
            artifactPath,
            submitModel.getSubmissionParameter().getMainClassName(),
            ctrlSubject);
    final SparkBatchJobRunProcessHandler processHandler = new SparkBatchJobRunProcessHandler(
            remoteProcess, "Package and deploy the job to Spark cluster", null);

    // After attaching, the console view can read the process input streams and display them
    jobOutputView.attachToProcess(processHandler);

    remoteProcess.start();

    final Operation operation = environment.getUserData(TelemetryKeys.OPERATION);

    // After we define a new AnAction class, IntelliJ constructs a single AnAction instance for us.
    // Reusing that one action instance keeps behaviours like isEnabled() consistent (see the action
    // sketch at the end of this page).
    final SparkBatchJobDisconnectAction disconnectAction = (SparkBatchJobDisconnectAction)
            ActionManager.getInstance().getAction("Actions.SparkJobDisconnect");
    disconnectAction.init(remoteProcess, operation);

    sendTelemetryForParameters(submitModel, operation);

    final ExecutionResult result = new DefaultExecutionResult(
            jobOutputView, processHandler, Separator.getInstance(), disconnectAction);
    submissionState.setExecutionResult(result);

    final ConsoleView consoleView = jobOutputView.getSecondaryConsoleView();
    submissionState.setConsoleView(consoleView);
    addConsoleViewFilter(remoteProcess.getSparkJob(), consoleView);

    submissionState.setRemoteProcessCtrlLogHandler(processHandler);

    // Disable the disconnect action once the control subject terminates, whether with an error or normally
    ctrlSubject.subscribe(
            messageWithType -> { },
            err -> disconnectAction.setEnabled(false),
            () -> disconnectAction.setEnabled(false));

    return super.doExecute(state, environment);
}
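The rerun fix above hinges on PublishSubject's terminal-state semantics. A minimal sketch, assuming RxJava 1.x (the rx.subjects package style matching the three-argument subscribe seen above), shows why a subject that has already completed cannot be reused for a second run; the class name and log strings are hypothetical:

import rx.subjects.PublishSubject;

public class CompletedSubjectDemo {
    public static void main(String[] args) {
        final PublishSubject<String> ctrlSubject = PublishSubject.create();
        ctrlSubject.subscribe(
                line -> System.out.println("got: " + line),
                err -> System.err.println("error: " + err),
                () -> System.out.println("completed"));

        ctrlSubject.onNext("first run log line");  // delivered to the subscriber
        ctrlSubject.onCompleted();                 // the job finished; the subject is now terminated

        // A rerun that reused the same subject would emit into the void: PublishSubject
        // ignores onNext after a terminal event, so no rerun logs would ever show up.
        ctrlSubject.onNext("rerun log line");      // silently dropped
    }
}

This is why the runner clones the ISparkBatchJob: the clone carries a fresh, un-terminated ctrlSubject for the new run.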
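The disconnect action is fetched by id rather than constructed, because ActionManager hands back the single instance registered under "Actions.SparkJobDisconnect". A minimal sketch of that pattern, with a hypothetical class name and simplified state (the real SparkBatchJobDisconnectAction also tracks the remote process and the telemetry operation):

import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;

public class DisconnectActionSketch extends AnAction {
    // Mutable state lives on the single registered instance, so every toolbar and
    // menu that resolves the action by id observes the same enabled flag.
    private volatile boolean enabled = false;

    public void setEnabled(final boolean enabled) {
        this.enabled = enabled;
    }

    @Override
    public void update(final AnActionEvent e) {
        // IntelliJ calls update() on the shared instance before rendering the action
        e.getPresentation().setEnabled(enabled);
    }

    @Override
    public void actionPerformed(final AnActionEvent e) {
        // Disconnect from the remote Spark job here
    }
}

Constructing a second instance by hand would split the state: the copy shown in the UI would never see the setEnabled(false) calls made from the ctrlSubject subscription above.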