use of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in project azure-tools-for-java by Microsoft.
the class HDInsightRootModuleImpl method refreshWithoutAsync.
@Override
public void refreshWithoutAsync() {
    synchronized (this) {
        removeAllChildNodes();
        clusterDetailList = ClusterManagerEx.getInstance().getClusterDetailsWithoutAsync(getProject());
        if (clusterDetailList != null) {
            for (IClusterDetail clusterDetail : clusterDetailList) {
                addChildNode(new ClusterNode(this, clusterDetail));
            }
        }
    }
}
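For orientation, the following is a minimal sketch of the IClusterDetail surface that the snippets on this page actually touch. It is reconstructed from these call sites, not copied from the project; the real interface in azure-tools-for-java exposes considerably more (connection URL, credentials, Spark version, and so on).

// Sketch only: inferred from the usages shown on this page, not the actual
// com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail definition.
public interface IClusterDetail {
    // The only member these snippets invoke directly; used as the cluster title
    // and as the submission target name.
    String getName();
}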
use of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in project azure-tools-for-java by Microsoft.
the class SparkBatchJobDebuggerRunner method execute.
@Override
protected void execute(@NotNull ExecutionEnvironment environment,
                       @Nullable Callback callback,
                       @NotNull RunProfileState state) throws ExecutionException {
    SparkBatchJobSubmissionState submissionState = (SparkBatchJobSubmissionState) state;
    SparkSubmitModel submitModel = submissionState.getSubmitModel();
    SparkSubmissionParameter submissionParameter = submitModel.getSubmissionParameter();
    IClusterDetail clusterDetail = submitModel.getSelectedClusterDetail();
    Map<String, String> postEventProperty = new HashMap<>();

    submitModel.buildArtifactObservable(submissionParameter.getArtifactName())
            .flatMap((artifact) -> submitModel.deployArtifactObservable(artifact, clusterDetail)
                    .subscribeOn(Schedulers.io()))
            .map((selectedClusterDetail) -> {
                // Create Batch Spark Debug Job
                try {
                    return submitModel.tryToCreateBatchSparkDebugJob(selectedClusterDetail);
                } catch (Exception e) {
                    HDInsightUtil.setJobRunningStatus(submitModel.getProject(), false);
                    throw Exceptions.propagate(e);
                }
            })
            .flatMap((remoteDebugJob) -> startDebuggerObservable(environment, callback, submissionState, remoteDebugJob)
                    .subscribeOn(Schedulers.computation())
                    .zipWith(submitModel.jobLogObservable(remoteDebugJob.getBatchId(), clusterDetail)
                            .subscribeOn(Schedulers.computation()), (session, ignore) -> session)
                    .doOnError(err -> {
                        try {
                            HDInsightUtil.showErrorMessageOnSubmissionMessageWindow(
                                    submitModel.getProject(),
                                    "Error : Spark batch debugging job is killed, got exception " + err);
                            remoteDebugJob.killBatchJob();
                            HDInsightUtil.setJobRunningStatus(submitModel.getProject(), false);
                        } catch (IOException ignore) {
                        }
                    }))
            .subscribe(sparkBatchDebugSession -> {
                HDInsightUtil.showInfoOnSubmissionMessageWindow(
                        submitModel.getProject(),
                        "Info : Debugging Spark batch job in cluster is done.");
                sparkBatchDebugSession.close();
                HDInsightUtil.setJobRunningStatus(submitModel.getProject(), false);
                postEventProperty.put("IsSubmitSucceed", "true");
                AppInsightsClient.create(HDInsightBundle.message("SparkRunConfigDebugButtonClick"), null, postEventProperty);
            }, (throwable) -> {
                // set the running flag to false
                HDInsightUtil.setJobRunningStatus(submitModel.getProject(), false);
                String errorMessage;
                if (throwable instanceof CompositeException) {
                    CompositeException exceptions = (CompositeException) throwable;
                    errorMessage = exceptions.getExceptions().stream()
                            .map(Throwable::getMessage)
                            .collect(Collectors.joining("; "));
                } else {
                    errorMessage = throwable.getMessage();
                }
                HDInsightUtil.showErrorMessageOnSubmissionMessageWindow(
                        submitModel.getProject(),
                        "Error : Spark batch Job remote debug failed, got exception: " + errorMessage);
                postEventProperty.put("IsSubmitSucceed", "false");
                postEventProperty.put("SubmitFailedReason", errorMessage.substring(0, 50));
                AppInsightsClient.create(HDInsightBundle.message("SparkRunConfigDebugButtonClick"), null, postEventProperty);
            });
}
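The error branch above flattens a CompositeException into a single semicolon-joined message before posting telemetry. If that step were worth reusing, a small helper along these lines would capture it; errorSummary is hypothetical, not part of the project, and it assumes RxJava 1.x's rx.exceptions.CompositeException, which is what the Schedulers and Exceptions usage above suggests.

import java.util.stream.Collectors;
import rx.exceptions.CompositeException;

final class ErrorMessages {
    private ErrorMessages() {
    }

    // Hypothetical helper: joins the messages of a CompositeException with "; ",
    // mirroring the error branch in SparkBatchJobDebuggerRunner.execute above.
    static String errorSummary(Throwable throwable) {
        if (throwable instanceof CompositeException) {
            return ((CompositeException) throwable).getExceptions().stream()
                    .map(Throwable::getMessage)
                    .collect(Collectors.joining("; "));
        }
        return throwable.getMessage();
    }
}

Note also that errorMessage.substring(0, 50) in the snippet assumes the message is at least 50 characters long; a shorter message would raise StringIndexOutOfBoundsException.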
use of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in project azure-tools-for-java by Microsoft.
the class SparkSubmissionContentPanel method constructSubmissionParameter.
public SparkSubmissionParameter constructSubmissionParameter() {
    IClusterDetail selectedClusterDetail = submitModel.getSelectedClusterDetail();
    Object selectedItem = selectedArtifactComboBox.getSelectedItem();
    String selectedArtifactName = selectedItem == null ? "" : selectedItem.toString();
    String className = mainClassTextField.getText().trim();
    String commandLine = commandLineTextField.getText().trim();
    String localArtifactPath = selectedArtifactTextField.getText();
    String selectedClusterName = selectedClusterDetail != null ? selectedClusterDetail.getName() : "";

    java.util.List<String> referencedFileList = new ArrayList<>();
    for (String singleReferencedFile : referencedFilesTextField.getText().split(";")) {
        singleReferencedFile = singleReferencedFile.trim();
        if (!StringHelper.isNullOrWhiteSpace(singleReferencedFile)) {
            referencedFileList.add(singleReferencedFile);
        }
    }

    java.util.List<String> uploadedFilePathList = new ArrayList<>();
    for (String singleReferencedJars : referencedJarsTextField.getText().split(";")) {
        singleReferencedJars = singleReferencedJars.trim();
        if (!StringHelper.isNullOrWhiteSpace(singleReferencedJars)) {
            uploadedFilePathList.add(singleReferencedJars);
        }
    }

    java.util.List<String> argsList = new ArrayList<>();
    for (String singleArs : commandLine.split(" ")) {
        if (!StringHelper.isNullOrWhiteSpace(singleArs)) {
            argsList.add(singleArs.trim());
        }
    }

    Map<String, Object> jobConfigMap = submitModel.getJobConfigMap();

    return new SparkSubmissionParameter(selectedClusterName, localArtifactRadioButton.isSelected(),
            selectedArtifactName, localArtifactPath, null, className,
            referencedFileList, uploadedFilePathList, argsList, jobConfigMap);
}
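The two semicolon-splitting loops (referenced files and referenced JARs) follow the same trim-and-filter pattern, and the dialog-based variant further down repeats it again. A hypothetical helper like the one below, not present in the project, would capture that pattern; the plain isEmpty() check stands in for StringHelper.isNullOrWhiteSpace, which is equivalent here because the token has already been trimmed.

import java.util.ArrayList;
import java.util.List;

// Hypothetical extraction of the repeated split/trim/filter pattern above.
static List<String> splitNonBlank(String text, String separator) {
    List<String> result = new ArrayList<>();
    for (String token : text.split(separator)) {
        token = token.trim();
        if (!token.isEmpty()) {
            result.add(token);
        }
    }
    return result;
}

// Illustrative usage:
// List<String> referencedFileList = splitNonBlank(referencedFilesTextField.getText(), ";");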
use of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in project azure-tools-for-java by Microsoft.
the class SparkSubmitModel method setClusterComboBoxModel.
public void setClusterComboBoxModel(List<IClusterDetail> cachedClusterDetails) {
    this.cachedClusterDetails = cachedClusterDetails;
    clusterComboBoxModel.removeAllElements();
    mapClusterNameToClusterDetail.clear();

    for (IClusterDetail clusterDetail : cachedClusterDetails) {
        String title = getCluserTitle(clusterDetail);
        mapClusterNameToClusterDetail.put(title, clusterDetail);
        clusterComboBoxModel.addElement(title);
    }

    int index = -1;
    if (submissionParameter != null) {
        String title = getCluserTitle(submissionParameter.getClusterName());
        index = clusterComboBoxModel.getIndexOf(title);
        if (index != -1) {
            clusterComboBoxModel.setSelectedItem(getCluserTitle(submissionParameter.getClusterName()));
        }
    }
}
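The title-to-detail map exists because the combo-box model only holds display strings, so the selection must be resolved back to an IClusterDetail. A minimal illustration of that lookup, using the same fields shown above; this accessor is hypothetical, and getSelectedClusterDetail() in the real model presumably serves this role.

// Hypothetical accessor, not from the project: maps the selected title back to its detail.
private IClusterDetail resolveSelectedCluster() {
    Object selectedTitle = clusterComboBoxModel.getSelectedItem();
    return selectedTitle == null ? null : mapClusterNameToClusterDetail.get(selectedTitle.toString());
}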
use of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in project azure-tools-for-java by Microsoft.
the class SparkSubmissionExDialog method constructSubmissionParameter.
private SparkSubmissionParameter constructSubmissionParameter() {
    IClusterDetail selectedClusterDetail = getSelectedCluster(clustersListComboBox.getText());
    String selectedArtifactName = projectArtifactSelectComboBox.getText();
    String className = mainClassCombo.getText().trim();
    String commandLine = commandLineTextField.getText().trim();
    String localArtifactPath = localArtifactInput.getText();
    String selectedClusterName = selectedClusterDetail.getName();

    List<String> referencedFileList = new ArrayList<>();
    for (String singleReferencedFile : referencedFilesTextField.getText().split(";")) {
        singleReferencedFile = singleReferencedFile.trim();
        if (!StringHelper.isNullOrWhiteSpace(singleReferencedFile)) {
            referencedFileList.add(singleReferencedFile);
        }
    }

    List<String> uploadedFilePathList = new ArrayList<>();
    for (String singleReferencedJars : referencedJarsTextField.getText().split(";")) {
        singleReferencedJars = singleReferencedJars.trim();
        if (!StringHelper.isNullOrWhiteSpace(singleReferencedJars)) {
            uploadedFilePathList.add(singleReferencedJars);
        }
    }

    List<String> argsList = new ArrayList<>();
    for (String singleArs : commandLine.split(" ")) {
        if (!StringHelper.isNullOrWhiteSpace(singleArs)) {
            argsList.add(singleArs.trim());
        }
    }

    // FIXME: need a duplicated keys check when creating a new row is allowed
    final Map<String, Object> jobConfigMap = jobConfigs.stream().collect(Collectors.toMap(Pair::first, Pair::second));

    return new SparkSubmissionParameter(selectedClusterName, localArtifactRadioButton.getSelection(),
            selectedArtifactName, localArtifactPath, null, className,
            referencedFileList, uploadedFilePathList, argsList, jobConfigMap);
}
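Regarding the FIXME about duplicate keys: the two-argument Collectors.toMap throws IllegalStateException as soon as two rows share a config key, so once editable rows can collide the collector needs a merge function. A sketch of one possible resolution (last value wins); the merge policy here is an assumption, not the project's decision.

// Sketch: three-argument toMap tolerates duplicate config keys by keeping the last value.
final Map<String, Object> jobConfigMap = jobConfigs.stream()
        .collect(Collectors.toMap(Pair::first, Pair::second, (first, second) -> second));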