Example usage of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in the Microsoft project azure-tools-for-java:
the getSparkJobUrl method of the SparkBatchJobDebuggerRunner class.
/**
 * Resolves the Livy batch-submission URL for the cluster selected in the submit model.
 *
 * @param submitModel the Spark submission model carrying the selected cluster name
 * @return the Livy batch connection URL of the matched cluster
 * @throws ExecutionException if no known cluster matches the selected name
 * @throws IOException if the matched cluster exposes no Livy connection URL
 */
private String getSparkJobUrl(@NotNull final SparkSubmitModel submitModel) throws ExecutionException, IOException {
    final String clusterName = submitModel.getSubmissionParameter().getClusterName();
    final IClusterDetail clusterDetail = ClusterManagerEx.getInstance()
            .getClusterDetailByName(clusterName)
            .orElseThrow(() -> new ExecutionException("No cluster name matched selection: " + clusterName));

    // Guard clause: only Livy-capable clusters carry a batch URL.
    if (!(clusterDetail instanceof LivyCluster)) {
        throw new IOException("Can't get livy connection URL. Cluster: " + clusterName);
    }

    final String livyBatchUrl = ((LivyCluster) clusterDetail).getLivyBatchUrl();
    if (livyBatchUrl == null) {
        throw new IOException("Can't get livy connection URL. Cluster: " + clusterName);
    }

    return livyBatchUrl;
}
Example usage of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in the Microsoft project azure-tools-for-java:
the openJobViewEditor method of the HDInsightHelperImpl class (IntelliJ implementation).
/**
 * Opens the Spark Job View editor for the cluster registered under the given UUID,
 * reusing the currently opened editor item when a reopen is not required.
 *
 * @param projectObject the IDE project, passed as Object and cast to {@link Project}
 * @param uuid the key under which the cluster was registered in {@link JobViewManager}
 */
public void openJobViewEditor(final Object projectObject, final String uuid) {
    final IClusterDetail clusterDetail = JobViewManager.getCluster(uuid);
    final Project project = (Project) projectObject;
    final VirtualFile openedFile = getOpenedItem(project);

    // TODO: Fix the issue of clusterDetail may be null
    if (openedFile != null && !isNeedReopen(openedFile, clusterDetail)) {
        // An editor item is already open and still valid — reuse it.
        openItem(project, openedFile, null);
    } else {
        openItem(project, clusterDetail, uuid, openedFile);
    }
}
Example usage of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in the Microsoft project azure-tools-for-java:
the buildSparkBatchJob method of the SparkBatchJobRunner class.
/**
 * Assembles a deferred Spark batch job for the cluster selected in the submit model.
 * The callable resolves the cluster, prepares the artifact deployment, updates the
 * submission parameter with storage configuration, and reports progress along the way.
 *
 * @param submitModel the Spark submission model carrying the selected cluster name
 * @return an {@link Observable} that, when subscribed, yields the built {@link ISparkBatchJob}
 */
@Override
@NotNull
public Observable<ISparkBatchJob> buildSparkBatchJob(@NotNull SparkSubmitModel submitModel) {
    return Observable.fromCallable(() -> {
        final String selectedClusterName = submitModel.getSubmissionParameter().getClusterName();

        updateCurrentBackgroundableTaskIndicator(indicator -> {
            indicator.setFraction(0.2f);
            indicator.setText("Get Spark cluster [" + selectedClusterName + "] information from subscriptions");
        });

        final IClusterDetail cluster = ClusterManagerEx.getInstance()
                .getClusterDetailByName(selectedClusterName)
                .orElseThrow(() -> new ExecutionException("Can't find cluster named " + selectedClusterName));

        updateCurrentBackgroundableTaskIndicator(indicator -> {
            indicator.setFraction(0.7f);
            indicator.setText("Get the storage configuration for artifacts deployment");
        });

        final Deployable deployable = SparkBatchJobDeployFactory.getInstance()
                .buildSparkBatchJobDeploy(submitModel, cluster);
        final SparkSubmissionParameter parameter = updateStorageConfigForSubmissionParameter(submitModel);

        updateCurrentBackgroundableTaskIndicator(indicator -> {
            indicator.setFraction(1.0f);
            indicator.setText("All checks are passed.");
        });

        return new SparkBatchJob(cluster, parameter, getClusterSubmission(cluster), deployable);
    });
}
Example usage of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in the Microsoft project azure-tools-for-java:
the openJobViewEditor method of the HDInsightHelperImpl class (Eclipse implementation).
/**
 * Opens the HDInsight Job View editor in the Eclipse workbench for the named cluster.
 * Attempts to load the HDInsight plugin first; a load failure is logged and the
 * editor open is still attempted.
 *
 * @param projectObject the project context (unused by this Eclipse implementation)
 * @param clusterName the key under which the cluster was registered in {@link JobViewManager}
 */
public void openJobViewEditor(Object projectObject, @NotNull String clusterName) {
    try {
        loadHDInsightPlugin();
    } catch (BundleException bundleException) {
        Activator.getDefault().log("Error loading plugin " + HDINSIHGT_BUNDLE_ID, bundleException);
    }

    // NOTE(review): getCluster may return null for an unknown name (the IntelliJ
    // counterpart carries a TODO about exactly this) — JobViewInput must tolerate it.
    IClusterDetail clusterDetail = JobViewManager.getCluster(clusterName);
    IWorkbench workbench = PlatformUI.getWorkbench();
    IEditorDescriptor editorDescriptor = workbench.getEditorRegistry()
            .findEditor("com.microsoft.azure.hdinsight.jobview");
    try {
        IWorkbenchPage page = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage();
        // The editor is opened for its side effect; the returned IEditorPart was unused.
        page.openEditor(new JobViewInput(clusterDetail), editorDescriptor.getId());
    } catch (PartInitException e2) {
        // Log the cluster name rather than clusterDetail.getName(): clusterDetail can be
        // null here, and dereferencing it inside the catch would raise an NPE that masks e2.
        Activator.getDefault().log("Error opening " + clusterName, e2);
    }
}
Example usage of com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail in the Microsoft project azure-tools-for-java:
the static deployArtifact method of the JobUtils class.
/**
 * Deploys a local artifact to the cluster matched by name.
 * Emits a pair of (resolved cluster, uploaded artifact URI) on success, or propagates
 * any failure — including an unmatched cluster name — through {@code onError}.
 *
 * @param artifactLocalPath local filesystem path of the artifact to upload
 * @param clusterName name of the target cluster to resolve via {@link ClusterManagerEx}
 * @param logSubject sink receiving upload progress log lines
 * @return a {@link Single} yielding the cluster paired with the uploaded artifact URI
 */
public static Single<SimpleImmutableEntry<IClusterDetail, String>> deployArtifact(
        @NotNull String artifactLocalPath,
        @NotNull String clusterName,
        @NotNull Observer<SparkLogLine> logSubject) {
    return Single.create(emitter -> {
        try {
            final IClusterDetail cluster = ClusterManagerEx.getInstance()
                    .getClusterDetailByName(clusterName)
                    .orElseThrow(() -> new HDIException("No cluster name matched selection: " + clusterName));
            final String uploadedUri = JobUtils.uploadFileToCluster(cluster, artifactLocalPath, logSubject);
            emitter.onSuccess(new SimpleImmutableEntry<>(cluster, uploadedUri));
        } catch (final Exception err) {
            emitter.onError(err);
        }
    });
}
Aggregations