use of com.microsoft.azure.hdinsight.sdk.common.HDIException in project azure-tools-for-java by Microsoft.
the class SparkSubmitModel method tryToCreateBatchSparkDebugJob.
public SparkBatchRemoteDebugJob tryToCreateBatchSparkDebugJob(@NotNull final IClusterDetail selectedClusterDetail) throws HDIException, IOException {
    SparkBatchSubmission.getInstance().setCredentialsProvider(selectedClusterDetail.getHttpUserName(), selectedClusterDetail.getHttpPassword());
    try {
        SparkBatchRemoteDebugJob debugJob = SparkBatchRemoteDebugJob.factory(SparkSubmitHelper.getLivyConnectionURL(selectedClusterDetail), submissionParameter, SparkBatchSubmission.getInstance());
        debugJob.createBatchSparkJobWithDriverDebugging();
        return debugJob;
    } catch (URISyntaxException ex) {
        // Wrap URL parsing failures in an HDIException that carries the offending Livy URL.
        throw new HDIException("Bad Livy Connection URL " + SparkSubmitHelper.getLivyConnectionURL(selectedClusterDetail), ex);
    } catch (IOException ex) {
        // Report the submission failure in the message window, then rethrow for the caller.
        HDInsightUtil.showErrorMessageOnSubmissionMessageWindow(project, String.format("Error : Failed to submit to spark cluster. error message : %s.", ex.getMessage()));
        throw ex;
    }
}
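For illustration, a minimal caller sketch (not from the project): it assumes a SparkSubmitModel instance named submitModel and an already-resolved IClusterDetail named cluster, and shows how both failure paths surface as checked exceptions at the call site.

// Hypothetical caller; submitModel and cluster are illustration-only names.
SparkBatchRemoteDebugJob startDebugJob(SparkSubmitModel submitModel, IClusterDetail cluster) throws IOException {
    try {
        return submitModel.tryToCreateBatchSparkDebugJob(cluster);
    } catch (HDIException e) {
        // Thrown when the Livy connection URL cannot be parsed (wrapped URISyntaxException).
        throw new IOException("Could not create the Spark debug batch job", e);
    }
    // An IOException from the submission itself is already shown in the message window and simply propagates.
}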
use of com.microsoft.azure.hdinsight.sdk.common.HDIException in project azure-tools-for-java by Microsoft.
the class YarnHistoryTask method call.
@Override
public String call() throws Exception {
    WEB_CLIENT.setCredentialsProvider(credentialsProvider);
    HtmlPage htmlPage = WEB_CLIENT.getPage(path);
    // Parse the 'pre' tag from the HTML response; there is only one 'pre' element in the page.
    DomNodeList<DomElement> preTagElements = htmlPage.getElementsByTagName("pre");
    if (preTagElements.size() == 0) {
        throw new HDIException("No logs here or logs not available");
    } else {
        return preTagElements.get(0).asText();
    }
}
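Since call() is the body of a Callable&lt;String&gt;, a task failure reaches the caller as an ExecutionException whose cause may be this HDIException. A sketch of that unwrapping, assuming an already-constructed YarnHistoryTask named task (its constructor arguments are omitted here) and the java.util.concurrent imports:

// Sketch only; 'task' stands for a constructed YarnHistoryTask (a Callable<String>).
ExecutorService executor = Executors.newSingleThreadExecutor();
Future<String> logFuture = executor.submit(task);
try {
    String yarnLog = logFuture.get(30, TimeUnit.SECONDS); // timeout chosen arbitrarily for the example
    System.out.println(yarnLog);
} catch (ExecutionException e) {
    if (e.getCause() instanceof HDIException) {
        // The history page contained no 'pre' element, i.e. no logs were available.
        System.err.println(e.getCause().getMessage());
    }
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
} catch (TimeoutException e) {
    logFuture.cancel(true);
} finally {
    executor.shutdown();
}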
use of com.microsoft.azure.hdinsight.sdk.common.HDIException in project azure-tools-for-java by Microsoft.
the class ClusterManager method getClusterDetails.
private List<IClusterDetail> getClusterDetails(List<SubscriptionDetail> subscriptions, final Object project) throws AggregatedException {
    ExecutorService taskExecutor = Executors.newFixedThreadPool(MAX_CONCURRENT);
    final List<IClusterDetail> cachedClusterList = new ArrayList<>();
    final List<Exception> aggregateExceptions = new ArrayList<>();
    // List the clusters of each subscription on its own worker thread.
    for (SubscriptionDetail subscription : subscriptions) {
        taskExecutor.execute(new CommonRunnable<SubscriptionDetail, Exception>(subscription) {
            @Override
            public void runSpecificParameter(SubscriptionDetail parameter) throws IOException, HDIException, AzureCmdException {
                IClusterOperation clusterOperation = new ClusterOperationImpl(project);
                List<ClusterRawInfo> clusterRawInfoList = clusterOperation.listCluster(parameter);
                if (clusterRawInfoList != null) {
                    for (ClusterRawInfo item : clusterRawInfoList) {
                        IClusterDetail tempClusterDetail = new ClusterDetail(parameter, item);
                        synchronized (ClusterManager.class) {
                            cachedClusterList.add(tempClusterDetail);
                        }
                    }
                }
            }

            @Override
            public void exceptionHandle(Exception e) {
                // Record the failure so the other subscriptions can still be queried.
                synchronized (aggregateExceptions) {
                    aggregateExceptions.add(e);
                }
            }
        });
    }
    taskExecutor.shutdown();
    try {
        taskExecutor.awaitTermination(TIME_OUT, TimeUnit.SECONDS);
    } catch (InterruptedException exception) {
        aggregateExceptions.add(exception);
    }
    if (aggregateExceptions.size() > 0) {
        throw new AggregatedException(aggregateExceptions);
    }
    return cachedClusterList;
}
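The method's pattern: fan the per-subscription listing calls out onto a thread pool, collect every failure instead of aborting on the first one, and raise a single aggregate exception at the end. A JDK-only distillation of that pattern, with illustrative names that are not from the project:

// Illustrative distillation of the fan-out/aggregate pattern above; all names are made up.
// Needs java.util.* and java.util.concurrent.* imports.
static List<String> runAll(List<Callable<String>> tasks) throws Exception {
    List<String> results = Collections.synchronizedList(new ArrayList<>());
    List<Exception> failures = Collections.synchronizedList(new ArrayList<>());
    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (Callable<String> task : tasks) {
        pool.execute(() -> {
            try {
                results.add(task.call());   // one unit of work per subscription
            } catch (Exception e) {
                failures.add(e);            // record the failure; keep the other tasks running
            }
        });
    }
    pool.shutdown();
    pool.awaitTermination(60, TimeUnit.SECONDS);
    if (!failures.isEmpty()) {
        // Stands in for AggregatedException: one exception carrying all per-task failures.
        Exception aggregate = new Exception("Tasks failed: " + failures.size());
        failures.forEach(aggregate::addSuppressed);
        throw aggregate;
    }
    return results;
}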
use of com.microsoft.azure.hdinsight.sdk.common.HDIException in project azure-tools-for-java by Microsoft.
the class AddHDInsightAdditionalClusterImpl method getMessageByAmbari.
private static String getMessageByAmbari(String clusterName, String userName, String passwd) throws HDIException {
    String linuxClusterConfigureFileUrl = String.format(clusterConfigureFileUrl, clusterName, clusterName);
    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
    credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName.trim(), passwd));
    CloseableHttpClient httpClient = HttpClients.custom().setDefaultCredentialsProvider(credentialsProvider).build();
    CloseableHttpResponse response = null;
    int responseCode = -1;
    try {
        response = tryGetHttpResponse(httpClient, linuxClusterConfigureFileUrl);
    } catch (UnknownHostException e1) {
        throw new HDIException("Invalid Cluster Name");
    } catch (Exception e3) {
        throw new HDIException("Something wrong with the cluster! Please try again later");
    }
    responseCode = response.getStatusLine().getStatusCode();
    if (responseCode == 200) {
        try {
            return StreamUtil.getResultFromHttpResponse(response).getMessage();
        } catch (IOException e) {
            throw new HDIException("Not support cluster");
        }
    } else if (responseCode == 401 || responseCode == 403) {
        throw new HDIException("Invalid Cluster Name or Password");
    } else {
        throw new HDIException("Something wrong with the cluster! Please try again later");
    }
}
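Every failure path, whether a DNS miss, an authentication error, or an unexpected status code, is converted into an HDIException whose message is meant to be shown to the user directly. A hypothetical caller sketch; clusterName, userName, password and the showValidationError helper are assumptions, not project code:

// Hypothetical validation flow; showValidationError is an illustration-only helper.
try {
    String ambariMessage = getMessageByAmbari(clusterName, userName, password);
    // ... continue with the returned configuration payload ...
} catch (HDIException e) {
    // Messages such as "Invalid Cluster Name" or "Invalid Cluster Name or Password" are user-facing.
    showValidationError(e.getMessage());
}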
use of com.microsoft.azure.hdinsight.sdk.common.HDIException in project azure-tools-for-java by Microsoft.
the class SparkSubmitModel method deployArtifactObservable.
public Single<IClusterDetail> deployArtifactObservable(Artifact artifact, IClusterDetail clusterDetail) {
    return Single.create(em -> {
        IClusterDetail selectedClusterDetail = getClusterConfiguration(clusterDetail, true);
        if (selectedClusterDetail == null) {
            String errorMessage = "Selected Cluster can not found. Please login in first in HDInsight Explorer and try submit job again";
            HDInsightUtil.showErrorMessageOnSubmissionMessageWindow(project, errorMessage);
            em.onError(new HDIException(errorMessage));
            return;
        }
        try {
            uploadFileToCluster(selectedClusterDetail, artifact.getName());
            em.onSuccess(selectedClusterDetail);
        } catch (Exception exception) {
            showFailedSubmitErrorMessage(exception);
            em.onError(exception);
        }
    });
}
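Because the method returns a Single, the HDIException travels through the reactive error channel rather than being thrown. A subscriber sketch under that assumption; submitModel, artifact, and clusterDetail are illustration-only names:

// Hypothetical subscriber; all identifiers below are made up for the example.
submitModel.deployArtifactObservable(artifact, clusterDetail)
        .subscribe(
                cluster -> System.out.println("Artifact uploaded to cluster: " + cluster),
                err -> {
                    // err is the HDIException above when the selected cluster could not be resolved,
                    // otherwise whatever uploadFileToCluster threw.
                    System.err.println("Deploy failed: " + err.getMessage());
                });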