Example usage of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in the azure-tools-for-java project by Microsoft: class AddNewEmulatorForm, method checkSparkHistoryEndpoint.
/**
 * Probes the configured Spark history server REST endpoint with the stored
 * user credentials and reports whether it answered with an accepted status.
 *
 * @return {@code true} when the endpoint responded with 200, 201, or 500;
 *         {@code false} on any other status or on a request failure.
 */
private boolean checkSparkHistoryEndpoint() {
    SparkBatchSubmission.getInstance().setCredentialsProvider(userName, password);

    final HttpResponse response;
    try {
        response = SparkBatchSubmission.getInstance()
                .getHttpResponseViaGet(sparkHistoryEndpoint + "/api/v1/applications");
    } catch (Exception ignored) {
        // Any transport/protocol failure means the endpoint is unusable.
        return false;
    }

    // NOTE(review): 500 is treated as "endpoint reachable" — presumably the history
    // server can answer 500 while still being correctly configured; confirm.
    final int code = response.getCode();
    return code == 200 || code == 201 || code == 500;
}
Example usage of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in the azure-tools-for-java project by Microsoft: class AddNewEmulatorForm, method checkAmbariEndpoint.
/**
 * Probes the configured Ambari endpoint using the emulator's default
 * admin/admin credentials.
 *
 * @return {@code true} when Ambari responded with 200 or 201;
 *         {@code false} on any other status or on a request failure.
 */
private boolean checkAmbariEndpoint() {
    // The HDInsight emulator ships with the default admin/admin account.
    SparkBatchSubmission.getInstance().setCredentialsProvider("admin", "admin");

    try {
        HttpResponse response = SparkBatchSubmission.getInstance().getHttpResponseViaGet(ambariEndpoint);
        int code = response.getCode();
        return code == 200 || code == 201;
    } catch (Exception ignored) {
        // Unreachable or misbehaving endpoint: report failure rather than propagate.
        return false;
    }
}
Example usage of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in the azure-tools-for-java project by Microsoft: class SparkSubmitModel, method tryToCreateBatchSparkJob.
// Submits the prepared Spark batch job to the selected cluster through the Livy REST
// endpoint. On success (200/201) it records telemetry, shows a history-server link,
// arms the stop button with the returned batch id, and streams the job's running log;
// on failure it reports the error to the submission message window and posts telemetry.
// NOTE(review): on the success path telemetry is presumably posted later by
// printRunningLogStreamingly — confirm against that method.
private void tryToCreateBatchSparkJob(@NotNull final IClusterDetail selectedClusterDetail) throws HDIException, IOException {
// Authenticate against the cluster with its HTTP credentials before submitting.
SparkBatchSubmission.getInstance().setCredentialsProvider(selectedClusterDetail.getHttpUserName(), selectedClusterDetail.getHttpPassword());
HttpResponse response = SparkBatchSubmission.getInstance().createBatchSparkJob(SparkSubmitHelper.getLivyConnectionURL(selectedClusterDetail), submissionParameter);
if (response.getCode() == 201 || response.getCode() == 200) {
HDInsightUtil.showInfoOnSubmissionMessageWindow("Info : Submit to spark cluster successfully.");
postEventProperty.put("IsSubmitSucceed", "true");
// Offer a direct link to the cluster's Spark history UI.
String jobLink = String.format("%s/sparkhistory", selectedClusterDetail.getConnectionUrl());
HDInsightUtil.setHyperLinkWithText("See spark job view from ", jobLink, jobLink);
// Parse the Livy response to obtain the batch id of the submitted application.
@SuppressWarnings("serial") final SparkSubmitResponse sparkSubmitResponse = new Gson().fromJson(response.getMessage(), new TypeToken<SparkSubmitResponse>() {
}.getType());
// Set submitted spark application id and http request info for stopping running application
// syncExec: the tool window widgets must be touched on the SWT UI thread.
Display.getDefault().syncExec(new Runnable() {
@Override
public void run() {
SparkSubmissionToolWindowView view = HDInsightUtil.getSparkSubmissionToolWindowView();
view.setSparkApplicationStopInfo(selectedClusterDetail.getConnectionUrl(), sparkSubmitResponse.getId());
view.setStopButtonState(true);
view.getJobStatusManager().resetJobStateManager();
}
});
// Block and stream the job log until the job reaches a terminal state.
SparkSubmitHelper.getInstance().printRunningLogStreamingly(sparkSubmitResponse.getId(), selectedClusterDetail, postEventProperty);
} else {
// Submission rejected: surface the HTTP code/reason and post failure telemetry.
HDInsightUtil.showErrorMessageOnSubmissionMessageWindow(String.format("Error : Failed to submit to spark cluster. error code : %d, reason : %s.", response.getCode(), response.getContent()));
postEventProperty.put("IsSubmitSucceed", "false");
postEventProperty.put("SubmitFailedReason", response.getContent());
AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, null, postEventProperty);
}
}
Example usage of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in the azure-tools-for-java project by Microsoft: class SparkSubmitHelper, method printRunningLogStreamingly.
/**
 * Polls the Livy batch job identified by {@code id}, streaming its log output into the
 * Spark submission tool window until the job reaches a terminal state ("error",
 * "success", or "dead") or is killed from the UI, then posts outcome telemetry.
 *
 * Fixes vs. the previous revision: the job-status HTTP request is only issued when the
 * log has stopped growing (it was previously fetched every iteration but consumed only
 * then); the thread's interrupt flag is restored if the polling sleep is interrupted;
 * "== false" replaced with logical negation; dead commented-out parameter removed.
 *
 * @param id                the Livy batch job id
 * @param clusterDetail     the cluster the job was submitted to
 * @param postEventProperty telemetry properties, mutated with the job outcome
 * @throws IOException if declared by the underlying HTTP helpers
 */
public void printRunningLogStreamingly(int id, IClusterDetail clusterDetail,
        Map<String, String> postEventProperty) throws IOException {
    try {
        boolean isFailedJob = false;
        boolean isKilledJob = false;
        int fromIndex = 0;
        int times = 0;

        HDInsightUtil.getSparkSubmissionToolWindowView().setInfo("======================Begin printing out spark job log.=======================");
        while (true) {
            if (HDInsightUtil.getSparkSubmissionToolWindowView().getJobStatusManager().isJobKilled()) {
                isKilledJob = true;
                break;
            }

            int preIndex = fromIndex;
            fromIndex = printoutJobLog(id, fromIndex, clusterDetail);

            // Only when no new log lines arrived between two requests do we check job status.
            if (fromIndex == preIndex) {
                HttpResponse statusHttpResponse = SparkBatchSubmission.getInstance().getBatchSparkJobStatus(clusterDetail.getConnectionUrl() + "/livy/batches", id);
                SparkSubmitResponse status = new Gson().fromJson(statusHttpResponse.getMessage(), new TypeToken<SparkSubmitResponse>() {
                }.getType());

                String finalStatus = status.getState().toLowerCase();
                if (finalStatus.equals("error") || finalStatus.equals("success") || finalStatus.equals("dead")) {
                    isFailedJob = finalStatus.equals("error") || finalStatus.equals("dead");
                    if (!HDInsightUtil.getSparkSubmissionToolWindowView().getJobStatusManager().isJobKilled()) {
                        // Flush any remaining log lines before declaring the stream finished.
                        printoutJobLog(id, fromIndex, clusterDetail);
                        HDInsightUtil.getSparkSubmissionToolWindowView().setInfo("======================Finish printing out spark job log.=======================");
                    } else {
                        isKilledJob = true;
                    }
                    break;
                }
            }

            // Back off between polls; the interval grows with the iteration count.
            Thread.sleep(getIntervalTime(times));
            times++;
        }

        if (isKilledJob) {
            postEventProperty.put("IsKilled", "true");
            AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, Activator.getDefault().getBundle().getVersion().toString(), postEventProperty);
            return;
        }

        if (isFailedJob) {
            postEventProperty.put("IsRunningSucceed", "false");
            HDInsightUtil.getSparkSubmissionToolWindowView().setError("Error : Your submitted job run failed");
        } else {
            postEventProperty.put("IsRunningSucceed", "true");
            HDInsightUtil.getSparkSubmissionToolWindowView().setInfo("The Spark application completed successfully");
        }
        AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, Activator.getDefault().getBundle().getVersion().toString(), postEventProperty);
    } catch (Exception e) {
        // Restore the interrupt flag so callers up the stack can still observe it.
        if (e instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        if (!HDInsightUtil.getSparkSubmissionToolWindowView().getJobStatusManager().isJobKilled()) {
            HDInsightUtil.getSparkSubmissionToolWindowView().setError("Error : Failed to getting running log. Exception : " + e.toString());
        } else {
            postEventProperty.put("IsKilled", "true");
        }
        AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, Activator.getDefault().getBundle().getVersion().toString(), postEventProperty);
    }
}
Example usage of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in the azure-tools-for-java project by Microsoft: class SparkSubmissionToolWindowView, method createPartControl.
/**
 * Builds the Spark submission tool window UI: a vertical button strip (stop and
 * open-Spark-UI) beside a browser pane that renders the submission log, and wires
 * a PropertyChangeSupport so other components can push log text and button-state
 * updates into this view via the "toolWindowText", "isStopButtonEnable", and
 * "isBrowserButtonEnable" properties.
 *
 * Fixes vs. the previous revision: removed the large blocks of commented-out
 * Swing invokeAndWait/addPropertyChangeListener code; restored conventional
 * formatting. Runtime behavior and all user-visible strings are unchanged.
 */
@Override
public void createPartControl(Composite parent) {
    // Two columns: the button strip on the left, the output browser on the right.
    GridLayout layout = new GridLayout();
    layout.numColumns = 2;
    parent.setLayout(layout);

    Composite composite = new Composite(parent, SWT.NONE);
    layout = new GridLayout();
    composite.setLayout(layout);
    GridData gridData = new GridData();
    gridData.verticalAlignment = SWT.TOP;
    composite.setLayoutData(gridData);

    stopButton = new Button(composite, SWT.PUSH);
    stopButton.setToolTipText("Stop execution of current application");
    stopButton.setImage(Activator.getImageDescriptor(CommonConst.StopIconPath).createImage());
    stopButton.addSelectionListener(new SelectionAdapter() {
        @Override
        public void widgetSelected(SelectionEvent evt) {
            // Kill the batch job off the UI thread: the DELETE call blocks on the network.
            DefaultLoader.getIdeHelper().executeOnPooledThread(new Runnable() {
                @Override
                public void run() {
                    if (!StringHelper.isNullOrWhiteSpace(connectionUrl)) {
                        AppInsightsClient.create(Messages.SparkSubmissionStopButtionClickEvent, null);
                        try {
                            HttpResponse deleteResponse = SparkBatchSubmission.getInstance().killBatchJob(connectionUrl + "/livy/batches", batchId);
                            if (deleteResponse.getCode() == 201 || deleteResponse.getCode() == 200) {
                                jobStatusManager.setJobKilled();
                                setInfo("========================Stop application successfully=======================");
                            } else {
                                setError(String.format("Error : Failed to stop spark application. error code : %d, reason : %s.", deleteResponse.getCode(), deleteResponse.getContent()));
                            }
                        } catch (IOException exception) {
                            setError("Error : Failed to stop spark application. exception : " + exception.toString());
                        }
                    }
                }
            });
        }
    });

    openSparkUIButton = new Button(composite, SWT.PUSH);
    openSparkUIButton.setToolTipText("Open the corresponding Spark UI page");
    openSparkUIButton.setImage(Activator.getImageDescriptor(CommonConst.OpenSparkUIIconPath).createImage());
    openSparkUIButton.addSelectionListener(new SelectionAdapter() {
        @Override
        public void widgetSelected(SelectionEvent arg0) {
            try {
                // Only once the YARN application id is known can the running-UI URL be formed.
                if (jobStatusManager.isApplicationGenerated()) {
                    String sparkApplicationUrl = String.format(yarnRunningUIUrlFormat, connectionUrl, jobStatusManager.getApplicationId());
                    PlatformUI.getWorkbench().getBrowserSupport().getExternalBrowser().openURL(new URL(sparkApplicationUrl));
                }
            } catch (Exception browseException) {
                DefaultLoader.getUIHelper().showError("Failed to browse spark application yarn url", "Spark Submission");
            }
        }
    });

    // Output browser fills the remaining space in both directions.
    gridData = new GridData();
    gridData.horizontalAlignment = SWT.FILL;
    gridData.verticalAlignment = SWT.FILL;
    gridData.grabExcessVerticalSpace = true;
    gridData.grabExcessHorizontalSpace = true;
    outputPanel = new Browser(parent, SWT.BORDER);
    outputPanel.setLayoutData(gridData);

    PropertyChangeListener propertyChangeListener = new PropertyChangeListener() {
        @Override
        public void propertyChange(final PropertyChangeEvent evt) {
            changeSupportHandler(evt);
        }

        // Routes a property change to the matching widget update.
        private void changeSupportHandler(PropertyChangeEvent evt) {
            if (evt.getPropertyName().equals("toolWindowText")) {
                outputPanel.setText(evt.getNewValue().toString());
            } else if (evt.getPropertyName().equals("isStopButtonEnable")) {
                stopButton.setEnabled(Boolean.parseBoolean(evt.getNewValue().toString()));
            } else if (evt.getPropertyName().equals("isBrowserButtonEnable")) {
                openSparkUIButton.setEnabled(Boolean.parseBoolean(evt.getNewValue().toString()));
            }
        }
    };

    changeSupport = new PropertyChangeSupport(outputPanel);
    changeSupport.addPropertyChangeListener(propertyChangeListener);
}
Aggregations