use of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in project azure-tools-for-java by Microsoft.
the class SparkSubmitHelper method printRunningLogStreamingly.
public void printRunningLogStreamingly(Project project, int id, IClusterDetail clusterDetail, Map<String, String> postEventProperty) throws IOException {
    try {
        boolean isFailedJob = false;
        boolean isKilledJob = false;
        int from_index = 0;
        int pre_index;
        int times = 0;
        HDInsightUtil.getSparkSubmissionToolWindowManager(project).setInfo("======================Begin printing out spark job log.=======================");
        while (true) {
            pre_index = from_index;
            if (HDInsightUtil.getSparkSubmissionToolWindowManager(project).getJobStatusManager().isJobKilled()) {
                isKilledJob = true;
                break;
            }
            from_index = printoutJobLog(project, id, from_index, clusterDetail);
            HttpResponse statusHttpResponse = SparkBatchSubmission.getInstance().getBatchSparkJobStatus(getLivyConnectionURL(clusterDetail), id);
            SparkSubmitResponse status = new Gson().fromJson(statusHttpResponse.getMessage(), new TypeToken<SparkSubmitResponse>() {
            }.getType());
            // Only when two consecutive requests return the same log offset (no new lines) do we check the job status.
            if (from_index == pre_index) {
                String finalStatus = status.getState().toLowerCase();
                if (finalStatus.equals("error") || finalStatus.equals("success") || finalStatus.equals("dead")) {
                    if (finalStatus.equals("error") || finalStatus.equals("dead")) {
                        isFailedJob = true;
                    }
                    if (!HDInsightUtil.getSparkSubmissionToolWindowManager(project).getJobStatusManager().isJobKilled()) {
                        printoutJobLog(project, id, from_index, clusterDetail);
                        HDInsightUtil.getSparkSubmissionToolWindowManager(project).setInfo("======================Finish printing out spark job log.=======================");
                    } else {
                        isKilledJob = true;
                    }
                    break;
                }
            }
            Thread.sleep(getIntervalTime(times));
            times++;
        }
        if (isKilledJob) {
            postEventProperty.put("IsKilled", "true");
            AppInsightsClient.create(HDInsightBundle.message("SparkSubmissionButtonClickEvent"), null, postEventProperty);
            return;
        }
        if (isFailedJob) {
            postEventProperty.put("IsRunningSucceed", "false");
            HDInsightUtil.getSparkSubmissionToolWindowManager(project).setError("Error : Your submitted job run failed");
        } else {
            postEventProperty.put("IsRunningSucceed", "true");
            HDInsightUtil.getSparkSubmissionToolWindowManager(project).setInfo("The Spark application completed successfully");
        }
        AppInsightsClient.create(HDInsightBundle.message("SparkSubmissionButtonClickEvent"), null, postEventProperty);
    } catch (Exception e) {
        if (!HDInsightUtil.getSparkSubmissionToolWindowManager(project).getJobStatusManager().isJobKilled()) {
            HDInsightUtil.getSparkSubmissionToolWindowManager(project).setError("Error : Failed to get the running log. Exception : " + e.toString());
        } else {
            postEventProperty.put("IsKilled", "true");
            AppInsightsClient.create(HDInsightBundle.message("SparkSubmissionButtonClickEvent"), null, postEventProperty);
        }
    }
}
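The polling loop above sleeps for getIntervalTime(times) milliseconds between requests, but that helper is not part of this excerpt. A minimal sketch of a capped back-off is shown below; the base and maximum delays are assumptions, not the project's actual values.

    // Hypothetical sketch only; the project's real getIntervalTime implementation is not shown above and may differ.
    private static int getIntervalTime(int times) {
        final int baseIntervalMillis = 200;   // assumed initial delay between polls
        final int maxIntervalMillis = 5000;   // assumed upper bound on the delay
        return Math.min(baseIntervalMillis * (times + 1), maxIntervalMillis);
    }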
use of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in project azure-tools-for-java by Microsoft.
the class AddNewEmulatorForm method checkLivyEndpoint.
private boolean checkLivyEndpoint() {
    SparkBatchSubmission.getInstance().setCredentialsProvider(userName, password);
    HttpResponse httpResponse = null;
    try {
        httpResponse = SparkBatchSubmission.getInstance().getAllBatchesSparkJobs(livyEndpoint + "/batches");
    } catch (Exception e) {
        return false;
    }
    return httpResponse.getCode() == 201 || httpResponse.getCode() == 200;
}
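A caller would normally run this check off the dispatch thread and report a failure as a validation error. A minimal usage sketch follows; the checkEmulatorSetting wrapper and its message text are hypothetical and not part of the form.

    // Hypothetical caller; only checkLivyEndpoint() and livyEndpoint come from the form above.
    private void checkEmulatorSetting() {
        DefaultLoader.getIdeHelper().executeOnPooledThread(new Runnable() {
            @Override
            public void run() {
                if (!checkLivyEndpoint()) {
                    // Assumed error reporting; the real form may surface this differently.
                    DefaultLoader.getUIHelper().showError("Cannot connect to the Livy endpoint: " + livyEndpoint, "Add New Emulator");
                }
            }
        });
    }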
use of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in project azure-tools-for-java by Microsoft.
the class AddNewEmulatorForm method checkSparkHistoryEndpoint.
private boolean checkSparkHistoryEndpoint() {
    SparkBatchSubmission.getInstance().setCredentialsProvider(userName, password);
    HttpResponse httpResponse = null;
    try {
        httpResponse = SparkBatchSubmission.getInstance().getHttpResponseViaGet(sparkHistoryEndpoint + "/api/v1/applications");
    } catch (Exception e) {
        return false;
    }
    // Unlike the Livy check, a 500 response is also treated as evidence that the history server is reachable.
    return httpResponse.getCode() == 201 || httpResponse.getCode() == 200 || httpResponse.getCode() == 500;
}
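Both checks follow the same pattern: issue a GET against the endpoint and treat selected status codes as proof that it is reachable. A consolidation sketch is shown below; the helper name checkEndpointReachable and its varargs parameter are assumptions, not part of the project.

    // Hypothetical shared helper built on getHttpResponseViaGet, which both checks could delegate to.
    private boolean checkEndpointReachable(String url, int... acceptedCodes) {
        SparkBatchSubmission.getInstance().setCredentialsProvider(userName, password);
        try {
            HttpResponse httpResponse = SparkBatchSubmission.getInstance().getHttpResponseViaGet(url);
            for (int acceptedCode : acceptedCodes) {
                if (httpResponse.getCode() == acceptedCode) {
                    return true;
                }
            }
        } catch (Exception e) {
            // Any connection failure means the endpoint is not reachable.
        }
        return false;
    }

With such a helper, checkLivyEndpoint() would reduce to checkEndpointReachable(livyEndpoint + "/batches", 200, 201) and checkSparkHistoryEndpoint() to checkEndpointReachable(sparkHistoryEndpoint + "/api/v1/applications", 200, 201, 500).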
use of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in project azure-tools-for-java by Microsoft.
the class SparkSubmissionToolWindowProcessor method initialize.
public void initialize() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    // Re-render the cached HTML output whenever the IDE UI settings (e.g. the theme) change.
    UISettings.getInstance().addUISettingsListener(new UISettingsListener() {
        @Override
        public void uiSettingsChanged(UISettings uiSettings) {
            synchronized (this) {
                for (IHtmlElement htmlElement : cachedInfo) {
                    htmlElement.ChangeTheme();
                }
                setToolWindowText(parserHtmlElementList(cachedInfo));
            }
        }
    }, ApplicationManager.getApplication());
    fontFace = jEditorPanel.getFont().getFamily();
    JPanel jPanel = new JPanel();
    jPanel.setLayout(new GridBagLayout());
    jEditorPanel.setMargin(new Insets(0, 10, 0, 0));
    JBScrollPane scrollPane = new JBScrollPane(jEditorPanel);
    // The stop button kills the running Livy batch job on a pooled thread.
    stopButton = new JButton(PluginUtil.getIcon(CommonConst.StopIconPath));
    stopButton.setDisabledIcon(PluginUtil.getIcon(CommonConst.StopDisableIconPath));
    stopButton.setEnabled(false);
    stopButton.setToolTipText("stop execution of current application");
    stopButton.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            DefaultLoader.getIdeHelper().executeOnPooledThread(new Runnable() {
                @Override
                public void run() {
                    if (clusterDetail != null) {
                        AppInsightsClient.create(HDInsightBundle.message("SparkSubmissionStopButtionClickEvent"), null);
                        try {
                            HttpResponse deleteResponse = SparkBatchSubmission.getInstance().killBatchJob(SparkSubmitHelper.getLivyConnectionURL(clusterDetail), batchId);
                            if (deleteResponse.getCode() == 201 || deleteResponse.getCode() == 200) {
                                jobStatusManager.setJobKilled();
                                setInfo("========================Stop application successfully=======================");
                            } else {
                                setError(String.format("Error : Failed to stop spark application. error code : %d, reason : %s.", deleteResponse.getCode(), deleteResponse.getContent()));
                            }
                        } catch (IOException exception) {
                            setError("Error : Failed to stop spark application. exception : " + exception.toString());
                        }
                    }
                }
            });
        }
    });
    // The Spark UI button opens the YARN running-UI page for the current application.
    openSparkUIButton = new JButton(PluginUtil.getIcon(CommonConst.OpenSparkUIIconPath));
    openSparkUIButton.setDisabledIcon(PluginUtil.getIcon(CommonConst.OpenSparkUIDisableIconPath));
    openSparkUIButton.setEnabled(false);
    openSparkUIButton.setToolTipText("open the corresponding Spark UI page");
    openSparkUIButton.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            if (Desktop.isDesktopSupported()) {
                try {
                    if (jobStatusManager.isApplicationGenerated()) {
                        String connectionURL = clusterDetail.getConnectionUrl();
                        String sparkApplicationUrl = clusterDetail.isEmulator() ? String.format(yarnRunningUIEmulatorUrlFormat, ((EmulatorClusterDetail) clusterDetail).getSparkHistoryEndpoint(), jobStatusManager.getApplicationId()) : String.format(yarnRunningUIUrlFormat, connectionURL, jobStatusManager.getApplicationId());
                        Desktop.getDesktop().browse(new URI(sparkApplicationUrl));
                    }
                } catch (Exception browseException) {
                    DefaultLoader.getUIHelper().showError("Failed to browse spark application yarn url", "Spark Submission");
                }
            }
        }
    });
    JPanel buttonPanel = new JPanel();
    buttonPanel.setLayout(new BoxLayout(buttonPanel, BoxLayout.Y_AXIS));
    buttonPanel.add(stopButton);
    buttonPanel.add(openSparkUIButton);
    GridBagConstraints c00 = new GridBagConstraints();
    c00.fill = GridBagConstraints.VERTICAL;
    c00.weighty = 1;
    c00.gridx = 0;
    c00.gridy = 0;
    jPanel.add(buttonPanel, c00);
    GridBagConstraints c10 = new GridBagConstraints();
    c10.fill = GridBagConstraints.BOTH;
    c10.weightx = 1;
    c10.weighty = 1;
    c10.gridx = 1;
    c10.gridy = 0;
    jPanel.add(scrollPane, c10);
    toolWindow.getComponent().add(jPanel);
    jEditorPanel.setEditable(false);
    jEditorPanel.setOpaque(false);
    jEditorPanel.setEditorKit(JEditorPane.createEditorKitForContentType("text/html"));
    // Open http(s) links in the default browser and file links in the local file explorer.
    jEditorPanel.addHyperlinkListener(new HyperlinkListener() {
        @Override
        public void hyperlinkUpdate(HyperlinkEvent e) {
            if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
                if (Desktop.isDesktopSupported()) {
                    try {
                        String protocol = e.getURL().getProtocol();
                        if (protocol.equals("https") || protocol.equals("http")) {
                            Desktop.getDesktop().browse(e.getURL().toURI());
                        } else if (protocol.equals("file")) {
                            String path = e.getURL().getFile();
                            File localFile = new File(path);
                            File parentFile = localFile.getParentFile();
                            if (parentFile.exists() && parentFile.isDirectory()) {
                                Desktop.getDesktop().open(parentFile);
                            }
                        }
                    } catch (Exception exception) {
                        DefaultLoader.getUIHelper().showError(exception.getMessage(), "Open Local Folder Error");
                    }
                }
            }
        }
    });
    // Marshal property changes onto the dispatch thread and map them to UI updates.
    PropertyChangeListener propertyChangeListener = new PropertyChangeListener() {
        @Override
        public void propertyChange(final PropertyChangeEvent evt) {
            if (ApplicationManager.getApplication().isDispatchThread()) {
                changeSupportHandler(evt);
            } else {
                try {
                    SwingUtilities.invokeAndWait(new Runnable() {
                        @Override
                        public void run() {
                            changeSupportHandler(evt);
                        }
                    });
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (InvocationTargetException e) {
                    e.printStackTrace();
                }
            }
        }

        private void changeSupportHandler(PropertyChangeEvent evt) {
            if (evt.getPropertyName().equals("toolWindowText")) {
                jEditorPanel.setText(evt.getNewValue().toString());
            } else if (evt.getPropertyName().equals("isStopButtonEnable")) {
                stopButton.setEnabled(Boolean.parseBoolean(evt.getNewValue().toString()));
            } else if (evt.getPropertyName().equals("isBrowserButtonEnable")) {
                openSparkUIButton.setEnabled(Boolean.parseBoolean(evt.getNewValue().toString()));
            }
        }
    };
    jEditorPanel.addPropertyChangeListener(propertyChangeListener);
    changeSupport = new PropertyChangeSupport(jEditorPanel);
    changeSupport.addPropertyChangeListener(propertyChangeListener);
}
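The PropertyChangeListener above reacts to the property names "toolWindowText", "isStopButtonEnable" and "isBrowserButtonEnable". The setters that fire those events are not part of this excerpt; a plausible sketch of one of them, assuming the processor simply forwards through changeSupport:

    // Plausible sketch; the processor's real setToolWindowText implementation is not shown in this excerpt.
    private void setToolWindowText(String toolWindowText) {
        // Firing on the same property name the listener handles pushes the new HTML into jEditorPanel.
        changeSupport.firePropertyChange("toolWindowText", "", toolWindowText);
    }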
use of com.microsoft.azure.hdinsight.sdk.common.HttpResponse in project azure-tools-for-java by Microsoft.
the class StreamUtil method getResultFromHttpResponse.
public static HttpResponse getResultFromHttpResponse(CloseableHttpResponse response) throws IOException {
    int code = response.getStatusLine().getStatusCode();
    String reason = response.getStatusLine().getReasonPhrase();
    HttpEntity entity = response.getEntity();
    try (InputStream inputStream = entity.getContent()) {
        String response_content = getResultFromInputStream(inputStream);
        return new HttpResponse(code, response_content, new HashMap<String, List<String>>(), reason);
    }
}
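getResultFromInputStream is not included in this excerpt. A minimal sketch of such a stream-to-string helper follows; reading line by line with UTF-8 is an assumption, the project's actual implementation may differ.

    // Plausible sketch; requires java.io.BufferedReader, java.io.InputStreamReader and java.nio.charset.StandardCharsets.
    private static String getResultFromInputStream(InputStream inputStream) throws IOException {
        StringBuilder result = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                result.append(line).append(System.lineSeparator());
            }
        }
        return result.toString();
    }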