Use of com.microsoft.azure.hdinsight.sdk.cluster.LivyCluster in project azure-tools-for-java by Microsoft.
The class JobUtils, method uploadFileToHDFSBase:
public static String uploadFileToHDFSBase(IClusterDetail selectedClusterDetail,
                                          String buildJarPath,
                                          @Nullable Observer<SimpleImmutableEntry<MessageInfoType, String>> legacyLogSubject,
                                          @Nullable Observer<SparkLogLine> newLogSubject) throws HDIException {
    ctrlInfo(legacyLogSubject, newLogSubject, String.format("Get target jar from %s.", buildJarPath));

    final File srcJarFile = new File(buildJarPath);
    final URI destUri = URI.create(String.format("/SparkSubmission/%s/%s", getFormatPathByDate(), srcJarFile.getName()));
    final String username = selectedClusterDetail.getHttpUserName();
    final String password = selectedClusterDetail.getHttpPassword();
    final String sessionName = "Helper session to upload " + destUri.toString();
    final URI livyUri = selectedClusterDetail instanceof LivyCluster
            ? URI.create(((LivyCluster) selectedClusterDetail).getLivyConnectionUrl())
            : URI.create(selectedClusterDetail.getConnectionUrl());

    ctrlInfo(legacyLogSubject, newLogSubject, "Create Spark helper interactive session...");

    try {
        return Observable.using(
                () -> new SparkSession(sessionName, livyUri, username, password),
                SparkSession::create,
                SparkSession::close)
                .map(sparkSession -> {
                    sparkSession.getCtrlSubject().subscribe(
                            logLine -> ctrlInfo(legacyLogSubject, newLogSubject, logLine.getRawLog()),
                            err -> ctrlError(legacyLogSubject, newLogSubject, err),
                            () -> { });

                    ClusterFileBase64BufferedOutputStream clusterFileBase64Out =
                            new ClusterFileBase64BufferedOutputStream(sparkSession, destUri);
                    Base64OutputStream base64Enc = new Base64OutputStream(clusterFileBase64Out, true);
                    InputStream inFile;

                    try {
                        inFile = new BufferedInputStream(new FileInputStream(srcJarFile));
                        ctrlInfo(legacyLogSubject, newLogSubject, String.format("Uploading %s...", srcJarFile));
                        IOUtils.copy(inFile, base64Enc);
                        inFile.close();
                        base64Enc.close();
                    } catch (FileNotFoundException fnfEx) {
                        throw propagate(new HDIException(String.format("Source file %s not found.", srcJarFile), fnfEx));
                    } catch (IOException ioEx) {
                        throw propagate(new HDIException(String.format("Failed to upload file %s.", destUri), ioEx));
                    }

                    ctrlInfo(legacyLogSubject, newLogSubject, String.format("Uploaded to %s.", destUri));

                    return destUri.toString();
                })
                .toBlocking()
                .single();
    } catch (final NoSuchElementException ignored) {
        // The cause exception will be thrown inside
        throw new HDIException("Failed to upload file to HDFS (Should Not Reach).");
    }
}
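The method resolves the Livy endpoint from a LivyCluster when one is available, opens a helper interactive session, and streams the jar through a Base64-encoded pipe to the cluster file system. A minimal calling sketch might look like the following; the cluster name, the jar path, and the simple log observer are illustrative assumptions, not part of the snippet above.

// Illustrative sketch only: assumes a cluster named "my-cluster" is registered
// and that only the new-style SparkLogLine channel is of interest.
IClusterDetail cluster = ClusterManagerEx.getInstance()
        .getClusterDetailByName("my-cluster")
        .orElseThrow(() -> new HDIException("Cluster not found"));

Observer<SparkLogLine> logObserver = new Observer<SparkLogLine>() {
    @Override public void onNext(SparkLogLine line) { System.out.println(line.getRawLog()); }
    @Override public void onError(Throwable err) { err.printStackTrace(); }
    @Override public void onCompleted() { }
};

// Upload a local artifact; the return value is the HDFS-side destination URI.
String uploadedUri = JobUtils.uploadFileToHDFSBase(cluster, "/path/to/app.jar", null, logObserver);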
Use of com.microsoft.azure.hdinsight.sdk.cluster.LivyCluster in project azure-tools-for-java by Microsoft.
The class SparkBatchJobDebuggerRunner, method getSparkJobUrl:
private String getSparkJobUrl(@NotNull final SparkSubmitModel submitModel) throws ExecutionException, IOException {
    final String clusterName = submitModel.getSubmissionParameter().getClusterName();
    final IClusterDetail clusterDetail = ClusterManagerEx.getInstance()
            .getClusterDetailByName(clusterName)
            .orElseThrow(() -> new ExecutionException("No cluster name matched selection: " + clusterName));
    final String sparkJobUrl = clusterDetail instanceof LivyCluster
            ? ((LivyCluster) clusterDetail).getLivyBatchUrl()
            : null;

    if (sparkJobUrl == null) {
        throw new IOException("Can't get livy connection URL. Cluster: " + clusterName);
    }

    return sparkJobUrl;
}
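The same instanceof check recurs throughout these usages: the Livy batch endpoint is only defined for clusters that implement LivyCluster, and every caller falls back to null otherwise. A small helper like the one below could centralize that pattern; the helper name and its Optional return type are assumptions for illustration, not part of the project.

// Hypothetical helper, not part of azure-tools-for-java: wraps the recurring
// "instanceof LivyCluster" check in an Optional.
static java.util.Optional<String> tryGetLivyBatchUrl(IClusterDetail clusterDetail) {
    return clusterDetail instanceof LivyCluster
            ? java.util.Optional.ofNullable(((LivyCluster) clusterDetail).getLivyBatchUrl())
            : java.util.Optional.empty();
}

// Usage mirroring getSparkJobUrl:
// String url = tryGetLivyBatchUrl(clusterDetail)
//         .orElseThrow(() -> new IOException("Can't get livy connection URL. Cluster: " + clusterName));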
Use of com.microsoft.azure.hdinsight.sdk.cluster.LivyCluster in project azure-tools-for-java by Microsoft.
The class SparkSubmissionToolWindowProcessor, method initialize:
public void initialize() {
    ApplicationManager.getApplication().assertIsDispatchThread();

    // TODO: Fix deprecated API "addUISettingsListener"
    UISettings.getInstance().addUISettingsListener(new UISettingsListener() {
        @Override
        public void uiSettingsChanged(final UISettings uiSettings) {
            synchronized (this) {
                for (final IHtmlElement htmlElement : cachedInfo) {
                    htmlElement.changeTheme();
                }
                setToolWindowText(parserHtmlElementList(cachedInfo));
            }
        }
    }, ApplicationManager.getApplication());

    fontFace = jEditorPanel.getFont().getFamily();

    final JPanel jPanel = new JPanel();
    jPanel.setLayout(new GridBagLayout());
    jEditorPanel.setMargin(JBUI.insetsLeft(10));

    final JBScrollPane scrollPane = new JBScrollPane(jEditorPanel);

    stopButton = new JButton(PluginUtil.getIcon(CommonConst.StopIconPath));
    stopButton.setDisabledIcon(PluginUtil.getIcon(CommonConst.StopDisableIconPath));
    stopButton.setEnabled(false);
    stopButton.setToolTipText("stop execution of current application");
    stopButton.addActionListener(e -> DefaultLoader.getIdeHelper().executeOnPooledThread(() -> {
        if (clusterDetail != null) {
            AppInsightsClient.create(HDInsightBundle.message("SparkSubmissionStopButtionClickEvent"), null);
            EventUtil.logEvent(EventType.info, HDINSIGHT, HDInsightBundle.message("SparkSubmissionStopButtionClickEvent"), null);

            try {
                final String livyUrl = clusterDetail instanceof LivyCluster
                        ? ((LivyCluster) clusterDetail).getLivyBatchUrl()
                        : null;
                final HttpResponse deleteResponse = SparkBatchSubmission.getInstance().killBatchJob(livyUrl, batchId);

                if (deleteResponse.getCode() == 201 || deleteResponse.getCode() == 200) {
                    jobStatusManager.setJobKilled();
                    setInfo("========================Stop application successfully" + "=======================");
                } else {
                    setError(String.format("Error : Failed to stop spark application. error code : %d, reason : %s.",
                            deleteResponse.getCode(), deleteResponse.getContent()));
                }
            } catch (final IOException exception) {
                setError("Error : Failed to stop spark application. exception : " + exception.toString());
            }
        }
    }));

    openSparkUIButton = new JButton(PluginUtil.getIcon(IconPathBuilder.custom(CommonConst.OpenSparkUIIconName).build()));
    openSparkUIButton.setDisabledIcon(PluginUtil.getIcon(CommonConst.OpenSparkUIDisableIconPath));
    openSparkUIButton.setEnabled(false);
    openSparkUIButton.setToolTipText("open the corresponding Spark UI page");
    openSparkUIButton.addActionListener(e -> {
        if (Desktop.isDesktopSupported()) {
            try {
                if (jobStatusManager.isApplicationGenerated()) {
                    final String connectionURL = clusterDetail.getConnectionUrl();
                    final String sparkApplicationUrl = clusterDetail.isEmulator()
                            ? String.format(yarnRunningUIEmulatorUrlFormat,
                                    ((EmulatorClusterDetail) clusterDetail).getSparkHistoryEndpoint(),
                                    jobStatusManager.getApplicationId())
                            : String.format(yarnRunningUIUrlFormat,
                                    connectionURL,
                                    jobStatusManager.getApplicationId());
                    Desktop.getDesktop().browse(new URI(sparkApplicationUrl));
                }
            } catch (final Exception browseException) {
                DefaultLoader.getUIHelper().showError("Failed to browse spark application yarn url", "Spark Submission");
            }
        }
    });

    final JPanel buttonPanel = new JPanel();
    buttonPanel.setLayout(new BoxLayout(buttonPanel, BoxLayout.Y_AXIS));
    buttonPanel.add(stopButton);
    buttonPanel.add(openSparkUIButton);

    final GridBagConstraints c00 = new GridBagConstraints();
    c00.fill = GridBagConstraints.VERTICAL;
    c00.weighty = 1;
    c00.gridx = 0;
    c00.gridy = 0;
    jPanel.add(buttonPanel, c00);

    final GridBagConstraints c10 = new GridBagConstraints();
    c10.fill = GridBagConstraints.BOTH;
    c10.weightx = 1;
    c10.weighty = 1;
    c10.gridx = 1;
    c10.gridy = 0;
    jPanel.add(scrollPane, c10);

    toolWindow.getComponent().add(jPanel);

    jEditorPanel.setEditable(false);
    jEditorPanel.setOpaque(false);
    jEditorPanel.setEditorKit(JEditorPane.createEditorKitForContentType("text/html"));
    jEditorPanel.addHyperlinkListener(e -> {
        if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
            if (Desktop.isDesktopSupported()) {
                try {
                    final String protocol = e.getURL().getProtocol();

                    if ("https".equals(protocol) || "http".equals(protocol)) {
                        Desktop.getDesktop().browse(e.getURL().toURI());
                    } else if ("file".equals(protocol)) {
                        final String path = e.getURL().getFile();
                        final File localFile = new File(path);
                        final File parentFile = localFile.getParentFile();

                        if (parentFile.exists() && parentFile.isDirectory()) {
                            Desktop.getDesktop().open(parentFile);
                        }
                    }
                } catch (final Exception exception) {
                    DefaultLoader.getUIHelper().showError(exception.getMessage(), "Open Local Folder Error");
                }
            }
        }
    });

    final PropertyChangeListener propertyChangeListener = new PropertyChangeListener() {
        @Override
        public void propertyChange(final PropertyChangeEvent evt) {
            if (ApplicationManager.getApplication().isDispatchThread()) {
                changeSupportHandler(evt);
            } else {
                try {
                    SwingUtilities.invokeAndWait(() -> changeSupportHandler(evt));
                } catch (final InterruptedException ignore) {
                } catch (final InvocationTargetException e) {
                    e.printStackTrace();
                }
            }
        }

        private void changeSupportHandler(final PropertyChangeEvent evt) {
            if ("toolWindowText".equals(evt.getPropertyName())) {
                jEditorPanel.setText(evt.getNewValue().toString());
            } else if ("isStopButtonEnable".equals(evt.getPropertyName())) {
                stopButton.setEnabled(Boolean.parseBoolean(evt.getNewValue().toString()));
            } else if ("isBrowserButtonEnable".equals(evt.getPropertyName())) {
                openSparkUIButton.setEnabled(Boolean.parseBoolean(evt.getNewValue().toString()));
            }
        }
    };

    jEditorPanel.addPropertyChangeListener(propertyChangeListener);
    changeSupport = new PropertyChangeSupport(jEditorPanel);
    changeSupport.addPropertyChangeListener(propertyChangeListener);
}
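The stop button's listener is the LivyCluster-relevant part of this method: it resolves the Livy batch endpoint for the selected cluster and asks the shared submission client to kill the running batch. Extracted on its own, that flow looks roughly like the sketch below; the method name stopBatchJob and the console logging are assumptions made for illustration, not members of SparkSubmissionToolWindowProcessor.

// Illustrative extraction of the stop-button flow.
private void stopBatchJob(IClusterDetail clusterDetail, int batchId) {
    // Only LivyCluster instances expose a batch endpoint.
    final String livyUrl = clusterDetail instanceof LivyCluster
            ? ((LivyCluster) clusterDetail).getLivyBatchUrl()
            : null;

    try {
        // Ask Livy to kill the running batch through the shared submission client.
        final HttpResponse deleteResponse = SparkBatchSubmission.getInstance().killBatchJob(livyUrl, batchId);

        if (deleteResponse.getCode() == 200 || deleteResponse.getCode() == 201) {
            System.out.println("Batch job " + batchId + " stopped.");
        } else {
            System.err.println("Failed to stop batch job: " + deleteResponse.getContent());
        }
    } catch (final IOException ex) {
        System.err.println("Failed to stop batch job: " + ex);
    }
}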
Use of com.microsoft.azure.hdinsight.sdk.cluster.LivyCluster in project azure-tools-for-java by Microsoft.
The class JobUtils, method authenticate:
public static AbstractMap.SimpleImmutableEntry<Integer, List<Header>> authenticate(IClusterDetail clusterDetail) throws HDIException, IOException {
    final SparkBatchSubmission submission = SparkBatchSubmission.getInstance();
    final String livyUrl = clusterDetail instanceof LivyCluster
            ? ((LivyCluster) clusterDetail).getLivyBatchUrl()
            : null;

    if (livyUrl == null) {
        throw new IOException("Can't get livy connection Url");
    }

    if (!StringUtils.isEmpty(clusterDetail.getHttpUserName()) && !StringUtils.isEmpty(clusterDetail.getHttpPassword())) {
        submission.setUsernamePasswordCredential(clusterDetail.getHttpUserName(), clusterDetail.getHttpPassword());
    }

    final com.microsoft.azure.hdinsight.sdk.common.HttpResponse response = clusterDetail instanceof MfaEspCluster
            ? submission.negotiateAuthMethodWithResp(livyUrl)
            : submission.getHttpResponseViaHead(livyUrl);

    final int statusCode = response.getCode();

    if (statusCode >= 200 && statusCode <= 302) {
        return new AbstractMap.SimpleImmutableEntry<>(statusCode, response.getHeaders());
    }

    throw new AuthenticationException(response.getContent(), statusCode);
}
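authenticate probes the Livy batch endpoint (a HEAD request for basic-auth clusters, an auth-method negotiation for MFA/ESP clusters) and returns the status code together with the response headers. A caller might use it as a pre-flight check before submitting a batch job, as in the sketch below; the variable name cluster and the console logging are illustrative assumptions.

// Illustrative pre-flight check before submitting a Spark batch job.
// Assumes 'cluster' is an IClusterDetail resolved elsewhere (e.g. via ClusterManagerEx).
try {
    final AbstractMap.SimpleImmutableEntry<Integer, List<Header>> result = JobUtils.authenticate(cluster);
    System.out.println("Livy endpoint reachable, status " + result.getKey()
            + ", " + result.getValue().size() + " response header(s)");
} catch (final AuthenticationException authEx) {
    // Rejected credentials or a failed auth-method negotiation.
    System.err.println("Authentication failed: " + authEx.getMessage());
} catch (final HDIException | IOException ex) {
    // No Livy URL for this cluster type, or a transport-level failure.
    System.err.println("Could not reach the Livy endpoint: " + ex.getMessage());
}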