
Example 1 with SparkSubmitResponse

Use of com.microsoft.azure.hdinsight.spark.common.SparkSubmitResponse in the project azure-tools-for-java by Microsoft.

From the class SparkSubmitModel, method tryToCreateBatchSparkJob:

private void tryToCreateBatchSparkJob(@NotNull final IClusterDetail selectedClusterDetail) throws HDIException, IOException {
    SparkBatchSubmission.getInstance().setCredentialsProvider(selectedClusterDetail.getHttpUserName(), selectedClusterDetail.getHttpPassword());
    HttpResponse response = SparkBatchSubmission.getInstance().createBatchSparkJob(SparkSubmitHelper.getLivyConnectionURL(selectedClusterDetail), submissionParameter);
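    // A 201 Created (or 200) response means the Livy batch was accepted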
    if (response.getCode() == 201 || response.getCode() == 200) {
        HDInsightUtil.showInfoOnSubmissionMessageWindow("Info : Submitted to the Spark cluster successfully.");
        postEventProperty.put("IsSubmitSucceed", "true");
        String jobLink = String.format("%s/sparkhistory", selectedClusterDetail.getConnectionUrl());
        HDInsightUtil.setHyperLinkWithText("See spark job view from ", jobLink, jobLink);
        @SuppressWarnings("serial") final SparkSubmitResponse sparkSubmitResponse = new Gson().fromJson(response.getMessage(), new TypeToken<SparkSubmitResponse>() {
        }.getType());
        // Set submitted spark application id and http request info for stopping running application
        Display.getDefault().syncExec(new Runnable() {

            @Override
            public void run() {
                SparkSubmissionToolWindowView view = HDInsightUtil.getSparkSubmissionToolWindowView();
                view.setSparkApplicationStopInfo(selectedClusterDetail.getConnectionUrl(), sparkSubmitResponse.getId());
                view.setStopButtonState(true);
                view.getJobStatusManager().resetJobStateManager();
            }
        });
        SparkSubmitHelper.getInstance().printRunningLogStreamingly(sparkSubmitResponse.getId(), selectedClusterDetail, postEventProperty);
    } else {
        HDInsightUtil.showErrorMessageOnSubmissionMessageWindow(String.format("Error : Failed to submit to spark cluster. error code : %d, reason :  %s.", response.getCode(), response.getContent()));
        postEventProperty.put("IsSubmitSucceed", "false");
        postEventProperty.put("SubmitFailedReason", response.getContent());
        AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, null, postEventProperty);
    }
}
Also used: SparkSubmitResponse (com.microsoft.azure.hdinsight.spark.common.SparkSubmitResponse), TypeToken (com.google.common.reflect.TypeToken), HttpResponse (com.microsoft.azure.hdinsight.sdk.common.HttpResponse), Gson (com.google.gson.Gson), SparkSubmissionToolWindowView (com.microsoft.azuretools.hdinsight.SparkSubmissionToolWindowView)
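
For readers who want to reproduce the call outside the plugin: the submission above amounts to an HTTP POST against the cluster's /livy/batches endpoint, with the JSON response mapped onto SparkSubmitResponse by Gson. The following is a minimal, self-contained sketch of that round trip using the JDK HttpClient (Java 11+) instead of the project's SparkBatchSubmission helper; the cluster URL, credentials, payload fields, and the LivyBatchResponse stand-in class are illustrative assumptions, not part of the original code.

import com.google.gson.Gson;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class LivyBatchSubmitSketch {

    // Stand-in for SparkSubmitResponse: only the fields the examples actually read.
    static class LivyBatchResponse {
        int id;
        String state;
    }

    public static void main(String[] args) throws Exception {
        String connectionUrl = "https://mycluster.azurehdinsight.net"; // placeholder cluster URL
        String payload = "{\"file\":\"wasbs:///example/jars/app.jar\",\"className\":\"com.example.Main\"}"; // illustrative Livy batch payload

        String basicAuth = Base64.getEncoder()
                .encodeToString("httpUser:httpPassword".getBytes(StandardCharsets.UTF_8)); // placeholder credentials

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(connectionUrl + "/livy/batches"))
                .header("Content-Type", "application/json")
                .header("Authorization", "Basic " + basicAuth)
                // Required when Livy's CSRF protection is enabled
                .header("X-Requested-By", "httpUser")
                .POST(HttpRequest.BodyPublishers.ofString(payload))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // As in the snippet above, 201 Created (or 200) means the batch was accepted.
        if (response.statusCode() == 201 || response.statusCode() == 200) {
            LivyBatchResponse batch = new Gson().fromJson(response.body(), LivyBatchResponse.class);
            System.out.println("Submitted Livy batch id=" + batch.id + ", state=" + batch.state);
        } else {
            System.err.println("Submission failed: " + response.statusCode() + " " + response.body());
        }
    }
}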

Example 2 with SparkSubmitResponse

Use of com.microsoft.azure.hdinsight.spark.common.SparkSubmitResponse in the project azure-tools-for-java by Microsoft.

From the class SparkSubmitHelper, method printRunningLogStreamingly:

public void printRunningLogStreamingly(/*Project project,*/ int id, IClusterDetail clusterDetail, Map<String, String> postEventProperty) throws IOException {
    try {
        boolean isFailedJob = false;
        boolean isKilledJob = false;
        int from_index = 0;
        int pre_index;
        int times = 0;
        HDInsightUtil.getSparkSubmissionToolWindowView().setInfo("======================Begin printing out spark job log.=======================");
        while (true) {
            pre_index = from_index;
            if (HDInsightUtil.getSparkSubmissionToolWindowView().getJobStatusManager().isJobKilled()) {
                isKilledJob = true;
                break;
            }
            from_index = printoutJobLog(/*project, */ id, from_index, clusterDetail);
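            // Ask Livy for the current batch state so terminal states can be detected below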
            HttpResponse statusHttpResponse = SparkBatchSubmission.getInstance().getBatchSparkJobStatus(clusterDetail.getConnectionUrl() + "/livy/batches", id);
            SparkSubmitResponse status = new Gson().fromJson(statusHttpResponse.getMessage(), new TypeToken<SparkSubmitResponse>() {
            }.getType());
            // Only when the log did not advance between two HTTP requests do we check whether the job has reached a terminal state
            if (from_index == pre_index) {
                String finalStatus = status.getState().toLowerCase();
                if (finalStatus.equals("error") || finalStatus.equals("success") || finalStatus.equals("dead")) {
                    if (finalStatus.equals("error") || finalStatus.equals("dead")) {
                        isFailedJob = true;
                    }
                    if (!HDInsightUtil.getSparkSubmissionToolWindowView().getJobStatusManager().isJobKilled()) {
                        printoutJobLog(id, from_index, clusterDetail);
                        HDInsightUtil.getSparkSubmissionToolWindowView().setInfo("======================Finish printing out spark job log.=======================");
                    } else {
                        isKilledJob = true;
                    }
                    break;
                }
            }
            Thread.sleep(getIntervalTime(times));
            times++;
        }
        if (isKilledJob) {
            postEventProperty.put("IsKilled", "true");
            AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, Activator.getDefault().getBundle().getVersion().toString(), postEventProperty);
            return;
        }
        if (isFailedJob) {
            postEventProperty.put("IsRunningSucceed", "false");
            HDInsightUtil.getSparkSubmissionToolWindowView().setError("Error : Your submitted job run failed");
        } else {
            postEventProperty.put("IsRunningSucceed", "true");
            HDInsightUtil.getSparkSubmissionToolWindowView().setInfo("The Spark application completed successfully");
        }
        AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, Activator.getDefault().getBundle().getVersion().toString(), postEventProperty);
    } catch (Exception e) {
        if (!HDInsightUtil.getSparkSubmissionToolWindowView().getJobStatusManager().isJobKilled()) {
            HDInsightUtil.getSparkSubmissionToolWindowView().setError("Error : Failed to get the running log. Exception : " + e.toString());
        } else {
            postEventProperty.put("IsKilled", "true");
        }
        AppInsightsClient.create(Messages.SparkSubmissionButtonClickEvent, Activator.getDefault().getBundle().getVersion().toString(), postEventProperty);
    }
}
Also used: SparkSubmitResponse (com.microsoft.azure.hdinsight.spark.common.SparkSubmitResponse), TypeToken (com.google.common.reflect.TypeToken), HttpResponse (com.microsoft.azure.hdinsight.sdk.common.HttpResponse), Gson (com.google.gson.Gson), SftpException (com.jcraft.jsch.SftpException), HDIException (com.microsoft.azure.hdinsight.sdk.common.HDIException), IOException (java.io.IOException), AzureCmdException (com.microsoft.azuretools.azurecommons.helpers.AzureCmdException), JSchException (com.jcraft.jsch.JSchException)
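
The loop above polls Livy for the batch state, treats "error", "success", and "dead" as terminal, and sleeps for getIntervalTime(times) between requests (that helper's schedule is not shown in this example). Below is a stripped-down sketch of the same polling pattern; the step-up back-off and the fake state supplier are illustrative assumptions standing in for getIntervalTime and the real getBatchSparkJobStatus call.

import java.util.Set;
import java.util.function.IntFunction;

public class BatchStatePollerSketch {

    // Terminal Livy batch states checked by the example above.
    private static final Set<String> TERMINAL_STATES = Set.of("error", "success", "dead");

    // Illustrative back-off: 1 s for the first few polls, then 5 s, capped at 30 s.
    static long intervalMillis(int attempt) {
        if (attempt < 5) return 1_000L;
        if (attempt < 20) return 5_000L;
        return 30_000L;
    }

    // Polls until the state supplier reports a terminal state; the supplier stands in
    // for the getBatchSparkJobStatus call plus the Gson parse in the example above.
    static String pollUntilDone(IntFunction<String> stateForAttempt) throws InterruptedException {
        int attempt = 0;
        while (true) {
            String state = stateForAttempt.apply(attempt).toLowerCase();
            if (TERMINAL_STATES.contains(state)) {
                return state;
            }
            Thread.sleep(intervalMillis(attempt));
            attempt++;
        }
    }

    public static void main(String[] args) throws InterruptedException {
        // Fake state sequence: "starting" twice, then "running", then "success".
        String[] states = {"starting", "starting", "running", "success"};
        String finalState = pollUntilDone(i -> states[Math.min(i, states.length - 1)]);
        System.out.println("Job finished with state: " + finalState);
    }
}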

Aggregations

TypeToken (com.google.common.reflect.TypeToken): 2 uses
Gson (com.google.gson.Gson): 2 uses
HttpResponse (com.microsoft.azure.hdinsight.sdk.common.HttpResponse): 2 uses
SparkSubmitResponse (com.microsoft.azure.hdinsight.spark.common.SparkSubmitResponse): 2 uses
JSchException (com.jcraft.jsch.JSchException): 1 use
SftpException (com.jcraft.jsch.SftpException): 1 use
HDIException (com.microsoft.azure.hdinsight.sdk.common.HDIException): 1 use
AzureCmdException (com.microsoft.azuretools.azurecommons.helpers.AzureCmdException): 1 use
SparkSubmissionToolWindowView (com.microsoft.azuretools.hdinsight.SparkSubmissionToolWindowView): 1 use
IOException (java.io.IOException): 1 use