Usage of com.microsoft.azure.hdinsight.sdk.rest.spark.Application in the azure-tools-for-java project by Microsoft:
the handle method of the SparkJobHttpHandler class.
/**
 * Serves the Spark job-view REST endpoints, writing a JSON response for the
 * requested resource (applications list, application graph, stage/executor/task
 * summaries) of the cluster and application identified by the request.
 *
 * @param httpExchange the HTTP exchange to read the request from and write the response to
 * @throws IOException if JSON serialization yields no result or writing the response fails
 */
@Override
public void handle(HttpExchange httpExchange) throws IOException {
    // The job-view page is served from a different origin, so CORS must be allowed.
    httpExchange.getResponseHeaders().add("Access-Control-Allow-Origin", "*");
    JobRequestDetails requestDetail = JobRequestDetails.getJobRequestDetail(httpExchange);
    try {
        String path = requestDetail.getRequestPath();
        // Key identifying the (cluster, application) pair in the job-view cache.
        ApplicationKey key = new ApplicationKey(requestDetail.getCluster(), requestDetail.getAppId());
        if (path.equalsIgnoreCase("/applications/") && requestDetail.getAppId().equalsIgnoreCase("0")) {
            try {
                List<Application> applications = SparkRestUtil.getSparkApplications(requestDetail.getCluster());
                sendJsonResponse(httpExchange, applications);
            } catch (HDIException e) {
                DefaultLoader.getUIHelper().logError("get applications list error", e);
                // BUGFIX: previously no response was written here, leaving the client request hanging.
                JobUtils.setResponse(httpExchange, "get applications list error", 500);
            }
        } else if (path.contains("application_graph")) {
            List<Job> jobs = JobViewCacheManager.getJob(key);
            App app = JobViewCacheManager.getYarnApp(key);
            List<JobStartEventLog> jobStartEventLogs = JobViewCacheManager.getJobStartEventLogs(key);
            sendJsonResponse(httpExchange, new YarnAppWithJobs(app, jobs, jobStartEventLogs));
        } else if (path.contains("stages_summary")) {
            sendJsonResponse(httpExchange, JobViewCacheManager.getStages(key));
        } else if (path.contains("executors_summary")) {
            sendJsonResponse(httpExchange, JobViewCacheManager.getExecutors(key));
        } else if (path.contains("tasks_summary")) {
            sendJsonResponse(httpExchange, JobViewCacheManager.getTasks(key));
        }
    } catch (ExecutionException e) {
        JobUtils.setResponse(httpExchange, e.getMessage(), 500);
    }
}

/**
 * Serializes {@code value} to JSON and writes it as the HTTP response.
 *
 * @throws IOException if serialization yields no result
 */
private static <T> void sendJsonResponse(HttpExchange httpExchange, T value) throws IOException {
    Optional<String> responseString = ObjectConvertUtils.convertObjectToJsonString(value);
    JobUtils.setResponse(httpExchange, responseString.orElseThrow(IOException::new));
}
Usage of com.microsoft.azure.hdinsight.sdk.rest.spark.Application in the azure-tools-for-java project by Microsoft:
the handle method of the ActionHttpHandler class.
/**
 * Handles "open UI" actions from the job view: opens the YARN history UI or the
 * Spark history UI in a browser for the application named in the request.
 *
 * @param httpExchange the HTTP exchange to read the request from and write the response to
 * @throws IOException if writing the response fails
 */
@Override
public void handle(HttpExchange httpExchange) throws IOException {
    // The job-view page is served from a different origin, so CORS must be allowed.
    httpExchange.getResponseHeaders().add("Access-Control-Allow-Origin", "*");
    JobRequestDetails requestDetail = JobRequestDetails.getJobRequestDetail(httpExchange);
    final String path = requestDetail.getRequestPath();
    final String clusterConnectString = requestDetail.getCluster().getConnectionUrl();
    if (path.contains("yarnui")) {
        JobUtils.openYarnUIHistory(clusterConnectString, requestDetail.getAppId());
        // BUGFIX: write a response so the client request does not hang (mirrors the sparkui branch).
        JobUtils.setResponse(httpExchange, "open browser successfully");
    } else if (path.contains("sparkui")) {
        try {
            // The Spark UI URL needs the last attempt id, which only the cached application knows.
            Application application = JobViewCacheManager.getSingleSparkApplication(
                    new ApplicationKey(requestDetail.getCluster(), requestDetail.getAppId()));
            JobUtils.openSparkUIHistory(clusterConnectString, requestDetail.getAppId(), application.getLastAttemptId());
            JobUtils.setResponse(httpExchange, "open browser successfully");
        } catch (ExecutionException e) {
            JobUtils.setResponse(httpExchange, "open browser error", 500);
            DefaultLoader.getUIHelper().showError(e.getMessage(), "open browser error");
        }
    }
}
Usage of com.microsoft.azure.hdinsight.sdk.rest.spark.Application in the azure-tools-for-java project by Microsoft:
the getSparkApplications method of the SparkRestUtil class.
/**
 * Fetches the Spark applications of the given cluster from its REST endpoint,
 * keeping only applications that have at least one attempt with a non-null
 * attempt id (a Spark job always has at least one attempt).
 *
 * @param clusterDetail the cluster to query
 * @return the filtered applications; empty if the response could not be converted
 * @throws HDIException on REST-call failure
 * @throws IOException on I/O failure
 */
@NotNull
public static List<Application> getSparkApplications(@NotNull IClusterDetail clusterDetail) throws HDIException, IOException {
    final HttpEntity entity = getSparkRestEntity(clusterDetail, "");
    return ObjectConvertUtils.convertEntityToList(entity, Application.class)
            .orElse(RestUtil.getEmptyList(Application.class))
            .stream()
            .filter(app -> !app.getAttempts().isEmpty())
            .filter(app -> app.getAttempts().get(0).getAttemptId() != null)
            .collect(Collectors.toList());
}
Usage of com.microsoft.azure.hdinsight.sdk.rest.spark.Application in the azure-tools-for-java project by Microsoft:
the getSparkEventLogs method of the SparkRestUtil class.
/**
 * Downloads the event-log zip of the given Spark application, extracts the log of
 * the latest attempt, and returns the "SparkListenerJobStart" events found in it.
 *
 * @param key identifies the cluster and application whose event logs are fetched
 * @return the parsed job-start events of the latest attempt (unparseable lines are dropped)
 * @throws HDIException if the REST call fails or the expected zip entry is missing
 * @throws IOException on I/O failure while downloading or reading the zip
 */
public static List<JobStartEventLog> getSparkEventLogs(@NotNull ApplicationKey key) throws HDIException, IOException {
    String url = String.format("%s/logs", key.getAppId());
    String eventLogsPath = String.format("%s/SparkEventLogs/%s/eventLogs.zip",
            HDInsightLoader.getHDInsightHelper().getPluginRootPath(), key.getAppId());
    File file = new File(eventLogsPath);
    HttpEntity entity = getSparkRestEntity(key.getClusterDetails(), url);
    // BUGFIX: try-with-resources — the original only closed this stream on the happy path.
    try (InputStream inputStream = entity.getContent()) {
        FileUtils.copyInputStreamToFile(inputStream, file);
    }
    // BUGFIX: the ZipFile and its entry stream were never closed, leaking file handles.
    try (ZipFile zipFile = new ZipFile(file)) {
        List<? extends ZipEntry> entities = Collections.list(zipFile.entries());
        // Every application has at least one attempt in the event log, and entries are
        // named "{appId}_{attemptId}"; the entry count is used as the latest attempt id.
        // NOTE(review): this assumes attempt ids are 1..N with no gaps — TODO confirm.
        String entityName = String.format("%s_%s", key.getAppId(), entities.size());
        ZipEntry lastEntity = zipFile.getEntry(entityName);
        if (lastEntity == null) {
            throw new HDIException(String.format("No Spark event log entity found for app: %s", key.getAppId()));
        }
        String entityContent;
        try (InputStream zipFileInputStream = zipFile.getInputStream(lastEntity)) {
            entityContent = IOUtils.toString(zipFileInputStream, Charset.forName("utf-8"));
        }
        // Each line is one JSON event; keep only the SparkListenerJobStart events.
        String[] lines = entityContent.split("\n");
        return Arrays.stream(lines)
                .filter(line -> {
                    JSONObject jsonObject = new JSONObject(line);
                    String eventName = jsonObject.getString("Event");
                    return eventName.equalsIgnoreCase("SparkListenerJobStart");
                })
                .map(oneLine -> ObjectConvertUtils.convertToObjectQuietly(oneLine, JobStartEventLog.class))
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
    }
}
Aggregations