Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in the project azure-tools-for-java by Microsoft.
Example from class JobUtils, method deployArtifactToADLS.
// Have to catch IOException in subscribe
/**
 * Uploads a local artifact file to Azure Data Lake Store, under
 * {@code <adlRootPath>/SparkSubmission/<date-path>/<artifact-name>}.
 *
 * @param artifactLocalPath path of the local artifact file to upload
 * @param adlRootPath       ADLS root URI; its host selects the store account
 * @param accessToken       OAuth access token used to authenticate the ADLS client
 * @return a cold {@link Observable} emitting the remote URI of the uploaded artifact
 */
@NotNull
public static Observable<String> deployArtifactToADLS(@NotNull String artifactLocalPath, @NotNull String adlRootPath, @NotNull String accessToken) {
    return Observable.fromCallable(() -> {
        final File localFile = new File(artifactLocalPath);
        // Destination: <adlRoot>/SparkSubmission/<formatted-date>/<file-name>
        // (getFormatPathByDate is a sibling helper — presumably a date-based folder name)
        final URI remote = URI.create(adlRootPath)
                .resolve("SparkSubmission/")
                .resolve(getFormatPathByDate() + "/")
                .resolve(localFile.getName());
        final ADLStoreClient storeClient = ADLStoreClient.createClient(remote.getHost(), accessToken);

        // FIX: the original never closed the FileInputStream (resource leak) and
        // called close() explicitly inside try-with-resources; both streams are
        // now managed by try-with-resources.
        try (final InputStream localInput = new FileInputStream(localFile);
             final OutputStream adlsOutputStream = storeClient.createFile(remote.getPath(), IfExists.OVERWRITE, "755", true)) {
            IOUtils.copyLarge(localInput, adlsOutputStream);
            adlsOutputStream.flush();
        }

        return remote.toString();
    });
}
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in the project azure-tools-for-java by Microsoft.
Example from class JobUtils, method getInformationFromYarnLogDom.
/**
 * Scrapes one log section of the given {@code type} (e.g. stderr/stdout) from a
 * YARN container log web page, handling both the running-app page layout and
 * the history-server layout.
 *
 * @param client  HtmlUnit web client used to fetch the page
 * @param baseUrl base URL of the container log page
 * @param type    log type to extract; also used as the {@code start} query parameter's page
 * @param start   byte offset to start reading the log from
 * @param size    number of bytes to read; values <= 0 mean "to the end"
 * @return the requested log section, or {@code ""} on any failure
 */
private static String getInformationFromYarnLogDom(@NotNull WebClient client, @NotNull String baseUrl, @NotNull String type, long start, int size) {
URI url = null;
try {
// NOTE(review): "&&end=%d" produces an empty parameter between the ampersands;
// servers usually tolerate this, but a single '&' was probably intended — confirm before changing.
url = new URI(baseUrl + "/").resolve(String.format("%s?start=%d", type, start) + (size <= 0 ? "" : String.format("&&end=%d", start + size)));
final HtmlPage htmlPage = client.getPage(url.toString());
// Logs live in the sibling element right after the "navcell" navigation cell.
final Iterator<DomElement> iterator = htmlPage.getElementById("navcell").getNextElementSibling().getChildElements().iterator();
// Maps log type name -> first <pre> content seen for that type (history-server layout).
final HashMap<String, String> logTypeMap = new HashMap<>();
// Holds the most recently seen "Log Type: X" heading until its <pre> is consumed.
final AtomicReference<String> logType = new AtomicReference<>();
String logs = "";
while (iterator.hasNext()) {
final DomElement node = iterator.next();
if (node instanceof HtmlParagraph) {
// In history server, need to read log type paragraph in page
final Pattern logTypePattern = Pattern.compile("Log Type:\\s+(\\S+)");
Optional.ofNullable(node.getFirstChild()).map(DomNode::getTextContent).map(String::trim).map(logTypePattern::matcher).filter(Matcher::matches).map(matcher -> matcher.group(1)).ifPresent(logType::set);
} else if (node instanceof HtmlPreformattedText) {
// In running, no log type paragraph in page
logs = Optional.ofNullable(node.getFirstChild()).map(DomNode::getTextContent).orElse("");
if (logType.get() != null) {
// Only get the first <pre>...</pre>
logTypeMap.put(logType.get(), logs);
logType.set(null);
}
}
}
// History-server page: look up by type; running page: fall back to the last <pre> seen.
return logTypeMap.getOrDefault(type, logs);
} catch (final FailingHttpStatusCodeException httpError) {
// the log is moving to job history server, just wait and retry.
if (httpError.getStatusCode() != HttpStatus.SC_FORBIDDEN) {
LOGGER.warn("The GET request to " + url + " responded error: " + httpError.getMessage());
}
} catch (final URISyntaxException e) {
LOGGER.error("baseUrl has syntax error: " + baseUrl);
} catch (final Exception e) {
LOGGER.warn("get Spark job log Error", e);
}
return "";
}
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in the project azure-tools-for-java by Microsoft.
Example from class SparkRestUtil, method getSparkApplications.
/**
 * Fetches the Spark applications for the given cluster from its REST endpoint,
 * keeping only applications that have at least one attempt with a non-null attempt id.
 *
 * @param clusterDetail the cluster to query
 * @return filtered list of applications (possibly empty, never {@code null})
 * @throws HDIException on REST/HDInsight failures
 * @throws IOException  on network failures
 */
@NotNull
public static List<Application> getSparkApplications(@NotNull IClusterDetail clusterDetail) throws HDIException, IOException {
    final HttpEntity entity = getSparkRestEntity(clusterDetail, "");
    final Optional<List<Application>> apps = ObjectConvertUtils.convertEntityToList(entity, Application.class);
    // spark job has at least one attempt; drop entries with no usable attempt id
    return apps.orElseGet(() -> RestUtil.getEmptyList(Application.class))
            .stream()
            .filter(app -> !app.getAttempts().isEmpty() && app.getAttempts().get(0).getAttemptId() != null)
            .collect(Collectors.toList());
}
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in the project azure-tools-for-java by Microsoft.
Example from class SparkRestUtil, method getSparkEventLogs.
/**
 * Downloads the Spark event-log zip for the given application, extracts the
 * last attempt's entry, and returns its {@code SparkListenerJobStart} events.
 *
 * @param key identifies the cluster and application id
 * @return the job-start event logs parsed from the last attempt's entry
 * @throws HDIException if no matching event log entry exists in the zip
 * @throws IOException  on download/extraction failures
 */
public static List<JobStartEventLog> getSparkEventLogs(@NotNull ApplicationKey key) throws HDIException, IOException {
    final String url = String.format("%s/logs", key.getAppId());
    final String eventLogsPath = String.format("%s/SparkEventLogs/%s/eventLogs.zip",
            HDInsightLoader.getHDInsightHelper().getPluginRootPath(), key.getAppId());
    final File file = new File(eventLogsPath);

    final HttpEntity entity = getSparkRestEntity(key.getClusterDetails(), url);
    // FIX: try-with-resources instead of IOUtils.closeQuietly, so the stream is
    // closed even when the copy throws.
    try (final InputStream inputStream = entity.getContent()) {
        FileUtils.copyInputStreamToFile(inputStream, file);
    }

    // FIX: the original never closed the ZipFile nor its entry stream (leaked file handles).
    try (final ZipFile zipFile = new ZipFile(file)) {
        final List<? extends ZipEntry> entities = Collections.list(zipFile.entries());
        // every application has an attempt in event log
        // and the entity name should be in formation "{appId}_{attemptId}"
        final String entityName = String.format("%s_%s", key.getAppId(), entities.size());
        final ZipEntry lastEntity = zipFile.getEntry(entityName);
        if (lastEntity == null) {
            throw new HDIException(String.format("No Spark event log entity found for app: %s", key.getAppId()));
        }

        final String entityContent;
        try (final InputStream zipFileInputStream = zipFile.getInputStream(lastEntity)) {
            // FIX: StandardCharsets constant instead of Charset.forName("utf-8")
            entityContent = IOUtils.toString(zipFileInputStream, java.nio.charset.StandardCharsets.UTF_8);
        }

        // Each line is one JSON event; keep only SparkListenerJobStart events.
        return Arrays.stream(entityContent.split("\n"))
                .filter(line -> {
                    final JSONObject jsonObject = new JSONObject(line);
                    return "SparkListenerJobStart".equalsIgnoreCase(jsonObject.getString("Event"));
                })
                .map(oneLine -> ObjectConvertUtils.convertToObjectQuietly(oneLine, JobStartEventLog.class))
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
    }
}
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in the project azure-tools-for-java by Microsoft.
Example from class ClusterOperationImpl, method requestWithToken.
/**
 * Executes the given request callback with an access token acquired for the
 * specified tenant.
 *
 * NOTE(review): despite the {@code @NotNull} return annotation, this method
 * returns {@code null} when no user is signed in (no AzureManager available);
 * behavior is preserved here — confirm caller expectations before tightening.
 *
 * @param tenantId        tenant to acquire the access token for
 * @param requestCallback the request to execute with the token
 * @return the callback's result, or {@code null} when not signed in
 * @throws Throwable whatever the token acquisition or callback throws
 */
@NotNull
public <T> T requestWithToken(@NotNull String tenantId, @NotNull final RequestCallback<T> requestCallback) throws Throwable {
    final AzureManager manager = AuthMethodManager.getInstance().getAzureManager();

    // Not signed in: no token can be acquired.
    if (manager == null) {
        return null;
    }

    return requestCallback.execute(manager.getAccessToken(tenantId));
}
Aggregations