Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.
The class SparkBatchJobRunner, method buildSparkBatchJob.
@Override
@NotNull
public Observable<ISparkBatchJob> buildSparkBatchJob(@NotNull SparkSubmitModel submitModel) {
    return Observable.fromCallable(() -> {
        final String clusterName = submitModel.getSubmissionParameter().getClusterName();

        updateCurrentBackgroundableTaskIndicator(progressIndicator -> {
            progressIndicator.setFraction(0.2f);
            progressIndicator.setText("Get Spark cluster [" + clusterName + "] information from subscriptions");
        });

        final IClusterDetail clusterDetail = ClusterManagerEx.getInstance()
                .getClusterDetailByName(clusterName)
                .orElseThrow(() -> new ExecutionException("Can't find cluster named " + clusterName));

        updateCurrentBackgroundableTaskIndicator(progressIndicator -> {
            progressIndicator.setFraction(0.7f);
            progressIndicator.setText("Get the storage configuration for artifacts deployment");
        });

        final Deployable jobDeploy = SparkBatchJobDeployFactory.getInstance()
                .buildSparkBatchJobDeploy(submitModel, clusterDetail);
        final SparkSubmissionParameter submissionParameter = updateStorageConfigForSubmissionParameter(submitModel);

        updateCurrentBackgroundableTaskIndicator(progressIndicator -> {
            progressIndicator.setFraction(1.0f);
            progressIndicator.setText("All checks are passed.");
        });

        return new SparkBatchJob(clusterDetail, submissionParameter, getClusterSubmission(clusterDetail), jobDeploy);
    });
}
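Note that buildSparkBatchJob is lazy: the cluster lookup, deployment preparation, and progress updates only run once a caller subscribes to the returned Observable. A minimal caller sketch, assuming RxJava 1.x; the runner and submitModel instances and the logging calls are hypothetical, not taken from the project:

    // Hypothetical usage sketch; runner and submitModel are assumed to be configured already.
    runner.buildSparkBatchJob(submitModel)
            .subscribeOn(rx.schedulers.Schedulers.io()) // keep the cluster lookup off the UI thread
            .subscribe(
                    job -> System.out.println("Spark batch job prepared for submission"),
                    err -> System.err.println("Preparation failed: " + err.getMessage()));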
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.
The class LivySparkBatchJobRunConfiguration, method getState.
@Nullable
@Override
public RunProfileState getState(@NotNull final Executor executor,
                                @NotNull final ExecutionEnvironment executionEnvironment) throws ExecutionException {
    Operation operation = executionEnvironment.getUserData(TelemetryKeys.OPERATION);
    final String debugTarget = executionEnvironment.getUserData(SparkBatchJobDebuggerRunner.DEBUG_TARGET_KEY);
    final boolean isExecutor = StringUtils.equals(debugTarget, SparkBatchJobDebuggerRunner.DEBUG_EXECUTOR);
    RunProfileStateWithAppInsightsEvent state = null;
    final Artifact selectedArtifact = ArtifactUtil.getArtifactWithOutputPaths(getProject())
            .stream()
            .filter(artifact -> artifact.getName().equals(getSubmitModel().getArtifactName()))
            .findFirst()
            .orElse(null);
    if (executor instanceof SparkBatchJobDebugExecutor) {
        final ISparkBatchJob remoteDebugBatch = sparkRemoteBatch;
        if (!(remoteDebugBatch instanceof SparkBatchRemoteDebugJob)) {
            throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
        }
        if (isExecutor) {
            setRunMode(RunMode.REMOTE_DEBUG_EXECUTOR);
            state = new SparkBatchRemoteDebugExecutorState(getModel().getSubmitModel(), operation, remoteDebugBatch);
        } else {
            if (selectedArtifact != null) {
                BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
            }
            setRunMode(RunMode.REMOTE);
            state = new SparkBatchRemoteDebugState(getModel().getSubmitModel(), operation, sparkRemoteBatch);
        }
    } else if (executor instanceof SparkBatchJobRunExecutor) {
        final ISparkBatchJob remoteBatch = sparkRemoteBatch;
        if (remoteBatch == null) {
            throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
        }
        if (selectedArtifact != null) {
            BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
        }
        setRunMode(RunMode.REMOTE);
        state = new SparkBatchRemoteRunState(getModel().getSubmitModel(), operation, remoteBatch);
    } else if (executor instanceof DefaultDebugExecutor) {
        setRunMode(RunMode.LOCAL);
        if (operation == null) {
            operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT,
                                                         TelemetryConstants.DEBUG_LOCAL_SPARK_JOB);
            operation.start();
        }
        state = new SparkBatchLocalDebugState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
    } else if (executor instanceof DefaultRunExecutor) {
        setRunMode(RunMode.LOCAL);
        if (operation == null) {
            operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT,
                                                         TelemetryConstants.RUN_LOCAL_SPARK_JOB);
            operation.start();
        }
        state = new SparkBatchLocalRunState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
    }
    if (state != null) {
        final Map<String, String> props = getActionProperties().entrySet().stream()
                .collect(Collectors.toMap(
                        (Map.Entry<Object, Object> entry) -> entry.getKey() == null ? null : entry.getKey().toString(),
                        (Map.Entry<Object, Object> entry) -> entry.getValue() == null ? "" : entry.getValue().toString()));
        final String configurationId = Optional.ofNullable(executionEnvironment.getRunnerAndConfigurationSettings())
                .map(settings -> settings.getType().getId())
                .orElse("");
        props.put("configurationId", configurationId);
        state.createAppInsightEvent(executor, props);
        EventUtil.logEvent(EventType.info, operation, props);
        // Clear the action properties
        getActionProperties().clear();
    }
    return state;
}
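The dense stream near the end only converts the raw Map<Object, Object> of action properties into a Map<String, String>, mapping null keys to null and null values to the empty string. An equivalent standalone sketch with the same null handling; the helper name toStringMap is hypothetical:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical helper: same null handling as the Collectors.toMap call in getState() above.
    static Map<String, String> toStringMap(Map<Object, Object> raw) {
        final Map<String, String> props = new HashMap<>();
        for (final Map.Entry<Object, Object> entry : raw.entrySet()) {
            final String key = entry.getKey() == null ? null : entry.getKey().toString();
            final String value = entry.getValue() == null ? "" : entry.getValue().toString();
            props.put(key, value);
        }
        return props;
    }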
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.
The class ADLSGen2FileSystem, method listFiles.
@NotNull
public VirtualFile[] listFiles(AdlsGen2VirtualFile vf) {
    List<AdlsGen2VirtualFile> childrenList = new ArrayList<>();
    if (vf.isDirectory()) {
        // sample fileSystemRootPath: https://accountName.dfs.core.windows.net/fileSystem/
        String fileSystemRootPath = rootPathUri.resolve("/").getUrl().toString();
        // sample directoryParam: sub/path/to
        String directoryParam = vf.getAbfsUri().getDirectoryParam();
        childrenList = this.op.list(fileSystemRootPath, directoryParam)
                .map(remoteFile -> new AdlsGen2VirtualFile(
                        (AbfsUri) AbfsUri.parse(fileSystemRootPath)
                                .resolveAsRoot(AzureStorageUri.encodeAndNormalizePath(remoteFile.getName())),
                        remoteFile.isDirectory(), this))
                .doOnNext(file -> file.setParent(vf))
                .onErrorResumeNext(err -> {
                    String errorMessage = "Failed to list folders and files with error " + err.getMessage() + ". ";
                    if (err instanceof ForbiddenHttpErrorStatus) {
                        errorMessage += ADLSGen2Deploy.getForbiddenErrorHints(vf.toString());
                    }
                    return Observable.error(new IOException(errorMessage));
                })
                .toList()
                .toBlocking()
                .lastOrDefault(new ArrayList<>());
    }
    return childrenList.toArray(new VirtualFile[0]);
}
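Because listFiles blocks (toBlocking) until the Gen2 REST listing completes, walking a directory tree is a plain recursion over the returned array. A hypothetical sketch, assuming Java 11+ for String.repeat; the walk helper is illustrative, not part of the project:

    // Hypothetical sketch: recursively print the ADLS Gen2 file tree rooted at vf.
    static void walk(ADLSGen2FileSystem fs, AdlsGen2VirtualFile vf, int depth) {
        for (final VirtualFile child : fs.listFiles(vf)) {
            System.out.println("  ".repeat(depth) + child.getName());
            if (child.isDirectory() && child instanceof AdlsGen2VirtualFile) {
                walk(fs, (AdlsGen2VirtualFile) child, depth + 1);
            }
        }
    }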
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.
The class ServerNameTextField, method doValidateValue.
@Override
@NotNull
public AzureValidationInfo doValidateValue() {
    if (StringUtils.isBlank(subscriptionId)) {
        return AzureValidationInfo.UNINITIALIZED;
    }
    final AzureValidationInfo info = super.doValidateValue();
    if (!AzureValidationInfo.OK.equals(info)) {
        return info;
    }
    final String value = this.getValue();
    // validate length
    if (StringUtils.length(value) < minLength || StringUtils.length(value) > maxLength) {
        return AzureValidationInfo.builder().input(this)
                .message(String.format("Server name must be at least %s characters and at most %s characters.",
                        minLength, maxLength))
                .type(AzureValidationInfo.Type.ERROR).build();
    }
    // validate special character
    if (!PATTERN.matcher(value).matches()) {
        return AzureValidationInfo.builder().input(this)
                .message("Your server name can contain only lowercase letters, numbers, and '-', "
                        + "but can't start or end with '-'.")
                .type(AzureValidationInfo.Type.ERROR).build();
    }
    // validate availability
    if (Objects.nonNull(validateFunction)) {
        return validateFunction.apply(this);
    }
    return AzureValidationInfo.OK;
}
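The PATTERN constant is not shown in this excerpt. Judging only by the error message it guards, a compatible definition would be the following; this is an assumption for illustration, not the project's actual constant:

    import java.util.regex.Pattern;

    // Assumed definition: lowercase letters, digits, and '-', not starting or ending with '-'.
    private static final Pattern PATTERN = Pattern.compile("^[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$");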
Use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.
The class StorageClientSDKManager, method getBlobItems.
@NotNull
public List<BlobItem> getBlobItems(@NotNull String connectionString, @NotNull BlobDirectory blobDirectory)
        throws AzureCmdException {
    List<BlobItem> biList = new ArrayList<BlobItem>();
    try {
        CloudBlobClient client = getCloudBlobClient(connectionString);
        String containerName = blobDirectory.getContainerName();
        String delimiter = client.getDirectoryDelimiter();
        CloudBlobContainer container = client.getContainerReference(containerName);
        CloudBlobDirectory directory = container.getDirectoryReference(blobDirectory.getPath());
        for (ListBlobItem item : directory.listBlobs()) {
            String uri = item.getUri() != null ? item.getUri().toString() : "";
            if (item instanceof CloudBlobDirectory) {
                CloudBlobDirectory subDirectory = (CloudBlobDirectory) item;
                String name = extractBlobItemName(subDirectory.getPrefix(), delimiter);
                String path = Strings.nullToEmpty(subDirectory.getPrefix());
                biList.add(new BlobDirectory(name, uri, containerName, path));
            } else if (item instanceof CloudBlob) {
                CloudBlob blob = (CloudBlob) item;
                String name = extractBlobItemName(blob.getName(), delimiter);
                String path = Strings.nullToEmpty(blob.getName());
                String type = "";
                String cacheControlHeader = "";
                String contentEncoding = "";
                String contentLanguage = "";
                String contentType = "";
                String contentMD5Header = "";
                String eTag = "";
                Calendar lastModified = new GregorianCalendar();
                long size = 0;
                BlobProperties properties = blob.getProperties();
                if (properties != null) {
                    if (properties.getBlobType() != null) {
                        type = properties.getBlobType().toString();
                    }
                    cacheControlHeader = Strings.nullToEmpty(properties.getCacheControl());
                    contentEncoding = Strings.nullToEmpty(properties.getContentEncoding());
                    contentLanguage = Strings.nullToEmpty(properties.getContentLanguage());
                    contentType = Strings.nullToEmpty(properties.getContentType());
                    contentMD5Header = Strings.nullToEmpty(properties.getContentMD5());
                    eTag = Strings.nullToEmpty(properties.getEtag());
                    if (properties.getLastModified() != null) {
                        lastModified.setTime(properties.getLastModified());
                    }
                    size = properties.getLength();
                }
                biList.add(new BlobFile(name, uri, containerName, path, type, cacheControlHeader, contentEncoding,
                        contentLanguage, contentType, contentMD5Header, eTag, lastModified, size));
            }
        }
        return biList;
    } catch (Throwable t) {
        throw new AzureCmdException("Error retrieving the Blob Item list", t);
    }
}
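A single call returns one directory level, with subdirectories surfaced as BlobDirectory entries and blobs as fully populated BlobFile entries. A minimal caller sketch, assuming manager is a StorageClientSDKManager instance and that BlobItem exposes getName(); the variables are hypothetical:

    // Hypothetical usage: list one level of a blob directory and print each entry.
    final List<BlobItem> items = manager.getBlobItems(connectionString, blobDirectory);
    for (final BlobItem item : items) {
        final String kind = (item instanceof BlobDirectory) ? "dir " : "file";
        System.out.println(kind + "  " + item.getName());
    }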