Search in sources :

Example 36 with NotNull

use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.

The following example shows the buildSparkBatchJob method of the SparkBatchJobRunner class.

/**
 * Lazily assembles an {@link ISparkBatchJob} from the given submit model.
 * The work (cluster lookup, deploy-target resolution, parameter update) runs
 * only when the returned {@link Observable} is subscribed, reporting progress
 * on the current backgroundable task indicator along the way.
 *
 * @param submitModel the submission settings chosen by the user
 * @return a cold Observable that emits the prepared batch job, or errors with
 *         {@link ExecutionException} when the named cluster cannot be found
 */
@Override
@NotNull
public Observable<ISparkBatchJob> buildSparkBatchJob(@NotNull SparkSubmitModel submitModel) {
    // Defer all preparation until subscription time.
    return Observable.fromCallable(() -> {
        final String clusterName = submitModel.getSubmissionParameter().getClusterName();

        // Stage 1 (20%): resolve the target cluster from the user's subscriptions.
        updateCurrentBackgroundableTaskIndicator(indicator -> {
            indicator.setFraction(0.2f);
            indicator.setText("Get Spark cluster [" + clusterName + "] information from subscriptions");
        });
        final IClusterDetail clusterDetail = ClusterManagerEx.getInstance()
                .getClusterDetailByName(clusterName)
                .orElseThrow(() -> new ExecutionException("Can't find cluster named " + clusterName));

        // Stage 2 (70%): work out where the job artifacts will be deployed.
        updateCurrentBackgroundableTaskIndicator(indicator -> {
            indicator.setFraction(0.7f);
            indicator.setText("Get the storage configuration for artifacts deployment");
        });
        final Deployable jobDeploy =
                SparkBatchJobDeployFactory.getInstance().buildSparkBatchJobDeploy(submitModel, clusterDetail);
        final SparkSubmissionParameter submissionParameter =
                updateStorageConfigForSubmissionParameter(submitModel);

        // Stage 3 (100%): every prerequisite is verified; assemble the job.
        updateCurrentBackgroundableTaskIndicator(indicator -> {
            indicator.setFraction(1.0f);
            indicator.setText("All checks are passed.");
        });
        return new SparkBatchJob(clusterDetail, submissionParameter, getClusterSubmission(clusterDetail), jobDeploy);
    });
}
Also used : ExecutionException(com.intellij.execution.ExecutionException) IClusterDetail(com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail) NotNull(com.microsoft.azuretools.azurecommons.helpers.NotNull)

Example 37 with NotNull

use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.

The following example shows the getState method of the LivySparkBatchJobRunConfiguration class.

/**
 * Builds the {@link RunProfileState} that will execute this Spark run configuration
 * for the given executor: remote debug, remote run, local debug, or local run.
 *
 * <p>Side effects: calls {@code setRunMode(...)} to record the chosen mode, may start a
 * telemetry {@link Operation} for local runs, logs an AppInsights event, and clears
 * the action-properties map once the event has been emitted.
 *
 * @param executor             decides which kind of state is created
 * @param executionEnvironment carries the telemetry operation and debug-target user data
 * @return the prepared state, or {@code null} when the executor type is not recognized
 * @throws ExecutionException when a remote run/debug is requested but the Spark batch
 *                            job has not been prepared beforehand
 */
@Nullable
@Override
public RunProfileState getState(@NotNull final Executor executor, @NotNull final ExecutionEnvironment executionEnvironment) throws ExecutionException {
    // May be pre-created by the caller; for local runs it is lazily created below,
    // so this variable is deliberately non-final.
    Operation operation = executionEnvironment.getUserData(TelemetryKeys.OPERATION);
    final String debugTarget = executionEnvironment.getUserData(SparkBatchJobDebuggerRunner.DEBUG_TARGET_KEY);
    // true when the debug session targets a Spark executor process rather than the driver
    final boolean isExecutor = StringUtils.equals(debugTarget, SparkBatchJobDebuggerRunner.DEBUG_EXECUTOR);
    RunProfileStateWithAppInsightsEvent state = null;
    // Project artifact whose name matches the submit model's artifact name; may be absent.
    final Artifact selectedArtifact = ArtifactUtil.getArtifactWithOutputPaths(getProject()).stream().filter(artifact -> artifact.getName().equals(getSubmitModel().getArtifactName())).findFirst().orElse(null);
    if (executor instanceof SparkBatchJobDebugExecutor) {
        // Remote debugging: sparkRemoteBatch must already be a prepared remote-debug job.
        final ISparkBatchJob remoteDebugBatch = sparkRemoteBatch;
        if (!(remoteDebugBatch instanceof SparkBatchRemoteDebugJob)) {
            throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
        }
        if (isExecutor) {
            setRunMode(RunMode.REMOTE_DEBUG_EXECUTOR);
            state = new SparkBatchRemoteDebugExecutorState(getModel().getSubmitModel(), operation, remoteDebugBatch);
        } else {
            // Driver debugging: schedule an artifact build before the run when one is selected.
            if (selectedArtifact != null) {
                BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
            }
            setRunMode(RunMode.REMOTE);
            state = new SparkBatchRemoteDebugState(getModel().getSubmitModel(), operation, sparkRemoteBatch);
        }
    } else if (executor instanceof SparkBatchJobRunExecutor) {
        // Remote (non-debug) run: the batch job must already have been prepared.
        final ISparkBatchJob remoteBatch = sparkRemoteBatch;
        if (remoteBatch == null) {
            throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
        }
        if (selectedArtifact != null) {
            BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
        }
        setRunMode(RunMode.REMOTE);
        state = new SparkBatchRemoteRunState(getModel().getSubmitModel(), operation, remoteBatch);
    } else if (executor instanceof DefaultDebugExecutor) {
        // Local debug run: create (and start) the telemetry operation if none was supplied.
        setRunMode(RunMode.LOCAL);
        if (operation == null) {
            operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT, TelemetryConstants.DEBUG_LOCAL_SPARK_JOB);
            operation.start();
        }
        state = new SparkBatchLocalDebugState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
    } else if (executor instanceof DefaultRunExecutor) {
        // Local run: same lazy telemetry-operation creation as local debug.
        setRunMode(RunMode.LOCAL);
        if (operation == null) {
            operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT, TelemetryConstants.RUN_LOCAL_SPARK_JOB);
            operation.start();
        }
        state = new SparkBatchLocalRunState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
    }
    if (state != null) {
        // Stringify the Object->Object action properties; null keys pass through,
        // null values become "" so the map is safe for telemetry transport.
        final Map<String, String> props = getActionProperties().entrySet().stream().collect(Collectors.toMap((Map.Entry<Object, Object> entry) -> entry.getKey() == null ? null : entry.getKey().toString(), (Map.Entry<Object, Object> entry) -> entry.getValue() == null ? "" : entry.getValue().toString()));
        final String configurationId = Optional.ofNullable(executionEnvironment.getRunnerAndConfigurationSettings()).map(settings -> settings.getType().getId()).orElse("");
        props.put("configurationId", configurationId);
        state.createAppInsightEvent(executor, props);
        EventUtil.logEvent(EventType.info, operation, props);
        // Clear the action properties
        getActionProperties().clear();
    }
    return state;
}
Also used : ISecureStore(com.microsoft.azure.toolkit.ide.common.store.ISecureStore) java.util(java.util) BLOB(com.microsoft.azure.hdinsight.spark.common.SparkSubmitStorageType.BLOB) TelemetryKeys(com.microsoft.intellij.telemetry.TelemetryKeys) NotNull(com.microsoft.azuretools.azurecommons.helpers.NotNull) ExecutionException(com.intellij.execution.ExecutionException) Artifact(com.intellij.packaging.artifacts.Artifact) CompileStepBeforeRun(com.intellij.compiler.options.CompileStepBeforeRun) InvalidDataException(com.intellij.openapi.util.InvalidDataException) StringUtils(org.apache.commons.lang3.StringUtils) com.intellij.execution.configurations(com.intellij.execution.configurations) SparkSubmitJobUploadStorageModelKt.getSecureStoreServiceOf(com.microsoft.azure.hdinsight.spark.common.SparkSubmitJobUploadStorageModelKt.getSecureStoreServiceOf) ILogger(com.microsoft.azure.hdinsight.common.logger.ILogger) Observable(rx.Observable) ExecutionEnvironment(com.intellij.execution.runners.ExecutionEnvironment) BuildArtifactsBeforeRunTaskProvider(com.intellij.packaging.impl.run.BuildArtifactsBeforeRunTaskProvider) SparkSubmissionJobUploadStorageWithUploadPathPanel(com.microsoft.azure.hdinsight.spark.ui.SparkSubmissionJobUploadStorageWithUploadPathPanel) SparkApplicationType(com.microsoft.azure.hdinsight.spark.run.action.SparkApplicationType) Disposer(com.intellij.openapi.util.Disposer) SparkBatchJobConfigurable(com.microsoft.azure.hdinsight.spark.ui.SparkBatchJobConfigurable) Project(com.intellij.openapi.project.Project) BuildArtifactsBeforeRunTask(com.intellij.packaging.impl.run.BuildArtifactsBeforeRunTask) Module(com.intellij.openapi.module.Module) BeforeRunTask(com.intellij.execution.BeforeRunTask) SettingsEditor(com.intellij.openapi.options.SettingsEditor) DefaultRunExecutor(com.intellij.execution.executors.DefaultRunExecutor) DefaultDebugExecutor(com.intellij.execution.executors.DefaultDebugExecutor) ProgramRunner(com.intellij.execution.runners.ProgramRunner) 
Nullable(com.microsoft.azuretools.azurecommons.helpers.Nullable) Operation(com.microsoft.azuretools.telemetrywrapper.Operation) ADLS_GEN2(com.microsoft.azure.hdinsight.spark.common.SparkSubmitStorageType.ADLS_GEN2) AzureStoreManager(com.microsoft.azure.toolkit.ide.common.store.AzureStoreManager) Executor(com.intellij.execution.Executor) com.microsoft.azure.hdinsight.spark.run(com.microsoft.azure.hdinsight.spark.run) Collectors(java.util.stream.Collectors) File(java.io.File) JavaExecutionUtil(com.intellij.execution.JavaExecutionUtil) EventType(com.microsoft.azuretools.telemetrywrapper.EventType) Stream(java.util.stream.Stream) ArtifactUtil(com.intellij.packaging.impl.artifacts.ArtifactUtil) TelemetryConstants(com.microsoft.azuretools.telemetry.TelemetryConstants) TelemetryManager(com.microsoft.azuretools.telemetrywrapper.TelemetryManager) UsePassword(com.microsoft.azure.hdinsight.spark.common.SparkBatchRemoteDebugJobSshAuth.SSHAuthType.UsePassword) EventUtil(com.microsoft.azuretools.telemetrywrapper.EventUtil) WriteExternalException(com.intellij.openapi.util.WriteExternalException) Element(org.jdom.Element) com.microsoft.azure.hdinsight.spark.common(com.microsoft.azure.hdinsight.spark.common) Operation(com.microsoft.azuretools.telemetrywrapper.Operation) Artifact(com.intellij.packaging.artifacts.Artifact) DefaultDebugExecutor(com.intellij.execution.executors.DefaultDebugExecutor) ExecutionException(com.intellij.execution.ExecutionException) DefaultRunExecutor(com.intellij.execution.executors.DefaultRunExecutor) Nullable(com.microsoft.azuretools.azurecommons.helpers.Nullable)

Example 38 with NotNull

use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.

The following example shows the listFiles method of the ADLSGen2FileSystem class.

/**
 * Enumerates the children of the given ADLS Gen2 virtual file.
 * Non-directories yield an empty array. Listing is performed synchronously
 * (blocking on the Rx pipeline); any listing failure surfaces as an
 * {@link IOException} wrapping the storage error message.
 *
 * @param vf the virtual file whose children are requested
 * @return the child files with their parent set to {@code vf}, never null
 */
@NotNull
public VirtualFile[] listFiles(AdlsGen2VirtualFile vf) {
    // Only directories have children to enumerate.
    if (!vf.isDirectory()) {
        return new VirtualFile[0];
    }

    // sample fileSystemRootPath: https://accountName.dfs.core.windows.net/fileSystem/
    final String fileSystemRootPath = rootPathUri.resolve("/").getUrl().toString();
    // sample directoryParam: sub/path/to
    final String directoryParam = vf.getAbfsUri().getDirectoryParam();

    final List<AdlsGen2VirtualFile> children = this.op.list(fileSystemRootPath, directoryParam)
            .map(remoteFile -> new AdlsGen2VirtualFile(
                    (AbfsUri) AbfsUri.parse(fileSystemRootPath)
                            .resolveAsRoot(AzureStorageUri.encodeAndNormalizePath(remoteFile.getName())),
                    remoteFile.isDirectory(),
                    this))
            .doOnNext(child -> child.setParent(vf))
            .onErrorResumeNext(err -> {
                String errorMessage = "Failed to list folders and files with error " + err.getMessage() + ". ";
                if (err instanceof ForbiddenHttpErrorStatus) {
                    // 403 usually means a missing role assignment; append actionable hints.
                    errorMessage += ADLSGen2Deploy.getForbiddenErrorHints(vf.toString());
                }
                return Observable.error(new IOException(errorMessage));
            })
            .toList()
            .toBlocking()
            .lastOrDefault(new ArrayList<>());

    return children.toArray(new VirtualFile[0]);
}
Also used : ForbiddenHttpErrorStatus(com.microsoft.azure.hdinsight.sdk.common.errorresponse.ForbiddenHttpErrorStatus) Nullable(com.microsoft.azuretools.azurecommons.helpers.Nullable) NotNull(com.microsoft.azuretools.azurecommons.helpers.NotNull) VirtualFile(com.intellij.openapi.vfs.VirtualFile) ADLSGen2Deploy(com.microsoft.azure.hdinsight.spark.common.ADLSGen2Deploy) IOException(java.io.IOException) VirtualFileListener(com.intellij.openapi.vfs.VirtualFileListener) AbfsUri(com.microsoft.azure.hdinsight.common.AbfsUri) HttpObservable(com.microsoft.azure.hdinsight.sdk.common.HttpObservable) ADLSGen2FSOperation(com.microsoft.azure.hdinsight.sdk.storage.adlsgen2.ADLSGen2FSOperation) Observable(rx.Observable) ArrayList(java.util.ArrayList) List(java.util.List) AzureStorageUri(com.microsoft.azure.hdinsight.common.AzureStorageUri) ArrayList(java.util.ArrayList) ForbiddenHttpErrorStatus(com.microsoft.azure.hdinsight.sdk.common.errorresponse.ForbiddenHttpErrorStatus) IOException(java.io.IOException) AbfsUri(com.microsoft.azure.hdinsight.common.AbfsUri) NotNull(com.microsoft.azuretools.azurecommons.helpers.NotNull)

Example 39 with NotNull

use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.

The following example shows the doValidateValue method of the ServerNameTextField class.

/**
 * Validates the current server-name input as a cascade of checks:
 * subscription presence, the superclass's own checks, length bounds,
 * allowed-character pattern, and finally an optional caller-supplied
 * availability check. The first failing check short-circuits.
 *
 * @return {@code UNINITIALIZED} when no subscription is selected, an ERROR
 *         info for the first failed check, otherwise {@code OK} (or the
 *         availability function's verdict when one is configured)
 */
@Override
@NotNull
public AzureValidationInfo doValidateValue() {
    // Without a subscription there is nothing meaningful to validate yet.
    if (StringUtils.isBlank(subscriptionId)) {
        return AzureValidationInfo.UNINITIALIZED;
    }

    // Let the base class run its own checks first; stop on the first failure.
    final AzureValidationInfo baseResult = super.doValidateValue();
    if (!AzureValidationInfo.OK.equals(baseResult)) {
        return baseResult;
    }

    final String serverName = this.getValue();
    final int length = StringUtils.length(serverName);

    // Length must fall within [minLength, maxLength].
    if (length < minLength || length > maxLength) {
        return AzureValidationInfo.builder()
                .input(this)
                .message(String.format("Server name must be at least %s characters and at most %s characters.", minLength, maxLength))
                .type(AzureValidationInfo.Type.ERROR)
                .build();
    }

    // Character-set rule: lowercase letters, digits and '-', no leading/trailing '-'.
    if (!PATTERN.matcher(serverName).matches()) {
        return AzureValidationInfo.builder()
                .input(this)
                .message("Your server name can contain only lowercase letters, numbers, and '-', but can't start or end with '-'.")
                .type(AzureValidationInfo.Type.ERROR)
                .build();
    }

    // Optional availability check supplied by the owner of this field.
    if (Objects.nonNull(validateFunction)) {
        return validateFunction.apply(this);
    }

    return AzureValidationInfo.OK;
}
Also used : AzureValidationInfo(com.microsoft.azure.toolkit.lib.common.form.AzureValidationInfo) NotNull(com.microsoft.azuretools.azurecommons.helpers.NotNull)

Example 40 with NotNull

use of com.microsoft.azuretools.azurecommons.helpers.NotNull in project azure-tools-for-java by Microsoft.

The following example shows the getBlobItems method of the StorageClientSDKManager class.

/**
 * Lists the immediate children of the given blob directory: sub-directories
 * become {@link BlobDirectory} entries and blobs become {@link BlobFile}
 * entries with their metadata copied out (absent metadata defaults to empty
 * strings, absent timestamps to "now").
 *
 * @param connectionString the storage-account connection string
 * @param blobDirectory    the directory whose children are requested
 * @return the children in listing order; empty when the directory has none
 * @throws AzureCmdException wrapping any failure from the storage SDK
 */
@NotNull
public List<BlobItem> getBlobItems(@NotNull String connectionString, @NotNull BlobDirectory blobDirectory) throws AzureCmdException {
    final List<BlobItem> biList = new ArrayList<>();
    try {
        final CloudBlobClient client = getCloudBlobClient(connectionString);
        final String containerName = blobDirectory.getContainerName();
        final String delimiter = client.getDirectoryDelimiter();
        final CloudBlobContainer container = client.getContainerReference(containerName);
        final CloudBlobDirectory directory = container.getDirectoryReference(blobDirectory.getPath());
        for (final ListBlobItem item : directory.listBlobs()) {
            final String uri = item.getUri() != null ? item.getUri().toString() : "";
            if (item instanceof CloudBlobDirectory) {
                final CloudBlobDirectory subDirectory = (CloudBlobDirectory) item;
                final String name = extractBlobItemName(subDirectory.getPrefix(), delimiter);
                final String path = Strings.nullToEmpty(subDirectory.getPrefix());
                biList.add(new BlobDirectory(name, uri, containerName, path));
            } else if (item instanceof CloudBlob) {
                biList.add(toBlobFile((CloudBlob) item, uri, containerName, delimiter));
            }
            // Other ListBlobItem implementations (if any) are deliberately skipped,
            // matching the original behavior.
        }
        return biList;
    } catch (Throwable t) {
        // Throwable (not Exception) is caught on purpose: the storage SDK may surface
        // Errors as well, and callers expect a single AzureCmdException with the cause.
        throw new AzureCmdException("Error retrieving the Blob Item list", t);
    }
}

/**
 * Converts a {@link CloudBlob} listing entry into a {@link BlobFile},
 * defaulting absent string metadata to "" and an absent last-modified
 * timestamp to the current time.
 */
private BlobFile toBlobFile(CloudBlob blob, String uri, String containerName, String delimiter) {
    final String name = extractBlobItemName(blob.getName(), delimiter);
    final String path = Strings.nullToEmpty(blob.getName());
    String type = "";
    String cacheControlHeader = "";
    String contentEncoding = "";
    String contentLanguage = "";
    String contentType = "";
    String contentMD5Header = "";
    String eTag = "";
    final Calendar lastModified = new GregorianCalendar();
    long size = 0;
    final BlobProperties properties = blob.getProperties();
    if (properties != null) {
        if (properties.getBlobType() != null) {
            type = properties.getBlobType().toString();
        }
        cacheControlHeader = Strings.nullToEmpty(properties.getCacheControl());
        contentEncoding = Strings.nullToEmpty(properties.getContentEncoding());
        contentLanguage = Strings.nullToEmpty(properties.getContentLanguage());
        contentType = Strings.nullToEmpty(properties.getContentType());
        contentMD5Header = Strings.nullToEmpty(properties.getContentMD5());
        eTag = Strings.nullToEmpty(properties.getEtag());
        if (properties.getLastModified() != null) {
            lastModified.setTime(properties.getLastModified());
        }
        size = properties.getLength();
    }
    return new BlobFile(name, uri, containerName, path, type, cacheControlHeader, contentEncoding, contentLanguage, contentType, contentMD5Header, eTag, lastModified, size);
}
Also used : BlobDirectory(com.microsoft.tooling.msservices.model.storage.BlobDirectory) BlobFile(com.microsoft.tooling.msservices.model.storage.BlobFile) BlobItem(com.microsoft.tooling.msservices.model.storage.BlobItem) AzureCmdException(com.microsoft.azuretools.azurecommons.helpers.AzureCmdException) NotNull(com.microsoft.azuretools.azurecommons.helpers.NotNull)

Aggregations

NotNull (com.microsoft.azuretools.azurecommons.helpers.NotNull)65 AzureCmdException (com.microsoft.azuretools.azurecommons.helpers.AzureCmdException)21 Nullable (com.microsoft.azuretools.azurecommons.helpers.Nullable)8 File (java.io.File)8 IOException (java.io.IOException)8 IClusterDetail (com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail)7 java.util (java.util)7 Observable (rx.Observable)7 URI (java.net.URI)6 HDIException (com.microsoft.azure.hdinsight.sdk.common.HDIException)5 ObjectConvertUtils (com.microsoft.azure.hdinsight.sdk.rest.ObjectConvertUtils)5 List (java.util.List)5 StringUtils (org.apache.commons.lang3.StringUtils)5 Project (com.intellij.openapi.project.Project)4 CloudQueue (com.microsoft.azure.storage.queue.CloudQueue)4 CloudQueueClient (com.microsoft.azure.storage.queue.CloudQueueClient)4 ExecutionException (java.util.concurrent.ExecutionException)4 Collectors (java.util.stream.Collectors)4 ExecutionException (com.intellij.execution.ExecutionException)3 Artifact (com.intellij.packaging.artifacts.Artifact)3