
Example 16 with Operation

use of com.microsoft.azuretools.telemetrywrapper.Operation in project azure-tools-for-java by Microsoft.

the class CreateRedisCacheForm method onOK.

private void onOK() {
    final Operation operation = TelemetryManager.createOperation(REDIS, CREATE_REDIS);
    try {
        operation.start();
        Azure azure = azureManager.getAzure(currentSub.getId());
        setSubscription(currentSub);
        ProcessingStrategy processor = RedisCacheUtil.doGetProcessor(azure, skus, redisCacheNameValue, selectedLocationValue, selectedResGrpValue, selectedPriceTierValue, noSSLPort, newResGrp);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        ListeningExecutorService executorService = MoreExecutors.listeningDecorator(executor);
        ListenableFuture<Void> futureTask = executorService.submit(new CreateRedisCallable(processor));
        final ProcessingStrategy processorInner = processor;
        Futures.addCallback(futureTask, new FutureCallback<Void>() {

            @Override
            public void onSuccess(Void arg0) {
                if (onCreate != null) {
                    onCreate.run();
                    operation.complete();
                }
            }

            @Override
            public void onFailure(Throwable throwable) {
                DefaultLoader.getUIHelper().showError(throwable.getMessage(), "Error occurred when creating Redis Cache: " + redisCacheNameValue);
                EventUtil.logError(operation, ErrorType.userError, new Exception(throwable), null, null);
                operation.complete();
                try {
                    // notify the waiting thread that the awaited thread hit an exception, so the blocking queue can be cleared
                    processorInner.notifyCompletion();
                } catch (InterruptedException ex) {
                    String msg = String.format(CREATING_ERROR_INDICATOR, "notifyCompletion", ex.getMessage());
                    PluginUtil.displayErrorDialogAndLog(message("errTtl"), msg, ex);
                }
            }
        }, MoreExecutors.directExecutor());
        close(DialogWrapper.OK_EXIT_CODE, true);
    } catch (Exception ex) {
        ex.printStackTrace();
        EventUtil.logError(operation, ErrorType.userError, ex, null, null);
        operation.complete();
    }
}
Also used : Azure(com.microsoft.azure.management.Azure) Operation(com.microsoft.azuretools.telemetrywrapper.Operation) ProcessingStrategy(com.microsoft.azuretools.azurecommons.rediscacheprocessors.ProcessingStrategy) IOException(java.io.IOException) ListeningExecutorService(com.google.common.util.concurrent.ListeningExecutorService) ExecutorService(java.util.concurrent.ExecutorService)
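
The examples on this page all follow the same Operation lifecycle: TelemetryManager.createOperation(service, action) to obtain the operation, operation.start() before the work begins, EventUtil.logError(...) when the work fails, and operation.complete() on every exit path. The sketch below distills that pattern; it is a minimal illustration that assumes only the calls visible in these snippets, and doCreateRedisCache() is a hypothetical placeholder for the real work.

private void runWithTelemetry() {
    // Minimal sketch of the Operation lifecycle shared by these examples.
    // Assumes the TelemetryManager/Operation/EventUtil signatures shown above;
    // doCreateRedisCache() is a hypothetical placeholder, not project code.
    final Operation operation = TelemetryManager.createOperation(REDIS, CREATE_REDIS);
    try {
        operation.start();
        doCreateRedisCache();
    } catch (Exception ex) {
        // Classify and record the failure against the operation before closing it.
        EventUtil.logError(operation, ErrorType.userError, ex, null, null);
    } finally {
        // complete() closes the telemetry record; it must run on every exit path.
        operation.complete();
    }
}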

Example 17 with Operation

use of com.microsoft.azuretools.telemetrywrapper.Operation in project azure-tools-for-java by Microsoft.

the class LivySparkBatchJobRunConfiguration method getState.

@Nullable
@Override
public RunProfileState getState(@NotNull final Executor executor, @NotNull final ExecutionEnvironment executionEnvironment) throws ExecutionException {
    Operation operation = executionEnvironment.getUserData(TelemetryKeys.OPERATION);
    final String debugTarget = executionEnvironment.getUserData(SparkBatchJobDebuggerRunner.DEBUG_TARGET_KEY);
    final boolean isExecutor = StringUtils.equals(debugTarget, SparkBatchJobDebuggerRunner.DEBUG_EXECUTOR);
    RunProfileStateWithAppInsightsEvent state = null;
    final Artifact selectedArtifact = ArtifactUtil.getArtifactWithOutputPaths(getProject()).stream()
            .filter(artifact -> artifact.getName().equals(getSubmitModel().getArtifactName()))
            .findFirst()
            .orElse(null);
    if (executor instanceof SparkBatchJobDebugExecutor) {
        final ISparkBatchJob remoteDebugBatch = sparkRemoteBatch;
        if (!(remoteDebugBatch instanceof SparkBatchRemoteDebugJob)) {
            throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
        }
        if (isExecutor) {
            setRunMode(RunMode.REMOTE_DEBUG_EXECUTOR);
            state = new SparkBatchRemoteDebugExecutorState(getModel().getSubmitModel(), operation, remoteDebugBatch);
        } else {
            if (selectedArtifact != null) {
                BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
            }
            setRunMode(RunMode.REMOTE);
            state = new SparkBatchRemoteDebugState(getModel().getSubmitModel(), operation, sparkRemoteBatch);
        }
    } else if (executor instanceof SparkBatchJobRunExecutor) {
        final ISparkBatchJob remoteBatch = sparkRemoteBatch;
        if (remoteBatch == null) {
            throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
        }
        if (selectedArtifact != null) {
            BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
        }
        setRunMode(RunMode.REMOTE);
        state = new SparkBatchRemoteRunState(getModel().getSubmitModel(), operation, remoteBatch);
    } else if (executor instanceof DefaultDebugExecutor) {
        setRunMode(RunMode.LOCAL);
        if (operation == null) {
            operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT, TelemetryConstants.DEBUG_LOCAL_SPARK_JOB);
            operation.start();
        }
        state = new SparkBatchLocalDebugState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
    } else if (executor instanceof DefaultRunExecutor) {
        setRunMode(RunMode.LOCAL);
        if (operation == null) {
            operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT, TelemetryConstants.RUN_LOCAL_SPARK_JOB);
            operation.start();
        }
        state = new SparkBatchLocalRunState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
    }
    if (state != null) {
        final Map<String, String> props = getActionProperties().entrySet().stream()
                .collect(Collectors.toMap(
                        (Map.Entry<Object, Object> entry) -> entry.getKey() == null ? null : entry.getKey().toString(),
                        (Map.Entry<Object, Object> entry) -> entry.getValue() == null ? "" : entry.getValue().toString()));
        final String configurationId = Optional.ofNullable(executionEnvironment.getRunnerAndConfigurationSettings()).map(settings -> settings.getType().getId()).orElse("");
        props.put("configurationId", configurationId);
        state.createAppInsightEvent(executor, props);
        EventUtil.logEvent(EventType.info, operation, props);
        // Clear the action properties
        getActionProperties().clear();
    }
    return state;
}
Also used : ISecureStore(com.microsoft.azure.toolkit.ide.common.store.ISecureStore) java.util(java.util) BLOB(com.microsoft.azure.hdinsight.spark.common.SparkSubmitStorageType.BLOB) TelemetryKeys(com.microsoft.intellij.telemetry.TelemetryKeys) NotNull(com.microsoft.azuretools.azurecommons.helpers.NotNull) ExecutionException(com.intellij.execution.ExecutionException) Artifact(com.intellij.packaging.artifacts.Artifact) CompileStepBeforeRun(com.intellij.compiler.options.CompileStepBeforeRun) InvalidDataException(com.intellij.openapi.util.InvalidDataException) StringUtils(org.apache.commons.lang3.StringUtils) com.intellij.execution.configurations(com.intellij.execution.configurations) SparkSubmitJobUploadStorageModelKt.getSecureStoreServiceOf(com.microsoft.azure.hdinsight.spark.common.SparkSubmitJobUploadStorageModelKt.getSecureStoreServiceOf) ILogger(com.microsoft.azure.hdinsight.common.logger.ILogger) Observable(rx.Observable) ExecutionEnvironment(com.intellij.execution.runners.ExecutionEnvironment) BuildArtifactsBeforeRunTaskProvider(com.intellij.packaging.impl.run.BuildArtifactsBeforeRunTaskProvider) SparkSubmissionJobUploadStorageWithUploadPathPanel(com.microsoft.azure.hdinsight.spark.ui.SparkSubmissionJobUploadStorageWithUploadPathPanel) SparkApplicationType(com.microsoft.azure.hdinsight.spark.run.action.SparkApplicationType) Disposer(com.intellij.openapi.util.Disposer) SparkBatchJobConfigurable(com.microsoft.azure.hdinsight.spark.ui.SparkBatchJobConfigurable) Project(com.intellij.openapi.project.Project) BuildArtifactsBeforeRunTask(com.intellij.packaging.impl.run.BuildArtifactsBeforeRunTask) Module(com.intellij.openapi.module.Module) BeforeRunTask(com.intellij.execution.BeforeRunTask) SettingsEditor(com.intellij.openapi.options.SettingsEditor) DefaultRunExecutor(com.intellij.execution.executors.DefaultRunExecutor) DefaultDebugExecutor(com.intellij.execution.executors.DefaultDebugExecutor) ProgramRunner(com.intellij.execution.runners.ProgramRunner) Nullable(com.microsoft.azuretools.azurecommons.helpers.Nullable) Operation(com.microsoft.azuretools.telemetrywrapper.Operation) ADLS_GEN2(com.microsoft.azure.hdinsight.spark.common.SparkSubmitStorageType.ADLS_GEN2) AzureStoreManager(com.microsoft.azure.toolkit.ide.common.store.AzureStoreManager) Executor(com.intellij.execution.Executor) com.microsoft.azure.hdinsight.spark.run(com.microsoft.azure.hdinsight.spark.run) Collectors(java.util.stream.Collectors) File(java.io.File) JavaExecutionUtil(com.intellij.execution.JavaExecutionUtil) EventType(com.microsoft.azuretools.telemetrywrapper.EventType) Stream(java.util.stream.Stream) ArtifactUtil(com.intellij.packaging.impl.artifacts.ArtifactUtil) TelemetryConstants(com.microsoft.azuretools.telemetry.TelemetryConstants) TelemetryManager(com.microsoft.azuretools.telemetrywrapper.TelemetryManager) UsePassword(com.microsoft.azure.hdinsight.spark.common.SparkBatchRemoteDebugJobSshAuth.SSHAuthType.UsePassword) EventUtil(com.microsoft.azuretools.telemetrywrapper.EventUtil) WriteExternalException(com.intellij.openapi.util.WriteExternalException) Element(org.jdom.Element) com.microsoft.azure.hdinsight.spark.common(com.microsoft.azure.hdinsight.spark.common)

Example 18 with Operation

use of com.microsoft.azuretools.telemetrywrapper.Operation in project azure-tools-for-java by Microsoft.

the class NodeActionListener method actionPerformedAsync.

public ListenableFuture<Void> actionPerformedAsync(NodeActionEvent e) {
    String serviceName = transformHDInsight(getServiceName(e), e.getAction().getNode());
    String operationName = getOperationName(e);
    Operation operation = TelemetryManager.createOperation(serviceName, operationName);
    Node node = e.getAction().getNode();
    Mono<Map<String, String>> telemetryMono = buildProp(node);
    try {
        operation.start();
        actionPerformed(e);
        return Futures.immediateFuture(null);
    } catch (AzureCmdException | RuntimeException ex) {
        EventUtil.logError(operation, ErrorType.systemError, ex, null, null);
        AzureMessager.getMessager().error(ex);
        return Futures.immediateFailedFuture(ex);
    } finally {
        telemetryMono.subscribeOn(Schedulers.boundedElastic()).subscribe(properties -> {
            operation.trackProperties(properties);
            operation.complete();
        });
    }
}
Also used : AzureCmdException(com.microsoft.azuretools.azurecommons.helpers.AzureCmdException) Operation(com.microsoft.azuretools.telemetrywrapper.Operation) HashMap(java.util.HashMap) Map(java.util.Map)

Example 19 with Operation

use of com.microsoft.azuretools.telemetrywrapper.Operation in project azure-tools-for-java by Microsoft.

the class RedisExplorerPresenter method onKeyList.

/**
 * Called when the Scan button is clicked. The subscription id (sid) and resource id (id)
 * of the Redis Cache are taken from the presenter's own fields rather than passed in.
 *
 * @param db      index of the Redis Cache database
 * @param cursor  scan cursor for the Redis Cache
 * @param pattern scan match pattern for the Redis Cache
 */
public void onKeyList(int db, String cursor, String pattern) {
    Operation operation = TelemetryManager.createOperation(TelemetryConstants.REDIS, TelemetryConstants.REDIS_SCAN);
    operation.start();
    Observable.fromCallable(() -> {
        return RedisExplorerMvpModel.getInstance().scanKeys(sid, id, db, cursor, pattern);
    }).subscribeOn(getSchedulerProvider().io()).subscribe(result -> {
        DefaultLoader.getIdeHelper().invokeLater(() -> {
            if (isViewDetached()) {
                return;
            }
            getMvpView().showScanResult(new RedisScanResult(result));
            operation.complete();
        });
    }, e -> {
        EventUtil.logError(operation, ErrorType.userError, new Exception(e), null, null);
        operation.complete();
        errorHandler(CANNOT_GET_REDIS_INFO, (Exception) e);
    });
}
Also used : Operation(com.microsoft.azuretools.telemetrywrapper.Operation) RedisScanResult(com.microsoft.azuretools.core.mvp.ui.rediscache.RedisScanResult)
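
When the tracked work is asynchronous, as in Examples 16, 18 and 19, operation.complete() cannot simply sit in a finally block on the calling thread; it is deferred into the success and failure callbacks so the operation is closed only after the background work finishes. Below is a minimal sketch of that shape, assuming only the Operation API visible above; doScanKeysAsync() and the java.util.concurrent.CompletableFuture wiring are illustrative, not part of the project.

private void runAsyncWithTelemetry() {
    // Sketch of deferring operation.complete() into the async callback, mirroring
    // the Futures.addCallback and subscribe(...) patterns in the examples above.
    // doScanKeysAsync() is a hypothetical method returning a CompletableFuture<Void>.
    final Operation operation = TelemetryManager.createOperation(TelemetryConstants.REDIS, TelemetryConstants.REDIS_SCAN);
    operation.start();
    doScanKeysAsync().whenComplete((result, throwable) -> {
        if (throwable != null) {
            EventUtil.logError(operation, ErrorType.userError, new Exception(throwable), null, null);
        }
        // Complete on both paths, but only after the asynchronous work has finished.
        operation.complete();
    });
}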

Example 20 with Operation

use of com.microsoft.azuretools.telemetrywrapper.Operation in project azure-tools-for-java by Microsoft.

the class CreateFunctionAction method invokeDialog.

@Override
protected PsiElement[] invokeDialog(Project project, PsiDirectory psiDirectory) {
    final Operation operation = TelemetryManager.createOperation(TelemetryConstants.FUNCTION, TelemetryConstants.CREATE_FUNCTION_TRIGGER);
    try {
        operation.start();
        PsiPackage pkg = JavaDirectoryService.getInstance().getPackage(psiDirectory);
        // get existing package from current directory
        String hintPackageName = pkg == null ? "" : pkg.getQualifiedName();
        CreateFunctionForm form = new CreateFunctionForm(project, hintPackageName);
        List<PsiElement> psiElements = new ArrayList<>();
        if (form.showAndGet()) {
            final FunctionTemplate bindingTemplate;
            try {
                Map<String, String> parameters = form.getTemplateParameters();
                final String connectionName = parameters.get("connection");
                String triggerType = form.getTriggerType();
                String packageName = parameters.get("packageName");
                String className = parameters.get("className");
                PsiDirectory directory = ClassUtil.sourceRoot(psiDirectory);
                String newName = packageName.replace('.', '/');
                bindingTemplate = AzureFunctionsUtils.getFunctionTemplate(triggerType);
                operation.trackProperty(TelemetryConstants.TRIGGER_TYPE, triggerType);
                if (StringUtils.equalsIgnoreCase(triggerType, CreateFunctionForm.EVENT_HUB_TRIGGER)) {
                    if (StringUtils.isBlank(connectionName)) {
                        throw new AzureExecutionException(message("function.createFunction.error.connectionMissed"));
                    }
                    parameters.putIfAbsent("eventHubName", "myeventhub");
                    parameters.putIfAbsent("consumerGroup", "$Default");
                }
                final String functionClassContent = AzureFunctionsUtils.substituteParametersInTemplate(bindingTemplate, parameters);
                if (StringUtils.isNotEmpty(functionClassContent)) {
                    AzureTaskManager.getInstance().write(() -> {
                        CreateFileAction.MkDirs mkDirs = ApplicationManager.getApplication().runWriteAction(
                                (Computable<CreateFileAction.MkDirs>) () -> new CreateFileAction.MkDirs(newName + '/' + className, directory));
                        PsiFileFactory factory = PsiFileFactory.getInstance(project);
                        try {
                            mkDirs.directory.checkCreateFile(className + ".java");
                        } catch (final IncorrectOperationException e) {
                            final String dir = mkDirs.directory.getName();
                            final String error = String.format("failed to create function class[%s] in directory[%s]", className, dir);
                            throw new AzureToolkitRuntimeException(error, e);
                        }
                        CommandProcessor.getInstance().executeCommand(project, () -> {
                            PsiFile psiFile = factory.createFileFromText(className + ".java", JavaFileType.INSTANCE, functionClassContent);
                            psiElements.add(mkDirs.directory.add(psiFile));
                        }, null, null);
                        if (StringUtils.equalsIgnoreCase(triggerType, CreateFunctionForm.EVENT_HUB_TRIGGER)) {
                            try {
                                String connectionString = form.getEventHubNamespace() == null ? DEFAULT_EVENT_HUB_CONNECTION_STRING : getEventHubNamespaceConnectionString(form.getEventHubNamespace());
                                AzureFunctionsUtils.applyKeyValueToLocalSettingFile(new File(project.getBasePath(), "local.settings.json"), parameters.get("connection"), connectionString);
                            } catch (IOException e) {
                                EventUtil.logError(operation, ErrorType.systemError, e, null, null);
                                final String error = "failed to get connection string and save to local settings";
                                throw new AzureToolkitRuntimeException(error, e);
                            }
                        }
                    });
                }
            } catch (AzureExecutionException e) {
                AzureMessager.getMessager().error(e);
                EventUtil.logError(operation, ErrorType.systemError, e, null, null);
            }
        }
        if (!psiElements.isEmpty()) {
            FileEditorManager.getInstance(project).openFile(psiElements.get(0).getContainingFile().getVirtualFile(), false);
        }
        return psiElements.toArray(new PsiElement[0]);
    } finally {
        operation.complete();
    }
}
Also used : PsiFileFactory(com.intellij.psi.PsiFileFactory) ArrayList(java.util.ArrayList) PsiPackage(com.intellij.psi.PsiPackage) AzureToolkitRuntimeException(com.microsoft.azure.toolkit.lib.common.exception.AzureToolkitRuntimeException) Operation(com.microsoft.azuretools.telemetrywrapper.Operation) IOException(java.io.IOException) FunctionTemplate(com.microsoft.azure.toolkit.lib.legacy.function.template.FunctionTemplate) PsiDirectory(com.intellij.psi.PsiDirectory) AzureExecutionException(com.microsoft.azure.toolkit.lib.common.exception.AzureExecutionException) CreateFunctionForm(com.microsoft.azure.toolkit.intellij.function.CreateFunctionForm) IncorrectOperationException(com.intellij.util.IncorrectOperationException) PsiFile(com.intellij.psi.PsiFile) File(java.io.File) PsiElement(com.intellij.psi.PsiElement) CreateFileAction(com.intellij.ide.actions.CreateFileAction)
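
Besides the start/complete lifecycle, Examples 17, 18 and 20 also attach context to an operation: a single value via operation.trackProperty(key, value), a whole map via operation.trackProperties(map), and a discrete event via EventUtil.logEvent(EventType.info, operation, props). The sketch below combines those calls; it assumes only the signatures that appear in the snippets above, and the property keys and values are purely illustrative.

private void runWithProperties() {
    // Sketch of attaching properties and an info event to an operation,
    // mirroring the trackProperty/trackProperties/logEvent calls shown above.
    // The keys and values here are illustrative, not taken from the project.
    final Operation operation = TelemetryManager.createOperation(TelemetryConstants.FUNCTION, TelemetryConstants.CREATE_FUNCTION_TRIGGER);
    try {
        operation.start();
        operation.trackProperty(TelemetryConstants.TRIGGER_TYPE, "HttpTrigger");
        final Map<String, String> props = new HashMap<>();
        props.put("configurationId", "example-configuration-id");
        operation.trackProperties(props);
        EventUtil.logEvent(EventType.info, operation, props);
    } finally {
        operation.complete();
    }
}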

Aggregations

Operation (com.microsoft.azuretools.telemetrywrapper.Operation): 29
AzureOperation (com.microsoft.azure.toolkit.lib.common.operation.AzureOperation): 9
IOException (java.io.IOException): 8
File (java.io.File): 7
HashMap (java.util.HashMap): 7
AzureString (com.microsoft.azure.toolkit.lib.common.bundle.AzureString): 6
EventUtil (com.microsoft.azuretools.telemetrywrapper.EventUtil): 6
AzureToolkitRuntimeException (com.microsoft.azure.toolkit.lib.common.exception.AzureToolkitRuntimeException): 5
TelemetryConstants (com.microsoft.azuretools.telemetry.TelemetryConstants): 5
ErrorType (com.microsoft.azuretools.telemetrywrapper.ErrorType): 5
Map (java.util.Map): 5
Project (com.intellij.openapi.project.Project): 4
MsalClientException (com.microsoft.aad.msal4j.MsalClientException): 4
TelemetryManager (com.microsoft.azuretools.telemetrywrapper.TelemetryManager): 4
DefaultDockerClient (com.spotify.docker.client.DefaultDockerClient): 4
Path (java.nio.file.Path): 4
CancellationException (java.util.concurrent.CancellationException): 4
DefaultExecutionResult (com.intellij.execution.DefaultExecutionResult): 3
ProgressIndicator (com.intellij.openapi.progress.ProgressIndicator): 3
AzureTask (com.microsoft.azure.toolkit.lib.common.task.AzureTask): 3