Example usage of com.microsoft.azuretools.azurecommons.helpers.Nullable in the Microsoft project azure-tools-for-java, taken from the openItem method of the HDInsightHelperImpl class.
/**
 * Opens a Job View editor tab for the given cluster.
 *
 * <p>Creates an in-memory {@link LightVirtualFile} carrying the cluster detail and a
 * request UUID as user data, tags it with a dummy read-only binary file type so the
 * platform routes it to the Job View editor provider, then delegates to the
 * file-based {@code openItem} overload.
 *
 * @param project          the current IntelliJ project
 * @param myClusterDetail  the cluster whose Job View should be shown
 * @param uuid             identifier attached to the editor via {@code JOB_VIEW_UUID}
 * @param closeableFile    an editor file to close after opening, or null
 */
private void openItem(@NotNull final Project project, @NotNull final IClusterDetail myClusterDetail, @NotNull final String uuid, @Nullable final VirtualFile closeableFile) {
    final LightVirtualFile jobViewFile = new LightVirtualFile(myClusterDetail.getName() + ": Job View");
    jobViewFile.putUserData(JobViewEditorProvider.JOB_VIEW_KEY, myClusterDetail);
    jobViewFile.putUserData(JobViewEditorProvider.JOB_VIEW_UUID, uuid);
    // Dummy file type: binary + read-only so no editor tries to parse the content;
    // its icon and description are only used for the tab presentation.
    jobViewFile.setFileType(new FileType() {
        @NotNull
        @Override
        public String getName() {
            return this.getClass().getName();
        }

        @NotNull
        @Override
        public String getDescription() {
            return "job view dummy file";
        }

        @NotNull
        @Override
        public String getDefaultExtension() {
            return "";
        }

        @Override
        public boolean isBinary() {
            return true;
        }

        @Override
        public boolean isReadOnly() {
            return true;
        }

        @Nullable
        @Override
        public Icon getIcon() {
            return PluginUtil.getIcon(CommonConst.SPARK_JOBVIEW_ICONPATH);
        }

        @Nullable
        @Override
        public String getCharset(@NotNull final VirtualFile virtualFile, @NotNull final byte[] bytes) {
            return "UTF8";
        }
    });
    openItem(project, jobViewFile, closeableFile);
}
Example usage of com.microsoft.azuretools.azurecommons.helpers.Nullable in the Microsoft project azure-tools-for-java, taken from the parseAmHostHttpAddressHost method of the SparkBatchJob class.
/**
 * Extracts the host portion from a {@code host:port} combination string.
 *
 * @param driverHttpAddress the {@code host:port} combination string to parse, may be null
 * @return the host part when the whole string matches {@code host:port}, otherwise null
 */
@Nullable
String parseAmHostHttpAddressHost(@Nullable String driverHttpAddress) {
    if (driverHttpAddress != null) {
        // Whole-string match against host:port; named group "host" captures everything
        // before the first colon.
        final Matcher addressMatcher = Pattern
                .compile("(?<host>[^:]+):(?<port>\\d+)")
                .matcher(driverHttpAddress);
        if (addressMatcher.matches()) {
            return addressMatcher.group("host");
        }
    }
    return null;
}
Example usage of com.microsoft.azuretools.azurecommons.helpers.Nullable in the Microsoft project azure-tools-for-java, taken from the getRequestDetail method of the RequestDetail class.
/**
 * Builds a {@code RequestDetail} from the given URI when its path matches the
 * class-level {@code clusterPattern}.
 *
 * @param myUrl the request URI to inspect
 * @return a {@code RequestDetail} built from the pattern's first two captured groups
 *         and the URI's {@code &}-separated query parts (null query yields a null
 *         array), or null when the path does not match
 */
@Nullable
public static RequestDetail getRequestDetail(@NotNull URI myUrl) {
    final String rawQuery = myUrl.getQuery();
    // NOTE(review): a null query is deliberately propagated as a null array.
    final String[] queries = rawQuery == null ? null : rawQuery.split("&");
    final Matcher pathMatcher = clusterPattern.matcher(myUrl.getPath());
    return pathMatcher.find()
            ? new RequestDetail(pathMatcher.group(1), pathMatcher.group(2), queries)
            : null;
}
Example usage of com.microsoft.azuretools.azurecommons.helpers.Nullable in the Microsoft project azure-tools-for-java, taken from the getArtifactUploadedPath method of the WebHDFSDeploy class.
/**
 * Returns the WebHDFS URL for reading the uploaded artifact: the given root path
 * with the WebHDFS {@code OPEN} operation parameters appended as a query string.
 *
 * @param rootPath the artifact's root path/URI string
 * @return the root path extended with the OPEN operation query parameters
 * @throws URISyntaxException when {@code rootPath} is not a valid URI
 */
@Nullable
public String getArtifactUploadedPath(String rootPath) throws URISyntaxException {
    final URIBuilder openUriBuilder = new URIBuilder(rootPath);
    openUriBuilder.addParameters(new WebHdfsParamsBuilder("OPEN").build());
    return openUriBuilder.build().toString();
}
Example usage of com.microsoft.azuretools.azurecommons.helpers.Nullable in the Microsoft project azure-tools-for-java, taken from the getState method of the LivySparkBatchJobRunConfiguration class.
/**
 * Builds the RunProfileState for this Spark run configuration based on which
 * executor triggered it: remote debug (driver or executor attach), remote run,
 * local debug, or local run. Also wires up the build-artifact-before-run task
 * for remote modes and emits telemetry for the created state.
 *
 * @param executor the executor the user invoked (debug/run, local/remote flavors)
 * @param executionEnvironment carries the telemetry operation and debug-target user data
 * @return the prepared state, or null when the executor type is not handled
 * @throws ExecutionException when a remote batch job was not prepared beforehand
 */
@Nullable
@Override
public RunProfileState getState(@NotNull final Executor executor, @NotNull final ExecutionEnvironment executionEnvironment) throws ExecutionException {
// Telemetry operation may be pre-created by the caller; for local modes a
// fresh one is created below when absent.
Operation operation = executionEnvironment.getUserData(TelemetryKeys.OPERATION);
final String debugTarget = executionEnvironment.getUserData(SparkBatchJobDebuggerRunner.DEBUG_TARGET_KEY);
// true when attaching to a Spark executor process rather than the driver
final boolean isExecutor = StringUtils.equals(debugTarget, SparkBatchJobDebuggerRunner.DEBUG_EXECUTOR);
RunProfileStateWithAppInsightsEvent state = null;
// Artifact selected in the submit model, resolved among the project's artifacts
// with output paths; null when no match.
final Artifact selectedArtifact = ArtifactUtil.getArtifactWithOutputPaths(getProject()).stream().filter(artifact -> artifact.getName().equals(getSubmitModel().getArtifactName())).findFirst().orElse(null);
if (executor instanceof SparkBatchJobDebugExecutor) {
final ISparkBatchJob remoteDebugBatch = sparkRemoteBatch;
// Remote debugging requires the batch job to already be a debug job
// (also guards against null via instanceof).
if (!(remoteDebugBatch instanceof SparkBatchRemoteDebugJob)) {
throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
}
if (isExecutor) {
setRunMode(RunMode.REMOTE_DEBUG_EXECUTOR);
state = new SparkBatchRemoteDebugExecutorState(getModel().getSubmitModel(), operation, remoteDebugBatch);
} else {
// Driver debug: make sure the artifact is rebuilt before launch.
if (selectedArtifact != null) {
BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
}
setRunMode(RunMode.REMOTE);
state = new SparkBatchRemoteDebugState(getModel().getSubmitModel(), operation, sparkRemoteBatch);
}
} else if (executor instanceof SparkBatchJobRunExecutor) {
final ISparkBatchJob remoteBatch = sparkRemoteBatch;
if (remoteBatch == null) {
throw new ExecutionException("Spark Batch Job is not prepared for " + executor.getId());
}
if (selectedArtifact != null) {
BuildArtifactsBeforeRunTaskProvider.setBuildArtifactBeforeRun(getProject(), this, selectedArtifact);
}
setRunMode(RunMode.REMOTE);
state = new SparkBatchRemoteRunState(getModel().getSubmitModel(), operation, remoteBatch);
} else if (executor instanceof DefaultDebugExecutor) {
setRunMode(RunMode.LOCAL);
// Local modes create and start their own telemetry operation when none
// was provided by the environment.
if (operation == null) {
operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT, TelemetryConstants.DEBUG_LOCAL_SPARK_JOB);
operation.start();
}
state = new SparkBatchLocalDebugState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
} else if (executor instanceof DefaultRunExecutor) {
setRunMode(RunMode.LOCAL);
if (operation == null) {
operation = TelemetryManager.createOperation(TelemetryConstants.HDINSIGHT, TelemetryConstants.RUN_LOCAL_SPARK_JOB);
operation.start();
}
state = new SparkBatchLocalRunState(getProject(), getModel().getLocalRunConfigurableModel(), operation);
}
if (state != null) {
// Copy action properties into a String map (null keys kept as null,
// null values mapped to ""), add the configuration id, then report
// telemetry through both AppInsights and EventUtil.
final Map<String, String> props = getActionProperties().entrySet().stream().collect(Collectors.toMap((Map.Entry<Object, Object> entry) -> entry.getKey() == null ? null : entry.getKey().toString(), (Map.Entry<Object, Object> entry) -> entry.getValue() == null ? "" : entry.getValue().toString()));
final String configurationId = Optional.ofNullable(executionEnvironment.getRunnerAndConfigurationSettings()).map(settings -> settings.getType().getId()).orElse("");
props.put("configurationId", configurationId);
state.createAppInsightEvent(executor, props);
EventUtil.logEvent(EventType.info, operation, props);
// Clear the action properties
getActionProperties().clear();
}
return state;
}
Aggregations