Usage example of com.microsoft.azure.hdinsight.common.WasbUri in the azure-tools-for-java project by Microsoft:
the getDefaultStorageAccount method of the ClusterDetail class.
@Nullable
/**
 * Resolves the cluster's default storage account from its core-site configuration.
 *
 * <p>The default storage root path's URI scheme decides the storage flavor:
 * ADLS Gen1, classic Blob (WASB), or ADLS Gen2 (ABFS). Any other scheme yields
 * {@code null}.
 *
 * @param coresiteMap     core-site key/value pairs of the cluster; for Blob storage it is
 *                        expected to carry the account key under
 *                        {@code StorageAccountKeyPrefix + <accountName>}
 * @param clusterIdentity identity used to authenticate against ADLS Gen1
 * @return the default storage account, or {@code null} when the storage type is
 *         unrecognized or the Blob account key is missing from core-site
 * @throws HDIException if the default storage root path cannot be determined
 */
private IHDIStorageAccount getDefaultStorageAccount(Map<String, String> coresiteMap, ClusterIdentity clusterIdentity) throws HDIException {
    String defaultStorageRootPath = getDefaultStorageRootPath();
    if (defaultStorageRootPath == null) {
        throw new HDIException("Failed to get default storage root path");
    }

    StorageAccountType storageType = StorageAccountType.parseUri(URI.create(defaultStorageRootPath));
    switch (storageType) {
        case ADLS:
            return new ADLSStorageAccount(this, true, clusterIdentity, URI.create(defaultStorageRootPath));

        case BLOB:
            WasbUri wasbUri = WasbUri.parse(defaultStorageRootPath);
            // Fully-qualified blob endpoint, e.g. "myaccount.blob.core.windows.net"
            String storageAccountName = wasbUri.getStorageAccount() + ".blob." + wasbUri.getEndpointSuffix();
            String defaultContainerName = wasbUri.getContainer();
            // Single lookup instead of containsKey + get: a missing key and a null
            // value are handled identically (both mean "no credential available").
            String storageAccountKey = coresiteMap.get(StorageAccountKeyPrefix + storageAccountName);
            if (storageAccountKey == null) {
                return null;
            }
            return new HDStorageAccount(this, storageAccountName, storageAccountKey, true, defaultContainerName);

        case ADLSGen2:
            AbfsUri abfsUri = AbfsUri.parse(defaultStorageRootPath);
            String accountName = abfsUri.getAccountName();
            String fileSystem = abfsUri.getFileSystem();
            // NOTE(review): the access key is passed as null here — presumably resolved
            // elsewhere (e.g. managed identity); confirm against ADLSGen2StorageAccount.
            return new ADLSGen2StorageAccount(this, accountName, null, true, fileSystem, abfsUri.getUri().getScheme());

        default:
            return null;
    }
}
Usage example of com.microsoft.azure.hdinsight.common.WasbUri in the azure-tools-for-java project by Microsoft:
the updateStorageConfigForSubmissionParameter method of the SparkBatchJobRunner class.
// WARNING: When you change anything in this method, you should also change it in SparkScalaLivyConsoleRunConfiguration::applyRunConfiguration accordingly
// WARNING: When you change anything in this method, you should also change it in SparkScalaLivyConsoleRunConfiguration::applyRunConfiguration accordingly
/**
 * Rewrites referenced-artifact URIs and injects Blob storage credentials into the
 * submission parameter before the job is sent to Livy.
 *
 * @param submitModel the submit model carrying the submission parameter and upload-storage settings
 * @return the (mutated) submission parameter taken from {@code submitModel}
 * @throws ExecutionException if the Blob upload path is malformed or the Livy config update fails
 */
protected SparkSubmissionParameter updateStorageConfigForSubmissionParameter(SparkSubmitModel submitModel) throws ExecutionException {
    // Jars/files chosen through the virtual file system on ADLS Gen2 storage come back as
    // "https://" URIs, and submitting those fails with an error such as
    // "Server returned HTTP response code: 401 for URL: https://accountName.dfs.core.windows.net/fs0/Reference.jar".
    // Rewrite each Gen2 "https" URI to the "abfs" scheme to avoid that.
    final SparkSubmissionParameter parameter = submitModel.getSubmissionParameter();
    parameter.setReferencedJars(
            parameter.getReferencedJars().stream()
                    .map(this::transformToGen2Uri)
                    .collect(Collectors.toList()));
    parameter.setReferencedFiles(
            parameter.getReferencedFiles().stream()
                    .map(this::transformToGen2Uri)
                    .collect(Collectors.toList()));

    // When jobs are uploaded to Azure Blob storage, the blob credential has to be
    // placed into the Livy configuration as well.
    if (submitModel.getJobUploadStorageModel().getStorageAccountType() == SparkSubmitStorageType.BLOB) {
        try {
            final WasbUri blobRoot = WasbUri.parse(submitModel.getJobUploadStorageModel().getUploadPath());
            final String accessKey = submitModel.getJobUploadStorageModel().getStorageKey();

            // Preserve any conf entries already present on the job config.
            final Object currentConf = parameter.getJobConfig().get(SparkSubmissionParameter.Conf);
            final SparkConfigures mergedConf;
            if (currentConf instanceof Map) {
                mergedConf = new SparkConfigures(currentConf);
            } else {
                mergedConf = new SparkConfigures();
            }

            mergedConf.put("spark.hadoop." + blobRoot.getHadoopBlobFsPropertyKey(), accessKey);
            // We need the following config to fix issue https://github.com/microsoft/azure-tools-for-java/issues/5002
            mergedConf.put("spark.hadoop." + blobRoot.getKeyProviderPropertyKey(), blobRoot.getDefaultKeyProviderPropertyValue());
            parameter.getJobConfig().put(SparkSubmissionParameter.Conf, mergedConf);
        } catch (final UnknownFormatConversionException error) {
            final String errorHint = "Azure blob storage uploading path is not in correct format";
            log().warn(String.format("%s. Uploading Path: %s. Error message: %s. Stacktrace:\n%s",
                    errorHint,
                    submitModel.getJobUploadStorageModel().getUploadPath(),
                    error.getMessage(),
                    ExceptionUtils.getStackTrace(error)));
            throw new ExecutionException(errorHint);
        } catch (final Exception error) {
            final String errorHint = "Failed to update config for linked Azure Blob storage";
            log().warn(String.format("%s. Error message: %s. Stacktrace:\n%s",
                    errorHint,
                    error.getMessage(),
                    ExceptionUtils.getStackTrace(error)));
            throw new ExecutionException(errorHint);
        }
    }

    return parameter;
}
Aggregations