Use of com.microsoft.azure.management.datalake.store.implementation.DataLakeStoreAccountManagementClientImpl in project azure-sdk-for-java by Azure.
From the class DataLakeAnalyticsManagementTestBase, method initializeClients:
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) {
    rgName = generateRandomResourceName("adlarg", 15);
    adlsName = generateRandomResourceName("adls", 15);
    jobAndCatalogAdlaName = generateRandomResourceName("secondadla", 15);
    environmentLocation = Region.US_EAST2;
    dataLakeAnalyticsAccountManagementClient = new DataLakeAnalyticsAccountManagementClientImpl(restClient)
            .withSubscriptionId(defaultSubscription);
    // TODO: in the future this needs to be dynamic depending on the Azure environment
    // the tests are running in.
    String adlaSuffix = "azuredatalakeanalytics.net";
    addTextReplacementRule("https://(.*)." + adlaSuffix, this.mockUri());
    // Generate creds and a set of rest clients for catalog and job
    ApplicationTokenCredentials credentials = new AzureTestCredentials();
    if (IS_RECORD) {
        final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION"));
        try {
            credentials = ApplicationTokenCredentials.fromFile(credFile);
        } catch (IOException e) {
            Assert.fail("Failed to read credentials from file: " + credFile + " with error: " + e.getMessage());
        }
    }
    if (IS_RECORD) {
        RestClient restClientWithTimeout = buildRestClient(new RestClient.Builder()
                .withConnectionTimeout(5, TimeUnit.MINUTES)
                .withBaseUrl("https://{accountName}.{adlaJobDnsSuffix}")
                .withCredentials(credentials)
                .withLogLevel(LogLevel.BODY_AND_HEADERS)
                .withNetworkInterceptor(this.interceptor()), IS_MOCKED);
        dataLakeAnalyticsJobManagementClient = new DataLakeAnalyticsJobManagementClientImpl(restClientWithTimeout)
                .withAdlaJobDnsSuffix(adlaSuffix);
        RestClient catalogRestClient = buildRestClient(new RestClient.Builder()
                .withBaseUrl("https://{accountName}.{adlaCatalogDnsSuffix}")
                .withCredentials(credentials)
                .withLogLevel(LogLevel.BODY_AND_HEADERS)
                .withNetworkInterceptor(this.interceptor()), IS_MOCKED);
        dataLakeAnalyticsCatalogManagementClient = new DataLakeAnalyticsCatalogManagementClientImpl(catalogRestClient)
                .withAdlaCatalogDnsSuffix(adlaSuffix);
    } else {
        // for mocked clients, we can just use the basic rest client, since the DNS is replaced.
        dataLakeAnalyticsCatalogManagementClient = new DataLakeAnalyticsCatalogManagementClientImpl(restClient);
        dataLakeAnalyticsJobManagementClient = new DataLakeAnalyticsJobManagementClientImpl(restClient);
    }
    resourceManagementClient = ResourceManager.authenticate(restClient).withSubscription(defaultSubscription);
    dataLakeStoreAccountManagementClient = new DataLakeStoreAccountManagementClientImpl(restClient)
            .withSubscriptionId(defaultSubscription);
    storageManagementClient = StorageManager.authenticate(restClient, defaultSubscription);
    // create the resource group, ADLS account and ADLA account for job and catalog use.
    resourceManagementClient.resourceGroups().define(rgName).withRegion(environmentLocation).create();
    DataLakeStoreAccount createParams = new DataLakeStoreAccount();
    createParams.withLocation(environmentLocation.name());
    dataLakeStoreAccountManagementClient.accounts().create(rgName, adlsName, createParams);
    List<DataLakeStoreAccountInfo> adlsAccts = new ArrayList<DataLakeStoreAccountInfo>();
    DataLakeStoreAccountInfo adlsInfo = new DataLakeStoreAccountInfo();
    adlsInfo.withName(adlsName);
    adlsAccts.add(adlsInfo);
    DataLakeAnalyticsAccount adlaCreateParams = new DataLakeAnalyticsAccount();
    adlaCreateParams.withLocation(environmentLocation.name());
    adlaCreateParams.withDataLakeStoreAccounts(adlsAccts);
    adlaCreateParams.withDefaultDataLakeStoreAccount(adlsName);
    dataLakeAnalyticsAccountManagementClient.accounts().create(rgName, jobAndCatalogAdlaName, adlaCreateParams);
}
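A test could follow this setup with a read-back check on the provisioned accounts. The sketch below is not part of the original test base; it assumes the standard get operation on both account clients and the fluent getters that mirror the with* setters used above.

// Hypothetical verification step: fetch the newly created accounts and assert on them.
DataLakeStoreAccount adlsAccount = dataLakeStoreAccountManagementClient.accounts().get(rgName, adlsName);
Assert.assertEquals(adlsName, adlsAccount.name());
DataLakeAnalyticsAccount adlaAccount = dataLakeAnalyticsAccountManagementClient.accounts().get(rgName, jobAndCatalogAdlaName);
// defaultDataLakeStoreAccount() is the assumed getter paired with withDefaultDataLakeStoreAccount(...) above.
Assert.assertEquals(adlsName, adlaAccount.defaultDataLakeStoreAccount());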
Use of com.microsoft.azure.management.datalake.store.implementation.DataLakeStoreAccountManagementClientImpl in project azure-sdk-for-java by Azure.
From the class DataLakeStoreManagementTest, method initializeClients:
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) {
    environmentLocation = Region.US_EAST2;
    resourceManagementClient = ResourceManager.authenticate(restClient).withSubscription(defaultSubscription);
    resourceGroupName = generateRandomResourceName("adlsrg", 15);
    dataLakeStoreAccountManagementClient = new DataLakeStoreAccountManagementClientImpl(restClient);
    dataLakeStoreAccountManagementClient.withSubscriptionId(defaultSubscription);
    // create the resource group
    resourceManagementClient.resourceGroups().define(resourceGroupName).withRegion(environmentLocation).create();
}
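This setup only provisions the resource group, presumably so that individual test methods create ADLS accounts themselves through dataLakeStoreAccountManagementClient. A minimal sketch of that follow-up call, reusing the creation pattern from the first example (the account name here is a placeholder, not from the original test):

// Hypothetical account creation inside a test method, mirroring the pattern shown above.
String adlsAccountName = generateRandomResourceName("adls", 15);
DataLakeStoreAccount createParams = new DataLakeStoreAccount();
createParams.withLocation(environmentLocation.name());
dataLakeStoreAccountManagementClient.accounts().create(resourceGroupName, adlsAccountName, createParams);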
Use of com.microsoft.azure.management.datalake.store.implementation.DataLakeStoreAccountManagementClientImpl in project cloudbreak by hortonworks.
From the class AzureSetup, method validateAdlsFileSystem:
private void validateAdlsFileSystem(CloudCredential credential, FileSystem fileSystem) {
    Map<String, Object> credentialAttributes = credential.getParameters();
    String clientSecret = String.valueOf(credentialAttributes.get(AdlsFileSystemConfiguration.CREDENTIAL_SECRET_KEY));
    String subscriptionId = String.valueOf(credentialAttributes.get(AdlsFileSystemConfiguration.SUBSCRIPTION_ID));
    String clientId = String.valueOf(credentialAttributes.get(AdlsFileSystemConfiguration.ACCESS_KEY));
    String tenantId = fileSystem.getStringParameter(AdlsFileSystemConfiguration.TENANT_ID);
    String accountName = fileSystem.getStringParameter(FileSystemConfiguration.ACCOUNT_NAME);
    ApplicationTokenCredentials creds = new ApplicationTokenCredentials(clientId, tenantId, clientSecret, AzureEnvironment.AZURE);
    DataLakeStoreAccountManagementClient adlsClient = new DataLakeStoreAccountManagementClientImpl(creds);
    adlsClient.withSubscriptionId(subscriptionId);
    List<DataLakeStoreAccount> dataLakeStoreAccounts = adlsClient.accounts().list();
    boolean validAccountname = false;
    for (DataLakeStoreAccount account : dataLakeStoreAccounts) {
        if (account.name().equalsIgnoreCase(accountName)) {
            validAccountname = true;
            break;
        }
    }
    if (!validAccountname) {
        throw new CloudConnectorException("The provided file system account name does not belong to a valid ADLS account");
    }
}
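The linear scan over the listed accounts could also be written with Java 8 streams. This is only an equivalent formulation, not what the cloudbreak source does; it relies on list() returning a java.util.List, as the snippet above already assumes.

// Equivalent membership check using streams; behavior is unchanged.
boolean accountExists = adlsClient.accounts().list().stream()
        .anyMatch(account -> account.name().equalsIgnoreCase(accountName));
if (!accountExists) {
    throw new CloudConnectorException("The provided file system account name does not belong to a valid ADLS account");
}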