Use of com.microsoft.azure.PagedList in project azure-sdk-for-java by Azure.
The class AccountOperations, method listNodeAgentSkus.
/**
* Enumerates the node agent SKU values supported by Batch Service.
*
* @param detailLevel A {@link DetailLevel} used for filtering the list and for controlling which properties are retrieved from the service.
* @param additionalBehaviors A collection of {@link BatchClientBehavior} instances that are applied to the Batch service request.
* @return A collection of {@link NodeAgentSku} that can be used to enumerate node agent SKU values
* @throws BatchErrorException Exception thrown from REST call
* @throws IOException Exception thrown from serialization/deserialization
*/
public List<NodeAgentSku> listNodeAgentSkus(DetailLevel detailLevel, Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException {
    AccountListNodeAgentSkusOptions options = new AccountListNodeAgentSkusOptions();
    BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors);
    bhMgr.appendDetailLevelToPerCallBehaviors(detailLevel);
    bhMgr.applyRequestBehaviors(options);
    ServiceResponseWithHeaders<PagedList<NodeAgentSku>, AccountListNodeAgentSkusHeaders> response = this._parentBatchClient.protocolLayer().accounts().listNodeAgentSkus(options);
    return response.getBody();
}
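For callers, the PagedList returned through this convenience layer can be consumed like any other java.util.List. A minimal usage sketch, assuming an already-authenticated BatchClient named client that exposes these operations via accountOperations(), and assuming NodeAgentSku exposes the usual generated id() getter:

    // Hypothetical usage sketch; `client` is assumed to be an authenticated BatchClient.
    List<NodeAgentSku> skus = client.accountOperations().listNodeAgentSkus(null, null);
    for (NodeAgentSku sku : skus) {
        // id() is assumed to be the generated getter for the SKU identifier.
        System.out.println("Node agent SKU: " + sku.id());
    }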
Use of com.microsoft.azure.PagedList in project azure-sdk-for-java by Azure.
The class TasksImpl, method list.
/**
* Lists all of the tasks that are associated with the specified job.
*
* @param jobId The id of the job.
* @throws BatchErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @throws IllegalArgumentException exception thrown from invalid parameters
* @return the List<CloudTask> object wrapped in {@link ServiceResponseWithHeaders} if successful.
*/
public ServiceResponseWithHeaders<PagedList<CloudTask>, TaskListHeaders> list(final String jobId) throws BatchErrorException, IOException, IllegalArgumentException {
    if (jobId == null) {
        throw new IllegalArgumentException("Parameter jobId is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    // No-options overload: all optional query parameters are left at their null defaults.
    final TaskListOptions taskListOptions = null;
    String filter = null;
    String select = null;
    String expand = null;
    Integer maxResults = null;
    Integer timeout = null;
    String clientRequestId = null;
    Boolean returnClientRequestId = null;
    DateTime ocpDate = null;
    DateTimeRfc1123 ocpDateConverted = null;
    if (ocpDate != null) {
        ocpDateConverted = new DateTimeRfc1123(ocpDate);
    }
    Call<ResponseBody> call = service.list(jobId, this.client.apiVersion(), this.client.acceptLanguage(), filter, select, expand, maxResults, timeout, clientRequestId, returnClientRequestId, ocpDateConverted, this.client.userAgent());
    ServiceResponseWithHeaders<PageImpl<CloudTask>, TaskListHeaders> response = listDelegate(call.execute());
    PagedList<CloudTask> result = new PagedList<CloudTask>(response.getBody()) {
        @Override
        public Page<CloudTask> nextPage(String nextPageLink) throws BatchErrorException, IOException {
            return listNext(nextPageLink, null).getBody();
        }
    };
    return new ServiceResponseWithHeaders<>(result, response.getHeaders(), response.getResponse());
}
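The anonymous PagedList subclass above is what makes paging transparent: its nextPage override is invoked whenever iteration runs past the items fetched so far. A hedged consumer sketch, assuming response is the value returned by list(jobId) and that CloudTask exposes the generated id() getter:

    PagedList<CloudTask> tasks = response.getBody();
    for (CloudTask task : tasks) {
        // Iterating past the current page triggers nextPage(nextPageLink) behind the scenes.
        System.out.println(task.id());
    }
    // Or eagerly resolve every remaining page in one go:
    tasks.loadAll();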
Use of com.microsoft.azure.PagedList in project azure-sdk-for-java by Azure.
The class JobsImpl, method listFromJobSchedule.
/**
* Lists the jobs that have been created under the specified job schedule.
*
* @param jobScheduleId The id of the job schedule from which you want to get a list of jobs.
* @param jobListFromJobScheduleOptions Additional parameters for the operation
* @throws BatchErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @throws IllegalArgumentException exception thrown from invalid parameters
* @return the List<CloudJob> object wrapped in {@link ServiceResponseWithHeaders} if successful.
*/
public ServiceResponseWithHeaders<PagedList<CloudJob>, JobListFromJobScheduleHeaders> listFromJobSchedule(final String jobScheduleId, final JobListFromJobScheduleOptions jobListFromJobScheduleOptions) throws BatchErrorException, IOException, IllegalArgumentException {
    if (jobScheduleId == null) {
        throw new IllegalArgumentException("Parameter jobScheduleId is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    Validator.validate(jobListFromJobScheduleOptions);
    // Unpack the optional query parameters; each stays null when no options object is supplied.
    String filter = null;
    if (jobListFromJobScheduleOptions != null) {
        filter = jobListFromJobScheduleOptions.filter();
    }
    String select = null;
    if (jobListFromJobScheduleOptions != null) {
        select = jobListFromJobScheduleOptions.select();
    }
    String expand = null;
    if (jobListFromJobScheduleOptions != null) {
        expand = jobListFromJobScheduleOptions.expand();
    }
    Integer maxResults = null;
    if (jobListFromJobScheduleOptions != null) {
        maxResults = jobListFromJobScheduleOptions.maxResults();
    }
    Integer timeout = null;
    if (jobListFromJobScheduleOptions != null) {
        timeout = jobListFromJobScheduleOptions.timeout();
    }
    String clientRequestId = null;
    if (jobListFromJobScheduleOptions != null) {
        clientRequestId = jobListFromJobScheduleOptions.clientRequestId();
    }
    Boolean returnClientRequestId = null;
    if (jobListFromJobScheduleOptions != null) {
        returnClientRequestId = jobListFromJobScheduleOptions.returnClientRequestId();
    }
    DateTime ocpDate = null;
    if (jobListFromJobScheduleOptions != null) {
        ocpDate = jobListFromJobScheduleOptions.ocpDate();
    }
    DateTimeRfc1123 ocpDateConverted = null;
    if (ocpDate != null) {
        ocpDateConverted = new DateTimeRfc1123(ocpDate);
    }
    Call<ResponseBody> call = service.listFromJobSchedule(jobScheduleId, this.client.apiVersion(), this.client.acceptLanguage(), filter, select, expand, maxResults, timeout, clientRequestId, returnClientRequestId, ocpDateConverted, this.client.userAgent());
    ServiceResponseWithHeaders<PageImpl<CloudJob>, JobListFromJobScheduleHeaders> response = listFromJobScheduleDelegate(call.execute());
    PagedList<CloudJob> result = new PagedList<CloudJob>(response.getBody()) {
        @Override
        public Page<CloudJob> nextPage(String nextPageLink) throws BatchErrorException, IOException {
            // Carry the client-request headers forward when fetching the next page.
            JobListFromJobScheduleNextOptions jobListFromJobScheduleNextOptions = null;
            if (jobListFromJobScheduleOptions != null) {
                jobListFromJobScheduleNextOptions = new JobListFromJobScheduleNextOptions();
                jobListFromJobScheduleNextOptions.withClientRequestId(jobListFromJobScheduleOptions.clientRequestId());
                jobListFromJobScheduleNextOptions.withReturnClientRequestId(jobListFromJobScheduleOptions.returnClientRequestId());
                jobListFromJobScheduleNextOptions.withOcpDate(jobListFromJobScheduleOptions.ocpDate());
            }
            return listFromJobScheduleNext(nextPageLink, jobListFromJobScheduleNextOptions).getBody();
        }
    };
    return new ServiceResponseWithHeaders<>(result, response.getHeaders(), response.getResponse());
}
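Note that only the client-request headers are re-applied on next-page calls; the query itself is supplied up front through the options object. A sketch of passing options, assuming the usual AutoRest-generated with* setters on JobListFromJobScheduleOptions and a hypothetical jobsImpl reference to this class:

    JobListFromJobScheduleOptions options = new JobListFromJobScheduleOptions()
            .withFilter("state eq 'active'")   // assumed generated setter and example filter
            .withMaxResults(100);              // assumed generated setter
    ServiceResponseWithHeaders<PagedList<CloudJob>, JobListFromJobScheduleHeaders> jobs =
            jobsImpl.listFromJobSchedule("myjobschedule", options);
    for (CloudJob job : jobs.getBody()) {
        System.out.println(job.id());          // id() assumed to be the generated getter
    }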
Use of com.microsoft.azure.PagedList in project azure-sdk-for-java by Azure.
The class FilesImpl, method listFromComputeNode.
/**
* Lists all of the files in task directories on the specified compute node.
*
* @param poolId The id of the pool that contains the compute node.
* @param nodeId The id of the compute node whose files you want to list.
* @throws BatchErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @throws IllegalArgumentException exception thrown from invalid parameters
* @return the List<NodeFile> object wrapped in {@link ServiceResponseWithHeaders} if successful.
*/
public ServiceResponseWithHeaders<PagedList<NodeFile>, FileListFromComputeNodeHeaders> listFromComputeNode(final String poolId, final String nodeId) throws BatchErrorException, IOException, IllegalArgumentException {
    if (poolId == null) {
        throw new IllegalArgumentException("Parameter poolId is required and cannot be null.");
    }
    if (nodeId == null) {
        throw new IllegalArgumentException("Parameter nodeId is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    // No-options overload: all optional query parameters are left at their null defaults.
    final Boolean recursive = null;
    final FileListFromComputeNodeOptions fileListFromComputeNodeOptions = null;
    String filter = null;
    Integer maxResults = null;
    Integer timeout = null;
    String clientRequestId = null;
    Boolean returnClientRequestId = null;
    DateTime ocpDate = null;
    DateTimeRfc1123 ocpDateConverted = null;
    if (ocpDate != null) {
        ocpDateConverted = new DateTimeRfc1123(ocpDate);
    }
    Call<ResponseBody> call = service.listFromComputeNode(poolId, nodeId, recursive, this.client.apiVersion(), this.client.acceptLanguage(), filter, maxResults, timeout, clientRequestId, returnClientRequestId, ocpDateConverted, this.client.userAgent());
    ServiceResponseWithHeaders<PageImpl<NodeFile>, FileListFromComputeNodeHeaders> response = listFromComputeNodeDelegate(call.execute());
    PagedList<NodeFile> result = new PagedList<NodeFile>(response.getBody()) {
        @Override
        public Page<NodeFile> nextPage(String nextPageLink) throws BatchErrorException, IOException {
            return listFromComputeNodeNext(nextPageLink, null).getBody();
        }
    };
    return new ServiceResponseWithHeaders<>(result, response.getHeaders(), response.getResponse());
}
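Beyond the implicit paging during iteration, PagedList also allows explicit page control through hasNextPage() and loadNextPage(). A hedged sketch, assuming response is the value returned by listFromComputeNode(poolId, nodeId) and that NodeFile exposes the generated name() getter:

    PagedList<NodeFile> files = response.getBody();
    // Fetch any remaining pages explicitly instead of relying on lazy iteration.
    while (files.hasNextPage()) {
        files.loadNextPage();
    }
    System.out.println("Total files across all pages: " + files.size());
    for (NodeFile file : files) {
        System.out.println(file.name());
    }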
Use of com.microsoft.azure.PagedList in project azure-sdk-for-java by Azure.
The class ManageVirtualMachineScaleSetAsync, method runSample.
/**
* Main function which runs the actual sample.
* @param azure instance of the azure client
* @return true if sample runs successfully
*/
public static boolean runSample(final Azure azure) {
    final Region region = Region.US_WEST_CENTRAL;
    final String rgName = SdkContext.randomResourceName("rgCOVS", 15);
    final String vnetName = SdkContext.randomResourceName("vnet", 24);
    final String loadBalancerName1 = SdkContext.randomResourceName("intlb" + "-", 18);
    final String publicIpName = "pip-" + loadBalancerName1;
    final String frontendName = loadBalancerName1 + "-FE1";
    final String backendPoolName1 = loadBalancerName1 + "-BAP1";
    final String backendPoolName2 = loadBalancerName1 + "-BAP2";
    final String httpProbe = "httpProbe";
    final String httpsProbe = "httpsProbe";
    final String httpLoadBalancingRule = "httpRule";
    final String httpsLoadBalancingRule = "httpsRule";
    final String natPool50XXto22 = "natPool50XXto22";
    final String natPool60XXto23 = "natPool60XXto23";
    final String vmssName = SdkContext.randomResourceName("vmss", 24);
    final String userName = "tirekicker";
    final String sshKey = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfSPC2K7LZcFKEO+/t3dzmQYtrJFZNxOsbVgOVKietqHyvmYGHEC0J2wPdAqQ/63g/hhAEFRoyehM+rbeDri4txB3YFfnOK58jqdkyXzupWqXzOrlKY4Wz9SKjjN765+dqUITjKRIaAip1Ri137szRg71WnrmdP3SphTRlCx1Bk2nXqWPsclbRDCiZeF8QOTi4JqbmJyK5+0UqhqYRduun8ylAwKKQJ1NJt85sYIHn9f1Rfr6Tq2zS0wZ7DHbZL+zB5rSlAr8QyUdg/GQD+cmSs6LvPJKL78d6hMGk84ARtFo4A79ovwX/Fj01znDQkU6nJildfkaolH2rWFG/qttD azjava@javalib.com";
    final String apacheInstallScript = "https://raw.githubusercontent.com/Azure/azure-sdk-for-java/master/azure-samples/src/main/resources/install_apache.sh";
    final String installCommand = "bash install_apache.sh";
    List<String> fileUris = new ArrayList<>();
    fileUris.add(apacheInstallScript);
    try {
        //=============================================================
        // Create a virtual network with a frontend subnet
        System.out.println("Creating virtual network with a frontend subnet ...");
        System.out.println("Creating a public IP address...");
        System.out.println("Creating a load balancer");
        final List<Indexable> createdResources = new ArrayList<>();
        Observable.merge(
                azure.networks().define(vnetName)
                        .withRegion(region)
                        .withNewResourceGroup(rgName)
                        .withAddressSpace("172.16.0.0/16")
                        .defineSubnet("Front-end")
                            .withAddressPrefix("172.16.1.0/24")
                            .attach()
                        .createAsync(),
                azure.publicIPAddresses().define(publicIpName)
                        .withRegion(region)
                        .withExistingResourceGroup(rgName)
                        .withLeafDomainLabel(publicIpName)
                        .createAsync()
                        .flatMap(new Func1<Indexable, Observable<Indexable>>() {
                            @Override
                            public Observable<Indexable> call(Indexable indexable) {
                                if (indexable instanceof PublicIPAddress) {
                                    PublicIPAddress publicIp = (PublicIPAddress) indexable;
                                    //=============================================================
                                    // Create an Internet facing load balancer with
                                    // One frontend IP address
                                    // Two backend address pools which contain network interfaces for the virtual
                                    //   machines to receive HTTP and HTTPS network traffic from the load balancer
                                    // Two load balancing rules for HTTP and HTTPS to map public ports on the load
                                    //   balancer to ports in the backend address pool
                                    // Two probes which contain HTTP and HTTPS health probes used to check availability
                                    //   of virtual machines in the backend address pool
                                    // Two inbound NAT rules which contain rules that map a public port on the load
                                    //   balancer to a port for a specific virtual machine in the backend address pool
                                    //   - this provides direct VM connectivity for SSH to port 22 and TELNET to port 23
                                    System.out.println("Creating an Internet facing load balancer with ...");
                                    System.out.println("- A frontend IP address");
                                    System.out.println("- Two backend address pools which contain network interfaces for the virtual\n" + " machines to receive HTTP and HTTPS network traffic from the load balancer");
                                    System.out.println("- Two load balancing rules for HTTP and HTTPS to map public ports on the load\n" + " balancer to ports in the backend address pool");
                                    System.out.println("- Two probes which contain HTTP and HTTPS health probes used to check availability\n" + " of virtual machines in the backend address pool");
                                    System.out.println("- Two inbound NAT rules which contain rules that map a public port on the load\n" + " balancer to a port for a specific virtual machine in the backend address pool\n" + " - this provides direct VM connectivity for SSH to port 22 and TELNET to port 23");
                                    return Observable.merge(
                                            Observable.just(indexable),
                                            azure.loadBalancers().define(loadBalancerName1)
                                                    .withRegion(region)
                                                    .withExistingResourceGroup(rgName)
                                                    .definePublicFrontend(frontendName)
                                                        .withExistingPublicIPAddress(publicIp)
                                                        .attach()
                                                    .defineBackend(backendPoolName1)
                                                        .attach()
                                                    .defineBackend(backendPoolName2)
                                                        .attach()
                                                    .defineHttpProbe(httpProbe)
                                                        .withRequestPath("/")
                                                        .withPort(80)
                                                        .attach()
                                                    .defineHttpProbe(httpsProbe)
                                                        .withRequestPath("/")
                                                        .withPort(443)
                                                        .attach()
                                                    .defineLoadBalancingRule(httpLoadBalancingRule)
                                                        .withProtocol(TransportProtocol.TCP)
                                                        .withFrontend(frontendName)
                                                        .withFrontendPort(80)
                                                        .withProbe(httpProbe)
                                                        .withBackend(backendPoolName1)
                                                        .attach()
                                                    .defineLoadBalancingRule(httpsLoadBalancingRule)
                                                        .withProtocol(TransportProtocol.TCP)
                                                        .withFrontend(frontendName)
                                                        .withFrontendPort(443)
                                                        .withProbe(httpsProbe)
                                                        .withBackend(backendPoolName2)
                                                        .attach()
                                                    .defineInboundNatPool(natPool50XXto22)
                                                        .withProtocol(TransportProtocol.TCP)
                                                        .withFrontend(frontendName)
                                                        .withFrontendPortRange(5000, 5099)
                                                        .withBackendPort(22)
                                                        .attach()
                                                    .defineInboundNatPool(natPool60XXto23)
                                                        .withProtocol(TransportProtocol.TCP)
                                                        .withFrontend(frontendName)
                                                        .withFrontendPortRange(6000, 6099)
                                                        .withBackendPort(23)
                                                        .attach()
                                                    .createAsync());
                                }
                                return Observable.just(indexable);
                            }
                        }))
                .toBlocking()
                .subscribe(new Action1<Indexable>() {
                    @Override
                    public void call(Indexable indexable) {
                        createdResources.add(indexable);
                    }
                });
        Network network = null;
        PublicIPAddress publicIPAddress = null;
        LoadBalancer loadBalancer1 = null;
        for (Indexable indexable : createdResources) {
            if (indexable instanceof PublicIPAddress) {
                publicIPAddress = (PublicIPAddress) indexable;
                System.out.println("Created a public IP address");
                // Print the public IP address details
                Utils.print(publicIPAddress);
            } else if (indexable instanceof Network) {
                network = (Network) indexable;
                System.out.println("Created a virtual network");
                // Print the virtual network details
                Utils.print(network);
            } else if (indexable instanceof LoadBalancer) {
                loadBalancer1 = (LoadBalancer) indexable;
                // Print load balancer details
                System.out.println("Created a load balancer");
                Utils.print(loadBalancer1);
            }
        }
        //=============================================================
        // Create a virtual machine scale set with three virtual machines
        // And, install Apache Web servers on them
        System.out.println("Creating virtual machine scale set with three virtual machines" + " in the frontend subnet ...");
        final Date t1 = new Date();
        VirtualMachineScaleSet virtualMachineScaleSet = (VirtualMachineScaleSet) azure.virtualMachineScaleSets().define(vmssName)
                .withRegion(region)
                .withExistingResourceGroup(rgName)
                .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_D3_V2)
                .withExistingPrimaryNetworkSubnet(network, "Front-end")
                .withExistingPrimaryInternetFacingLoadBalancer(loadBalancer1)
                .withPrimaryInternetFacingLoadBalancerBackends(backendPoolName1, backendPoolName2)
                .withPrimaryInternetFacingLoadBalancerInboundNatPools(natPool50XXto22, natPool60XXto23)
                .withoutPrimaryInternalLoadBalancer()
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername(userName)
                .withSsh(sshKey)
                .withNewDataDisk(100)
                .withNewDataDisk(100, 1, CachingTypes.READ_WRITE)
                .withNewDataDisk(100, 2, CachingTypes.READ_WRITE, StorageAccountTypes.STANDARD_LRS)
                .withCapacity(3)
                .defineNewExtension("CustomScriptForLinux")
                    .withPublisher("Microsoft.OSTCExtensions")
                    .withType("CustomScriptForLinux")
                    .withVersion("1.4")
                    .withMinorVersionAutoUpgrade()
                    .withPublicSetting("fileUris", fileUris)
                    .withPublicSetting("commandToExecute", installCommand)
                    .attach()
                .createAsync()
                .map(new Func1<Indexable, Indexable>() {
                    @Override
                    public Indexable call(Indexable indexable) {
                        Date t2 = new Date();
                        System.out.println("Created a virtual machine scale set with " + "3 Linux VMs & Apache Web servers on them: (took " + ((t2.getTime() - t1.getTime()) / 1000) + " seconds) ");
                        System.out.println();
                        return indexable;
                    }
                })
                .toBlocking().last();
        final String pipFqdn = publicIPAddress.fqdn();
        //=============================================================
        // List virtual machine scale set instance network interfaces and SSH connection string
        System.out.println("Listing scale set virtual machine instance network interfaces and SSH connection string...");
        virtualMachineScaleSet.virtualMachines().listAsync()
                .map(new Func1<VirtualMachineScaleSetVM, VirtualMachineScaleSetVM>() {
                    @Override
                    public VirtualMachineScaleSetVM call(VirtualMachineScaleSetVM instance) {
                        System.out.println("Scale set virtual machine instance #" + instance.instanceId());
                        System.out.println(instance.id());
                        PagedList<VirtualMachineScaleSetNetworkInterface> networkInterfaces = instance.listNetworkInterfaces();
                        // Pick the first NIC
                        VirtualMachineScaleSetNetworkInterface networkInterface = networkInterfaces.get(0);
                        for (VirtualMachineScaleSetNicIPConfiguration ipConfig : networkInterface.ipConfigurations().values()) {
                            if (ipConfig.isPrimary()) {
                                List<LoadBalancerInboundNatRule> natRules = ipConfig.listAssociatedLoadBalancerInboundNatRules();
                                for (LoadBalancerInboundNatRule natRule : natRules) {
                                    if (natRule.backendPort() == 22) {
                                        System.out.println("SSH connection string: " + userName + "@" + pipFqdn + ":" + natRule.frontendPort());
                                        break;
                                    }
                                }
                                break;
                            }
                        }
                        return instance;
                    }
                }).toBlocking().subscribe();
        //=============================================================
        // Power off, deallocate and start the virtual machine scale set, then update and restart it
        System.out.println("Updating virtual machine scale set ...");
        virtualMachineScaleSet.powerOffAsync()
                .concatWith(virtualMachineScaleSet.deallocateAsync())
                .concatWith(virtualMachineScaleSet.startAsync())
                .toObservable()
                .concatWith(virtualMachineScaleSet.update()
                        .withCapacity(6)
                        .withoutDataDisk(0)
                        .withoutDataDisk(200)
                        .applyAsync()
                        .flatMap(new Func1<VirtualMachineScaleSet, Observable<Void>>() {
                            @Override
                            public Observable<Void> call(VirtualMachineScaleSet virtualMachineScaleSet) {
                                System.out.println("Updated virtual machine scale set");
                                // Restart the virtual machine scale set
                                return virtualMachineScaleSet.restartAsync().toObservable();
                            }
                        }))
                .toBlocking().subscribe();
        return true;
    } catch (Exception f) {
        System.out.println(f.getMessage());
        f.printStackTrace();
    } finally {
        try {
            System.out.println("Deleting Resource Group: " + rgName);
            azure.resourceGroups().deleteByName(rgName);
            System.out.println("Deleted Resource Group: " + rgName);
        } catch (NullPointerException npe) {
            System.out.println("Did not create any resources in Azure. No clean up is necessary");
        } catch (Exception g) {
            g.printStackTrace();
        }
    }
    return false;
}
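The fluent compute API used in this sample also returns PagedList from its synchronous list calls, so instance enumeration there pages lazily in the same way. A hedged sketch, assuming virtualMachineScaleSet is the scale set created above:

    // Synchronous counterpart of the listAsync() call used in the sample; also a PagedList.
    PagedList<VirtualMachineScaleSetVM> instances = virtualMachineScaleSet.virtualMachines().list();
    for (VirtualMachineScaleSetVM instance : instances) {
        // Additional pages, if any, are fetched transparently while iterating.
        System.out.println(instance.instanceId() + ": " + instance.id());
    }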