Usage example of com.google.api.ads.adwords.lib.utils.BatchJobUploadResponse in the project googleads-java-lib by googleads.
Source: class BatchJobLoggerTest, method testLogUpload.
/**
 * Confirms an upload is logged as expected: the delegate logger receives one summary and one
 * details call, both with the same {@link RemoteCallReturn} describing the upload.
 */
@Test
public void testLogUpload() throws IOException {
  // Build a fake upload response whose body is a known string.
  String uploadPayload = "some contents";
  InputStream uploadResponseStream =
      CharSource.wrap(uploadPayload).asByteSource(UTF_8).openStream();
  BatchJobUploadResponse uploadResponse =
      new BatchJobUploadResponse(
          uploadResponseStream, statusCode, statusMessage, uploadPayload.length(), URI.create(url));

  // Log the upload and capture the RemoteCallReturn handed to the delegate's summary logger.
  ArgumentCaptor<RemoteCallReturn> callReturnCaptor =
      ArgumentCaptor.forClass(RemoteCallReturn.class);
  batchJobLogger.logUpload(uploadPayload, URI.create(url), uploadResponse, exception);
  verify(loggerDelegate).logRequestSummary(callReturnCaptor.capture());
  RemoteCallReturn summaryReturn = callReturnCaptor.getValue();
  assertEquals(exception, summaryReturn.getException());

  // Request info: URL, context name/value, payload, and a service name mentioning "upload".
  RequestInfo requestInfo = summaryReturn.getRequestInfo();
  assertEquals(url, requestInfo.getUrl());
  assertEquals("clientCustomerId", requestInfo.getContextName());
  assertNull(requestInfo.getContextValue());
  assertThat(requestInfo.getPayload(), containsString(uploadPayload));
  assertThat(requestInfo.getServiceName(), containsString("upload"));

  // Response info: no request ID; payload starts with the HTTP status and contains the message.
  ResponseInfo responseInfo = summaryReturn.getResponseInfo();
  assertNull(responseInfo.getRequestId());
  assertThat(
      responseInfo.getPayload(), startsWith(String.valueOf(uploadResponse.getHttpStatus())));
  assertThat(
      responseInfo.getPayload(), containsString(uploadResponse.getHttpResponseMessage()));

  // The details call must receive the exact same object instance as the summary call.
  verify(loggerDelegate).logRequestDetails(callReturnCaptor.capture());
  assertSame(
      "The same RemoteCallReturn object was not passed to request details and request summary",
      summaryReturn,
      callReturnCaptor.getValue());
}
Usage example of com.google.api.ads.adwords.lib.utils.BatchJobUploadResponse in the project googleads-java-lib by googleads.
Source: class AddKeywordsUsingIncrementalBatchJob, method runExample.
/**
 * Runs the example: creates a batch job, uploads keyword ADD operations incrementally, polls the
 * job with exponential back off (requesting cancellation if it takes too long), and prints any
 * processing errors and per-operation results.
 *
 * @param adWordsServices the services factory.
 * @param session the session.
 * @param adGroupId the ID of the ad group where keywords will be added.
 * @throws BatchJobException if uploading operations or downloading results failed.
 * @throws ApiException if the API request failed with one or more service errors.
 * @throws RemoteException if the API request failed due to other errors.
 * @throws InterruptedException if the thread was interrupted while sleeping between retries.
 * @throws TimeoutException if the job did not complete after job status was polled {@link
 * #MAX_POLL_ATTEMPTS} times.
 */
public static void runExample(AdWordsServicesInterface adWordsServices, AdWordsSession session, Long adGroupId) throws RemoteException, BatchJobException, InterruptedException, TimeoutException {
// Get the BatchJobService.
BatchJobServiceInterface batchJobService = adWordsServices.get(session, BatchJobServiceInterface.class);
// Create an empty BatchJob via an ADD mutate; the API returns its ID, status, and upload URL.
BatchJobOperation addOp = new BatchJobOperation();
addOp.setOperator(Operator.ADD);
addOp.setOperand(new BatchJob());
BatchJob batchJob = batchJobService.mutate(new BatchJobOperation[] { addOp }).getValue(0);
System.out.printf("Created BatchJob with ID %d, status '%s' and upload URL %s.%n", batchJob.getId(), batchJob.getStatus(), batchJob.getUploadUrl().getUrl());
// Create a BatchJobHelper for uploading operations.
BatchJobHelper batchJobHelper = adWordsServices.getUtility(session, BatchJobHelper.class);
// Initial upload status: zero bytes uploaded so far, targeting the job's upload URL.
BatchJobUploadStatus batchJobUploadStatus = new BatchJobUploadStatus(0, URI.create(batchJob.getUploadUrl().getUrl()));
List<AdGroupCriterionOperation> operations = new ArrayList<>();
// Build the keyword operations, uploading them in batches of KEYWORDS_PER_UPLOAD, i.e.,
// incrementally.
for (int i = 0; i < NUMBER_OF_KEYWORDS_TO_ADD; i++) {
// Create Keyword.
String text = String.format("mars%d", i);
// Make 10% of keywords invalid to demonstrate error handling.
if (i % 10 == 0) {
text = text + "!!!";
}
Keyword keyword = new Keyword();
keyword.setText(text);
keyword.setMatchType(KeywordMatchType.BROAD);
// Create BiddableAdGroupCriterion.
BiddableAdGroupCriterion bagc = new BiddableAdGroupCriterion();
bagc.setAdGroupId(adGroupId);
bagc.setCriterion(keyword);
// Create AdGroupCriterionOperation.
AdGroupCriterionOperation agco = new AdGroupCriterionOperation();
agco.setOperand(bagc);
agco.setOperator(Operator.ADD);
// Add to the list of operations.
operations.add(agco);
// If the current list of operations has reached KEYWORDS_PER_UPLOAD or this is the last
// operation, upload the current list of operations.
boolean isLastOperation = i == NUMBER_OF_KEYWORDS_TO_ADD - 1;
if (operations.size() == KEYWORDS_PER_UPLOAD || isLastOperation) {
BatchJobUploadResponse uploadResponse = batchJobHelper.uploadIncrementalBatchJobOperations(operations, isLastOperation, batchJobUploadStatus);
System.out.printf("Uploaded %d operations for batch job with ID %d.%n", operations.size(), batchJob.getId());
// Set the batch job upload status and clear the operations list in preparation for the
// next upload.
batchJobUploadStatus = uploadResponse.getBatchJobUploadStatus();
operations.clear();
}
}
// Poll for completion of the batch job using an exponential back off.
int pollAttempts = 0;
boolean isPending;
boolean wasCancelRequested = false;
Selector selector = new SelectorBuilder().fields(BatchJobField.Id, BatchJobField.Status, BatchJobField.DownloadUrl, BatchJobField.ProcessingErrors, BatchJobField.ProgressStats).equalsId(batchJob.getId()).build();
do {
// Math.scalb(30, pollAttempts) == 30 * 2^pollAttempts seconds (exponential back off).
long sleepSeconds = (long) Math.scalb(30, pollAttempts);
System.out.printf("Sleeping %d seconds...%n", sleepSeconds);
Thread.sleep(sleepSeconds * 1000);
// Re-fetch the job to refresh its status (and, once done, its download URL and errors).
batchJob = batchJobService.get(selector).getEntries(0);
System.out.printf("Batch job ID %d has status '%s'.%n", batchJob.getId(), batchJob.getStatus());
pollAttempts++;
isPending = PENDING_STATUSES.contains(batchJob.getStatus());
// Request cancellation if the job is still pending after polling the maximum number of
// times.
if (isPending && !wasCancelRequested && pollAttempts == MAX_POLL_ATTEMPTS) {
batchJob.setStatus(BatchJobStatus.CANCELING);
BatchJobOperation batchJobSetOperation = new BatchJobOperation();
batchJobSetOperation.setOperand(batchJob);
batchJobSetOperation.setOperator(Operator.SET);
// Only request cancellation once per job.
wasCancelRequested = true;
try {
batchJob = batchJobService.mutate(new BatchJobOperation[] { batchJobSetOperation }).getValue(0);
System.out.printf("Requested cancellation of batch job with ID %d.%n", batchJob.getId());
} catch (ApiException e) {
// An INVALID_STATE_CHANGE BatchJobError means the job finished (or was already canceled)
// between the last poll and the cancel request, so the rejection is benign: continue
// polling to pick up the final status instead of rethrowing.
if (e.getErrors() != null && e.getErrors().length > 0 && e.getErrors(0) instanceof BatchJobError) {
BatchJobError batchJobError = (BatchJobError) e.getErrors(0);
if (BatchJobErrorReason.INVALID_STATE_CHANGE.equals(batchJobError.getReason())) {
System.out.printf("Attempt to cancel batch job with ID %d was rejected because the job already " + "completed or was canceled.", batchJob.getId());
continue;
}
}
throw e;
} finally {
// Reset the poll attempt counter to wait for cancellation.
// NOTE: this finally runs even on the 'continue' above, so the loop condition
// (pollAttempts < MAX_POLL_ATTEMPTS) always allows further polling after a cancel attempt.
pollAttempts = 0;
}
}
} while (isPending && pollAttempts < MAX_POLL_ATTEMPTS);
if (isPending) {
throw new TimeoutException("Job is still in pending state after polling " + MAX_POLL_ATTEMPTS + " times.");
}
// Report job-level processing errors, if any.
if (batchJob.getProcessingErrors() != null) {
int errorIndex = 0;
for (BatchJobProcessingError processingError : batchJob.getProcessingErrors()) {
System.out.printf(" Processing error [%d]: errorType=%s, trigger=%s, errorString=%s, fieldPath=%s" + ", reason=%s%n", errorIndex++, processingError.getApiErrorType(), processingError.getTrigger(), processingError.getErrorString(), processingError.getFieldPath(), processingError.getReason());
}
} else {
System.out.println("No processing errors found.");
}
// Download and print the per-operation results when the job produced a download URL.
if (batchJob.getDownloadUrl() != null && batchJob.getDownloadUrl().getUrl() != null) {
BatchJobMutateResponse mutateResponse = batchJobHelper.downloadBatchJobMutateResponse(batchJob.getDownloadUrl().getUrl());
System.out.printf("Downloaded results from %s:%n", batchJob.getDownloadUrl().getUrl());
for (MutateResult mutateResult : mutateResponse.getMutateResults()) {
// An operation succeeded iff its result carries no error list.
String outcome = mutateResult.getErrorList() == null ? "SUCCESS" : "FAILURE";
System.out.printf(" Operation [%d] - %s%n", mutateResult.getIndex(), outcome);
}
} else {
System.out.println("No results available for download.");
}
}
Aggregations