Use of com.google.api.services.bigquery.model.TableDataInsertAllResponse in project beam by apache.
From class BigQueryServicesImplTest, method testInsertRateLimitRetry.
/**
 * Tests that {@link DatasetServiceImpl#insertAll} retries rate limited attempts.
 */
@Test
public void testInsertRateLimitRetry() throws Exception {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<FailsafeValueInSingleWindow<TableRow, TableRow>> rows = new ArrayList<>();
  rows.add(wrapValue(new TableRow()));
  // First response is 403 rate limited, second response has valid payload.
  setupMockResponses(
      response -> {
        when(response.getStatusCode()).thenReturn(403);
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getContent())
            .thenReturn(toStream(errorWithReasonAndStatus("rateLimitExceeded", 403)));
      },
      response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(new TableDataInsertAllResponse()));
      });
  DatasetServiceImpl dataService =
      new DatasetServiceImpl(bigquery, null, PipelineOptionsFactory.create());
  dataService.insertAll(
      ref, rows, null,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()), TEST_BACKOFF, new MockSleeper(),
      InsertRetryPolicy.alwaysRetry(), null, null, false, false, false, null);
  verifyAllResponsesAreRead();
  expectedLogs.verifyInfo("BigQuery insertAll error, retrying:");
  verifyWriteMetricWasSet("project", "dataset", "table", "ratelimitexceeded", 1);
}
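The 403 payload mocked above comes from the test's errorWithReasonAndStatus helper, whose JSON body carries the reason string ("rateLimitExceeded") that the retry logic keys on. Below is a minimal sketch of such a helper, assuming the google-api-client error model (GoogleJsonErrorContainer); the actual helper in BigQueryServicesImplTest may differ in detail.

import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonError.ErrorInfo;
import com.google.api.client.googleapis.json.GoogleJsonErrorContainer;
import com.google.common.collect.ImmutableList;

// Sketch only: builds the JSON error container a BigQuery error response would carry.
static GoogleJsonErrorContainer errorWithReasonAndStatus(String reason, int status) {
  // A single ErrorInfo holding the reason the retry logic inspects, e.g. "rateLimitExceeded".
  ErrorInfo info = new ErrorInfo();
  info.setReason(reason);
  info.setDomain("global");
  // Wrap it in a GoogleJsonError carrying the HTTP status code.
  GoogleJsonError error = new GoogleJsonError();
  error.setErrors(ImmutableList.of(info));
  error.setCode(status);
  // The body of an error response is a container around that error.
  GoogleJsonErrorContainer container = new GoogleJsonErrorContainer();
  container.setError(error);
  return container;
}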
Use of com.google.api.services.bigquery.model.TableDataInsertAllResponse in project beam by apache.
From class BigQueryServicesImplTest, method testInsertWithinRequestByteSizeLimits.
/**
 * Tests that {@link DatasetServiceImpl#insertAll} does not exceed the per-request byte size limit.
 */
@Test
public void testInsertWithinRequestByteSizeLimits() throws Exception {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<FailsafeValueInSingleWindow<TableRow, TableRow>> rows =
      ImmutableList.of(
          wrapValue(new TableRow().set("row", "a")),
          wrapValue(new TableRow().set("row", "b")),
          wrapValue(new TableRow().set("row", "cdefghijklmnopqrstuvwxyz")));
  List<String> insertIds = ImmutableList.of("a", "b", "c");
  final TableDataInsertAllResponse allRowsSucceeded = new TableDataInsertAllResponse();
  setupMockResponses(
      response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(allRowsSucceeded));
      },
      response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(allRowsSucceeded));
      });
  DatasetServiceImpl dataService =
      new DatasetServiceImpl(
          bigquery, null, PipelineOptionsFactory.fromArgs("--maxStreamingBatchSize=15").create());
  dataService.insertAll(
      ref, rows, insertIds,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()), TEST_BACKOFF, new MockSleeper(),
      InsertRetryPolicy.retryTransientErrors(), new ArrayList<>(),
      ErrorContainer.TABLE_ROW_ERROR_CONTAINER, false, false, false, null);
  verifyAllResponsesAreRead();
  verifyWriteMetricWasSet("project", "dataset", "table", "ok", 2);
}
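In these tests the mocked getContent() stream is produced by a toStream helper that serializes the API model object to its JSON wire form. A minimal sketch follows, assuming the google-http-client Jackson JSON factory; the real helper may be implemented differently.

import com.google.api.client.json.jackson2.JacksonFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

// Sketch only: serialize a JSON model object (e.g. TableDataInsertAllResponse) so a mocked
// low-level HTTP response can return it from getContent().
static InputStream toStream(Object content) throws IOException {
  return new ByteArrayInputStream(JacksonFactory.getDefaultInstance().toByteArray(content));
}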
Use of com.google.api.services.bigquery.model.TableDataInsertAllResponse in project beam by apache.
From class BigQueryServicesImplTest, method testInsertWithinRowCountLimits.
/**
 * Tests that {@link DatasetServiceImpl#insertAll} does not exceed the limit on rows per request.
 */
@Test
public void testInsertWithinRowCountLimits() throws Exception {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<FailsafeValueInSingleWindow<TableRow, TableRow>> rows =
      ImmutableList.of(
          wrapValue(new TableRow().set("row", "a")),
          wrapValue(new TableRow().set("row", "b")),
          wrapValue(new TableRow().set("row", "c")));
  List<String> insertIds = ImmutableList.of("a", "b", "c");
  final TableDataInsertAllResponse allRowsSucceeded = new TableDataInsertAllResponse();
  setupMockResponses(
      response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(allRowsSucceeded));
      },
      response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(allRowsSucceeded));
      },
      response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(allRowsSucceeded));
      });
  DatasetServiceImpl dataService =
      new DatasetServiceImpl(
          bigquery, null, PipelineOptionsFactory.fromArgs("--maxStreamingRowsToBatch=1").create());
  dataService.insertAll(
      ref, rows, insertIds,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()), TEST_BACKOFF, new MockSleeper(),
      InsertRetryPolicy.alwaysRetry(), null, null, false, false, false, null);
  verifyAllResponsesAreRead();
  verifyWriteMetricWasSet("project", "dataset", "table", "ok", 3);
}
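The two tests above drive batching through the --maxStreamingBatchSize and --maxStreamingRowsToBatch flags. A minimal sketch of setting the same limits programmatically, assuming these flags are backed by setters on Beam's BigQueryOptions; batchingOptions is a hypothetical helper name.

import org.apache.beam.sdk.io.gcp.bigquery.BigQueryOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

// Sketch only: the same limits the tests pass as command-line flags, set via the
// (assumed) BigQueryOptions setters instead of PipelineOptionsFactory.fromArgs(...).
static BigQueryOptions batchingOptions() {
  BigQueryOptions options = PipelineOptionsFactory.create().as(BigQueryOptions.class);
  options.setMaxStreamingRowsToBatch(1L);  // at most one row per insertAll request
  options.setMaxStreamingBatchSize(15L);   // at most 15 bytes of row payload per request
  return options;
}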
Use of com.google.api.services.bigquery.model.TableDataInsertAllResponse in project google-cloud-java by GoogleCloudPlatform.
From class BigQueryImplTest, method testInsertAllWithProject.
@Test
public void testInsertAllWithProject() {
  Map<String, Object> row1 = ImmutableMap.<String, Object>of("field", "value1");
  Map<String, Object> row2 = ImmutableMap.<String, Object>of("field", "value2");
  List<RowToInsert> rows =
      ImmutableList.of(new RowToInsert("row1", row1), new RowToInsert("row2", row2));
  TableId tableId = TableId.of(OTHER_PROJECT, DATASET, TABLE);
  InsertAllRequest request =
      InsertAllRequest.newBuilder(tableId).setRows(rows).setSkipInvalidRows(false)
          .setIgnoreUnknownValues(true).setTemplateSuffix("suffix").build();
  TableDataInsertAllRequest requestPb =
      new TableDataInsertAllRequest()
          .setRows(Lists.transform(rows,
              new Function<RowToInsert, TableDataInsertAllRequest.Rows>() {
                @Override
                public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) {
                  return new TableDataInsertAllRequest.Rows()
                      .setInsertId(rowToInsert.getId()).setJson(rowToInsert.getContent());
                }
              }))
          .setSkipInvalidRows(false).setIgnoreUnknownValues(true).setTemplateSuffix("suffix");
  TableDataInsertAllResponse responsePb =
      new TableDataInsertAllResponse()
          .setInsertErrors(ImmutableList.of(
              new TableDataInsertAllResponse.InsertErrors()
                  .setIndex(0L)
                  .setErrors(ImmutableList.of(new ErrorProto().setMessage("ErrorMessage")))));
  EasyMock.expect(bigqueryRpcMock.insertAll(OTHER_PROJECT, DATASET, TABLE, requestPb))
      .andReturn(responsePb);
  EasyMock.replay(bigqueryRpcMock);
  bigquery = options.getService();
  InsertAllResponse response = bigquery.insertAll(request);
  assertNotNull(response.getErrorsFor(0L));
  assertNull(response.getErrorsFor(1L));
  assertEquals(1, response.getErrorsFor(0L).size());
  assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage());
}
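Outside the test, a caller would typically branch on the same InsertAllResponse accessors the assertions use (hasErrors, getInsertErrors, getErrorsFor). A minimal sketch of that pattern; logInsertErrors is a hypothetical helper, not part of the google-cloud-java API.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryError;
import com.google.cloud.bigquery.InsertAllRequest;
import com.google.cloud.bigquery.InsertAllResponse;
import java.util.List;
import java.util.Map;

// Sketch only: report per-row errors returned by a streaming insert.
static void logInsertErrors(BigQuery bigquery, InsertAllRequest request) {
  InsertAllResponse response = bigquery.insertAll(request);
  if (response.hasErrors()) {
    // getInsertErrors() maps the index of each failed row to its list of errors.
    for (Map.Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
      for (BigQueryError error : entry.getValue()) {
        System.err.println("Row " + entry.getKey() + " failed: " + error.getMessage());
      }
    }
  }
}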
Use of com.google.api.services.bigquery.model.TableDataInsertAllResponse in project beam by apache.
From class BigQueryServicesImplTest, method testInsertDoesNotRetry.
/**
 * Tests that {@link DatasetServiceImpl#insertAll} does not retry non-rate-limited attempts.
 */
@Test
public void testInsertDoesNotRetry() throws Throwable {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<ValueInSingleWindow<TableRow>> rows = new ArrayList<>();
  rows.add(wrapTableRow(new TableRow()));
  // First response is 403 not-rate-limited, second response has valid payload but should not
  // be invoked.
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(403).thenReturn(200);
  when(response.getContent())
      .thenReturn(toStream(errorWithReasonAndStatus("actually forbidden", 403)))
      .thenReturn(toStream(new TableDataInsertAllResponse()));
  thrown.expect(GoogleJsonResponseException.class);
  thrown.expectMessage("actually forbidden");
  DatasetServiceImpl dataService =
      new DatasetServiceImpl(bigquery, PipelineOptionsFactory.create());
  try {
    dataService.insertAll(
        ref, rows, null, BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
        new MockSleeper(), InsertRetryPolicy.alwaysRetry(), null);
    fail();
  } catch (RuntimeException e) {
    verify(response, times(1)).getStatusCode();
    verify(response, times(1)).getContent();
    verify(response, times(1)).getContentType();
    throw e.getCause();
  }
}
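The distinction this test exercises, a 403 whose reason is not "rateLimitExceeded" propagating as a GoogleJsonResponseException, is something calling code can also check by inspecting the exception's error details. A minimal sketch follows; isRateLimited is a hypothetical helper, not part of the Beam or google-api-client API.

import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;

// Sketch only: decide whether a failed insert was rate limited by looking at the
// reason codes carried in the exception's GoogleJsonError details.
static boolean isRateLimited(GoogleJsonResponseException e) {
  GoogleJsonError details = e.getDetails();
  if (details == null || details.getErrors() == null) {
    return false;
  }
  return details.getErrors().stream()
      .anyMatch(info -> "rateLimitExceeded".equals(info.getReason()));
}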