Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

The class FakeBigQueryServices, method encodeQuery:

static String encodeQuery(List<TableRow> rows) throws IOException {
  ListCoder<TableRow> listCoder = ListCoder.of(TableRowJsonCoder.of());
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  listCoder.encode(rows, output, Context.OUTER);
  return Base64.encodeBase64String(output.toByteArray());
}
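
For reference, the same encoding can be reproduced outside the test harness. A minimal, self-contained sketch, assuming Beam's ListCoder and TableRowJsonCoder and using java.util.Base64 in place of the commons-codec Base64 seen above:

import com.google.api.services.bigquery.model.TableRow;
import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import org.apache.beam.sdk.coders.Coder.Context;
import org.apache.beam.sdk.coders.ListCoder;
import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder;

public class EncodeQuerySketch {
  public static void main(String[] args) throws Exception {
    // Two sample rows to encode, mirroring what encodeQuery receives.
    List<TableRow> rows =
        Arrays.asList(new TableRow().set("name", "a"), new TableRow().set("name", "b"));

    // Same coder combination as encodeQuery: a JSON coder per row, wrapped in a list coder.
    ListCoder<TableRow> listCoder = ListCoder.of(TableRowJsonCoder.of());
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    listCoder.encode(rows, output, Context.OUTER);

    // java.util.Base64 stands in for org.apache.commons.codec.binary.Base64 used in the test.
    System.out.println(Base64.getEncoder().encodeToString(output.toByteArray()));
  }
}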
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

The class FakeBigQueryServices, method rowsFromEncodedQuery:

static List<TableRow> rowsFromEncodedQuery(String query) throws IOException {
  ListCoder<TableRow> listCoder = ListCoder.of(TableRowJsonCoder.of());
  ByteArrayInputStream input = new ByteArrayInputStream(Base64.decodeBase64(query));
  List<TableRow> rows = listCoder.decode(input, Context.OUTER);
  for (TableRow row : rows) {
    convertNumbers(row);
  }
  return rows;
}
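
The convertNumbers helper is not shown in this snippet. A hypothetical stand-in with the same shape, assuming its purpose is to restore numeric values whose type changes across the JSON round trip (Integer back to Long), might look like the following; this is a sketch, not the actual Beam implementation:

import com.google.api.services.bigquery.model.TableRow;
import java.util.ArrayList;

class TableRowNormalizationSketch {
  // Hypothetical stand-in for convertNumbers: widen Integer values back to Long,
  // on the assumption that JSON decoding narrows long-typed fields to Integer.
  static TableRow normalizeNumbers(TableRow row) {
    // Copy the keys first so the map can be updated while we walk it.
    for (String key : new ArrayList<>(row.keySet())) {
      Object value = row.get(key);
      if (value instanceof Integer) {
        row.set(key, ((Integer) value).longValue());
      }
    }
    return row;
  }
}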
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

The class FakeDatasetService, method insertAll:

@Override
public long insertAll(
    TableReference ref,
    List<ValueInSingleWindow<TableRow>> rowList,
    @Nullable List<String> insertIdList,
    InsertRetryPolicy retryPolicy,
    List<ValueInSingleWindow<TableRow>> failedInserts)
    throws IOException, InterruptedException {
  Map<TableRow, List<TableDataInsertAllResponse.InsertErrors>> insertErrors = getInsertErrors();
  synchronized (BigQueryIOTest.tables) {
    if (insertIdList != null) {
      assertEquals(rowList.size(), insertIdList.size());
    } else {
      insertIdList = Lists.newArrayListWithExpectedSize(rowList.size());
      for (int i = 0; i < rowList.size(); ++i) {
        insertIdList.add(Integer.toString(ThreadLocalRandom.current().nextInt()));
      }
    }
    long dataSize = 0;
    TableContainer tableContainer =
        getTableContainer(ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
    for (int i = 0; i < rowList.size(); ++i) {
      TableRow row = rowList.get(i).getValue();
      List<TableDataInsertAllResponse.InsertErrors> allErrors = insertErrors.get(row);
      boolean shouldInsert = true;
      if (allErrors != null) {
        for (TableDataInsertAllResponse.InsertErrors errors : allErrors) {
          if (!retryPolicy.shouldRetry(new Context(errors))) {
            shouldInsert = false;
          }
        }
      }
      if (shouldInsert) {
        dataSize += tableContainer.addRow(row, insertIdList.get(i));
      } else {
        failedInserts.add(rowList.get(i));
      }
    }
    return dataSize;
  }
}
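
The retryPolicy and failedInserts parameters faked here correspond to what the public sink exposes. A minimal usage sketch, assuming a Beam release that provides BigQueryIO.writeTableRows(), withFailedInsertRetryPolicy(), and WriteResult.getFailedInserts(); the destination table is hypothetical:

import com.google.api.services.bigquery.model.TableRow;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.io.gcp.bigquery.InsertRetryPolicy;
import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder;
import org.apache.beam.sdk.io.gcp.bigquery.WriteResult;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.PCollection;

public class FailedInsertsSketch {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

    WriteResult result =
        p.apply(Create.of(new TableRow().set("row", "a")).withCoder(TableRowJsonCoder.of()))
            .apply(
                BigQueryIO.writeTableRows()
                    .to("my-project:my_dataset.my_table")  // hypothetical destination
                    .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS)
                    // Transient failures are retried; persistent ones are emitted
                    // on the failed-inserts output instead.
                    .withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors())
                    .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_NEVER)
                    .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND));

    // Rows the policy gave up on, analogous to failedInserts in the fake above.
    PCollection<TableRow> failed = result.getFailedInserts();

    p.run().waitUntilFinish();
  }
}

Streaming inserts are selected explicitly because the failed-insert retry policy only applies to that write method.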
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

The class FakeDatasetService, method getInsertErrors:

Map<TableRow, List<TableDataInsertAllResponse.InsertErrors>> getInsertErrors() {
  Map<TableRow, List<TableDataInsertAllResponse.InsertErrors>> parsedInsertErrors = Maps.newHashMap();
  synchronized (BigQueryIOTest.tables) {
    for (Map.Entry<String, List<String>> entry : this.insertErrors.entrySet()) {
      TableRow tableRow = BigQueryHelpers.fromJsonString(entry.getKey(), TableRow.class);
      List<TableDataInsertAllResponse.InsertErrors> allErrors = Lists.newArrayList();
      for (String errorsString : entry.getValue()) {
        allErrors.add(
            BigQueryHelpers.fromJsonString(errorsString, TableDataInsertAllResponse.InsertErrors.class));
      }
      parsedInsertErrors.put(tableRow, allErrors);
    }
  }
  return parsedInsertErrors;
}
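
The insertErrors field read here is expected to map a JSON-serialized TableRow to a list of JSON-serialized InsertErrors. A minimal sketch of building such an entry, assuming BigQueryHelpers.toJsonString is accessible as the inverse of the fromJsonString calls above (any equivalent JSON serialization would do):

import com.google.api.services.bigquery.model.ErrorProto;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableRow;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers;

public class InsertErrorsEntrySketch {
  public static void main(String[] args) throws Exception {
    // The row that should fail, and the errors the fake service should report for it.
    TableRow row = new TableRow().set("row", "b");
    TableDataInsertAllResponse.InsertErrors errors =
        new TableDataInsertAllResponse.InsertErrors()
            .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout")));

    // Both sides of the map entry are stored as JSON strings; getInsertErrors()
    // above parses them back into TableRow / InsertErrors objects.
    String key = BigQueryHelpers.toJsonString(row);
    List<String> value = ImmutableList.of(BigQueryHelpers.toJsonString(errors));
    Map<String, List<String>> insertErrors = ImmutableMap.of(key, value);
    System.out.println(insertErrors);
  }
}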
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

The class BigQueryServicesImplTest, method testInsertRetrySelectRows:

/**
 * Tests that {@link DatasetServiceImpl#insertAll} retries selected rows on failure.
 */
@Test
public void testInsertRetrySelectRows() throws Exception {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<ValueInSingleWindow<TableRow>> rows =
      ImmutableList.of(
          wrapTableRow(new TableRow().set("row", "a")),
          wrapTableRow(new TableRow().set("row", "b")));
  List<String> insertIds = ImmutableList.of("a", "b");
  final TableDataInsertAllResponse bFailed =
      new TableDataInsertAllResponse()
          .setInsertErrors(
              ImmutableList.of(
                  new InsertErrors().setIndex(1L).setErrors(ImmutableList.of(new ErrorProto()))));
  final TableDataInsertAllResponse allRowsSucceeded = new TableDataInsertAllResponse();
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200).thenReturn(200);
  when(response.getContent()).thenReturn(toStream(bFailed)).thenReturn(toStream(allRowsSucceeded));
  DatasetServiceImpl dataService = new DatasetServiceImpl(bigquery, PipelineOptionsFactory.create());
  dataService.insertAll(
      ref,
      rows,
      insertIds,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
      new MockSleeper(),
      InsertRetryPolicy.alwaysRetry(),
      null);
  verify(response, times(2)).getStatusCode();
  verify(response, times(2)).getContent();
  verify(response, times(2)).getContentType();
}
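
The wrapTableRow and toStream helpers are not shown in this snippet. Plausible stand-ins, assuming wrapTableRow tags the row with the global window and toStream serializes a response object into the JSON bytes returned by the mocked response's getContent():

import com.google.api.client.json.GenericJson;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.bigquery.model.TableRow;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.values.ValueInSingleWindow;

class TestHelpersSketch {
  // Hypothetical wrapTableRow: place the row in the global window with an on-time pane.
  static ValueInSingleWindow<TableRow> wrapTableRow(TableRow row) {
    return ValueInSingleWindow.of(
        row,
        BoundedWindow.TIMESTAMP_MAX_VALUE,
        GlobalWindow.INSTANCE,
        PaneInfo.ON_TIME_AND_ONLY_FIRING);
  }

  // Hypothetical toStream: serialize a generated API model object (such as
  // TableDataInsertAllResponse) to the JSON stream the mocked response hands back.
  static InputStream toStream(GenericJson content) throws IOException {
    return new ByteArrayInputStream(JacksonFactory.getDefaultInstance().toByteArray(content));
  }
}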