Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the project spark-bigquery-connector by GoogleCloudDataproc.
Example: the readRows method of the ReadRowsHelper class.
/**
 * Starts reading rows for the configured requests.
 *
 * <p>Obtains a {@link BigQueryReadClient} from the factory and wraps it, together with the
 * pending requests, in a {@code StreamCombiningIterator} configured with the prebuffer size
 * and retry limit from {@code options}. The iterator is stored in the
 * {@code incomingStream} field and returned to the caller.
 *
 * @return an iterator over the combined {@link ReadRowsResponse} stream
 */
public Iterator<ReadRowsResponse> readRows() {
    BigQueryReadClient readClient = bigQueryReadClientFactory.getBigQueryReadClient();
    // Keep a handle on the combined stream so the helper can reference it later.
    this.incomingStream =
        new StreamCombiningIterator(
            readClient,
            requests,
            options.prebufferResponses,
            options.getMaxReadRowsRetries());
    return this.incomingStream;
}
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the project trino by trinodb.
Example: the create method of the BigQueryReadClientFactory class.
/**
 * Creates a {@link BigQueryReadClient} for the given connector session.
 *
 * <p>The client is configured with a gRPC transport channel that attaches the factory's
 * {@code headerProvider}. If the {@code credentialsSupplier} yields credentials for the
 * session, they are installed via a {@link FixedCredentialsProvider}; otherwise the client
 * falls back to its default credential resolution.
 *
 * @param session the connector session used to resolve credentials
 * @return a newly created read client
 * @throws UncheckedIOException if the underlying client creation fails with an I/O error
 */
BigQueryReadClient create(ConnectorSession session) {
    Optional<Credentials> sessionCredentials = credentialsSupplier.getCredentials(session);
    BigQueryReadSettings.Builder settingsBuilder = BigQueryReadSettings.newBuilder()
            .setTransportChannelProvider(
                    BigQueryReadSettings.defaultGrpcTransportProviderBuilder()
                            .setHeaderProvider(headerProvider)
                            .build());
    sessionCredentials.ifPresent(
            credentials -> settingsBuilder.setCredentialsProvider(FixedCredentialsProvider.create(credentials)));
    try {
        return BigQueryReadClient.create(settingsBuilder.build());
    }
    catch (IOException e) {
        // Preserve the original cause; callers treat this as an unrecoverable setup failure.
        throw new UncheckedIOException("Error creating BigQueryReadClient", e);
    }
}
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the project trino by trinodb.
Example: the testRetryOfSingleFailure method of the TestReadRowsHelper class.
@Test
public void testRetryOfSingleFailure() {
    BigQueryReadClient client = mock(BigQueryReadClient.class);

    // First batch: one successful response followed by a retryable INTERNAL failure.
    MockResponsesBatch firstBatch = new MockResponsesBatch();
    firstBatch.addResponse(ReadRowsResponse.newBuilder()
            .setRowCount(10)
            .build());
    firstBatch.addException(new StatusRuntimeException(
            Status.INTERNAL.withDescription("Received unexpected EOS on DATA frame from server.")));

    // Second batch: the response served after the retry.
    MockResponsesBatch secondBatch = new MockResponsesBatch();
    secondBatch.addResponse(ReadRowsResponse.newBuilder()
            .setRowCount(11)
            .build());

    MockReadRowsHelper helper =
            new MockReadRowsHelper(client, "test", 3, ImmutableList.of(firstBatch, secondBatch));
    List<ReadRowsResponse> combined = ImmutableList.copyOf(helper.readRows());

    // Both successful responses should survive the single retry.
    assertThat(combined.size()).isEqualTo(2);
    assertThat(combined.stream().mapToLong(ReadRowsResponse::getRowCount).sum()).isEqualTo(21);
}
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the project trino by trinodb.
Example: the create method of the ReadSessionCreator class.
/**
 * Creates a BigQuery Storage read session for the given table.
 *
 * <p>Resolves the table (throwing {@link TableNotFoundException} if absent), restricts the
 * selection to columns with valid BigQuery names, applies the optional row-restriction
 * filter, and requests an AVRO-format session with up to {@code parallelism} streams.
 * The read client is closed after the session is created.
 *
 * @param session        the connector session
 * @param remoteTable    the table to read from
 * @param selectedFields columns to project; invalid column names are filtered out
 * @param filter         optional row-restriction expression
 * @param parallelism    maximum number of read streams to request
 * @return the created read session
 */
public ReadSession create(ConnectorSession session, TableId remoteTable, List<String> selectedFields, Optional<String> filter, int parallelism) {
    BigQueryClient bigQueryClient = bigQueryClientFactory.create(session);
    TableInfo tableDetails = bigQueryClient.getTable(remoteTable)
            .orElseThrow(() -> new TableNotFoundException(
                    new SchemaTableName(remoteTable.getDataset(), remoteTable.getTable())));
    TableInfo actualTable = getActualTable(bigQueryClient, tableDetails, selectedFields);

    // Drop any selected fields whose names BigQuery would reject.
    List<String> validFields = selectedFields.stream()
            .filter(BigQueryUtil::validColumnName)
            .collect(toList());

    try (BigQueryReadClient readClient = bigQueryReadClientFactory.create(session)) {
        ReadSession.TableReadOptions.Builder readOptions =
                ReadSession.TableReadOptions.newBuilder().addAllSelectedFields(validFields);
        filter.ifPresent(readOptions::setRowRestriction);

        ReadSession.Builder requestedSession = ReadSession.newBuilder()
                .setDataFormat(DataFormat.AVRO)
                .setTable(toTableResourceName(actualTable.getTableId()))
                .setReadOptions(readOptions);
        CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
                .setParent("projects/" + bigQueryClient.getProjectId())
                .setReadSession(requestedSession)
                .setMaxStreamCount(parallelism)
                .build();
        return readClient.createReadSession(request);
    }
}
Aggregations