Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class BigQueryClientFactoryTest, method testGetReadClientWithUserAgent.
@Test
public void testGetReadClientWithUserAgent() {
  // Two factories configured with the same header provider must share one
  // cached BigQueryReadClient instance.
  BigQueryClientFactory firstFactory =
      new BigQueryClientFactory(bigQueryCredentialsSupplier, headerProvider, bigQueryConfig);
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryReadClient firstClient = firstFactory.getBigQueryReadClient();
  assertNotNull(firstClient);

  BigQueryClientFactory secondFactory =
      new BigQueryClientFactory(bigQueryCredentialsSupplier, headerProvider, bigQueryConfig);
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryReadClient secondClient = secondFactory.getBigQueryReadClient();
  assertNotNull(secondClient);
  assertSame(firstClient, secondClient);

  // A different user agent yields a different header provider, so the factory
  // must build a distinct client rather than reuse the cached one.
  BigQueryClientFactory thirdFactory =
      new BigQueryClientFactory(
          bigQueryCredentialsSupplier,
          HttpUtil.createHeaderProvider(bigQueryConfig, "test-agent-2"),
          bigQueryConfig);
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryReadClient thirdClient = thirdFactory.getBigQueryReadClient();
  assertNotNull(thirdClient);
  assertNotSame(firstClient, thirdClient);
  assertNotSame(secondClient, thirdClient);
}
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class BigQueryClientFactoryTest, method testGetReadClientWithServiceAccountCredentials.
@Test
public void testGetReadClientWithServiceAccountCredentials() {
  // Identical service-account credentials: both factories must resolve to the
  // same cached BigQueryReadClient.
  when(bigQueryCredentialsSupplier.getCredentials())
      .thenReturn(createServiceAccountCredentials("test-client-id"));
  BigQueryClientFactory firstFactory =
      new BigQueryClientFactory(bigQueryCredentialsSupplier, headerProvider, bigQueryConfig);
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryReadClient firstClient = firstFactory.getBigQueryReadClient();
  assertNotNull(firstClient);

  when(bigQueryCredentialsSupplier.getCredentials())
      .thenReturn(createServiceAccountCredentials("test-client-id"));
  BigQueryClientFactory secondFactory =
      new BigQueryClientFactory(bigQueryCredentialsSupplier, headerProvider, bigQueryConfig);
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryReadClient secondClient = secondFactory.getBigQueryReadClient();
  assertNotNull(secondClient);
  assertSame(firstClient, secondClient);

  // Different client id means different credentials, so a new client must be created.
  when(bigQueryCredentialsSupplier.getCredentials())
      .thenReturn(createServiceAccountCredentials("test-client-id-2"));
  BigQueryClientFactory thirdFactory =
      new BigQueryClientFactory(bigQueryCredentialsSupplier, headerProvider, bigQueryConfig);
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryReadClient thirdClient = thirdFactory.getBigQueryReadClient();
  assertNotNull(thirdClient);
  assertNotSame(firstClient, thirdClient);
  assertNotSame(secondClient, thirdClient);
}
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class BigQueryClientFactoryTest, method testGetReadClientWithBigQueryConfig.
@Test
public void testGetReadClientWithBigQueryConfig() {
  // Equal configs (same endpoint) must map to the same cached read client.
  BigQueryClientFactory firstFactory =
      new BigQueryClientFactory(
          bigQueryCredentialsSupplier, headerProvider, new TestBigQueryConfig(Optional.of("US:8080")));
  BigQueryReadClient firstClient = firstFactory.getBigQueryReadClient();
  assertNotNull(firstClient);

  BigQueryClientFactory secondFactory =
      new BigQueryClientFactory(
          bigQueryCredentialsSupplier, headerProvider, new TestBigQueryConfig(Optional.of("US:8080")));
  BigQueryReadClient secondClient = secondFactory.getBigQueryReadClient();
  assertNotNull(secondClient);
  assertSame(firstClient, secondClient);

  // A different endpoint in the config must produce a distinct client.
  BigQueryClientFactory thirdFactory =
      new BigQueryClientFactory(
          bigQueryCredentialsSupplier, headerProvider, new TestBigQueryConfig(Optional.of("EU:8080")));
  BigQueryReadClient thirdClient = thirdFactory.getBigQueryReadClient();
  assertNotNull(thirdClient);
  assertNotSame(firstClient, thirdClient);
  assertNotSame(secondClient, thirdClient);
}
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class ReadSessionCreator, method create.
/**
 * Creates a new ReadSession for parallel reads.
 *
 * <p>Some attributes are governed by the {@link ReadSessionCreatorConfig} that this object was
 * constructed with.
 *
 * @param table The table to create the session for.
 * @param selectedFields the columns to project into the session's streams
 * @param filter an optional row restriction to push down; skipped when the input table is a view
 * @return the created read session paired with the table that was actually read (which may
 *     differ from {@code table}, e.g. when a view is materialized)
 */
public ReadSessionResponse create(
    TableId table, ImmutableList<String> selectedFields, Optional<String> filter) {
  TableInfo tableDetails = bigQueryClient.getTable(table);
  // May substitute another table for the requested one (e.g. a materialization of a view) —
  // the rest of the method reads from actualTable, not tableDetails.
  TableInfo actualTable = getActualTable(tableDetails, selectedFields, filter);
  StandardTableDefinition tableDefinition = actualTable.getDefinition();
  BigQueryReadClient bigQueryReadClient = bigQueryReadClientFactory.getBigQueryReadClient();
  String tablePath = toTablePath(actualTable.getTableId());
  // Optional pre-encoded base request: a Base64-encoded serialized CreateReadSessionRequest
  // proto supplied via config; fall back to an empty request when absent.
  CreateReadSessionRequest request =
      config
          .getRequestEncodedBase()
          .map(
              value -> {
                try {
                  return com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest.parseFrom(
                      java.util.Base64.getDecoder().decode(value));
                } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                  throw new RuntimeException("Couldn't decode:" + value, e);
                }
              })
          .orElse(CreateReadSessionRequest.newBuilder().build());
  ReadSession.Builder requestedSession = request.getReadSession().toBuilder();
  config.getTraceId().ifPresent(requestedSession::setTraceId);
  TableReadOptions.Builder readOptions = requestedSession.getReadOptionsBuilder();
  if (!isInputTableAView(tableDetails)) {
    // Row restriction is only applied to plain tables; presumably for views the filter is
    // already folded into the materializing query — TODO confirm against getActualTable.
    filter.ifPresent(readOptions::setRowRestriction);
  }
  readOptions.addAllSelectedFields(selectedFields);
  readOptions.setArrowSerializationOptions(
      ArrowSerializationOptions.newBuilder()
          .setBufferCompression(config.getArrowCompressionCodec())
          .build());
  // FIX: was `request.newBuilder()` — a static factory invoked through the instance, which
  // returned a FRESH empty builder and silently dropped every top-level field of the decoded
  // base request. `toBuilder()` preserves those fields while the explicit setters below
  // still override parent, read session, and max stream count.
  ReadSession readSession =
      bigQueryReadClient.createReadSession(
          request.toBuilder()
              .setParent("projects/" + bigQueryClient.getProjectId())
              .setReadSession(
                  requestedSession
                      .setDataFormat(config.getReadDataFormat())
                      .setReadOptions(readOptions)
                      .setTable(tablePath)
                      .build())
              .setMaxStreamCount(
                  getMaxNumPartitionsRequested(config.getMaxParallelism(), tableDefinition))
              .build());
  return new ReadSessionResponse(readSession, actualTable);
}
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in the Trino project by trinodb.
From the class TestReadRowsHelper, method testNoFailures.
@Test
public void testNoFailures() {
  BigQueryReadClient client = mock(BigQueryReadClient.class);

  // Single batch with two successful responses of 10 and 11 rows.
  MockResponsesBatch batch1 = new MockResponsesBatch();
  batch1.addResponse(ReadRowsResponse.newBuilder().setRowCount(10).build());
  batch1.addResponse(ReadRowsResponse.newBuilder().setRowCount(11).build());

  // so we can run multiple tests
  MockReadRowsHelper helper = new MockReadRowsHelper(client, "test", 3, ImmutableList.of(batch1));
  List<ReadRowsResponse> responses = ImmutableList.copyOf(helper.readRows());

  assertThat(responses.size()).isEqualTo(2);
  long totalRows = responses.stream().mapToLong(ReadRowsResponse::getRowCount).sum();
  assertThat(totalRows).isEqualTo(21);
}
Aggregations