Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in project java-bigquerystorage by googleapis.
The class StorageArrowSample, method main:
public static void main(String... args) throws Exception {
  // Sets your Google Cloud Platform project ID.
  // String projectId = "YOUR_PROJECT_ID";
  String projectId = args[0];
  // Use Long: epoch milliseconds overflow an int, so Integer.parseInt would fail here.
  Long snapshotMillis = null;
  if (args.length > 1) {
    snapshotMillis = Long.parseLong(args[1]);
  }
  try (BigQueryReadClient client = BigQueryReadClient.create()) {
    String parent = String.format("projects/%s", projectId);
    // This example uses baby name data from the public datasets.
    String srcTable =
        String.format(
            "projects/%s/datasets/%s/tables/%s",
            "bigquery-public-data", "usa_names", "usa_1910_current");
    // Specify the columns to be projected by adding them to the selected fields,
    // and set a simple filter to restrict which rows are transmitted.
    TableReadOptions options =
        TableReadOptions.newBuilder()
            .addSelectedFields("name")
            .addSelectedFields("number")
            .addSelectedFields("state")
            .setRowRestriction("state = \"WA\"")
            .build();
    // Start specifying the read session we want created.
    ReadSession.Builder sessionBuilder =
        ReadSession.newBuilder()
            .setTable(srcTable)
            .setDataFormat(DataFormat.ARROW)
            .setReadOptions(options);
    // Optionally specify the snapshot time. When unspecified, snapshot time is "now".
    if (snapshotMillis != null) {
      Timestamp t =
          Timestamp.newBuilder()
              .setSeconds(snapshotMillis / 1000)
              .setNanos((int) ((snapshotMillis % 1000) * 1000000))
              .build();
      TableModifiers modifiers = TableModifiers.newBuilder().setSnapshotTime(t).build();
      sessionBuilder.setTableModifiers(modifiers);
    }
    // Begin building the session creation request.
    CreateReadSessionRequest.Builder builder =
        CreateReadSessionRequest.newBuilder()
            .setParent(parent)
            .setReadSession(sessionBuilder)
            .setMaxStreamCount(1);
    ReadSession session = client.createReadSession(builder.build());
    // Set up a simple reader and start a read session.
    try (SimpleRowReader reader = new SimpleRowReader(session.getArrowSchema())) {
      // Assert that there are streams available in the session. An empty table may not have
      // data available. If no sessions are available for an anonymous (cached) table, consider
      // writing results of a query to a named table rather than consuming cached results
      // directly.
      Preconditions.checkState(session.getStreamsCount() > 0);
      // Use the first stream to perform reading.
      String streamName = session.getStreams(0).getName();
      ReadRowsRequest readRowsRequest =
          ReadRowsRequest.newBuilder().setReadStream(streamName).build();
      // Process each block of rows as they arrive and decode using our simple row reader.
      ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
      for (ReadRowsResponse response : stream) {
        Preconditions.checkState(response.hasArrowRecordBatch());
        reader.processRows(response.getArrowRecordBatch());
      }
    }
  }
}
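SimpleRowReader is a helper class defined elsewhere in the sample and is not shown on this page. A minimal sketch of what the Arrow variant might look like, assuming Apache Arrow's Java IPC APIs (MessageSerializer, ReadChannel, VectorSchemaRoot, VectorLoader); the class body below is illustrative, not the project's exact source:

// Illustrative sketch only. Assumes org.apache.arrow.memory.*,
// org.apache.arrow.vector.*, org.apache.arrow.vector.ipc.ReadChannel,
// org.apache.arrow.vector.ipc.message.MessageSerializer, and
// org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel.
private static class SimpleRowReader implements AutoCloseable {

  private final BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
  private final VectorSchemaRoot root;
  private final VectorLoader loader;

  SimpleRowReader(ArrowSchema arrowSchema) throws IOException {
    // Deserialize the Arrow schema that was returned with the ReadSession.
    Schema schema =
        MessageSerializer.deserializeSchema(
            new ReadChannel(
                new ByteArrayReadableSeekableByteChannel(
                    arrowSchema.getSerializedSchema().toByteArray())));
    root = VectorSchemaRoot.create(schema, allocator);
    loader = new VectorLoader(root);
  }

  void processRows(ArrowRecordBatch batch) throws IOException {
    // Deserialize one serialized record batch and load it into the schema root.
    // Fully qualified to distinguish Arrow's batch type from the proto message.
    org.apache.arrow.vector.ipc.message.ArrowRecordBatch deserializedBatch =
        MessageSerializer.deserializeRecordBatch(
            new ReadChannel(
                new ByteArrayReadableSeekableByteChannel(
                    batch.getSerializedRecordBatch().toByteArray())),
            allocator);
    loader.load(deserializedBatch);
    deserializedBatch.close();
    System.out.println(root.contentToTSVString());
    root.clear();
  }

  @Override
  public void close() {
    root.close();
    allocator.close();
  }
}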
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in project java-bigquerystorage by googleapis.
The class StorageSample, method main:
public static void main(String... args) throws Exception {
  // Sets your Google Cloud Platform project ID.
  // String projectId = "YOUR_PROJECT_ID";
  String projectId = args[0];
  // Use Long: epoch milliseconds overflow an int, so Integer.parseInt would fail here.
  Long snapshotMillis = null;
  if (args.length > 1) {
    snapshotMillis = Long.parseLong(args[1]);
  }
  try (BigQueryReadClient client = BigQueryReadClient.create()) {
    String parent = String.format("projects/%s", projectId);
    // This example uses baby name data from the public datasets.
    String srcTable =
        String.format(
            "projects/%s/datasets/%s/tables/%s",
            "bigquery-public-data", "usa_names", "usa_1910_current");
    // Specify the columns to be projected by adding them to the selected fields,
    // and set a simple filter to restrict which rows are transmitted.
    TableReadOptions options =
        TableReadOptions.newBuilder()
            .addSelectedFields("name")
            .addSelectedFields("number")
            .addSelectedFields("state")
            .setRowRestriction("state = \"WA\"")
            .build();
    // Start specifying the read session we want created.
    ReadSession.Builder sessionBuilder =
        ReadSession.newBuilder()
            .setTable(srcTable)
            .setDataFormat(DataFormat.AVRO)
            .setReadOptions(options);
    // Optionally specify the snapshot time. When unspecified, snapshot time is "now".
    if (snapshotMillis != null) {
      Timestamp t =
          Timestamp.newBuilder()
              .setSeconds(snapshotMillis / 1000)
              .setNanos((int) ((snapshotMillis % 1000) * 1000000))
              .build();
      TableModifiers modifiers = TableModifiers.newBuilder().setSnapshotTime(t).build();
      sessionBuilder.setTableModifiers(modifiers);
    }
    // Begin building the session creation request.
    CreateReadSessionRequest.Builder builder =
        CreateReadSessionRequest.newBuilder()
            .setParent(parent)
            .setReadSession(sessionBuilder)
            .setMaxStreamCount(1);
    // Request the session creation.
    ReadSession session = client.createReadSession(builder.build());
    SimpleRowReader reader =
        new SimpleRowReader(new Schema.Parser().parse(session.getAvroSchema().getSchema()));
    // Assert that there are streams available in the session. An empty table may not have
    // data available. If no sessions are available for an anonymous (cached) table, consider
    // writing results of a query to a named table rather than consuming cached results directly.
    Preconditions.checkState(session.getStreamsCount() > 0);
    // Use the first stream to perform reading.
    String streamName = session.getStreams(0).getName();
    ReadRowsRequest readRowsRequest =
        ReadRowsRequest.newBuilder().setReadStream(streamName).build();
    // Process each block of rows as they arrive and decode using our simple row reader.
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
      Preconditions.checkState(response.hasAvroRows());
      reader.processRows(response.getAvroRows());
    }
  }
}
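Here SimpleRowReader is the Avro variant of the helper, again defined elsewhere in the sample. A minimal sketch of what it might look like, assuming Apache Avro's GenericDatumReader and BinaryDecoder; illustrative, not the project's exact source:

// Illustrative sketch only. Assumes org.apache.avro.Schema,
// org.apache.avro.generic.*, and org.apache.avro.io.*.
private static class SimpleRowReader {

  private final DatumReader<GenericRecord> datumReader;
  // The decoder is reused across calls to avoid re-allocation.
  private BinaryDecoder decoder = null;
  // The record object is reused across rows for the same reason.
  private GenericRecord row = null;

  SimpleRowReader(Schema schema) {
    datumReader = new GenericDatumReader<>(schema);
  }

  void processRows(AvroRows avroRows) throws IOException {
    // Each AvroRows message carries a block of concatenated Avro-encoded rows.
    decoder =
        DecoderFactory.get()
            .binaryDecoder(avroRows.getSerializedBinaryRows().toByteArray(), decoder);
    while (!decoder.isEnd()) {
      row = datumReader.read(row, decoder);
      System.out.println(row.toString());
    }
  }
}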
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in project spark-bigquery-connector by GoogleCloudDataproc.
The class BigQueryClientFactory, method getBigQueryReadClient:
public BigQueryReadClient getBigQueryReadClient() {
  synchronized (readClientMap) {
    if (!readClientMap.containsKey(this)) {
      BigQueryReadClient bigQueryReadClient =
          createBigQueryReadClient(this.bqConfig.getEndpoint());
      Runtime.getRuntime()
          .addShutdownHook(new Thread(() -> shutdownBigQueryReadClient(bigQueryReadClient)));
      readClientMap.put(this, bigQueryReadClient);
    }
    // Read under the same lock so a freshly created client is safely published.
    return readClientMap.get(this);
  }
}
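The shutdownBigQueryReadClient helper registered in the shutdown hook is not shown on this page. A plausible sketch, assuming the BackgroundResource lifecycle methods (shutdown, awaitTermination, shutdownNow) that BigQueryReadClient inherits from GAX; the 10-second grace period is an illustrative choice:

// Hypothetical sketch of shutdownBigQueryReadClient; not the project's exact source.
private static void shutdownBigQueryReadClient(BigQueryReadClient client) {
  if (client != null && !client.isShutdown()) {
    client.shutdown();
    try {
      // Give in-flight RPCs a short window to finish before forcing shutdown.
      if (!client.awaitTermination(10, TimeUnit.SECONDS)) {
        client.shutdownNow();
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      client.shutdownNow();
    }
  }
}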
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in project spark-bigquery-connector by GoogleCloudDataproc.
The class BigQueryClientFactory, method createBigQueryReadClient:
private BigQueryReadClient createBigQueryReadClient(Optional<String> endpoint) {
  try {
    InstantiatingGrpcChannelProvider.Builder transportBuilder =
        BigQueryReadSettings.defaultGrpcTransportProviderBuilder()
            .setHeaderProvider(headerProvider);
    setProxyConfig(transportBuilder);
    endpoint.ifPresent(e -> {
      // Use an SLF4J placeholder so the endpoint value is actually logged.
      log.info("Overriding endpoint to: {}", e);
      transportBuilder.setEndpoint(e);
    });
    BigQueryReadSettings.Builder clientSettings =
        BigQueryReadSettings.newBuilder()
            .setTransportChannelProvider(transportBuilder.build())
            .setCredentialsProvider(FixedCredentialsProvider.create(credentials));
    return BigQueryReadClient.create(clientSettings.build());
  } catch (IOException e) {
    throw new UncheckedIOException("Error creating BigQueryStorageReadClient", e);
  }
}
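The connector builds a custom transport provider because it needs header and proxy control. When those are not needed, endpoint and credentials can also be set directly on the settings builder; a minimal sketch, where the endpoint string is illustrative:

// Minimal sketch: configure endpoint and credentials on the settings builder.
// The endpoint value shown here is illustrative, not a required setting.
BigQueryReadSettings settings =
    BigQueryReadSettings.newBuilder()
        .setEndpoint("bigquerystorage.googleapis.com:443")
        .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
        .build();
BigQueryReadClient client = BigQueryReadClient.create(settings);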
Use of com.google.cloud.bigquery.storage.v1.BigQueryReadClient in project spark-bigquery-connector by GoogleCloudDataproc.
The class BigQueryClientFactoryTest, method testGetReadClientForSameClientFactory:
@Test
public void testGetReadClientForSameClientFactory() {
  BigQueryClientFactory clientFactory =
      new BigQueryClientFactory(bigQueryCredentialsSupplier, headerProvider, bigQueryConfig);
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryReadClient readClient = clientFactory.getBigQueryReadClient();
  assertNotNull(readClient);
  BigQueryReadClient readClient2 = clientFactory.getBigQueryReadClient();
  assertNotNull(readClient2);
  assertSame(readClient, readClient2);
}
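The assertSame check verifies the caching behavior of getBigQueryReadClient. A natural counterpart, not shown on this page, would assert that factories with different configurations do not share a client; a hedged sketch in the same style, where bigQueryCredentialsSupplier2 is a hypothetical second mock supplier:

// Hypothetical counterpart test: factories that are not equal should get
// distinct cached clients, since the cache is keyed by the factory itself.
@Test
public void testGetReadClientForDifferentClientFactory() {
  when(bigQueryConfig.getBigQueryProxyConfig()).thenReturn(bigQueryProxyConfig);
  BigQueryClientFactory clientFactory =
      new BigQueryClientFactory(bigQueryCredentialsSupplier, headerProvider, bigQueryConfig);
  BigQueryClientFactory clientFactory2 =
      new BigQueryClientFactory(bigQueryCredentialsSupplier2, headerProvider, bigQueryConfig);
  BigQueryReadClient readClient = clientFactory.getBigQueryReadClient();
  BigQueryReadClient readClient2 = clientFactory2.getBigQueryReadClient();
  assertNotSame(readClient, readClient2);
}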