use of com.google.bigtable.v2.ReadRowsResponse in project gapic-generator-java by googleapis.
the class AsyncReadRows method asyncReadRows.
public static void asyncReadRows() throws Exception {
  // It may require modifications to work in your environment.
  try (BaseBigtableDataClient baseBigtableDataClient = BaseBigtableDataClient.create()) {
    ReadRowsRequest request =
        ReadRowsRequest.newBuilder()
            .setTableName(TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]").toString())
            .setAppProfileId("appProfileId704923523")
            .setRows(RowSet.newBuilder().build())
            .setFilter(RowFilter.newBuilder().build())
            .setRowsLimit(-944199211)
            .build();
    ServerStream<ReadRowsResponse> stream =
        baseBigtableDataClient.readRowsCallable().call(request);
    for (ReadRowsResponse response : stream) {
      // Do something when a response is received.
    }
  }
}
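ServerStream also supports early termination via cancel(). A minimal sketch, assuming the same client and request as above; maxChunks is a hypothetical client-side limit, not part of the generated sample:

// Stop after a client-side chunk limit and cancel the stream so the
// underlying gRPC call is released promptly.
int maxChunks = 10; // hypothetical limit for illustration
int seen = 0;
ServerStream<ReadRowsResponse> stream =
    baseBigtableDataClient.readRowsCallable().call(request);
for (ReadRowsResponse response : stream) {
  seen += response.getChunksCount();
  if (seen >= maxChunks) {
    stream.cancel();
    break;
  }
}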
use of com.google.bigtable.v2.ReadRowsResponse in project grpc-gcp-java by GoogleCloudPlatform.
the class BigtableIntegrationTest method testReadRows.
@Test
public void testReadRows() throws Exception {
  BigtableBlockingStub stub = getBigtableBlockingStub();
  ReadRowsRequest request =
      ReadRowsRequest.newBuilder()
          .setTableName(TABLE_NAME)
          .setRows(RowSet.newBuilder().addRowKeys(ByteString.copyFromUtf8("test-row")))
          .build();
  Iterator<ReadRowsResponse> responses = stub.readRows(request);
  assertEquals(1, gcpChannel.channelRefs.size());
}
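The response iterator is not consumed here; the assertion only verifies channel pooling. Draining it would look roughly like the sketch below, using field accessors from the Bigtable v2 ReadRowsResponse proto:

while (responses.hasNext()) {
  ReadRowsResponse response = responses.next();
  for (ReadRowsResponse.CellChunk chunk : response.getChunksList()) {
    // The row key is populated on the first chunk of each row.
    System.out.println(chunk.getRowKey().toStringUtf8());
  }
}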
use of com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse in project spark-bigquery-connector by GoogleCloudDataproc.
the class StreamCombiningIterator method next.
/**
 * Consumes the next response and asynchronously requests the next response from the observer.
 *
 * @return The next response.
 * @throws NoSuchElementException If the stream has been consumed or cancelled.
 */
@Override
public ReadRowsResponse next() {
  if (!hasNext()) {
    throw new NoSuchElementException();
  }
  try {
    Observer observer = observersQueue.poll();
    Preconditions.checkState(observer != null);
    observer.request();
    @SuppressWarnings("unchecked")
    ReadRowsResponse tmp = (ReadRowsResponse) last;
    return tmp;
  } finally {
    if (last != EOS) {
      last = null;
    }
  }
}
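Callers drive the combined stream through the standard iterator protocol; each next() both returns the buffered response and requests another one upstream. A minimal consumption sketch, where combined stands in for an already constructed StreamCombiningIterator:

// Hypothetical consumption loop over a constructed StreamCombiningIterator.
long totalRows = 0;
while (combined.hasNext()) {
  ReadRowsResponse response = combined.next();
  totalRows += response.getRowCount(); // row_count from the v1beta2 proto
}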
use of com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse in project spark-bigquery-connector by GoogleCloudDataproc.
the class BigQueryInputPartitionContext method createPartitionReaderContext.
@Override
public InputPartitionReaderContext<InternalRow> createPartitionReaderContext() {
  ReadRowsRequest.Builder readRowsRequest =
      ReadRowsRequest.newBuilder().setReadStream(streamName);
  ReadRowsHelper readRowsHelper =
      new ReadRowsHelper(bigQueryReadClientFactory, readRowsRequest, options);
  Iterator<ReadRowsResponse> readRowsResponses = readRowsHelper.readRows();
  return new BigQueryInputPartitionReaderContext(readRowsResponses, converter, readRowsHelper);
}
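Spark then drives the returned context with its next()/get() pair until exhaustion, as the test in the next snippet demonstrates. A rough sketch, assuming next() can throw IOException and a close() method exists, mirroring Spark's InputPartitionReader contract:

// Rough sketch of the consumption loop; the close() call follows the
// assumed InputPartitionReader-style contract.
InputPartitionReaderContext<InternalRow> ctx = createPartitionReaderContext();
try {
  while (ctx.next()) {
    InternalRow row = ctx.get();
    // hand the row off to Spark
  }
} finally {
  ctx.close();
}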
use of com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse in project spark-bigquery-connector by GoogleCloudDataproc.
the class BigQueryInputPartitionReaderContextTest method testReadAvro.
@Test
public void testReadAvro() throws Exception {
  TableInfo allTypesTableInfo = allTypesTableInfo();
  ReadRowsResponse.Builder readRowsResponse = ReadRowsResponse.newBuilder();
  TextFormat.merge(ALL_TYPES_TABLE_READ_ROWS_RESPONSE_STR, readRowsResponse);
  Iterator<ReadRowsResponse> readRowsResponses =
      ImmutableList.of(readRowsResponse.build()).iterator();
  ReadRowsResponseToInternalRowIteratorConverter converter =
      ReadRowsResponseToInternalRowIteratorConverter.avro(
          ALL_TYPES_TABLE_BIGQUERY_SCHEMA,
          ALL_TYPES_TABLE_FIELDS,
          ALL_TYPES_TABLE_AVRO_RAW_SCHEMA,
          Optional.empty());
  BigQueryInputPartitionReaderContext reader =
      new BigQueryInputPartitionReaderContext(readRowsResponses, converter, null);
  assertThat(reader.next()).isTrue();
  InternalRow row = reader.get();
  assertThat(reader.next()).isFalse();
  assertThat(row.numFields()).isEqualTo(15);
  assertThat(row.getString(0)).isEqualTo("hello");
}
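Instead of merging a text-format fixture, the response can also be assembled programmatically. A sketch, assuming Avro-encoded rows are already available in a ByteString named serializedRows (a hypothetical fixture):

// Build a one-row ReadRowsResponse fixture directly from serialized Avro data.
ReadRowsResponse response =
    ReadRowsResponse.newBuilder()
        .setAvroRows(AvroRows.newBuilder().setSerializedBinaryRows(serializedRows))
        .setRowCount(1)
        .build();
Iterator<ReadRowsResponse> readRowsResponses = ImmutableList.of(response).iterator();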