Use of com.google.cloud.bigquery.storage.v1.ReadRowsRequest in project beam by apache.
From the class BigQueryIOStorageReadTest, method testFractionConsumedWithSplitArrow:
@Test
public void testFractionConsumedWithSplitArrow() throws Exception {
  ReadSession readSession =
      ReadSession.newBuilder()
          .setName("readSession")
          .setArrowSchema(
              ArrowSchema.newBuilder()
                  .setSerializedSchema(serializeArrowSchema(ARROW_SCHEMA))
                  .build())
          .setDataFormat(DataFormat.ARROW)
          .build();
  ReadRowsRequest expectedRequest =
      ReadRowsRequest.newBuilder().setReadStream("parentStream").build();
  List<String> names = Arrays.asList("A", "B", "C", "D", "E", "F", "G");
  List<Long> values = Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L, 7L);
  List<ReadRowsResponse> parentResponses =
      Lists.newArrayList(
          createResponseArrow(ARROW_SCHEMA, names.subList(0, 2), values.subList(0, 2), 0.0, 0.25),
          createResponseArrow(ARROW_SCHEMA, names.subList(2, 4), values.subList(2, 4), 0.3, 0.5),
          createResponseArrow(ARROW_SCHEMA, names.subList(4, 7), values.subList(4, 7), 0.7, 0.875));
  StorageClient fakeStorageClient = mock(StorageClient.class);
  when(fakeStorageClient.readRows(expectedRequest, ""))
      .thenReturn(new FakeBigQueryServerStream<>(parentResponses));
  when(fakeStorageClient.splitReadStream(
          SplitReadStreamRequest.newBuilder()
              .setName("parentStream")
              .setFraction(0.5f)
              .build()))
      .thenReturn(
          SplitReadStreamResponse.newBuilder()
              .setPrimaryStream(ReadStream.newBuilder().setName("primaryStream"))
              .setRemainderStream(ReadStream.newBuilder().setName("remainderStream"))
              .build());
  List<ReadRowsResponse> primaryResponses =
      Lists.newArrayList(
          createResponseArrow(ARROW_SCHEMA, names.subList(1, 3), values.subList(1, 3), 0.25, 0.75),
          createResponseArrow(ARROW_SCHEMA, names.subList(3, 4), values.subList(3, 4), 0.8, 1.0));
  when(fakeStorageClient.readRows(
          ReadRowsRequest.newBuilder().setReadStream("primaryStream").setOffset(1).build(), ""))
      .thenReturn(new FakeBigQueryServerStream<>(primaryResponses));
  BigQueryStorageStreamSource<TableRow> streamSource =
      BigQueryStorageStreamSource.create(
          readSession,
          ReadStream.newBuilder().setName("parentStream").build(),
          TABLE_SCHEMA,
          new TableRowParser(),
          TableRowJsonCoder.of(),
          new FakeBigQueryServices().withStorageClient(fakeStorageClient));
  BoundedReader<TableRow> reader = streamSource.createReader(options);
  // Before the call to BoundedReader#start, the fraction consumed must be zero.
  assertEquals(0.0, reader.getFractionConsumed(), DELTA);
  // Reads A.
  assertTrue(reader.start());
  assertEquals(0.125, reader.getFractionConsumed(), DELTA);
  reader.splitAtFraction(0.5);
  assertEquals(0.125, reader.getFractionConsumed(), DELTA);
  // Reads B (now served from the primary stream).
  assertTrue(reader.advance());
  assertEquals(0.5, reader.getFractionConsumed(), DELTA);
  // Reads C.
  assertTrue(reader.advance());
  assertEquals(0.75, reader.getFractionConsumed(), DELTA);
  // Reads D.
  assertTrue(reader.advance());
  assertEquals(1.0, reader.getFractionConsumed(), DELTA);
  assertFalse(reader.advance());
  assertEquals(1.0, reader.getFractionConsumed(), DELTA);
}
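
The createResponseArrow helper is defined elsewhere in the test class and is not shown on this page. Its last two arguments are the stream-progress values at the start and end of each fake response, which is what getFractionConsumed() interpolates over: after reading A, the reader reports 0.125 because one of the first response's two rows has been consumed and that response's interval is [0.0, 0.25]. As a minimal sketch (an assumption about the helper's output, with the serialized Arrow batch omitted), each fake response presumably carries its progress like this:

// Sketch only: StreamStats and its nested Progress message live in
// com.google.cloud.bigquery.storage.v1. The Arrow payload is omitted here.
ReadRowsResponse response =
    ReadRowsResponse.newBuilder()
        // Two rows in this batch (A and B).
        .setRowCount(2)
        .setStats(
            StreamStats.newBuilder()
                .setProgress(
                    StreamStats.Progress.newBuilder()
                        .setAtResponseStart(0.0)
                        .setAtResponseEnd(0.25)))
        .build();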
Use of com.google.cloud.bigquery.storage.v1.ReadRowsRequest in project beam by apache.
From the class BigQueryIOStorageQueryTest, method doReadFromBigQueryIO:
private void doReadFromBigQueryIO(boolean templateCompatibility) throws Exception {
  TableReference sourceTableRef = BigQueryHelpers.parseTableSpec("project:dataset.table");
  fakeDatasetService.createDataset(
      sourceTableRef.getProjectId(),
      sourceTableRef.getDatasetId(),
      "asia-northeast1",
      "Fake plastic tree^H^H^H^Htables",
      null);
  fakeDatasetService.createTable(
      new Table().setTableReference(sourceTableRef).setLocation("asia-northeast1"));
  Table queryResultTable = new Table().setSchema(TABLE_SCHEMA).setNumBytes(0L);
  String encodedQuery = FakeBigQueryServices.encodeQueryResult(queryResultTable);
  fakeJobService.expectDryRunQuery(
      options.getProject(),
      encodedQuery,
      new JobStatistics()
          .setQuery(
              new JobStatistics2()
                  .setTotalBytesProcessed(1024L * 1024L)
                  .setReferencedTables(ImmutableList.of(sourceTableRef))));
  ReadSession readSession =
      ReadSession.newBuilder()
          .setName("readSessionName")
          .setAvroSchema(AvroSchema.newBuilder().setSchema(AVRO_SCHEMA_STRING))
          .addStreams(ReadStream.newBuilder().setName("streamName"))
          .setDataFormat(DataFormat.AVRO)
          .build();
  ReadRowsRequest expectedReadRowsRequest =
      ReadRowsRequest.newBuilder().setReadStream("streamName").build();
  List<GenericRecord> records =
      Lists.newArrayList(
          createRecord("A", 1, AVRO_SCHEMA),
          createRecord("B", 2, AVRO_SCHEMA),
          createRecord("C", 3, AVRO_SCHEMA),
          createRecord("D", 4, AVRO_SCHEMA));
  List<ReadRowsResponse> readRowsResponses =
      Lists.newArrayList(
          createResponse(AVRO_SCHEMA, records.subList(0, 2), 0.0, 0.500),
          createResponse(AVRO_SCHEMA, records.subList(2, 4), 0.5, 0.875));
  //
  // Note that since the temporary table name is generated by the pipeline, we can't match the
  // expected create read session request exactly. For now, match against any appropriately typed
  // proto object.
  //
  StorageClient fakeStorageClient = mock(StorageClient.class, withSettings().serializable());
  when(fakeStorageClient.createReadSession(any())).thenReturn(readSession);
  when(fakeStorageClient.readRows(expectedReadRowsRequest, ""))
      .thenReturn(new FakeBigQueryServerStream<>(readRowsResponses));
  BigQueryIO.TypedRead<KV<String, Long>> typedRead =
      BigQueryIO.read(new ParseKeyValue())
          .fromQuery(encodedQuery)
          .withMethod(Method.DIRECT_READ)
          .withTestServices(
              new FakeBigQueryServices()
                  .withDatasetService(fakeDatasetService)
                  .withJobService(fakeJobService)
                  .withStorageClient(fakeStorageClient));
  if (templateCompatibility) {
    typedRead = typedRead.withTemplateCompatibility();
  }
  PCollection<KV<String, Long>> output = p.apply(typedRead);
  PAssert.that(output)
      .containsInAnyOrder(
          ImmutableList.of(KV.of("A", 1L), KV.of("B", 2L), KV.of("C", 3L), KV.of("D", 4L)));
  p.run();
}
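
ParseKeyValue is likewise defined elsewhere in the test class. A minimal sketch of what such a parse function could look like, assuming the Avro records produced by createRecord("A", 1, AVRO_SCHEMA) carry "name" and "number" fields (those field names are an assumption, not confirmed by this page):

// Sketch only: SchemaAndRecord is org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord;
// the "name" and "number" field names are assumed from createRecord's arguments.
static class ParseKeyValue
    implements SerializableFunction<SchemaAndRecord, KV<String, Long>> {
  @Override
  public KV<String, Long> apply(SchemaAndRecord input) {
    // Avro strings deserialize as Utf8, so normalize the key with toString().
    return KV.of(
        input.getRecord().get("name").toString(),
        (Long) input.getRecord().get("number"));
  }
}

Because the function is handed to BigQueryIO.read(...) and shipped to workers, it must be serializable; implementing SerializableFunction covers that.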