Example 6 with ReadRowsResponse

Use of com.google.cloud.bigquery.storage.v1.ReadRowsResponse in project beam by apache.

From class BigQueryIOStorageReadTest, method testStreamSourceSplitAtFractionSucceedsArrow.

@Test
public void testStreamSourceSplitAtFractionSucceedsArrow() throws Exception {
    List<String> names = Arrays.asList("A", "B", "C", "D", "E", "F", "G");
    List<Long> values = Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L, 7L);
    List<ReadRowsResponse> parentResponses =
        Lists.newArrayList(
            createResponseArrow(ARROW_SCHEMA, names.subList(0, 2), values.subList(0, 2), 0.0, 0.25),
            createResponseArrow(ARROW_SCHEMA, names.subList(2, 3), values.subList(2, 3), 0.25, 0.5),
            createResponseArrow(ARROW_SCHEMA, names.subList(3, 5), values.subList(3, 5), 0.5, 0.75));
    StorageClient fakeStorageClient = mock(StorageClient.class);
    when(fakeStorageClient.readRows(
            ReadRowsRequest.newBuilder().setReadStream("parentStream").build(), ""))
        .thenReturn(new FakeBigQueryServerStream<>(parentResponses));
    // Mocks the split call.
    when(fakeStorageClient.splitReadStream(
            SplitReadStreamRequest.newBuilder().setName("parentStream").setFraction(0.5f).build()))
        .thenReturn(
            SplitReadStreamResponse.newBuilder()
                .setPrimaryStream(ReadStream.newBuilder().setName("primaryStream"))
                .setRemainderStream(ReadStream.newBuilder().setName("remainderStream"))
                .build());
    // Mocks the ReadRows calls expected on the primary and residual streams.
    when(fakeStorageClient.readRows(
            ReadRowsRequest.newBuilder().setReadStream("primaryStream").setOffset(2).build(), ""))
        .thenReturn(new FakeBigQueryServerStream<>(parentResponses.subList(1, 2)));
    when(fakeStorageClient.readRows(
            ReadRowsRequest.newBuilder().setReadStream("remainderStream").build(), ""))
        .thenReturn(
            new FakeBigQueryServerStream<>(parentResponses.subList(2, parentResponses.size())));
    BigQueryStorageStreamSource<TableRow> streamSource =
        BigQueryStorageStreamSource.create(
            ReadSession.newBuilder()
                .setName("readSession")
                .setArrowSchema(
                    ArrowSchema.newBuilder()
                        .setSerializedSchema(serializeArrowSchema(ARROW_SCHEMA))
                        .build())
                .setDataFormat(DataFormat.ARROW)
                .build(),
            ReadStream.newBuilder().setName("parentStream").build(),
            TABLE_SCHEMA,
            new TableRowParser(),
            TableRowJsonCoder.of(),
            new FakeBigQueryServices().withStorageClient(fakeStorageClient));
    // Read a few records from the parent stream and ensure that records are returned in the
    // prescribed order.
    BoundedReader<TableRow> parent = streamSource.createReader(options);
    assertTrue(parent.start());
    assertEquals("A", parent.getCurrent().get("name"));
    assertTrue(parent.advance());
    assertEquals("B", parent.getCurrent().get("name"));
    // Now split the stream, and ensure that the "parent" reader has been replaced with the
    // primary stream and that the returned source points to the residual stream.
    BoundedReader<TableRow> primary = parent;
    BoundedSource<TableRow> residualSource = parent.splitAtFraction(0.5);
    assertNotNull(residualSource);
    BoundedReader<TableRow> residual = residualSource.createReader(options);
    assertTrue(primary.advance());
    assertEquals("C", primary.getCurrent().get("name"));
    assertFalse(primary.advance());
    assertTrue(residual.start());
    assertEquals("D", residual.getCurrent().get("name"));
    assertTrue(residual.advance());
    assertEquals("E", residual.getCurrent().get("name"));
    assertFalse(residual.advance());
}
Also used: StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient), ByteString (com.google.protobuf.ByteString), TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser), ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse), TableRow (com.google.api.services.bigquery.model.TableRow), FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices), Test (org.junit.Test)
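
The mocks above return a FakeBigQueryServerStream, a test helper defined elsewhere in BigQueryIOStorageReadTest and not shown in this excerpt. A minimal sketch of such a fake, assuming BigQueryServices.BigQueryServerStream is an Iterable with a cancel() hook (as in Beam's BigQueryServices interface), might look like this:

// Minimal sketch of the fake server stream used by the mocks above; the real
// helper lives in the Beam test class, so treat this as an illustration only.
private static class FakeBigQueryServerStream<T>
        implements BigQueryServices.BigQueryServerStream<T> {

    private final List<T> responses;

    FakeBigQueryServerStream(List<T> responses) {
        this.responses = responses;
    }

    @Override
    public Iterator<T> iterator() {
        // Replay the canned responses in order, as a real server stream would.
        return responses.iterator();
    }

    @Override
    public void cancel() {
        // No-op: there is no underlying RPC to cancel in a fake.
    }
}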

Example 7 with ReadRowsResponse

Use of com.google.cloud.bigquery.storage.v1.ReadRowsResponse in project beam by apache.

From class BigQueryIOStorageReadTest, method createResponseArrow.

private ReadRowsResponse createResponseArrow(
        org.apache.arrow.vector.types.pojo.Schema arrowSchema,
        List<String> name,
        List<Long> number,
        double progressAtResponseStart,
        double progressAtResponseEnd) {
    ArrowRecordBatch serializedRecord;
    try (VectorSchemaRoot schemaRoot = VectorSchemaRoot.create(arrowSchema, allocator)) {
        schemaRoot.allocateNew();
        schemaRoot.setRowCount(name.size());
        VarCharVector strVector = (VarCharVector) schemaRoot.getFieldVectors().get(0);
        BigIntVector bigIntVector = (BigIntVector) schemaRoot.getFieldVectors().get(1);
        for (int i = 0; i < name.size(); i++) {
            bigIntVector.set(i, number.get(i));
            strVector.set(i, new Text(name.get(i)));
        }
        VectorUnloader unLoader = new VectorUnloader(schemaRoot);
        try (org.apache.arrow.vector.ipc.message.ArrowRecordBatch records = unLoader.getRecordBatch()) {
            try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
                MessageSerializer.serialize(new WriteChannel(Channels.newChannel(os)), records);
                serializedRecord =
                    ArrowRecordBatch.newBuilder()
                        .setRowCount(records.getLength())
                        .setSerializedRecordBatch(ByteString.copyFrom(os.toByteArray()))
                        .build();
            } catch (IOException e) {
                throw new RuntimeException("Error writing to byte array output stream", e);
            }
        }
    }
    return ReadRowsResponse.newBuilder()
        .setArrowRecordBatch(serializedRecord)
        .setRowCount(name.size())
        .setStats(
            StreamStats.newBuilder()
                .setProgress(
                    Progress.newBuilder()
                        .setAtResponseStart(progressAtResponseStart)
                        .setAtResponseEnd(progressAtResponseEnd)))
        .build();
}
Also used: VectorSchemaRoot (org.apache.arrow.vector.VectorSchemaRoot), VarCharVector (org.apache.arrow.vector.VarCharVector), Text (org.apache.arrow.vector.util.Text), ByteArrayOutputStream (java.io.ByteArrayOutputStream), IOException (java.io.IOException), BigIntVector (org.apache.arrow.vector.BigIntVector), VectorUnloader (org.apache.arrow.vector.VectorUnloader), StatusRuntimeException (io.grpc.StatusRuntimeException), ArrowRecordBatch (com.google.cloud.bigquery.storage.v1.ArrowRecordBatch), WriteChannel (org.apache.arrow.vector.ipc.WriteChannel)
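
To see that the serialization in createResponseArrow round-trips, the record batch bytes can be rebuilt into a VectorSchemaRoot with the standard Arrow IPC APIs (org.apache.arrow.vector.ipc.ReadChannel, org.apache.arrow.vector.ipc.message.MessageSerializer, and org.apache.arrow.vector.VectorLoader). The helper below is an illustrative addition, not part of the Beam test; the method name verifyRoundTrip is ours:

// Illustrative round-trip check: deserialize the bytes produced by
// createResponseArrow and load them back into Arrow vectors.
private static void verifyRoundTrip(
        org.apache.arrow.vector.types.pojo.Schema arrowSchema,
        ArrowRecordBatch serializedRecord,
        BufferAllocator allocator) throws IOException {
    try (VectorSchemaRoot root = VectorSchemaRoot.create(arrowSchema, allocator);
            org.apache.arrow.vector.ipc.message.ArrowRecordBatch batch =
                MessageSerializer.deserializeRecordBatch(
                    new ReadChannel(
                        Channels.newChannel(
                            serializedRecord.getSerializedRecordBatch().newInput())),
                    allocator)) {
        // VectorLoader repopulates the root's vectors from the IPC batch.
        new VectorLoader(root).load(batch);
        assertEquals(serializedRecord.getRowCount(), root.getRowCount());
    }
}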

Example 8 with ReadRowsResponse

Use of com.google.cloud.bigquery.storage.v1.ReadRowsResponse in project beam by apache.

From class BigQueryIOStorageReadTest, method testReadFromStreamSource.

@Test
public void testReadFromStreamSource() throws Exception {
    ReadSession readSession =
        ReadSession.newBuilder()
            .setName("readSession")
            .setAvroSchema(AvroSchema.newBuilder().setSchema(AVRO_SCHEMA_STRING))
            .build();
    ReadRowsRequest expectedRequest =
        ReadRowsRequest.newBuilder().setReadStream("readStream").build();
    List<GenericRecord> records =
        Lists.newArrayList(
            createRecord("A", 1, AVRO_SCHEMA),
            createRecord("B", 2, AVRO_SCHEMA),
            createRecord("C", 3, AVRO_SCHEMA));
    List<ReadRowsResponse> responses =
        Lists.newArrayList(
            createResponse(AVRO_SCHEMA, records.subList(0, 2), 0.0, 0.50),
            createResponse(AVRO_SCHEMA, records.subList(2, 3), 0.5, 0.75));
    StorageClient fakeStorageClient = mock(StorageClient.class);
    when(fakeStorageClient.readRows(expectedRequest, ""))
        .thenReturn(new FakeBigQueryServerStream<>(responses));
    BigQueryStorageStreamSource<TableRow> streamSource =
        BigQueryStorageStreamSource.create(
            readSession,
            ReadStream.newBuilder().setName("readStream").build(),
            TABLE_SCHEMA,
            new TableRowParser(),
            TableRowJsonCoder.of(),
            new FakeBigQueryServices().withStorageClient(fakeStorageClient));
    List<TableRow> rows = new ArrayList<>();
    BoundedReader<TableRow> reader = streamSource.createReader(options);
    for (boolean hasNext = reader.start(); hasNext; hasNext = reader.advance()) {
        rows.add(reader.getCurrent());
    }
    assertEquals(3, rows.size());
}
Also used: ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession), ArrayList (java.util.ArrayList), ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest), StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient), TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser), ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse), TableRow (com.google.api.services.bigquery.model.TableRow), FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices), GenericRecord (org.apache.avro.generic.GenericRecord), Test (org.junit.Test)
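
The createResponse helper used above is not shown in this excerpt. A plausible sketch, assuming it mirrors createResponseArrow by binary-encoding the GenericRecords with Avro's EncoderFactory and GenericDatumWriter and wrapping the bytes in an AvroRows payload (the exact body in the Beam test may differ):

// Plausible sketch of createResponse, the Avro counterpart of createResponseArrow.
private ReadRowsResponse createResponse(
        org.apache.avro.Schema schema,
        List<GenericRecord> records,
        double progressAtResponseStart,
        double progressAtResponseEnd) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    for (GenericRecord record : records) {
        // Avro binary encoding, matching what the Storage API returns in AvroRows.
        writer.write(record, encoder);
    }
    encoder.flush();
    return ReadRowsResponse.newBuilder()
        .setAvroRows(
            AvroRows.newBuilder()
                .setSerializedBinaryRows(ByteString.copyFrom(out.toByteArray())))
        .setRowCount(records.size())
        .setStats(
            StreamStats.newBuilder()
                .setProgress(
                    Progress.newBuilder()
                        .setAtResponseStart(progressAtResponseStart)
                        .setAtResponseEnd(progressAtResponseEnd)))
        .build();
}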

Example 9 with ReadRowsResponse

Use of com.google.cloud.bigquery.storage.v1.ReadRowsResponse in project beam by apache.

From class BigQueryStorageArrowReader, method processReadRowsResponse.

@Override
public void processReadRowsResponse(ReadRowsResponse readRowsResponse) throws IOException {
    com.google.cloud.bigquery.storage.v1.ArrowRecordBatch recordBatch = readRowsResponse.getArrowRecordBatch();
    rowCount = recordBatch.getRowCount();
    this.alloc = new RootAllocator(Long.MAX_VALUE);
    InputStream input = protoSchema.getSerializedSchema().newInput();
    Schema arrowSchema = ArrowConversion.arrowSchemaFromInput(input);
    this.recordBatchIterator = ArrowConversion.rowsFromSerializedRecordBatch(arrowSchema, recordBatch.getSerializedRecordBatch().newInput(), this.alloc);
}
Also used: RootAllocator (org.apache.arrow.memory.RootAllocator), InputStream (java.io.InputStream), Schema (org.apache.arrow.vector.types.pojo.Schema), ArrowSchema (com.google.cloud.bigquery.storage.v1.ArrowSchema)
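
Each call to processReadRowsResponse allocates a fresh RootAllocator and record-batch iterator, so the reader must release both once the batch is drained or direct memory leaks. A hedged sketch of the matching cleanup step, assuming the iterator is AutoCloseable and the reader's hook is named cleanUp (the actual hook name may differ):

// Sketch of the cleanup that pairs with processReadRowsResponse above;
// both the iterator and the allocator hold direct memory.
public void cleanUp() {
    if (recordBatchIterator != null) {
        recordBatchIterator.close();
        recordBatchIterator = null;
    }
    if (alloc != null) {
        alloc.close();
        alloc = null;
    }
}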

Example 10 with ReadRowsResponse

Use of com.google.cloud.bigquery.storage.v1.ReadRowsResponse in project beam by apache.

From class BigQueryStorageAvroReader, method processReadRowsResponse.

@Override
public void processReadRowsResponse(ReadRowsResponse readRowsResponse) {
    AvroRows avroRows = readRowsResponse.getAvroRows();
    rowCount = avroRows.getRowCount();
    decoder = DecoderFactory.get().binaryDecoder(avroRows.getSerializedBinaryRows().toByteArray(), decoder);
}
Also used: AvroRows (com.google.cloud.bigquery.storage.v1.AvroRows)
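
The BinaryDecoder primed here is consumed elsewhere in the reader. A minimal sketch of that consumption, assuming a datumReader field of type GenericDatumReader<GenericRecord> (the field name and method names are illustrative): one record is decoded per call until the rowCount records from the current response are exhausted, and BinaryDecoder.isEnd() signals when the next ReadRowsResponse is needed.

// Illustrative consumption of the decoder state set above.
public GenericRecord readSingleRecord() throws IOException {
    // Decode the next record from the binary block; passing null avoids record reuse.
    GenericRecord record = datumReader.read(null, decoder);
    rowCount--;
    return record;
}

// Ready for the next ReadRowsResponse once the current block is fully decoded.
public boolean readyForNextReadResponse() throws IOException {
    return decoder == null || decoder.isEnd();
}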

Aggregations

ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse): 18
StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient): 17
FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices): 17
Test (org.junit.Test): 17
TableRow (com.google.api.services.bigquery.model.TableRow): 14
TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser): 13
ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest): 11
ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession): 10
ByteString (com.google.protobuf.ByteString): 9
GenericRecord (org.apache.avro.generic.GenericRecord): 6
Table (com.google.api.services.bigquery.model.Table): 4
TableReference (com.google.api.services.bigquery.model.TableReference): 4
CreateReadSessionRequest (com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest): 3
StatusRuntimeException (io.grpc.StatusRuntimeException): 3
KV (org.apache.beam.sdk.values.KV): 3
FailedPreconditionException (com.google.api.gax.rpc.FailedPreconditionException): 2
ArrayList (java.util.ArrayList): 2
JobStatistics (com.google.api.services.bigquery.model.JobStatistics): 1
JobStatistics2 (com.google.api.services.bigquery.model.JobStatistics2): 1
ArrowRecordBatch (com.google.cloud.bigquery.storage.v1.ArrowRecordBatch): 1