
Example 1 with ReadStream

Use of com.google.cloud.bigquery.storage.v1.ReadStream in project beam by apache.

The class BigQueryStorageSourceBase, method split:

@Override
public List<BigQueryStorageStreamSource<T>> split(long desiredBundleSizeBytes, PipelineOptions options) throws Exception {
    BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
    Table targetTable = getTargetTable(bqOptions);
    ReadSession.Builder readSessionBuilder = ReadSession.newBuilder();
    if (targetTable != null) {
        readSessionBuilder.setTable(BigQueryHelpers.toTableResourceName(targetTable.getTableReference()));
    } else {
        // If the table does not exist, targetTable will be null.
        // Construct the table id if we can generate it, for error recording/logging.
        @Nullable String tableReferenceId = getTargetTableId(bqOptions);
        if (tableReferenceId != null) {
            readSessionBuilder.setTable(tableReferenceId);
        }
    }
    if (selectedFieldsProvider != null || rowRestrictionProvider != null) {
        ReadSession.TableReadOptions.Builder tableReadOptionsBuilder = ReadSession.TableReadOptions.newBuilder();
        if (selectedFieldsProvider != null) {
            tableReadOptionsBuilder.addAllSelectedFields(selectedFieldsProvider.get());
        }
        if (rowRestrictionProvider != null) {
            tableReadOptionsBuilder.setRowRestriction(rowRestrictionProvider.get());
        }
        readSessionBuilder.setReadOptions(tableReadOptionsBuilder);
    }
    if (format != null) {
        readSessionBuilder.setDataFormat(format);
    }
    int streamCount = 0;
    if (desiredBundleSizeBytes > 0) {
        long tableSizeBytes = (targetTable != null) ? targetTable.getNumBytes() : 0;
        streamCount = (int) Math.min(tableSizeBytes / desiredBundleSizeBytes, MAX_SPLIT_COUNT);
    }
    streamCount = Math.max(streamCount, MIN_SPLIT_COUNT);
    CreateReadSessionRequest createReadSessionRequest =
        CreateReadSessionRequest.newBuilder()
            .setParent(
                BigQueryHelpers.toProjectResourceName(
                    bqOptions.getBigQueryProject() == null
                        ? bqOptions.getProject()
                        : bqOptions.getBigQueryProject()))
            .setReadSession(readSessionBuilder)
            .setMaxStreamCount(streamCount)
            .build();
    ReadSession readSession;
    try (StorageClient client = bqServices.getStorageClient(bqOptions)) {
        readSession = client.createReadSession(createReadSessionRequest);
        LOG.info("Sent BigQuery Storage API CreateReadSession request '{}'; received response '{}'.", createReadSessionRequest, readSession);
    }
    if (readSession.getStreamsList().isEmpty()) {
        // The underlying table is empty or all rows have been pruned.
        return ImmutableList.of();
    }
    Schema sessionSchema;
    if (readSession.getDataFormat() == DataFormat.ARROW) {
        org.apache.arrow.vector.types.pojo.Schema schema = ArrowConversion.arrowSchemaFromInput(readSession.getArrowSchema().getSerializedSchema().newInput());
        org.apache.beam.sdk.schemas.Schema beamSchema = ArrowConversion.ArrowSchemaTranslator.toBeamSchema(schema);
        sessionSchema = AvroUtils.toAvroSchema(beamSchema);
    } else if (readSession.getDataFormat() == DataFormat.AVRO) {
        sessionSchema = new Schema.Parser().parse(readSession.getAvroSchema().getSchema());
    } else {
        throw new IllegalArgumentException("data is not in a supported dataFormat: " + readSession.getDataFormat());
    }
    TableSchema trimmedSchema = BigQueryAvroUtils.trimBigQueryTableSchema(targetTable.getSchema(), sessionSchema);
    List<BigQueryStorageStreamSource<T>> sources = Lists.newArrayList();
    for (ReadStream readStream : readSession.getStreamsList()) {
        sources.add(BigQueryStorageStreamSource.create(readSession, readStream, trimmedSchema, parseFn, outputCoder, bqServices));
    }
    return ImmutableList.copyOf(sources);
}
Also used: TableSchema (com.google.api.services.bigquery.model.TableSchema), Schema (org.apache.avro.Schema), ReadStream (com.google.cloud.bigquery.storage.v1.ReadStream), Table (com.google.api.services.bigquery.model.Table), ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession), StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient), Nullable (org.checkerframework.checker.nullness.qual.Nullable), CreateReadSessionRequest (com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest)
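
For context, the stream count requested from the service is just the table size divided by the desired bundle size, clamped between MIN_SPLIT_COUNT and MAX_SPLIT_COUNT. A minimal sketch of that arithmetic, using hypothetical values for the two constants (the real values are defined in BigQueryStorageSourceBase):

static int computeStreamCount(long tableSizeBytes, long desiredBundleSizeBytes) {
    // Hypothetical bounds for illustration only; Beam defines its own constants.
    final int MIN_SPLIT_COUNT = 10;
    final int MAX_SPLIT_COUNT = 10_000;
    int streamCount = 0;
    if (desiredBundleSizeBytes > 0) {
        // One stream per desired bundle, capped at the maximum split count.
        streamCount = (int) Math.min(tableSizeBytes / desiredBundleSizeBytes, MAX_SPLIT_COUNT);
    }
    // Never request fewer streams than the minimum split count.
    return Math.max(streamCount, MIN_SPLIT_COUNT);
}

For example, a 10 GiB table with a 64 MiB desired bundle size yields 160 streams, while a table smaller than one bundle still requests MIN_SPLIT_COUNT streams.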

Example 2 with ReadStream

Use of com.google.cloud.bigquery.storage.v1.ReadStream in project beam by apache.

The class BigQueryIOStorageReadTest, method testFractionConsumedArrow:

@Test
public void testFractionConsumedArrow() throws Exception {
    ReadSession readSession =
        ReadSession.newBuilder()
            .setName("readSession")
            .setArrowSchema(
                ArrowSchema.newBuilder().setSerializedSchema(serializeArrowSchema(ARROW_SCHEMA)).build())
            .setDataFormat(DataFormat.ARROW)
            .build();
    ReadRowsRequest expectedRequest = ReadRowsRequest.newBuilder().setReadStream("readStream").build();
    List<String> names = Arrays.asList("A", "B", "C", "D", "E", "F", "G");
    List<Long> values = Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L, 7L);
    List<ReadRowsResponse> responses =
        Lists.newArrayList(
            createResponseArrow(ARROW_SCHEMA, names.subList(0, 2), values.subList(0, 2), 0.0, 0.25),
            // An empty response; the reader must be resilient to such responses.
            createResponseArrow(ARROW_SCHEMA, Lists.newArrayList(), Lists.newArrayList(), 0.25, 0.25),
            createResponseArrow(ARROW_SCHEMA, names.subList(2, 4), values.subList(2, 4), 0.3, 0.5),
            createResponseArrow(ARROW_SCHEMA, names.subList(4, 7), values.subList(4, 7), 0.7, 1.0));
    StorageClient fakeStorageClient = mock(StorageClient.class);
    when(fakeStorageClient.readRows(expectedRequest, "")).thenReturn(new FakeBigQueryServerStream<>(responses));
    BigQueryStorageStreamSource<TableRow> streamSource =
        BigQueryStorageStreamSource.create(
            readSession,
            ReadStream.newBuilder().setName("readStream").build(),
            TABLE_SCHEMA,
            new TableRowParser(),
            TableRowJsonCoder.of(),
            new FakeBigQueryServices().withStorageClient(fakeStorageClient));
    BoundedReader<TableRow> reader = streamSource.createReader(options);
    // Before call to BoundedReader#start, fraction consumed must be zero.
    assertEquals(0.0, reader.getFractionConsumed(), DELTA);
    // Reads A.
    assertTrue(reader.start());
    assertEquals(0.125, reader.getFractionConsumed(), DELTA);
    // Reads B.
    assertTrue(reader.advance());
    assertEquals(0.25, reader.getFractionConsumed(), DELTA);
    // Reads C.
    assertTrue(reader.advance());
    assertEquals(0.4, reader.getFractionConsumed(), DELTA);
    // Reads D.
    assertTrue(reader.advance());
    assertEquals(0.5, reader.getFractionConsumed(), DELTA);
    // Reads E.
    assertTrue(reader.advance());
    assertEquals(0.8, reader.getFractionConsumed(), DELTA);
    // Reads F.
    assertTrue(reader.advance());
    assertEquals(0.9, reader.getFractionConsumed(), DELTA);
    // Reads G.
    assertTrue(reader.advance());
    assertEquals(1.0, reader.getFractionConsumed(), DELTA);
    // Reaches the end.
    assertFalse(reader.advance());
    // We are done with the stream, so we should report 100% consumption.
    assertEquals(Double.valueOf(1.0), reader.getFractionConsumed());
}
Also used: ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession), ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest), StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient), ByteString (com.google.protobuf.ByteString), TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser), ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse), TableRow (com.google.api.services.bigquery.model.TableRow), FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices), Test (org.junit.Test)
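
The expected fractions follow from linear interpolation within each response's progress interval: the first response spans [0.0, 0.25] and carries two rows, so after reading A the reader reports 0.0 + (1/2) * (0.25 - 0.0) = 0.125. A sketch of that interpolation, assuming this is how the reader derives its estimate (the actual computation lives inside BigQueryStorageStreamSource's reader):

static double interpolateFraction(
        double atResponseStart, double atResponseEnd, long rowsRead, long rowsInResponse) {
    // Progress advances linearly from the response's start fraction to its end fraction.
    return atResponseStart + ((double) rowsRead / rowsInResponse) * (atResponseEnd - atResponseStart);
}

For instance, interpolateFraction(0.7, 1.0, 1, 3) gives 0.8 and interpolateFraction(0.7, 1.0, 2, 3) gives 0.9, matching the assertions after reading E and F.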

Example 3 with ReadStream

Use of com.google.cloud.bigquery.storage.v1.ReadStream in project beam by apache.

The class BigQueryIOStorageReadTest, method testFractionConsumed:

@Test
public void testFractionConsumed() throws Exception {
    ReadSession readSession =
        ReadSession.newBuilder()
            .setName("readSession")
            .setAvroSchema(AvroSchema.newBuilder().setSchema(AVRO_SCHEMA_STRING))
            .build();
    ReadRowsRequest expectedRequest = ReadRowsRequest.newBuilder().setReadStream("readStream").build();
    List<GenericRecord> records =
        Lists.newArrayList(
            createRecord("A", 1, AVRO_SCHEMA),
            createRecord("B", 2, AVRO_SCHEMA),
            createRecord("C", 3, AVRO_SCHEMA),
            createRecord("D", 4, AVRO_SCHEMA),
            createRecord("E", 5, AVRO_SCHEMA),
            createRecord("F", 6, AVRO_SCHEMA),
            createRecord("G", 7, AVRO_SCHEMA));
    List<ReadRowsResponse> responses =
        Lists.newArrayList(
            createResponse(AVRO_SCHEMA, records.subList(0, 2), 0.0, 0.25),
            // An empty response; the reader must be resilient to such responses.
            createResponse(AVRO_SCHEMA, Lists.newArrayList(), 0.25, 0.25),
            createResponse(AVRO_SCHEMA, records.subList(2, 4), 0.3, 0.5),
            createResponse(AVRO_SCHEMA, records.subList(4, 7), 0.7, 1.0));
    StorageClient fakeStorageClient = mock(StorageClient.class);
    when(fakeStorageClient.readRows(expectedRequest, "")).thenReturn(new FakeBigQueryServerStream<>(responses));
    BigQueryStorageStreamSource<TableRow> streamSource =
        BigQueryStorageStreamSource.create(
            readSession,
            ReadStream.newBuilder().setName("readStream").build(),
            TABLE_SCHEMA,
            new TableRowParser(),
            TableRowJsonCoder.of(),
            new FakeBigQueryServices().withStorageClient(fakeStorageClient));
    BoundedReader<TableRow> reader = streamSource.createReader(options);
    // Before call to BoundedReader#start, fraction consumed must be zero.
    assertEquals(0.0, reader.getFractionConsumed(), DELTA);
    // Reads A.
    assertTrue(reader.start());
    assertEquals(0.125, reader.getFractionConsumed(), DELTA);
    // Reads B.
    assertTrue(reader.advance());
    assertEquals(0.25, reader.getFractionConsumed(), DELTA);
    // Reads C.
    assertTrue(reader.advance());
    assertEquals(0.4, reader.getFractionConsumed(), DELTA);
    // Reads D.
    assertTrue(reader.advance());
    assertEquals(0.5, reader.getFractionConsumed(), DELTA);
    // Reads E.
    assertTrue(reader.advance());
    assertEquals(0.8, reader.getFractionConsumed(), DELTA);
    // Reads F.
    assertTrue(reader.advance());
    assertEquals(0.9, reader.getFractionConsumed(), DELTA);
    // Reads G.
    assertTrue(reader.advance());
    assertEquals(1.0, reader.getFractionConsumed(), DELTA);
    // Reaches the end.
    assertFalse(reader.advance());
    // We are done with the stream, so we should report 100% consumption.
    assertEquals(Double.valueOf(1.0), reader.getFractionConsumed());
}
Also used: ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession), ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest), StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient), TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser), ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse), TableRow (com.google.api.services.bigquery.model.TableRow), FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices), GenericRecord (org.apache.avro.generic.GenericRecord), Test (org.junit.Test)
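
createResponse is a test helper not shown on this page. A plausible sketch of what it builds, assuming Avro binary encoding of the records and the standard ReadRowsResponse progress fields; the real helper in BigQueryIOStorageReadTest may differ in detail:

static ReadRowsResponse createResponse(
        Schema schema, List<GenericRecord> records, double start, double end) throws IOException {
    // Serialize the records into a single Avro binary payload.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    for (GenericRecord record : records) {
        writer.write(record, encoder);
    }
    encoder.flush();
    return ReadRowsResponse.newBuilder()
        .setAvroRows(
            AvroRows.newBuilder().setSerializedBinaryRows(ByteString.copyFrom(out.toByteArray())))
        .setRowCount(records.size())
        // The progress interval that getFractionConsumed() interpolates over.
        .setStats(
            StreamStats.newBuilder()
                .setProgress(
                    StreamStats.Progress.newBuilder()
                        .setAtResponseStart(start)
                        .setAtResponseEnd(end)))
        .build();
}

(Uses org.apache.avro.io.EncoderFactory and BinaryEncoder, org.apache.avro.generic.GenericDatumWriter, java.io.ByteArrayOutputStream, and AvroRows/StreamStats from com.google.cloud.bigquery.storage.v1.)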

Example 4 with ReadStream

Use of com.google.cloud.bigquery.storage.v1.ReadStream in project beam by apache.

The class BigQueryIOStorageReadTest, method testReadFromStreamSource:

@Test
public void testReadFromStreamSource() throws Exception {
    ReadSession readSession =
        ReadSession.newBuilder()
            .setName("readSession")
            .setAvroSchema(AvroSchema.newBuilder().setSchema(AVRO_SCHEMA_STRING))
            .build();
    ReadRowsRequest expectedRequest = ReadRowsRequest.newBuilder().setReadStream("readStream").build();
    List<GenericRecord> records =
        Lists.newArrayList(
            createRecord("A", 1, AVRO_SCHEMA),
            createRecord("B", 2, AVRO_SCHEMA),
            createRecord("C", 3, AVRO_SCHEMA));
    List<ReadRowsResponse> responses =
        Lists.newArrayList(
            createResponse(AVRO_SCHEMA, records.subList(0, 2), 0.0, 0.50),
            createResponse(AVRO_SCHEMA, records.subList(2, 3), 0.5, 0.75));
    StorageClient fakeStorageClient = mock(StorageClient.class);
    when(fakeStorageClient.readRows(expectedRequest, "")).thenReturn(new FakeBigQueryServerStream<>(responses));
    BigQueryStorageStreamSource<TableRow> streamSource =
        BigQueryStorageStreamSource.create(
            readSession,
            ReadStream.newBuilder().setName("readStream").build(),
            TABLE_SCHEMA,
            new TableRowParser(),
            TableRowJsonCoder.of(),
            new FakeBigQueryServices().withStorageClient(fakeStorageClient));
    List<TableRow> rows = new ArrayList<>();
    BoundedReader<TableRow> reader = streamSource.createReader(options);
    for (boolean hasNext = reader.start(); hasNext; hasNext = reader.advance()) {
        rows.add(reader.getCurrent());
    }
    assertEquals(3, rows.size());
}
Also used: ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession), ArrayList (java.util.ArrayList), ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest), StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient), TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser), ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse), TableRow (com.google.api.services.bigquery.model.TableRow), FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices), GenericRecord (org.apache.avro.generic.GenericRecord), Test (org.junit.Test)
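
The start/advance/getCurrent loop above is the general BoundedReader consumption pattern. A small hypothetical helper distilling it (not part of the test; needs org.apache.beam.sdk.io.BoundedSource, java.io.IOException, and java.util.ArrayList/List):

// Drains a BoundedReader: start() positions on the first element and
// advance() moves to each subsequent one; both return false when exhausted.
static <T> List<T> readAll(BoundedSource.BoundedReader<T> reader) throws IOException {
    List<T> results = new ArrayList<>();
    for (boolean hasNext = reader.start(); hasNext; hasNext = reader.advance()) {
        results.add(reader.getCurrent());
    }
    reader.close();
    return results;
}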

Example 5 with ReadStream

Use of com.google.cloud.bigquery.storage.v1.ReadStream in project beam by apache.

The class BigQueryIOStorageReadTest, method testStreamSourceSplitAtFractionRepeated:

@Test
public void testStreamSourceSplitAtFractionRepeated() throws Exception {
    List<ReadStream> readStreams =
        Lists.newArrayList(
            ReadStream.newBuilder().setName("stream1").build(),
            ReadStream.newBuilder().setName("stream2").build(),
            ReadStream.newBuilder().setName("stream3").build());
    StorageClient fakeStorageClient = mock(StorageClient.class);
    // Mock the initial ReadRows call.
    when(fakeStorageClient.readRows(
            ReadRowsRequest.newBuilder().setReadStream(readStreams.get(0).getName()).build(), ""))
        .thenReturn(new FakeBigQueryServerStream<>(Lists.newArrayList(
            createResponse(AVRO_SCHEMA,
                Lists.newArrayList(createRecord("A", 1, AVRO_SCHEMA), createRecord("B", 2, AVRO_SCHEMA)),
                0.0, 0.25),
            createResponse(AVRO_SCHEMA,
                Lists.newArrayList(createRecord("C", 3, AVRO_SCHEMA), createRecord("D", 4, AVRO_SCHEMA)),
                0.25, 0.50),
            createResponse(AVRO_SCHEMA,
                Lists.newArrayList(createRecord("E", 5, AVRO_SCHEMA), createRecord("F", 6, AVRO_SCHEMA)),
                0.5, 0.75))));
    // Mock the first SplitReadStream call.
    when(fakeStorageClient.splitReadStream(
            SplitReadStreamRequest.newBuilder()
                .setName(readStreams.get(0).getName())
                .setFraction(0.83f)
                .build()))
        .thenReturn(SplitReadStreamResponse.newBuilder()
            .setPrimaryStream(readStreams.get(1))
            .setRemainderStream(ReadStream.newBuilder().setName("ignored"))
            .build());
    // Mock the second ReadRows call.
    when(fakeStorageClient.readRows(
            ReadRowsRequest.newBuilder()
                .setReadStream(readStreams.get(1).getName())
                .setOffset(1)
                .build(), ""))
        .thenReturn(new FakeBigQueryServerStream<>(Lists.newArrayList(
            createResponse(AVRO_SCHEMA,
                Lists.newArrayList(createRecord("B", 2, AVRO_SCHEMA), createRecord("C", 3, AVRO_SCHEMA)),
                0.0, 0.50),
            createResponse(AVRO_SCHEMA,
                Lists.newArrayList(createRecord("D", 4, AVRO_SCHEMA), createRecord("E", 5, AVRO_SCHEMA)),
                0.5, 0.75))));
    // Mock the second SplitReadStream call.
    when(fakeStorageClient.splitReadStream(
            SplitReadStreamRequest.newBuilder()
                .setName(readStreams.get(1).getName())
                .setFraction(0.75f)
                .build()))
        .thenReturn(SplitReadStreamResponse.newBuilder()
            .setPrimaryStream(readStreams.get(2))
            .setRemainderStream(ReadStream.newBuilder().setName("ignored"))
            .build());
    // Mock the third ReadRows call.
    when(fakeStorageClient.readRows(
            ReadRowsRequest.newBuilder()
                .setReadStream(readStreams.get(2).getName())
                .setOffset(2)
                .build(), ""))
        .thenReturn(new FakeBigQueryServerStream<>(Lists.newArrayList(
            createResponse(AVRO_SCHEMA,
                Lists.newArrayList(createRecord("C", 3, AVRO_SCHEMA), createRecord("D", 4, AVRO_SCHEMA)),
                0.80, 0.90))));
    BoundedSource<TableRow> source =
        BigQueryStorageStreamSource.create(
            ReadSession.newBuilder()
                .setName("readSession")
                .setAvroSchema(AvroSchema.newBuilder().setSchema(AVRO_SCHEMA_STRING))
                .build(),
            readStreams.get(0),
            TABLE_SCHEMA,
            new TableRowParser(),
            TableRowJsonCoder.of(),
            new FakeBigQueryServices().withStorageClient(fakeStorageClient));
    BoundedReader<TableRow> reader = source.createReader(options);
    assertTrue(reader.start());
    assertEquals("A", reader.getCurrent().get("name"));
    BoundedSource<TableRow> residualSource = reader.splitAtFraction(0.83f);
    assertNotNull(residualSource);
    assertEquals("A", reader.getCurrent().get("name"));
    assertTrue(reader.advance());
    assertEquals("B", reader.getCurrent().get("name"));
    residualSource = reader.splitAtFraction(0.75f);
    assertNotNull(residualSource);
    assertEquals("B", reader.getCurrent().get("name"));
    assertTrue(reader.advance());
    assertEquals("C", reader.getCurrent().get("name"));
    assertTrue(reader.advance());
    assertEquals("D", reader.getCurrent().get("name"));
    assertFalse(reader.advance());
}
Also used: ReadStream (com.google.cloud.bigquery.storage.v1.ReadStream), TableRow (com.google.api.services.bigquery.model.TableRow), StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient), FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices), TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser), Test (org.junit.Test)
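
Each splitAtFraction call is served by the mocked SplitReadStream RPC: the reader keeps the returned primary stream, resuming at the offset just past the rows it has already produced, and hands back a residual source wrapping the remainder stream. A simplified sketch of that exchange (currentReadStream and storageClient are assumed names; the real logic lives in BigQueryStorageStreamSource):

// Ask the service to split the stream currently being read at the given fraction.
SplitReadStreamRequest request =
    SplitReadStreamRequest.newBuilder()
        .setName(currentReadStream.getName())
        .setFraction(0.83f)
        .build();
SplitReadStreamResponse response = storageClient.splitReadStream(request);
// Keep reading the primary stream from the next unread offset; the remainder
// stream becomes the residual source returned to the caller.
ReadStream primary = response.getPrimaryStream();
ReadStream remainder = response.getRemainderStream();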

Aggregations

StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient): 7
TableRow (com.google.api.services.bigquery.model.TableRow): 6
TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser): 6
FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices): 6
Test (org.junit.Test): 6
ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse): 5
ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession): 5
ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest): 4
ReadStream (com.google.cloud.bigquery.storage.v1.ReadStream): 3
ByteString (com.google.protobuf.ByteString): 3
ArrayList (java.util.ArrayList): 2
GenericRecord (org.apache.avro.generic.GenericRecord): 2
Table (com.google.api.services.bigquery.model.Table): 1
TableSchema (com.google.api.services.bigquery.model.TableSchema): 1
CreateReadSessionRequest (com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest): 1
Schema (org.apache.avro.Schema): 1
Nullable (org.checkerframework.checker.nullness.qual.Nullable): 1