Search in sources :

Example 1 with ReadSession

use of com.google.cloud.bigquery.storage.v1.ReadSession in project spark-bigquery-connector by GoogleCloudDataproc.

the class ReadSessionCreator method create.

/**
 * Creates a new ReadSession for parallel reads.
 *
 * <p>Some attributes are governed by the {@link ReadSessionCreatorConfig} that this object was
 * constructed with.
 *
 * @param table The table to create the session for.
 * @param selectedFields The columns to project in the created read session.
 * @param filter Optional row restriction (filter predicate) pushed down to the read session.
 * @return A {@link ReadSessionResponse} wrapping the created ReadSession and the table it reads from.
 */
public ReadSessionResponse create(TableId table, ImmutableList<String> selectedFields, Optional<String> filter) {
    TableInfo tableDetails = bigQueryClient.getTable(table);
    TableInfo actualTable = getActualTable(tableDetails, selectedFields, filter);
    StandardTableDefinition tableDefinition = actualTable.getDefinition();
    BigQueryReadClient bigQueryReadClient = bigQueryReadClientFactory.getBigQueryReadClient();
    String tablePath = toTablePath(actualTable.getTableId());
    CreateReadSessionRequest request = config.getRequestEncodedBase().map(value -> {
        try {
            return com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest.parseFrom(java.util.Base64.getDecoder().decode(value));
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw new RuntimeException("Couldn't decode:" + value, e);
        }
    }).orElse(CreateReadSessionRequest.newBuilder().build());
    ReadSession.Builder requestedSession = request.getReadSession().toBuilder();
    config.getTraceId().ifPresent(traceId -> requestedSession.setTraceId(traceId));
    TableReadOptions.Builder readOptions = requestedSession.getReadOptionsBuilder();
    if (!isInputTableAView(tableDetails)) {
        filter.ifPresent(readOptions::setRowRestriction);
    }
    readOptions.addAllSelectedFields(selectedFields);
    readOptions.setArrowSerializationOptions(ArrowSerializationOptions.newBuilder().setBufferCompression(config.getArrowCompressionCodec()).build());
    ReadSession readSession = bigQueryReadClient.createReadSession(
        request.newBuilder()
            .setParent("projects/" + bigQueryClient.getProjectId())
            .setReadSession(
                requestedSession
                    .setDataFormat(config.getReadDataFormat())
                    .setReadOptions(readOptions)
                    .setTable(tablePath)
                    .build())
            .setMaxStreamCount(
                getMaxNumPartitionsRequested(config.getMaxParallelism(), tableDefinition))
            .build());
    return new ReadSessionResponse(readSession, actualTable);
}
Also used : TableDefinition(com.google.cloud.bigquery.TableDefinition) Logger(org.slf4j.Logger) StandardTableDefinition(com.google.cloud.bigquery.StandardTableDefinition) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) TableId(com.google.cloud.bigquery.TableId) LoggerFactory(org.slf4j.LoggerFactory) BigQueryReadClient(com.google.cloud.bigquery.storage.v1.BigQueryReadClient) OptionalInt(java.util.OptionalInt) String.format(java.lang.String.format) UNSUPPORTED(com.google.cloud.bigquery.connector.common.BigQueryErrorCode.UNSUPPORTED) Stream(java.util.stream.Stream) ImmutableList(com.google.common.collect.ImmutableList) TableReadOptions(com.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) ArrowSerializationOptions(com.google.cloud.bigquery.storage.v1.ArrowSerializationOptions) Optional(java.util.Optional) TableInfo(com.google.cloud.bigquery.TableInfo) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest)
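
The decode branch above expects config.getRequestEncodedBase() to return a Base64-encoded, serialized CreateReadSessionRequest that seeds the session defaults. A minimal sketch of producing such a value (the compression codec and field choices are illustrative assumptions, not taken from the connector):

import com.google.cloud.bigquery.storage.v1.ArrowSerializationOptions;
import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest;
import com.google.cloud.bigquery.storage.v1.ReadSession;
import java.util.Base64;

// Build a partial request carrying only the defaults to pre-seed (illustrative values).
CreateReadSessionRequest base = CreateReadSessionRequest.newBuilder()
    .setReadSession(ReadSession.newBuilder()
        .setReadOptions(ReadSession.TableReadOptions.newBuilder()
            .setArrowSerializationOptions(ArrowSerializationOptions.newBuilder()
                .setBufferCompression(ArrowSerializationOptions.CompressionCodec.ZSTD))))
    .build();
// The create() method above parses this back with parseFrom(Base64.getDecoder().decode(...)).
String encodedBase = Base64.getEncoder().encodeToString(base.toByteArray());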

Example 2 with ReadSession

use of com.google.cloud.bigquery.storage.v1beta2.ReadSession in project java-bigquerystorage by googleapis.

the class ITBigQueryStorageTest method ProcessRowsAtSnapshot.

/**
 * Reads all the rows from the specified table.
 *
 * <p>For every row, the consumer is called for processing.
 *
 * @param table Fully qualified resource name of the table to read from.
 * @param snapshotInMillis Optional. If specified, all rows up to this timestamp will be returned.
 * @param filter Optional. If specified, it will be used to restrict returned data.
 * @param consumer Consumer that receives all Avro rows.
 * @throws IOException if reading or decoding the Avro rows fails.
 */
private void ProcessRowsAtSnapshot(String table, Long snapshotInMillis, String filter, AvroRowConsumer consumer) throws IOException {
    Preconditions.checkNotNull(table);
    Preconditions.checkNotNull(consumer);
    CreateReadSessionRequest.Builder createSessionRequestBuilder = CreateReadSessionRequest.newBuilder().setParent(parentProjectId).setMaxStreamCount(1).setReadSession(ReadSession.newBuilder().setTable(table).setDataFormat(DataFormat.AVRO).build());
    if (snapshotInMillis != null) {
        Timestamp snapshotTimestamp = Timestamp.newBuilder().setSeconds(snapshotInMillis / 1_000).setNanos((int) ((snapshotInMillis % 1000) * 1000000)).build();
        createSessionRequestBuilder.getReadSessionBuilder().setTableModifiers(TableModifiers.newBuilder().setSnapshotTime(snapshotTimestamp).build());
    }
    if (filter != null && !filter.isEmpty()) {
        createSessionRequestBuilder.getReadSessionBuilder().setReadOptions(TableReadOptions.newBuilder().setRowRestriction(filter).build());
    }
    ReadSession session = client.createReadSession(createSessionRequestBuilder.build());
    assertEquals(String.format("Did not receive expected number of streams for table '%s' CreateReadSession response:%n%s", table, session.toString()), 1, session.getStreamsCount());
    ReadRowsRequest readRowsRequest = ReadRowsRequest.newBuilder().setReadStream(session.getStreams(0).getName()).build();
    SimpleRowReader reader = new SimpleRowReader(new Schema.Parser().parse(session.getAvroSchema().getSchema()));
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
        reader.processRows(response.getAvroRows(), consumer);
    }
}
Also used : ReadRowsResponse(com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse) ReadSession(com.google.cloud.bigquery.storage.v1beta2.ReadSession) ReadRowsRequest(com.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest) Timestamp(com.google.protobuf.Timestamp) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest)
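
A hypothetical call site for this helper from within the same test class (the table path, snapshot age, and filter are illustrative; AvroRowConsumer is the single-method callback declared in the test's SimpleRowReader, and java.util.ArrayList/List plus Avro's GenericData are assumed to be imported):

// Illustrative only: read the table as of one minute ago and collect the "word" column.
// Called from a test method that declares throws IOException.
String table = "projects/bigquery-public-data/datasets/samples/tables/shakespeare";
long snapshotInMillis = System.currentTimeMillis() - 60_000L;
final List<String> words = new ArrayList<>();
ProcessRowsAtSnapshot(table, snapshotInMillis, /* filter = */ "word_count > 100",
    new AvroRowConsumer() {

        @Override
        public void accept(GenericData.Record record) {
            words.add(record.get("word").toString());
        }
    });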

Example 3 with ReadSession

use of com.google.cloud.bigquery.storage.v1beta2.ReadSession in project java-bigquerystorage by googleapis.

the class ITBigQueryStorageTest method testColumnSelection.

@Test
public void testColumnSelection() throws IOException {
    String table = BigQueryResource.FormatTableResource(/* projectId = */
    "bigquery-public-data", /* datasetId = */
    "samples", /* tableId = */
    "shakespeare");
    TableReadOptions options = TableReadOptions.newBuilder().addSelectedFields("word").addSelectedFields("word_count").setRowRestriction("word_count > 100").build();
    CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder().setParent(parentProjectId).setMaxStreamCount(1).setReadSession(ReadSession.newBuilder().setTable(table).setReadOptions(options).setDataFormat(DataFormat.AVRO).build()).build();
    ReadSession session = client.createReadSession(request);
    assertEquals(String.format("Did not receive expected number of streams for table '%s' CreateReadSession response:%n%s", table, session.toString()), 1, session.getStreamsCount());
    ReadRowsRequest readRowsRequest = ReadRowsRequest.newBuilder().setReadStream(session.getStreams(0).getName()).build();
    Schema avroSchema = new Schema.Parser().parse(session.getAvroSchema().getSchema());
    String actualSchemaMessage = String.format("Unexpected schema. Actual schema:%n%s", avroSchema.toString(/* pretty = */
    true));
    assertEquals(actualSchemaMessage, Schema.Type.RECORD, avroSchema.getType());
    assertEquals(actualSchemaMessage, "__root__", avroSchema.getName());
    assertEquals(actualSchemaMessage, 2, avroSchema.getFields().size());
    assertEquals(actualSchemaMessage, Schema.Type.STRING, avroSchema.getField("word").schema().getType());
    assertEquals(actualSchemaMessage, Schema.Type.LONG, avroSchema.getField("word_count").schema().getType());
    SimpleRowReader reader = new SimpleRowReader(avroSchema);
    long rowCount = 0;
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
        rowCount += response.getRowCount();
        reader.processRows(response.getAvroRows(), new AvroRowConsumer() {

            @Override
            public void accept(GenericData.Record record) {
                String rowAssertMessage = String.format("Row not matching expectations: %s", record.toString());
                Long wordCount = (Long) record.get("word_count");
                assertWithMessage(rowAssertMessage).that(wordCount).isGreaterThan(100L);
                Utf8 word = (Utf8) record.get("word");
                assertWithMessage(rowAssertMessage).that(word.length()).isGreaterThan(0);
            }
        });
    }
    assertEquals(1_333, rowCount);
}
Also used : AvroRowConsumer(com.google.cloud.bigquery.storage.v1beta2.it.SimpleRowReader.AvroRowConsumer) ReadSession(com.google.cloud.bigquery.storage.v1beta2.ReadSession) Schema(org.apache.avro.Schema) ReadRowsRequest(com.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest) GenericData(org.apache.avro.generic.GenericData) ReadRowsResponse(com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse) Utf8(org.apache.avro.util.Utf8) TableReadOptions(com.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest) Test(org.junit.Test)
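
SimpleRowReader itself is not shown in this listing; the following sketch shows the Avro decoding it presumably performs for each AvroRows block. This is an assumption about the helper's internals built from standard Avro and protobuf APIs, not its verbatim source:

import com.google.cloud.bigquery.storage.v1beta2.AvroRows;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;

// Decode every record in one AvroRows block and hand it to the consumer.
// Note: the same record instance is reused between reads, as Avro permits.
void processRows(Schema schema, AvroRows avroRows, AvroRowConsumer consumer) throws IOException {
    GenericDatumReader<GenericData.Record> datumReader = new GenericDatumReader<>(schema);
    BinaryDecoder decoder = DecoderFactory.get()
        .binaryDecoder(avroRows.getSerializedBinaryRows().toByteArray(), /* reuse = */ null);
    GenericData.Record row = null;
    while (!decoder.isEnd()) {
        row = datumReader.read(row, decoder);
        consumer.accept(row);
    }
}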

Example 4 with ReadSession

use of com.google.cloud.bigquery.storage.v1beta2.ReadSession in project java-bigquerystorage by googleapis.

the class ITBigQueryStorageTest method testFilter.

@Test
public void testFilter() throws IOException {
    String table = BigQueryResource.FormatTableResource(/* projectId = */
    "bigquery-public-data", /* datasetId = */
    "samples", /* tableId = */
    "shakespeare");
    TableReadOptions options = TableReadOptions.newBuilder().setRowRestriction("word_count > 100").build();
    CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder().setParent(parentProjectId).setMaxStreamCount(1).setReadSession(ReadSession.newBuilder().setTable(table).setReadOptions(options).setDataFormat(DataFormat.AVRO).build()).build();
    ReadSession session = client.createReadSession(request);
    assertEquals(String.format("Did not receive expected number of streams for table '%s' CreateReadSession response:%n%s", table, session.toString()), 1, session.getStreamsCount());
    ReadRowsRequest readRowsRequest = ReadRowsRequest.newBuilder().setReadStream(session.getStreams(0).getName()).build();
    SimpleRowReader reader = new SimpleRowReader(new Schema.Parser().parse(session.getAvroSchema().getSchema()));
    long rowCount = 0;
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
        rowCount += response.getRowCount();
        reader.processRows(response.getAvroRows(), new AvroRowConsumer() {

            @Override
            public void accept(GenericData.Record record) {
                Long wordCount = (Long) record.get("word_count");
                assertWithMessage("Row not matching expectations: %s", record.toString()).that(wordCount).isGreaterThan(100L);
            }
        });
    }
    assertEquals(1_333, rowCount);
}
Also used : AvroRowConsumer(com.google.cloud.bigquery.storage.v1beta2.it.SimpleRowReader.AvroRowConsumer) ReadSession(com.google.cloud.bigquery.storage.v1beta2.ReadSession) ReadRowsRequest(com.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest) GenericData(org.apache.avro.generic.GenericData) ReadRowsResponse(com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse) TableReadOptions(com.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest) Test(org.junit.Test)
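
The tests above pin setMaxStreamCount(1) so the stream-count assertion is deterministic. With a larger bound the service may return several streams, each read with its own ReadRowsRequest. A sketch reusing client, parentProjectId, and table from the test class (ReadStream comes from the same v1beta2 package; streams are read sequentially here for brevity but can be consumed in parallel):

CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
    .setParent(parentProjectId)
    // Upper bound only; the service may return fewer streams than requested.
    .setMaxStreamCount(4)
    .setReadSession(ReadSession.newBuilder()
        .setTable(table)
        .setDataFormat(DataFormat.AVRO)
        .setReadOptions(TableReadOptions.newBuilder().setRowRestriction("word_count > 100")))
    .build();
ReadSession session = client.createReadSession(request);
long rowCount = 0;
for (ReadStream readStream : session.getStreamsList()) {
    ReadRowsRequest readRowsRequest =
        ReadRowsRequest.newBuilder().setReadStream(readStream.getName()).build();
    for (ReadRowsResponse response : client.readRowsCallable().call(readRowsRequest)) {
        rowCount += response.getRowCount();
    }
}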

Example 5 with ReadSession

use of com.google.cloud.bigquery.storage.v1.ReadSession in project java-bigquerystorage by googleapis.

the class ITBigQueryStorageTest method ProcessRowsAtSnapshot.

/**
 * Reads all the rows from the specified table.
 *
 * <p>For every row, the consumer is called for processing.
 *
 * @param table Fully qualified resource name of the table to read from.
 * @param snapshotInMillis Optional. If specified, all rows up to this timestamp will be returned.
 * @param filter Optional. If specified, it will be used to restrict returned data.
 * @param consumer Consumer that receives all Avro rows.
 * @throws IOException if reading or decoding the Avro rows fails.
 */
private void ProcessRowsAtSnapshot(String table, Long snapshotInMillis, String filter, AvroRowConsumer consumer) throws IOException {
    Preconditions.checkNotNull(table);
    Preconditions.checkNotNull(consumer);
    CreateReadSessionRequest.Builder createSessionRequestBuilder = CreateReadSessionRequest.newBuilder().setParent(parentProjectId).setMaxStreamCount(1).setReadSession(ReadSession.newBuilder().setTable(table).setDataFormat(DataFormat.AVRO).build());
    if (snapshotInMillis != null) {
        Timestamp snapshotTimestamp = Timestamp.newBuilder().setSeconds(snapshotInMillis / 1_000).setNanos((int) ((snapshotInMillis % 1000) * 1000000)).build();
        createSessionRequestBuilder.getReadSessionBuilder().setTableModifiers(TableModifiers.newBuilder().setSnapshotTime(snapshotTimestamp).build());
    }
    if (filter != null && !filter.isEmpty()) {
        createSessionRequestBuilder.getReadSessionBuilder().setReadOptions(TableReadOptions.newBuilder().setRowRestriction(filter).build());
    }
    ReadSession session = client.createReadSession(createSessionRequestBuilder.build());
    assertEquals(String.format("Did not receive expected number of streams for table '%s' CreateReadSession response:%n%s", table, session.toString()), 1, session.getStreamsCount());
    ReadRowsRequest readRowsRequest = ReadRowsRequest.newBuilder().setReadStream(session.getStreams(0).getName()).build();
    SimpleRowReader reader = new SimpleRowReader(new Schema.Parser().parse(session.getAvroSchema().getSchema()));
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
        reader.processRows(response.getAvroRows(), consumer);
    }
}
Also used : ReadRowsResponse(com.google.cloud.bigquery.storage.v1.ReadRowsResponse) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) ReadRowsRequest(com.google.cloud.bigquery.storage.v1.ReadRowsRequest) Timestamp(com.google.protobuf.Timestamp) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest)
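
If a ReadRows stream is interrupted, the request can be re-issued with offset set to the number of rows already consumed from that stream. A sketch reusing session, client, reader, and consumer from the helper above (the offset value is illustrative):

// Illustrative resume: skip the rows that were already processed before the interruption.
long rowsAlreadyRead = 1_000L;
ReadRowsRequest resumeRequest = ReadRowsRequest.newBuilder()
    .setReadStream(session.getStreams(0).getName())
    .setOffset(rowsAlreadyRead)
    .build();
for (ReadRowsResponse response : client.readRowsCallable().call(resumeRequest)) {
    reader.processRows(response.getAvroRows(), consumer);
}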

Aggregations

ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession): 29
Test (org.junit.Test): 23
ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest): 17
ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse): 17
CreateReadSessionRequest (com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest): 15
StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient): 14
FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices): 13
TableRow (com.google.api.services.bigquery.model.TableRow): 10
TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser): 9
Table (com.google.api.services.bigquery.model.Table): 8
TableReference (com.google.api.services.bigquery.model.TableReference): 7
ByteString (com.google.protobuf.ByteString): 7
TableReadOptions (com.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions): 6
ReadSession (com.google.cloud.bigquery.storage.v1beta2.ReadSession): 6
GenericRecord (org.apache.avro.generic.GenericRecord): 6
TableInfo (com.google.cloud.bigquery.TableInfo): 5
ReadRowsRequest (com.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest): 5
ReadRowsResponse (com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse): 5
ArrayList (java.util.ArrayList): 5
TableId (com.google.cloud.bigquery.TableId): 4