
Example 6 with ReadSession

Use of com.google.cloud.bigquery.storage.v1.ReadSession in project java-bigquerystorage by googleapis.

From class ITBigQueryStorageTest, method testColumnSelection:

@Test
public void testColumnSelection() throws IOException {
    String table =
        BigQueryResource.FormatTableResource(
            /* projectId = */ "bigquery-public-data",
            /* datasetId = */ "samples",
            /* tableId = */ "shakespeare");
    TableReadOptions options =
        TableReadOptions.newBuilder()
            .addSelectedFields("word")
            .addSelectedFields("word_count")
            .setRowRestriction("word_count > 100")
            .build();
    CreateReadSessionRequest request =
        CreateReadSessionRequest.newBuilder()
            .setParent(parentProjectId)
            .setMaxStreamCount(1)
            .setReadSession(
                ReadSession.newBuilder()
                    .setTable(table)
                    .setReadOptions(options)
                    .setDataFormat(DataFormat.AVRO)
                    .build())
            .build();
    ReadSession session = client.createReadSession(request);
    assertEquals(
        String.format(
            "Did not receive expected number of streams for table '%s' CreateReadSession response:%n%s",
            table, session.toString()),
        1,
        session.getStreamsCount());
    ReadRowsRequest readRowsRequest =
        ReadRowsRequest.newBuilder().setReadStream(session.getStreams(0).getName()).build();
    Schema avroSchema = new Schema.Parser().parse(session.getAvroSchema().getSchema());
    String actualSchemaMessage =
        String.format(
            "Unexpected schema. Actual schema:%n%s", avroSchema.toString(/* pretty = */ true));
    assertEquals(actualSchemaMessage, Schema.Type.RECORD, avroSchema.getType());
    assertEquals(actualSchemaMessage, "__root__", avroSchema.getName());
    assertEquals(actualSchemaMessage, 2, avroSchema.getFields().size());
    assertEquals(actualSchemaMessage, Schema.Type.STRING, avroSchema.getField("word").schema().getType());
    assertEquals(actualSchemaMessage, Schema.Type.LONG, avroSchema.getField("word_count").schema().getType());
    SimpleRowReader reader = new SimpleRowReader(avroSchema);
    long rowCount = 0;
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
        rowCount += response.getRowCount();
        reader.processRows(response.getAvroRows(), new AvroRowConsumer() {

            @Override
            public void accept(GenericData.Record record) {
                String rowAssertMessage = String.format("Row not matching expectations: %s", record.toString());
                Long wordCount = (Long) record.get("word_count");
                assertWithMessage(rowAssertMessage).that(wordCount).isGreaterThan(100L);
                Utf8 word = (Utf8) record.get("word");
                assertWithMessage(rowAssertMessage).that(word.length()).isGreaterThan(0);
            }
        });
    }
    assertEquals(1_333, rowCount);
}
Also used : AvroRowConsumer(com.google.cloud.bigquery.storage.v1.it.SimpleRowReader.AvroRowConsumer) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) Schema(org.apache.avro.Schema) ReadRowsRequest(com.google.cloud.bigquery.storage.v1.ReadRowsRequest) GenericData(org.apache.avro.generic.GenericData) ReadRowsResponse(com.google.cloud.bigquery.storage.v1.ReadRowsResponse) Utf8(org.apache.avro.util.Utf8) TableReadOptions(com.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) Test(org.junit.Test)
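
The SimpleRowReader helper and its AvroRowConsumer callback are defined elsewhere in the test sources, so they are not shown above. As a rough guide to what the test depends on, here is a minimal sketch of such a reader built on the standard Avro decoding APIs; the class shape mirrors the test's usage, but the body is an assumption, not the repository's exact code.

import com.google.cloud.bigquery.storage.v1.AvroRows;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;

// Sketch only: a minimal reader matching the usage in testColumnSelection above.
public class SimpleRowReader {

    // Callback invoked once per decoded row.
    public interface AvroRowConsumer {
        void accept(GenericData.Record record);
    }

    private final GenericDatumReader<GenericRecord> datumReader;
    // Decoder and row are reused across blocks to avoid reallocating buffers.
    private BinaryDecoder decoder = null;
    private GenericRecord row = null;

    public SimpleRowReader(Schema schema) {
        datumReader = new GenericDatumReader<>(schema);
    }

    // Decodes every row in one AvroRows block and hands each to the consumer.
    public void processRows(AvroRows avroRows, AvroRowConsumer consumer) throws IOException {
        decoder =
            DecoderFactory.get()
                .binaryDecoder(avroRows.getSerializedBinaryRows().toByteArray(), decoder);
        while (!decoder.isEnd()) {
            row = datumReader.read(row, decoder);
            consumer.accept((GenericData.Record) row);
        }
    }
}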

Example 7 with ReadSession

Use of com.google.cloud.bigquery.storage.v1.ReadSession in project java-bigquerystorage by googleapis.

From class StorageArrowSample, method main:

public static void main(String... args) throws Exception {
    // Sets your Google Cloud Platform project ID.
    // String projectId = "YOUR_PROJECT_ID";
    String projectId = args[0];
    // Epoch milliseconds overflow an int, so parse the optional snapshot time as a long.
    Long snapshotMillis = null;
    if (args.length > 1) {
        snapshotMillis = Long.parseLong(args[1]);
    }
    try (BigQueryReadClient client = BigQueryReadClient.create()) {
        String parent = String.format("projects/%s", projectId);
        // This example uses baby name data from the public datasets.
        String srcTable = String.format("projects/%s/datasets/%s/tables/%s", "bigquery-public-data", "usa_names", "usa_1910_current");
        // We specify the columns to be projected by adding them to the selected fields,
        // and set a simple filter to restrict which rows are transmitted.
        TableReadOptions options =
            TableReadOptions.newBuilder()
                .addSelectedFields("name")
                .addSelectedFields("number")
                .addSelectedFields("state")
                .setRowRestriction("state = \"WA\"")
                .build();
        // Start specifying the read session we want created.
        ReadSession.Builder sessionBuilder =
            ReadSession.newBuilder()
                .setTable(srcTable)
                .setDataFormat(DataFormat.ARROW)
                .setReadOptions(options);
        // Optionally specify the snapshot time.  When unspecified, snapshot time is "now".
        if (snapshotMillis != null) {
            Timestamp t =
                Timestamp.newBuilder()
                    .setSeconds(snapshotMillis / 1000)
                    .setNanos((int) ((snapshotMillis % 1000) * 1000000))
                    .build();
            TableModifiers modifiers = TableModifiers.newBuilder().setSnapshotTime(t).build();
            sessionBuilder.setTableModifiers(modifiers);
        }
        // Begin building the session creation request.
        CreateReadSessionRequest.Builder builder =
            CreateReadSessionRequest.newBuilder()
                .setParent(parent)
                .setReadSession(sessionBuilder)
                .setMaxStreamCount(1);
        ReadSession session = client.createReadSession(builder.build());
        // Setup a simple reader and start a read session.
        try (SimpleRowReader reader = new SimpleRowReader(session.getArrowSchema())) {
            // Assert that there are streams available in the session.  An empty table may not have
            // data available.  If no sessions are available for an anonymous (cached) table, consider
            // writing results of a query to a named table rather than consuming cached results
            // directly.
            Preconditions.checkState(session.getStreamsCount() > 0);
            // Use the first stream to perform reading.
            String streamName = session.getStreams(0).getName();
            ReadRowsRequest readRowsRequest = ReadRowsRequest.newBuilder().setReadStream(streamName).build();
            // Process each block of rows as they arrive and decode using our simple row reader.
            ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
            for (ReadRowsResponse response : stream) {
                Preconditions.checkState(response.hasArrowRecordBatch());
                reader.processRows(response.getArrowRecordBatch());
            }
        }
    }
}
Also used : ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) TableModifiers(com.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers) ReadRowsRequest(com.google.cloud.bigquery.storage.v1.ReadRowsRequest) Timestamp(com.google.protobuf.Timestamp) BigQueryReadClient(com.google.cloud.bigquery.storage.v1.BigQueryReadClient) ReadRowsResponse(com.google.cloud.bigquery.storage.v1.ReadRowsResponse) TableReadOptions(com.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest)
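
The Arrow flavor of SimpleRowReader is likewise defined elsewhere in the sample sources. A minimal sketch built on the Arrow Java IPC APIs follows; it tracks the usage above (a constructor taking the session's ArrowSchema, a processRows method per record batch, and AutoCloseable for releasing buffers), but the details are an assumption rather than the repository's exact code.

import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch;
import com.google.cloud.bigquery.storage.v1.ArrowSchema;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.VectorLoader;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.ipc.ReadChannel;
import org.apache.arrow.vector.ipc.message.MessageSerializer;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel;

// Sketch only: decodes serialized Arrow batches from the Storage API.
public class SimpleRowReader implements AutoCloseable {

    private final BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
    private final VectorSchemaRoot root;
    private final VectorLoader loader;

    public SimpleRowReader(ArrowSchema arrowSchema) throws IOException {
        // Deserialize the session's Arrow schema and allocate one vector per field.
        org.apache.arrow.vector.types.pojo.Schema schema =
            MessageSerializer.deserializeSchema(
                new ReadChannel(
                    new ByteArrayReadableSeekableByteChannel(
                        arrowSchema.getSerializedSchema().toByteArray())));
        List<FieldVector> vectors = new ArrayList<>();
        for (Field field : schema.getFields()) {
            vectors.add(field.createVector(allocator));
        }
        root = new VectorSchemaRoot(vectors);
        loader = new VectorLoader(root);
    }

    public void processRows(ArrowRecordBatch batch) throws IOException {
        // Deserialize the wire batch, load it into the schema root,
        // and print it as TSV (a stand-in for real row processing).
        org.apache.arrow.vector.ipc.message.ArrowRecordBatch deserialized =
            MessageSerializer.deserializeRecordBatch(
                new ReadChannel(
                    new ByteArrayReadableSeekableByteChannel(
                        batch.getSerializedRecordBatch().toByteArray())),
                allocator);
        loader.load(deserialized);
        deserialized.close();
        System.out.println(root.contentToTSVString());
        root.clear();
    }

    @Override
    public void close() {
        root.close();
        allocator.close();
    }
}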

Example 8 with ReadSession

Use of com.google.cloud.bigquery.storage.v1.ReadSession in project java-bigquerystorage by googleapis.

From class StorageSample, method main:

public static void main(String... args) throws Exception {
    // Sets your Google Cloud Platform project ID.
    // String projectId = "YOUR_PROJECT_ID";
    String projectId = args[0];
    // Epoch milliseconds overflow an int, so parse the optional snapshot time as a long.
    Long snapshotMillis = null;
    if (args.length > 1) {
        snapshotMillis = Long.parseLong(args[1]);
    }
    try (BigQueryReadClient client = BigQueryReadClient.create()) {
        String parent = String.format("projects/%s", projectId);
        // This example uses baby name data from the public datasets.
        String srcTable = String.format("projects/%s/datasets/%s/tables/%s", "bigquery-public-data", "usa_names", "usa_1910_current");
        // We specify the columns to be projected by adding them to the selected fields,
        // and set a simple filter to restrict which rows are transmitted.
        TableReadOptions options =
            TableReadOptions.newBuilder()
                .addSelectedFields("name")
                .addSelectedFields("number")
                .addSelectedFields("state")
                .setRowRestriction("state = \"WA\"")
                .build();
        // Start specifying the read session we want created.
        ReadSession.Builder sessionBuilder =
            ReadSession.newBuilder()
                .setTable(srcTable)
                .setDataFormat(DataFormat.AVRO)
                .setReadOptions(options);
        // Optionally specify the snapshot time.  When unspecified, snapshot time is "now".
        if (snapshotMillis != null) {
            Timestamp t =
                Timestamp.newBuilder()
                    .setSeconds(snapshotMillis / 1000)
                    .setNanos((int) ((snapshotMillis % 1000) * 1000000))
                    .build();
            TableModifiers modifiers = TableModifiers.newBuilder().setSnapshotTime(t).build();
            sessionBuilder.setTableModifiers(modifiers);
        }
        // Begin building the session creation request.
        CreateReadSessionRequest.Builder builder =
            CreateReadSessionRequest.newBuilder()
                .setParent(parent)
                .setReadSession(sessionBuilder)
                .setMaxStreamCount(1);
        // Request the session creation.
        ReadSession session = client.createReadSession(builder.build());
        SimpleRowReader reader = new SimpleRowReader(new Schema.Parser().parse(session.getAvroSchema().getSchema()));
        // Assert that there are streams available in the session.  An empty table may not have
        // data available.  If no sessions are available for an anonymous (cached) table, consider
        // writing results of a query to a named table rather than consuming cached results directly.
        Preconditions.checkState(session.getStreamsCount() > 0);
        // Use the first stream to perform reading.
        String streamName = session.getStreams(0).getName();
        ReadRowsRequest readRowsRequest = ReadRowsRequest.newBuilder().setReadStream(streamName).build();
        // Process each block of rows as they arrive and decode using our simple row reader.
        ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
        for (ReadRowsResponse response : stream) {
            Preconditions.checkState(response.hasAvroRows());
            reader.processRows(response.getAvroRows());
        }
    }
}
Also used : ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) TableModifiers(com.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers) ReadRowsRequest(com.google.cloud.bigquery.storage.v1.ReadRowsRequest) Timestamp(com.google.protobuf.Timestamp) BigQueryReadClient(com.google.cloud.bigquery.storage.v1.BigQueryReadClient) ReadRowsResponse(com.google.cloud.bigquery.storage.v1.ReadRowsResponse) TableReadOptions(com.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest)
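
The sample takes the project ID as args[0] and an optional snapshot time, in epoch milliseconds, as args[1]. To make the Timestamp conversion above concrete, here is a worked fragment with an assumed input value (Timestamp is com.google.protobuf.Timestamp, as in the imports above):

// Assumed input: 2020-01-01T00:00:00.123Z as epoch milliseconds.
long snapshotMillis = 1_577_836_800_123L;
Timestamp t =
    Timestamp.newBuilder()
        .setSeconds(snapshotMillis / 1000)                   // 1577836800
        .setNanos((int) ((snapshotMillis % 1000) * 1000000)) // 123000000
        .build();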

Example 9 with ReadSession

Use of com.google.cloud.bigquery.storage.v1.ReadSession in project beam by apache.

From class BigQueryStorageSourceBase, method split:

@Override
public List<BigQueryStorageStreamSource<T>> split(long desiredBundleSizeBytes, PipelineOptions options) throws Exception {
    BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
    Table targetTable = getTargetTable(bqOptions);
    ReadSession.Builder readSessionBuilder = ReadSession.newBuilder();
    if (targetTable != null) {
        readSessionBuilder.setTable(BigQueryHelpers.toTableResourceName(targetTable.getTableReference()));
    } else {
        // If the table does not exist targetTable will be null.
        // Construct the table id if we can generate it. For error recording/logging.
        @Nullable String tableReferenceId = getTargetTableId(bqOptions);
        if (tableReferenceId != null) {
            readSessionBuilder.setTable(tableReferenceId);
        }
    }
    if (selectedFieldsProvider != null || rowRestrictionProvider != null) {
        ReadSession.TableReadOptions.Builder tableReadOptionsBuilder = ReadSession.TableReadOptions.newBuilder();
        if (selectedFieldsProvider != null) {
            tableReadOptionsBuilder.addAllSelectedFields(selectedFieldsProvider.get());
        }
        if (rowRestrictionProvider != null) {
            tableReadOptionsBuilder.setRowRestriction(rowRestrictionProvider.get());
        }
        readSessionBuilder.setReadOptions(tableReadOptionsBuilder);
    }
    if (format != null) {
        readSessionBuilder.setDataFormat(format);
    }
    int streamCount = 0;
    if (desiredBundleSizeBytes > 0) {
        long tableSizeBytes = (targetTable != null) ? targetTable.getNumBytes() : 0;
        streamCount = (int) Math.min(tableSizeBytes / desiredBundleSizeBytes, MAX_SPLIT_COUNT);
    }
    streamCount = Math.max(streamCount, MIN_SPLIT_COUNT);
    CreateReadSessionRequest createReadSessionRequest =
        CreateReadSessionRequest.newBuilder()
            .setParent(
                BigQueryHelpers.toProjectResourceName(
                    bqOptions.getBigQueryProject() == null
                        ? bqOptions.getProject()
                        : bqOptions.getBigQueryProject()))
            .setReadSession(readSessionBuilder)
            .setMaxStreamCount(streamCount)
            .build();
    ReadSession readSession;
    try (StorageClient client = bqServices.getStorageClient(bqOptions)) {
        readSession = client.createReadSession(createReadSessionRequest);
        LOG.info("Sent BigQuery Storage API CreateReadSession request '{}'; received response '{}'.", createReadSessionRequest, readSession);
    }
    if (readSession.getStreamsList().isEmpty()) {
        // The underlying table is empty or all rows have been pruned.
        return ImmutableList.of();
    }
    Schema sessionSchema;
    if (readSession.getDataFormat() == DataFormat.ARROW) {
        org.apache.arrow.vector.types.pojo.Schema schema =
            ArrowConversion.arrowSchemaFromInput(
                readSession.getArrowSchema().getSerializedSchema().newInput());
        org.apache.beam.sdk.schemas.Schema beamSchema =
            ArrowConversion.ArrowSchemaTranslator.toBeamSchema(schema);
        sessionSchema = AvroUtils.toAvroSchema(beamSchema);
    } else if (readSession.getDataFormat() == DataFormat.AVRO) {
        sessionSchema = new Schema.Parser().parse(readSession.getAvroSchema().getSchema());
    } else {
        throw new IllegalArgumentException("data is not in a supported dataFormat: " + readSession.getDataFormat());
    }
    TableSchema trimmedSchema = BigQueryAvroUtils.trimBigQueryTableSchema(targetTable.getSchema(), sessionSchema);
    List<BigQueryStorageStreamSource<T>> sources = Lists.newArrayList();
    for (ReadStream readStream : readSession.getStreamsList()) {
        sources.add(BigQueryStorageStreamSource.create(readSession, readStream, trimmedSchema, parseFn, outputCoder, bqServices));
    }
    return ImmutableList.copyOf(sources);
}
Also used : TableSchema(com.google.api.services.bigquery.model.TableSchema) Schema(org.apache.avro.Schema) TableSchema(com.google.api.services.bigquery.model.TableSchema) ReadStream(com.google.cloud.bigquery.storage.v1.ReadStream) Table(com.google.api.services.bigquery.model.Table) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) StorageClient(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient) Nullable(org.checkerframework.checker.nullness.qual.Nullable) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest)
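
The stream-count arithmetic in split is worth spelling out: the request asks for the table size divided by the desired bundle size, capped at MAX_SPLIT_COUNT and floored at MIN_SPLIT_COUNT. Those constants are defined elsewhere in the class; the values in the sketch below are assumptions for illustration, not Beam's actual settings.

// Illustration only: these constant values are assumptions, not Beam's actual settings.
static final int MIN_SPLIT_COUNT = 10;
static final int MAX_SPLIT_COUNT = 10_000;

static int desiredStreamCount(long tableSizeBytes, long desiredBundleSizeBytes) {
    int streamCount = 0;
    if (desiredBundleSizeBytes > 0) {
        streamCount = (int) Math.min(tableSizeBytes / desiredBundleSizeBytes, MAX_SPLIT_COUNT);
    }
    // Never request fewer than the minimum; the service may still return fewer streams.
    return Math.max(streamCount, MIN_SPLIT_COUNT);
}

// Example: a 64 GiB table with 256 MiB bundles requests min(256, 10_000) = 256 streams.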

Example 10 with ReadSession

Use of com.google.cloud.bigquery.storage.v1.ReadSession in project beam by apache.

From class BigQueryIOStorageQueryTest, method testQuerySourceInitialSplit_EmptyResult:

@Test
public void testQuerySourceInitialSplit_EmptyResult() throws Exception {
    TableReference sourceTableRef = BigQueryHelpers.parseTableSpec("project:dataset.table");
    fakeDatasetService.createDataset(sourceTableRef.getProjectId(), sourceTableRef.getDatasetId(), "asia-northeast1", "Fake plastic tree^H^H^H^Htables", null);
    fakeDatasetService.createTable(new Table().setTableReference(sourceTableRef).setLocation("asia-northeast1"));
    Table queryResultTable = new Table().setSchema(TABLE_SCHEMA).setNumBytes(0L);
    String encodedQuery = FakeBigQueryServices.encodeQueryResult(queryResultTable);
    fakeJobService.expectDryRunQuery(
        options.getProject(),
        encodedQuery,
        new JobStatistics()
            .setQuery(
                new JobStatistics2()
                    .setTotalBytesProcessed(1024L * 1024L)
                    .setReferencedTables(ImmutableList.of(sourceTableRef))));
    String stepUuid = "testStepUuid";
    TableReference tempTableReference =
        createTempTableReference(
            options.getProject(),
            BigQueryResourceNaming.createJobIdPrefix(options.getJobName(), stepUuid, JobType.QUERY),
            Optional.empty());
    CreateReadSessionRequest expectedRequest =
        CreateReadSessionRequest.newBuilder()
            .setParent("projects/" + options.getProject())
            .setReadSession(
                ReadSession.newBuilder()
                    .setTable(BigQueryHelpers.toTableResourceName(tempTableReference)))
            .setMaxStreamCount(10)
            .build();
    ReadSession emptyReadSession = ReadSession.newBuilder().build();
    StorageClient fakeStorageClient = mock(StorageClient.class);
    when(fakeStorageClient.createReadSession(expectedRequest)).thenReturn(emptyReadSession);
    BigQueryStorageQuerySource<TableRow> querySource =
        BigQueryStorageQuerySource.create(
            stepUuid,
            ValueProvider.StaticValueProvider.of(encodedQuery),
            /* flattenResults = */ true,
            /* useLegacySql = */ true,
            /* priority = */ QueryPriority.BATCH,
            /* location = */ null,
            /* queryTempDataset = */ null,
            /* kmsKey = */ null,
            null,
            new TableRowParser(),
            TableRowJsonCoder.of(),
            new FakeBigQueryServices()
                .withDatasetService(fakeDatasetService)
                .withJobService(fakeJobService)
                .withStorageClient(fakeStorageClient));
    List<? extends BoundedSource<TableRow>> sources = querySource.split(1024L, options);
    assertTrue(sources.isEmpty());
}
Also used : JobStatistics(com.google.api.services.bigquery.model.JobStatistics) JobStatistics2(com.google.api.services.bigquery.model.JobStatistics2) Table(com.google.api.services.bigquery.model.Table) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) StorageClient(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient) ByteString(com.google.protobuf.ByteString) TableRowParser(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser) TableReference(com.google.api.services.bigquery.model.TableReference) BigQueryResourceNaming.createTempTableReference(org.apache.beam.sdk.io.gcp.bigquery.BigQueryResourceNaming.createTempTableReference) TableRow(com.google.api.services.bigquery.model.TableRow) FakeBigQueryServices(org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) Test(org.junit.Test)

Aggregations

Types aggregated across all examples, with usage counts:

ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession) 29
Test (org.junit.Test) 23
ReadRowsRequest (com.google.cloud.bigquery.storage.v1.ReadRowsRequest) 17
ReadRowsResponse (com.google.cloud.bigquery.storage.v1.ReadRowsResponse) 17
CreateReadSessionRequest (com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) 15
StorageClient (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient) 14
FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices) 13
TableRow (com.google.api.services.bigquery.model.TableRow) 10
TableRowParser (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TableRowParser) 9
Table (com.google.api.services.bigquery.model.Table) 8
TableReference (com.google.api.services.bigquery.model.TableReference) 7
ByteString (com.google.protobuf.ByteString) 7
TableReadOptions (com.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) 6
ReadSession (com.google.cloud.bigquery.storage.v1beta2.ReadSession) 6
GenericRecord (org.apache.avro.generic.GenericRecord) 6
TableInfo (com.google.cloud.bigquery.TableInfo) 5
ReadRowsRequest (com.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest) 5
ReadRowsResponse (com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse) 5
ArrayList (java.util.ArrayList) 5
TableId (com.google.cloud.bigquery.TableId) 4