
Example 31 with TableReference

Use of com.google.api.services.bigquery.model.TableReference in the apache/beam project.

From class WriteRename, method startWriteRename:

private PendingJobData startWriteRename(TableDestination finalTableDestination, Iterable<WriteTables.Result> tempTableNames, ProcessContext c, BoundedWindow window) throws Exception {
    // The pane may have advanced either here due to triggering or due to an upstream trigger. We
    // check the upstream trigger to handle the case where an earlier pane triggered the
    // single-partition path. If this happened, then the table will already exist, so we want to
    // append to the table.
    boolean isFirstPane = Iterables.getFirst(tempTableNames, null).isFirstPane() && c.pane().isFirst();
    WriteDisposition writeDisposition = isFirstPane ? firstPaneWriteDisposition : WriteDisposition.WRITE_APPEND;
    CreateDisposition createDisposition = isFirstPane ? firstPaneCreateDisposition : CreateDisposition.CREATE_NEVER;
    List<TableReference> tempTables =
        StreamSupport.stream(tempTableNames.spliterator(), false)
            .map(result -> BigQueryHelpers.fromJsonString(result.getTableName(), TableReference.class))
            .collect(Collectors.toList());
    // Make sure each destination table gets a unique job id.
    String jobIdPrefix =
        BigQueryResourceNaming.createJobIdWithDestination(
            c.sideInput(jobIdToken), finalTableDestination, -1, c.pane().getIndex());
    BigQueryHelpers.PendingJob retryJob =
        startCopy(
            bqServices.getJobService(c.getPipelineOptions().as(BigQueryOptions.class)),
            getDatasetService(c.getPipelineOptions().as(BigQueryOptions.class)),
            jobIdPrefix,
            finalTableDestination.getTableReference(),
            tempTables,
            writeDisposition,
            createDisposition,
            kmsKey,
            loadJobProjectId);
    return new PendingJobData(retryJob, finalTableDestination, tempTables, window);
}
Also used : KV(org.apache.beam.sdk.values.KV) PendingJobManager(org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.PendingJobManager) LoggerFactory(org.slf4j.LoggerFactory) JobReference(com.google.api.services.bigquery.model.JobReference) Multimap(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Multimap) JobService(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.JobService) Map(java.util.Map) Iterables(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables) StreamSupport(java.util.stream.StreamSupport) ArrayListMultimap(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ArrayListMultimap) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) Nullable(javax.annotation.Nullable) ValueProvider(org.apache.beam.sdk.options.ValueProvider) DoFn(org.apache.beam.sdk.transforms.DoFn) DisplayData(org.apache.beam.sdk.transforms.display.DisplayData) TableReference(com.google.api.services.bigquery.model.TableReference) Logger(org.slf4j.Logger) Collection(java.util.Collection) Lists(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists) IOException(java.io.IOException) CreateDisposition(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition) JobConfigurationTableCopy(com.google.api.services.bigquery.model.JobConfigurationTableCopy) Collectors(java.util.stream.Collectors) DatasetService(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.DatasetService) List(java.util.List) WriteDisposition(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition) PCollectionView(org.apache.beam.sdk.values.PCollectionView) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow) EncryptionConfiguration(com.google.api.services.bigquery.model.EncryptionConfiguration) TableReference(com.google.api.services.bigquery.model.TableReference) CreateDisposition(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition) WriteDisposition(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition)
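The temp tables above arrive as JSON-encoded TableReference strings inside WriteTables.Result and are parsed back before the copy job starts. A minimal standalone sketch of that round trip, assuming BigQueryHelpers.toJsonString/fromJsonString behave as their names suggest (project, dataset, and table names are hypothetical; this is not part of WriteRename itself):

import com.google.api.services.bigquery.model.TableReference;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers;

public class TableReferenceRoundTrip {
    public static void main(String[] args) {
        // Build a fully qualified table reference (hypothetical names).
        TableReference ref = new TableReference()
            .setProjectId("my-project")
            .setDatasetId("my_dataset")
            .setTableId("my_table");

        // Serialize to the JSON form that WriteTables.Result carries around...
        String json = BigQueryHelpers.toJsonString(ref);

        // ...and parse it back, as startWriteRename does for each temp table.
        TableReference parsed = BigQueryHelpers.fromJsonString(json, TableReference.class);
        System.out.println(parsed.getProjectId() + ":" + parsed.getDatasetId() + "." + parsed.getTableId());
    }
}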

Example 32 with TableReference

Use of com.google.api.services.bigquery.model.TableReference in the apache/beam project.

From class BigQueryServicesImplTest, method testInsertRetrySelectRows:

/**
 * Tests that {@link DatasetServiceImpl#insertAll} retries selected rows on failure.
 */
@Test
public void testInsertRetrySelectRows() throws Exception {
    TableReference ref = new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
    List<FailsafeValueInSingleWindow<TableRow, TableRow>> rows = ImmutableList.of(wrapValue(new TableRow().set("row", "a")), wrapValue(new TableRow().set("row", "b")));
    List<String> insertIds = ImmutableList.of("a", "b");
    final TableDataInsertAllResponse bFailed =
        new TableDataInsertAllResponse()
            .setInsertErrors(
                ImmutableList.of(
                    new InsertErrors().setIndex(1L).setErrors(ImmutableList.of(new ErrorProto()))));
    final TableDataInsertAllResponse allRowsSucceeded = new TableDataInsertAllResponse();
    setupMockResponses(response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(bFailed));
    }, response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(allRowsSucceeded));
    });
    DatasetServiceImpl dataService = new DatasetServiceImpl(bigquery, null, PipelineOptionsFactory.create());
    dataService.insertAll(
        ref,
        rows,
        insertIds,
        BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
        TEST_BACKOFF,
        new MockSleeper(),
        InsertRetryPolicy.alwaysRetry(),
        null,
        null,
        false,
        false,
        false,
        null);
    verifyAllResponsesAreRead();
    verifyWriteMetricWasSet("project", "dataset", "table", "unknown", 1);
    verifyWriteMetricWasSet("project", "dataset", "table", "ok", 1);
}
Also used : TableReference(com.google.api.services.bigquery.model.TableReference) ErrorProto(com.google.api.services.bigquery.model.ErrorProto) DatasetServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) TableRow(com.google.api.services.bigquery.model.TableRow) TableDataInsertAllResponse(com.google.api.services.bigquery.model.TableDataInsertAllResponse) InsertErrors(com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors) Matchers.containsString(org.hamcrest.Matchers.containsString) FailsafeValueInSingleWindow(org.apache.beam.sdk.values.FailsafeValueInSingleWindow) MockSleeper(com.google.api.client.testing.util.MockSleeper) Test(org.junit.Test)
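The retry above is driven by TableDataInsertAllResponse.InsertErrors, whose index field points back into the submitted row list, so only the failed rows are resubmitted. A small illustrative sketch of reading that mapping (class and variable names here are my own, not from the test):

import com.google.api.services.bigquery.model.ErrorProto;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors;
import com.google.api.services.bigquery.model.TableRow;
import java.util.Arrays;
import java.util.List;

public class InsertErrorInspection {
    public static void main(String[] args) {
        List<TableRow> rows = Arrays.asList(
            new TableRow().set("row", "a"),
            new TableRow().set("row", "b"));

        // Mirror the first response in the test: the row at index 1 failed.
        TableDataInsertAllResponse response = new TableDataInsertAllResponse()
            .setInsertErrors(Arrays.asList(
                new InsertErrors()
                    .setIndex(1L)
                    .setErrors(Arrays.asList(new ErrorProto().setReason("backendError")))));

        // Collect only the failed rows so they can be resubmitted on the next attempt,
        // which is what insertAll does under InsertRetryPolicy.alwaysRetry().
        if (response.getInsertErrors() != null) {
            for (InsertErrors error : response.getInsertErrors()) {
                TableRow failed = rows.get(error.getIndex().intValue());
                System.out.println("retrying row: " + failed + ", reasons: " + error.getErrors());
            }
        }
    }
}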

Example 33 with TableReference

Use of com.google.api.services.bigquery.model.TableReference in the apache/beam project.

From class BigQueryServicesImplTest, method testSkipInvalidRowsIgnoreUnknownIgnoreInsertIdsValuesStreaming:

/**
 * Tests that {@link DatasetServiceImpl#insertAll} respects the skipInvalidRows,
 * ignoreUnknownValues and ignoreInsertIds parameters.
 */
@Test
public void testSkipInvalidRowsIgnoreUnknownIgnoreInsertIdsValuesStreaming() throws InterruptedException, IOException {
    TableReference ref = new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
    List<FailsafeValueInSingleWindow<TableRow, TableRow>> rows = ImmutableList.of(wrapValue(new TableRow()), wrapValue(new TableRow()));
    final TableDataInsertAllResponse allRowsSucceeded = new TableDataInsertAllResponse();
    // Return a 200 response each time
    MockSetupFunction allRowsSucceededResponseFunction = response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(allRowsSucceeded));
    };
    setupMockResponses(allRowsSucceededResponseFunction, allRowsSucceededResponseFunction);
    DatasetServiceImpl dataService = new DatasetServiceImpl(bigquery, null, PipelineOptionsFactory.create());
    // First, test with all flags disabled
    dataService.insertAll(
        ref,
        rows,
        null,
        BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
        TEST_BACKOFF,
        new MockSleeper(),
        InsertRetryPolicy.neverRetry(),
        Lists.newArrayList(),
        ErrorContainer.TABLE_ROW_ERROR_CONTAINER,
        false,
        false,
        false,
        null);
    TableDataInsertAllRequest parsedRequest = fromString(request.getContentAsString(), TableDataInsertAllRequest.class);
    assertFalse(parsedRequest.getSkipInvalidRows());
    assertFalse(parsedRequest.getIgnoreUnknownValues());
    // Then with all enabled
    dataService.insertAll(
        ref,
        rows,
        null,
        BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
        TEST_BACKOFF,
        new MockSleeper(),
        InsertRetryPolicy.neverRetry(),
        Lists.newArrayList(),
        ErrorContainer.TABLE_ROW_ERROR_CONTAINER,
        true,
        true,
        true,
        null);
    parsedRequest = fromString(request.getContentAsString(), TableDataInsertAllRequest.class);
    assertTrue(parsedRequest.getSkipInvalidRows());
    assertTrue(parsedRequest.getIgnoreUnknownValues());
    assertNull(parsedRequest.getRows().get(0).getInsertId());
    assertNull(parsedRequest.getRows().get(1).getInsertId());
    verifyWriteMetricWasSet("project", "dataset", "table", "ok", 2);
}
Also used : MetricName(org.apache.beam.sdk.metrics.MetricName) ExpectedLogs(org.apache.beam.sdk.testing.ExpectedLogs) ReadRowsResponse(com.google.cloud.bigquery.storage.v1.ReadRowsResponse) MockSleeper(com.google.api.client.testing.util.MockSleeper) ValueInSingleWindow(org.apache.beam.sdk.values.ValueInSingleWindow) ErrorInfo(com.google.api.client.googleapis.json.GoogleJsonError.ErrorInfo) MockitoAnnotations(org.mockito.MockitoAnnotations) GoogleJsonErrorContainer(com.google.api.client.googleapis.json.GoogleJsonErrorContainer) Strings(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings) GenericJson(com.google.api.client.json.GenericJson) GlobalWindow(org.apache.beam.sdk.transforms.windowing.GlobalWindow) Status(io.grpc.Status) JobServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.JobServiceImpl) FastNanoClockAndSleeper(org.apache.beam.sdk.extensions.gcp.util.FastNanoClockAndSleeper) Json(com.google.api.client.json.Json) Mockito.atLeastOnce(org.mockito.Mockito.atLeastOnce) HttpResponseException(com.google.api.client.http.HttpResponseException) Matchers.instanceOf(org.hamcrest.Matchers.instanceOf) MockLowLevelHttpRequest(com.google.api.client.testing.http.MockLowLevelHttpRequest) Assert.assertFalse(org.junit.Assert.assertFalse) Matchers.is(org.hamcrest.Matchers.is) Metadata(io.grpc.Metadata) RetryBoundedBackOff(com.google.cloud.hadoop.util.RetryBoundedBackOff) Matchers.containsString(org.hamcrest.Matchers.containsString) DatasetServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) Mockito.mock(org.mockito.Mockito.mock) MockHttpTransport(com.google.api.client.testing.http.MockHttpTransport) MonitoringInfoMetricName(org.apache.beam.runners.core.metrics.MonitoringInfoMetricName) Duration(org.joda.time.Duration) RunWith(org.junit.runner.RunWith) ArrayList(java.util.ArrayList) JobReference(com.google.api.services.bigquery.model.JobReference) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) TableReference(com.google.api.services.bigquery.model.TableReference) RetryHttpRequestInitializer(org.apache.beam.sdk.extensions.gcp.util.RetryHttpRequestInitializer) TableFieldSchema(com.google.api.services.bigquery.model.TableFieldSchema) GoogleJsonError(com.google.api.client.googleapis.json.GoogleJsonError) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) MetricsContainerImpl(org.apache.beam.runners.core.metrics.MetricsContainerImpl) Assert.assertTrue(org.junit.Assert.assertTrue) Mockito.times(org.mockito.Mockito.times) IOException(java.io.IOException) Test(org.junit.Test) ApiException(com.google.api.gax.rpc.ApiException) MetricsEnvironment(org.apache.beam.sdk.metrics.MetricsEnvironment) Mockito.never(org.mockito.Mockito.never) Assert.assertNull(org.junit.Assert.assertNull) Bigquery(com.google.api.services.bigquery.Bigquery) StatusCode(com.google.api.gax.rpc.StatusCode) Assert.assertEquals(org.junit.Assert.assertEquals) CreateReadSessionRequest(com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) SplitReadStreamRequest(com.google.cloud.bigquery.storage.v1.SplitReadStreamRequest) ApiErrorExtractor(com.google.cloud.hadoop.util.ApiErrorExtractor) ReadRowsRequest(com.google.cloud.bigquery.storage.v1.ReadRowsRequest) GoogleJsonResponseException(com.google.api.client.googleapis.json.GoogleJsonResponseException) ByteArrayInputStream(java.io.ByteArrayInputStream) Transport(org.apache.beam.sdk.extensions.gcp.util.Transport) TableRow(com.google.api.services.bigquery.model.TableRow) 
Assert.fail(org.junit.Assert.fail) TableSchema(com.google.api.services.bigquery.model.TableSchema) FailsafeValueInSingleWindow(org.apache.beam.sdk.values.FailsafeValueInSingleWindow) JacksonFactory(com.google.api.client.json.jackson2.JacksonFactory) PaneInfo(org.apache.beam.sdk.transforms.windowing.PaneInfo) Parser(com.google.protobuf.Parser) List(java.util.List) JobStatus(com.google.api.services.bigquery.model.JobStatus) TableDataInsertAllResponse(com.google.api.services.bigquery.model.TableDataInsertAllResponse) TableDataInsertAllRequest(com.google.api.services.bigquery.model.TableDataInsertAllRequest) ImmutableList(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList) BackOff(com.google.api.client.util.BackOff) ErrorProto(com.google.api.services.bigquery.model.ErrorProto) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) Assert.assertThrows(org.junit.Assert.assertThrows) HashMap(java.util.HashMap) PipelineOptionsFactory(org.apache.beam.sdk.options.PipelineOptionsFactory) BackOffAdapter(org.apache.beam.sdk.extensions.gcp.util.BackOffAdapter) RetryInfo(com.google.rpc.RetryInfo) Verify(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Verify) MonitoringInfoConstants(org.apache.beam.runners.core.metrics.MonitoringInfoConstants) Job(com.google.api.services.bigquery.model.Job) ExpectedException(org.junit.rules.ExpectedException) Sleeper(com.google.api.client.util.Sleeper) GcpResourceIdentifiers(org.apache.beam.runners.core.metrics.GcpResourceIdentifiers) FluentBackoff(org.apache.beam.sdk.util.FluentBackoff) Iterator(java.util.Iterator) Assert.assertNotNull(org.junit.Assert.assertNotNull) Lists(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists) Mockito.when(org.mockito.Mockito.when) JUnit4(org.junit.runners.JUnit4) InsertErrors(com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors) Table(com.google.api.services.bigquery.model.Table) Mockito.verify(org.mockito.Mockito.verify) LowLevelHttpResponse(com.google.api.client.http.LowLevelHttpResponse) Rule(org.junit.Rule) SplitReadStreamResponse(com.google.cloud.bigquery.storage.v1.SplitReadStreamResponse) TableDataList(com.google.api.services.bigquery.model.TableDataList) InputStream(java.io.InputStream) TableDataInsertAllRequest(com.google.api.services.bigquery.model.TableDataInsertAllRequest) TableReference(com.google.api.services.bigquery.model.TableReference) DatasetServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) TableRow(com.google.api.services.bigquery.model.TableRow) TableDataInsertAllResponse(com.google.api.services.bigquery.model.TableDataInsertAllResponse) FailsafeValueInSingleWindow(org.apache.beam.sdk.values.FailsafeValueInSingleWindow) MockSleeper(com.google.api.client.testing.util.MockSleeper) Test(org.junit.Test)
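The assertions above parse the serialized HTTP body back into a TableDataInsertAllRequest and read the flags off it. For reference, a hand-built request with the same flags might look like the following sketch (purely illustrative; the Beam code assembles this request internally):

import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableRow;
import java.util.Arrays;

public class InsertAllRequestFlags {
    public static void main(String[] args) {
        // Rows without insert ids, matching the ignoreInsertIds=true case in the test.
        TableDataInsertAllRequest.Rows row1 = new TableDataInsertAllRequest.Rows()
            .setJson(new TableRow().set("a", 1));
        TableDataInsertAllRequest.Rows row2 = new TableDataInsertAllRequest.Rows()
            .setJson(new TableRow().set("b", 2));

        TableDataInsertAllRequest request = new TableDataInsertAllRequest()
            .setRows(Arrays.asList(row1, row2))
            .setSkipInvalidRows(true)      // corresponds to the skipInvalidRows flag
            .setIgnoreUnknownValues(true); // corresponds to the ignoreUnknownValues flag

        // These are the same getters the test asserts on after parsing the HTTP body.
        System.out.println(request.getSkipInvalidRows());
        System.out.println(request.getIgnoreUnknownValues());
        System.out.println(request.getRows().get(0).getInsertId()); // null: no insert id set
    }
}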

Example 34 with TableReference

Use of com.google.api.services.bigquery.model.TableReference in the apache/beam project.

From class BigQueryServicesImplTest, method testCreateTableDoesNotRetry:

/**
 * Tests that {@link BigQueryServicesImpl} does not retry non-rate-limited attempts.
 */
@Test
public void testCreateTableDoesNotRetry() throws IOException {
    TableReference ref = new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
    Table testTable = new Table().setTableReference(ref);
    // First response is 403 not-rate-limited, second response has valid payload but should not
    // be invoked.
    setupMockResponses(response -> {
        when(response.getStatusCode()).thenReturn(403);
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getContent()).thenReturn(toStream(errorWithReasonAndStatus("actually forbidden", 403)));
    }, response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(testTable));
    });
    thrown.expect(GoogleJsonResponseException.class);
    thrown.expectMessage("actually forbidden");
    BigQueryServicesImpl.DatasetServiceImpl services = new BigQueryServicesImpl.DatasetServiceImpl(bigquery, null, PipelineOptionsFactory.create());
    try {
        services.tryCreateTable(testTable, new RetryBoundedBackOff(BackOff.ZERO_BACKOFF, 3), Sleeper.DEFAULT);
        fail();
    } catch (IOException e) {
        verify(responses[0], atLeastOnce()).getStatusCode();
        verify(responses[0]).getContent();
        verify(responses[0]).getContentType();
        // It should not invoke 2nd response
        verify(responses[1], never()).getStatusCode();
        verify(responses[1], never()).getContent();
        verify(responses[1], never()).getContentType();
        throw e;
    }
}
Also used : RetryBoundedBackOff(com.google.cloud.hadoop.util.RetryBoundedBackOff) TableReference(com.google.api.services.bigquery.model.TableReference) Table(com.google.api.services.bigquery.model.Table) DatasetServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) DatasetServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) IOException(java.io.IOException) Test(org.junit.Test)
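tryCreateTable retries only while the supplied BackOff still yields a delay and the error looks rate-limit related; a plain 403 is rethrown immediately, which is what the test verifies. A rough sketch of that bounded-retry shape using the google-http-client BackOff and Sleeper types (a simplification under my own naming, not the actual Beam implementation):

import com.google.api.client.util.BackOff;
import com.google.api.client.util.ExponentialBackOff;
import com.google.api.client.util.Sleeper;
import java.io.IOException;

public class BoundedRetrySketch {

    interface Attempt { void run() throws IOException; }

    // Retry an attempt until it succeeds, the error is non-retryable,
    // or the backoff is exhausted (nextBackOffMillis() returns BackOff.STOP).
    static void runWithRetries(Attempt attempt, BackOff backOff, Sleeper sleeper)
            throws IOException, InterruptedException {
        while (true) {
            try {
                attempt.run();
                return;
            } catch (IOException e) {
                if (!isRetryable(e)) {
                    throw e; // e.g. a plain 403: fail fast, as the test expects
                }
                long backOffMillis = backOff.nextBackOffMillis();
                if (backOffMillis == BackOff.STOP) {
                    throw e; // retries exhausted
                }
                sleeper.sleep(backOffMillis);
            }
        }
    }

    // Placeholder predicate; the real code inspects the error reason (e.g. rate-limit errors).
    static boolean isRetryable(IOException e) {
        return false;
    }

    public static void main(String[] args) throws Exception {
        BackOff backOff = new ExponentialBackOff(); // or a bounded backoff, as in the test
        runWithRetries(() -> System.out.println("create table attempt"), backOff, Sleeper.DEFAULT);
    }
}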

Example 35 with TableReference

Use of com.google.api.services.bigquery.model.TableReference in the apache/beam project.

From class BigQueryServicesImplTest, method testExtendedErrorRetrieval:

/**
 * Tests that {@link DatasetServiceImpl#insertAll} uses the supplied {@link ErrorContainer}.
 */
@Test
public void testExtendedErrorRetrieval() throws InterruptedException, IOException {
    TableReference ref = new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
    List<FailsafeValueInSingleWindow<TableRow, TableRow>> rows =
        ImmutableList.of(
            wrapValue(new TableRow().set("a", 1)), wrapValue(new TableRow().set("b", 2)));
    final TableDataInsertAllResponse failures =
        new TableDataInsertAllResponse()
            .setInsertErrors(
                ImmutableList.of(
                    new InsertErrors()
                        .setIndex(0L)
                        .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout"))),
                    new InsertErrors()
                        .setIndex(1L)
                        .setErrors(ImmutableList.of(new ErrorProto().setReason("invalid")))));
    final List<ValueInSingleWindow<BigQueryInsertError>> expected =
        ImmutableList.of(
            wrapErrorValue(
                new BigQueryInsertError(rows.get(0).getValue(), failures.getInsertErrors().get(0), ref)),
            wrapErrorValue(
                new BigQueryInsertError(rows.get(1).getValue(), failures.getInsertErrors().get(1), ref)));
    setupMockResponses(response -> {
        when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(response.getStatusCode()).thenReturn(200);
        when(response.getContent()).thenReturn(toStream(failures));
    });
    DatasetServiceImpl dataService = new DatasetServiceImpl(bigquery, null, PipelineOptionsFactory.create());
    List<ValueInSingleWindow<BigQueryInsertError>> failedInserts = Lists.newArrayList();
    dataService.insertAll(
        ref,
        rows,
        null,
        BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
        TEST_BACKOFF,
        new MockSleeper(),
        InsertRetryPolicy.neverRetry(),
        failedInserts,
        ErrorContainer.BIG_QUERY_INSERT_ERROR_ERROR_CONTAINER,
        false,
        false,
        false,
        null);
    assertThat(failedInserts, is(expected));
}
Also used : TableReference(com.google.api.services.bigquery.model.TableReference) ErrorProto(com.google.api.services.bigquery.model.ErrorProto) DatasetServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) TableRow(com.google.api.services.bigquery.model.TableRow) TableDataInsertAllResponse(com.google.api.services.bigquery.model.TableDataInsertAllResponse) ValueInSingleWindow(org.apache.beam.sdk.values.ValueInSingleWindow) FailsafeValueInSingleWindow(org.apache.beam.sdk.values.FailsafeValueInSingleWindow) InsertErrors(com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors) FailsafeValueInSingleWindow(org.apache.beam.sdk.values.FailsafeValueInSingleWindow) MockSleeper(com.google.api.client.testing.util.MockSleeper) Test(org.junit.Test)
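Each failed insert surfaces as a BigQueryInsertError that pairs the offending TableRow, the InsertErrors payload, and the target TableReference. A hedged sketch of consuming those values downstream (getter names as I understand the Beam API; verify against the javadoc):

import com.google.api.services.bigquery.model.ErrorProto;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableRow;
import java.util.Collections;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryInsertError;

public class FailedInsertInspection {
    public static void main(String[] args) {
        TableReference ref = new TableReference()
            .setProjectId("project").setDatasetId("dataset").setTableId("table");

        // The same shape the test builds: one row, one InsertErrors entry, one table.
        InsertErrors timeout = new InsertErrors()
            .setIndex(0L)
            .setErrors(Collections.singletonList(new ErrorProto().setReason("timeout")));
        BigQueryInsertError failed =
            new BigQueryInsertError(new TableRow().set("a", 1), timeout, ref);

        // Downstream error handling typically looks at the row, the reason, and the table.
        System.out.println("row: " + failed.getRow());
        System.out.println("reason: " + failed.getError().getErrors().get(0).getReason());
        System.out.println("table: " + failed.getTable().getTableId());
    }
}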

Aggregations

TableReference (com.google.api.services.bigquery.model.TableReference): 139 usages
Test (org.junit.Test): 75 usages
TableRow (com.google.api.services.bigquery.model.TableRow): 68 usages
Table (com.google.api.services.bigquery.model.Table): 61 usages
TableSchema (com.google.api.services.bigquery.model.TableSchema): 36 usages
DatasetServiceImpl (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl): 29 usages
TableFieldSchema (com.google.api.services.bigquery.model.TableFieldSchema): 22 usages
TableDataInsertAllResponse (com.google.api.services.bigquery.model.TableDataInsertAllResponse): 19 usages
FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices): 19 usages
MockSleeper (com.google.api.client.testing.util.MockSleeper): 17 usages
BigQueryHelpers.createTempTableReference (org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.createTempTableReference): 17 usages
FailsafeValueInSingleWindow (org.apache.beam.sdk.values.FailsafeValueInSingleWindow): 16 usages
JobStatus (com.google.api.services.bigquery.model.JobStatus): 15 usages
ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession): 15 usages
BigQueryResourceNaming.createTempTableReference (org.apache.beam.sdk.io.gcp.bigquery.BigQueryResourceNaming.createTempTableReference): 15 usages
ErrorProto (com.google.api.services.bigquery.model.ErrorProto): 14 usages
JobStatistics (com.google.api.services.bigquery.model.JobStatistics): 14 usages
CreateReadSessionRequest (com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest): 14 usages
ByteString (com.google.protobuf.ByteString): 14 usages
IOException (java.io.IOException): 14 usages