Example 1 with TableReference

use of com.google.api.services.bigquery.model.TableReference in project beam by apache.

the class FakeDatasetService method createTable.

@Override
public void createTable(Table table) throws IOException {
    TableReference tableReference = table.getTableReference();
    synchronized (BigQueryIOTest.tables) {
        Map<String, TableContainer> dataset = BigQueryIOTest.tables.get(tableReference.getProjectId(), tableReference.getDatasetId());
        if (dataset == null) {
            throwNotFound("Tried to get a dataset %s:%s, but no such dataset was set", tableReference.getProjectId(), tableReference.getDatasetId());
        }
        TableContainer tableContainer = dataset.get(tableReference.getTableId());
        if (tableContainer == null) {
            tableContainer = new TableContainer(table);
            dataset.put(tableReference.getTableId(), tableContainer);
        }
    }
}
Also used : TableReference(com.google.api.services.bigquery.model.TableReference)
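
The throwNotFound helper referenced above is not part of the snippet. A minimal, hypothetical sketch of what it might look like, assuming it only formats the message and surfaces it as the IOException declared by createTable:

// Hypothetical helper (not shown in the original snippet): format the message
// and fail the call the way a missing dataset would.
private static void throwNotFound(String messageFormat, Object... args) throws IOException {
    throw new IOException(String.format(messageFormat, args));
}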

Example 2 with TableReference

use of com.google.api.services.bigquery.model.TableReference in project beam by apache.

the class BigQueryTableRowIteratorTest method testReadFromQuery.

/**
   * Verifies that when the query runs, the correct data is returned and the temporary dataset and
   * table are both cleaned up.
   */
@Test
public void testReadFromQuery() throws IOException, InterruptedException {
    // Mock job inserting.
    Job dryRunJob = new Job().setStatistics(new JobStatistics().setQuery(new JobStatistics2().setReferencedTables(ImmutableList.of(new TableReference()))));
    Job insertedJob = new Job().setJobReference(new JobReference());
    when(mockJobsInsert.execute()).thenReturn(dryRunJob, insertedJob);
    // Mock job polling.
    JobStatus status = new JobStatus().setState("DONE");
    JobConfigurationQuery resultQueryConfig = new JobConfigurationQuery().setDestinationTable(new TableReference().setProjectId("project").setDatasetId("tempdataset").setTableId("temptable"));
    Job getJob = new Job().setJobReference(new JobReference()).setStatus(status).setConfiguration(new JobConfiguration().setQuery(resultQueryConfig));
    when(mockJobsGet.execute()).thenReturn(getJob);
    // Mock table fetches: first the table location, then its schema.
    when(mockTablesGet.execute()).thenReturn(tableWithLocation(), tableWithBasicSchema());
    byte[] photoBytes = "photograph".getBytes();
    String photoBytesEncoded = BaseEncoding.base64().encode(photoBytes);
    // Mock table data fetch.
    when(mockTabledataList.execute()).thenReturn(rawDataList(rawRow("Arthur", 42, photoBytesEncoded, "2000-01-01", "2000-01-01 00:00:00.000005", "00:00:00.000005")));
    // Run query and verify
    String query = "SELECT name, count, photo, anniversary_date, " + "anniversary_datetime, anniversary_time from table";
    JobConfigurationQuery queryConfig = new JobConfigurationQuery().setQuery(query);
    try (BigQueryTableRowIterator iterator = BigQueryTableRowIterator.fromQuery(queryConfig, "project", mockClient)) {
        iterator.open();
        assertTrue(iterator.advance());
        TableRow row = iterator.getCurrent();
        assertTrue(row.containsKey("name"));
        assertTrue(row.containsKey("answer"));
        assertTrue(row.containsKey("photo"));
        assertTrue(row.containsKey("anniversary_date"));
        assertTrue(row.containsKey("anniversary_datetime"));
        assertTrue(row.containsKey("anniversary_time"));
        assertEquals("Arthur", row.get("name"));
        assertEquals(42, row.get("answer"));
        assertEquals(photoBytesEncoded, row.get("photo"));
        assertEquals("2000-01-01", row.get("anniversary_date"));
        assertEquals("2000-01-01 00:00:00.000005", row.get("anniversary_datetime"));
        assertEquals("00:00:00.000005", row.get("anniversary_time"));
        assertFalse(iterator.advance());
    }
    // Temp dataset created and later deleted.
    verify(mockClient, times(2)).datasets();
    verify(mockDatasets).insert(anyString(), any(Dataset.class));
    verify(mockDatasetsInsert).execute();
    verify(mockDatasets).delete(anyString(), anyString());
    verify(mockDatasetsDelete).execute();
    // Query job inserted twice (dry run, then the real query), polled once.
    verify(mockClient, times(3)).jobs();
    verify(mockJobs, times(2)).insert(anyString(), any(Job.class));
    verify(mockJobsInsert, times(2)).execute();
    verify(mockJobs).get(anyString(), anyString());
    verify(mockJobsGet).execute();
    // Temp table fetched after the query finishes, then deleted after reading.
    verify(mockClient, times(3)).tables();
    verify(mockTables, times(2)).get(anyString(), anyString(), anyString());
    verify(mockTablesGet, times(2)).execute();
    verify(mockTables).delete(anyString(), anyString(), anyString());
    verify(mockTablesDelete).execute();
    // Table data read.
    verify(mockClient).tabledata();
    verify(mockTabledata).list("project", "tempdataset", "temptable");
    verify(mockTabledataList).execute();
}
Also used : JobStatistics(com.google.api.services.bigquery.model.JobStatistics) JobStatistics2(com.google.api.services.bigquery.model.JobStatistics2) JobReference(com.google.api.services.bigquery.model.JobReference) JobConfigurationQuery(com.google.api.services.bigquery.model.JobConfigurationQuery) Dataset(com.google.api.services.bigquery.model.Dataset) Matchers.anyString(org.mockito.Matchers.anyString) Matchers.containsString(org.hamcrest.Matchers.containsString) JobStatus(com.google.api.services.bigquery.model.JobStatus) TableReference(com.google.api.services.bigquery.model.TableReference) TableRow(com.google.api.services.bigquery.model.TableRow) Job(com.google.api.services.bigquery.model.Job) JobConfiguration(com.google.api.services.bigquery.model.JobConfiguration) Test(org.junit.Test)
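
The rawRow and rawDataList helpers used to mock the table data fetch are not shown. A plausible sketch, assuming they build the TableDataList payload returned by the mocked tabledata().list() call, with cell values in the same order as the mocked schema (helper names and shapes are reconstructions, not the original test code):

import com.google.api.services.bigquery.model.TableCell;
import com.google.api.services.bigquery.model.TableDataList;
import com.google.api.services.bigquery.model.TableRow;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Wrap each raw value in a TableCell, and the cells in a TableRow (schema order).
private static TableRow rawRow(Object... values) {
    List<TableCell> cells = new ArrayList<>();
    for (Object value : values) {
        cells.add(new TableCell().setV(value));
    }
    return new TableRow().setF(cells);
}

// Wrap the rows in the TableDataList that the mocked tabledata list call returns.
private static TableDataList rawDataList(TableRow... rows) {
    return new TableDataList().setRows(Arrays.asList(rows));
}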

Example 3 with TableReference

use of com.google.api.services.bigquery.model.TableReference in project beam by apache.

the class BigQueryServicesImplTest method testInsertRetry.

/**
   * Tests that {@link DatasetServiceImpl#insertAll} retries rate-limited insert attempts.
   */
@Test
public void testInsertRetry() throws Exception {
    TableReference ref = new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
    List<ValueInSingleWindow<TableRow>> rows = new ArrayList<>();
    rows.add(wrapTableRow(new TableRow()));
    // First response is 403 rate limited, second response has valid payload.
    when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
    when(response.getStatusCode()).thenReturn(403).thenReturn(200);
    when(response.getContent()).thenReturn(toStream(errorWithReasonAndStatus("rateLimitExceeded", 403))).thenReturn(toStream(new TableDataInsertAllResponse()));
    DatasetServiceImpl dataService = new DatasetServiceImpl(bigquery, PipelineOptionsFactory.create());
    dataService.insertAll(ref, rows, null, BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()), new MockSleeper(), InsertRetryPolicy.alwaysRetry(), null);
    verify(response, times(2)).getStatusCode();
    verify(response, times(2)).getContent();
    verify(response, times(2)).getContentType();
    expectedLogs.verifyInfo("BigQuery insertAll exceeded rate limit, retrying");
}
Also used : TableReference(com.google.api.services.bigquery.model.TableReference) DatasetServiceImpl(org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) TableRow(com.google.api.services.bigquery.model.TableRow) TableDataInsertAllResponse(com.google.api.services.bigquery.model.TableDataInsertAllResponse) ArrayList(java.util.ArrayList) ValueInSingleWindow(org.apache.beam.sdk.values.ValueInSingleWindow) MockSleeper(com.google.api.client.testing.util.MockSleeper) Test(org.junit.Test)
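
The errorWithReasonAndStatus helper produces the JSON error body behind the mocked 403 response. A sketch of one plausible implementation, assuming the google-api-client GoogleJsonError types (the body is a reconstruction, not the original test code):

import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonErrorContainer;
import com.google.common.collect.ImmutableList;

// Build a container holding a single error, e.g. reason "rateLimitExceeded" with
// HTTP code 403, which the mocked response then serves as its content.
private static GoogleJsonErrorContainer errorWithReasonAndStatus(String reason, int status) {
    GoogleJsonError.ErrorInfo info = new GoogleJsonError.ErrorInfo();
    info.setReason(reason);
    info.setDomain("global");
    GoogleJsonError error = new GoogleJsonError();
    error.setErrors(ImmutableList.of(info));
    error.setCode(status);
    GoogleJsonErrorContainer container = new GoogleJsonErrorContainer();
    container.setError(error);
    return container;
}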

Example 4 with TableReference

use of com.google.api.services.bigquery.model.TableReference in project beam by apache.

the class FakeJobService method runLoadJob.

private JobStatus runLoadJob(JobReference jobRef, JobConfigurationLoad load) throws InterruptedException, IOException {
    TableReference destination = load.getDestinationTable();
    TableSchema schema = load.getSchema();
    List<ResourceId> sourceFiles = filesForLoadJobs.get(jobRef.getProjectId(), jobRef.getJobId());
    WriteDisposition writeDisposition = WriteDisposition.valueOf(load.getWriteDisposition());
    CreateDisposition createDisposition = CreateDisposition.valueOf(load.getCreateDisposition());
    checkArgument(load.getSourceFormat().equals("NEWLINE_DELIMITED_JSON"));
    Table existingTable = datasetService.getTable(destination);
    if (!validateDispositions(existingTable, createDisposition, writeDisposition)) {
        return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
    }
    datasetService.createTable(new Table().setTableReference(destination).setSchema(schema));
    List<TableRow> rows = Lists.newArrayList();
    for (ResourceId filename : sourceFiles) {
        rows.addAll(readRows(filename.toString()));
    }
    datasetService.insertAll(destination, rows, null);
    return new JobStatus().setState("DONE");
}
Also used : JobStatus(com.google.api.services.bigquery.model.JobStatus) TableReference(com.google.api.services.bigquery.model.TableReference) CreateDisposition(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition) HashBasedTable(com.google.common.collect.HashBasedTable) Table(com.google.api.services.bigquery.model.Table) ErrorProto(com.google.api.services.bigquery.model.ErrorProto) TableSchema(com.google.api.services.bigquery.model.TableSchema) ResourceId(org.apache.beam.sdk.io.fs.ResourceId) TableRow(com.google.api.services.bigquery.model.TableRow) WriteDisposition(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition)
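
The validateDispositions call above is not shown. A hypothetical sketch of the checks it plausibly performs, assuming the fake datasetService exposes deleteTable and getAllRows helpers (both are assumptions for illustration):

// Missing table: only a create-capable disposition may proceed. Existing table:
// WRITE_TRUNCATE drops it so the load recreates it; WRITE_EMPTY refuses to load
// into a table that already contains rows.
private boolean validateDispositions(
        Table table, CreateDisposition createDisposition, WriteDisposition writeDisposition)
        throws IOException, InterruptedException {
    if (table == null) {
        return createDisposition != CreateDisposition.CREATE_NEVER;
    }
    TableReference ref = table.getTableReference();
    if (writeDisposition == WriteDisposition.WRITE_TRUNCATE) {
        datasetService.deleteTable(ref);
    } else if (writeDisposition == WriteDisposition.WRITE_EMPTY) {
        return datasetService.getAllRows(ref.getProjectId(), ref.getDatasetId(), ref.getTableId()).isEmpty();
    }
    return true;
}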

Example 5 with TableReference

use of com.google.api.services.bigquery.model.TableReference in project beam by apache.

the class TrafficMaxLaneFlow method main.

/**
   * Sets up and starts the streaming pipeline.
   *
   * @throws IOException if there is a problem setting up resources
   */
public static void main(String[] args) throws IOException {
    TrafficMaxLaneFlowOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().as(TrafficMaxLaneFlowOptions.class);
    options.setBigQuerySchema(FormatMaxesFn.getSchema());
    // Using ExampleUtils to set up required resources.
    ExampleUtils exampleUtils = new ExampleUtils(options);
    exampleUtils.setup();
    Pipeline pipeline = Pipeline.create(options);
    TableReference tableRef = new TableReference();
    tableRef.setProjectId(options.getProject());
    tableRef.setDatasetId(options.getBigQueryDataset());
    tableRef.setTableId(options.getBigQueryTable());
    pipeline
        .apply("ReadLines", new ReadFileAndExtractTimestamps(options.getInputFile()))
        .apply(ParDo.of(new ExtractFlowInfoFn()))
        .apply(Window.<KV<String, LaneInfo>>into(
            SlidingWindows.of(Duration.standardMinutes(options.getWindowDuration()))
                .every(Duration.standardMinutes(options.getWindowSlideEvery()))))
        .apply(new MaxLaneFlow())
        .apply(BigQueryIO.writeTableRows().to(tableRef).withSchema(FormatMaxesFn.getSchema()));
    // Run the pipeline.
    PipelineResult result = pipeline.run();
    // ExampleUtils will try to cancel the pipeline and the injector before the program exits.
    exampleUtils.waitToFinish(result);
}
Also used : TableReference(com.google.api.services.bigquery.model.TableReference) ExampleUtils(org.apache.beam.examples.common.ExampleUtils) PipelineResult(org.apache.beam.sdk.PipelineResult) Pipeline(org.apache.beam.sdk.Pipeline)
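
BigQueryIO.Write also accepts the destination as a "project:dataset.table" spec string instead of a TableReference object. A minimal variant of the sink above using that form; maxLaneFlowRows stands in for the PCollection<TableRow> produced by MaxLaneFlow, and the disposition settings are illustrative assumptions, not taken from the example:

// Same destination expressed as a table spec string rather than a TableReference.
String tableSpec = String.format("%s:%s.%s",
    options.getProject(), options.getBigQueryDataset(), options.getBigQueryTable());
maxLaneFlowRows.apply(
    BigQueryIO.writeTableRows()
        .to(tableSpec)
        .withSchema(FormatMaxesFn.getSchema())
        .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
        .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND));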

Aggregations

TableReference (com.google.api.services.bigquery.model.TableReference) 139
Test (org.junit.Test) 75
TableRow (com.google.api.services.bigquery.model.TableRow) 68
Table (com.google.api.services.bigquery.model.Table) 61
TableSchema (com.google.api.services.bigquery.model.TableSchema) 36
DatasetServiceImpl (org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl) 29
TableFieldSchema (com.google.api.services.bigquery.model.TableFieldSchema) 22
TableDataInsertAllResponse (com.google.api.services.bigquery.model.TableDataInsertAllResponse) 19
FakeBigQueryServices (org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices) 19
MockSleeper (com.google.api.client.testing.util.MockSleeper) 17
BigQueryHelpers.createTempTableReference (org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.createTempTableReference) 17
FailsafeValueInSingleWindow (org.apache.beam.sdk.values.FailsafeValueInSingleWindow) 16
JobStatus (com.google.api.services.bigquery.model.JobStatus) 15
ReadSession (com.google.cloud.bigquery.storage.v1.ReadSession) 15
BigQueryResourceNaming.createTempTableReference (org.apache.beam.sdk.io.gcp.bigquery.BigQueryResourceNaming.createTempTableReference) 15
ErrorProto (com.google.api.services.bigquery.model.ErrorProto) 14
JobStatistics (com.google.api.services.bigquery.model.JobStatistics) 14
CreateReadSessionRequest (com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) 14
ByteString (com.google.protobuf.ByteString) 14
IOException (java.io.IOException) 14