
Example 56 with TableRow

Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

From class BigQueryIOTest, method testCoder_nullCell.

// Test that BigQuery's special null placeholder objects can be encoded.
@Test
public void testCoder_nullCell() throws CoderException {
    TableRow row = new TableRow();
    row.set("temperature", Data.nullOf(Object.class));
    row.set("max_temperature", Data.nullOf(Object.class));
    byte[] bytes = CoderUtils.encodeToByteArray(TableRowJsonCoder.of(), row);
    TableRow newRow = CoderUtils.decodeFromByteArray(TableRowJsonCoder.of(), bytes);
    byte[] newBytes = CoderUtils.encodeToByteArray(TableRowJsonCoder.of(), newRow);
    Assert.assertArrayEquals(bytes, newBytes);
}
Also used: TableRow (com.google.api.services.bigquery.model.TableRow), Test (org.junit.Test)
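
The coder the test round-trips through is the same one you attach when building a pipeline of TableRow values by hand. A minimal, hypothetical sketch follows (the values and pipeline are made up for illustration; assumed imports: org.apache.beam.sdk.Pipeline, org.apache.beam.sdk.transforms.Create, org.apache.beam.sdk.values.PCollection, com.google.api.client.util.Data).

// Illustrative only: explicitly attaching TableRowJsonCoder to a PCollection<TableRow>.
Pipeline p = Pipeline.create();
PCollection<TableRow> rows = p.apply(
    Create.of(
            new TableRow().set("temperature", 72),
            new TableRow().set("temperature", Data.nullOf(Object.class)))
        .withCoder(TableRowJsonCoder.of()));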

Example 57 with TableRow

Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

From class BigQueryIOTest, method testRuntimeOptionsNotCalledInApplyOutput.

@Test
public void testRuntimeOptionsNotCalledInApplyOutput() {
    RuntimeTestOptions options = PipelineOptionsFactory.as(RuntimeTestOptions.class);
    BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
    bqOptions.setTempLocation("gs://testbucket/testdir");
    Pipeline pipeline = TestPipeline.create(options);
    BigQueryIO.Write<TableRow> write =
        BigQueryIO.writeTableRows()
            .to(options.getOutputTable())
            .withSchema(
                NestedValueProvider.of(options.getOutputSchema(), new JsonSchemaToTableSchema()))
            .withoutValidation();
    pipeline.apply(Create.empty(TableRowJsonCoder.of())).apply(write);
    // Test that this doesn't throw.
    DisplayData.from(write);
}
Also used: TableRow (com.google.api.services.bigquery.model.TableRow), JsonSchemaToTableSchema (org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.JsonSchemaToTableSchema), TestPipeline (org.apache.beam.sdk.testing.TestPipeline), Pipeline (org.apache.beam.sdk.Pipeline), Test (org.junit.Test)
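
The test reads the destination table and schema from RuntimeTestOptions as runtime-provided values, so they never need to be resolvable while the pipeline is constructed. The interface itself is not shown above; a plausible sketch (an assumption, not the actual Beam test source) would expose them as ValueProvider getters (assumed imports: org.apache.beam.sdk.options.PipelineOptions, org.apache.beam.sdk.options.ValueProvider).

// Assumed shape of an options interface like RuntimeTestOptions; illustrative only.
public interface RuntimeTestOptions extends PipelineOptions {
    ValueProvider<String> getOutputTable();
    void setOutputTable(ValueProvider<String> value);

    ValueProvider<String> getOutputSchema();
    void setOutputSchema(ValueProvider<String> value);
}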

Example 58 with TableRow

Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

From class BigQueryUtilTest, method testRead.

@Test
public void testRead() throws IOException, InterruptedException {
    onTableGet(basicTableSchema());
    TableDataList dataList = rawDataList(rawRow("Arthur", 42));
    onTableList(dataList);
    try (BigQueryTableRowIterator iterator =
            BigQueryTableRowIterator.fromTable(
                BigQueryHelpers.parseTableSpec("project:dataset.table"), mockClient)) {
        iterator.open();
        Assert.assertTrue(iterator.advance());
        TableRow row = iterator.getCurrent();
        Assert.assertTrue(row.containsKey("name"));
        Assert.assertTrue(row.containsKey("answer"));
        Assert.assertEquals("Arthur", row.get("name"));
        Assert.assertEquals(42, row.get("answer"));
        Assert.assertFalse(iterator.advance());
        verifyTableGet();
        verifyTabledataList();
    }
}
Also used: TableRow (com.google.api.services.bigquery.model.TableRow), TableDataList (com.google.api.services.bigquery.model.TableDataList), Test (org.junit.Test)
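
The helpers basicTableSchema(), rawRow(), and rawDataList(), along with the mocked client, are defined elsewhere in BigQueryUtilTest. A hedged sketch of how such helpers could be written is below; the field names match the assertions above, but the exact test code may differ (assumed imports: TableCell, TableFieldSchema, TableSchema, TableDataList from com.google.api.services.bigquery.model, com.google.common.collect.ImmutableList, java.util collections).

// Sketch of fake-data helpers consistent with the assertions in testRead; illustrative only.
private TableSchema basicTableSchema() {
    return new TableSchema().setFields(ImmutableList.of(
        new TableFieldSchema().setName("name").setType("STRING"),
        new TableFieldSchema().setName("answer").setType("INTEGER")));
}

private TableRow rawRow(Object... args) {
    List<TableCell> cells = new ArrayList<>();
    for (Object arg : args) {
        cells.add(new TableCell().setV(arg));
    }
    return new TableRow().setF(cells);
}

private TableDataList rawDataList(TableRow... rows) {
    return new TableDataList().setRows(Arrays.asList(rows));
}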

Example 59 with TableRow

Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

From class FakeJobService, method runCopyJob.

private JobStatus runCopyJob(JobConfigurationTableCopy copy) throws InterruptedException, IOException {
    List<TableReference> sources = copy.getSourceTables();
    TableReference destination = copy.getDestinationTable();
    WriteDisposition writeDisposition = WriteDisposition.valueOf(copy.getWriteDisposition());
    CreateDisposition createDisposition = CreateDisposition.valueOf(copy.getCreateDisposition());
    Table existingTable = datasetService.getTable(destination);
    if (!validateDispositions(existingTable, createDisposition, writeDisposition)) {
        return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
    }
    List<TableRow> allRows = Lists.newArrayList();
    for (TableReference source : sources) {
        allRows.addAll(datasetService.getAllRows(source.getProjectId(), source.getDatasetId(), source.getTableId()));
    }
    datasetService.createTable(new Table().setTableReference(destination));
    datasetService.insertAll(destination, allRows, null);
    return new JobStatus().setState("DONE");
}
Also used: JobStatus (com.google.api.services.bigquery.model.JobStatus), TableReference (com.google.api.services.bigquery.model.TableReference), CreateDisposition (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition), HashBasedTable (com.google.common.collect.HashBasedTable), Table (com.google.api.services.bigquery.model.Table), ErrorProto (com.google.api.services.bigquery.model.ErrorProto), TableRow (com.google.api.services.bigquery.model.TableRow), WriteDisposition (org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition)
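
runCopyJob fails the job when the create/write dispositions are incompatible with the destination's current state, but the check itself is not shown above. A simplified sketch of the rule it could enforce is given here; this is an assumption about the logic, not the actual FakeJobService code.

// Assumed, simplified disposition check: CREATE_NEVER needs an existing table,
// WRITE_EMPTY needs an empty one; everything else is allowed here.
private boolean validateDispositions(
        Table table, CreateDisposition createDisposition, WriteDisposition writeDisposition)
        throws IOException, InterruptedException {
    if (table == null) {
        return createDisposition != CreateDisposition.CREATE_NEVER;
    }
    if (writeDisposition == WriteDisposition.WRITE_EMPTY) {
        TableReference ref = table.getTableReference();
        return datasetService
            .getAllRows(ref.getProjectId(), ref.getDatasetId(), ref.getTableId())
            .isEmpty();
    }
    return true;
}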

Example 60 with TableRow

Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.

From class FakeJobService, method runExtractJob.

private JobStatus runExtractJob(Job job, JobConfigurationExtract extract) throws InterruptedException, IOException {
    TableReference sourceTable = extract.getSourceTable();
    List<TableRow> rows = datasetService.getAllRows(
        sourceTable.getProjectId(), sourceTable.getDatasetId(), sourceTable.getTableId());
    TableSchema schema = datasetService.getTable(sourceTable).getSchema();
    List<Long> destinationFileCounts = Lists.newArrayList();
    for (String destination : extract.getDestinationUris()) {
        destinationFileCounts.add(writeRows(sourceTable.getTableId(), rows, schema, destination));
    }
    job.setStatistics(
        new JobStatistics()
            .setExtract(new JobStatistics4().setDestinationUriFileCounts(destinationFileCounts)));
    return new JobStatus().setState("DONE");
}
Also used: JobStatus (com.google.api.services.bigquery.model.JobStatus), JobStatistics (com.google.api.services.bigquery.model.JobStatistics), TableReference (com.google.api.services.bigquery.model.TableReference), TableSchema (com.google.api.services.bigquery.model.TableSchema), JobStatistics4 (com.google.api.services.bigquery.model.JobStatistics4), TableRow (com.google.api.services.bigquery.model.TableRow)
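
For context, the extract configuration consumed above is the standard BigQuery API model object. A hypothetical example of constructing one is shown here; the project, dataset, table, and bucket names are made up (assumed imports: com.google.api.services.bigquery.model.JobConfigurationExtract, com.google.common.collect.ImmutableList).

// Illustrative only: building an extract configuration like the one runExtractJob reads.
JobConfigurationExtract extract = new JobConfigurationExtract()
    .setSourceTable(new TableReference()
        .setProjectId("project")
        .setDatasetId("dataset")
        .setTableId("table"))
    .setDestinationUris(ImmutableList.of("gs://testbucket/extract/output-*.avro"))
    .setDestinationFormat("AVRO");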

Aggregations

TableRow (com.google.api.services.bigquery.model.TableRow): 73 uses
Test (org.junit.Test): 43 uses
TableReference (com.google.api.services.bigquery.model.TableReference): 24 uses
TableSchema (com.google.api.services.bigquery.model.TableSchema): 18 uses
Pipeline (org.apache.beam.sdk.Pipeline): 16 uses
KV (org.apache.beam.sdk.values.KV): 15 uses
TableFieldSchema (com.google.api.services.bigquery.model.TableFieldSchema): 14 uses
JsonSchemaToTableSchema (org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.JsonSchemaToTableSchema): 14 uses
BigQueryHelpers.toJsonString (org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.toJsonString): 13 uses
TestPipeline (org.apache.beam.sdk.testing.TestPipeline): 12 uses
BigQueryHelpers.createTempTableReference (org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.createTempTableReference): 11 uses
Table (com.google.api.services.bigquery.model.Table): 10 uses
HashBasedTable (com.google.common.collect.HashBasedTable): 10 uses
JobStatus (com.google.api.services.bigquery.model.JobStatus): 9 uses
TableDataInsertAllResponse (com.google.api.services.bigquery.model.TableDataInsertAllResponse): 8 uses
ArrayList (java.util.ArrayList): 8 uses
List (java.util.List): 8 uses
Map (java.util.Map): 8 uses
ValueInSingleWindow (org.apache.beam.sdk.values.ValueInSingleWindow): 7 uses
JobStatistics (com.google.api.services.bigquery.model.JobStatistics): 6 uses