Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
In class BigQueryIOTest, method testCoder_nullCell:
// Test that BigQuery's special null placeholder objects can be encoded.
@Test
public void testCoder_nullCell() throws CoderException {
  TableRow row = new TableRow();
  row.set("temperature", Data.nullOf(Object.class));
  row.set("max_temperature", Data.nullOf(Object.class));
  byte[] bytes = CoderUtils.encodeToByteArray(TableRowJsonCoder.of(), row);
  TableRow newRow = CoderUtils.decodeFromByteArray(TableRowJsonCoder.of(), bytes);
  byte[] newBytes = CoderUtils.encodeToByteArray(TableRowJsonCoder.of(), newRow);
  Assert.assertArrayEquals(bytes, newBytes);
}
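The round trip works the same way for ordinary cell values. A minimal sketch, assuming a hypothetical roundTrip helper that is not part of the Beam test suite:

import com.google.api.services.bigquery.model.TableRow;
import java.util.Arrays;
import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder;
import org.apache.beam.sdk.util.CoderUtils;

public class TableRowRoundTrip {
  // Hypothetical helper: encode a row with TableRowJsonCoder, then decode it back.
  static TableRow roundTrip(TableRow row) throws CoderException {
    byte[] bytes = CoderUtils.encodeToByteArray(TableRowJsonCoder.of(), row);
    return CoderUtils.decodeFromByteArray(TableRowJsonCoder.of(), bytes);
  }

  public static void main(String[] args) throws CoderException {
    TableRow row = new TableRow().set("temperature", 25.3).set("max_temperature", 31.0);
    TableRow copy = roundTrip(row);
    // As in the test above, stability is checked on the re-encoded bytes.
    byte[] first = CoderUtils.encodeToByteArray(TableRowJsonCoder.of(), row);
    byte[] second = CoderUtils.encodeToByteArray(TableRowJsonCoder.of(), copy);
    System.out.println(Arrays.equals(first, second));
  }
}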
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
In class BigQueryIOTest, method testRuntimeOptionsNotCalledInApplyOutput:
@Test
public void testRuntimeOptionsNotCalledInApplyOutput() {
  RuntimeTestOptions options = PipelineOptionsFactory.as(RuntimeTestOptions.class);
  BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
  bqOptions.setTempLocation("gs://testbucket/testdir");
  Pipeline pipeline = TestPipeline.create(options);
  BigQueryIO.Write<TableRow> write =
      BigQueryIO.writeTableRows()
          .to(options.getOutputTable())
          .withSchema(NestedValueProvider.of(
              options.getOutputSchema(), new JsonSchemaToTableSchema()))
          .withoutValidation();
  pipeline.apply(Create.empty(TableRowJsonCoder.of())).apply(write);
  // Test that this doesn't throw.
  DisplayData.from(write);
}
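The test depends on a RuntimeTestOptions interface that is not shown in this excerpt. A plausible sketch, assuming both values are exposed as ValueProviders so they can be left unset until runtime:

import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.ValueProvider;

// Sketch: these ValueProviders have no default, so calling get() before a
// runtime value is supplied would throw. The test asserts that applying the
// write and rendering its DisplayData never triggers such a call.
public interface RuntimeTestOptions extends PipelineOptions {
  ValueProvider<String> getOutputTable();

  void setOutputTable(ValueProvider<String> value);

  ValueProvider<String> getOutputSchema();

  void setOutputSchema(ValueProvider<String> value);
}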
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
In class BigQueryUtilTest, method testRead:
@Test
public void testRead() throws IOException, InterruptedException {
  onTableGet(basicTableSchema());
  TableDataList dataList = rawDataList(rawRow("Arthur", 42));
  onTableList(dataList);
  try (BigQueryTableRowIterator iterator =
      BigQueryTableRowIterator.fromTable(
          BigQueryHelpers.parseTableSpec("project:dataset.table"), mockClient)) {
    iterator.open();
    Assert.assertTrue(iterator.advance());
    TableRow row = iterator.getCurrent();
    Assert.assertTrue(row.containsKey("name"));
    Assert.assertTrue(row.containsKey("answer"));
    Assert.assertEquals("Arthur", row.get("name"));
    Assert.assertEquals(42, row.get("answer"));
    Assert.assertFalse(iterator.advance());
    verifyTableGet();
    verifyTabledataList();
  }
}
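basicTableSchema(), rawRow(), and rawDataList() are helpers defined elsewhere in BigQueryUtilTest. A hedged sketch of what they plausibly build, consistent with the assertions above ("name" as a STRING, "answer" as an INTEGER):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.google.api.services.bigquery.model.TableCell;
import com.google.api.services.bigquery.model.TableDataList;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;

// Two-column schema matching the assertions in testRead.
static TableSchema basicTableSchema() {
  return new TableSchema().setFields(Arrays.asList(
      new TableFieldSchema().setName("name").setType("STRING"),
      new TableFieldSchema().setName("answer").setType("INTEGER")));
}

// Wraps raw cell values into a TableRow, one TableCell per value in schema order.
static TableRow rawRow(Object... values) {
  List<TableCell> cells = new ArrayList<>();
  for (Object value : values) {
    cells.add(new TableCell().setV(value));
  }
  return new TableRow().setF(cells);
}

static TableDataList rawDataList(TableRow... rows) {
  return new TableDataList().setRows(Arrays.asList(rows));
}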
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
In class FakeJobService, method runCopyJob:
private JobStatus runCopyJob(JobConfigurationTableCopy copy)
    throws InterruptedException, IOException {
  List<TableReference> sources = copy.getSourceTables();
  TableReference destination = copy.getDestinationTable();
  WriteDisposition writeDisposition = WriteDisposition.valueOf(copy.getWriteDisposition());
  CreateDisposition createDisposition = CreateDisposition.valueOf(copy.getCreateDisposition());
  Table existingTable = datasetService.getTable(destination);
  if (!validateDispositions(existingTable, createDisposition, writeDisposition)) {
    return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
  }
  List<TableRow> allRows = Lists.newArrayList();
  for (TableReference source : sources) {
    allRows.addAll(
        datasetService.getAllRows(
            source.getProjectId(), source.getDatasetId(), source.getTableId()));
  }
  datasetService.createTable(new Table().setTableReference(destination));
  datasetService.insertAll(destination, allRows, null);
  return new JobStatus().setState("DONE");
}
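validateDispositions(...) is defined elsewhere in FakeJobService. A simplified sketch of the check it plausibly performs, treating any existing destination table as non-empty for WRITE_EMPTY purposes:

import com.google.api.services.bigquery.model.Table;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;

// Simplified sketch: a copy may not create a missing table under CREATE_NEVER,
// and may not write into an existing table under WRITE_EMPTY (any existing
// table is treated as non-empty here). WRITE_TRUNCATE and WRITE_APPEND succeed.
private boolean validateDispositions(
    Table table, CreateDisposition createDisposition, WriteDisposition writeDisposition) {
  if (table == null) {
    return createDisposition != CreateDisposition.CREATE_NEVER;
  }
  return writeDisposition != WriteDisposition.WRITE_EMPTY;
}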
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
In class FakeJobService, method runExtractJob:
private JobStatus runExtractJob(Job job, JobConfigurationExtract extract)
    throws InterruptedException, IOException {
  TableReference sourceTable = extract.getSourceTable();
  List<TableRow> rows =
      datasetService.getAllRows(
          sourceTable.getProjectId(), sourceTable.getDatasetId(), sourceTable.getTableId());
  TableSchema schema = datasetService.getTable(sourceTable).getSchema();
  List<Long> destinationFileCounts = Lists.newArrayList();
  for (String destination : extract.getDestinationUris()) {
    destinationFileCounts.add(writeRows(sourceTable.getTableId(), rows, schema, destination));
  }
  job.setStatistics(
      new JobStatistics().setExtract(
          new JobStatistics4().setDestinationUriFileCounts(destinationFileCounts)));
  return new JobStatus().setState("DONE");
}
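For context, a caller can recover the per-destination file counts the fake records from the finished job's statistics. extractFileCounts below is a hypothetical accessor, not part of FakeJobService:

import com.google.api.services.bigquery.model.Job;
import java.util.List;

// The counts come back in the same order as extract.getDestinationUris().
static List<Long> extractFileCounts(Job job) {
  return job.getStatistics().getExtract().getDestinationUriFileCounts();
}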