Example usage of com.google.api.services.bigquery.model.TableFieldSchema in the Apache Beam project: class BigQueryIOWriteTest, method testUntriggeredFileLoadsWithTempTables.
@Test
public void testUntriggeredFileLoadsWithTempTables() throws Exception {
  // Temp-table file loads are exercised only on the batch (non-streaming,
  // non-Storage-API) path; skip the other variants of this parameterized suite.
  if (useStorageApi || useStreaming) {
    return;
  }
  // 30 single-column rows; with 1-byte/1-file partition limits below, every
  // row lands in its own load partition, forcing the temp-table copy path.
  List<TableRow> rows = Lists.newArrayList();
  for (int i = 0; i < 30; i++) {
    rows.add(new TableRow().set("number", i));
  }
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(new TableFieldSchema().setName("number").setType("INTEGER")));
  Write<TableRow> write =
      BigQueryIO.writeTableRows()
          .to("project-id:dataset-id.table-id")
          .withSchema(schema)
          .withTestServices(fakeBqServices)
          .withMaxBytesPerPartition(1)
          .withMaxFilesPerPartition(1)
          .withoutValidation();
  p.apply(Create.of(rows)).apply(write);
  p.run();
  // Every row must survive the multi-partition load + temp-table copy.
  assertThat(
      fakeDatasetService.getAllRows("project-id", "dataset-id", "table-id"),
      containsInAnyOrder(Iterables.toArray(rows, TableRow.class)));
}
Example usage of com.google.api.services.bigquery.model.TableFieldSchema in the Apache Beam project: class BigQueryIOWriteTest, method testWrite.
@Test
public void testWrite() throws Exception {
  // Basic happy-path write: three rows, explicit two-column schema,
  // table created on demand against the fake BigQuery services.
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(
                  new TableFieldSchema().setName("name").setType("STRING"),
                  new TableFieldSchema().setName("number").setType("INTEGER")));
  p.apply(
          Create.of(
                  new TableRow().set("name", "a").set("number", 1),
                  new TableRow().set("name", "b").set("number", 2),
                  new TableRow().set("name", "c").set("number", 3))
              .withCoder(TableRowJsonCoder.of()))
      .apply(
          BigQueryIO.writeTableRows()
              .to("dataset-id.table-id")
              .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
              .withSchema(schema)
              .withTestServices(fakeBqServices)
              .withoutValidation());
  p.run();
}
Example usage of com.google.api.services.bigquery.model.TableFieldSchema in the Apache Beam project: class BigQueryIOWriteTest, method testTimePartitioningClustering.
// Shared driver: writes two rows with the given insert method and optionally
// attaches time-partitioning and/or clustering, then verifies the created
// table carries the expected schema, partitioning, and clustering settings.
void testTimePartitioningClustering(
    BigQueryIO.Write.Method insertMethod, boolean enablePartitioning, boolean enableClustering)
    throws Exception {
  TableRow firstRow = new TableRow().set("date", "2018-01-01").set("number", "1");
  TableRow secondRow = new TableRow().set("date", "2018-01-02").set("number", "2");
  TimePartitioning partitioning = new TimePartitioning().setType("DAY").setField("date");
  Clustering clusterSpec = new Clustering().setFields(ImmutableList.of("date"));
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(
                  new TableFieldSchema().setName("date").setType("DATE"),
                  new TableFieldSchema().setName("number").setType("INTEGER")));
  Write<TableRow> write =
      BigQueryIO.writeTableRows()
          .to("project-id:dataset-id.table-id")
          .withTestServices(fakeBqServices)
          .withMethod(insertMethod)
          .withSchema(schema)
          .withoutValidation();
  if (enablePartitioning) {
    write = write.withTimePartitioning(partitioning);
  }
  if (enableClustering) {
    write = write.withClustering(clusterSpec);
  }
  p.apply(Create.of(firstRow, secondRow)).apply(write);
  p.run();
  // Inspect the table the fake service created and check each optional
  // feature only when it was requested.
  Table table =
      fakeDatasetService.getTable(BigQueryHelpers.parseTableSpec("project-id:dataset-id.table-id"));
  assertEquals(schema, table.getSchema());
  if (enablePartitioning) {
    assertEquals(partitioning, table.getTimePartitioning());
  }
  if (enableClustering) {
    assertEquals(clusterSpec, table.getClustering());
  }
}
Example usage of com.google.api.services.bigquery.model.TableFieldSchema in the Apache Beam project: class BigQueryIOWriteTest, method testWriteWithSuccessfulBatchInserts.
@Test
public void testWriteWithSuccessfulBatchInserts() throws Exception {
  // Successful-load reporting is only produced on the batch file-loads path.
  if (useStreaming || useStorageApi) {
    return;
  }
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(
                  new TableFieldSchema().setName("name").setType("STRING"),
                  new TableFieldSchema().setName("number").setType("INTEGER")));
  Write<TableRow> write =
      BigQueryIO.writeTableRows()
          .to("dataset-id.table-id")
          .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
          .withSchema(schema)
          .withTestServices(fakeBqServices)
          .withoutValidation();
  WriteResult result =
      p.apply(
              Create.of(
                      new TableRow().set("name", "a").set("number", 1),
                      new TableRow().set("name", "b").set("number", 2),
                      new TableRow().set("name", "c").set("number", 3))
                  .withCoder(TableRowJsonCoder.of()))
          .apply(write);
  // The write must report exactly one successful load, to the fully-qualified table.
  PAssert.that(result.getSuccessfulTableLoads())
      .containsInAnyOrder(new TableDestination("project-id:dataset-id.table-id", null));
  p.run();
}
Example usage of com.google.api.services.bigquery.model.TableFieldSchema in the Apache Beam project: class BigQueryIOWriteTest, method schemaUpdateOptionsTest.
/**
 * Shared driver verifying that the given {@link SchemaUpdateOption}s are propagated onto the
 * configuration of every BigQuery load job issued by the write transform.
 *
 * @param insertMethod the write method under test
 * @param schemaUpdateOptions the options expected on each load job's configuration
 * @throws Exception if the pipeline fails
 */
void schemaUpdateOptionsTest(
    BigQueryIO.Write.Method insertMethod, Set<SchemaUpdateOption> schemaUpdateOptions)
    throws Exception {
  TableRow row = new TableRow().set("date", "2019-01-01").set("number", "1");
  // BUG FIX: the original chained setName/setType twice on a SINGLE TableFieldSchema, so the
  // second pair of calls overwrote the first and the schema ended up with one field
  // ("number"/INTEGER) instead of two. Build the two fields as separate instances, matching
  // the pattern used by the other tests in this class.
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(
                  new TableFieldSchema().setName("date").setType("DATE"),
                  new TableFieldSchema().setName("number").setType("INTEGER")));
  Write<TableRow> writeTransform =
      BigQueryIO.writeTableRows()
          .to("project-id:dataset-id.table-id")
          .withTestServices(fakeBqServices)
          .withMethod(insertMethod)
          .withSchema(schema)
          .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
          .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
          .withSchemaUpdateOptions(schemaUpdateOptions);
  // Step names include the method so parameterized runs don't collide.
  p.apply("Create" + insertMethod, Create.<TableRow>of(row))
      .apply("Write" + insertMethod, writeTransform);
  p.run();
  // Every load job the fake service recorded must carry the requested options.
  List<String> expectedOptions =
      schemaUpdateOptions.stream().map(SchemaUpdateOption::name).collect(Collectors.toList());
  for (Job job : fakeJobService.getAllJobs()) {
    JobConfigurationLoad configuration = job.getConfiguration().getLoad();
    assertEquals(expectedOptions, configuration.getSchemaUpdateOptions());
  }
}
Aggregations