Use of com.google.api.services.bigquery.model.TableFieldSchema in project beam by apache.
Class BigQueryUtilsTest, method testToTableSchema_row.
@Test
public void testToTableSchema_row() {
  TableSchema schema = toTableSchema(ROW_TYPE);
  assertThat(schema.getFields().size(), equalTo(1));
  TableFieldSchema field = schema.getFields().get(0);
  assertThat(field.getName(), equalTo("row"));
  assertThat(field.getType(), equalTo(StandardSQLTypeName.STRUCT.toString()));
  assertThat(field.getMode(), nullValue());
  assertThat(
      field.getFields(),
      containsInAnyOrder(
          ID, VALUE, NAME,
          TIMESTAMP_VARIANT1, TIMESTAMP_VARIANT2, TIMESTAMP_VARIANT3, TIMESTAMP_VARIANT4,
          DATETIME, DATETIME_0MS, DATETIME_0S_NS, DATETIME_0S_0NS,
          DATE, TIME, TIME_0MS, TIME_0S_NS, TIME_0S_0NS,
          VALID, BINARY, NUMERIC, BOOLEAN, LONG, DOUBLE));
}
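ROW_TYPE and the field constants asserted above (ID, VALUE, NAME, and so on) are defined elsewhere in BigQueryUtilsTest. For orientation, here is a minimal sketch of the same conversion using illustrative field names rather than the test's constants; it only assumes a nested org.apache.beam.sdk.schemas.Schema and BigQueryUtils.toTableSchema.

// Hypothetical nested row type; the real ROW_TYPE in the test carries many more fields.
Schema inner = Schema.builder().addInt64Field("id").addStringField("value").build();
Schema rowType = Schema.builder().addRowField("row", inner).build();

// BigQueryUtils.toTableSchema maps the Beam row field to a BigQuery STRUCT field.
TableSchema converted = BigQueryUtils.toTableSchema(rowType);
TableFieldSchema struct = converted.getFields().get(0);
// struct.getType() is "STRUCT"; struct.getFields() holds the converted inner fields.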
Use of com.google.api.services.bigquery.model.TableFieldSchema in project beam by apache.
Class NexmarkLauncher, method sinkResultsToBigQuery.
/** Send {@code formattedResults} to BigQuery. */
private void sinkResultsToBigQuery(
    PCollection<String> formattedResults, long now, String version) {
  String tableSpec = NexmarkUtils.tableSpec(options, queryName, now, version);
  TableSchema tableSchema =
      new TableSchema()
          .setFields(
              ImmutableList.of(
                  new TableFieldSchema().setName("result").setType("STRING"),
                  new TableFieldSchema()
                      .setName("records")
                      .setMode("REPEATED")
                      .setType("RECORD")
                      .setFields(
                          ImmutableList.of(
                              new TableFieldSchema().setName("index").setType("INTEGER"),
                              new TableFieldSchema().setName("value").setType("STRING")))));
  NexmarkUtils.console("Writing results to BigQuery table %s", tableSpec);
  BigQueryIO.Write io =
      BigQueryIO.write()
          .to(tableSpec)
          .withSchema(tableSchema)
          .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
          .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND);
  formattedResults
      .apply(queryName + ".StringToTableRow", ParDo.of(new StringToTableRow()))
      .apply(queryName + ".WriteBigQueryResults", io);
}
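The StringToTableRow transform applied above is defined elsewhere in NexmarkLauncher. A minimal sketch, assuming only that each output row must match the schema declared above (a "result" string plus repeated "records" with "index" and "value"), could look like the following; the real DoFn may populate the fields differently.

// Hypothetical sketch of a DoFn producing rows that match the declared schema.
private static class StringToTableRow extends DoFn<String, TableRow> {
  @ProcessElement
  public void processElement(ProcessContext c) {
    List<TableRow> records = new ArrayList<>();
    records.add(new TableRow().set("index", 0).set("value", c.element()));
    c.output(new TableRow().set("result", c.element()).set("records", records));
  }
}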
Use of com.google.api.services.bigquery.model.TableFieldSchema in project beam by apache.
Class BigQueryIOWriteTest, method testWriteAvroWithCustomWriter.
@Test
public void testWriteAvroWithCustomWriter() throws Exception {
  if (useStorageApi || useStreaming) {
    return;
  }
  SerializableFunction<AvroWriteRequest<InputRecord>, GenericRecord> formatFunction =
      r -> {
        GenericRecord rec = new GenericData.Record(r.getSchema());
        InputRecord i = r.getElement();
        rec.put("strVal", i.strVal());
        rec.put("longVal", i.longVal());
        rec.put("doubleVal", i.doubleVal());
        // BigQuery TIMESTAMP values are carried in Avro as microseconds since the epoch.
        rec.put("instantVal", i.instantVal().getMillis() * 1000);
        return rec;
      };
  SerializableFunction<org.apache.avro.Schema, DatumWriter<GenericRecord>> customWriterFactory =
      s ->
          new GenericDatumWriter<GenericRecord>() {
            @Override
            protected void writeString(org.apache.avro.Schema schema, Object datum, Encoder out)
                throws IOException {
              super.writeString(schema, datum.toString() + "_custom", out);
            }
          };
  p.apply(
          Create.of(
                  InputRecord.create("test", 1, 1.0, Instant.parse("2019-01-01T00:00:00Z")),
                  InputRecord.create("test2", 2, 2.0, Instant.parse("2019-02-01T00:00:00Z")))
              .withCoder(INPUT_RECORD_CODER))
      .apply(
          BigQueryIO.<InputRecord>write()
              .to("dataset-id.table-id")
              .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
              .withSchema(
                  new TableSchema()
                      .setFields(
                          ImmutableList.of(
                              new TableFieldSchema().setName("strVal").setType("STRING"),
                              new TableFieldSchema().setName("longVal").setType("INTEGER"),
                              new TableFieldSchema().setName("doubleVal").setType("FLOAT"),
                              new TableFieldSchema().setName("instantVal").setType("TIMESTAMP"))))
              .withTestServices(fakeBqServices)
              .withAvroWriter(formatFunction, customWriterFactory)
              .withoutValidation());
  p.run();
  assertThat(
      fakeDatasetService.getAllRows("project-id", "dataset-id", "table-id"),
      containsInAnyOrder(
          new TableRow().set("strVal", "test_custom").set("longVal", "1")
              .set("doubleVal", 1.0D).set("instantVal", "2019-01-01 00:00:00 UTC"),
          new TableRow().set("strVal", "test2_custom").set("longVal", "2")
              .set("doubleVal", 2.0D).set("instantVal", "2019-02-01 00:00:00 UTC")));
}
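withAvroWriter pairs the format function with a custom DatumWriter factory. For comparison, here is a minimal sketch of the more common variant, withAvroFormatFunction, which keeps the default GenericDatumWriter; the input collection, tableSchema, and single strVal field are illustrative assumptions, not part of the test.

// Hypothetical pipeline fragment: only an Avro format function is supplied,
// so BigQueryIO falls back to its default writer.
input.apply(
    BigQueryIO.<InputRecord>write()
        .to("dataset-id.table-id")
        .withSchema(tableSchema)
        .withAvroFormatFunction(
            request -> {
              GenericRecord rec = new GenericData.Record(request.getSchema());
              rec.put("strVal", request.getElement().strVal());
              return rec;
            })
        .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));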
Use of com.google.api.services.bigquery.model.TableFieldSchema in project beam by apache.
Class BigQueryIOWriteTest, method testWriteToTableDecorator.
@Test
public void testWriteToTableDecorator() throws Exception {
  TableRow row1 = new TableRow().set("name", "a").set("number", "1");
  TableRow row2 = new TableRow().set("name", "b").set("number", "2");
  // withMethod overrides the pipeline option, so we need to explicitly request
  // STORAGE_API_WRITES.
  BigQueryIO.Write.Method method =
      useStorageApi
          ? (useStorageApiApproximate ? Method.STORAGE_API_AT_LEAST_ONCE : Method.STORAGE_WRITE_API)
          : Method.STREAMING_INSERTS;
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(
                  new TableFieldSchema().setName("name").setType("STRING"),
                  new TableFieldSchema().setName("number").setType("INTEGER")));
  p.apply(Create.of(row1, row2))
      .apply(
          BigQueryIO.writeTableRows()
              .to("project-id:dataset-id.table-id$20171127")
              .withTestServices(fakeBqServices)
              .withMethod(method)
              .withSchema(schema)
              .withoutValidation());
  p.run();
}
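The $20171127 suffix in the table spec is a partition decorator that routes the rows into the 2017-11-27 partition of a day-partitioned table. As a hedged alternative sketch (schema reused from the test, table spec illustrative), a write can instead declare time partitioning via com.google.api.services.bigquery.model.TimePartitioning and omit the decorator.

// Hypothetical sketch: declare day-based time partitioning on the target table
// instead of hard-coding a "$YYYYMMDD" partition decorator in the table spec.
BigQueryIO.Write<TableRow> write =
    BigQueryIO.writeTableRows()
        .to("project-id:dataset-id.table-id")
        .withSchema(schema)
        .withTimePartitioning(new TimePartitioning().setType("DAY"))
        .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED);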
Use of com.google.api.services.bigquery.model.TableFieldSchema in project beam by apache.
Class BigQueryIOWriteTest, method testWrongErrorConfigs.
@Test
public void testWrongErrorConfigs() {
  if (useStorageApi) {
    return;
  }
  p.enableAutoRunIfMissing(true);
  TableRow row1 = new TableRow().set("name", "a").set("number", "1");
  BigQueryIO.Write<TableRow> bqIoWrite =
      BigQueryIO.writeTableRows()
          .to("project-id:dataset-id.table-id")
          .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
          .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS)
          .withSchema(
              new TableSchema()
                  .setFields(
                      ImmutableList.of(
                          new TableFieldSchema().setName("name").setType("STRING"),
                          new TableFieldSchema().setName("number").setType("INTEGER"))))
          .withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors())
          .withTestServices(fakeBqServices)
          .withoutValidation();
  try {
    p.apply("Create1", Create.<TableRow>of(row1))
        .apply("Write 1", bqIoWrite)
        .getFailedInsertsWithErr();
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(
        e.getMessage(),
        is("Cannot use getFailedInsertsWithErr as this WriteResult "
            + "does not use extended errors. Use getFailedInserts instead"));
  }
  try {
    p.apply("Create2", Create.<TableRow>of(row1))
        .apply("Write2", bqIoWrite.withExtendedErrorInfo())
        .getFailedInserts();
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(
        e.getMessage(),
        is("Cannot use getFailedInserts as this WriteResult "
            + "uses extended errors information. Use getFailedInsertsWithErr instead"));
  }
}
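For contrast, here is a minimal sketch of the two pairings this test implies are valid; rows stands in for an assumed PCollection<TableRow>, and bqIoWrite is the write defined above.

// Plain failed inserts: no withExtendedErrorInfo, so read them via getFailedInserts().
WriteResult plain = rows.apply("WritePlain", bqIoWrite);
PCollection<TableRow> failedRows = plain.getFailedInserts();

// Extended error info: opt in first, then read them via getFailedInsertsWithErr().
WriteResult extended = rows.apply("WriteExtended", bqIoWrite.withExtendedErrorInfo());
PCollection<BigQueryInsertError> failedWithErrors = extended.getFailedInsertsWithErr();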