Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
From the class FakeJobService, the method writeRows:
private long writeRows(String tableId, List<TableRow> rows, TableSchema schema, String destinationPattern)
    throws IOException {
  Schema avroSchema = BigQueryAvroUtils.toGenericAvroSchema(tableId, schema.getFields());
  List<TableRow> rowsToWrite = Lists.newArrayList();
  int shard = 0;
  // Write the rows out in shards of five rows each, using the Avro schema derived
  // from the BigQuery table schema.
  for (int i = 0; i < rows.size(); ++i) {
    rowsToWrite.add(rows.get(i));
    if (rowsToWrite.size() == 5) {
      writeRowsHelper(rowsToWrite, avroSchema, destinationPattern, shard++);
      rowsToWrite.clear();
    }
  }
  // Flush any remaining rows into a final shard and return the shard count.
  if (!rowsToWrite.isEmpty()) {
    writeRowsHelper(rowsToWrite, avroSchema, destinationPattern, shard++);
  }
  return shard;
}
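The schema and rows passed to this method are the standard BigQuery model classes. A minimal sketch of how a caller inside the same class might build those arguments; the field name, values, and table id below are illustrative, not taken from the project:

  // Illustrative only: a schema with one STRING field and two matching rows.
  TableSchema schema = new TableSchema().setFields(
      ImmutableList.of(new TableFieldSchema().setName("word").setType("STRING")));
  List<TableRow> rows = ImmutableList.of(
      new TableRow().set("word", "apache"),
      new TableRow().set("word", "beam"));
  // destinationPattern: a file path pattern supplied by the surrounding code (not shown here).
  long shardsWritten = writeRows("table_id", rows, schema, destinationPattern);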
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
From the class BigQueryUtilTest, the method testReadWithTime:
@Test
public void testReadWithTime() throws IOException, InterruptedException {
  // The BigQuery JSON API returns timestamps in the following format: floating-point
  // seconds since the epoch (UTC) with microsecond precision. Test that we faithfully
  // preserve a set of known values.
  TableDataList input =
      rawDataList(
          rawRow("1.430397296789E9"),
          rawRow("1.45206228E9"),
          rawRow("1.452062291E9"),
          rawRow("1.4520622911E9"),
          rawRow("1.45206229112E9"),
          rawRow("1.452062291123E9"),
          rawRow("1.4520622911234E9"),
          rawRow("1.45206229112345E9"),
          rawRow("1.452062291123456E9"));
  onTableGet(basicTableSchemaWithTime());
  onTableList(input);
  // Known results, verified against BigQuery's export-to-JSON-on-GCS API.
  List<String> expected =
      ImmutableList.of(
          "2015-04-30 12:34:56.789 UTC",
          "2016-01-06 06:38:00 UTC",
          "2016-01-06 06:38:11 UTC",
          "2016-01-06 06:38:11.1 UTC",
          "2016-01-06 06:38:11.12 UTC",
          "2016-01-06 06:38:11.123 UTC",
          "2016-01-06 06:38:11.1234 UTC",
          "2016-01-06 06:38:11.12345 UTC",
          "2016-01-06 06:38:11.123456 UTC");
  // Download the rows and verify the interactions.
  List<TableRow> rows = new ArrayList<>();
  try (BigQueryTableRowIterator iterator =
      BigQueryTableRowIterator.fromTable(
          BigQueryHelpers.parseTableSpec("project:dataset.table"), mockClient)) {
    iterator.open();
    while (iterator.advance()) {
      rows.add(iterator.getCurrent());
    }
  }
  verifyTableGet();
  verifyTabledataList();
  // Verify that each timestamp was converted as desired.
  assertEquals("Expected input and output rows to have the same size",
      expected.size(), rows.size());
  for (int i = 0; i < expected.size(); ++i) {
    assertEquals("i=" + i, expected.get(i), rows.get(i).get("time"));
  }
}
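The rawRow and rawDataList helpers are defined elsewhere in the test class. A hypothetical sketch of what they plausibly look like, built only from the BigQuery model classes (TableCell, TableRow, TableDataList), since the raw tabledata.list response carries each value as a cell:

  // Hypothetical reconstruction of the test helpers: each value becomes one cell
  // of a TableRow, and the rows are wrapped in a TableDataList.
  private static TableRow rawRow(Object... args) {
    List<TableCell> cells = new ArrayList<>();
    for (Object a : args) {
      cells.add(new TableCell().setV(a));
    }
    return new TableRow().setF(cells);
  }

  private static TableDataList rawDataList(TableRow... rows) {
    return new TableDataList().setRows(Arrays.asList(rows));
  }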
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
From the class BigQueryTornadoesTest, the method testNoTornadoes:
@Test
public void testNoTornadoes() throws Exception {
  // A row whose tornado field is false should produce no output.
  TableRow row = new TableRow().set("month", 6).set("tornado", false);
  DoFnTester<TableRow, Integer> extractTornadoesFn = DoFnTester.of(new ExtractTornadoesFn());
  Assert.assertTrue(extractTornadoesFn.processBundle(row).isEmpty());
}
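For contrast, a hedged sketch of the positive case, assuming ExtractTornadoesFn emits the month whenever the row's tornado field is true; the assertion style mirrors the test above:

  // Assumption: a row with tornado=true should yield its month.
  TableRow row = new TableRow().set("month", 6).set("tornado", true);
  DoFnTester<TableRow, Integer> fn = DoFnTester.of(new ExtractTornadoesFn());
  Assert.assertEquals(ImmutableList.of(6), fn.processBundle(row));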
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
From the class JoinExamplesTest, the method testExtractCountryInfoFn:
@Test
public void testExtractCountryInfoFn() throws Exception {
  DoFnTester<TableRow, KV<String, String>> extractCountryInfoFn =
      DoFnTester.of(new ExtractCountryInfoFn());
  List<KV<String, String>> results = extractCountryInfoFn.processBundle(CCS);
  Assert.assertThat(results, CoreMatchers.hasItem(kv3));
  Assert.assertThat(results, CoreMatchers.hasItem(kv4));
}
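The CCS fixture and the kv3/kv4 expectations are defined elsewhere in the test class. A hypothetical sketch of their shape, assuming ExtractCountryInfoFn maps a country-info TableRow (country code plus human-readable name) to a key-value pair; the field names and values below are assumptions, not taken from the project:

  // Hypothetical fixture shape for a country-info row and its expected KV output.
  TableRow cc1 = new TableRow().set("FIPSCC", "VM").set("HumanName", "Vietnam");
  TableRow cc2 = new TableRow().set("FIPSCC", "BE").set("HumanName", "Belgium");
  List<TableRow> CCS = ImmutableList.of(cc1, cc2);
  KV<String, String> kv3 = KV.of("VM", "Vietnam");
  KV<String, String> kv4 = KV.of("BE", "Belgium");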
Use of com.google.api.services.bigquery.model.TableRow in project beam by apache.
From the class TriggerExampleTest, the method testTotalFlow:
@Test
@Category(ValidatesRunner.class)
public void testTotalFlow() {
  PCollection<KV<String, Integer>> flow =
      pipeline
          .apply(Create.timestamped(TIME_STAMPED_INPUT))
          .apply(ParDo.of(new ExtractFlowInfo()));
  PCollection<TableRow> totalFlow =
      flow.apply(Window.<KV<String, Integer>>into(FixedWindows.of(Duration.standardMinutes(1))))
          .apply(new TotalFlow("default"));
  PCollection<String> results = totalFlow.apply(ParDo.of(new FormatResults()));
  PAssert.that(results)
      .containsInAnyOrder(canonicalFormat(OUT_ROW_1), canonicalFormat(OUT_ROW_2));
  pipeline.run().waitUntilFinish();
}
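TotalFlow and FormatResults are transforms from the example under test and are not shown here. A simplified sketch, not the project's implementation, of what a transform that sums flow values per freeway within each window and emits one TableRow per key could look like; the class name and the "freeway" and "total_flow" field names are illustrative assumptions:

  // Simplified sketch: sum flow values per key in each window, emit one TableRow per key.
  static class SumFlowPerKey
      extends PTransform<PCollection<KV<String, Integer>>, PCollection<TableRow>> {
    @Override
    public PCollection<TableRow> expand(PCollection<KV<String, Integer>> flowInfo) {
      return flowInfo
          .apply(Sum.integersPerKey())
          .apply(ParDo.of(new DoFn<KV<String, Integer>, TableRow>() {
            @ProcessElement
            public void processElement(ProcessContext c) {
              c.output(new TableRow()
                  .set("freeway", c.element().getKey())
                  .set("total_flow", c.element().getValue()));
            }
          }));
    }
  }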