Use of org.apache.beam.sdk.extensions.sql.meta.BeamSqlTable in the apache/beam project.
From class PubsubLiteTableProviderTest, method validTopicTables:
@Test
public void validTopicTables() {
  BeamSqlTable basic =
      makeTable(FULL_WRITE_SCHEMA, example(TopicPath.class).toString(), ImmutableMap.of());
  assertTrue(basic instanceof PubsubLiteTopicTable);

  BeamSqlTable row =
      makeTable(
          Schema.builder()
              .addRowField(RowHandler.PAYLOAD_FIELD, Schema.builder().addStringField("abc").build())
              .build(),
          example(TopicPath.class).toString(),
          // Defaults to json
          ImmutableMap.of("format", "json"));
  assertTrue(row instanceof PubsubLiteTopicTable);

  BeamSqlTable dlq =
      makeTable(
          Schema.builder()
              .addRowField(RowHandler.PAYLOAD_FIELD, Schema.builder().addStringField("abc").build())
              .build(),
          example(TopicPath.class).toString(),
          // Defaults to json
          ImmutableMap.of("deadLetterQueue", "pubsub:projects/abc/topics/def"));
  assertTrue(dlq instanceof PubsubLiteTopicTable);
}
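The makeTable helper is local to PubsubLiteTableProviderTest and is not shown on this page. Below is a minimal sketch of what it presumably does, assuming it wraps a Table definition in PubsubLiteTableProvider.buildBeamSqlTable; the table name, the "pubsublite" type string, the use of fastjson JSONObject for the properties, and the package paths are assumptions and may differ by Beam version.

import com.alibaba.fastjson.JSONObject;
import java.util.Map;
import org.apache.beam.sdk.extensions.sql.meta.BeamSqlTable;
import org.apache.beam.sdk.extensions.sql.meta.Table;
import org.apache.beam.sdk.extensions.sql.meta.provider.pubsublite.PubsubLiteTableProvider;
import org.apache.beam.sdk.schemas.Schema;

// Hypothetical reconstruction of the test-local helper used above.
private static BeamSqlTable makeTable(Schema schema, String location, Map<String, Object> properties) {
  Table table =
      Table.builder()
          .name("pubsublite_test")                 // arbitrary test table name (assumption)
          .type("pubsublite")                      // table type resolved by PubsubLiteTableProvider
          .schema(schema)                          // row schema under test
          .location(location)                      // topic or subscription path
          .properties(new JSONObject(properties))  // e.g. "format" or "deadLetterQueue"
          .build();
  return new PubsubLiteTableProvider().buildBeamSqlTable(table);
}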
From class PubsubLiteTableProviderTest, method subscriptionTableCannotWrite:
@Test
@SuppressWarnings("argument.type.incompatible")
public void subscriptionTableCannotWrite() {
  BeamSqlTable basic =
      makeTable(FULL_READ_SCHEMA, example(SubscriptionPath.class).toString(), ImmutableMap.of());
  assertThrows(UnsupportedOperationException.class, () -> basic.buildIOWriter(null));
}
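By contrast, reading from the subscription-backed table goes through buildIOReader. The following is a hypothetical companion check, assuming the same makeTable helper and FULL_READ_SCHEMA fixture plus a TestPipeline field named pipeline; it is not part of the original test class.

@Test
public void subscriptionTableCanRead() {
  // Hypothetical companion check, not part of the original test class.
  BeamSqlTable basic =
      makeTable(FULL_READ_SCHEMA, example(SubscriptionPath.class).toString(), ImmutableMap.of());
  // buildIOReader only expands the read transform here; the pipeline is not run
  // against a real subscription.
  PCollection<Row> rows = basic.buildIOReader(pipeline.begin());
  assertEquals(FULL_READ_SCHEMA, rows.getSchema());
}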
From class BigQueryRowCountIT, method testNonEmptyTable:
@Test
public void testNonEmptyTable() {
  BigQueryTableProvider provider = new BigQueryTableProvider();
  Table table = getTable("testTable", bigQuery.tableSpec());

  pipeline
      .apply(
          Create.of(
                  new TableRow().set("id", 1).set("name", "name1"),
                  new TableRow().set("id", 2).set("name", "name2"),
                  new TableRow().set("id", 3).set("name", "name3"))
              .withCoder(TableRowJsonCoder.of()))
      .apply(
          BigQueryIO.writeTableRows()
              .to(bigQuery.tableSpec())
              .withSchema(
                  new TableSchema()
                      .setFields(
                          ImmutableList.of(
                              new TableFieldSchema().setName("id").setType("INTEGER"),
                              new TableFieldSchema().setName("name").setType("STRING"))))
              .withoutValidation());
  pipeline.run().waitUntilFinish();

  BeamSqlTable sqlTable = provider.buildBeamSqlTable(table);
  BeamTableStatistics size1 = sqlTable.getTableStatistics(TestPipeline.testingPipelineOptions());
  assertNotNull(size1);
  assertEquals(3d, size1.getRowCount(), 0.1);
}
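getTable is likewise a helper local to BigQueryRowCountIT and is not shown here. A plausible sketch follows, assuming it builds a Table definition with the "bigquery" type and a schema matching the rows written above; the real helper's schema and properties handling may differ.

import com.alibaba.fastjson.JSONObject;
import org.apache.beam.sdk.extensions.sql.meta.Table;
import org.apache.beam.sdk.schemas.Schema;

// Hypothetical reconstruction of the test-local helper used above.
private static Table getTable(String name, String location) {
  return Table.builder()
      .name(name)
      .type("bigquery") // table type resolved by BigQueryTableProvider
      .schema(Schema.builder().addInt64Field("id").addStringField("name").build())
      .location(location) // "project:dataset.table" table spec
      .properties(new JSONObject())
      .build();
}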
From class BigQueryRowCountIT, method testFakeTable:
@Test
public void testFakeTable() {
  BigQueryTableProvider provider = new BigQueryTableProvider();
  Table table = getTable("fakeTable", "project:dataset.table");
  BeamSqlTable sqlTable = provider.buildBeamSqlTable(table);
  BeamTableStatistics size = sqlTable.getTableStatistics(TestPipeline.testingPipelineOptions());
  assertTrue(size.isUnknown());
}
From class SchemaIOTableProviderWrapperTest, method testBuildIOReader_withProjectionPushdown:
@Test
public void testBuildIOReader_withProjectionPushdown() {
  TestSchemaIOTableProviderWrapper provider = new TestSchemaIOTableProviderWrapper();
  BeamSqlTable beamSqlTable = provider.buildBeamSqlTable(testTable);

  PCollection<Row> result =
      beamSqlTable.buildIOReader(
          pipeline.begin(),
          new DefaultTableFilter(ImmutableList.of()),
          ImmutableList.of("f_long"));

  Schema outputSchema = Schema.builder().addInt64Field("f_long").build();
  PAssert.that(result)
      .containsInAnyOrder(
          Row.withSchema(outputSchema).addValues(0L).build(),
          Row.withSchema(outputSchema).addValues(1L).build());
  pipeline.run();
}
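For comparison, the single-argument buildIOReader reads the table without any pushdown. The following hypothetical companion test reuses the same provider and testTable fixtures; the schema-equality assertion is an assumption about the wrapper's behavior.

@Test
public void testBuildIOReader_withoutPushdown() {
  // Hypothetical companion to the test above, reusing the same fixtures.
  TestSchemaIOTableProviderWrapper provider = new TestSchemaIOTableProviderWrapper();
  BeamSqlTable beamSqlTable = provider.buildBeamSqlTable(testTable);
  // With no filter and no field list, the read returns rows in the table's full
  // declared schema; passing the field list above is what enables the pushdown.
  PCollection<Row> result = beamSqlTable.buildIOReader(pipeline.begin());
  assertEquals(testTable.getSchema(), result.getSchema());
  pipeline.run();
}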