Use of org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv in project beam by apache.
The class BigtableTableFlatTest, method testSelectFlatKeyRegexQuery.
@Test
public void testSelectFlatKeyRegexQuery() {
  final String tableId = "regexTable";
  createReadTable(tableId, emulatorWrapper);

  BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new BigtableTableProvider());
  sqlEnv.executeDdl(createFlatTableString(tableId, location(tableId)));

  // The key predicate uses a regex-style pattern; the assertion below
  // verifies that only the row for KEY1 matches.
  String query = "SELECT key FROM regexTable WHERE key LIKE '^key[0134]{1}'";
  PCollection<Row> queryOutput =
      BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(query));

  assertThat(queryOutput.getSchema(), equalTo(filterSchema()));
  PAssert.that(queryOutput).containsInAnyOrder(filterRow(KEY1));
  readPipeline.run().waitUntilFinish();
}
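filterSchema() and filterRow(...) are helpers defined elsewhere in BigtableTableFlatTest and not shown on this page. Since the query projects only the row key, a minimal sketch of what they might look like (hypothetical; the field name and row contents are inferred from the query and assertion above):

  // Requires: org.apache.beam.sdk.schemas.Schema, org.apache.beam.sdk.values.Row
  private static Schema filterSchema() {
    // Single projected column: the Bigtable row key as a string.
    return Schema.builder().addStringField("key").build();
  }

  private static Row filterRow(String key) {
    return Row.withSchema(filterSchema()).addValues(key).build();
  }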
Use of org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv in project beam by apache.
The class BigtableTableFlatTest, method testSimpleSelectFlat.
@Test
public void testSimpleSelectFlat() {
  final String tableId = "flatTable";
  createReadTable(tableId, emulatorWrapper);

  BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new BigtableTableProvider());
  sqlEnv.executeDdl(createFlatTableString(tableId, location(tableId)));

  String query =
      "SELECT key, boolColumn, longColumn, stringColumn, doubleColumn FROM flatTable";
  PCollection<Row> queryOutput =
      BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(query));

  assertThat(queryOutput.getSchema(), equalTo(TEST_FLAT_SCHEMA));
  PAssert.that(queryOutput).containsInAnyOrder(flatRow(KEY1), flatRow(KEY2));
  readPipeline.run().waitUntilFinish();
}
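TEST_FLAT_SCHEMA is likewise defined elsewhere in the test class. A plausible sketch, inferred purely from the projected column names (the concrete field types and cell values are assumptions):

  // Requires: org.apache.beam.sdk.schemas.Schema, org.apache.beam.sdk.values.Row
  static final Schema TEST_FLAT_SCHEMA =
      Schema.builder()
          .addStringField("key")
          .addBooleanField("boolColumn")
          .addInt64Field("longColumn")
          .addStringField("stringColumn")
          .addDoubleField("doubleColumn")
          .build();

  // Hypothetical companion helper; the real cell values are placeholders here.
  static Row flatRow(String key) {
    return Row.withSchema(TEST_FLAT_SCHEMA).addValues(key, true, 1L, "value", 1.0).build();
  }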
Use of org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv in project beam by apache.
The class DataStoreReadWriteIT, method testReadAllSupportedTypes.
@Test
public void testReadAllSupportedTypes() {
  BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new DataStoreV1TableProvider());
  String projectId = options.getProject();

  final Schema expectedSchema =
      Schema.builder()
          .addNullableField("__key__", VARBINARY)
          .addNullableField("boolean", BOOLEAN)
          .addNullableField("datetime", DATETIME)
          .addNullableField("floatingnumber", DOUBLE)
          .addNullableField("integer", INT64)
          .addNullableField("primitivearray", FieldType.array(STRING))
          .addNullableField("string", STRING)
          .addNullableField("text", STRING)
          .build();

  String createTableStatement =
      "CREATE EXTERNAL TABLE TEST( \n"
          + " `__key__` VARBINARY, \n"
          + " `boolean` BOOLEAN, \n"
          + " `datetime` TIMESTAMP, \n"
          // + " `embeddedentity` ROW(`property1` VARCHAR, `property2` BIGINT), \n"
          + " `floatingnumber` DOUBLE, \n"
          + " `integer` BIGINT, \n"
          + " `primitivearray` ARRAY<VARCHAR>, \n"
          + " `string` VARCHAR, \n"
          + " `text` VARCHAR"
          + ") \n"
          + "TYPE 'datastoreV1' \n"
          + "LOCATION '" + projectId + "/" + KIND_ALL_TYPES + "'";
  sqlEnv.executeDdl(createTableStatement);

  String selectTableStatement = "SELECT * FROM TEST";
  PCollection<Row> output =
      BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(selectTableStatement));
  assertThat(output.getSchema(), equalTo(expectedSchema));

  PipelineResult.State state = readPipeline.run().waitUntilFinish(Duration.standardMinutes(5));
  assertThat(state, equalTo(State.DONE));
}
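Note that BeamSqlEnv sits in an impl package; application code normally goes through the public SqlTransform entry point instead. A minimal sketch of the same read expressed that way (assuming the Beam version in use provides withDdlString and withDefaultTableProvider):

  // Requires: org.apache.beam.sdk.extensions.sql.SqlTransform
  PCollection<Row> rows =
      readPipeline.apply(
          SqlTransform.query("SELECT * FROM TEST")
              // Register the table via the same DDL string as above.
              .withDdlString(createTableStatement)
              .withDefaultTableProvider("datastoreV1", new DataStoreV1TableProvider()));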
Use of org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv in project beam by apache.
The class DataStoreReadWriteIT, method testDataStoreV1SqlWriteRead.
@Test
public void testDataStoreV1SqlWriteRead() {
  BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new DataStoreV1TableProvider());
  String projectId = options.getProject();

  String createTableStatement =
      "CREATE EXTERNAL TABLE TEST( \n"
          + " `__key__` VARBINARY, \n"
          + " `content` VARCHAR \n"
          + ") \n"
          + "TYPE 'datastoreV1' \n"
          + "LOCATION '" + projectId + "/" + KIND + "'";
  sqlEnv.executeDdl(createTableStatement);

  Key ancestor = makeKey(KIND, UUID.randomUUID().toString()).build();
  Key itemKey = makeKey(ancestor, KIND, UUID.randomUUID().toString()).build();
  String insertStatement =
      "INSERT INTO TEST VALUES ( \n"
          + keyToSqlByteString(itemKey) + ", \n"
          + "'2000' \n"
          + ")";
  BeamSqlRelUtils.toPCollection(writePipeline, sqlEnv.parseQuery(insertStatement));
  writePipeline.run().waitUntilFinish();

  String selectTableStatement = "SELECT * FROM TEST";
  PCollection<Row> output =
      BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(selectTableStatement));
  assertThat(output.getSchema(), equalTo(SOURCE_SCHEMA));

  PipelineResult.State state = readPipeline.run().waitUntilFinish(Duration.standardMinutes(5));
  assertThat(state, equalTo(State.DONE));
}
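SOURCE_SCHEMA is presumably the two-column schema matching the DDL above (__key__ as nullable VARBINARY, content as nullable VARCHAR). keyToSqlByteString(...) is another helper not shown on this page; a plausible sketch that renders the protobuf Key as a Calcite VARBINARY literal in X'hex' form (assumed implementation; BaseEncoding is Guava):

  // Requires: com.google.datastore.v1.Key, com.google.common.io.BaseEncoding
  private static String keyToSqlByteString(Key key) {
    // Serialize the Datastore Key proto and embed it as a hex binary literal.
    return "X'" + BaseEncoding.base16().encode(key.toByteArray()) + "'";
  }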
Use of org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv in project beam by apache.
The class KafkaTableProviderIT, method testFakeNested.
@Test
public void testFakeNested() throws InterruptedException {
  Assume.assumeFalse(topic.equals("csv_topic"));
  pipeline.getOptions().as(DirectOptions.class).setBlockOnRun(false);

  String createTableString =
      String.format(
          "CREATE EXTERNAL TABLE kafka_table(\n"
              + "headers ARRAY<ROW<key VARCHAR, `values` ARRAY<VARBINARY>>>,"
              + "payload ROW<"
              + "f_long BIGINT NOT NULL, \n"
              + "f_int INTEGER NOT NULL, \n"
              + "f_string VARCHAR NOT NULL \n"
              + ">"
              + ") \n"
              + "TYPE 'kafka' \n"
              + "LOCATION '%s'\n"
              + "TBLPROPERTIES '%s'",
          buildLocation(), objectsProvider.getKafkaPropertiesString());
  TableProvider tb = new KafkaTableProvider();
  BeamSqlEnv env = BeamSqlEnv.inMemory(tb);
  env.executeDdl(createTableString);

  PCollection<Row> queryOutput =
      BeamSqlRelUtils.toPCollection(
          pipeline,
          env.parseQuery(
              "SELECT kafka_table.payload.f_long, kafka_table.payload.f_int,"
                  + " kafka_table.payload.f_string FROM kafka_table"));
  queryOutput
      .apply(ParDo.of(new FakeKvPair()))
      .setCoder(KvCoder.of(StringUtf8Coder.of(), RowCoder.of(TEST_TABLE_SCHEMA)))
      .apply(
          "waitForSuccess",
          ParDo.of(
              new StreamAssertEqual(
                  ImmutableSet.of(generateRow(0), generateRow(1), generateRow(2)))));
  queryOutput.apply(logRecords(""));

  pipeline.run();
  TimeUnit.SECONDS.sleep(4);
  produceSomeRecords(3);

  // Poll for up to ~18 seconds (200 x 90 ms) for the assertion DoFn to flag success.
  for (int i = 0; i < 200; i++) {
    if (FLAG.getOrDefault(pipeline.getOptions().getOptionsId(), false)) {
      return;
    }
    TimeUnit.MILLISECONDS.sleep(90);
  }
  Assert.fail("Did not observe the expected records within the timeout");
}
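FLAG, StreamAssertEqual, FakeKvPair, and generateRow are helpers from the test class. The cross-thread signaling pattern is worth spelling out: because the direct runner shares a JVM with the test, a DoFn can record what it has seen in static state and flip a flag that the test's polling loop watches, keyed by the pipeline's options id. A sketch under that assumption (names and details hypothetical):

  // Requires: java.util.Map, java.util.Set, java.util.concurrent.ConcurrentHashMap,
  // org.apache.beam.sdk.transforms.DoFn, org.apache.beam.sdk.values.KV,
  // org.apache.beam.sdk.values.Row

  // Shared between the pipeline's worker threads and the test thread.
  static final Map<Long, Boolean> FLAG = new ConcurrentHashMap<>();

  static class StreamAssertEqual extends DoFn<KV<String, Row>, Void> {
    private static final Set<Row> seen = ConcurrentHashMap.newKeySet();
    private final Set<Row> expected;

    StreamAssertEqual(Set<Row> expected) {
      this.expected = expected;
    }

    @ProcessElement
    public void processElement(ProcessContext c) {
      seen.add(c.element().getValue());
      if (seen.containsAll(expected)) {
        // Signal success to the polling loop in the test method.
        FLAG.put(c.getPipelineOptions().getOptionsId(), true);
      }
    }
  }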