Use of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in project beam by apache.
From the class ZetaSqlDialectSpecTest, method testUNNESTExpression.
@Test
@Ignore("[BEAM-9515] ArrayScanToUncollectConverter Unnest does not support sub-queries")
public void testUNNESTExpression() {
  String sql = "SELECT * FROM UNNEST(ARRAY(SELECT Value FROM KeyValue));";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addStringField("str_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("KeyValue234").build(),
          Row.withSchema(schema).addValues("KeyValue235").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
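The case above is ignored only because of the sub-query inside ARRAY(...). A minimal sketch of the same planner-to-PCollection flow over an array literal, reusing the config, pipeline, and timeout fields from the surrounding snippets (the test name and the output column name str_field are assumptions), could look like:
@Test
public void testUNNESTArrayLiteral() {
  // Hypothetical companion test: UNNEST over an array literal avoids the
  // unsupported sub-query path exercised above.
  String sql = "SELECT * FROM UNNEST(ARRAY<STRING>['foo', 'bar']);";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  // Assumed output schema: a single STRING column named "str_field".
  Schema schema = Schema.builder().addStringField("str_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("foo").build(),
          Row.withSchema(schema).addValues("bar").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}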
Use of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in project beam by apache.
From the class ZetaSqlDialectSpecTest, method testSubstrWithLargeValueExpectException.
@Test
public void testSubstrWithLargeValueExpectException() {
  String sql = "SELECT substr(@p0, @p1, @p2)";
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue("abc"),
          "p1", Value.createInt64Value(Integer.MAX_VALUE + 1L),
          "p2", Value.createInt64Value(Integer.MIN_VALUE - 1L));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  thrown.expect(RuntimeException.class);
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
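For contrast, a minimal sketch of the same parameterized flow with in-range arguments, reusing the fixtures from the snippets above (the test name, the output column name field1, and the expected value "bcd" from ZetaSQL's 1-based SUBSTR are assumptions):
@Test
public void testSubstrWithValidArguments() {
  String sql = "SELECT substr(@p0, @p1, @p2)";
  // In-range arguments: SUBSTR("abcde", 2, 3) yields "bcd" under 1-based indexing.
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue("abcde"),
          "p1", Value.createInt64Value(2L),
          "p2", Value.createInt64Value(3L));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  // Assumed output schema: a single STRING column named "field1".
  Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("bcd").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}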
Use of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in project beam by apache.
From the class ZetaSqlDialectSpecTest, method testArrayStructLiteral.
@Test
public void testArrayStructLiteral() {
  String sql = "SELECT ARRAY<STRUCT<INT64, INT64>>[(11, 12)];";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema innerSchema =
      Schema.of(Field.of("s", FieldType.INT64), Field.of("i", FieldType.INT64));
  final Schema schema =
      Schema.of(Field.of("field1", FieldType.array(FieldType.row(innerSchema))));
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema)
              .addValue(ImmutableList.of(Row.withSchema(innerSchema).addValues(11L, 12L).build()))
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
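The same pattern with a scalar element type is a useful reference point. A minimal sketch, assuming the test name is new and that the output column follows the same field1 naming convention as above:
@Test
public void testArrayLiteral() {
  String sql = "SELECT ARRAY<INT64>[1, 2, 3];";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  // ARRAY<INT64> is expected to map to a Beam array-of-INT64 field on the output row.
  Schema schema = Schema.of(Field.of("field1", FieldType.array(FieldType.INT64)));
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValue(ImmutableList.of(1L, 2L, 3L)).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}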
Use of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in project beam by apache.
From the class ZetaSqlDialectSpecTest, method execute.
private PCollection<Row> execute(String sql, QueryParameters params) {
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  return BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
}
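Hypothetical call sites for this helper, assuming QueryParameters here is QueryPlanner.QueryParameters and that its ofNamed/ofNone factory methods are available in this version of Beam:
// Named parameters, mirroring the substr test above (assumed factory method).
PCollection<Row> withParams =
    execute(
        "SELECT @p0 AS c",
        QueryParameters.ofNamed(ImmutableMap.of("p0", Value.createInt64Value(42L))));
// No parameters at all (assumed factory method).
PCollection<Row> withoutParams = execute("SELECT 1 AS c", QueryParameters.ofNone());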
Use of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in project beam by apache.
From the class ZetaSqlDialectSpecTest, method testZetaSQLFullOuterJoinFalse.
@Test
public void testZetaSQLFullOuterJoinFalse() {
  String sql = "SELECT * FROM KeyValue AS t1 FULL JOIN BigTable AS t2 ON false";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  thrown.expect(UnsupportedOperationException.class);
  BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
}
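For comparison, a minimal sketch of a full outer join built with an equi-join condition instead of a constant; the test name and the column names Key and RowKey on the KeyValue and BigTable test tables are assumptions:
@Test
public void testZetaSQLFullOuterJoinOnEquality() {
  // An equality condition keeps the join inside what the Beam SQL join
  // implementation supports, unlike the constant-false condition rejected above.
  String sql = "SELECT * FROM KeyValue AS t1 FULL JOIN BigTable AS t2 ON t1.Key = t2.RowKey";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}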