Example usage of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in the Apache Beam project, from the testUdaf method of the ZetaSqlJavaUdfTest class.
@Test
public void testUdaf() {
  // Register a Java aggregate UDF from the test jar, then use it in a query
  // over the aggregate test table.
  String sql =
      String.format(
          "CREATE AGGREGATE FUNCTION my_sum(f INT64) RETURNS INT64 LANGUAGE java OPTIONS (path='%s'); "
              + "SELECT my_sum(f_int_1) from aggregate_test_table",
          jarPath);

  // Plan the query with the ZetaSQL planner and convert it to a Beam pipeline.
  ZetaSQLQueryPlanner planner = new ZetaSQLQueryPlanner(config);
  BeamRelNode relNode = planner.convertToBeamRel(sql);
  PCollection<Row> result = BeamSqlRelUtils.toPCollection(pipeline, relNode);

  // The UDAF should sum f_int_1 across all rows to 28.
  Schema expectedSchema = Schema.builder().addInt64Field("field1").build();
  Row expectedRow = Row.withSchema(expectedSchema).addValues(28L).build();
  PAssert.that(result).containsInAnyOrder(expectedRow);

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
Example usage of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in the Apache Beam project, from the runUdfTypeTest method of the ZetaSqlJavaUdfTypeTest class.
/**
 * Runs {@code query} through the ZetaSQL planner and asserts that it produces a
 * single row containing {@code result} typed as {@code fieldType}.
 */
private void runUdfTypeTest(String query, Object result, Schema.FieldType fieldType) {
  // Plan and convert the query into a Beam pipeline.
  ZetaSQLQueryPlanner planner = new ZetaSQLQueryPlanner(config);
  BeamRelNode relNode = planner.convertToBeamRel(query);
  PCollection<Row> output = BeamSqlRelUtils.toPCollection(pipeline, relNode);

  // The output schema has a single field named "res" of the expected type.
  Schema expectedSchema = Schema.builder().addField("res", fieldType).build();
  Row expectedRow = Row.withSchema(expectedSchema).addValues(result).build();
  PAssert.that(output).containsInAnyOrder(expectedRow);

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
Example usage of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in the Apache Beam project, from the testUnaryUdf method of the ZetaSqlNativeUdfTest class.
@Test
public void testUnaryUdf() {
  // Define a native SQL UDF and apply it twice: triple(triple(1)) = 9.
  String sql = "CREATE FUNCTION triple(x INT64) AS (3 * x); SELECT triple(triple(1));";

  ZetaSQLQueryPlanner planner = new ZetaSQLQueryPlanner(config);
  BeamRelNode relNode = planner.convertToBeamRel(sql);
  PCollection<Row> output = BeamSqlRelUtils.toPCollection(pipeline, relNode);

  Schema expectedSchema = Schema.builder().addInt64Field("x").build();
  Row expectedRow = Row.withSchema(expectedSchema).addValue(9L).build();
  PAssert.that(output).containsInAnyOrder(expectedRow);

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
Example usage of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in the Apache Beam project, from the testUDTVF method of the ZetaSqlNativeUdfTest class.
@Test
public void testUDTVF() {
  // Define a user-defined table-valued function over KeyValue and select from it.
  // The literal pieces below concatenate to the exact same SQL string as before.
  String sql =
      "CREATE TABLE FUNCTION CustomerRange(MinID INT64, MaxID INT64)\n"
          + " AS\n"
          + " SELECT *\n"
          + " FROM KeyValue\n"
          + " WHERE key >= MinId AND key <= MaxId; \n"
          + " SELECT key FROM CustomerRange(10, 14)";

  ZetaSQLQueryPlanner planner = new ZetaSQLQueryPlanner(config);
  BeamRelNode relNode = planner.convertToBeamRel(sql);
  PCollection<Row> output = BeamSqlRelUtils.toPCollection(pipeline, relNode);

  // Only key 14 falls within [10, 14] in the KeyValue test table.
  Schema expectedSchema = Schema.builder().addInt64Field("field1").build();
  Row expectedRow = Row.withSchema(expectedSchema).addValues(14L).build();
  PAssert.that(output).containsInAnyOrder(expectedRow);

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
Example usage of org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode in the Apache Beam project, from the runHopWindow method of the StreamingSqlTest class.
@Test
public void runHopWindow() throws Exception {
  // Aggregate the streaming test table into hopping windows (30-minute hop,
  // 1-hour window) and count rows per f_long per window.
  String sql =
      "SELECT f_long, COUNT(*) AS `getFieldCount`,"
          + " `window_start`, "
          + " `window_end` "
          + " FROM HOP((select * from streaming_sql_test_table_a), descriptor(f_timestamp), "
          + " \"INTERVAL 30 MINUTE\", \"INTERVAL 1 HOUR\")"
          + " GROUP BY f_long, window_start, window_end";

  ZetaSQLQueryPlanner planner = new ZetaSQLQueryPlanner(config);
  BeamRelNode relNode = planner.convertToBeamRel(sql);
  PCollection<Row> output = BeamSqlRelUtils.toPCollection(pipeline, relNode);

  Schema expectedSchema =
      Schema.builder()
          .addInt64Field("f_long")
          .addInt64Field("size")
          .addDateTimeField("window_start")
          .addDateTimeField("window_end")
          .build();

  // Each input row lands in two overlapping hop windows.
  List<Row> expectedRows =
      Arrays.asList(
          Row.withSchema(expectedSchema)
              .addValues(
                  1000L,
                  3L,
                  parseTimestampWithUTCTimeZone("2017-01-01 00:30:00"),
                  parseTimestampWithUTCTimeZone("2017-01-01 01:30:00"))
              .build(),
          Row.withSchema(expectedSchema)
              .addValues(
                  1000L,
                  3L,
                  parseTimestampWithUTCTimeZone("2017-01-01 01:00:00"),
                  parseTimestampWithUTCTimeZone("2017-01-01 02:00:00"))
              .build(),
          Row.withSchema(expectedSchema)
              .addValues(
                  4000L,
                  1L,
                  parseTimestampWithUTCTimeZone("2017-01-01 01:30:00"),
                  parseTimestampWithUTCTimeZone("2017-01-01 02:30:00"))
              .build(),
          Row.withSchema(expectedSchema)
              .addValues(
                  4000L,
                  1L,
                  parseTimestampWithUTCTimeZone("2017-01-01 02:00:00"),
                  parseTimestampWithUTCTimeZone("2017-01-01 03:00:00"))
              .build());
  PAssert.that(output).containsInAnyOrder(expectedRows);

  pipeline.run().waitUntilFinish();
}
Aggregations