Use of org.apache.flink.table.api.TableResult in project flink by apache.
From the class FunctionITCase, method testSpecializedFunction.
@Test
public void testSpecializedFunction() {
    final List<Row> sourceData = Arrays.asList(
            Row.of("Bob", 1, new BigDecimal("123.45")), Row.of("Alice", 2, new BigDecimal("123.456")));
    TestCollectionTableFactory.reset();
    TestCollectionTableFactory.initData(sourceData);
    tEnv().executeSql(
            "CREATE TABLE SourceTable(" + " s STRING, " + " i INT," + " d DECIMAL(6, 3)" + ")"
                    + "WITH (" + " 'connector' = 'COLLECTION'" + ")");
    tEnv().createTemporarySystemFunction("TypeOfScalarFunction", TypeOfScalarFunction.class);
    final TableResult result = tEnv().executeSql(
            "SELECT " + " TypeOfScalarFunction('LITERAL'), " + " TypeOfScalarFunction(s), "
                    + " TypeOfScalarFunction(i), " + " TypeOfScalarFunction(d) "
                    + "FROM SourceTable");
    final List<Row> actual = CollectionUtil.iteratorToList(result.collect());
    final List<Row> expected = Arrays.asList(
            Row.of("CHAR(7) NOT NULL", "STRING", "INT", "DECIMAL(6, 3)"),
            Row.of("CHAR(7) NOT NULL", "STRING", "INT", "DECIMAL(6, 3)"));
    assertThat(actual, equalTo(expected));
}
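The test above drives everything through TableResult: executeSql() submits the query and collect() streams back rows that CollectionUtil.iteratorToList() drains. As a standalone illustration of that TableResult pattern outside the test harness, here is a minimal sketch (class name and query are illustrative, not part of the Flink test); it assumes a bounded VALUES source so that collect() terminates.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

import java.util.ArrayList;
import java.util.List;

public class TableResultCollectSketch {
    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Bounded VALUES source, so the job finishes and the iterator gets exhausted.
        TableResult result = tEnv.executeSql(
                "SELECT name, CHAR_LENGTH(name) FROM (VALUES ('Bob'), ('Alice')) AS T(name)");
        // collect() returns a CloseableIterator; closing it releases the underlying job.
        List<Row> rows = new ArrayList<>();
        try (CloseableIterator<Row> it = result.collect()) {
            it.forEachRemaining(rows::add);
        }
        rows.forEach(System.out::println);
    }
}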
Use of org.apache.flink.table.api.TableResult in project flink by apache.
From the class FunctionITCase, method testVarArgScalarFunction.
@Test
public void testVarArgScalarFunction() throws Exception {
    final List<Row> sourceData = Arrays.asList(Row.of("Bob", 1), Row.of("Alice", 2));
    TestCollectionTableFactory.reset();
    TestCollectionTableFactory.initData(sourceData);
    tEnv().executeSql(
            "CREATE TABLE SourceTable(" + " s STRING, " + " i INT" + ")"
                    + "WITH (" + " 'connector' = 'COLLECTION'" + ")");
    tEnv().createTemporarySystemFunction("VarArgScalarFunction", VarArgScalarFunction.class);
    final TableResult result = tEnv().executeSql(
            "SELECT " + " VarArgScalarFunction(), " + " VarArgScalarFunction(i), "
                    + " VarArgScalarFunction(i, i), " + " VarArgScalarFunction(s), "
                    + " VarArgScalarFunction(s, i) " + "FROM SourceTable");
    final List<Row> actual = CollectionUtil.iteratorToList(result.collect());
    final List<Row> expected = Arrays.asList(
            Row.of("(INT...)", "(INT...)", "(INT...)", "(STRING, INT...)", "(STRING, INT...)"),
            Row.of("(INT...)", "(INT...)", "(INT...)", "(STRING, INT...)", "(STRING, INT...)"));
    assertThat(actual, equalTo(expected));
}
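The VarArgScalarFunction registered above lives in Flink's test sources and is not shown here. A hypothetical stand-in that would produce the expected rows is sketched below: two overloaded var-arg eval() methods whose bodies simply echo the signature that Flink resolved for the call.

import org.apache.flink.table.functions.ScalarFunction;

// Hypothetical sketch of the test's VarArgScalarFunction: Flink picks the
// eval() overload that matches the call, including the var-arg positions.
public class VarArgScalarFunction extends ScalarFunction {
    public String eval(Integer... i) {
        return "(INT...)";
    }

    public String eval(String s, Integer... i) {
        return "(STRING, INT...)";
    }
}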
Use of org.apache.flink.table.api.TableResult in project flink by apache.
From the class BuiltInFunctionTestBase, method testResult.
// --------------------------------------------------------------------------------------------
// Test utilities
// --------------------------------------------------------------------------------------------
private static void testResult(
        DataTypeFactory dataTypeFactory, TableEnvironment env, Table inputTable, ResultTestItem<?> testItem) {
    final Table resultTable = testItem.query(env, inputTable);
    final List<DataType> expectedDataTypes = createDataTypes(dataTypeFactory, testItem.dataTypes);
    final TableResult result = resultTable.execute();
    final Iterator<Row> iterator = result.collect();
    assertThat(iterator).hasNext();
    final Row row = iterator.next();
    assertThat(iterator).as("No more rows expected.").isExhausted();
    for (int i = 0; i < row.getArity(); i++) {
        assertThat(result.getResolvedSchema().getColumnDataTypes().get(i).getLogicalType())
                .as("Logical type for spec [%d] of test [%s] doesn't match.", i, testItem)
                .isEqualTo(expectedDataTypes.get(i).getLogicalType());
        assertThat(Row.of(row.getField(i)))
                .as("Result for spec [%d] of test [%s] doesn't match.", i, testItem)
                // Wrapping in Row enables deep equality for complex values such as byte[].
                .isEqualTo(Row.of(testItem.results.get(i)));
    }
}
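testResult() checks each column in two steps: the logical type taken from result.getResolvedSchema() must match the expected DataType, and the value must match the expected result. The schema half of that check can be reproduced in isolation with a small sketch (query, class, and method name are illustrative):

import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.types.DataType;

public class ResolvedSchemaSketch {
    // Prints the logical type of every result column, e.g. INT NOT NULL, CHAR(3) NOT NULL.
    static void printColumnTypes(TableEnvironment tEnv) {
        TableResult result = tEnv.executeSql("SELECT x AS i, 'Bob' AS s FROM (VALUES (1)) AS T(x)");
        for (DataType type : result.getResolvedSchema().getColumnDataTypes()) {
            System.out.println(type.getLogicalType());
        }
    }
}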
Use of org.apache.flink.table.api.TableResult in project flink by apache.
From the class BuiltInFunctionTestBase, method testError.
private static void testError(TableEnvironment env, Table inputTable, ErrorTestItem<?> testItem) {
    AtomicReference<TableResult> tableResult = new AtomicReference<>();
    Throwable t = catchThrowable(() -> tableResult.set(testItem.query(env, inputTable).execute()));
    if (testItem.expectedDuringValidation) {
        assertThat(t)
                .as("Expected a validation exception")
                .isNotNull()
                .satisfies(testItem.errorMatcher());
        return;
    } else {
        assertThat(t).as("Error while validating the query").isNull();
    }
    assertThatThrownBy(() -> tableResult.get().await())
            .isNotNull()
            .satisfies(testItem.errorMatcher());
}
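testError() separates the two places a failure can surface: validation errors are thrown synchronously while executing the query, whereas runtime errors only appear once the job runs, which is why the test calls tableResult.get().await(). A minimal sketch of that await() behaviour, assuming placeholder tables SinkTable and SourceTable that already exist in the catalog:

import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

import java.util.concurrent.ExecutionException;

public class AwaitSketch {
    // SinkTable and SourceTable are placeholders. executeSql() submits the INSERT
    // asynchronously; await() blocks until the job finishes and rethrows failures.
    static void runInsert(TableEnvironment tEnv) {
        TableResult result = tEnv.executeSql("INSERT INTO SinkTable SELECT * FROM SourceTable");
        try {
            result.await();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException("Statement failed at runtime", e);
        }
    }
}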