Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
class SqlToOperationConverterTest, method testCreateTableWithFullDataTypes.
// TODO: tweak the tests when FLINK-13604 is fixed.
/**
 * Verifies that a CREATE TABLE statement covering the full range of SQL data types is converted
 * into a CreateTableOperation whose catalog-table schema matches the expected DataTypes.
 *
 * <p>Note: several entries assert the type the converter currently produces rather than the
 * spec-correct one (see the inline "Expect to be ..." comments); these should be tightened once
 * FLINK-13604 is fixed.
 */
@Test
public void testCreateTableWithFullDataTypes() {
final List<TestItem> testItems = Arrays.asList(createTestItem("CHAR", DataTypes.CHAR(1)), createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()), createTestItem("CHAR NULL", DataTypes.CHAR(1)), createTestItem("CHAR(33)", DataTypes.CHAR(33)), createTestItem("VARCHAR", DataTypes.STRING()), createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)), createTestItem("STRING", DataTypes.STRING()), createTestItem("BOOLEAN", DataTypes.BOOLEAN()), createTestItem("BINARY", DataTypes.BINARY(1)), createTestItem("BINARY(33)", DataTypes.BINARY(33)), createTestItem("VARBINARY", DataTypes.BYTES()), createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)), createTestItem("BYTES", DataTypes.BYTES()), createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)), createTestItem("DEC", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("TINYINT", DataTypes.TINYINT()), createTestItem("SMALLINT", DataTypes.SMALLINT()), createTestItem("INTEGER", DataTypes.INT()), createTestItem("INT", DataTypes.INT()), createTestItem("BIGINT", DataTypes.BIGINT()), createTestItem("FLOAT", DataTypes.FLOAT()), createTestItem("DOUBLE", DataTypes.DOUBLE()), createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()), createTestItem("DATE", DataTypes.DATE()), createTestItem("TIME", DataTypes.TIME()), createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()), // Expect to be TIME(3).
createTestItem("TIME(3)", DataTypes.TIME()), // Expect to be TIME(3).
createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()), createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)), createTestItem("TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)), createTestItem("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))), createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())), createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()), createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())), createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()), createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())), // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
createTestItem("ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
createTestItem("ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW<>", DataTypes.ROW()), createTestItem("ROW()", DataTypes.ROW()), // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
createTestItem("ROW<f0 INT NOT NULL 'This is a comment.'," + " f1 BOOLEAN 'This as well.'>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem("ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("ROW<f0 Row<f00 INT, f01 BOOLEAN>, " + "f1 INT ARRAY, " + "f2 BOOLEAN MULTISET>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.ROW(DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD("f01", DataTypes.BOOLEAN()))), DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())), DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
// Assemble "create table t1(f0 <type0>,\nf1 <type1>, ... )" — one column per test item,
// named f0..fN, using the item's SQL type expression verbatim.
StringBuilder buffer = new StringBuilder("create table t1(\n");
for (int i = 0; i < testItems.size(); i++) {
buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
if (i == testItems.size() - 1) {
// Last column: close the column list instead of adding a separator.
buffer.append(")");
} else {
buffer.append(",\n");
}
}
final String sql = buffer.toString();
final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
// Parse and convert the generated DDL, then compare the resolved field types
// against each item's expected DataType, position by position.
SqlNode node = parser.parse(sql);
assertThat(node).isInstanceOf(SqlCreateTable.class);
Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
class SqlToOperationConverterTest, method testSqlExecuteWithSelect.
/**
 * Verifies that an {@code EXECUTE} statement wrapping a plain {@code SELECT} is converted into a
 * {@link QueryOperation}, i.e. it behaves like the bare query for conversion purposes.
 */
@Test
public void testSqlExecuteWithSelect() {
final String executeStatement = "execute select a, b, c, d from t2 where a > 1";
final CalciteParser sqlParser = getParserBySqlDialect(SqlDialect.DEFAULT);
final FlinkPlannerImpl flinkPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
final Operation converted = parse(executeStatement, flinkPlanner, sqlParser);
assertThat(converted).isInstanceOf(QueryOperation.class);
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
class SqlToOperationConverterTest, method testCreateViewWithMatchRecognize.
/**
 * Verifies that {@code CREATE TEMPORARY VIEW} over a {@code MATCH_RECOGNIZE} query is converted
 * into a {@link CreateViewOperation}.
 */
@Test
public void testCreateViewWithMatchRecognize() {
// Register a bounded source table "events" backed by the testing "values" connector.
final Map<String, String> connectorOptions = new HashMap<>();
connectorOptions.put("connector", "values");
connectorOptions.put("bounded", "true");
final Schema sourceSchema =
        Schema.newBuilder()
                .column("id", DataTypes.INT().notNull())
                .column("measurement", DataTypes.BIGINT().notNull())
                .column("ts", DataTypes.ROW(DataTypes.FIELD("tmstmp", DataTypes.TIMESTAMP(3))))
                .build();
final CatalogTable eventsTable =
        CatalogTable.of(sourceSchema, null, Collections.emptyList(), connectorOptions);
catalogManager.createTable(
        eventsTable, ObjectIdentifier.of("builtin", "default", "events"), false);
// View definition uses MATCH_RECOGNIZE with a two-step pattern over the events table.
final String sql =
        "CREATE TEMPORARY VIEW foo AS "
                + "SELECT * "
                + "FROM events MATCH_RECOGNIZE ("
                + " PARTITION BY id "
                + " ORDER BY ts ASC "
                + " MEASURES "
                + " next_step.measurement - this_step.measurement AS diff "
                + " AFTER MATCH SKIP TO NEXT ROW "
                + " PATTERN (this_step next_step)"
                + " DEFINE "
                + " this_step AS TRUE,"
                + " next_step AS TRUE"
                + ")";
final Operation operation = parse(sql, SqlDialect.DEFAULT);
assertThat(operation).isInstanceOf(CreateViewOperation.class);
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
class SqlToOperationConverterTest, method testAlterDatabase.
/**
 * Verifies that {@code ALTER DATABASE ... SET (...)} is converted into an
 * {@link AlterDatabaseOperation} carrying the catalog name, database name, original comment,
 * and the new properties with their key case preserved.
 */
@Test
public void testAlterDatabase() throws Exception {
// Set up catalog "cat1" containing database "db1" so the ALTER statement can resolve it.
catalogManager.registerCatalog("cat1", new GenericInMemoryCatalog("default", "default"));
catalogManager
        .getCatalog("cat1")
        .get()
        .createDatabase(
                "db1", new CatalogDatabaseImpl(new HashMap<>(), "db1_comment"), true);
final String sql = "alter database cat1.db1 set ('k1'='v1', 'K2'='V2')";
final Operation operation = parse(sql, SqlDialect.DEFAULT);
assertThat(operation).isInstanceOf(AlterDatabaseOperation.class);
final AlterDatabaseOperation alterDbOp = (AlterDatabaseOperation) operation;
// Property keys must retain their original case ("K2" stays upper-case).
final Map<String, String> expectedProperties = new HashMap<>();
expectedProperties.put("k1", "v1");
expectedProperties.put("K2", "V2");
assertThat(alterDbOp.getDatabaseName()).isEqualTo("db1");
assertThat(alterDbOp.getCatalogName()).isEqualTo("cat1");
assertThat(alterDbOp.getCatalogDatabase().getComment()).isEqualTo("db1_comment");
assertThat(alterDbOp.getCatalogDatabase().getProperties()).isEqualTo(expectedProperties);
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
class SqlToOperationConverterTest, method testUseDatabase.
/**
 * Verifies conversion of {@code USE [catalog.]database} statements: without an explicit catalog
 * the current ("builtin") catalog is assumed; a qualified name selects both explicitly.
 */
@Test
public void testUseDatabase() {
// Unqualified: database resolved against the current catalog.
final Operation useDbOnly = parse("USE db1", SqlDialect.DEFAULT);
assertThat(useDbOnly).isInstanceOf(UseDatabaseOperation.class);
final UseDatabaseOperation implicitCatalogOp = (UseDatabaseOperation) useDbOnly;
assertThat(implicitCatalogOp.getCatalogName()).isEqualTo("builtin");
assertThat(implicitCatalogOp.getDatabaseName()).isEqualTo("db1");
// Qualified: catalog and database both taken from the statement.
final Operation useQualified = parse("USE cat1.db1", SqlDialect.DEFAULT);
assertThat(useQualified).isInstanceOf(UseDatabaseOperation.class);
final UseDatabaseOperation explicitCatalogOp = (UseDatabaseOperation) useQualified;
assertThat(explicitCatalogOp.getCatalogName()).isEqualTo("cat1");
assertThat(explicitCatalogOp.getDatabaseName()).isEqualTo("db1");
}
Aggregations