
Example 11 with FlinkPlannerImpl

Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.

From the class SqlToOperationConverterTest, method testCreateTableWithComputedColumn.

@Test
public void testCreateTableWithComputedColumn() {
    final String sql = "CREATE TABLE tbl1 (\n" + "  a int,\n" + "  b varchar, \n" + "  c as a - 1, \n" + "  d as b || '$$', \n" + "  e as my_udf1(a)," + "  f as `default`.my_udf2(a) + 1," + "  g as builtin.`default`.my_udf3(a) || '##'\n" + ")\n" + "  with (\n" + "    'connector' = 'kafka', \n" + "    'kafka.topic' = 'log.test'\n" + ")\n";
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$);
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    assertThat(catalogTable.getSchema().getFieldNames()).isEqualTo(new String[] { "a", "b", "c", "d", "e", "f", "g" });
    assertThat(catalogTable.getSchema().getFieldDataTypes())
            .isEqualTo(
                    new DataType[] {
                        DataTypes.INT(),
                        DataTypes.STRING(),
                        DataTypes.INT(),
                        DataTypes.STRING(),
                        DataTypes.INT().notNull(),
                        DataTypes.INT(),
                        DataTypes.STRING()
                    });
    String[] columnExpressions =
            catalogTable.getSchema().getTableColumns().stream()
                    .filter(ComputedColumn.class::isInstance)
                    .map(ComputedColumn.class::cast)
                    .map(ComputedColumn::getExpression)
                    .toArray(String[]::new);
    String[] expected =
            new String[] {
                "`a` - 1",
                "`b` || '$$'",
                "`builtin`.`default`.`my_udf1`(`a`)",
                "`builtin`.`default`.`my_udf2`(`a`) + 1",
                "`builtin`.`default`.`my_udf3`(`a`) || '##'"
            };
    assertThat(columnExpressions).isEqualTo(expected);
}
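The parse(sql, planner, parser) helper used in this and the following tests is a private utility of SqlToOperationConverterTest that is not included in this listing. Judging from Example 13 below, which performs the same steps inline, it plausibly reduces to something like this (a sketch, not the actual helper):

// Hypothetical sketch of the test helper; the real method lives in SqlToOperationConverterTest
// and may differ in details.
private Operation parse(String sql, FlinkPlannerImpl planner, CalciteParser parser) {
    // Parse the raw SQL text into a SqlNode, then convert it into a Flink Operation.
    SqlNode node = parser.parse(sql);
    return SqlToOperationConverter.convert(planner, catalogManager, node)
            .orElseThrow(() -> new TableException("Unsupported statement: " + sql));
}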

Example 12 with FlinkPlannerImpl

Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.

From the class ParserImpl, method parse.

/**
 * When parsing a statement, this method first tries the {@link ExtendedParser}. If the
 * {@link ExtendedParser} cannot parse the statement, it falls back to the {@link CalciteParser}.
 *
 * @param statement the input statement
 * @return the parsed operations
 */
@Override
public List<Operation> parse(String statement) {
    CalciteParser parser = calciteParserSupplier.get();
    FlinkPlannerImpl planner = validatorSupplier.get();
    Optional<Operation> command = EXTENDED_PARSER.parse(statement);
    if (command.isPresent()) {
        return Collections.singletonList(command.get());
    }
    // parse the sql query
    // use parseSqlList here because we need to support statement end with ';' in sql client.
    SqlNodeList sqlNodeList = parser.parseSqlList(statement);
    List<SqlNode> parsed = sqlNodeList.getList();
    Preconditions.checkArgument(parsed.size() == 1, "only single statement supported");
    return Collections.singletonList(
            SqlToOperationConverter.convert(planner, catalogManager, parsed.get(0))
                    .orElseThrow(
                            () -> new TableException("Unsupported query: " + statement)));
}
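As a usage sketch (not taken from the Flink sources excerpted here): given a Parser instance, a caller passes a single statement and branches on the concrete Operation subtype it gets back. The SQL text, method name, and dispatch below are illustrative assumptions.

// Hypothetical caller of Parser#parse; the statement text and the dispatch are illustrative.
import java.util.List;

import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.delegation.Parser;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.ddl.CreateTableOperation;

class ParserUsageSketch {

    static void describeStatement(Parser parser) {
        // ParserImpl#parse accepts exactly one statement and returns a singleton list.
        List<Operation> operations =
                parser.parse("CREATE TABLE t (a INT) WITH ('connector' = 'datagen')");
        Operation operation = operations.get(0);
        if (operation instanceof CreateTableOperation) {
            // DDL statements come back as ddl.*Operation instances carrying the catalog object.
            CatalogTable table = ((CreateTableOperation) operation).getCatalogTable();
            System.out.println("table options: " + table.getOptions());
        } else if (operation instanceof QueryOperation) {
            // Plain SELECTs (and EXECUTE SELECT, see Example 14) come back as QueryOperations.
            System.out.println("query: " + operation.asSummaryString());
        }
    }
}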

Example 13 with FlinkPlannerImpl

Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.

From the class SqlToOperationConverterTest, method testCreateTableWithFullDataTypes.

// TODO: tweak the tests when FLINK-13604 is fixed.
@Test
public void testCreateTableWithFullDataTypes() {
    final List<TestItem> testItems =
            Arrays.asList(
                    createTestItem("CHAR", DataTypes.CHAR(1)),
                    createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()),
                    createTestItem("CHAR NULL", DataTypes.CHAR(1)),
                    createTestItem("CHAR(33)", DataTypes.CHAR(33)),
                    createTestItem("VARCHAR", DataTypes.STRING()),
                    createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)),
                    createTestItem("STRING", DataTypes.STRING()),
                    createTestItem("BOOLEAN", DataTypes.BOOLEAN()),
                    createTestItem("BINARY", DataTypes.BINARY(1)),
                    createTestItem("BINARY(33)", DataTypes.BINARY(33)),
                    createTestItem("VARBINARY", DataTypes.BYTES()),
                    createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)),
                    createTestItem("BYTES", DataTypes.BYTES()),
                    createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DEC", DataTypes.DECIMAL(10, 0)),
                    createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("TINYINT", DataTypes.TINYINT()),
                    createTestItem("SMALLINT", DataTypes.SMALLINT()),
                    createTestItem("INTEGER", DataTypes.INT()),
                    createTestItem("INT", DataTypes.INT()),
                    createTestItem("BIGINT", DataTypes.BIGINT()),
                    createTestItem("FLOAT", DataTypes.FLOAT()),
                    createTestItem("DOUBLE", DataTypes.DOUBLE()),
                    createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()),
                    createTestItem("DATE", DataTypes.DATE()),
                    createTestItem("TIME", DataTypes.TIME()),
                    createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()),
                    // Expect to be TIME(3).
                    createTestItem("TIME(3)", DataTypes.TIME()),
                    // Expect to be TIME(3).
                    createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()),
                    createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)),
                    createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)),
                    createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)),
                    createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)),
                    createTestItem("TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)),
                    createTestItem("TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
                    createTestItem("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))),
                    createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())),
                    createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())),
                    createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())),
                    createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()),
                    createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())),
                    createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())),
                    createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())),
                    createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()),
                    createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())),
                    // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
                    createTestItem("ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
                    createTestItem("ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
                    createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
                    createTestItem("ROW<>", DataTypes.ROW()),
                    createTestItem("ROW()", DataTypes.ROW()),
                    // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
                    createTestItem("ROW<f0 INT NOT NULL 'This is a comment.', f1 BOOLEAN 'This as well.'>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    createTestItem("ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem("ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem("MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem("ROW<f0 Row<f00 INT, f01 BOOLEAN>, f1 INT ARRAY, f2 BOOLEAN MULTISET>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.ROW(DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD("f01", DataTypes.BOOLEAN()))), DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())), DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
    StringBuilder buffer = new StringBuilder("create table t1(\n");
    for (int i = 0; i < testItems.size(); i++) {
        buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
        if (i == testItems.size() - 1) {
            buffer.append(")");
        } else {
            buffer.append(",\n");
        }
    }
    final String sql = buffer.toString();
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
    Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
    assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
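The TestItem holder and the createTestItem factory referenced above are defined elsewhere in SqlToOperationConverterTest and are not part of this excerpt. A minimal sketch of what they might look like, inferred only from the members this test touches (testExpr and expectedType); the real helper may carry additional fields used by other tests:

// Hypothetical reconstruction, inferred from usage in testCreateTableWithFullDataTypes.
private static class TestItem {
    final String testExpr;        // SQL type string appended to the generated DDL, e.g. "DECIMAL(10, 3)"
    final DataType expectedType;  // DataType the corresponding column is expected to resolve to

    private TestItem(String testExpr, DataType expectedType) {
        this.testExpr = testExpr;
        this.expectedType = expectedType;
    }
}

private static TestItem createTestItem(String testExpr, DataType expectedType) {
    return new TestItem(testExpr, expectedType);
}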

Example 14 with FlinkPlannerImpl

Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.

From the class SqlToOperationConverterTest, method testSqlExecuteWithSelect.

@Test
public void testSqlExecuteWithSelect() {
    final String sql = "execute select a, b, c, d from t2 where a > 1";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(QueryOperation.class);
}

Example 15 with FlinkPlannerImpl

Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.

From the class SqlToOperationConverterTest, method testExplainDetailsWithInsert.

@Test
public void testExplainDetailsWithInsert() {
    final String sql = "explain estimated_cost, changelog_mode insert into t1 select a, b, c, d from t2";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertExplainDetails(parse(sql, planner, parser));
}

Aggregations

FlinkPlannerImpl (org.apache.flink.table.planner.calcite.FlinkPlannerImpl): 25
CalciteParser (org.apache.flink.table.planner.parse.CalciteParser): 23
Test (org.junit.Test): 21
Operation (org.apache.flink.table.operations.Operation): 17
BeginStatementSetOperation (org.apache.flink.table.operations.BeginStatementSetOperation): 16
EndStatementSetOperation (org.apache.flink.table.operations.EndStatementSetOperation): 16
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 16
LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation): 16
QueryOperation (org.apache.flink.table.operations.QueryOperation): 16
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 16
ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation): 16
SinkModifyOperation (org.apache.flink.table.operations.SinkModifyOperation): 16
SourceQueryOperation (org.apache.flink.table.operations.SourceQueryOperation): 16
StatementSetOperation (org.apache.flink.table.operations.StatementSetOperation): 16
UnloadModuleOperation (org.apache.flink.table.operations.UnloadModuleOperation): 16
UseCatalogOperation (org.apache.flink.table.operations.UseCatalogOperation): 16
UseDatabaseOperation (org.apache.flink.table.operations.UseDatabaseOperation): 16
UseModulesOperation (org.apache.flink.table.operations.UseModulesOperation): 16
AddJarOperation (org.apache.flink.table.operations.command.AddJarOperation): 16
RemoveJarOperation (org.apache.flink.table.operations.command.RemoveJarOperation): 16