
Example 6 with Parser

Use of org.apache.flink.table.delegation.Parser in project zeppelin by apache.

Source: class Flink112Shims, method parseSql.

/**
 * Parse the statement via regex matching first, then fall back to Flink's SqlParser.
 *
 * @param tableEnv the TableEnvironment (a TableEnvironmentInternal) whose Parser is used for the fallback
 * @param stmt the SQL statement to parse
 * @return the parsed SqlCommandCall, or Optional.empty() if the statement cannot be parsed
 */
@Override
public Optional<SqlCommandParser.SqlCommandCall> parseSql(Object tableEnv, String stmt) {
    Parser sqlParser = ((TableEnvironmentInternal) tableEnv).getParser();
    SqlCommandCall sqlCommandCall = null;
    try {
        // parse statement via regex matching first
        Optional<SqlCommandCall> callOpt = parseByRegexMatching(stmt);
        if (callOpt.isPresent()) {
            sqlCommandCall = callOpt.get();
        } else {
            sqlCommandCall = parseBySqlParser(sqlParser, stmt);
        }
    } catch (Exception e) {
        return Optional.empty();
    }
    return Optional.of(sqlCommandCall);
}
Also used : TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) SqlCommandCall(org.apache.zeppelin.flink.sql.SqlCommandParser.SqlCommandCall) FlinkException(org.apache.flink.util.FlinkException) TableException(org.apache.flink.table.api.TableException) IOException(java.io.IOException) SqlCommandParser(org.apache.zeppelin.flink.sql.SqlCommandParser) Parser(org.apache.flink.table.delegation.Parser)
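
The helpers parseByRegexMatching and parseBySqlParser are not reproduced on this page. Below is a minimal sketch of how the parser-based fallback could delegate to Parser.parse; the SqlCommand enum value and the SqlCommandCall(SqlCommand, String[]) constructor are assumptions about Zeppelin's SqlCommandParser, and the real helper maps many more Operation subtypes.

// Hypothetical sketch only, not the actual Zeppelin implementation.
// Assumes: org.apache.zeppelin.flink.sql.SqlCommandParser.SqlCommand and
// a SqlCommandCall(SqlCommand, String[]) constructor.
private SqlCommandCall parseBySqlParser(Parser sqlParser, String stmt) throws Exception {
    List<Operation> operations;
    try {
        // Parser.parse returns every Operation contained in the statement.
        operations = sqlParser.parse(stmt);
    } catch (Throwable t) {
        throw new Exception("Invalid SQL statement: " + stmt, t);
    }
    if (operations.size() != 1) {
        throw new Exception("Only a single statement is supported, got: " + stmt);
    }
    Operation operation = operations.get(0);
    if (operation instanceof QueryOperation) {
        // Queries map to a SELECT command; other Operation subtypes would map
        // to their corresponding SqlCommand values.
        return new SqlCommandCall(SqlCommand.SELECT, new String[] { stmt });
    }
    throw new Exception("Unsupported statement: " + stmt);
}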

Example 7 with Parser

Use of org.apache.flink.table.delegation.Parser in project flink by apache.

Source: class LocalExecutor, method parseStatement.

@Override
public Operation parseStatement(String sessionId, String statement) throws SqlExecutionException {
    final ExecutionContext context = getExecutionContext(sessionId);
    final TableEnvironment tableEnv = context.getTableEnvironment();
    Parser parser = ((TableEnvironmentInternal) tableEnv).getParser();
    List<Operation> operations;
    try {
        operations = context.wrapClassLoader(() -> parser.parse(statement));
    } catch (Exception e) {
        throw new SqlExecutionException("Failed to parse statement: " + statement, e);
    }
    if (operations.isEmpty()) {
        throw new SqlExecutionException("Failed to parse statement: " + statement);
    }
    return operations.get(0);
}
Also used : SqlExecutionException(org.apache.flink.table.client.gateway.SqlExecutionException) ExecutionContext(org.apache.flink.table.client.gateway.context.ExecutionContext) TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) TableEnvironment(org.apache.flink.table.api.TableEnvironment) ModifyOperation(org.apache.flink.table.operations.ModifyOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) Operation(org.apache.flink.table.operations.Operation) SqlExecutionException(org.apache.flink.table.client.gateway.SqlExecutionException) Parser(org.apache.flink.table.delegation.Parser)
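
Outside the SQL Client, the same Parser can be obtained directly from a TableEnvironment. The following is a standalone, illustrative sketch (the class name and the EnvironmentSettings setup are not taken from the example above; EnvironmentSettings.inStreamingMode() is available in recent Flink versions):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.internal.TableEnvironmentInternal;
import org.apache.flink.table.delegation.Parser;
import org.apache.flink.table.operations.Operation;

import java.util.List;

public class ParserUsageSketch {

    public static void main(String[] args) {
        // Any planner-backed TableEnvironment exposes a Parser through its internal interface.
        TableEnvironment tableEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        Parser parser = ((TableEnvironmentInternal) tableEnv).getParser();

        // Parser.parse returns all Operations contained in the statement;
        // a single query yields exactly one element.
        List<Operation> operations = parser.parse("SELECT 1");
        System.out.println(operations.get(0).asSummaryString());
    }
}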

Example 8 with Parser

Use of org.apache.flink.table.delegation.Parser in project flink by apache.

Source: class SqlToOperationConverterTest, method testCreateTableWithFullDataTypes.

// TODO: tweak the tests when FLINK-13604 is fixed.
@Test
public void testCreateTableWithFullDataTypes() {
    final List<TestItem> testItems = Arrays.asList(createTestItem("CHAR", DataTypes.CHAR(1)), createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()), createTestItem("CHAR NULL", DataTypes.CHAR(1)), createTestItem("CHAR(33)", DataTypes.CHAR(33)), createTestItem("VARCHAR", DataTypes.STRING()), createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)), createTestItem("STRING", DataTypes.STRING()), createTestItem("BOOLEAN", DataTypes.BOOLEAN()), createTestItem("BINARY", DataTypes.BINARY(1)), createTestItem("BINARY(33)", DataTypes.BINARY(33)), createTestItem("VARBINARY", DataTypes.BYTES()), createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)), createTestItem("BYTES", DataTypes.BYTES()), createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)), createTestItem("DEC", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("TINYINT", DataTypes.TINYINT()), createTestItem("SMALLINT", DataTypes.SMALLINT()), createTestItem("INTEGER", DataTypes.INT()), createTestItem("INT", DataTypes.INT()), createTestItem("BIGINT", DataTypes.BIGINT()), createTestItem("FLOAT", DataTypes.FLOAT()), createTestItem("DOUBLE", DataTypes.DOUBLE()), createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()), createTestItem("DATE", DataTypes.DATE()), createTestItem("TIME", DataTypes.TIME()), createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()), // Expect to be TIME(3).
    createTestItem("TIME(3)", DataTypes.TIME()), // Expect to be TIME(3).
    createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()), createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)), createTestItem("TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)), createTestItem("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))), createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())), createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()), createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())), createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()), createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())), // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
    createTestItem("ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
    createTestItem("ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW<>", DataTypes.ROW()), createTestItem("ROW()", DataTypes.ROW()), // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
    createTestItem("ROW<f0 INT NOT NULL 'This is a comment.'," + " f1 BOOLEAN 'This as well.'>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem("ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("ROW<f0 Row<f00 INT, f01 BOOLEAN>, " + "f1 INT ARRAY, " + "f2 BOOLEAN MULTISET>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.ROW(DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD("f01", DataTypes.BOOLEAN()))), DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())), DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
    StringBuilder buffer = new StringBuilder("create table t1(\n");
    for (int i = 0; i < testItems.size(); i++) {
        buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
        if (i == testItems.size() - 1) {
            buffer.append(")");
        } else {
            buffer.append(",\n");
        }
    }
    final String sql = buffer.toString();
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
    Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
    assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
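
For illustration, the DDL assembled by the loop above begins as follows; the column names f0, f1, ... come from the loop index, and only the first three generated columns are shown here:

create table t1(
f0 CHAR,
f1 CHAR NOT NULL,
f2 CHAR NULL,
...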
Also used : FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ExplainDetail(org.apache.flink.table.api.ExplainDetail) OperationMatchers.isCreateTableOperation(org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation) Map(java.util.Map) SqlCreateTable(org.apache.flink.sql.parser.ddl.SqlCreateTable) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) ModuleManager(org.apache.flink.table.module.ModuleManager) TableConfig(org.apache.flink.table.api.TableConfig) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) JavaUserDefinedScalarFunctions(org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) ExpressionResolverMocks(org.apache.flink.table.utils.ExpressionResolverMocks) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) ParserImpl(org.apache.flink.table.planner.delegation.ParserImpl) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) ShowJarsOperation(org.apache.flink.table.operations.command.ShowJarsOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Func8$(org.apache.flink.table.planner.expressions.utils.Func8$) CatalogFunctionImpl(org.apache.flink.table.catalog.CatalogFunctionImpl) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) EndStatementSetOperation(org.apache.flink.table.operations.EndStatementSetOperation) Func0$(org.apache.flink.table.planner.expressions.utils.Func0$) Supplier(java.util.function.Supplier) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) TableColumn(org.apache.flink.table.api.TableColumn) Catalog(org.apache.flink.table.catalog.Catalog) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) TestManagedTableFactory(org.apache.flink.table.factories.TestManagedTableFactory) Nullable(javax.annotation.Nullable) Before(org.junit.Before) OperationMatchers.withSchema(org.apache.flink.table.planner.utils.OperationMatchers.withSchema) FunctionScope(org.apache.flink.table.operations.ShowFunctionsOperation.FunctionScope) Func1$(org.apache.flink.table.planner.expressions.utils.Func1$) Test(org.junit.Test) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) TreeMap(java.util.TreeMap) CatalogManagerMocks(org.apache.flink.table.utils.CatalogManagerMocks) CatalogManagerCalciteSchema(org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema) Schema(org.apache.flink.table.api.Schema) 
Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) OperationMatchers.partitionedBy(org.apache.flink.table.planner.utils.OperationMatchers.partitionedBy) CalciteParser(org.apache.flink.table.planner.parse.CalciteParser) SqlNode(org.apache.calcite.sql.SqlNode) SetOperation(org.apache.flink.table.operations.command.SetOperation) After(org.junit.After) ExecutionOptions(org.apache.flink.configuration.ExecutionOptions) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) InstanceOfAssertFactories.type(org.assertj.core.api.InstanceOfAssertFactories.type) Collectors(java.util.stream.Collectors) CalciteSchemaBuilder.asRootSchema(org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema) List(java.util.List) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SqlRichExplain(org.apache.flink.sql.parser.dql.SqlRichExplain) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) ValidationException(org.apache.flink.table.api.ValidationException) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) RemoveJarOperation(org.apache.flink.table.operations.command.RemoveJarOperation) CatalogManager(org.apache.flink.table.catalog.CatalogManager) HamcrestCondition(org.assertj.core.api.HamcrestCondition) OperationMatchers.entry(org.apache.flink.table.planner.utils.OperationMatchers.entry) BeginStatementSetOperation(org.apache.flink.table.operations.BeginStatementSetOperation) HashMap(java.util.HashMap) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) AddJarOperation(org.apache.flink.table.operations.command.AddJarOperation) ObjectPath(org.apache.flink.table.catalog.ObjectPath) AtomicReference(java.util.concurrent.atomic.AtomicReference) OperationMatchers.withOptions(org.apache.flink.table.planner.utils.OperationMatchers.withOptions) PlannerContext(org.apache.flink.table.planner.delegation.PlannerContext) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) HashSet(java.util.HashSet) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ResetOperation(org.apache.flink.table.operations.command.ResetOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Configuration(org.apache.flink.configuration.Configuration) Parser(org.apache.flink.table.delegation.Parser) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) DataTypes(org.apache.flink.table.api.DataTypes) SqlDialect(org.apache.flink.table.api.SqlDialect) RuntimeExecutionMode(org.apache.flink.api.common.RuntimeExecutionMode) Collections(java.util.Collections) TableSchema(org.apache.flink.table.api.TableSchema) OperationMatchers.isCreateTableOperation(org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation) 
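
The createTestItem helper and the TestItem class belong to SqlToOperationConverterTest and are not shown on this page. A hypothetical sketch of what they pair together follows; the real helper may take a varargs signature and carry extra fields such as an expected error message:

// Hypothetical sketch of the test helper: it pairs a SQL type expression with
// the DataType the converter is expected to produce for that column.
private static class TestItem {
    final String testExpr;
    final DataType expectedType;

    private TestItem(String testExpr, DataType expectedType) {
        this.testExpr = testExpr;
        this.expectedType = expectedType;
    }
}

private static TestItem createTestItem(String testExpr, DataType expectedType) {
    return new TestItem(testExpr, expectedType);
}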

Aggregations

Parser (org.apache.flink.table.delegation.Parser): 8
TableEnvironmentInternal (org.apache.flink.table.api.internal.TableEnvironmentInternal): 7
IOException (java.io.IOException): 3
TableException (org.apache.flink.table.api.TableException): 3
HiveParser (org.apache.flink.table.planner.delegation.hive.HiveParser): 3
FlinkException (org.apache.flink.util.FlinkException): 3
SqlCommandParser (org.apache.zeppelin.flink.sql.SqlCommandParser): 3
SqlCommandCall (org.apache.zeppelin.flink.sql.SqlCommandParser.SqlCommandCall): 3
Test (org.junit.Test): 3
Operation (org.apache.flink.table.operations.Operation): 2
QueryOperation (org.apache.flink.table.operations.QueryOperation): 2
Arrays (java.util.Arrays): 1
Collections (java.util.Collections): 1
HashMap (java.util.HashMap): 1
HashSet (java.util.HashSet): 1
List (java.util.List): 1
Map (java.util.Map): 1
Set (java.util.Set): 1
TreeMap (java.util.TreeMap): 1
AtomicReference (java.util.concurrent.atomic.AtomicReference): 1