Example 61 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class SqlToOperationConverterTest, method testCreateTableLikeNestedWatermark.

@Test
public void testCreateTableLikeNestedWatermark() {
    CatalogTable catalogTable =
            CatalogTable.of(
                    Schema.newBuilder()
                            .column("f0", DataTypes.INT().notNull())
                            .column("f1", DataTypes.ROW(DataTypes.FIELD("tmstmp", DataTypes.TIMESTAMP(3))))
                            .build(),
                    null,
                    Collections.emptyList(),
                    Collections.emptyMap());
    catalogManager.createTable(catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false);
    final String sql = "create table derivedTable(\n" + "  a int,\n" + "  watermark for f1.t as f1.t - interval '5' second\n" + ")\n" + "like sourceTable";
    assertThatThrownBy(() -> parseAndConvert(sql)).isInstanceOf(ValidationException.class).hasMessageContaining("The rowtime attribute field 'f1.t' is not defined in the table schema," + " at line 3, column 20\n" + "Nested field 't' was not found in a composite type:" + " ROW<`tmstmp` TIMESTAMP(3)>.");
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Test(org.junit.Test)
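For contrast, here is a minimal sketch (not part of the original test file) of the LIKE statement that should pass validation under the same fixture, assuming nested rowtime attributes resolve the way the error message above implies: the watermark must reference the nested column that actually exists, f1.tmstmp.

@Test
public void testCreateTableLikeNestedWatermarkValid() {
    // hypothetical positive counterpart; reuses the sourceTable registered above,
    // whose f1 column has type ROW<`tmstmp` TIMESTAMP(3)>
    final String sql =
            "create table derivedTable(\n"
                    + "  a int,\n"
                    + "  watermark for f1.tmstmp as f1.tmstmp - interval '5' second\n"
                    + ")\n"
                    + "like sourceTable";
    // expected to convert without a ValidationException, since 'tmstmp' exists
    parseAndConvert(sql);
}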

Example 62 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class SqlToOperationConverterTest, method testCreateTableWithWatermark.

@Test
public void testCreateTableWithWatermark() throws FunctionAlreadyExistException, DatabaseNotExistException {
    CatalogFunction cf = new CatalogFunctionImpl(JavaUserDefinedScalarFunctions.JavaFunc5.class.getName());
    catalog.createFunction(ObjectPath.fromString("default.myfunc"), cf, true);
    final String sql = "create table source_table(\n" + "  a int,\n" + "  b bigint,\n" + "  c timestamp(3),\n" + "  watermark for `c` as myfunc(c, 1) - interval '5' second\n" + ") with (\n" + "  'connector.type' = 'kafka')\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    Map<String, String> properties = catalogTable.toProperties();
    Map<String, String> expected = new HashMap<>();
    expected.put("schema.0.name", "a");
    expected.put("schema.0.data-type", "INT");
    expected.put("schema.1.name", "b");
    expected.put("schema.1.data-type", "BIGINT");
    expected.put("schema.2.name", "c");
    expected.put("schema.2.data-type", "TIMESTAMP(3)");
    expected.put("schema.watermark.0.rowtime", "c");
    expected.put("schema.watermark.0.strategy.expr", "`builtin`.`default`.`myfunc`(`c`, 1) - INTERVAL '5' SECOND");
    expected.put("schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
    expected.put("connector.type", "kafka");
    assertThat(properties).isEqualTo(expected);
}
Also used : HashMap(java.util.HashMap) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) CatalogFunctionImpl(org.apache.flink.table.catalog.CatalogFunctionImpl) Operation(org.apache.flink.table.operations.Operation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CalciteParser(org.apache.flink.table.planner.parse.CalciteParser) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)
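As a small follow-up sketch (not from the original test): the WITH-clause options are also reachable directly on the table via getOptions(), without going through the legacy schema.* serialization. Collections is assumed to be imported as in the other examples.

    // same catalogTable as above; that only the WITH clause contributed options is an
    // assumption about this fixture, not an assertion taken from the Flink sources
    assertThat(catalogTable.getOptions())
            .isEqualTo(Collections.singletonMap("connector.type", "kafka"));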

Example 63 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class SqlToOperationConverterTest, method testCreateTable.

@Test
public void testCreateTable() {
    final String sql = "CREATE TABLE tbl1 (\n" + "  a bigint,\n" + "  b varchar, \n" + "  c int, \n" + "  d varchar" + ")\n" + "  PARTITIONED BY (a, d)\n" + "  with (\n" + "    'connector' = 'kafka', \n" + "    'kafka.topic' = 'log.test'\n" + ")\n";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    assertThat(catalogTable.getPartitionKeys()).hasSameElementsAs(Arrays.asList("a", "d"));
    assertThat(catalogTable.getSchema().getFieldNames())
            .isEqualTo(new String[] { "a", "b", "c", "d" });
    assertThat(catalogTable.getSchema().getFieldDataTypes())
            .isEqualTo(
                    new DataType[] {
                        DataTypes.BIGINT(),
                        DataTypes.VARCHAR(Integer.MAX_VALUE),
                        DataTypes.INT(),
                        DataTypes.VARCHAR(Integer.MAX_VALUE)
                    });
}
Also used : FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) Operation(org.apache.flink.table.operations.Operation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CalciteParser(org.apache.flink.table.planner.parse.CalciteParser) Test(org.junit.Test)
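A hedged follow-up sketch (not in the original test): getSchema() returns the deprecated TableSchema, and the same field names can be read from the non-deprecated accessor getUnresolvedSchema(). Imports of org.apache.flink.table.api.Schema, java.util.List, and java.util.stream.Collectors are assumed.

    // read column names off the unresolved Schema instead of the legacy TableSchema
    List<String> columnNames =
            catalogTable.getUnresolvedSchema().getColumns().stream()
                    .map(Schema.UnresolvedColumn::getName)
                    .collect(Collectors.toList());
    assertThat(columnNames).containsExactly("a", "b", "c", "d");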

Example 64 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class FlinkCalciteCatalogReaderTest, method testGetFlinkPreparingTableBase.

@Test
public void testGetFlinkPreparingTableBase() {
    // Mock CatalogSchemaTable.
    final ObjectIdentifier objectIdentifier = ObjectIdentifier.of("a", "b", "c");
    final ResolvedSchema schema = new ResolvedSchema(Collections.emptyList(), Collections.emptyList(), null);
    final CatalogTable catalogTable =
            ConnectorCatalogTable.source(
                    new TestTableSource(true, TableSchema.fromResolvedSchema(schema)), true);
    final ResolvedCatalogTable resolvedCatalogTable = new ResolvedCatalogTable(catalogTable, schema);
    CatalogSchemaTable mockTable =
            new CatalogSchemaTable(
                    ContextResolvedTable.permanent(
                            objectIdentifier,
                            CatalogManagerMocks.createEmptyCatalog(),
                            resolvedCatalogTable),
                    FlinkStatistic.UNKNOWN(),
                    true);
    rootSchemaPlus.add(tableMockName, mockTable);
    Prepare.PreparingTable preparingTable = catalogReader.getTable(Collections.singletonList(tableMockName));
    assertTrue(preparingTable instanceof FlinkPreparingTableBase);
}
Also used : TestTableSource(org.apache.flink.table.planner.utils.TestTableSource) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) CatalogSchemaTable(org.apache.flink.table.planner.catalog.CatalogSchemaTable) Prepare(org.apache.calcite.prepare.Prepare) FlinkPreparingTableBase(org.apache.flink.table.planner.plan.schema.FlinkPreparingTableBase) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Test(org.junit.Test)
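A brief follow-up sketch under the same fixture (not part of the original test). It assumes the standard Calcite behavior that a catalog reader returns null for names that were never registered; the table name below is hypothetical.

    // "nonExistingTable" was never added to rootSchemaPlus, so the lookup should yield null
    assertNull(catalogReader.getTable(Collections.singletonList("nonExistingTable")));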

Example 65 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class SqlToOperationConverter, method convertAlterTable.

/** Converts an ALTER TABLE statement into the corresponding Operation. */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(String.format("Table %s doesn't exist or is a temporary table.", tableIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogView) {
        throw new ValidationException("ALTER TABLE for a view is not allowed");
    }
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(
                        ((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableOptions) {
        return convertAlterTableOptions(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableOptions) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableReset) {
        return convertAlterTableReset(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableReset) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableAddConstraint) {
        SqlTableConstraint constraint = ((SqlAlterTableAddConstraint) sqlAlterTable).getConstraint();
        validateTableConstraint(constraint);
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        // Sanity check for constraint.
        TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriSchema);
        if (constraint.getConstraintName().isPresent()) {
            builder.primaryKey(constraint.getConstraintName().get(), constraint.getColumnNames());
        } else {
            builder.primaryKey(constraint.getColumnNames());
        }
        builder.build();
        return new AlterTableAddConstraintOperation(
                tableIdentifier,
                constraint.getConstraintName().orElse(null),
                constraint.getColumnNames());
    } else if (sqlAlterTable instanceof SqlAlterTableDropConstraint) {
        SqlAlterTableDropConstraint dropConstraint = ((SqlAlterTableDropConstraint) sqlAlterTable);
        String constraintName = dropConstraint.getConstraintName().getSimple();
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        if (!oriSchema.getPrimaryKey().filter(pk -> pk.getName().equals(constraintName)).isPresent()) {
            throw new ValidationException(String.format("CONSTRAINT [%s] does not exist", constraintName));
        }
        return new AlterTableDropConstraintOperation(tableIdentifier, constraintName);
    } else if (sqlAlterTable instanceof SqlAddReplaceColumns) {
        return OperationConverterUtils.convertAddReplaceColumns(
                tableIdentifier,
                (SqlAddReplaceColumns) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlChangeColumn) {
        return OperationConverterUtils.convertChangeColumn(
                tableIdentifier,
                (SqlChangeColumn) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlAddPartitions) {
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        List<CatalogPartition> partitions = new ArrayList<>();
        SqlAddPartitions addPartitions = (SqlAddPartitions) sqlAlterTable;
        for (int i = 0; i < addPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(addPartitions.getPartitionKVs(i)));
            Map<String, String> props =
                    OperationConverterUtils.extractProperties(
                            addPartitions.getPartProps().get(i));
            partitions.add(new CatalogPartitionImpl(props, null));
        }
        return new AddPartitionsOperation(tableIdentifier, addPartitions.ifNotExists(), specs, partitions);
    } else if (sqlAlterTable instanceof SqlDropPartitions) {
        SqlDropPartitions dropPartitions = (SqlDropPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        for (int i = 0; i < dropPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(dropPartitions.getPartitionKVs(i)));
        }
        return new DropPartitionsOperation(tableIdentifier, dropPartitions.ifExists(), specs);
    } else if (sqlAlterTable instanceof SqlAlterTableCompact) {
        return convertAlterTableCompact(tableIdentifier, optionalCatalogTable.get(), (SqlAlterTableCompact) sqlAlterTable);
    } else {
        throw new ValidationException(String.format("[%s] needs to implement", sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) SqlAlterTableReset(org.apache.flink.sql.parser.ddl.SqlAlterTableReset) ArrayList(java.util.ArrayList) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) SqlAlterTableRename(org.apache.flink.sql.parser.ddl.SqlAlterTableRename) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) SqlAddReplaceColumns(org.apache.flink.sql.parser.ddl.SqlAddReplaceColumns) SqlAddPartitions(org.apache.flink.sql.parser.ddl.SqlAddPartitions) SqlAlterTableAddConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableAddConstraint) SqlAlterTableCompact(org.apache.flink.sql.parser.ddl.SqlAlterTableCompact) List(java.util.List) SqlTableConstraint(org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint) CatalogView(org.apache.flink.table.catalog.CatalogView) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlChangeColumn(org.apache.flink.sql.parser.ddl.SqlChangeColumn) SqlAlterTableOptions(org.apache.flink.sql.parser.ddl.SqlAlterTableOptions) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) SqlAlterTableDropConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableDropConstraint) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) Map(java.util.Map) CatalogPartitionImpl(org.apache.flink.table.catalog.CatalogPartitionImpl) SqlDropPartitions(org.apache.flink.sql.parser.ddl.SqlDropPartitions)
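To make the dispatch above concrete, a minimal usage sketch (not from the Flink sources), reusing the parseAndConvert helper seen in Example 61 and assuming it returns the converted Operation; the table names are hypothetical, and tbl1 must already be registered in the catalog, otherwise convertAlterTable throws the ValidationException shown at the top of the method.

    // RENAME branch -> AlterTableRenameOperation
    Operation rename = parseAndConvert("alter table tbl1 rename to tbl2");
    assertThat(rename).isInstanceOf(AlterTableRenameOperation.class);
    // SET branch -> AlterTableOptionsOperation
    Operation setOptions = parseAndConvert("alter table tbl1 set ('connector.type' = 'kafka')");
    assertThat(setOptions).isInstanceOf(AlterTableOptionsOperation.class);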

Aggregations

CatalogTable (org.apache.flink.table.catalog.CatalogTable): 68 usages
Test (org.junit.Test): 35 usages
HashMap (java.util.HashMap): 30 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 24 usages
TableSchema (org.apache.flink.table.api.TableSchema): 17 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 17 usages
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation): 14 usages
ValidationException (org.apache.flink.table.api.ValidationException): 13 usages
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 13 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 12 usages
Operation (org.apache.flink.table.operations.Operation): 12 usages
AlterTableAddConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation): 12 usages
AlterTableDropConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation): 12 usages
AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation): 12 usages
AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation): 12 usages
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 11 usages
LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation): 11 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 11 usages
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 11 usages
ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation): 11 usages