Example 41 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in the Apache Flink project.

From class SqlToOperationConverterTest, method before:

@Before
public void before() throws TableAlreadyExistException, DatabaseNotExistException {
    catalogManager.initSchemaResolver(isStreamingMode, ExpressionResolverMocks.basicResolver(catalogManager, functionCatalog, parser));
    final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1");
    final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2");
    final TableSchema tableSchema =
            TableSchema.builder()
                    .field("a", DataTypes.BIGINT())
                    .field("b", DataTypes.VARCHAR(Integer.MAX_VALUE))
                    .field("c", DataTypes.INT())
                    .field("d", DataTypes.VARCHAR(Integer.MAX_VALUE))
                    .build();
    Map<String, String> options = new HashMap<>();
    options.put("connector", "COLLECTION");
    final CatalogTable catalogTable = new CatalogTableImpl(tableSchema, options, "");
    catalog.createTable(path1, catalogTable, true);
    catalog.createTable(path2, catalogTable, true);
}
Also used: org.apache.flink.table.catalog.ObjectPath, org.apache.flink.table.api.TableSchema, java.util.HashMap, org.apache.flink.table.catalog.CatalogTableImpl, org.apache.flink.table.catalog.CatalogTable, org.junit.Before
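
The snippet above uses the older CatalogTableImpl and TableSchema constructors, which are deprecated in newer Flink releases. A minimal sketch of the same table built through CatalogTable.of and the unresolved Schema builder, the style used in Examples 42 and 44 below (the COLLECTION connector option is unchanged):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;

// Builds the same four-column COLLECTION-backed table as the @Before method,
// but via the Schema-based factory instead of CatalogTableImpl.
static CatalogTable collectionTable() {
    Map<String, String> options = new HashMap<>();
    options.put("connector", "COLLECTION");
    return CatalogTable.of(
            Schema.newBuilder()
                    .column("a", DataTypes.BIGINT())
                    .column("b", DataTypes.VARCHAR(Integer.MAX_VALUE))
                    .column("c", DataTypes.INT())
                    .column("d", DataTypes.VARCHAR(Integer.MAX_VALUE))
                    .build(),
            null, // no comment
            Collections.emptyList(), // no partition keys
            options);
}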

Example 42 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in the Apache Flink project.

From class SqlToOperationConverterTest, method testCreateTableLikeWithFullPath:

@Test
public void testCreateTableLikeWithFullPath() {
    Map<String, String> sourceProperties = new HashMap<>();
    sourceProperties.put("connector.type", "kafka");
    sourceProperties.put("format.type", "json");
    CatalogTable catalogTable =
            CatalogTable.of(
                    Schema.newBuilder()
                            .column("f0", DataTypes.INT().notNull())
                            .column("f1", DataTypes.TIMESTAMP(3))
                            .build(),
                    null,
                    Collections.emptyList(),
                    sourceProperties);
    catalogManager.createTable(catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false);
    final String sql = "create table mytable like `builtin`.`default`.sourceTable";
    Operation operation = parseAndConvert(sql);
    assertThat(operation)
            .is(new HamcrestCondition<>(isCreateTableOperation(
                    withSchema(Schema.newBuilder()
                            .column("f0", DataTypes.INT().notNull())
                            .column("f1", DataTypes.TIMESTAMP(3))
                            .build()),
                    withOptions(
                            entry("connector.type", "kafka"),
                            entry("format.type", "json")))));
}
Also used: java.util.HashMap, org.apache.flink.table.catalog.CatalogTable, org.apache.flink.table.operations.Operation, org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation, org.junit.Test
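
By default the LIKE clause copies both the schema and the options of the source table, which is what the assertion above checks. Flink's DDL also accepts merging strategies inside the LIKE clause (for example INCLUDING ALL, EXCLUDING OPTIONS, or OVERWRITING OPTIONS) to control what is inherited. A minimal sketch of a variant that keeps the schema but discards the source options (the replacement option values are illustrative, not taken from the test):

// Hypothetical variant: inherit only the schema from sourceTable and supply
// fresh options; EXCLUDING OPTIONS drops the inherited connector settings.
final String sqlExcludingOptions =
        "create table mytable with ('connector.type' = 'filesystem') "
                + "like `builtin`.`default`.sourceTable (EXCLUDING OPTIONS)";
Operation excludingOp = parseAndConvert(sqlExcludingOptions);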

Example 43 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in the Apache Flink project.

From class SqlToOperationConverterTest, method testCreateTableWithPrimaryKey:

@Test
public void testCreateTableWithPrimaryKey() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + "  a bigint,\n"
                    + "  b varchar, \n"
                    + "  c int, \n"
                    + "  d varchar, \n"
                    + "  constraint ct1 primary key(a, b) not enforced\n"
                    + ") with (\n"
                    + "  'connector' = 'kafka', \n"
                    + "  'kafka.topic' = 'log.test'\n"
                    + ")\n";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    TableSchema tableSchema = catalogTable.getSchema();
    assertThat(tableSchema.getPrimaryKey()
                    .map(UniqueConstraint::asSummaryString)
                    .orElse("fakeVal"))
            .isEqualTo("CONSTRAINT ct1 PRIMARY KEY (a, b)");
    assertThat(tableSchema.getFieldNames()).isEqualTo(new String[] { "a", "b", "c", "d" });
    assertThat(tableSchema.getFieldDataTypes())
            .isEqualTo(new DataType[] {
                    DataTypes.BIGINT().notNull(),
                    DataTypes.STRING().notNull(),
                    DataTypes.INT(),
                    DataTypes.STRING()
            });
}
Also used: org.apache.flink.table.api.TableSchema, org.apache.flink.table.planner.calcite.FlinkPlannerImpl, org.apache.flink.table.planner.parse.CalciteParser, org.apache.flink.table.operations.Operation, org.apache.flink.table.operations.ddl.CreateTableOperation, org.apache.flink.table.catalog.CatalogTable, org.junit.Test
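
Note that the DDL declares the constraint NOT ENFORCED. Flink does not own the data it reads and writes, so NOT ENFORCED is the only supported mode for primary keys, and a constraint declared (or defaulting to) ENFORCED is rejected during conversion. A minimal sketch of the negative case, assuming AssertJ's assertThatThrownBy and a ValidationException (the exact exception message is version-dependent and not taken from this test):

// Assumption: omitting NOT ENFORCED makes the constraint default to ENFORCED,
// which the converter rejects with a ValidationException.
final String enforcedSql =
        "CREATE TABLE tbl2 (\n"
                + "  a bigint,\n"
                + "  constraint ct2 primary key(a)\n"
                + ") with ('connector' = 'kafka')";
assertThatThrownBy(() -> parse(enforcedSql, planner, parser))
        .isInstanceOf(ValidationException.class);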

Example 44 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in the Apache Flink project.

From class DynamicTableSinkSpecSerdeTest, method testDynamicTableSinkSpecSerde:

static Stream<DynamicTableSinkSpec> testDynamicTableSinkSpecSerde() {
    Map<String, String> options1 = new HashMap<>();
    options1.put("connector", FileSystemTableFactory.IDENTIFIER);
    options1.put("format", TestCsvFormatFactory.IDENTIFIER);
    options1.put("path", "/tmp");
    // Spec 1: a plain filesystem sink with no sink abilities attached.
    final ResolvedSchema resolvedSchema1 =
            new ResolvedSchema(
                    Collections.singletonList(Column.physical("a", DataTypes.BIGINT())),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable1 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema1).build(),
                    null,
                    Collections.emptyList(),
                    options1);
    DynamicTableSinkSpec spec1 =
            new DynamicTableSinkSpec(
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of(
                                    DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                            new ResolvedCatalogTable(catalogTable1, resolvedSchema1)),
                    null);
    Map<String, String> options2 = new HashMap<>();
    options2.put("connector", FileSystemTableFactory.IDENTIFIER);
    options2.put("format", TestCsvFormatFactory.IDENTIFIER);
    options2.put("path", "/tmp");
    // Spec 2: partitioned filesystem sink with overwrite and static partition p=A.
    final ResolvedSchema resolvedSchema2 =
            new ResolvedSchema(
                    Arrays.asList(
                            Column.physical("a", DataTypes.BIGINT()),
                            Column.physical("b", DataTypes.INT()),
                            Column.physical("p", DataTypes.STRING())),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable2 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema2).build(),
                    null,
                    Collections.emptyList(),
                    options2);
    DynamicTableSinkSpec spec2 =
            new DynamicTableSinkSpec(
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of(
                                    DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                            new ResolvedCatalogTable(catalogTable2, resolvedSchema2)),
                    Arrays.asList(
                            new OverwriteSpec(true),
                            new PartitioningSpec(
                                    new HashMap<String, String>() {
                                        {
                                            put("p", "A");
                                        }
                                    })));
    Map<String, String> options3 = new HashMap<>();
    options3.put("connector", TestValuesTableFactory.IDENTIFIER);
    options3.put("writable-metadata", "m:STRING");
    // Spec 3: values sink that writes the metadata column m as a STRING.
    final ResolvedSchema resolvedSchema3 =
            new ResolvedSchema(
                    Arrays.asList(
                            Column.physical("a", DataTypes.BIGINT()),
                            Column.physical("b", DataTypes.INT()),
                            Column.metadata("m", DataTypes.STRING(), null, false)),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable3 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema3).build(),
                    null,
                    Collections.emptyList(),
                    options3);
    DynamicTableSinkSpec spec3 =
            new DynamicTableSinkSpec(
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of(
                                    DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                            new ResolvedCatalogTable(catalogTable3, resolvedSchema3)),
                    Collections.singletonList(
                            new WritingMetadataSpec(
                                    Collections.singletonList("m"),
                                    RowType.of(new BigIntType(), new IntType()))));
    return Stream.of(spec1, spec2, spec3);
}
Also used: java.util.HashMap, org.apache.flink.table.catalog.CatalogTable, org.apache.flink.table.catalog.ResolvedCatalogTable, org.apache.flink.table.catalog.ResolvedSchema, org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSinkSpec, org.apache.flink.table.planner.plan.abilities.sink.OverwriteSpec, org.apache.flink.table.planner.plan.abilities.sink.PartitioningSpec, org.apache.flink.table.planner.plan.abilities.sink.WritingMetadataSpec, org.apache.flink.table.types.logical.BigIntType, org.apache.flink.table.types.logical.IntType
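
The double-brace HashMap initializer used for the partition map of spec2 works, but it allocates an extra anonymous subclass per use site. For a single entry, Collections.singletonMap is the more idiomatic choice, assuming PartitioningSpec does not need to mutate the map it receives:

// Equivalent single-entry partition map without the double-brace idiom.
PartitioningSpec partitioning =
        new PartitioningSpec(Collections.singletonMap("p", "A"));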

Example 45 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in the Apache Flink project.

From class SqlToOperationConverterTest, method testCreateTableWithComputedColumn:

@Test
public void testCreateTableWithComputedColumn() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + "  a int,\n"
                    + "  b varchar, \n"
                    + "  c as a - 1, \n"
                    + "  d as b || '$$', \n"
                    + "  e as my_udf1(a),"
                    + "  f as `default`.my_udf2(a) + 1,"
                    + "  g as builtin.`default`.my_udf3(a) || '##'\n"
                    + ")\n"
                    + "  with (\n"
                    + "    'connector' = 'kafka', \n"
                    + "    'kafka.topic' = 'log.test'\n"
                    + ")\n";
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$);
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    assertThat(catalogTable.getSchema().getFieldNames())
            .isEqualTo(new String[] { "a", "b", "c", "d", "e", "f", "g" });
    assertThat(catalogTable.getSchema().getFieldDataTypes())
            .isEqualTo(new DataType[] {
                    DataTypes.INT(),
                    DataTypes.STRING(),
                    DataTypes.INT(),
                    DataTypes.STRING(),
                    DataTypes.INT().notNull(),
                    DataTypes.INT(),
                    DataTypes.STRING()
            });
    // Computed-column expressions are stored fully qualified and escaped,
    // regardless of how the functions were referenced in the DDL.
    String[] columnExpressions =
            catalogTable.getSchema().getTableColumns().stream()
                    .filter(ComputedColumn.class::isInstance)
                    .map(ComputedColumn.class::cast)
                    .map(ComputedColumn::getExpression)
                    .toArray(String[]::new);
    String[] expected =
            new String[] {
                    "`a` - 1",
                    "`b` || '$$'",
                    "`builtin`.`default`.`my_udf1`(`a`)",
                    "`builtin`.`default`.`my_udf2`(`a`) + 1",
                    "`builtin`.`default`.`my_udf3`(`a`) || '##'"
            };
    assertThat(columnExpressions).isEqualTo(expected);
}
Also used: org.apache.flink.table.planner.calcite.FlinkPlannerImpl, org.apache.flink.table.api.TableColumn.ComputedColumn, org.apache.flink.table.operations.Operation, org.apache.flink.table.operations.ddl.CreateTableOperation, org.apache.flink.table.catalog.CatalogTable, org.junit.Test
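
TableSchema and its TableColumn.ComputedColumn are the legacy schema representation. With the newer Schema builder used elsewhere in this list, computed columns are declared programmatically via columnByExpression; a minimal sketch covering the first two computed columns of the DDL above (expressions stay unresolved until schema resolution, which is when function references get fully qualified):

// Physical columns plus computed columns declared by SQL expression string.
Schema schema = Schema.newBuilder()
        .column("a", DataTypes.INT())
        .column("b", DataTypes.STRING())
        .columnByExpression("c", "a - 1")
        .columnByExpression("d", "b || '$$'")
        .build();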

Aggregations

CatalogTable (org.apache.flink.table.catalog.CatalogTable): 68 uses
Test (org.junit.Test): 35 uses
HashMap (java.util.HashMap): 30 uses
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 24 uses
TableSchema (org.apache.flink.table.api.TableSchema): 17 uses
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 17 uses
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation): 14 uses
ValidationException (org.apache.flink.table.api.ValidationException): 13 uses
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 13 uses
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 12 uses
Operation (org.apache.flink.table.operations.Operation): 12 uses
AlterTableAddConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation): 12 uses
AlterTableDropConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation): 12 uses
AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation): 12 uses
AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation): 12 uses
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 11 uses
LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation): 11 uses
QueryOperation (org.apache.flink.table.operations.QueryOperation): 11 uses
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 11 uses
ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation): 11 uses