Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.
Class TableEnvironmentImpl, method registerTableSinkInternal.
@Override
public void registerTableSinkInternal(String name, TableSink<?> tableSink) {
    ObjectIdentifier objectIdentifier =
            catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
    Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
    if (table.isPresent()) {
        if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
            ConnectorCatalogTable<?, ?> sourceSinkTable =
                    (ConnectorCatalogTable<?, ?>) table.get();
            if (sourceSinkTable.getTableSink().isPresent()) {
                throw new ValidationException(
                        String.format(
                                "Table '%s' already exists. Please choose a different name.",
                                name));
            } else {
                // the existing wrapper contains only a source (no sink), so pair it with the new sink
                ConnectorCatalogTable sourceAndSink =
                        ConnectorCatalogTable.sourceAndSink(
                                sourceSinkTable.getTableSource().get(),
                                tableSink,
                                !IS_STREAM_TABLE);
                catalogManager.dropTemporaryTable(objectIdentifier, false);
                catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
            }
        } else {
            throw new ValidationException(
                    String.format(
                            "Table '%s' already exists. Please choose a different name.", name));
        }
    } else {
        ConnectorCatalogTable sink = ConnectorCatalogTable.sink(tableSink, !IS_STREAM_TABLE);
        catalogManager.createTemporaryTable(sink, objectIdentifier, false);
    }
}
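For context, a rough caller-side sketch of how this internal method is typically reached, assuming a Flink version that still ships the legacy TableSink stack; CsvTableSink, the output path, and the cast to TableEnvironmentInternal are illustration choices, not taken from the snippet above.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.internal.TableEnvironmentInternal;
import org.apache.flink.table.sinks.CsvTableSink;
import org.apache.flink.table.sinks.TableSink;

public class RegisterTableSinkSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance().inStreamingMode().build());
        // A configured legacy sink; any TableSink implementation would do here.
        TableSink<?> sink =
                new CsvTableSink("/tmp/out.csv", "|")
                        .configure(
                                new String[] {"f0"},
                                new TypeInformation<?>[] {Types.INT});
        // Delegates to registerTableSinkInternal, which wraps the sink in a
        // ConnectorCatalogTable and stores it as a temporary table in the catalog manager.
        ((TableEnvironmentInternal) tEnv).registerTableSinkInternal("my_sink", sink);
    }
}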
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.
Class TableEnvironmentTest, method testCreateTableFromDescriptor.
@Test
public void testCreateTableFromDescriptor() throws Exception {
    final TableEnvironmentMock tEnv = TableEnvironmentMock.getStreamingInstance();
    final String catalog = tEnv.getCurrentCatalog();
    final String database = tEnv.getCurrentDatabase();

    final Schema schema = Schema.newBuilder().column("f0", DataTypes.INT()).build();
    tEnv.createTable(
            "T",
            TableDescriptor.forConnector("fake").schema(schema).option("a", "Test").build());

    final ObjectPath objectPath = new ObjectPath(database, "T");
    assertThat(tEnv.getCatalog(catalog).orElseThrow(AssertionError::new).tableExists(objectPath))
            .isTrue();

    final CatalogBaseTable catalogTable =
            tEnv.getCatalog(catalog).orElseThrow(AssertionError::new).getTable(objectPath);
    assertThat(catalogTable).isInstanceOf(CatalogTable.class);
    assertThat(catalogTable.getUnresolvedSchema()).isEqualTo(schema);
    assertThat(catalogTable.getOptions())
            .contains(entry("connector", "fake"), entry("a", "Test"));
}
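Outside of the mock-based test, the same descriptor API can be used against a real TableEnvironment. A short sketch, assuming the built-in datagen connector; the table name, columns, and options are illustrative only.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;

public class CreateTableFromDescriptorSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance().inStreamingMode().build());
        Schema schema =
                Schema.newBuilder()
                        .column("id", DataTypes.BIGINT())
                        .column("name", DataTypes.STRING())
                        .build();
        // Registers a permanent catalog table, just like tEnv.createTable("T", ...) in the test.
        tEnv.createTable(
                "People",
                TableDescriptor.forConnector("datagen")
                        .schema(schema)
                        .option("number-of-rows", "10")
                        .build());
        tEnv.from("People").execute().print();
    }
}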
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.
Class SqlToOperationConverter, method convertAlterView.
/** Convert ALTER VIEW statement. */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(
                String.format(
                        "View %s doesn't exist or is a temporary view.",
                        viewIdentifier.toString()));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier =
                catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(
                OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView =
                new CatalogViewImpl(
                        oldView.getOriginalQuery(),
                        oldView.getExpandedQuery(),
                        oldView.getSchema(),
                        newProperties,
                        oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        CatalogView newView =
                convertViewQuery(
                        newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        throw new ValidationException(
                String.format(
                        "[%s] needs to implement",
                        alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
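This converter is exercised whenever an ALTER VIEW statement is submitted through SQL. A small end-to-end sketch, assuming the built-in datagen connector as a stand-in source; table and view names are made up.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterViewSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance().inStreamingMode().build());
        tEnv.executeSql(
                "CREATE TABLE src (x INT) WITH ('connector' = 'datagen', 'number-of-rows' = '10')");
        // A non-temporary view registered in the current catalog, so convertAlterView accepts it.
        tEnv.executeSql("CREATE VIEW v AS SELECT x FROM src");
        // SqlAlterViewRename -> AlterViewRenameOperation
        tEnv.executeSql("ALTER VIEW v RENAME TO v2");
        // SqlAlterViewAs -> AlterViewAsOperation
        tEnv.executeSql("ALTER VIEW v2 AS SELECT x * 2 AS doubled FROM src");
    }
}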
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
Class TableEnvHiveConnectorITCase, method testPKConstraint.
@Test
public void testPKConstraint() throws Exception {
    // While PK constraints are supported since Hive 2.1.0, the constraints cannot be RELY in
    // 2.x versions, so only test against 3.x.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // test RELY PK constraints
        tableEnv.executeSql(
                "create table db1.tbl1 (x tinyint,y smallint,z int, primary key (x,z) disable novalidate rely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl1"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue(tableSchema.getPrimaryKey().isPresent());
        UniqueConstraint pk = tableSchema.getPrimaryKey().get();
        assertEquals(2, pk.getColumns().size());
        assertTrue(pk.getColumns().containsAll(Arrays.asList("x", "z")));
        // test NORELY PK constraints
        tableEnv.executeSql(
                "create table db1.tbl2 (x tinyint,y smallint, primary key (x) disable norely)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl2"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
        // test table w/o PK
        tableEnv.executeSql("create table db1.tbl3 (x tinyint)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl3"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
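The same primary-key metadata can be inspected without a Hive catalog. A brief sketch against the default in-memory catalog, assuming a plain TableEnvironment and the datagen connector; it reads the constraint through the same (deprecated) TableSchema accessor the test uses.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;

public class PrimaryKeySketch {
    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance().inStreamingMode().build());
        // In Flink DDL a primary key must be declared NOT ENFORCED.
        tEnv.executeSql(
                "CREATE TABLE t (x INT NOT NULL, y INT, PRIMARY KEY (x) NOT ENFORCED) "
                        + "WITH ('connector' = 'datagen')");
        CatalogBaseTable table =
                tEnv.getCatalog(tEnv.getCurrentCatalog())
                        .orElseThrow(IllegalStateException::new)
                        .getTable(new ObjectPath(tEnv.getCurrentDatabase(), "t"));
        // Mirrors the assertions above: the constraint is visible through the legacy TableSchema.
        table.getSchema()
                .getPrimaryKey()
                .ifPresent(pk -> System.out.println("PK columns: " + pk.getColumns()));
    }
}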
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
Class TableEnvHiveConnectorITCase, method testNotNullConstraints.
@Test
public void testNotNullConstraints() throws Exception {
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql(
                "create table db1.tbl (x int,y bigint not null enable rely,z string not null enable norely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue(
                "By default columns should be nullable",
                tableSchema.getFieldDataTypes()[0].getLogicalType().isNullable());
        assertFalse(
                "NOT NULL columns should be reflected in table schema",
                tableSchema.getFieldDataTypes()[1].getLogicalType().isNullable());
        assertTrue(
                "NOT NULL NORELY columns should be considered nullable",
                tableSchema.getFieldDataTypes()[2].getLogicalType().isNullable());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
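A parallel nullability check works outside of Hive as well. A minimal sketch, assuming the same kind of TableEnvironment and the datagen connector; column names are illustrative, and the deprecated TableSchema accessor matches what the test above reads.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;

public class NotNullSketch {
    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance().inStreamingMode().build());
        tEnv.executeSql(
                "CREATE TABLE t (a INT, b STRING NOT NULL) WITH ('connector' = 'datagen')");
        CatalogBaseTable table =
                tEnv.getCatalog(tEnv.getCurrentCatalog())
                        .orElseThrow(IllegalStateException::new)
                        .getTable(new ObjectPath(tEnv.getCurrentDatabase(), "t"));
        TableSchema schema = table.getSchema();
        // a is nullable by default; b's NOT NULL is reflected in the logical type.
        System.out.println(schema.getFieldDataTypes()[0].getLogicalType().isNullable()); // true
        System.out.println(schema.getFieldDataTypes()[1].getLogicalType().isNullable()); // false
    }
}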