Usage of org.apache.flink.table.catalog.CatalogTable in the apache/flink project.
From the class HiveDialectITCase, method testCreateTableWithConstraints:
@Test
public void testCreateTableWithConstraints() throws Exception {
    // Constraint DDL used below is only understood by Hive 3.1.0 and later.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    final String ddl =
            "create table tbl (x int,y int not null disable novalidate rely,z int not null disable novalidate norely,"
                    + "constraint pk_name primary key (x) disable rely)";
    tableEnv.executeSql(ddl);
    final CatalogTable table =
            (CatalogTable) hiveCatalog.getTable(new ObjectPath("default", "tbl"));
    final TableSchema schema = table.getSchema();
    // The named PK constraint must survive the round trip through the catalog.
    assertTrue("PK not present", schema.getPrimaryKey().isPresent());
    assertEquals("pk_name", schema.getPrimaryKey().get().getName());
    // Column 0 (x) is the PK, column 1 (y) is RELY NOT NULL: both non-nullable.
    assertFalse("PK cannot be null", schema.getFieldDataTypes()[0].getLogicalType().isNullable());
    assertFalse("RELY NOT NULL should be reflected in schema", schema.getFieldDataTypes()[1].getLogicalType().isNullable());
    // Column 2 (z) is NORELY, so the constraint is not trusted and stays nullable.
    assertTrue("NORELY NOT NULL shouldn't be reflected in schema", schema.getFieldDataTypes()[2].getLogicalType().isNullable());
}
Usage of org.apache.flink.table.catalog.CatalogTable in the apache/flink project.
From the class HiveDynamicTableFactoryTest, method getTableSource:
/**
 * Looks up {@code tableName} in the "default" database of the Hive catalog and
 * builds a {@link DynamicTableSource} for it via the catalog's own factory.
 */
private DynamicTableSource getTableSource(String tableName) throws Exception {
    final TableEnvironmentInternal internalEnv = (TableEnvironmentInternal) tableEnv;
    final ObjectIdentifier identifier =
            ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    final CatalogTable table =
            (CatalogTable) hiveCatalog.getTable(identifier.toObjectPath());
    // The Hive catalog is expected to supply its own table factory here.
    final DynamicTableSourceFactory factory =
            (DynamicTableSourceFactory)
                    hiveCatalog.getFactory().orElseThrow(IllegalStateException::new);
    return FactoryUtil.createDynamicTableSource(
            factory,
            identifier,
            internalEnv.getCatalogManager().resolveCatalogTable(table),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(),
            false);
}
Usage of org.apache.flink.table.catalog.CatalogTable in the apache/flink project.
From the class HiveCatalog, method alterTable:
/**
 * Alters an existing table in the Hive metastore to match {@code newCatalogTable}.
 *
 * <p>If the table does not exist and {@code ignoreIfNotExists} is true, this is a no-op;
 * otherwise the {@link TableNotExistException} from the lookup is rethrown. The existing and
 * new tables must have the same table kind (table vs. view), and changing the catalog table
 * type via options is rejected by {@code disallowChangeCatalogTableType}.
 *
 * @param tablePath path of the table to alter; must not be null
 * @param newCatalogTable the desired new table definition; must not be null
 * @param ignoreIfNotExists whether a missing table is silently ignored
 * @throws TableNotExistException if the table is missing and {@code ignoreIfNotExists} is false
 * @throws CatalogException on kind mismatch or on metastore failures
 */
@Override
public void alterTable(ObjectPath tablePath, CatalogBaseTable newCatalogTable, boolean ignoreIfNotExists) throws TableNotExistException, CatalogException {
checkNotNull(tablePath, "tablePath cannot be null");
checkNotNull(newCatalogTable, "newCatalogTable cannot be null");
Table hiveTable;
try {
hiveTable = getHiveTable(tablePath);
} catch (TableNotExistException e) {
// Missing table: either swallow (ignoreIfNotExists) or propagate the lookup failure.
if (!ignoreIfNotExists) {
throw e;
}
return;
}
CatalogBaseTable existingTable = instantiateCatalogTable(hiveTable);
// Reject switching between table and view (or other kinds) in an ALTER.
if (existingTable.getTableKind() != newCatalogTable.getTableKind()) {
throw new CatalogException(String.format("Table types don't match. Existing table is '%s' and new table is '%s'.", existingTable.getTableKind(), newCatalogTable.getTableKind()));
}
disallowChangeCatalogTableType(existingTable.getOptions(), newCatalogTable.getOptions());
boolean isHiveTable = isHiveTable(hiveTable.getParameters());
if (isHiveTable) {
AlterTableOp op = HiveTableUtil.extractAlterTableOp(newCatalogTable.getOptions());
if (op == null) {
// the alter operation isn't encoded as properties
hiveTable = HiveTableUtil.alterTableViaCatalogBaseTable(tablePath, newCatalogTable, hiveTable, hiveConf, false);
} else {
// A specific ALTER op (e.g. set/unset properties) is encoded in the options;
// this path mutates hiveTable in place rather than rebuilding it.
alterTableViaProperties(op, hiveTable, (CatalogTable) newCatalogTable, hiveTable.getParameters(), newCatalogTable.getOptions(), hiveTable.getSd());
}
} else {
// Non-Hive (generic) table: managed-table handling may apply — last arg.
hiveTable = HiveTableUtil.alterTableViaCatalogBaseTable(tablePath, newCatalogTable, hiveTable, hiveConf, ManagedTableListener.isManagedTable(this, newCatalogTable));
}
if (isHiveTable) {
// Strip the Flink 'connector' option before persisting — presumably so a pure Hive
// table doesn't carry Flink-internal properties into the metastore; confirm intent.
hiveTable.getParameters().remove(CONNECTOR.key());
}
try {
client.alter_table(tablePath.getDatabaseName(), tablePath.getObjectName(), hiveTable);
} catch (TException e) {
throw new CatalogException(String.format("Failed to alter table %s", tablePath.getFullName()), e);
}
}
Usage of org.apache.flink.table.catalog.CatalogTable in the apache/flink project.
From the class SqlToOperationConverterTest, method testCreateViewWithMatchRecognize:
@Test
public void testCreateViewWithMatchRecognize() {
    // Register a bounded 'values' source table with a nested ROW column for ORDER BY.
    final Map<String, String> options = new HashMap<>();
    options.put("connector", "values");
    options.put("bounded", "true");
    final Schema schema =
            Schema.newBuilder()
                    .column("id", DataTypes.INT().notNull())
                    .column("measurement", DataTypes.BIGINT().notNull())
                    .column("ts", DataTypes.ROW(DataTypes.FIELD("tmstmp", DataTypes.TIMESTAMP(3))))
                    .build();
    final CatalogTable sourceTable =
            CatalogTable.of(schema, null, Collections.emptyList(), options);
    catalogManager.createTable(
            sourceTable, ObjectIdentifier.of("builtin", "default", "events"), false);
    // A view whose body uses MATCH_RECOGNIZE must still convert to a CreateViewOperation.
    final String sql =
            "CREATE TEMPORARY VIEW foo AS "
                    + "SELECT * "
                    + "FROM events MATCH_RECOGNIZE ("
                    + " PARTITION BY id "
                    + " ORDER BY ts ASC "
                    + " MEASURES "
                    + " next_step.measurement - this_step.measurement AS diff "
                    + " AFTER MATCH SKIP TO NEXT ROW "
                    + " PATTERN (this_step next_step)"
                    + " DEFINE "
                    + " this_step AS TRUE,"
                    + " next_step AS TRUE"
                    + ")";
    final Operation operation = parse(sql, SqlDialect.DEFAULT);
    assertThat(operation).isInstanceOf(CreateViewOperation.class);
}
Usage of org.apache.flink.table.catalog.CatalogTable in the apache/flink project.
From the class SqlToOperationConverterTest, method testCreateViewWithDynamicTableOptions:
@Test
public void testCreateViewWithDynamicTableOptions() {
    // Register a bounded 'values' source table to select from.
    final Map<String, String> options = new HashMap<>();
    options.put("connector", "values");
    options.put("bounded", "true");
    final Schema schema =
            Schema.newBuilder()
                    .column("f0", DataTypes.INT())
                    .column("f1", DataTypes.VARCHAR(20))
                    .build();
    final CatalogTable sourceTable =
            CatalogTable.of(schema, null, Collections.emptyList(), options);
    catalogManager.createTable(
            sourceTable, ObjectIdentifier.of("builtin", "default", "sourceA"), false);
    // Dynamic table options (OPTIONS hint) in the view body must not break conversion.
    final String sql =
            "create view test_view as\n"
                    + "select *\n"
                    + "from sourceA /*+ OPTIONS('changelog-mode'='I') */";
    final Operation operation = parse(sql, SqlDialect.DEFAULT);
    assertThat(operation).isInstanceOf(CreateViewOperation.class);
}
Aggregations