use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
the class PostgresCatalogTest method testPrimitiveDataTypes.
@Test
public void testPrimitiveDataTypes() throws TableNotExistException {
    CatalogBaseTable table =
            catalog.getTable(
                    new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, TABLE_PRIMITIVE_TYPE));
    assertEquals(getPrimitiveTable().schema, table.getUnresolvedSchema());
}
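A minimal usage sketch of the same lookup pattern, not taken from the Flink sources: it assumes an already-opened PostgresCatalog bound to the variable catalog and a hypothetical table "primitive_table" in the default database; checked exceptions such as TableNotExistException are left to the caller.

// Hedged sketch: fetch a table through the Catalog API and inspect its unresolved schema.
// The catalog instance and the table name are illustrative assumptions.
ObjectPath path = new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, "primitive_table");
CatalogBaseTable table = catalog.getTable(path);
// The unresolved schema reports the columns exactly as the catalog stored them.
for (Schema.UnresolvedColumn column : table.getUnresolvedSchema().getColumns()) {
    System.out.println(column.getName());
}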
use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
the class HiveParserDDLSemanticAnalyzer method convertAlterView.
private Operation convertAlterView(HiveParserASTNode ast) throws SemanticException {
    Operation operation = null;
    String[] qualified =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) ast.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    CatalogBaseTable alteredTable = getAlteredTable(tableName, true);
    if (ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        // alter view as
        operation = convertCreateView(ast);
    } else {
        ast = (HiveParserASTNode) ast.getChild(1);
        switch (ast.getType()) {
            case HiveASTParser.TOK_ALTERVIEW_PROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, false);
                break;
            case HiveASTParser.TOK_ALTERVIEW_DROPPROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_RENAME:
                operation = convertAlterTableRename(tableName, ast, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_ADDPARTS:
            case HiveASTParser.TOK_ALTERVIEW_DROPPARTS:
                handleUnsupportedOperation("ADD/DROP PARTITION for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for ALTER VIEW: " + ast);
        }
    }
    return operation;
}
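For reference, a hedged sketch of the Hive-dialect statements that would be routed through the branches above; it assumes a TableEnvironment tableEnv with the Hive dialect enabled and an existing view v defined over a table tbl (the same setup as the testView example below).

// Illustrative only: each statement maps to one branch of the dispatch above.
tableEnv.executeSql("alter view v set tblproperties ('k1'='v11')"); // TOK_ALTERVIEW_PROPERTIES
tableEnv.executeSql("alter view v rename to v1");                   // TOK_ALTERVIEW_RENAME
tableEnv.executeSql("alter view v as select y from tbl");           // child is TOK_QUERY -> convertCreateView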
use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
the class HiveDialectITCase method testView.
@Test
public void testView() throws Exception {
    tableEnv.executeSql("create table tbl (x int,y string)");
    // create
    tableEnv.executeSql(
            "create view v(vx) comment 'v comment' tblproperties ('k1'='v1') as select x from tbl");
    ObjectPath viewPath = new ObjectPath("default", "v");
    CatalogBaseTable catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertTrue(catalogBaseTable instanceof CatalogView);
    assertEquals("vx", catalogBaseTable.getUnresolvedSchema().getColumns().get(0).getName());
    assertEquals("v1", catalogBaseTable.getOptions().get("k1"));
    // change properties
    tableEnv.executeSql("alter view v set tblproperties ('k1'='v11')");
    catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertEquals("v11", catalogBaseTable.getOptions().get("k1"));
    // change query
    tableEnv.executeSql("alter view v as select y from tbl");
    catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertEquals("y", catalogBaseTable.getUnresolvedSchema().getColumns().get(0).getName());
    // rename
    tableEnv.executeSql("alter view v rename to v1");
    viewPath = new ObjectPath("default", "v1");
    assertTrue(hiveCatalog.tableExists(viewPath));
    // drop
    tableEnv.executeSql("drop view v1");
    assertFalse(hiveCatalog.tableExists(viewPath));
}
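As a hedged follow-up to the test above, assuming the view v created there still exists in hiveCatalog: besides the unresolved schema and the options, a CatalogView also exposes the original and expanded query text.

// Sketch only; the view name is the one created in the test above.
CatalogBaseTable baseTable = hiveCatalog.getTable(new ObjectPath("default", "v"));
if (baseTable instanceof CatalogView) {
    CatalogView view = (CatalogView) baseTable;
    System.out.println(view.getOriginalQuery());
    System.out.println(view.getExpandedQuery());
}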
use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
the class HiveCatalog method createTable.
@Override
public void createTable(ObjectPath tablePath, CatalogBaseTable table, boolean ignoreIfExists)
        throws TableAlreadyExistException, DatabaseNotExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkNotNull(table, "table cannot be null");
    if (!databaseExists(tablePath.getDatabaseName())) {
        throw new DatabaseNotExistException(getName(), tablePath.getDatabaseName());
    }
    boolean managedTable = ManagedTableListener.isManagedTable(this, table);
    Table hiveTable = HiveTableUtil.instantiateHiveTable(tablePath, table, hiveConf, managedTable);
    UniqueConstraint pkConstraint = null;
    List<String> notNullCols = new ArrayList<>();
    boolean isHiveTable = isHiveTable(table.getOptions());
    if (isHiveTable) {
        pkConstraint = table.getSchema().getPrimaryKey().orElse(null);
        String nnColStr = hiveTable.getParameters().remove(NOT_NULL_COLS);
        if (nnColStr != null) {
            notNullCols.addAll(Arrays.asList(nnColStr.split(HiveDDLUtils.COL_DELIMITER)));
        } else {
            for (int i = 0; i < table.getSchema().getFieldDataTypes().length; i++) {
                if (!table.getSchema().getFieldDataTypes()[i].getLogicalType().isNullable()) {
                    notNullCols.add(table.getSchema().getFieldNames()[i]);
                }
            }
        }
        // remove the 'connector' option for hive table
        hiveTable.getParameters().remove(CONNECTOR.key());
    }
    try {
        if (pkConstraint != null || !notNullCols.isEmpty()) {
            // extract constraint traits from table properties
            String pkTraitStr = hiveTable.getParameters().remove(PK_CONSTRAINT_TRAIT);
            byte pkTrait = pkTraitStr == null ? HiveDDLUtils.defaultTrait() : Byte.parseByte(pkTraitStr);
            List<Byte> pkTraits =
                    Collections.nCopies(pkConstraint == null ? 0 : pkConstraint.getColumns().size(), pkTrait);
            List<Byte> nnTraits;
            String nnTraitsStr = hiveTable.getParameters().remove(NOT_NULL_CONSTRAINT_TRAITS);
            if (nnTraitsStr != null) {
                String[] traits = nnTraitsStr.split(HiveDDLUtils.COL_DELIMITER);
                Preconditions.checkArgument(
                        traits.length == notNullCols.size(),
                        "Number of NOT NULL columns and constraint traits mismatch");
                nnTraits = Arrays.stream(traits).map(Byte::new).collect(Collectors.toList());
            } else {
                nnTraits = Collections.nCopies(notNullCols.size(), HiveDDLUtils.defaultTrait());
            }
            client.createTableWithConstraints(hiveTable, hiveConf, pkConstraint, pkTraits, notNullCols, nnTraits);
        } else {
            client.createTable(hiveTable);
        }
    } catch (AlreadyExistsException e) {
        if (!ignoreIfExists) {
            throw new TableAlreadyExistException(getName(), tablePath, e);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to create table %s", tablePath.getFullName()), e);
    }
}
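A hedged sketch of calling this method from application code, assuming an opened HiveCatalog named hiveCatalog; the database, table name, columns and (empty) options are purely illustrative, and the checked exceptions declared by createTable are left to the caller.

// Build a CatalogTable from an unresolved Schema and register it through the Catalog API.
Schema schema =
        Schema.newBuilder()
                .column("id", DataTypes.INT().notNull())
                .column("name", DataTypes.STRING())
                .primaryKey("id")
                .build();
CatalogTable catalogTable =
        CatalogTable.of(
                schema,
                "example table",           // comment
                Collections.emptyList(),   // no partition keys in this sketch
                Collections.emptyMap());   // options left empty for illustration
hiveCatalog.createTable(new ObjectPath("default", "users"), catalogTable, true); // ignoreIfExists = true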
use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
the class HiveCatalog method alterTable.
@Override
public void alterTable(ObjectPath tablePath, CatalogBaseTable newCatalogTable, boolean ignoreIfNotExists)
        throws TableNotExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkNotNull(newCatalogTable, "newCatalogTable cannot be null");
    Table hiveTable;
    try {
        hiveTable = getHiveTable(tablePath);
    } catch (TableNotExistException e) {
        if (!ignoreIfNotExists) {
            throw e;
        }
        return;
    }
    CatalogBaseTable existingTable = instantiateCatalogTable(hiveTable);
    if (existingTable.getTableKind() != newCatalogTable.getTableKind()) {
        throw new CatalogException(
                String.format(
                        "Table types don't match. Existing table is '%s' and new table is '%s'.",
                        existingTable.getTableKind(), newCatalogTable.getTableKind()));
    }
    disallowChangeCatalogTableType(existingTable.getOptions(), newCatalogTable.getOptions());
    boolean isHiveTable = isHiveTable(hiveTable.getParameters());
    if (isHiveTable) {
        AlterTableOp op = HiveTableUtil.extractAlterTableOp(newCatalogTable.getOptions());
        if (op == null) {
            // the alter operation isn't encoded as properties
            hiveTable =
                    HiveTableUtil.alterTableViaCatalogBaseTable(
                            tablePath, newCatalogTable, hiveTable, hiveConf, false);
        } else {
            alterTableViaProperties(
                    op,
                    hiveTable,
                    (CatalogTable) newCatalogTable,
                    hiveTable.getParameters(),
                    newCatalogTable.getOptions(),
                    hiveTable.getSd());
        }
    } else {
        hiveTable =
                HiveTableUtil.alterTableViaCatalogBaseTable(
                        tablePath, newCatalogTable, hiveTable, hiveConf,
                        ManagedTableListener.isManagedTable(this, newCatalogTable));
    }
    if (isHiveTable) {
        hiveTable.getParameters().remove(CONNECTOR.key());
    }
    try {
        client.alter_table(tablePath.getDatabaseName(), tablePath.getObjectName(), hiveTable);
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to alter table %s", tablePath.getFullName()), e);
    }
}
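A hedged usage sketch for this method, assuming hiveCatalog is open and a table default.users (an ordinary CatalogTable, not a view) already exists; the property key is hypothetical and the checked TableNotExistException is left to the caller. The pattern is read-modify-write: fetch the current definition, copy it with adjusted options, and write it back.

// Sketch only: adjust one option of an existing table via the Catalog API.
ObjectPath path = new ObjectPath("default", "users");
CatalogTable existing = (CatalogTable) hiveCatalog.getTable(path);
Map<String, String> newOptions = new HashMap<>(existing.getOptions());
newOptions.put("some.custom.property", "new-value"); // hypothetical property key
hiveCatalog.alterTable(path, existing.copy(newOptions), false); // ignoreIfNotExists = false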