Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.
From the class HiveCatalogHiveMetadataTest, method testViewCompatibility.
@Test
public void testViewCompatibility() throws Exception {
    // we always store view schema via properties now;
    // make sure non-generic views created previously can still be used
    catalog.createDatabase(db1, createDb(), false);
    Table hiveView =
            org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(
                    path1.getDatabaseName(), path1.getObjectName());
    // mark it as a view
    hiveView.setTableType(TableType.VIRTUAL_VIEW.name());
    final String originQuery = "view origin query";
    final String expandedQuery = "view expanded query";
    hiveView.setViewOriginalText(originQuery);
    hiveView.setViewExpandedText(expandedQuery);
    // set the schema in the storage descriptor
    Schema schema =
            Schema.newBuilder()
                    .fromFields(
                            new String[] {"i", "s"},
                            new AbstractDataType[] {DataTypes.INT(), DataTypes.STRING()})
                    .build();
    List<FieldSchema> fields = new ArrayList<>();
    for (Schema.UnresolvedColumn column : schema.getColumns()) {
        String name = column.getName();
        DataType type = (DataType) ((Schema.UnresolvedPhysicalColumn) column).getDataType();
        fields.add(
                new FieldSchema(
                        name, HiveTypeUtil.toHiveTypeInfo(type, true).getTypeName(), null));
    }
    hiveView.getSd().setCols(fields);
    // mark the view as non-generic via the legacy is_generic property
    hiveView.getParameters().put(CatalogPropertiesUtil.IS_GENERIC, "false");
    // add some other properties
    hiveView.getParameters().put("k1", "v1");
    ((HiveCatalog) catalog).client.createTable(hiveView);
    CatalogBaseTable baseTable = catalog.getTable(path1);
    assertTrue(baseTable instanceof CatalogView);
    CatalogView catalogView = (CatalogView) baseTable;
    assertEquals(schema, catalogView.getUnresolvedSchema());
    assertEquals(originQuery, catalogView.getOriginalQuery());
    assertEquals(expandedQuery, catalogView.getExpandedQuery());
    assertEquals("v1", catalogView.getOptions().get("k1"));
    // mark the view as non-generic via the connector property instead
    hiveView.setDbName(path3.getDatabaseName());
    hiveView.setTableName(path3.getObjectName());
    hiveView.getParameters().remove(CatalogPropertiesUtil.IS_GENERIC);
    hiveView.getParameters().put(CONNECTOR.key(), IDENTIFIER);
    ((HiveCatalog) catalog).client.createTable(hiveView);
    baseTable = catalog.getTable(path3);
    assertTrue(baseTable instanceof CatalogView);
    catalogView = (CatalogView) baseTable;
    assertEquals(schema, catalogView.getUnresolvedSchema());
    assertEquals(originQuery, catalogView.getOriginalQuery());
    assertEquals(expandedQuery, catalogView.getExpandedQuery());
    assertEquals("v1", catalogView.getOptions().get("k1"));
}
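The assertions above rely on a retrieve-and-narrow pattern: getTable returns the common supertype CatalogBaseTable, and callers cast to CatalogView to reach the view-specific accessors. Below is a minimal sketch of that pattern in isolation, assuming an already-configured Catalog instance; the database and view names are hypothetical, not taken from the test.

// Sketch only: narrows a CatalogBaseTable to CatalogView before touching
// view-specific API. "db1" and "view1" are hypothetical names.
void describeView(Catalog catalog) throws TableNotExistException {
    CatalogBaseTable base = catalog.getTable(new ObjectPath("db1", "view1"));
    if (base instanceof CatalogView) {
        CatalogView view = (CatalogView) base;
        String original = view.getOriginalQuery(); // query text as written
        String expanded = view.getExpandedQuery(); // query text with references expanded
    }
}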
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.
From the class PostgresCatalogTest, method testArrayDataTypes.
@Test
public void testArrayDataTypes() throws TableNotExistException {
    CatalogBaseTable table =
            catalog.getTable(
                    new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, TABLE_ARRAY_TYPE));
    assertEquals(getArrayTable().schema, table.getUnresolvedSchema());
}
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.
From the class PostgresCatalogTest, method testPrimitiveDataTypes.
@Test
public void testPrimitiveDataTypes() throws TableNotExistException {
    CatalogBaseTable table =
            catalog.getTable(
                    new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, TABLE_PRIMITIVE_TYPE));
    assertEquals(getPrimitiveTable().schema, table.getUnresolvedSchema());
}
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.
From the class PostgresCatalogTest, method testGetTable.
@Test
public void testGetTable()
        throws org.apache.flink.table.catalog.exceptions.TableNotExistException {
    // test postgres.public.t1
    Schema schema = getSimpleTable().schema;
    CatalogBaseTable table = catalog.getTable(new ObjectPath("postgres", TABLE1));
    assertEquals(schema, table.getUnresolvedSchema());
    table = catalog.getTable(new ObjectPath("postgres", "public.t1"));
    assertEquals(schema, table.getUnresolvedSchema());
    // test testdb.public.t2
    table = catalog.getTable(new ObjectPath(TEST_DB, TABLE2));
    assertEquals(schema, table.getUnresolvedSchema());
    table = catalog.getTable(new ObjectPath(TEST_DB, "public.t2"));
    assertEquals(schema, table.getUnresolvedSchema());
    // test testdb.testschema.t3
    table = catalog.getTable(new ObjectPath(TEST_DB, TEST_SCHEMA + ".t3"));
    assertEquals(schema, table.getUnresolvedSchema());
}
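Note the pairs of equivalent lookups above: PostgresCatalog folds Postgres' three-part addressing (database.schema.table) into Flink's two-part ObjectPath by carrying the schema inside the object name, with "public" as the implicit default. A minimal sketch of the two spellings, assuming TABLE1 resolves to "t1" (consistent with the "public.t1" lookup in the test) and an already-configured catalog:

// Sketch only: both paths address the same physical table postgres.public.t1.
void lookupBothWays(Catalog catalog) throws TableNotExistException {
    // unqualified object name: the "public" schema is implied
    CatalogBaseTable implicit = catalog.getTable(new ObjectPath("postgres", "t1"));
    // schema-qualified object name: same table, spelled out
    CatalogBaseTable explicit = catalog.getTable(new ObjectPath("postgres", "public.t1"));
    // both carry the same unresolved schema, as the assertions above verify
}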
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.
From the class HiveParserDDLSemanticAnalyzer, method convertAlterTable.
private Operation convertAlterTable(HiveParserASTNode input) throws SemanticException {
    Operation operation = null;
    HiveParserASTNode ast = (HiveParserASTNode) input.getChild(1);
    String[] qualified =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) input.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    HashMap<String, String> partSpec = null;
    HiveParserASTNode partSpecNode = (HiveParserASTNode) input.getChild(2);
    if (partSpecNode != null) {
        partSpec = getPartSpec(partSpecNode);
    }
    CatalogBaseTable alteredTable = getAlteredTable(tableName, false);
    switch (ast.getType()) {
        case HiveASTParser.TOK_ALTERTABLE_RENAME:
            operation = convertAlterTableRename(tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDCOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_REPLACECOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_RENAMECOL:
            operation = convertAlterTableChangeCol(alteredTable, qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDPARTS:
            operation = convertAlterTableAddParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPARTS:
            operation = convertAlterTableDropParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_PROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_UPDATESTATS:
            operation =
                    convertAlterTableProps(alteredTable, tableName, partSpec, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_FILEFORMAT:
            operation = convertAlterTableFileFormat(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_LOCATION:
            operation = convertAlterTableLocation(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERIALIZER:
            operation = convertAlterTableSerde(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERDEPROPERTIES:
            operation = convertAlterTableSerdeProps(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_TOUCH:
        case HiveASTParser.TOK_ALTERTABLE_ARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_UNARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_PARTCOLTYPE:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED:
        case HiveASTParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
        case HiveASTParser.TOK_ALTERTABLE_MERGEFILES:
        case HiveASTParser.TOK_ALTERTABLE_RENAMEPART:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED_LOCATION:
        case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
        case HiveASTParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveASTParser.TOK_ALTERTABLE_COMPACT:
        case HiveASTParser.TOK_ALTERTABLE_UPDATECOLSTATS:
        case HiveASTParser.TOK_ALTERTABLE_DROPCONSTRAINT:
        case HiveASTParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            handleUnsupportedOperation(ast);
            break;
        default:
            throw new ValidationException("Unknown AST node for ALTER TABLE: " + ast);
    }
    return operation;
}
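The getChild calls at the top of the method imply a fixed child layout for ALTER TABLE AST nodes. The sketch below reconstructs that layout from this snippet alone; it is an inference, not taken from HiveASTParser documentation:

// Inferred shape of the ALTER TABLE node (assumption based on the indices above):
//
// TOK_ALTERTABLE
// ├── child(0): qualified table name
// ├── child(1): the specific alteration token, e.g. TOK_ALTERTABLE_RENAME
// └── child(2): optional partition spec (null when absent)
//
// Recognized tokens dispatch to a dedicated convert* method, a fixed set of
// tokens funnels into handleUnsupportedOperation, and any other token fails
// fast with a ValidationException instead of being silently ignored.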