Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
Class HiveParserDDLSemanticAnalyzer, method convertCreateFunction:
private Operation convertCreateFunction(HiveParserASTNode ast) {
    // ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
    String functionName = ast.getChild(0).getText().toLowerCase();
    boolean isTemporaryFunction =
            (ast.getFirstChildWithType(HiveASTParser.TOK_TEMPORARY) != null);
    String className =
            HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    // Temp functions are not allowed to have qualified names, i.e. names that
    // belong to a catalog/db.
    if (isTemporaryFunction && FunctionUtils.isQualifiedFunctionName(functionName)) {
        throw new ValidationException(
                "Temporary function cannot be created with a qualified name.");
    }
    if (isTemporaryFunction) {
        FunctionDefinition funcDefinition =
                funcDefFactory.createFunctionDefinition(
                        functionName,
                        new CatalogFunctionImpl(className, FunctionLanguage.JAVA));
        return new CreateTempSystemFunctionOperation(functionName, false, funcDefinition);
    } else {
        ObjectIdentifier identifier = parseObjectIdentifier(functionName);
        CatalogFunction catalogFunction =
                new CatalogFunctionImpl(className, FunctionLanguage.JAVA);
        return new CreateCatalogFunctionOperation(identifier, catalogFunction, false, false);
    }
}
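The helper parseObjectIdentifier is not part of the snippet. A minimal sketch of what such a helper typically does, assuming a CatalogManager field named catalogManager is in scope and using naive dot-splitting that ignores quoted identifiers; both are assumptions, not the project's actual implementation:

// Sketch only: qualify a possibly-partial name ("func", "db.func", or
// "cat.db.func") into a fully-qualified ObjectIdentifier.
private ObjectIdentifier parseObjectIdentifier(String name) {
    // UnresolvedIdentifier accepts one to three name parts.
    UnresolvedIdentifier unresolved = UnresolvedIdentifier.of(name.split("\\."));
    // qualifyIdentifier fills in the session's current catalog/database
    // for any missing parts.
    return catalogManager.qualifyIdentifier(unresolved);
}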
Class HiveParserDDLSemanticAnalyzer, method convertAlterTableRename:
private Operation convertAlterTableRename(
        String sourceName, HiveParserASTNode ast, boolean expectView)
        throws SemanticException {
    String[] target =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) ast.getChild(0));
    String targetName = HiveParserBaseSemanticAnalyzer.getDotName(target);
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(sourceName);
    return expectView
            ? new AlterViewRenameOperation(objectIdentifier, parseObjectIdentifier(targetName))
            : new AlterTableRenameOperation(objectIdentifier, parseObjectIdentifier(targetName));
}
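For context, a hypothetical usage example of the statements this method handles (the tableEnv variable and object names are made up):

// Both statements are routed through convertAlterTableRename;
// expectView selects which operation is produced.
tableEnv.executeSql("ALTER TABLE old_tbl RENAME TO new_tbl");   // AlterTableRenameOperation
tableEnv.executeSql("ALTER VIEW old_view RENAME TO new_view");  // AlterViewRenameOperation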
Class HiveParserDDLSemanticAnalyzer, method getAlteredTable:
private CatalogBaseTable getAlteredTable(String tableName, boolean expectView) {
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(tableName);
    CatalogBaseTable catalogBaseTable = getCatalogBaseTable(objectIdentifier);
    if (expectView) {
        if (catalogBaseTable instanceof CatalogTable) {
            throw new ValidationException("ALTER VIEW for a table is not allowed");
        }
    } else {
        if (catalogBaseTable instanceof CatalogView) {
            throw new ValidationException("ALTER TABLE for a view is not allowed");
        }
    }
    return catalogBaseTable;
}
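The getCatalogBaseTable helper is also outside the snippet. A plausible sketch, assuming Flink's CatalogManager#getTable lookup, which returns an optional lookup result whose getTable() yields the CatalogBaseTable in the Flink versions this code targets; the helper shape and error message are assumptions:

// Sketch only: resolve an ObjectIdentifier to the catalog object behind it.
private CatalogBaseTable getCatalogBaseTable(ObjectIdentifier identifier) {
    return catalogManager
            .getTable(identifier)
            .orElseThrow(() -> new ValidationException(
                    String.format("Table or view %s doesn't exist", identifier)))
            .getTable();
}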
Class HiveParserDDLSemanticAnalyzer, method convertAlterTableChangeCol:
private Operation convertAlterTableChangeCol(
        CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast)
        throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name]
    // [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType =
            HiveParserBaseSemanticAnalyzer.getTypeStringFromAST(
                    (HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch (child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol =
                        HiveParserBaseSemanticAnalyzer.unescapeIdentifier(
                                child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException(
                        "Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the operation of renaming a column name.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo)
            && (null != skewInfo.getSkewedColNames())
            && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(
                oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn =
            TableColumn.physical(
                    newName,
                    HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema =
            OperationConverterUtils.changeColumn(oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
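A hypothetical example of the DDL this conversion covers, matching the grammar comment at the top of the method (the tableEnv variable and table/column names are made up):

// The StringLiteral child sets newComment, KW_FIRST sets first,
// and TOK_CASCADE sets isCascade in the loop above.
tableEnv.executeSql(
        "ALTER TABLE t CHANGE COLUMN c_old c_new STRING COMMENT 'renamed' FIRST CASCADE");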
Class HiveLookupJoinITCase, method testPartitionFetcherAndReader:
@Test
public void testPartitionFetcherAndReader() throws Exception {
    // constructs test data using dynamic partition
    TableEnvironment batchEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    batchEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    batchEnv.useCatalog(hiveCatalog.getName());
    batchEnv.executeSql(
                    "insert overwrite partition_table values "
                            + "(1,'a',08,2019,'08','01'),"
                            + "(1,'a',10,2020,'08','31'),"
                            + "(2,'a',21,2020,'08','31'),"
                            + "(2,'b',22,2020,'08','31'),"
                            + "(3,'c',33,2020,'09','31')")
            .await();
    FileSystemLookupFunction<HiveTablePartition> lookupFunction =
            getLookupFunction("partition_table");
    lookupFunction.open(null);
    PartitionFetcher<HiveTablePartition> fetcher = lookupFunction.getPartitionFetcher();
    PartitionFetcher.Context<HiveTablePartition> context = lookupFunction.getFetcherContext();
    List<HiveTablePartition> partitions = fetcher.fetch(context);
    // fetch latest partition by partition-name
    assertEquals(1, partitions.size());
    PartitionReader<HiveTablePartition, RowData> reader = lookupFunction.getPartitionReader();
    reader.open(partitions);
    List<RowData> res = new ArrayList<>();
    ObjectIdentifier tableIdentifier =
            ObjectIdentifier.of(hiveCatalog.getName(), "default", "partition_table");
    CatalogTable catalogTable =
            (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    GenericRowData reuse = new GenericRowData(catalogTable.getSchema().getFieldCount());
    TypeSerializer<RowData> serializer =
            InternalSerializers.create(catalogTable.getSchema().toRowDataType().getLogicalType());
    RowData row;
    while ((row = reader.read(reuse)) != null) {
        res.add(serializer.copy(row));
    }
    res.sort(Comparator.comparingInt(o -> o.getInt(0)));
    assertEquals("[+I(3,c,33,2020,09,31)]", res.toString());
}
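A short illustration of the ObjectIdentifier API the test relies on; the catalog name is made up:

// An ObjectIdentifier is always fully qualified: catalog, database, object.
ObjectIdentifier id = ObjectIdentifier.of("myhive", "default", "partition_table");
id.toObjectPath();    // ObjectPath for "default.partition_table" (catalog dropped)
id.asSummaryString(); // "myhive.default.partition_table"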