Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
From the class HiveParserDDLSemanticAnalyzer, the method convertAlterTableRename:
private Operation convertAlterTableRename(
        String sourceName, HiveParserASTNode ast, boolean expectView) throws SemanticException {
    String[] target =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) ast.getChild(0));
    String targetName = HiveParserBaseSemanticAnalyzer.getDotName(target);
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(sourceName);
    return expectView
            ? new AlterViewRenameOperation(objectIdentifier, parseObjectIdentifier(targetName))
            : new AlterTableRenameOperation(objectIdentifier, parseObjectIdentifier(targetName));
}
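For context, a minimal standalone sketch of building and inspecting an ObjectIdentifier directly with ObjectIdentifier.of; the catalog, database, and table names are placeholders, and parseObjectIdentifier above is a helper internal to HiveParserDDLSemanticAnalyzer, not part of the ObjectIdentifier API:
import org.apache.flink.table.catalog.ObjectIdentifier;

public class ObjectIdentifierSketch {
    public static void main(String[] args) {
        // Placeholder names; in the analyzer they come from the parsed rename statement.
        ObjectIdentifier source = ObjectIdentifier.of("myhive", "default", "old_table");
        ObjectIdentifier target = ObjectIdentifier.of("myhive", "default", "new_table");
        // An ObjectIdentifier fully qualifies a catalog object as catalog.database.name.
        System.out.println(source.asSummaryString() + " -> " + target.asSummaryString());
        System.out.println(
                source.getCatalogName() + " / " + source.getDatabaseName() + " / " + source.getObjectName());
    }
}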
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
From the class HiveParserDDLSemanticAnalyzer, the method convertAlterTableModifyCols:
private Operation convertAlterTableModifyCols(
        CatalogBaseTable alteredTable, String tblName, HiveParserASTNode ast, boolean replace)
        throws SemanticException {
    List<FieldSchema> newCols =
            HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) ast.getChild(0));
    boolean isCascade = false;
    if (null != ast.getFirstChildWithType(HiveASTParser.TOK_CASCADE)) {
        isCascade = true;
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    // prepare properties
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    TableSchema oldSchema = oldTable.getSchema();
    final int numPartCol = oldTable.getPartitionKeys().size();
    TableSchema.Builder builder = TableSchema.builder();
    // add existing non-part col if we're not replacing
    if (!replace) {
        List<TableColumn> nonPartCols =
                oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, oldSchema);
    }
    // add new cols
    for (FieldSchema col : newCols) {
        builder.add(
                TableColumn.physical(
                        col.getName(),
                        HiveTypeUtil.toFlinkType(
                                TypeInfoUtils.getTypeInfoFromTypeString(col.getType()))));
    }
    // add part cols
    List<TableColumn> partCols =
            oldSchema.getTableColumns()
                    .subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    builder.build(), oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
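A hedged sketch of the schema-rebuild pattern above, isolated from the Hive type conversion; the column names and DataTypes are stand-ins for the types that HiveTypeUtil.toFlinkType would produce:
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;

public class SchemaRebuildSketch {
    public static void main(String[] args) {
        // Mirrors convertAlterTableModifyCols: keep existing non-partition columns,
        // append the new/changed columns, then re-append partition columns last.
        TableSchema.Builder builder = TableSchema.builder();
        builder.add(TableColumn.physical("id", DataTypes.BIGINT()));   // existing column
        builder.add(TableColumn.physical("name", DataTypes.STRING())); // column added by ALTER TABLE
        builder.add(TableColumn.physical("dt", DataTypes.STRING()));   // partition column stays last
        TableSchema newSchema = builder.build();
        System.out.println(newSchema);
    }
}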
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
From the class HiveParserDDLSemanticAnalyzer, the method convertAlterTableAddParts:
/**
* Add one or more partitions to a table. Useful when the data has been copied to the right
* location by some other process.
*/
private Operation convertAlterTableAddParts(String[] qualified, CommonTree ast) {
    // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists?
    // alterStatementSuffixAddPartitionsElement+)
    boolean ifNotExists = ast.getChild(0).getType() == HiveASTParser.TOK_IFNOTEXISTS;
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    boolean isView = tab.isView();
    validateAlterTableType(tab);
    int numCh = ast.getChildCount();
    int start = ifNotExists ? 1 : 0;
    String currentLocation = null;
    Map<String, String> currentPartSpec = null;
    // Parser has done some verification, so the order of tokens doesn't need to be verified
    // here.
    List<CatalogPartitionSpec> specs = new ArrayList<>();
    List<CatalogPartition> partitions = new ArrayList<>();
    for (int num = start; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        switch (child.getToken().getType()) {
            case HiveASTParser.TOK_PARTSPEC:
                if (currentPartSpec != null) {
                    specs.add(new CatalogPartitionSpec(currentPartSpec));
                    Map<String, String> props = new HashMap<>();
                    if (currentLocation != null) {
                        props.put(TABLE_LOCATION_URI, currentLocation);
                    }
                    partitions.add(new CatalogPartitionImpl(props, null));
                    currentLocation = null;
                }
                currentPartSpec = getPartSpec(child);
                // validate reserved values
                validatePartitionValues(currentPartSpec);
                break;
            case HiveASTParser.TOK_PARTITIONLOCATION:
                // if location specified, set in partition
                if (isView) {
                    throw new ValidationException("LOCATION clause illegal for view partition");
                }
                currentLocation =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                child.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown child: " + child);
        }
    }
    // add the last one
    if (currentPartSpec != null) {
        specs.add(new CatalogPartitionSpec(currentPartSpec));
        Map<String, String> props = new HashMap<>();
        if (currentLocation != null) {
            props.put(TABLE_LOCATION_URI, currentLocation);
        }
        partitions.add(new CatalogPartitionImpl(props, null));
    }
    ObjectIdentifier tableIdentifier =
            tab.getDbName() == null
                    ? parseObjectIdentifier(tab.getTableName())
                    : catalogManager.qualifyIdentifier(
                            UnresolvedIdentifier.of(tab.getDbName(), tab.getTableName()));
    return new AddPartitionsOperation(tableIdentifier, ifNotExists, specs, partitions);
}
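A hedged sketch of how the spec/partition pairs feed into AddPartitionsOperation; the identifier, partition values, and the "location" property key are placeholders (the method above uses the Hive connector's TABLE_LOCATION_URI constant instead):
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.CatalogPartition;
import org.apache.flink.table.catalog.CatalogPartitionImpl;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.AddPartitionsOperation;

public class AddPartitionsSketch {
    public static void main(String[] args) {
        // Placeholder for: ALTER TABLE mytable ADD IF NOT EXISTS PARTITION (dt='2021-01-01') LOCATION '...'
        ObjectIdentifier table = ObjectIdentifier.of("myhive", "default", "mytable");
        CatalogPartitionSpec spec =
                new CatalogPartitionSpec(Collections.singletonMap("dt", "2021-01-01"));
        Map<String, String> props = new HashMap<>();
        props.put("location", "hdfs:///warehouse/mytable/dt=2021-01-01"); // placeholder property key
        CatalogPartition partition = new CatalogPartitionImpl(props, null);
        // One spec and one partition per ADD PARTITION element, kept in matching order.
        AddPartitionsOperation op =
                new AddPartitionsOperation(
                        table, true, Collections.singletonList(spec), Collections.singletonList(partition));
        System.out.println(op.asSummaryString());
    }
}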
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
From the class HiveParserDDLSemanticAnalyzer, the method convertDropFunction:
private Operation convertDropFunction(HiveParserASTNode ast) {
    // ^(TOK_DROPFUNCTION identifier ifExists? $temp?)
    String functionName = ast.getChild(0).getText();
    boolean ifExists = (ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null);
    boolean isTemporaryFunction =
            (ast.getFirstChildWithType(HiveASTParser.TOK_TEMPORARY) != null);
    if (isTemporaryFunction) {
        return new DropTempSystemFunctionOperation(functionName, ifExists);
    } else {
        ObjectIdentifier identifier = parseObjectIdentifier(functionName);
        return new DropCatalogFunctionOperation(identifier, ifExists, false);
    }
}
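A hedged sketch of the two drop paths shown above; the catalog and database names are placeholders for what parseObjectIdentifier would resolve from the current session:
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation;

public class DropFunctionSketch {
    static Operation dropFunction(String functionName, boolean ifExists, boolean temporary) {
        if (temporary) {
            // Temporary system functions are addressed by plain name, not by identifier.
            return new DropTempSystemFunctionOperation(functionName, ifExists);
        }
        // Catalog functions need a fully qualified identifier; placeholder catalog/database.
        ObjectIdentifier identifier = ObjectIdentifier.of("myhive", "default", functionName);
        return new DropCatalogFunctionOperation(identifier, ifExists, false);
    }

    public static void main(String[] args) {
        System.out.println(dropFunction("my_udf", true, false).asSummaryString());
    }
}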
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
From the class HiveDynamicTableFactoryTest, the method getTableSource:
private DynamicTableSource getTableSource(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier =
            ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    return FactoryUtil.createDynamicTableSource(
            (DynamicTableSourceFactory) hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
            tableIdentifier,
            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(),
            false);
}
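A hedged sketch of the lookup half of the helper above, namely narrowing a three-part ObjectIdentifier to the two-part ObjectPath that Catalog#getTable expects; the catalog instance, catalog name, and "default" database are placeholders:
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

public class CatalogLookupSketch {
    // ObjectIdentifier carries catalog.database.table; Catalog#getTable only needs
    // database.table, so the identifier is narrowed with toObjectPath() before the lookup.
    static CatalogBaseTable lookupTable(Catalog catalog, String catalogName, String tableName)
            throws Exception {
        ObjectIdentifier identifier = ObjectIdentifier.of(catalogName, "default", tableName);
        ObjectPath path = identifier.toObjectPath();
        return catalog.getTable(path);
    }
}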