Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertCreateTable.
private Operation convertCreateTable(HiveParserASTNode ast) throws SemanticException {
String[] qualifiedTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
String dbDotTab = HiveParserBaseSemanticAnalyzer.getDotName(qualifiedTabName);
String likeTableName;
List<FieldSchema> cols = new ArrayList<>();
List<FieldSchema> partCols = new ArrayList<>();
List<PrimaryKey> primaryKeys = new ArrayList<>();
List<NotNullConstraint> notNulls = new ArrayList<>();
String comment = null;
String location = null;
Map<String, String> tblProps = null;
boolean ifNotExists = false;
boolean isExt = false;
boolean isTemporary = false;
HiveParserASTNode selectStmt = null;
// regular CREATE TABLE
final int createTable = 0;
// CREATE TABLE LIKE ... (CTLT)
final int ctlt = 1;
// CREATE TABLE AS SELECT ... (CTAS)
final int ctas = 2;
int commandType = createTable;
HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams rowFormatParams = new HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams();
HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine());
int numCh = ast.getChildCount();
// do simple semantic checks on the 1st-level children; note that CTAS does not support partitioning (for now)
for (int num = 1; num < numCh; num++) {
HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
if (storageFormat.fillStorageFormat(child)) {
continue;
}
switch(child.getToken().getType()) {
case HiveASTParser.TOK_IFNOTEXISTS:
ifNotExists = true;
break;
case HiveASTParser.KW_EXTERNAL:
isExt = true;
break;
case HiveASTParser.KW_TEMPORARY:
isTemporary = true;
break;
case HiveASTParser.TOK_LIKETABLE:
if (child.getChildCount() > 0) {
likeTableName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) child.getChild(0));
if (likeTableName != null) {
if (commandType == ctas) {
throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
}
if (cols.size() != 0) {
throw new ValidationException(ErrorMsg.CTLT_COLLST_COEXISTENCE.getMsg());
}
}
commandType = ctlt;
handleUnsupportedOperation("CREATE TABLE LIKE is not supported");
}
break;
case HiveASTParser.TOK_QUERY: // CTAS
if (commandType == ctlt) {
throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
}
if (cols.size() != 0) {
throw new ValidationException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
}
if (partCols.size() != 0) {
throw new ValidationException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
}
if (isExt) {
throw new ValidationException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
}
commandType = ctas;
selectStmt = child;
break;
case HiveASTParser.TOK_TABCOLLIST:
cols = HiveParserBaseSemanticAnalyzer.getColumns(child, true, primaryKeys, notNulls);
break;
case HiveASTParser.TOK_TABLECOMMENT:
comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
break;
case HiveASTParser.TOK_TABLEPARTCOLS:
partCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) child.getChild(0), false);
break;
case HiveASTParser.TOK_TABLEROWFORMAT:
rowFormatParams.analyzeRowFormat(child);
break;
case HiveASTParser.TOK_TABLELOCATION:
location = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
break;
case HiveASTParser.TOK_TABLEPROPERTIES:
tblProps = getProps((HiveParserASTNode) child.getChild(0));
break;
case HiveASTParser.TOK_TABLESERIALIZER:
child = (HiveParserASTNode) child.getChild(0);
storageFormat.setSerde(HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
if (child.getChildCount() == 2) {
HiveParserBaseSemanticAnalyzer.readProps((HiveParserASTNode) (child.getChild(1).getChild(0)), storageFormat.getSerdeProps());
}
break;
case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
handleUnsupportedOperation("Bucketed table is not supported");
break;
case HiveASTParser.TOK_TABLESKEWED:
handleUnsupportedOperation("Skewed table is not supported");
break;
default:
throw new ValidationException("Unknown AST node for CREATE TABLE: " + child);
}
}
if (storageFormat.getStorageHandler() != null) {
handleUnsupportedOperation("Storage handler table is not supported");
}
if (commandType == createTable || commandType == ctlt) {
queryState.setCommandType(HiveOperation.CREATETABLE);
} else {
queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
}
storageFormat.fillDefaultStorageFormat(isExt, false);
if (isTemporary) {
if (partCols.size() > 0) {
handleUnsupportedOperation("Partition columns are not supported on temporary tables");
}
handleUnsupportedOperation("Temporary hive table is not supported");
}
// Handle different types of CREATE TABLE command
switch(commandType) {
case createTable: // REGULAR CREATE TABLE DDL
tblProps = addDefaultProperties(tblProps);
return convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
case ctlt: // create table like <tbl_name>
tblProps = addDefaultProperties(tblProps);
throw new SemanticException("CREATE TABLE LIKE is not supported yet");
case ctas: // create table as select
tblProps = addDefaultProperties(tblProps);
// analyze the query
HiveParserCalcitePlanner calcitePlanner = hiveParser.createCalcitePlanner(context, queryState, hiveShim);
calcitePlanner.setCtasCols(cols);
RelNode queryRelNode = calcitePlanner.genLogicalPlan(selectStmt);
// create a table to represent the dest table
String[] dbTblName = dbDotTab.split("\\.");
Table destTable = new Table(Table.getEmptyTable(dbTblName[0], dbTblName[1]));
destTable.getSd().setCols(cols);
Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> insertOperationInfo = dmlHelper.createInsertOperationInfo(queryRelNode, destTable, Collections.emptyMap(), Collections.emptyList(), false);
CreateTableOperation createTableOperation = convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
return new CreateTableASOperation(createTableOperation, insertOperationInfo.f2, insertOperationInfo.f1, insertOperationInfo.f3);
default:
throw new ValidationException("Unrecognized command.");
}
}
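The identifiers that end up in the returned operations come from parseObjectIdentifier, a private helper of the analyzer that is not shown on this page. A minimal sketch of what it presumably does, assuming the analyzer holds a reference to the session's CatalogManager (the catalogManager field name is an assumption):
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.UnresolvedIdentifier;

private ObjectIdentifier parseObjectIdentifier(String compoundName) {
    // split "db.table" (or just "table") into its parts
    UnresolvedIdentifier unresolved = UnresolvedIdentifier.of(compoundName.split("\\."));
    // qualifyIdentifier fills in the current catalog and database for any missing parts
    return catalogManager.qualifyIdentifier(unresolved);
}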
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertAlterTableModifyCols.
private Operation convertAlterTableModifyCols(CatalogBaseTable alteredTable, String tblName, HiveParserASTNode ast, boolean replace) throws SemanticException {
List<FieldSchema> newCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) ast.getChild(0));
boolean isCascade = false;
if (null != ast.getFirstChildWithType(HiveASTParser.TOK_CASCADE)) {
isCascade = true;
}
ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
CatalogTable oldTable = (CatalogTable) alteredTable;
// prepare properties
Map<String, String> props = new HashMap<>(oldTable.getOptions());
props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
if (isCascade) {
props.put(ALTER_COL_CASCADE, "true");
}
TableSchema oldSchema = oldTable.getSchema();
final int numPartCol = oldTable.getPartitionKeys().size();
TableSchema.Builder builder = TableSchema.builder();
// add existing non-part col if we're not replacing
if (!replace) {
List<TableColumn> nonPartCols = oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
for (TableColumn column : nonPartCols) {
builder.add(column);
}
setWatermarkAndPK(builder, oldSchema);
}
// add new cols
for (FieldSchema col : newCols) {
builder.add(TableColumn.physical(col.getName(), HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(col.getType()))));
}
// add part cols
List<TableColumn> partCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
for (TableColumn column : partCols) {
builder.add(column);
}
return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(builder.build(), oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
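The builder rebuilds the schema in a fixed order: existing non-partition columns (skipped when REPLACE COLUMNS is used), then the newly declared columns, and finally the partition columns, which Hive always keeps at the end. A small illustration with made-up column names and types:
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;

TableSchema.Builder builder = TableSchema.builder();
builder.add(TableColumn.physical("id", DataTypes.INT()));          // existing non-partition column
builder.add(TableColumn.physical("new_col", DataTypes.STRING()));  // column added by ALTER TABLE ... ADD COLUMNS
builder.add(TableColumn.physical("dt", DataTypes.STRING()));       // partition column stays last
TableSchema newSchema = builder.build();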
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertDropTable.
private Operation convertDropTable(HiveParserASTNode ast, TableType expectedType) {
String tableName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
boolean ifExists = (ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null);
ObjectIdentifier identifier = parseObjectIdentifier(tableName);
CatalogBaseTable baseTable = getCatalogBaseTable(identifier, true);
if (expectedType == TableType.VIRTUAL_VIEW) {
if (baseTable instanceof CatalogTable) {
throw new ValidationException("DROP VIEW for a table is not allowed");
}
return new DropViewOperation(identifier, ifExists, false);
} else {
if (baseTable instanceof CatalogView) {
throw new ValidationException("DROP TABLE for a view is not allowed");
}
return new DropTableOperation(identifier, ifExists, false);
}
}
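parseObjectIdentifier qualifies the table name against the current catalog and database, so the operation always carries a fully qualified ObjectIdentifier. A hypothetical example with made-up catalog and table names:
ObjectIdentifier id = ObjectIdentifier.of("myhive", "default", "orders");
// DropTableOperation(identifier, ifExists, isTemporary); the snippet above always passes false for isTemporary
Operation drop = new DropTableOperation(id, true, false);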
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertCreateTable (private overload that builds the CreateTableOperation).
private CreateTableOperation convertCreateTable(String compoundName, boolean isExternal, boolean ifNotExists, boolean isTemporary, List<FieldSchema> cols, List<FieldSchema> partCols, String comment, String location, Map<String, String> tblProps, HiveParserRowFormatParams rowFormatParams, HiveParserStorageFormat storageFormat, List<PrimaryKey> primaryKeys, List<NotNullConstraint> notNullConstraints) {
Map<String, String> props = new HashMap<>();
if (tblProps != null) {
props.putAll(tblProps);
}
markHiveConnector(props);
// external
if (isExternal) {
props.put(TABLE_IS_EXTERNAL, "true");
}
// PK trait
UniqueConstraint uniqueConstraint = null;
if (primaryKeys != null && !primaryKeys.isEmpty()) {
PrimaryKey primaryKey = primaryKeys.get(0);
byte trait = 0;
if (primaryKey.isEnable()) {
trait = HiveDDLUtils.enableConstraint(trait);
}
if (primaryKey.isValidate()) {
trait = HiveDDLUtils.validateConstraint(trait);
}
if (primaryKey.isRely()) {
trait = HiveDDLUtils.relyConstraint(trait);
}
props.put(PK_CONSTRAINT_TRAIT, String.valueOf(trait));
List<String> pkCols = primaryKeys.stream().map(PrimaryKey::getPk).collect(Collectors.toList());
String constraintName = primaryKey.getConstraintName();
if (constraintName == null) {
constraintName = pkCols.stream().collect(Collectors.joining("_", "PK_", ""));
}
uniqueConstraint = UniqueConstraint.primaryKey(constraintName, pkCols);
}
// NOT NULL constraints
List<String> notNullCols = new ArrayList<>();
if (!notNullConstraints.isEmpty()) {
List<String> traits = new ArrayList<>();
for (NotNullConstraint notNull : notNullConstraints) {
byte trait = 0;
if (notNull.isEnable()) {
trait = HiveDDLUtils.enableConstraint(trait);
}
if (notNull.isValidate()) {
trait = HiveDDLUtils.validateConstraint(trait);
}
if (notNull.isRely()) {
trait = HiveDDLUtils.relyConstraint(trait);
}
traits.add(String.valueOf(trait));
notNullCols.add(notNull.getColName());
}
props.put(NOT_NULL_CONSTRAINT_TRAITS, String.join(COL_DELIMITER, traits));
props.put(NOT_NULL_COLS, String.join(COL_DELIMITER, notNullCols));
}
// row format
if (rowFormatParams != null) {
encodeRowFormat(rowFormatParams, props);
}
// storage format
if (storageFormat != null) {
encodeStorageFormat(storageFormat, props);
}
// location
if (location != null) {
props.put(TABLE_LOCATION_URI, location);
}
ObjectIdentifier identifier = parseObjectIdentifier(compoundName);
Set<String> notNullColSet = new HashSet<>(notNullCols);
if (uniqueConstraint != null) {
notNullColSet.addAll(uniqueConstraint.getColumns());
}
TableSchema tableSchema = HiveTableUtil.createTableSchema(cols, partCols, notNullColSet, uniqueConstraint);
return new CreateTableOperation(identifier, new CatalogTableImpl(tableSchema, HiveCatalog.getFieldNames(partCols), props, comment), ifNotExists, isTemporary);
}
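When the DDL does not name the primary-key constraint, a name is derived from the key columns with a PK_ prefix, reusing the same calls the snippet makes. A short worked example with illustrative column names:
List<String> pkCols = Arrays.asList("order_id", "item_id");
String constraintName = pkCols.stream().collect(Collectors.joining("_", "PK_", ""));
// constraintName is now "PK_order_id_item_id"
UniqueConstraint uniqueConstraint = UniqueConstraint.primaryKey(constraintName, pkCols);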
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertAlterTableRename.
private Operation convertAlterTableRename(String sourceName, HiveParserASTNode ast, boolean expectView) throws SemanticException {
String[] target = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
String targetName = HiveParserBaseSemanticAnalyzer.getDotName(target);
ObjectIdentifier objectIdentifier = parseObjectIdentifier(sourceName);
return expectView ? new AlterViewRenameOperation(objectIdentifier, parseObjectIdentifier(targetName)) : new AlterTableRenameOperation(objectIdentifier, parseObjectIdentifier(targetName));
}
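Both the source and the target name are resolved to an ObjectIdentifier, and the expectView flag only changes which operation class wraps the pair. An illustrative call, assuming the catalog resolution described above and made-up table names:
ObjectIdentifier source = parseObjectIdentifier("default.src_tbl");
ObjectIdentifier target = parseObjectIdentifier("default.dst_tbl");
Operation rename = new AlterTableRenameOperation(source, target);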