Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class ReplDumpTask, method dumpConstraintMetadata:
private void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot) throws Exception {
  try {
    Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME);
    Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
    Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
    Hive db = getHive();
    List<SQLPrimaryKey> pks = db.getPrimaryKeyList(dbName, tblName);
    List<SQLForeignKey> fks = db.getForeignKeyList(dbName, tblName);
    List<SQLUniqueConstraint> uks = db.getUniqueConstraintList(dbName, tblName);
    List<SQLNotNullConstraint> nns = db.getNotNullConstraintList(dbName, tblName);
    // Primary key, unique and not-null constraints share one "common" file per table.
    if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) {
      try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
        ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf);
        serializer.writeTo(jsonWriter, null);
      }
    }
    // Foreign keys are written to their own file.
    if (fks != null && !fks.isEmpty()) {
      try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
        ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf);
        serializer.writeTo(jsonWriter, null);
      }
    }
  } catch (NoSuchObjectException e) {
    // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
    // Just log a debug message and skip it.
    LOG.debug(e.getMessage());
  }
}
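The guard pattern above (write a file only when at least one constraint list is non-empty) repeats per constraint group and can be factored out. A minimal sketch, assuming nothing beyond java.util; the class and method names here are hypothetical, not part of Hive:

  import java.util.List;

  public final class ConstraintDumpUtil {
    private ConstraintDumpUtil() {}

    // True if any of the given constraint lists is non-null and non-empty.
    public static boolean hasConstraints(List<?>... constraintLists) {
      for (List<?> constraints : constraintLists) {
        if (constraints != null && !constraints.isEmpty()) {
          return true;
        }
      }
      return false;
    }
  }

With such a helper, the two guards above would read hasConstraints(pks, uks, nns) and hasConstraints(fks).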
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class BaseSemanticAnalyzer, method processForeignKeys:
/**
 * Process the foreign keys from the AST and populate them into the given SQLForeignKey list.
 * @param databaseName name of the database of the child (foreign key) table
 * @param tableName name of the child (foreign key) table
 * @param child Foreign Key token node
 * @param foreignKeys SQLForeignKey list to populate
 * @throws SemanticException
 */
protected static void processForeignKeys(String databaseName, String tableName, ASTNode child,
    List<SQLForeignKey> foreignKeys) throws SemanticException {
  // The ANTLR grammar looks like:
  // 1. KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList
  //    KW_REFERENCES tabName=tableName parCols=columnParenthesesList
  //    enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
  //    -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
  //    when the user specifies the constraint name (i.e. child.getChildCount() == 7)
  // 2. KW_FOREIGN KW_KEY fkCols=columnParenthesesList
  //    KW_REFERENCES tabName=tableName parCols=columnParenthesesList
  //    enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
  //    -> ^(TOK_FOREIGN_KEY $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
  //    when the user does not specify the constraint name (i.e. child.getChildCount() == 6)
  String constraintName = null;
  boolean enable = true;
  boolean validate = true;
  boolean rely = false;
  int fkIndex = -1;
  for (int i = 0; i < child.getChildCount(); i++) {
    ASTNode grandChild = (ASTNode) child.getChild(i);
    int type = grandChild.getToken().getType();
    if (type == HiveParser.TOK_CONSTRAINT_NAME) {
      constraintName = unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase());
    } else if (type == HiveParser.TOK_ENABLE) {
      enable = true;
      // validate is true by default if we enable the constraint
      validate = true;
    } else if (type == HiveParser.TOK_DISABLE) {
      enable = false;
      // validate is false by default if we disable the constraint
      validate = false;
    } else if (type == HiveParser.TOK_VALIDATE) {
      validate = true;
    } else if (type == HiveParser.TOK_NOVALIDATE) {
      validate = false;
    } else if (type == HiveParser.TOK_RELY) {
      rely = true;
    } else if (type == HiveParser.TOK_TABCOLNAME && fkIndex == -1) {
      fkIndex = i;
    }
  }
  if (enable) {
    throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(
        "ENABLE feature not supported yet. Please use DISABLE instead."));
  }
  if (validate) {
    throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(
        "VALIDATE feature not supported yet. Please use NOVALIDATE instead."));
  }
  // The referenced (parent) table name follows the FK column list in the AST,
  // and the parent key column list follows the table name.
  int ptIndex = fkIndex + 1;
  int pkIndex = ptIndex + 1;
  if (child.getChild(fkIndex).getChildCount() != child.getChild(pkIndex).getChildCount()) {
    throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(
        " The number of foreign key columns should be same as number of parent key columns "));
  }
  String[] parentDBTbl = getQualifiedTableName((ASTNode) child.getChild(ptIndex));
  // One SQLForeignKey bean is created per column pair; key_seq records the
  // 1-based position of the column within a composite key.
  for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) {
    SQLForeignKey sqlForeignKey = new SQLForeignKey();
    sqlForeignKey.setFktable_db(databaseName);
    sqlForeignKey.setFktable_name(tableName);
    Tree fkgrandChild = child.getChild(fkIndex).getChild(j);
    checkColumnName(fkgrandChild.getText());
    sqlForeignKey.setFkcolumn_name(unescapeIdentifier(fkgrandChild.getText().toLowerCase()));
    sqlForeignKey.setPktable_db(parentDBTbl[0]);
    sqlForeignKey.setPktable_name(parentDBTbl[1]);
    Tree pkgrandChild = child.getChild(pkIndex).getChild(j);
    sqlForeignKey.setPkcolumn_name(unescapeIdentifier(pkgrandChild.getText().toLowerCase()));
    sqlForeignKey.setKey_seq(j + 1);
    sqlForeignKey.setFk_name(constraintName);
    sqlForeignKey.setEnable_cstr(enable);
    sqlForeignKey.setValidate_cstr(validate);
    sqlForeignKey.setRely_cstr(rely);
    foreignKeys.add(sqlForeignKey);
  }
}
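The loop above emits one SQLForeignKey bean per column pair of a (possibly composite) key. A minimal standalone sketch of building the same beans directly, using only the thrift setters shown above; the database, table, and column names are illustrative assumptions:

  import java.util.ArrayList;
  import java.util.List;
  import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

  public class ForeignKeyExample {
    public static void main(String[] args) {
      // Hypothetical schema: sales.orders(customer_db, customer_id)
      // references crm.customers(db_id, id).
      String[] fkCols = {"customer_db", "customer_id"};
      String[] pkCols = {"db_id", "id"};
      List<SQLForeignKey> foreignKeys = new ArrayList<>();
      for (int j = 0; j < fkCols.length; j++) {
        SQLForeignKey fk = new SQLForeignKey();
        fk.setFktable_db("sales");
        fk.setFktable_name("orders");
        fk.setFkcolumn_name(fkCols[j]);
        fk.setPktable_db("crm");
        fk.setPktable_name("customers");
        fk.setPkcolumn_name(pkCols[j]);
        fk.setKey_seq(j + 1); // 1-based position within the composite key
        fk.setFk_name("fk_orders_customers");
        // Mirror the only combination processForeignKeys accepts:
        // DISABLE NOVALIDATE, optionally RELY.
        fk.setEnable_cstr(false);
        fk.setValidate_cstr(false);
        fk.setRely_cstr(true);
        foreignKeys.add(fk);
      }
      System.out.println(foreignKeys);
    }
  }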
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class DDLSemanticAnalyzer, method analyzeAlterTableRenameCol:
private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast,
    HashMap<String, String> partSpec) throws SemanticException {
  String newComment = null;
  boolean first = false;
  String flagCol = null;
  boolean isCascade = false;
  // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
  String oldColName = ast.getChild(0).getText();
  String newColName = ast.getChild(1).getText();
  String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
  ASTNode constraintChild = null;
  int childCount = ast.getChildCount();
  for (int i = 3; i < childCount; i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    switch (child.getToken().getType()) {
      case HiveParser.StringLiteral:
        newComment = unescapeSQLString(child.getText());
        break;
      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
        flagCol = unescapeIdentifier(child.getChild(0).getText());
        break;
      case HiveParser.KW_FIRST:
        first = true;
        break;
      case HiveParser.TOK_CASCADE:
        isCascade = true;
        break;
      case HiveParser.TOK_RESTRICT:
        break;
      default:
        // Anything else is a column constraint attached to the new column.
        constraintChild = (ASTNode) child;
    }
  }
  List<SQLPrimaryKey> primaryKeys = null;
  List<SQLForeignKey> foreignKeys = null;
  List<SQLUniqueConstraint> uniqueConstraints = null;
  List<SQLNotNullConstraint> notNullConstraints = null;
  List<SQLDefaultConstraint> defaultConstraints = null;
  List<SQLCheckConstraint> checkConstraints = null;
  if (constraintChild != null) {
    // Process column constraint
    switch (constraintChild.getToken().getType()) {
      case HiveParser.TOK_CHECK_CONSTRAINT:
        checkConstraints = new ArrayList<>();
        processCheckConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            checkConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
        break;
      case HiveParser.TOK_DEFAULT_VALUE:
        defaultConstraints = new ArrayList<>();
        processDefaultConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            defaultConstraints, (ASTNode) ast.getChild(2));
        break;
      case HiveParser.TOK_NOT_NULL:
        notNullConstraints = new ArrayList<>();
        processNotNullConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            notNullConstraints);
        break;
      case HiveParser.TOK_UNIQUE:
        uniqueConstraints = new ArrayList<>();
        processUniqueConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            uniqueConstraints);
        break;
      case HiveParser.TOK_PRIMARY_KEY:
        primaryKeys = new ArrayList<>();
        processPrimaryKeys(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), primaryKeys);
        break;
      case HiveParser.TOK_FOREIGN_KEY:
        foreignKeys = new ArrayList<>();
        processForeignKeys(qualified[0], qualified[1], constraintChild, foreignKeys);
        break;
      default:
        throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
    }
  }
  /* Validate the operation of renaming a column name. */
  Table tab = getTable(qualified);
  if (checkConstraints != null && !checkConstraints.isEmpty()) {
    validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
  }
  if (tab.getTableType() == TableType.EXTERNAL_TABLE
      && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg(
        "Constraints are disallowed with External tables. Only RELY is allowed."));
  }
  SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
  if ((null != skewInfo) && (null != skewInfo.getSkewedColNames())
      && skewInfo.getSkewedColNames().contains(oldColName)) {
    throw new SemanticException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
  }
  String tblName = getDotName(qualified);
  AlterTableDesc alterTblDesc;
  if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null
      && defaultConstraints == null && checkConstraints == null) {
    alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
        unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade);
  } else {
    alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
        unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade,
        primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
  }
  addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
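Note that the external-table check above rejects any constraint that is ENABLE or VALIDATE; only RELY is permitted. A minimal sketch of such a check against SQLForeignKey, whose beans carry the same enable/validate flags; hasEnabledOrValidated is a hypothetical stand-in for the hasEnabledOrValidatedConstraints call above, not a Hive method:

  import java.util.List;
  import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

  public final class ConstraintChecks {
    private ConstraintChecks() {}

    // True if any foreign key is ENABLE or VALIDATE; external tables reject
    // such constraints, only RELY is allowed.
    public static boolean hasEnabledOrValidated(List<SQLForeignKey> fks) {
      return fks != null && fks.stream()
          .anyMatch(fk -> fk.isEnable_cstr() || fk.isValidate_cstr());
    }
  }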
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class DbNotificationListener, method onAddForeignKey:
/**
 * @param addForeignKeyEvent add foreign key event
 * @throws MetaException
 */
@Override
public void onAddForeignKey(AddForeignKeyEvent addForeignKeyEvent) throws MetaException {
  List<SQLForeignKey> cols = addForeignKeyEvent.getForeignKeyCols();
  if (cols.size() > 0) {
    AddForeignKeyMessage msg = MessageBuilder.getInstance()
        .buildAddForeignKeyMessage(addForeignKeyEvent.getForeignKeyCols());
    NotificationEvent event = new NotificationEvent(0, now(), EventType.ADD_FOREIGNKEY.toString(),
        msgEncoder.getSerializer().serialize(msg));
    // Fall back to the default catalog when the constraint does not carry one.
    event.setCatName(cols.get(0).isSetCatName() ? cols.get(0).getCatName() : DEFAULT_CATALOG_NAME);
    // The notification is recorded against the referenced (parent) table.
    event.setDbName(cols.get(0).getPktable_db());
    event.setTableName(cols.get(0).getPktable_name());
    process(event, addForeignKeyEvent);
  }
}
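The catalog name may be unset on constraints coming from older clients, hence the fallback above. A minimal sketch of that pattern, assuming DEFAULT_CATALOG_NAME is the constant from the metastore's Warehouse class; catalogOf is a hypothetical helper name:

  import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;

  import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

  public final class CatalogNames {
    private CatalogNames() {}

    // Prefer the catalog set on the constraint, else fall back to the default catalog.
    public static String catalogOf(SQLForeignKey fk) {
      return fk.isSetCatName() ? fk.getCatName() : DEFAULT_CATALOG_NAME;
    }
  }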
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class Hive, method getTableConstraints:
public TableConstraintsInfo getTableConstraints(String dbName, String tblName, boolean fetchReliable,
    boolean fetchEnabled, long tableId) throws HiveException {
  PerfLogger perfLogger = SessionState.getPerfLogger();
  perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS);
  try {
    ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tblName);
    AllTableConstraintsRequest request = new AllTableConstraintsRequest(dbName, tblName, getDefaultCatalog(conf));
    request.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.writeToString() : null);
    request.setTableId(tableId);
    SQLAllTableConstraints tableConstraints = getMSC().getAllTableConstraints(request);
    // Optionally keep only constraints marked RELY, i.e. those the optimizer
    // may rely on even though they are not enforced.
    if (fetchReliable && tableConstraints != null) {
      if (CollectionUtils.isNotEmpty(tableConstraints.getPrimaryKeys())) {
        tableConstraints.setPrimaryKeys(tableConstraints.getPrimaryKeys().stream()
            .filter(SQLPrimaryKey::isRely_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getForeignKeys())) {
        tableConstraints.setForeignKeys(tableConstraints.getForeignKeys().stream()
            .filter(SQLForeignKey::isRely_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getUniqueConstraints())) {
        tableConstraints.setUniqueConstraints(tableConstraints.getUniqueConstraints().stream()
            .filter(SQLUniqueConstraint::isRely_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getNotNullConstraints())) {
        tableConstraints.setNotNullConstraints(tableConstraints.getNotNullConstraints().stream()
            .filter(SQLNotNullConstraint::isRely_cstr).collect(Collectors.toList()));
      }
    }
    // Optionally keep only ENABLE check and default constraints.
    if (fetchEnabled && tableConstraints != null) {
      if (CollectionUtils.isNotEmpty(tableConstraints.getCheckConstraints())) {
        tableConstraints.setCheckConstraints(tableConstraints.getCheckConstraints().stream()
            .filter(SQLCheckConstraint::isEnable_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getDefaultConstraints())) {
        tableConstraints.setDefaultConstraints(tableConstraints.getDefaultConstraints().stream()
            .filter(SQLDefaultConstraint::isEnable_cstr).collect(Collectors.toList()));
      }
    }
    return new TableConstraintsInfo(
        new PrimaryKeyInfo(tableConstraints.getPrimaryKeys(), tblName, dbName),
        new ForeignKeyInfo(tableConstraints.getForeignKeys(), tblName, dbName),
        new UniqueConstraint(tableConstraints.getUniqueConstraints(), tblName, dbName),
        new DefaultConstraint(tableConstraints.getDefaultConstraints(), tblName, dbName),
        new CheckConstraint(tableConstraints.getCheckConstraints()),
        new NotNullConstraint(tableConstraints.getNotNullConstraints(), tblName, dbName));
  } catch (Exception e) {
    throw new HiveException(e);
  } finally {
    perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS, "HS2-cache");
  }
}
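Each RELY filter above follows the same stream-and-collect shape. A minimal sketch of it for foreign keys, using the SQLForeignKey::isRely_cstr accessor from the code above; relyOnly is a hypothetical helper name:

  import java.util.List;
  import java.util.stream.Collectors;

  import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

  public final class ConstraintFilters {
    private ConstraintFilters() {}

    // Keep only foreign keys marked RELY, mirroring the fetchReliable branch above.
    public static List<SQLForeignKey> relyOnly(List<SQLForeignKey> fks) {
      return fks.stream()
          .filter(SQLForeignKey::isRely_cstr)
          .collect(Collectors.toList());
    }
  }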