Example 31 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache, from the class ReplDumpTask, method dumpConstraintMetadata:

private void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot) throws Exception {
    try {
        Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME);
        Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
        Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
        Hive db = getHive();
        List<SQLPrimaryKey> pks = db.getPrimaryKeyList(dbName, tblName);
        List<SQLForeignKey> fks = db.getForeignKeyList(dbName, tblName);
        List<SQLUniqueConstraint> uks = db.getUniqueConstraintList(dbName, tblName);
        List<SQLNotNullConstraint> nns = db.getNotNullConstraintList(dbName, tblName);
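        // PK, unique, and not-null constraints share the common file; foreign keys are written to a separate file.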
        if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) {
            try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
        if (fks != null && !fks.isEmpty()) {
            try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
    } catch (NoSuchObjectException e) {
        // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
        // Just log a debug message and skip it.
        LOG.debug(e.getMessage());
    }
}
Also used: Path (org.apache.hadoop.fs.Path), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), JsonWriter (org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), Hive (org.apache.hadoop.hive.ql.metadata.Hive), ConstraintsSerializer (org.apache.hadoop.hive.ql.parse.repl.dump.io.ConstraintsSerializer), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
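
As a usage sketch, the foreign-key half of the dump above boils down to the following; this is a minimal sketch, assuming a HiveConf named conf is in scope, and the path, database, and table names are invented for illustration:

Path fkFile = new Path("/repl/dump/constraints", "fk_orders");   // hypothetical dump location
List<SQLForeignKey> fks = getHive().getForeignKeyList("sales", "orders");
if (fks != null && !fks.isEmpty()) {
    // Only the foreign-key slot of the serializer is populated; the other constraint slots stay null.
    try (JsonWriter jsonWriter = new JsonWriter(fkFile.getFileSystem(conf), fkFile)) {
        new ConstraintsSerializer(null, fks, null, null, conf).writeTo(jsonWriter, null);
    }
}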

Example 32 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache, from the class BaseSemanticAnalyzer, method processForeignKeys:

/**
 * Process the foreign keys from the AST and populate the foreign keys in the SQLForeignKey list
 * @param child Foreign Key token node
 * @param foreignKeys SQLForeignKey list
 * @throws SemanticException
 */
protected static void processForeignKeys(String databaseName, String tableName, ASTNode child, List<SQLForeignKey> foreignKeys) throws SemanticException {
    // The ANTLR grammar looks like :
    // 1.  KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user specifies the constraint name (i.e. child.getChildCount() == 7)
    // 2.  KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $fkCols  $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user does not specify the constraint name (i.e. child.getChildCount() == 6)
    String constraintName = null;
    boolean enable = true;
    boolean validate = true;
    boolean rely = false;
    int fkIndex = -1;
    for (int i = 0; i < child.getChildCount(); i++) {
        ASTNode grandChild = (ASTNode) child.getChild(i);
        int type = grandChild.getToken().getType();
        if (type == HiveParser.TOK_CONSTRAINT_NAME) {
            constraintName = unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase());
        } else if (type == HiveParser.TOK_ENABLE) {
            enable = true;
            // validate is true by default if we enable the constraint
            validate = true;
        } else if (type == HiveParser.TOK_DISABLE) {
            enable = false;
            // validate is false by default if we disable the constraint
            validate = false;
        } else if (type == HiveParser.TOK_VALIDATE) {
            validate = true;
        } else if (type == HiveParser.TOK_NOVALIDATE) {
            validate = false;
        } else if (type == HiveParser.TOK_RELY) {
            rely = true;
        } else if (type == HiveParser.TOK_TABCOLNAME && fkIndex == -1) {
            fkIndex = i;
        }
    }
    if (enable) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("ENABLE feature not supported yet. " + "Please use DISABLE instead."));
    }
    if (validate) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("VALIDATE feature not supported yet. " + "Please use NOVALIDATE instead."));
    }
    int ptIndex = fkIndex + 1;
    int pkIndex = ptIndex + 1;
    if (child.getChild(fkIndex).getChildCount() != child.getChild(pkIndex).getChildCount()) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(" The number of foreign key columns should be same as number of parent key columns "));
    }
    String[] parentDBTbl = getQualifiedTableName((ASTNode) child.getChild(ptIndex));
    for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) {
        SQLForeignKey sqlForeignKey = new SQLForeignKey();
        sqlForeignKey.setFktable_db(databaseName);
        sqlForeignKey.setFktable_name(tableName);
        Tree fkgrandChild = child.getChild(fkIndex).getChild(j);
        checkColumnName(fkgrandChild.getText());
        sqlForeignKey.setFkcolumn_name(unescapeIdentifier(fkgrandChild.getText().toLowerCase()));
        sqlForeignKey.setPktable_db(parentDBTbl[0]);
        sqlForeignKey.setPktable_name(parentDBTbl[1]);
        Tree pkgrandChild = child.getChild(pkIndex).getChild(j);
        sqlForeignKey.setPkcolumn_name(unescapeIdentifier(pkgrandChild.getText().toLowerCase()));
        sqlForeignKey.setKey_seq(j + 1);
        sqlForeignKey.setFk_name(constraintName);
        sqlForeignKey.setEnable_cstr(enable);
        sqlForeignKey.setValidate_cstr(validate);
        sqlForeignKey.setRely_cstr(rely);
        foreignKeys.add(sqlForeignKey);
    }
}
Also used: SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), Tree (org.antlr.runtime.tree.Tree), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
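
To make the result concrete, here is a sketch of the single SQLForeignKey this method would build for a one-column constraint declared DISABLE NOVALIDATE RELY; the database, table, column, and constraint names are invented for illustration:

SQLForeignKey fk = new SQLForeignKey();
fk.setFktable_db("sales");                // child side: the table declaring the constraint
fk.setFktable_name("orders");
fk.setFkcolumn_name("customer_id");
fk.setPktable_db("sales");                // parent side: the referenced table
fk.setPktable_name("customers");
fk.setPkcolumn_name("id");
fk.setKey_seq(1);                         // 1-based position within a composite key
fk.setFk_name("fk_orders_customers");     // stays null when no CONSTRAINT name is given
fk.setEnable_cstr(false);                 // ENABLE and VALIDATE are rejected above,
fk.setValidate_cstr(false);               // so both flags are always false here
fk.setRely_cstr(true);
foreignKeys.add(fk);                      // appended to the output list parameter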

Example 33 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache, from the class DDLSemanticAnalyzer, method analyzeAlterTableRenameCol:

private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast, HashMap<String, String> partSpec) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
    ASTNode constraintChild = null;
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.StringLiteral:
                newComment = unescapeSQLString(child.getText());
                break;
            case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveParser.KW_FIRST:
                first = true;
                break;
            case HiveParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveParser.TOK_RESTRICT:
                break;
            default:
                constraintChild = (ASTNode) child;
        }
    }
    List<SQLPrimaryKey> primaryKeys = null;
    List<SQLForeignKey> foreignKeys = null;
    List<SQLUniqueConstraint> uniqueConstraints = null;
    List<SQLNotNullConstraint> notNullConstraints = null;
    List<SQLDefaultConstraint> defaultConstraints = null;
    List<SQLCheckConstraint> checkConstraints = null;
    if (constraintChild != null) {
        // Process column constraint
        switch(constraintChild.getToken().getType()) {
            case HiveParser.TOK_CHECK_CONSTRAINT:
                checkConstraints = new ArrayList<>();
                processCheckConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), checkConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
                break;
            case HiveParser.TOK_DEFAULT_VALUE:
                defaultConstraints = new ArrayList<>();
                processDefaultConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), defaultConstraints, (ASTNode) ast.getChild(2));
                break;
            case HiveParser.TOK_NOT_NULL:
                notNullConstraints = new ArrayList<>();
                processNotNullConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), notNullConstraints);
                break;
            case HiveParser.TOK_UNIQUE:
                uniqueConstraints = new ArrayList<>();
                processUniqueConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), uniqueConstraints);
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                primaryKeys = new ArrayList<>();
                processPrimaryKeys(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), primaryKeys);
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                foreignKeys = new ArrayList<>();
                processForeignKeys(qualified[0], qualified[1], constraintChild, foreignKeys);
                break;
            default:
                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
        }
    }
    /* Validate the operation of renaming a column name. */
    Table tab = getTable(qualified);
    if (checkConstraints != null && !checkConstraints.isEmpty()) {
        validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
    }
    if (tab.getTableType() == TableType.EXTERNAL_TABLE && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraints are disallowed with External tables. " + "Only RELY is allowed."));
    }
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new SemanticException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = getDotName(qualified);
    AlterTableDesc alterTblDesc;
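    // A plain rename/retype takes the short constructor; any new constraint switches to the constructor that carries all six constraint lists.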
    if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null && checkConstraints == null) {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade);
    } else {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
    }
    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used: SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), Table (org.apache.hadoop.hive.ql.metadata.Table), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint), DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)

Example 34 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache, from the class DbNotificationListener, method onAddForeignKey:

/**
 * @param addForeignKeyEvent add foreign key event
 * @throws MetaException
 */
@Override
public void onAddForeignKey(AddForeignKeyEvent addForeignKeyEvent) throws MetaException {
    List<SQLForeignKey> cols = addForeignKeyEvent.getForeignKeyCols();
    if (cols.size() > 0) {
        AddForeignKeyMessage msg = MessageBuilder.getInstance().buildAddForeignKeyMessage(addForeignKeyEvent.getForeignKeyCols());
        NotificationEvent event = new NotificationEvent(0, now(), EventType.ADD_FOREIGNKEY.toString(), msgEncoder.getSerializer().serialize(msg));
        event.setCatName(cols.get(0).isSetCatName() ? cols.get(0).getCatName() : DEFAULT_CATALOG_NAME);
        event.setDbName(cols.get(0).getPktable_db());
        event.setTableName(cols.get(0).getPktable_name());
        process(event, addForeignKeyEvent);
    }
}
Also used: AddForeignKeyMessage (org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), NotificationEvent (org.apache.hadoop.hive.metastore.api.NotificationEvent)
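
Note that the event is attributed to the parent (referenced) table, not the child. A minimal sketch of that derivation as a standalone helper; fillEventSource is hypothetical, and DEFAULT_CATALOG_NAME is assumed to be the statically imported metastore default:

private static void fillEventSource(NotificationEvent event, List<SQLForeignKey> cols) {
    SQLForeignKey first = cols.get(0);
    // Fall back to the default catalog when the key carries no catalog name.
    event.setCatName(first.isSetCatName() ? first.getCatName() : DEFAULT_CATALOG_NAME);
    // Attribute the event to the referenced (parent) table.
    event.setDbName(first.getPktable_db());
    event.setTableName(first.getPktable_name());
}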

Example 35 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache, from the class Hive, method getTableConstraints:

public TableConstraintsInfo getTableConstraints(String dbName, String tblName, boolean fetchReliable, boolean fetchEnabled, long tableId) throws HiveException {
    PerfLogger perfLogger = SessionState.getPerfLogger();
    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS);
    try {
        ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tblName);
        AllTableConstraintsRequest request = new AllTableConstraintsRequest(dbName, tblName, getDefaultCatalog(conf));
        request.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.writeToString() : null);
        request.setTableId(tableId);
        SQLAllTableConstraints tableConstraints = getMSC().getAllTableConstraints(request);
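        // When reliable constraints are requested, keep only those marked RELY.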
        if (fetchReliable && tableConstraints != null) {
            if (CollectionUtils.isNotEmpty(tableConstraints.getPrimaryKeys())) {
                tableConstraints.setPrimaryKeys(tableConstraints.getPrimaryKeys().stream().filter(SQLPrimaryKey::isRely_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getForeignKeys())) {
                tableConstraints.setForeignKeys(tableConstraints.getForeignKeys().stream().filter(SQLForeignKey::isRely_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getUniqueConstraints())) {
                tableConstraints.setUniqueConstraints(tableConstraints.getUniqueConstraints().stream().filter(SQLUniqueConstraint::isRely_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getNotNullConstraints())) {
                tableConstraints.setNotNullConstraints(tableConstraints.getNotNullConstraints().stream().filter(SQLNotNullConstraint::isRely_cstr).collect(Collectors.toList()));
            }
        }
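        // When enabled constraints are requested, keep only ENABLEd check and default constraints.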
        if (fetchEnabled && tableConstraints != null) {
            if (CollectionUtils.isNotEmpty(tableConstraints.getCheckConstraints())) {
                tableConstraints.setCheckConstraints(tableConstraints.getCheckConstraints().stream().filter(SQLCheckConstraint::isEnable_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getDefaultConstraints())) {
                tableConstraints.setDefaultConstraints(tableConstraints.getDefaultConstraints().stream().filter(SQLDefaultConstraint::isEnable_cstr).collect(Collectors.toList()));
            }
        }
        return new TableConstraintsInfo(new PrimaryKeyInfo(tableConstraints.getPrimaryKeys(), tblName, dbName), new ForeignKeyInfo(tableConstraints.getForeignKeys(), tblName, dbName), new UniqueConstraint(tableConstraints.getUniqueConstraints(), tblName, dbName), new DefaultConstraint(tableConstraints.getDefaultConstraints(), tblName, dbName), new CheckConstraint(tableConstraints.getCheckConstraints()), new NotNullConstraint(tableConstraints.getNotNullConstraints(), tblName, dbName));
    } catch (Exception e) {
        throw new HiveException(e);
    } finally {
        perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS, "HS2-cache");
    }
}
Also used: SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SQLAllTableConstraints (org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints), PerfLogger (org.apache.hadoop.hive.ql.log.PerfLogger), ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList), AllTableConstraintsRequest (org.apache.hadoop.hive.metastore.api.AllTableConstraintsRequest), AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException), LockException (org.apache.hadoop.hive.ql.lockmgr.LockException), InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException), IOException (java.io.IOException), UnknownHostException (java.net.UnknownHostException), ExecutionException (java.util.concurrent.ExecutionException), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), HiveMetaException (org.apache.hadoop.hive.metastore.HiveMetaException), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), TApplicationException (org.apache.thrift.TApplicationException), TException (org.apache.thrift.TException), SerDeException (org.apache.hadoop.hive.serde2.SerDeException), FileNotFoundException (java.io.FileNotFoundException), JDODataStoreException (javax.jdo.JDODataStoreException)
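
The six filters above repeat one pattern per constraint type. A hedged generic sketch, behaviorally equivalent under the same non-empty guard; keepIf is a hypothetical helper, not part of Hive:

// Requires java.util.function.Predicate, java.util.stream.Collectors,
// and org.apache.commons.collections4.CollectionUtils as used above.
private static <T> List<T> keepIf(List<T> constraints, Predicate<T> keep) {
    // Filter only non-empty lists; return null/empty input unchanged, as the original does.
    return CollectionUtils.isNotEmpty(constraints)
        ? constraints.stream().filter(keep).collect(Collectors.toList())
        : constraints;
}

// Usage for the foreign-key case shown above:
tableConstraints.setForeignKeys(keepIf(tableConstraints.getForeignKeys(), SQLForeignKey::isRely_cstr));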

Aggregations

SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 46 uses
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 28 uses
Test (org.junit.Test): 20 uses
Table (org.apache.hadoop.hive.metastore.api.Table): 19 uses
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 15 uses
ArrayList (java.util.ArrayList): 13 uses
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 13 uses
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 13 uses
SQLForeignKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder): 13 uses
SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder): 13 uses
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 12 uses
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 11 uses
ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest): 9 uses
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 6 uses
IOException (java.io.IOException): 5 uses
HashMap (java.util.HashMap): 5 uses
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 5 uses
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 5 uses
TApplicationException (org.apache.thrift.TApplicationException): 5 uses
Tree (org.antlr.runtime.tree.Tree): 3 uses