Example 21 with SQLUniqueConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache, from the class BaseSemanticAnalyzer, method getColumns.

/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the primary key, foreign key, unique, not-null,
 * default, and check constraint lists if any are present.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints) throws SemanticException {
    List<FieldSchema> colList = new ArrayList<FieldSchema>();
    Tree parent = ast.getParent();
    for (int i = 0; i < ast.getChildCount(); i++) {
        FieldSchema col = new FieldSchema();
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.TOK_UNIQUE:
                {
                    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], child, uniqueConstraints);
                }
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                {
                    if (!primaryKeys.isEmpty()) {
                        throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                    }
                    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys);
                }
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                {
                    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys);
                }
                break;
            default:
                Tree grandChild = child.getChild(0);
                if (grandChild != null) {
                    String name = grandChild.getText();
                    if (lowerCase) {
                        name = name.toLowerCase();
                    }
                    checkColumnName(name);
                    // child 0 is the name of the column
                    col.setName(unescapeIdentifier(name));
                    // child 1 is the type of the column
                    ASTNode typeChild = (ASTNode) (child.getChild(1));
                    col.setType(getTypeStringFromAST(typeChild));
                    // child 2 is the optional comment of the column
                    // child 3 is the optional constraint
                    ASTNode constraintChild = null;
                    if (child.getChildCount() == 4) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                        constraintChild = (ASTNode) child.getChild(3);
                    } else if (child.getChildCount() == 3 && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                    } else if (child.getChildCount() == 3) {
                        constraintChild = (ASTNode) child.getChild(2);
                    }
                    if (constraintChild != null) {
                        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                        // Process column constraint
                        switch(constraintChild.getToken().getType()) {
                            case HiveParser.TOK_CHECK_CONSTRAINT:
                                processCheckConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), checkConstraints, typeChild, tokenRewriteStream);
                                break;
                            case HiveParser.TOK_DEFAULT_VALUE:
                                processDefaultConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), defaultConstraints, typeChild);
                                break;
                            case HiveParser.TOK_NOT_NULL:
                                processNotNullConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), notNullConstraints);
                                break;
                            case HiveParser.TOK_UNIQUE:
                                processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), uniqueConstraints);
                                break;
                            case HiveParser.TOK_PRIMARY_KEY:
                                if (!primaryKeys.isEmpty()) {
                                    throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                                }
                                processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), primaryKeys);
                                break;
                            case HiveParser.TOK_FOREIGN_KEY:
                                processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, foreignKeys);
                                break;
                            default:
                                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
                        }
                    }
                }
                colList.add(col);
                break;
        }
    }
    return colList;
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) Tree(org.antlr.runtime.tree.Tree) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
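To show how a caller wires this up, here is a minimal sketch that only demonstrates the output-list plumbing. The wrapper class and method are hypothetical; `ast` is assumed to be the column-list node (e.g. TOK_TABCOLLIST) produced by Hive's DDL parser, along with its TokenRewriteStream.

import java.util.ArrayList;
import java.util.List;

import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class GetColumnsSketch {
    static List<FieldSchema> collect(ASTNode ast, TokenRewriteStream rewriteStream)
            throws SemanticException {
        // The caller owns the output lists; getColumns appends into them.
        List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
        List<SQLForeignKey> foreignKeys = new ArrayList<>();
        List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
        List<SQLNotNullConstraint> notNullConstraints = new ArrayList<>();
        List<SQLDefaultConstraint> defaultConstraints = new ArrayList<>();
        List<SQLCheckConstraint> checkConstraints = new ArrayList<>();

        List<FieldSchema> cols = BaseSemanticAnalyzer.getColumns(
                ast, true /* lowerCase */, rewriteStream,
                primaryKeys, foreignKeys, uniqueConstraints,
                notNullConstraints, defaultConstraints, checkConstraints);

        // uniqueConstraints now holds one SQLUniqueConstraint per column of
        // every UNIQUE definition found in the column list.
        return cols;
    }
}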

Example 22 with SQLUniqueConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache, from the class DDLSemanticAnalyzer, method analyzeAlterTableRenameCol.

private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast, HashMap<String, String> partSpec) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
    ASTNode constraintChild = null;
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.StringLiteral:
                newComment = unescapeSQLString(child.getText());
                break;
            case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveParser.KW_FIRST:
                first = true;
                break;
            case HiveParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveParser.TOK_RESTRICT:
                break;
            default:
                constraintChild = (ASTNode) child;
        }
    }
    List<SQLPrimaryKey> primaryKeys = null;
    List<SQLForeignKey> foreignKeys = null;
    List<SQLUniqueConstraint> uniqueConstraints = null;
    List<SQLNotNullConstraint> notNullConstraints = null;
    List<SQLDefaultConstraint> defaultConstraints = null;
    List<SQLCheckConstraint> checkConstraints = null;
    if (constraintChild != null) {
        // Process column constraint
        switch(constraintChild.getToken().getType()) {
            case HiveParser.TOK_CHECK_CONSTRAINT:
                checkConstraints = new ArrayList<>();
                processCheckConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), checkConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
                break;
            case HiveParser.TOK_DEFAULT_VALUE:
                defaultConstraints = new ArrayList<>();
                processDefaultConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), defaultConstraints, (ASTNode) ast.getChild(2));
                break;
            case HiveParser.TOK_NOT_NULL:
                notNullConstraints = new ArrayList<>();
                processNotNullConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), notNullConstraints);
                break;
            case HiveParser.TOK_UNIQUE:
                uniqueConstraints = new ArrayList<>();
                processUniqueConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), uniqueConstraints);
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                primaryKeys = new ArrayList<>();
                processPrimaryKeys(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), primaryKeys);
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                foreignKeys = new ArrayList<>();
                processForeignKeys(qualified[0], qualified[1], constraintChild, foreignKeys);
                break;
            default:
                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
        }
    }
    /* Validate the operation of renaming a column name. */
    Table tab = getTable(qualified);
    if (checkConstraints != null && !checkConstraints.isEmpty()) {
        validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
    }
    if (tab.getTableType() == TableType.EXTERNAL_TABLE && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraints are disallowed with External tables. " + "Only RELY is allowed."));
    }
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new SemanticException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = getDotName(qualified);
    AlterTableDesc alterTblDesc;
    if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null && checkConstraints == null) {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade);
    } else {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
    }
    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
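For reference, the kinds of statements this method analyzes, matching the grammar comment near the top; both examples are illustrative, not taken from Hive's tests. The second one attaches a column constraint, which falls into the default branch of the token switch and, for UNIQUE, lands in uniqueConstraints via processUniqueConstraints.

public class ChangeColumnExamples {
    // Plain rename with reposition and cascade; no constraint child.
    static final String RENAME_ONLY =
        "ALTER TABLE t CHANGE COLUMN c1 c2 STRING COMMENT 'renamed' FIRST CASCADE";
    // Rename that also declares a unique constraint on the new column.
    static final String RENAME_WITH_UNIQUE =
        "ALTER TABLE t CHANGE COLUMN c1 c2 STRING "
            + "CONSTRAINT uk_c2 UNIQUE DISABLE NOVALIDATE";
}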

Example 23 with SQLUniqueConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache, from the class AddUniqueConstraintHandler, method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
    List<SQLUniqueConstraint> uks;
    try {
        uks = msg.getUniqueConstraints();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (uks.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
    final String actualTblName = uks.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLUniqueConstraint uk : uks) {
        uk.setTable_db(actualDbName);
        uk.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, null, null, uks, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) Task(org.apache.hadoop.hive.ql.exec.Task) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AddUniqueConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc)
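A minimal sketch of building the same Constraints payload by hand, for instance in a test. The setters follow the Thrift-generated bean style the loop above already uses for setTable_db and setTable_name; the remaining setter names, the column, and the constraint name are assumptions, and the Constraints argument order simply mirrors the handler's own call.

import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;

public class UniqueConstraintPayloadSketch {
    static Constraints uniqueOnly(String db, String table) {
        SQLUniqueConstraint uk = new SQLUniqueConstraint();
        uk.setTable_db(db);
        uk.setTable_name(table);
        uk.setColumn_name("id");       // hypothetical column
        uk.setKey_seq(1);              // 1-based position within the key
        uk.setUk_name("uk_" + table);  // hypothetical constraint name
        uk.setEnable_cstr(false);      // unique constraints are not enforced by Hive,
        uk.setValidate_cstr(false);    // so DISABLE NOVALIDATE is the usual state
        uk.setRely_cstr(true);         // RELY lets the optimizer use the constraint

        List<SQLUniqueConstraint> uks = Collections.singletonList(uk);
        // Only the unique-constraint slot is populated, as in the handler above.
        return new Constraints(null, null, null, uks, null, null);
    }
}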

Example 24 with SQLUniqueConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache, from the class BaseSemanticAnalyzer, method getColumns (a newer overload that additionally takes a Configuration for catalog resolution).

/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the primary key, foreign key, unique, not-null,
 * default, and check constraint lists if any are present.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints, Configuration conf) throws SemanticException {
    List<FieldSchema> colList = new ArrayList<FieldSchema>();
    Tree parent = ast.getParent();
    for (int i = 0; i < ast.getChildCount(); i++) {
        FieldSchema col = new FieldSchema();
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.TOK_UNIQUE:
                {
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf));
                    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
                    // the user specified a catalog
                    ConstraintsUtils.processUniqueConstraints(tName, child, uniqueConstraints);
                }
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                {
                    if (!primaryKeys.isEmpty()) {
                        throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                    }
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    ConstraintsUtils.processPrimaryKeys(tName, child, primaryKeys);
                }
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                {
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    ConstraintsUtils.processForeignKeys(tName, child, foreignKeys);
                }
                break;
            case HiveParser.TOK_CHECK_CONSTRAINT:
                {
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf));
                    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
                    // the user specified a catalog
                    ConstraintsUtils.processCheckConstraints(tName, child, null, checkConstraints, null, tokenRewriteStream);
                }
                break;
            default:
                Tree grandChild = child.getChild(0);
                if (grandChild != null) {
                    String name = grandChild.getText();
                    if (lowerCase) {
                        name = name.toLowerCase();
                    }
                    checkColumnName(name);
                    // child 0 is the name of the column
                    col.setName(unescapeIdentifier(name));
                    // child 1 is the type of the column
                    ASTNode typeChild = (ASTNode) (child.getChild(1));
                    col.setType(getTypeStringFromAST(typeChild));
                    // child 2 is the optional comment of the column
                    // child 3 is the optional constraint
                    ASTNode constraintChild = null;
                    if (child.getChildCount() == 4) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                        constraintChild = (ASTNode) child.getChild(3);
                    } else if (child.getChildCount() == 3 && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                    } else if (child.getChildCount() == 3) {
                        constraintChild = (ASTNode) child.getChild(2);
                    }
                    if (constraintChild != null) {
                        final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf));
                        // Process column constraint
                        switch(constraintChild.getToken().getType()) {
                            case HiveParser.TOK_CHECK_CONSTRAINT:
                                ConstraintsUtils.processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()), checkConstraints, typeChild, tokenRewriteStream);
                                break;
                            case HiveParser.TOK_DEFAULT_VALUE:
                                ConstraintsUtils.processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()), defaultConstraints, typeChild, tokenRewriteStream);
                                break;
                            case HiveParser.TOK_NOT_NULL:
                                ConstraintsUtils.processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()), notNullConstraints);
                                break;
                            case HiveParser.TOK_UNIQUE:
                                ConstraintsUtils.processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()), uniqueConstraints);
                                break;
                            case HiveParser.TOK_PRIMARY_KEY:
                                if (!primaryKeys.isEmpty()) {
                                    throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                                }
                                ConstraintsUtils.processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()), primaryKeys);
                                break;
                            case HiveParser.TOK_FOREIGN_KEY:
                                ConstraintsUtils.processForeignKeys(tName, constraintChild, foreignKeys);
                                break;
                            default:
                                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
                        }
                    }
                }
                colList.add(col);
                break;
        }
    }
    return colList;
}
Also used : TableName(org.apache.hadoop.hive.common.TableName) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
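The main difference from Example 21 is that this overload resolves a TableName against a catalog taken from the Configuration. A condensed sketch of that step; the import path for MetaStoreUtils is assumed to be the usual org.apache.hadoop.hive.metastore.utils location, and the argument order of TableName.fromString follows its use in Example 23 above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;

public class TableNameResolutionSketch {
    // Per the TODO in the code above, the catalog is always the configured
    // default for now, regardless of what the statement says.
    static TableName resolve(Configuration conf, String db, String table) {
        String catalog = MetaStoreUtils.getDefaultCatalog(conf);
        return TableName.fromString(table, catalog, db);
    }
}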

Example 25 with SQLUniqueConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache, from the class Hive, method getTableConstraints.

public TableConstraintsInfo getTableConstraints(String dbName, String tblName, boolean fetchReliable, boolean fetchEnabled, long tableId) throws HiveException {
    PerfLogger perfLogger = SessionState.getPerfLogger();
    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS);
    try {
        ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tblName);
        AllTableConstraintsRequest request = new AllTableConstraintsRequest(dbName, tblName, getDefaultCatalog(conf));
        request.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.writeToString() : null);
        request.setTableId(tableId);
        SQLAllTableConstraints tableConstraints = getMSC().getAllTableConstraints(request);
        if (fetchReliable && tableConstraints != null) {
            if (CollectionUtils.isNotEmpty(tableConstraints.getPrimaryKeys())) {
                tableConstraints.setPrimaryKeys(tableConstraints.getPrimaryKeys().stream().filter(SQLPrimaryKey::isRely_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getForeignKeys())) {
                tableConstraints.setForeignKeys(tableConstraints.getForeignKeys().stream().filter(SQLForeignKey::isRely_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getUniqueConstraints())) {
                tableConstraints.setUniqueConstraints(tableConstraints.getUniqueConstraints().stream().filter(SQLUniqueConstraint::isRely_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getNotNullConstraints())) {
                tableConstraints.setNotNullConstraints(tableConstraints.getNotNullConstraints().stream().filter(SQLNotNullConstraint::isRely_cstr).collect(Collectors.toList()));
            }
        }
        if (fetchEnabled && tableConstraints != null) {
            if (CollectionUtils.isNotEmpty(tableConstraints.getCheckConstraints())) {
                tableConstraints.setCheckConstraints(tableConstraints.getCheckConstraints().stream().filter(SQLCheckConstraint::isEnable_cstr).collect(Collectors.toList()));
            }
            if (CollectionUtils.isNotEmpty(tableConstraints.getDefaultConstraints())) {
                tableConstraints.setDefaultConstraints(tableConstraints.getDefaultConstraints().stream().filter(SQLDefaultConstraint::isEnable_cstr).collect(Collectors.toList()));
            }
        }
        return new TableConstraintsInfo(new PrimaryKeyInfo(tableConstraints.getPrimaryKeys(), tblName, dbName), new ForeignKeyInfo(tableConstraints.getForeignKeys(), tblName, dbName), new UniqueConstraint(tableConstraints.getUniqueConstraints(), tblName, dbName), new DefaultConstraint(tableConstraints.getDefaultConstraints(), tblName, dbName), new CheckConstraint(tableConstraints.getCheckConstraints()), new NotNullConstraint(tableConstraints.getNotNullConstraints(), tblName, dbName));
    } catch (Exception e) {
        throw new HiveException(e);
    } finally {
        perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS, "HS2-cache");
    }
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) PerfLogger(org.apache.hadoop.hive.ql.log.PerfLogger) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) LockException(org.apache.hadoop.hive.ql.lockmgr.LockException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException) ExecutionException(java.util.concurrent.ExecutionException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) HiveMetaException(org.apache.hadoop.hive.metastore.HiveMetaException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TApplicationException(org.apache.thrift.TApplicationException) TException(org.apache.thrift.TException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) FileNotFoundException(java.io.FileNotFoundException) JDODataStoreException(javax.jdo.JDODataStoreException) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLAllTableConstraints(org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints) ValidWriteIdList(org.apache.hadoop.hive.common.ValidWriteIdList) AllTableConstraintsRequest(org.apache.hadoop.hive.metastore.api.AllTableConstraintsRequest)
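The RELY filter is the reusable piece here. A standalone sketch of the same stream pipeline for the unique-constraint list; the enable-side branches apply the identical pattern with isEnable_cstr.

import java.util.List;
import java.util.stream.Collectors;

import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;

public class RelyFilterSketch {
    // Keep only the constraints the optimizer may RELY on, exactly as
    // getTableConstraints does when fetchReliable is set.
    static List<SQLUniqueConstraint> reliedOn(List<SQLUniqueConstraint> uks) {
        return uks.stream()
                .filter(SQLUniqueConstraint::isRely_cstr)
                .collect(Collectors.toList());
    }
}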

Aggregations

SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 32 uses
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 16 uses
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 15 uses
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 15 uses
Test (org.junit.Test): 14 uses
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 13 uses
ArrayList (java.util.ArrayList): 11 uses
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 11 uses
SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 11 uses
UniqueConstraintsRequest (org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest): 11 uses
SQLUniqueConstraintBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLUniqueConstraintBuilder): 10 uses
Table (org.apache.hadoop.hive.metastore.api.Table): 7 uses
MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 5 uses
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 4 uses
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 3 uses
SQLDefaultConstraintBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLDefaultConstraintBuilder): 3 uses
SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder): 3 uses
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 3 uses
AddUniqueConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage): 3 uses
Table (org.apache.hadoop.hive.ql.metadata.Table): 3 uses