
Example 6 with SQLNotNullConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache.

Class DDLTask, method createTable.

/**
 * Create a new table.
 *
 * @param db
 *          The database in question.
 * @param crtTbl
 *          This is the table we're creating.
 * @return Returns 0 when execution succeeds and above 0 if it fails.
 * @throws HiveException
 *           Throws this exception if an unexpected error occurs.
 */
private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
    // create the table
    Table tbl = crtTbl.toTable(conf);
    List<SQLPrimaryKey> primaryKeys = crtTbl.getPrimaryKeys();
    List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys();
    List<SQLUniqueConstraint> uniqueConstraints = crtTbl.getUniqueConstraints();
    List<SQLNotNullConstraint> notNullConstraints = crtTbl.getNotNullConstraints();
    List<SQLDefaultConstraint> defaultConstraints = crtTbl.getDefaultConstraints();
    List<SQLCheckConstraint> checkConstraints = crtTbl.getCheckConstraints();
    LOG.debug("creating table {} on {}", tbl.getFullyQualifiedName(), tbl.getDataLocation());
    if (crtTbl.getReplicationSpec().isInReplicationScope() && (!crtTbl.getReplaceMode())) {
        // if this is a replication spec, then replace-mode semantics might apply.
        // if we're already asking for a table replacement, then we can skip this check.
        // however, otherwise, if in replication scope, and we've not been explicitly asked
        // to replace, we should check if the object we're looking at exists, and if so,
        // trigger replace-mode semantics.
        Table existingTable = db.getTable(tbl.getDbName(), tbl.getTableName(), false);
        if (existingTable != null) {
            if (crtTbl.getReplicationSpec().allowEventReplacementInto(existingTable.getParameters())) {
                // we replace existing table.
                crtTbl.setReplaceMode(true);
            } else {
                LOG.debug("DDLTask: Create Table is skipped as table {} is newer than update", crtTbl.getTableName());
                // no replacement, the existing table state is newer than our update.
                return 0;
            }
        }
    }
    // create the table
    if (crtTbl.getReplaceMode()) {
        // replace-mode creates are really alters using CreateTableDesc.
        db.alterTable(tbl, null);
    } else {
        if ((foreignKeys != null && foreignKeys.size() > 0)
                || (primaryKeys != null && primaryKeys.size() > 0)
                || (uniqueConstraints != null && uniqueConstraints.size() > 0)
                || (notNullConstraints != null && notNullConstraints.size() > 0)
                || (checkConstraints != null && checkConstraints.size() > 0)
                || (defaultConstraints != null && defaultConstraints.size() > 0)) {
            db.createTable(tbl, crtTbl.getIfNotExists(), primaryKeys, foreignKeys, uniqueConstraints,
                    notNullConstraints, defaultConstraints, checkConstraints);
        } else {
            db.createTable(tbl, crtTbl.getIfNotExists());
        }
        Long mmWriteId = crtTbl.getInitialMmWriteId();
        if (crtTbl.isCTAS() || mmWriteId != null) {
            Table createdTable = db.getTable(tbl.getDbName(), tbl.getTableName());
            if (crtTbl.isCTAS()) {
                DataContainer dc = new DataContainer(createdTable.getTTable());
                queryState.getLineageState().setLineage(createdTable.getPath(), dc, createdTable.getCols());
            }
        }
    }
    addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
    return 0;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) TextMetaDataTable(org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) DataContainer(org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
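
The sketch below is not taken from the Hive sources; it illustrates roughly how the notNullConstraints list consumed by DDLTask.createTable gets populated. The database, table, column and constraint names are invented, and the setter names follow the Thrift getters visible in these examples; the exact field set varies between Hive releases (newer versions also carry a catalog name).

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;

// Hypothetical, minimal sketch: building a NOT NULL constraint entry by hand.
public class NotNullConstraintSketch {
    public static void main(String[] args) {
        SQLNotNullConstraint nn = new SQLNotNullConstraint();
        nn.setTable_db("default");          // database of the table being created (made-up name)
        nn.setTable_name("sample_tbl");     // table the constraint belongs to (made-up name)
        nn.setColumn_name("id");            // column declared NOT NULL (made-up name)
        nn.setNn_name("nn_sample_tbl_id");  // constraint name (made-up; Hive can also generate one)
        nn.setEnable_cstr(true);            // ENABLE
        nn.setValidate_cstr(false);         // NOVALIDATE
        nn.setRely_cstr(false);             // NORELY

        List<SQLNotNullConstraint> notNullConstraints = new ArrayList<>();
        notNullConstraints.add(nn);
        // A CreateTableDesc built during analysis would carry a list like this, and
        // DDLTask.createTable forwards it to db.createTable(...) as shown above.
        System.out.println(notNullConstraints);
    }
}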

Example 7 with SQLNotNullConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache.

Class DbNotificationListener, method onAddNotNullConstraint.

/**
 * @param addNotNullConstraintEvent add not null constraint event
 * @throws MetaException
 */
@Override
public void onAddNotNullConstraint(AddNotNullConstraintEvent addNotNullConstraintEvent) throws MetaException {
    List<SQLNotNullConstraint> cols = addNotNullConstraintEvent.getNotNullConstraintCols();
    if (cols.size() > 0) {
        NotificationEvent event = new NotificationEvent(0, now(), EventType.ADD_NOTNULLCONSTRAINT.toString(),
                msgFactory.buildAddNotNullConstraintMessage(addNotNullConstraintEvent.getNotNullConstraintCols())
                        .toString());
        event.setDbName(cols.get(0).getTable_db());
        event.setTableName(cols.get(0).getTable_name());
        process(event, addNotNullConstraintEvent);
    }
}
Also used : SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent)
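
As a small illustration of the pattern in onAddNotNullConstraint, the hypothetical helper below (not part of Hive) derives the event's target table from the first constraint column, using only the getTable_db() and getTable_name() getters seen above; all columns in one event belong to the same table.

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;

// Hypothetical helper, not Hive code.
public final class ConstraintEventTarget {
    private ConstraintEventTarget() {
    }

    /** Returns "db.table" for the table the constraints were added to, or null for an empty list. */
    public static String targetTable(List<SQLNotNullConstraint> cols) {
        if (cols == null || cols.isEmpty()) {
            return null; // the listener above skips event creation in this case
        }
        SQLNotNullConstraint first = cols.get(0);
        return first.getTable_db() + "." + first.getTable_name();
    }
}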

Example 8 with SQLNotNullConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache.

Class ReplDumpTask, method dumpConstraintMetadata.

private void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot) throws Exception {
    try {
        Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME);
        Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
        Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
        Hive db = getHive();
        List<SQLPrimaryKey> pks = db.getPrimaryKeyList(dbName, tblName);
        List<SQLForeignKey> fks = db.getForeignKeyList(dbName, tblName);
        List<SQLUniqueConstraint> uks = db.getUniqueConstraintList(dbName, tblName);
        List<SQLNotNullConstraint> nns = db.getNotNullConstraintList(dbName, tblName);
        if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) {
            try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
        if (fks != null && !fks.isEmpty()) {
            try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
    } catch (NoSuchObjectException e) {
        // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
        // Just log a debug message and skip it.
        LOG.debug(e.getMessage());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) JsonWriter(org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) Hive(org.apache.hadoop.hive.ql.metadata.Hive) ConstraintsSerializer(org.apache.hadoop.hive.ql.parse.repl.dump.io.ConstraintsSerializer) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
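
The null-or-empty checks in dumpConstraintMetadata decide which of the two files is written: primary key, unique and not-null constraints go into the common constraints file, foreign keys into their own file. The utility below is a hypothetical restatement of that guard, not code from the Hive repository.

import java.util.List;

// Hypothetical helper capturing the "any list non-empty" guard used above.
public final class ConstraintDumpGuards {
    private ConstraintDumpGuards() {
    }

    /** True if any of the given lists is non-null and non-empty. */
    public static boolean anyPresent(List<?>... lists) {
        for (List<?> l : lists) {
            if (l != null && !l.isEmpty()) {
                return true;
            }
        }
        return false;
    }
}

// Usage mirroring the method above:
//   if (ConstraintDumpGuards.anyPresent(pks, uks, nns)) { /* write commonConstraintsFile */ }
//   if (ConstraintDumpGuards.anyPresent(fks))           { /* write fkConstraintsFile */ }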

Example 9 with SQLNotNullConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache.

Class BaseSemanticAnalyzer, method getColumns.

/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the primary key, foreign key, unique, not null, default and
 * check constraint lists if any constraints are declared.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream,
        List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
        List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints,
        List<SQLCheckConstraint> checkConstraints) throws SemanticException {
    List<FieldSchema> colList = new ArrayList<FieldSchema>();
    Tree parent = ast.getParent();
    for (int i = 0; i < ast.getChildCount(); i++) {
        FieldSchema col = new FieldSchema();
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.TOK_UNIQUE:
                {
                    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], child, uniqueConstraints);
                }
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                {
                    if (!primaryKeys.isEmpty()) {
                        throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                    }
                    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys);
                }
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                {
                    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys);
                }
                break;
            default:
                Tree grandChild = child.getChild(0);
                if (grandChild != null) {
                    String name = grandChild.getText();
                    if (lowerCase) {
                        name = name.toLowerCase();
                    }
                    checkColumnName(name);
                    // child 0 is the name of the column
                    col.setName(unescapeIdentifier(name));
                    // child 1 is the type of the column
                    ASTNode typeChild = (ASTNode) (child.getChild(1));
                    col.setType(getTypeStringFromAST(typeChild));
                    // child 2 is the optional comment of the column
                    // child 3 is the optional constraint
                    ASTNode constraintChild = null;
                    if (child.getChildCount() == 4) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                        constraintChild = (ASTNode) child.getChild(3);
                    } else if (child.getChildCount() == 3 && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                    } else if (child.getChildCount() == 3) {
                        constraintChild = (ASTNode) child.getChild(2);
                    }
                    if (constraintChild != null) {
                        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
                        // Process column constraint
                        switch(constraintChild.getToken().getType()) {
                            case HiveParser.TOK_CHECK_CONSTRAINT:
                                processCheckConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), checkConstraints, typeChild, tokenRewriteStream);
                                break;
                            case HiveParser.TOK_DEFAULT_VALUE:
                                processDefaultConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), defaultConstraints, typeChild);
                                break;
                            case HiveParser.TOK_NOT_NULL:
                                processNotNullConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), notNullConstraints);
                                break;
                            case HiveParser.TOK_UNIQUE:
                                processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), uniqueConstraints);
                                break;
                            case HiveParser.TOK_PRIMARY_KEY:
                                if (!primaryKeys.isEmpty()) {
                                    throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                                }
                                processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, ImmutableList.of(col.getName()), primaryKeys);
                                break;
                            case HiveParser.TOK_FOREIGN_KEY:
                                processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, foreignKeys);
                                break;
                            default:
                                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
                        }
                    }
                }
                colList.add(col);
                break;
        }
    }
    return colList;
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) Tree(org.antlr.runtime.tree.Tree) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
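
getColumns enforces, both at table level and at column level, that a table declares at most one PRIMARY KEY. The fragment below is a standalone illustration of that rule; it is not part of Hive and throws IllegalStateException where Hive raises a SemanticException.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

// Hypothetical illustration of the "single primary key" rule.
public final class PrimaryKeyRule {
    private PrimaryKeyRule() {
    }

    /** Throws if a primary key has already been collected for the table. */
    public static void ensureSinglePrimaryKey(List<SQLPrimaryKey> primaryKeys) {
        if (!primaryKeys.isEmpty()) {
            throw new IllegalStateException(
                    "Cannot exist more than one primary key definition for the same table");
        }
    }

    public static void main(String[] args) {
        List<SQLPrimaryKey> pks = new ArrayList<>();
        ensureSinglePrimaryKey(pks);      // fine: nothing collected yet
        pks.add(new SQLPrimaryKey());     // pretend a PRIMARY KEY column was processed
        try {
            ensureSinglePrimaryKey(pks);  // a second PRIMARY KEY clause is rejected
        } catch (IllegalStateException expected) {
            System.out.println(expected.getMessage());
        }
    }
}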

Example 10 with SQLNotNullConstraint

Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache.

Class DDLSemanticAnalyzer, method analyzeAlterTableRenameCol.

private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast, HashMap<String, String> partSpec) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
    ASTNode constraintChild = null;
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.StringLiteral:
                newComment = unescapeSQLString(child.getText());
                break;
            case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveParser.KW_FIRST:
                first = true;
                break;
            case HiveParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveParser.TOK_RESTRICT:
                break;
            default:
                constraintChild = (ASTNode) child;
        }
    }
    List<SQLPrimaryKey> primaryKeys = null;
    List<SQLForeignKey> foreignKeys = null;
    List<SQLUniqueConstraint> uniqueConstraints = null;
    List<SQLNotNullConstraint> notNullConstraints = null;
    List<SQLDefaultConstraint> defaultConstraints = null;
    List<SQLCheckConstraint> checkConstraints = null;
    if (constraintChild != null) {
        // Process column constraint
        switch(constraintChild.getToken().getType()) {
            case HiveParser.TOK_CHECK_CONSTRAINT:
                checkConstraints = new ArrayList<>();
                processCheckConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), checkConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
                break;
            case HiveParser.TOK_DEFAULT_VALUE:
                defaultConstraints = new ArrayList<>();
                processDefaultConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), defaultConstraints, (ASTNode) ast.getChild(2));
                break;
            case HiveParser.TOK_NOT_NULL:
                notNullConstraints = new ArrayList<>();
                processNotNullConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), notNullConstraints);
                break;
            case HiveParser.TOK_UNIQUE:
                uniqueConstraints = new ArrayList<>();
                processUniqueConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), uniqueConstraints);
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                primaryKeys = new ArrayList<>();
                processPrimaryKeys(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), primaryKeys);
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                foreignKeys = new ArrayList<>();
                processForeignKeys(qualified[0], qualified[1], constraintChild, foreignKeys);
                break;
            default:
                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
        }
    }
    /* Validate the operation of renaming a column name. */
    Table tab = getTable(qualified);
    if (checkConstraints != null && !checkConstraints.isEmpty()) {
        validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
    }
    if (tab.getTableType() == TableType.EXTERNAL_TABLE && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraints are disallowed with External tables. " + "Only RELY is allowed."));
    }
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new SemanticException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = getDotName(qualified);
    AlterTableDesc alterTblDesc;
    if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null && checkConstraints == null) {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
                unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade);
    } else {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
                unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade,
                primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
    }
    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
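
The external-table check above goes through hasEnabledOrValidatedConstraints; the sketch below is not the actual Hive helper but approximates the idea for the not-null case only. Per the error message above, external tables may only carry RELY constraints, so anything ENABLEd or VALIDATEd is rejected; the boolean getters are assumed to follow the usual Thrift naming (isEnable_cstr, isValidate_cstr).

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;

// Hypothetical approximation, not the BaseSemanticAnalyzer helper.
public final class ExternalTableConstraintCheck {
    private ExternalTableConstraintCheck() {
    }

    /** True if any NOT NULL constraint is enabled or validated (disallowed on external tables). */
    public static boolean hasEnabledOrValidatedNotNull(List<SQLNotNullConstraint> notNullConstraints) {
        if (notNullConstraints == null) {
            return false;
        }
        for (SQLNotNullConstraint nn : notNullConstraints) {
            if (nn.isEnable_cstr() || nn.isValidate_cstr()) {
                return true;
            }
        }
        return false;
    }
}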

Aggregations

SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 11 usages
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 10 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 8 usages
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 8 usages
SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 6 usages
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 6 usages
ArrayList (java.util.ArrayList): 5 usages
MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 3 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 3 usages
DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 3 usages
Query (javax.jdo.Query): 2 usages
Path (org.apache.hadoop.fs.Path): 2 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2 usages
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 2 usages
NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint): 2 usages
FileNotFoundException (java.io.FileNotFoundException): 1 usage
IOException (java.io.IOException): 1 usage
Serializable (java.io.Serializable): 1 usage
Collection (java.util.Collection): 1 usage
HashMap (java.util.HashMap): 1 usage