
Example 1 with Constraints

Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

From the class AlterTableAddConstraintAnalyzer, method analyzeCommand:

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
    // the user specified a catalog
    List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
    List<SQLForeignKey> foreignKeys = new ArrayList<>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
    ASTNode constraintNode = (ASTNode) command.getChild(0);
    switch(constraintNode.getToken().getType()) {
        case HiveParser.TOK_UNIQUE:
            ConstraintsUtils.processUniqueConstraints(tableName, constraintNode, uniqueConstraints);
            break;
        case HiveParser.TOK_PRIMARY_KEY:
            ConstraintsUtils.processPrimaryKeys(tableName, constraintNode, primaryKeys);
            break;
        case HiveParser.TOK_FOREIGN_KEY:
            ConstraintsUtils.processForeignKeys(tableName, constraintNode, foreignKeys);
            break;
        case HiveParser.TOK_CHECK_CONSTRAINT:
            ConstraintsUtils.processCheckConstraints(tableName, constraintNode, null, checkConstraints, command, ctx.getTokenRewriteStream());
            break;
        default:
            throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintNode.getToken().getText()));
    }
    Constraints constraints = new Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, checkConstraints);
    AlterTableAddConstraintDesc desc = new AlterTableAddConstraintDesc(tableName, null, constraints);
    Table table = getTable(tableName);
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), Table (org.apache.hadoop.hive.ql.metadata.Table), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), ArrayList (java.util.ArrayList), Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
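
This analyzer backs statements like ALTER TABLE t ADD CONSTRAINT pk1 PRIMARY KEY (id) DISABLE NOVALIDATE. Note that the Constraints constructor is purely positional; reading across the examples on this page, the order appears to be (primaryKeys, foreignKeys, notNullConstraints, uniqueConstraints, defaultConstraints, checkConstraints). The sketch below, with hypothetical database, table, and column names, populates a single SQLPrimaryKey by hand and wraps it the same way Example 5 does:

import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;

public class ConstraintsSketch {
    // Builds a Constraints carrying only a primary key; the other five
    // positional slots are left null, as in the examples on this page.
    // All literals ("db", "tbl", "id", "pk_tbl") are hypothetical.
    static Constraints primaryKeyOnly() {
        SQLPrimaryKey pk = new SQLPrimaryKey();
        pk.setTable_db("db");
        pk.setTable_name("tbl");
        pk.setColumn_name("id");
        pk.setKey_seq(1); // 1-based position of the column within the key
        pk.setPk_name("pk_tbl");
        List<SQLPrimaryKey> pks = Collections.singletonList(pk);
        return new Constraints(pks, null, null, null, null, null);
    }
}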

Example 2 with Constraints

Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

From the class AlterTableChangeColumnAnalyzer, method analyzeCommand:

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
    String oldColumnName = command.getChild(0).getText().toLowerCase();
    String newColumnName = command.getChild(1).getText().toLowerCase();
    String newType = getTypeStringFromAST((ASTNode) command.getChild(2));
    Table table = getTable(tableName);
    SkewedInfo skewInfo = table.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColumnName)) {
        throw new SemanticException(oldColumnName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    ASTNode constraintChild = null;
    for (int i = 3; i < command.getChildCount(); i++) {
        ASTNode child = (ASTNode) command.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.StringLiteral:
                newComment = unescapeSQLString(child.getText());
                break;
            case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveParser.KW_FIRST:
                first = true;
                break;
            case HiveParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveParser.TOK_RESTRICT:
                break;
            default:
                constraintChild = child;
        }
    }
    Constraints constraints = getConstraints(tableName, command, newColumnName, table, constraintChild);
    AlterTableChangeColumnDesc desc = new AlterTableChangeColumnDesc(tableName, partitionSpec, isCascade, constraints, unescapeIdentifier(oldColumnName), unescapeIdentifier(newColumnName), newType, newComment, first, flagCol);
    if (AcidUtils.isTransactionalTable(table)) {
        // Note: we might actually need it only when certain changes (e.g. name or type?) are made.
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: Table (org.apache.hadoop.hive.ql.metadata.Table), Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints), SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
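
For orientation, the clause walked here (per the comment at the top of the method) corresponds to a statement of the shape below; table and column names are hypothetical:

ALTER TABLE t CHANGE COLUMN old_name new_name STRING COMMENT 'renamed' AFTER other_col CASCADE;

Each optional trailing token maps to one arm of the switch: the string literal becomes the new comment, TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION and KW_FIRST set the column position, TOK_CASCADE and TOK_RESTRICT control cascading, and any other child is treated as a column constraint and handed to getConstraints.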

Example 3 with Constraints

Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

From the class AddNotNullConstraintHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddNotNullConstraintMessage msg = deserializer.getAddNotNullConstraintMessage(context.dmd.getPayload());
    List<SQLNotNullConstraint> nns;
    try {
        nns = msg.getNotNullConstraints();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (nns.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? nns.get(0).getTable_db() : context.dbName;
    final String actualTblName = nns.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLNotNullConstraint nn : nns) {
        nn.setTable_db(actualDbName);
        nn.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, null, nns, null, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used: Task (org.apache.hadoop.hive.ql.exec.Task), ArrayList (java.util.ArrayList), AddNotNullConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), TableName (org.apache.hadoop.hive.common.TableName), Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), AlterTableAddConstraintDesc (org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)

Example 4 with Constraints

Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

From the class AddDefaultConstraintHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddDefaultConstraintMessage msg = deserializer.getAddDefaultConstraintMessage(context.dmd.getPayload());
    List<SQLDefaultConstraint> dcs;
    try {
        dcs = msg.getDefaultConstraints();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (dcs.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? dcs.get(0).getTable_db() : context.dbName;
    final String actualTblName = dcs.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLDefaultConstraint dc : dcs) {
        dc.setTable_db(actualDbName);
        dc.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, null, null, null, dcs, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used: Task (org.apache.hadoop.hive.ql.exec.Task), ArrayList (java.util.ArrayList), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), TableName (org.apache.hadoop.hive.common.TableName), Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), AlterTableAddConstraintDesc (org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc), AddDefaultConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddDefaultConstraintMessage), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)

Example 5 with Constraints

Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

From the class AddPrimaryKeyHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
    List<SQLPrimaryKey> pks;
    try {
        pks = msg.getPrimaryKeys();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (pks.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
    final String actualTblName = pks.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLPrimaryKey pk : pks) {
        pk.setTable_db(actualDbName);
        pk.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(pks, null, null, null, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used: AddPrimaryKeyMessage (org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), Task (org.apache.hadoop.hive.ql.exec.Task), ArrayList (java.util.ArrayList), TableName (org.apache.hadoop.hive.common.TableName), Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), AlterTableAddConstraintDesc (org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
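
Examples 3 through 5 are structurally identical replication handlers; the only differences are the message type being deserialized and which positional slot of the Constraints constructor the resulting list fills. A minimal sketch of just that varying piece, with hypothetical helper names (the actual Hive handlers are the separate classes shown above) and the constructor order assumed from these examples:

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;

public class ConstraintSlots {
    // Assumed order: (primaryKeys, foreignKeys, notNullConstraints,
    //                 uniqueConstraints, defaultConstraints, checkConstraints)
    static Constraints ofPrimaryKeys(List<SQLPrimaryKey> pks) {
        return new Constraints(pks, null, null, null, null, null); // Example 5
    }
    static Constraints ofNotNulls(List<SQLNotNullConstraint> nns) {
        return new Constraints(null, null, nns, null, null, null); // Example 3
    }
    static Constraints ofDefaults(List<SQLDefaultConstraint> dcs) {
        return new Constraints(null, null, null, null, dcs, null); // Example 4
    }
}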

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 8 usages
Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints): 8 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 8 usages
ArrayList (java.util.ArrayList): 7 usages
TableName (org.apache.hadoop.hive.common.TableName): 6 usages
AlterTableAddConstraintDesc (org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc): 6 usages
Task (org.apache.hadoop.hive.ql.exec.Task): 6 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 3 usages
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 3 usages
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 2 usages
SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 2 usages
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 2 usages
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 2 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 2 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 2 usages
SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo): 1 usage
AddCheckConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddCheckConstraintMessage): 1 usage
AddDefaultConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddDefaultConstraintMessage): 1 usage
AddForeignKeyMessage (org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage): 1 usage
AddNotNullConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage): 1 usage