Search in sources :

Example 6 with Constraints

use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

The handle method of the AddUniqueConstraintHandler class.

/**
 * Builds the replication task that re-applies a unique-constraint-added event
 * on the target warehouse.
 *
 * @param context replication load context carrying the serialized event payload.
 * @return a single-element list containing the ADD CONSTRAINT task, or an empty
 *         list when the event carries no unique constraints.
 * @throws SemanticException if the event payload cannot be deserialized.
 */
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
    List<SQLUniqueConstraint> uks;
    try {
        uks = msg.getUniqueConstraints();
    } catch (SemanticException e) {
        // Already the declared type: propagate as-is.
        throw e;
    } catch (Exception e) {
        // Wrap anything else, preserving the original cause.
        throw new SemanticException("Error reading message members", e);
    }
    List<Task<?>> tasks = new ArrayList<>();
    if (uks.isEmpty()) {
        return tasks;
    }
    // Prefer the db name from the load context when present (cross-db replication);
    // otherwise fall back to the db recorded in the event itself.
    final String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
    final String actualTblName = uks.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    // Remap every constraint onto the resolved target db/table.
    for (SQLUniqueConstraint uk : uks) {
        uk.setTable_db(actualDbName);
        uk.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, null, null, uks, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    // Return the accumulated list (one task) rather than a separate singleton,
    // so the tasks.add above is not dead code.
    return tasks;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) Task(org.apache.hadoop.hive.ql.exec.Task) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AddUniqueConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 7 with Constraints

use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

The handle method of the AddCheckConstraintHandler class.

/**
 * Builds the replication task that re-applies a check-constraint-added event
 * on the target warehouse.
 *
 * @param context replication load context carrying the serialized event payload.
 * @return a single-element list containing the ADD CONSTRAINT task, or an empty
 *         list when the event carries no check constraints.
 * @throws SemanticException if the event payload cannot be deserialized.
 */
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddCheckConstraintMessage msg = deserializer.getAddCheckConstraintMessage(context.dmd.getPayload());
    List<SQLCheckConstraint> ccs;
    try {
        ccs = msg.getCheckConstraints();
    } catch (SemanticException e) {
        // Already the declared type: propagate as-is.
        throw e;
    } catch (Exception e) {
        // Wrap anything else, preserving the original cause.
        throw new SemanticException("Error reading message members", e);
    }
    List<Task<?>> tasks = new ArrayList<>();
    if (ccs.isEmpty()) {
        return tasks;
    }
    // Prefer the db name from the load context when present (cross-db replication);
    // otherwise fall back to the db recorded in the event itself.
    final String actualDbName = context.isDbNameEmpty() ? ccs.get(0).getTable_db() : context.dbName;
    final String actualTblName = ccs.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    // Remap every constraint onto the resolved target db/table.
    for (SQLCheckConstraint ck : ccs) {
        ck.setTable_db(actualDbName);
        ck.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, null, null, null, null, ccs);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    // Return the accumulated list (one task) rather than a separate singleton,
    // so the tasks.add above is not dead code.
    return tasks;
}
Also used : Task(org.apache.hadoop.hive.ql.exec.Task) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) AddCheckConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddCheckConstraintMessage) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 8 with Constraints

use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.

The handle method of the AddForeignKeyHandler class.

/**
 * Builds the replication task that re-applies a foreign-key-added event
 * on the target warehouse.
 *
 * @param context replication load context carrying the serialized event payload.
 * @return a single-element list containing the ADD CONSTRAINT task, or an empty
 *         list when the event carries no foreign keys.
 * @throws SemanticException if the event payload cannot be deserialized.
 */
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
    List<SQLForeignKey> fks;
    try {
        fks = msg.getForeignKeys();
    } catch (SemanticException e) {
        // Already the declared type: propagate as-is.
        throw e;
    } catch (Exception e) {
        // Wrap anything else, preserving the original cause.
        throw new SemanticException("Error reading message members", e);
    }
    List<Task<?>> tasks = new ArrayList<>();
    if (fks.isEmpty()) {
        return tasks;
    }
    // Prefer the db name from the load context when present (cross-db replication);
    // otherwise fall back to the db recorded on the foreign-key (child) side.
    final String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
    final String actualTblName = fks.get(0).getFktable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLForeignKey fk : fks) {
        // If the referenced (primary-key) table lived in the same db as the
        // referencing table, remap it to the target db too; otherwise keep
        // its original db name.
        if (fk.getPktable_db().equals(fk.getFktable_db())) {
            fk.setPktable_db(actualDbName);
        }
        fk.setFktable_db(actualDbName);
        fk.setFktable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, fks, null, null, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    // Return the accumulated list (one task) rather than a separate singleton,
    // so the tasks.add above is not dead code.
    return tasks;
}
Also used : AddForeignKeyMessage(org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage) Task(org.apache.hadoop.hive.ql.exec.Task) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)8 Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints)8 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)8 ArrayList (java.util.ArrayList)7 TableName (org.apache.hadoop.hive.common.TableName)6 AlterTableAddConstraintDesc (org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc)6 Task (org.apache.hadoop.hive.ql.exec.Task)6 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)3 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)3 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)2 SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey)2 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)2 SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey)2 Table (org.apache.hadoop.hive.ql.metadata.Table)2 ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)2 SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo)1 AddCheckConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddCheckConstraintMessage)1 AddDefaultConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddDefaultConstraintMessage)1 AddForeignKeyMessage (org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage)1 AddNotNullConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage)1