
Example 21 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.

From the class AddForeignKeyHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
    List<SQLForeignKey> fks = null;
    try {
        fks = msg.getForeignKeys();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
    if (fks.isEmpty()) {
        return tasks;
    }
    String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
    String actualTblName = context.isTableNameEmpty() ? fks.get(0).getFktable_name() : context.tableName;
    for (SQLForeignKey fk : fks) {
        // If the parent (pk) table lives in the same database as the child table,
        // remap it to the target db as well; otherwise keep its original db name.
        if (fk.getPktable_db().equals(fk.getFktable_db())) {
            fk.setPktable_db(actualDbName);
        }
        fk.setFktable_db(actualDbName);
        fk.setFktable_name(actualTblName);
    }
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(), fks, new ArrayList<SQLUniqueConstraint>(), context.eventOnlyReplicationSpec());
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return tasks;
}
Also used: AddForeignKeyMessage(org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
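The try/catch here rethrows anything that is not already a SemanticException wrapped in one, and the same block reappears verbatim in the next two handlers. A minimal sketch of how that pattern could be factored out (the MessageReads helper is hypothetical, not part of Hive):

import java.util.concurrent.Callable;

import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical utility: run one message-deserialization call and normalize
// any failure into a SemanticException, passing an existing one through unchanged.
final class MessageReads {

    private MessageReads() {
    }

    static <T> T readOrWrap(Callable<T> read) throws SemanticException {
        try {
            return read.call();
        } catch (SemanticException e) {
            // already the right type; rethrow as-is
            throw e;
        } catch (Exception e) {
            throw new SemanticException("Error reading message members", e);
        }
    }
}

With that helper, each handler's deserialization step would shrink to a single line such as List<SQLForeignKey> fks = MessageReads.readOrWrap(msg::getForeignKeys);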

Example 22 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.

From the class AddPrimaryKeyHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
    List<SQLPrimaryKey> pks = null;
    try {
        pks = msg.getPrimaryKeys();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
    if (pks.isEmpty()) {
        return tasks;
    }
    String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
    String actualTblName = context.isTableNameEmpty() ? pks.get(0).getTable_name() : context.tableName;
    for (SQLPrimaryKey pk : pks) {
        pk.setTable_db(actualDbName);
        pk.setTable_name(actualTblName);
    }
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, pks, new ArrayList<SQLForeignKey>(), new ArrayList<SQLUniqueConstraint>(), context.eventOnlyReplicationSpec());
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return tasks;
}
Also used: SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AddPrimaryKeyMessage(org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
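The remapping loop deserves a note: constraint objects deserialized from the event still carry the source database name, and the handler overwrites them with the database being loaded into. A standalone illustration (the class, db names, and table name below are made up; SQLPrimaryKey and its accessors are the Thrift-generated API already used above):

import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

// Illustrative only: an event recorded against db "src" being replayed into
// db "dst" has its coordinates rewritten before the DDL descriptor is built.
public class RemapDemo {

    public static void main(String[] args) {
        SQLPrimaryKey pk = new SQLPrimaryKey();
        // as carried in the replicated message
        pk.setTable_db("src");
        pk.setTable_name("t1");

        // what the handler's loop does, using context.dbName on the target
        String actualDbName = "dst";
        pk.setTable_db(actualDbName);
        // prints dst.t1
        System.out.println(pk.getTable_db() + "." + pk.getTable_name());
    }
}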

Example 23 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.

From the class AddUniqueConstraintHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
    List<SQLUniqueConstraint> uks = null;
    try {
        uks = msg.getUniqueConstraints();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
    if (uks.isEmpty()) {
        return tasks;
    }
    String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
    String actualTblName = context.isTableNameEmpty() ? uks.get(0).getTable_name() : context.tableName;
    for (SQLUniqueConstraint uk : uks) {
        uk.setTable_db(actualDbName);
        uk.setTable_name(actualTblName);
    }
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(), new ArrayList<SQLForeignKey>(), uks, context.eventOnlyReplicationSpec());
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return tasks;
}
Also used: SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) AddUniqueConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage)
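Examples 21 through 23 are structurally identical: each calls the same five-argument AlterTableDesc constructor, passing empty lists for the constraint kinds that do not apply. A sketch of that shared shape (the factory class is invented for illustration; the constructor signature and the ReplicationSpec argument are the ones used in the examples):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

// Hypothetical factory mirroring the three add-constraint handlers:
// exactly one of the constraint lists is non-empty in any given call.
final class AddConstraintsDescs {

    static AlterTableDesc forForeignKeys(String db, String tbl,
            List<SQLForeignKey> fks, ReplicationSpec spec) {
        return new AlterTableDesc(db + "." + tbl,
                new ArrayList<SQLPrimaryKey>(), fks,
                new ArrayList<SQLUniqueConstraint>(), spec);
    }

    static AlterTableDesc forPrimaryKeys(String db, String tbl,
            List<SQLPrimaryKey> pks, ReplicationSpec spec) {
        return new AlterTableDesc(db + "." + tbl,
                pks, new ArrayList<SQLForeignKey>(),
                new ArrayList<SQLUniqueConstraint>(), spec);
    }
}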

Example 24 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.

From the class DropConstraintHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload());
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName;
    String constraintName = msg.getConstraint();
    AlterTableDesc dropConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, constraintName, context.eventOnlyReplicationSpec());
    Task<DDLWork> dropConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc));
    context.log.debug("Added drop constraint task : {}:{}", dropConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(dropConstraintsTask);
}
Also used: AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) DropConstraintMessage(org.apache.hadoop.hive.metastore.messaging.DropConstraintMessage) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
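Dropping is simpler than adding: the descriptor needs no constraint lists, only the qualified table name, the constraint's name, and the replication spec. The three-argument constructor used above, shown in isolation (the wrapper class is invented for illustration):

import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

// Hypothetical one-liner mirroring DropConstraintHandler's descriptor
// construction; compare the five-argument add form in Examples 21-23.
final class DropConstraintDescs {

    static AlterTableDesc of(String db, String tbl, String constraintName,
            ReplicationSpec spec) {
        return new AlterTableDesc(db + "." + tbl, constraintName, spec);
    }
}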

Example 25 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.

From the class RenameTableHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
    if (!context.isTableNameEmpty()) {
        throw new SemanticException("RENAMES of tables are not supported for table-level replication");
    }
    try {
        String oldDbName = msg.getTableObjBefore().getDbName();
        String newDbName = msg.getTableObjAfter().getDbName();
        if (!context.isDbNameEmpty()) {
            // newDbName must be the same
            if (!oldDbName.equalsIgnoreCase(newDbName)) {
                throw new SemanticException("Cannot replicate an event renaming a table across" + " databases into a db level load " + oldDbName + "->" + newDbName);
            } else {
                // both were the same, and can be replaced by the new db we're loading into.
                oldDbName = context.dbName;
                newDbName = context.dbName;
            }
        }
        String oldName = StatsUtils.getFullyQualifiedTableName(oldDbName, msg.getTableObjBefore().getTableName());
        String newName = StatsUtils.getFullyQualifiedTableName(newDbName, msg.getTableObjAfter().getTableName());
        AlterTableDesc renameTableDesc = new AlterTableDesc(oldName, newName, false, context.eventOnlyReplicationSpec());
        Task<DDLWork> renameTableTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, renameTableDesc));
        context.log.debug("Added rename table task : {}:{}->{}", renameTableTask.getId(), oldName, newName);
        // oldDbName and newDbName *will* be the same if we're here; the check above
        // errors out if they differ. If that should ever change, this will need reworking.
        updatedMetadata.set(context.dmd.getEventTo().toString(), newDbName, msg.getTableObjAfter().getTableName(), null);
        return Collections.singletonList(renameTableTask);
    } catch (Exception e) {
        throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
    }
}
Also used: AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) AlterTableMessage(org.apache.hadoop.hive.metastore.messaging.AlterTableMessage) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
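The rename path uses yet another overload: the old and new fully-qualified names, a boolean that is false here (judging by the handler's usage this is presumably an expect-view flag marking a plain table rename; that reading is an assumption, not confirmed by the source), and the replication spec. In isolation:

import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

// Hypothetical mirror of RenameTableHandler's descriptor construction.
// The boolean is assumed to distinguish table vs. view renames.
final class RenameDescs {

    static AlterTableDesc of(String oldQualifiedName, String newQualifiedName,
            ReplicationSpec spec) {
        return new AlterTableDesc(oldQualifiedName, newQualifiedName,
                /* assumed expectView */ false, spec);
    }
}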

Aggregations

AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 26
DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 24
ArrayList (java.util.ArrayList): 10
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 8
SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 6
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 6
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 6
ImmutableList (com.google.common.collect.ImmutableList): 4
Serializable (java.io.Serializable): 4
LinkedList (java.util.LinkedList): 4
List (java.util.List): 4
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 4
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 4
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 4
Task (org.apache.hadoop.hive.ql.exec.Task): 4
Table (org.apache.hadoop.hive.ql.metadata.Table): 4
HashMap (java.util.HashMap): 3
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 3
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3
FileNotFoundException (java.io.FileNotFoundException): 2