Example 51 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class AddNotNullConstraintHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    AddNotNullConstraintMessage msg = deserializer.getAddNotNullConstraintMessage(context.dmd.getPayload());
    List<SQLNotNullConstraint> nns = null;
    try {
        nns = msg.getNotNullConstraints();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
    if (nns.isEmpty()) {
        return tasks;
    }
    String actualDbName = context.isDbNameEmpty() ? nns.get(0).getTable_db() : context.dbName;
    String actualTblName = context.isTableNameEmpty() ? nns.get(0).getTable_name() : context.tableName;
    for (SQLNotNullConstraint nn : nns) {
        nn.setTable_db(actualDbName);
        nn.setTable_name(actualTblName);
    }
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(), new ArrayList<SQLForeignKey>(), new ArrayList<SQLUniqueConstraint>(), nns, new ArrayList<SQLDefaultConstraint>(), new ArrayList<SQLCheckConstraint>(), context.eventOnlyReplicationSpec());
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) AddNotNullConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
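
Each of these handlers follows the same shape: build a DDL descriptor, wrap it in a DDLWork together with the read and write entity sets, and let TaskFactory turn it into an executable task. A minimal sketch of that wrapping step (hypothetical, not taken from the Hive sources; empty entity sets stand in for the handlers' readEntitySet/writeEntitySet fields, and replicationSpec is assumed to be in scope):

HashSet<ReadEntity> inputs = new HashSet<>();   // read entities affected by the DDL
HashSet<WriteEntity> outputs = new HashSet<>(); // write entities affected by the DDL
// Same constructor as in Example 53; replicationSpec is an assumed in-scope ReplicationSpec.
DropDatabaseDesc desc = new DropDatabaseDesc("some_db", true, replicationSpec);
Task<? extends Serializable> task = TaskFactory.get(new DDLWork(inputs, outputs, desc));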

Example 52 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class AlterDatabaseHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    if (!context.isTableNameEmpty()) {
        throw new SemanticException("Alter Database are not supported for table-level replication");
    }
    AlterDatabaseMessage msg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    try {
        Database oldDb = msg.getDbObjBefore();
        Database newDb = msg.getDbObjAfter();
        AlterDatabaseDesc alterDbDesc;
        if ((oldDb.getOwnerType() == newDb.getOwnerType()) && oldDb.getOwnerName().equalsIgnoreCase(newDb.getOwnerName())) {
            // If the owner information is unchanged, then the DB properties must have changed
            Map<String, String> newDbProps = new HashMap<>();
            Map<String, String> dbProps = newDb.getParameters();
            for (Map.Entry<String, String> entry : dbProps.entrySet()) {
                String key = entry.getKey();
                // Ignore the keys which are local to source warehouse
                if (key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX) || key.equals(ReplicationSpec.KEY.CURR_STATE_ID.toString())) {
                    continue;
                }
                newDbProps.put(key, entry.getValue());
            }
            alterDbDesc = new AlterDatabaseDesc(actualDbName, newDbProps, context.eventOnlyReplicationSpec());
        } else {
            alterDbDesc = new AlterDatabaseDesc(actualDbName, new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()), context.eventOnlyReplicationSpec());
        }
        Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, alterDbDesc));
        context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), actualDbName);
        // Only database object is updated
        updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
        return Collections.singletonList(alterDbTask);
    } catch (Exception e) {
        throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
    }
}
Also used : HashMap(java.util.HashMap) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) PrincipalDesc(org.apache.hadoop.hive.ql.plan.PrincipalDesc) AlterDatabaseMessage(org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) Database(org.apache.hadoop.hive.metastore.api.Database) AlterDatabaseDesc(org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc) Map(java.util.Map)
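
The property-copying loop above keeps only the parameters worth replicating; a minimal sketch of the same logic pulled out into a helper (hypothetical method name, not part of the Hive handler):

// Hypothetical helper: copy database parameters, skipping keys that are local to the
// source warehouse (bootstrap dump state and the current replication state id).
private static Map<String, String> copyReplicableDbProps(Map<String, String> dbProps) {
    Map<String, String> newDbProps = new HashMap<>();
    for (Map.Entry<String, String> entry : dbProps.entrySet()) {
        String key = entry.getKey();
        if (key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX)
                || key.equals(ReplicationSpec.KEY.CURR_STATE_ID.toString())) {
            continue;
        }
        newDbProps.put(key, entry.getValue());
    }
    return newDbProps;
}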

Example 53 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class DropDatabaseHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    DropDatabaseMessage msg = deserializer.getDropDatabaseMessage(context.dmd.getPayload());
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    DropDatabaseDesc desc = new DropDatabaseDesc(actualDbName, true, context.eventOnlyReplicationSpec());
    Task<? extends Serializable> dropDBTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc));
    context.log.info("Added drop database task : {}:{}", dropDBTask.getId(), desc.getDatabaseName());
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
    return Collections.singletonList(dropDBTask);
}
Also used : DropDatabaseDesc(org.apache.hadoop.hive.ql.plan.DropDatabaseDesc) DropDatabaseMessage(org.apache.hadoop.hive.metastore.messaging.DropDatabaseMessage) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) HashSet(java.util.HashSet)

Example 54 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class DropPartitionHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    try {
        DropPartitionMessage msg = deserializer.getDropPartitionMessage(context.dmd.getPayload());
        String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
        String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName;
        Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = genPartSpecs(new Table(msg.getTableObj()), msg.getPartitions());
        if (partSpecs.size() > 0) {
            DropTableDesc dropPtnDesc = new DropTableDesc(actualDbName + "." + actualTblName, partSpecs, null, true, context.eventOnlyReplicationSpec());
            Task<DDLWork> dropPtnTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc));
            context.log.debug("Added drop ptn task : {}:{},{}", dropPtnTask.getId(), dropPtnDesc.getTableName(), msg.getPartitions());
            updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
            return Collections.singletonList(dropPtnTask);
        } else {
            throw new SemanticException("DROP PARTITION EVENT does not return any part descs for event message :" + context.dmd.getPayload());
        }
    } catch (Exception e) {
        throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
    }
}
Also used : Table(org.apache.hadoop.hive.ql.metadata.Table) DropPartitionMessage(org.apache.hadoop.hive.metastore.messaging.DropPartitionMessage) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) DropTableDesc(org.apache.hadoop.hive.ql.plan.DropTableDesc) ArrayList(java.util.ArrayList) List(java.util.List) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 55 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class RenamePartitionHandler, method handle:

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload());
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName;
    Map<String, String> newPartSpec = new LinkedHashMap<>();
    Map<String, String> oldPartSpec = new LinkedHashMap<>();
    String tableName = actualDbName + "." + actualTblName;
    try {
        Table tblObj = msg.getTableObj();
        Iterator<String> beforeIterator = msg.getPtnObjBefore().getValuesIterator();
        Iterator<String> afterIterator = msg.getPtnObjAfter().getValuesIterator();
        for (FieldSchema fs : tblObj.getPartitionKeys()) {
            oldPartSpec.put(fs.getName(), beforeIterator.next());
            newPartSpec.put(fs.getName(), afterIterator.next());
        }
    } catch (Exception e) {
        throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
    }
    RenamePartitionDesc renamePtnDesc = new RenamePartitionDesc(tableName, oldPartSpec, newPartSpec, context.eventOnlyReplicationSpec());
    Task<DDLWork> renamePtnTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, renamePtnDesc));
    context.log.debug("Added rename ptn task : {}:{}->{}", renamePtnTask.getId(), oldPartSpec, newPartSpec);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, newPartSpec);
    return Collections.singletonList(renamePtnTask);
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) RenamePartitionDesc(org.apache.hadoop.hive.ql.plan.RenamePartitionDesc) AlterPartitionMessage(org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) LinkedHashMap(java.util.LinkedHashMap)
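
For illustration, a minimal sketch (hypothetical table and values, not from the Hive sources) of what the two specs hold when a partition of a table partitioned by year and month is renamed:

// The partition keys are iterated in order, so both specs stay keyed in partition-key order.
Map<String, String> oldPartSpec = new LinkedHashMap<>();
Map<String, String> newPartSpec = new LinkedHashMap<>();
oldPartSpec.put("year", "2021");
oldPartSpec.put("month", "1");
newPartSpec.put("year", "2021");
newPartSpec.put("month", "2");
// RenamePartitionDesc carries both specs so the DDL task can locate the old partition
// and rename it to the new one.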

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 141
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 26
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 24
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 24
Table (org.apache.hadoop.hive.ql.metadata.Table): 22
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 20
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 20
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 20
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc): 20
Test (org.junit.Test): 20
ArrayList (java.util.ArrayList): 19
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 19
NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint): 19
HashMap (java.util.HashMap): 17
LinkedHashMap (java.util.LinkedHashMap): 16
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 14
Task (org.apache.hadoop.hive.ql.exec.Task): 11
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 11
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 10
Serializable (java.io.Serializable): 9