
Example 86 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class AddUniqueConstraintHandler method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
    List<SQLUniqueConstraint> uks;
    try {
        uks = msg.getUniqueConstraints();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (uks.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
    final String actualTblName = uks.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLUniqueConstraint uk : uks) {
        uk.setTable_db(actualDbName);
        uk.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, null, null, uks, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) Task(org.apache.hadoop.hive.ql.exec.Task) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AddUniqueConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc)
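The Constraints container used above is positional, and each handler populates exactly one slot. A minimal sketch of that layout, inferred from Examples 86, 89, and 90 in this section; the first, third, and fifth slots are assumptions (primary-key, not-null, and default constraints) not visible in the quoted code:

import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;

final class ConstraintsSketch {
    // Slot order as used by the handlers in this section:
    // (primary keys, foreign keys, not-null, unique, default, check).
    // Slots 2, 4, and 6 are visible above; 1, 3, and 5 are assumed.
    static Constraints uniqueOnly(List<SQLUniqueConstraint> uks) {
        // Only the unique-constraint slot is populated, mirroring
        // AddUniqueConstraintHandler; the rest stay null.
        return new Constraints(null, null, null, uks, null, null);
    }
}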

Example 87 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class CreateDatabaseHandler method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    MetaData metaData;
    try {
        FileSystem fs = FileSystem.get(new Path(context.location).toUri(), context.hiveConf);
        metaData = EximUtil.readMetaData(fs, new Path(context.location, EximUtil.METADATA_NAME));
    } catch (IOException e) {
        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
    }
    Database db = metaData.getDatabase();
    String destinationDBName = context.dbName == null ? db.getName() : context.dbName;
    CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, null, true, db.getParameters());
    Task<DDLWork> createDBTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    if (!db.getParameters().isEmpty()) {
        AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec());
        Task<DDLWork> alterDbProperties = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        createDBTask.addDependentTask(alterDbProperties);
    }
    if (StringUtils.isNotEmpty(db.getOwnerName())) {
        AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName, new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec());
        Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        createDBTask.addDependentTask(alterDbTask);
    }
    updatedMetadata.set(context.dmd.getEventTo().toString(), destinationDBName, null, null);
    return Collections.singletonList(createDBTask);
}
Also used : Path(org.apache.hadoop.fs.Path) IOException(java.io.IOException) PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) CreateDatabaseDesc(org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc) MetaData(org.apache.hadoop.hive.ql.parse.repl.load.MetaData) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) AlterDatabaseSetPropertiesDesc(org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc) AlterDatabaseSetOwnerDesc(org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HashSet(java.util.HashSet)
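CreateDatabaseHandler returns a single root task and hangs the optional ALTER DATABASE tasks off it with addDependentTask, so they run only after the CREATE succeeds. A minimal chaining sketch, assuming the plain DDLWork(inputs, outputs, desc) constructor rather than the replication-specific overload used above:

import java.util.HashSet;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;

final class ChainSketch {
    static Task<DDLWork> chain(DDLDesc rootDesc, DDLDesc followUpDesc, HiveConf conf) {
        Task<DDLWork> root = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), rootDesc), conf);
        Task<DDLWork> followUp = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), followUpDesc), conf);
        // followUp is scheduled only after root completes successfully.
        root.addDependentTask(followUp);
        return root;
    }
}

This keeps the CREATE DATABASE as the single task handed back to the scheduler, while the property and owner updates ride along as dependents.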

Example 88 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class RenameTableHandler method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
    try {
        Table tableObjBefore = msg.getTableObjBefore();
        Table tableObjAfter = msg.getTableObjAfter();
        String oldDbName = tableObjBefore.getDbName();
        String newDbName = tableObjAfter.getDbName();
        if (!context.isDbNameEmpty()) {
            // when loading into a specific db, the oldDbName and newDbName must be the same
            if (!oldDbName.equalsIgnoreCase(newDbName)) {
                throw new SemanticException("Cannot replicate an event renaming a table across" + " databases into a db level load " + oldDbName + "->" + newDbName);
            } else {
                // both were the same, and can be replaced by the new db we're loading into.
                oldDbName = context.dbName;
                newDbName = context.dbName;
            }
        }
        TableName oldName = TableName.fromString(tableObjBefore.getTableName(), null, oldDbName);
        TableName newName = TableName.fromString(tableObjAfter.getTableName(), null, newDbName);
        ReplicationSpec replicationSpec = context.eventOnlyReplicationSpec();
        AlterTableRenameDesc renameTableDesc = new AlterTableRenameDesc(oldName, replicationSpec, false, newName.getNotEmptyDbTable());
        renameTableDesc.setWriteId(msg.getWriteId());
        Task<DDLWork> renameTableTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, renameTableDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        context.log.debug("Added rename table task : {}:{}->{}", renameTableTask.getId(), oldName.getNotEmptyDbTable(), newName.getNotEmptyDbTable());
        // oldDbName and newDbName *will* be the same if we're here
        updatedMetadata.set(context.dmd.getEventTo().toString(), newDbName, tableObjAfter.getTableName(), null);
        // Note: table-level REPL LOAD is an edge case here, but that kind of
        // replication is not supported and errors out above. If that should
        // ever change, this will need reworking.
        return ReplUtils.addChildTask(renameTableTask);
    } catch (Exception e) {
        throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
    }
}
Also used : TableName(org.apache.hadoop.hive.common.TableName) ReplicationSpec(org.apache.hadoop.hive.ql.parse.ReplicationSpec) Table(org.apache.hadoop.hive.metastore.api.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableMessage(org.apache.hadoop.hive.metastore.messaging.AlterTableMessage) AlterTableRenameDesc(org.apache.hadoop.hive.ql.ddl.table.misc.rename.AlterTableRenameDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
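Both table names above come from TableName.fromString(name, defaultCatalog, defaultDatabase), which qualifies a bare table name with the given defaults. A minimal usage sketch; the literal names here are hypothetical:

import org.apache.hadoop.hive.common.TableName;

final class TableNameSketch {
    static String qualified() {
        // "sales" is unqualified, so the default database "repl_db" is applied;
        // null leaves the catalog default unset.
        TableName name = TableName.fromString("sales", null, "repl_db");
        return name.getNotEmptyDbTable(); // "repl_db.sales"
    }
}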

Example 89 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class AddCheckConstraintHandler method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddCheckConstraintMessage msg = deserializer.getAddCheckConstraintMessage(context.dmd.getPayload());
    List<SQLCheckConstraint> ccs;
    try {
        ccs = msg.getCheckConstraints();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (ccs.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? ccs.get(0).getTable_db() : context.dbName;
    final String actualTblName = ccs.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLCheckConstraint ck : ccs) {
        ck.setTable_db(actualDbName);
        ck.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, null, null, null, null, ccs);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used : Task(org.apache.hadoop.hive.ql.exec.Task) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) AddCheckConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddCheckConstraintMessage) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc)
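All four constraint handlers in this section repeat the same unwrap-or-rethrow block when reading message members. A hedged refactoring sketch; readMember is a hypothetical helper, not part of Hive:

import java.util.concurrent.Callable;

import org.apache.hadoop.hive.ql.parse.SemanticException;

final class MessageReadSketch {
    static <T> T readMember(Callable<T> reader) throws SemanticException {
        try {
            return reader.call();
        } catch (Exception e) {
            // Pass SemanticExceptions through untouched; wrap everything else.
            throw (e instanceof SemanticException)
                    ? (SemanticException) e
                    : new SemanticException("Error reading message members", e);
        }
    }
}

With it, each handler's try/catch collapses to a call like ccs = readMember(msg::getCheckConstraints).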

Example 90 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class AddForeignKeyHandler method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
    List<SQLForeignKey> fks;
    try {
        fks = msg.getForeignKeys();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (fks.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
    final String actualTblName = fks.get(0).getFktable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLForeignKey fk : fks) {
        // Remap the parent (PK) db only when it matches the child's; otherwise, keep its db name
        if (fk.getPktable_db().equals(fk.getFktable_db())) {
            fk.setPktable_db(actualDbName);
        }
        fk.setFktable_db(actualDbName);
        fk.setFktable_name(actualTblName);
    }
    Constraints constraints = new Constraints(null, fks, null, null, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used : AddForeignKeyMessage(org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage) Task(org.apache.hadoop.hive.ql.exec.Task) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc)
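The loop above rewrites the child (FK) side unconditionally but the parent (PK) side only when both sides share a database, so cross-database foreign keys keep pointing at the original parent. A minimal sketch of that rule, extracted as a hypothetical helper:

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

final class ForeignKeyRemapSketch {
    static void remap(SQLForeignKey fk, String actualDbName, String actualTblName) {
        // Remap the parent only if it lives in the same db as the child;
        // cross-database references are left untouched.
        if (fk.getPktable_db().equals(fk.getFktable_db())) {
            fk.setPktable_db(actualDbName);
        }
        fk.setFktable_db(actualDbName);
        fk.setFktable_name(actualTblName);
    }
}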

Aggregations (usage counts)

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 61
Table (org.apache.hadoop.hive.ql.metadata.Table): 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 24
TableName (org.apache.hadoop.hive.common.TableName): 23
Test (org.junit.Test): 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 21
ArrayList (java.util.ArrayList): 18
Path (org.apache.hadoop.fs.Path): 15
HashMap (java.util.HashMap): 14
Database (org.apache.hadoop.hive.metastore.api.Database): 12
Task (org.apache.hadoop.hive.ql.exec.Task): 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Tree (org.antlr.runtime.tree.Tree): 10
HashSet (java.util.HashSet): 9
Context (org.apache.hadoop.hive.ql.Context): 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc): 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc): 8