
Example 81 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class TruncatePartitionHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload());
    final TableName tName = TableName.fromString(msg.getTable(), null, context.isDbNameEmpty() ? msg.getDB() : context.dbName);
    Map<String, String> partSpec = new LinkedHashMap<>();
    org.apache.hadoop.hive.metastore.api.Table tblObj;
    try {
        tblObj = msg.getTableObj();
        Iterator<String> afterIterator = msg.getPtnObjAfter().getValuesIterator();
        for (FieldSchema fs : tblObj.getPartitionKeys()) {
            partSpec.put(fs.getName(), afterIterator.next());
        }
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    TruncateTableDesc truncateTableDesc = new TruncateTableDesc(tName, partSpec, context.eventOnlyReplicationSpec());
    truncateTableDesc.setWriteId(msg.getWriteId());
    Task<DDLWork> truncatePtnTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    context.log.debug("Added truncate ptn task : {}:{}:{}", truncatePtnTask.getId(), truncateTableDesc.getTableName(), truncateTableDesc.getWriteId());
    updatedMetadata.set(context.dmd.getEventTo().toString(), tName.getDb(), tName.getTable(), partSpec);
    try {
        return ReplUtils.addChildTask(truncatePtnTask);
    } catch (Exception e) {
        throw new SemanticException(e.getMessage());
    }
}
Also used : TruncateTableDesc(org.apache.hadoop.hive.ql.ddl.table.misc.truncate.TruncateTableDesc) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) LinkedHashMap(java.util.LinkedHashMap) TableName(org.apache.hadoop.hive.common.TableName) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterPartitionMessage(org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage)
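
The partSpec construction above relies on a LinkedHashMap to keep the partition columns in declaration order while zipping them with the event's after-image values. A minimal standalone sketch of just that step, with plain strings standing in for the FieldSchema list and the message's value iterator:

import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class PartSpecSketch {
    public static void main(String[] args) {
        // Hypothetical stand-ins for tblObj.getPartitionKeys() and
        // msg.getPtnObjAfter().getValuesIterator().
        List<String> partitionKeys = Arrays.asList("ds", "hr");
        Iterator<String> afterValues = Arrays.asList("2024-01-01", "05").iterator();
        // LinkedHashMap preserves insertion order, so the spec reads back
        // with columns in partition-key order, e.g. (ds='2024-01-01', hr='05').
        Map<String, String> partSpec = new LinkedHashMap<>();
        for (String key : partitionKeys) {
            partSpec.put(key, afterValues.next());
        }
        System.out.println(partSpec); // {ds=2024-01-01, hr=05}
    }
}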

Example 82 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class AddPrimaryKeyHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
    List<SQLPrimaryKey> pks;
    try {
        pks = msg.getPrimaryKeys();
    } catch (Exception e) {
        if (!(e instanceof SemanticException)) {
            throw new SemanticException("Error reading message members", e);
        } else {
            throw (SemanticException) e;
        }
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (pks.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
    final String actualTblName = pks.get(0).getTable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLPrimaryKey pk : pks) {
        pk.setTable_db(actualDbName);
        pk.setTable_name(actualTblName);
    }
    Constraints constraints = new Constraints(pks, null, null, null, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(addConstraintsTask);
}
Also used : AddPrimaryKeyMessage(org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Task(org.apache.hadoop.hive.ql.exec.Task) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc)
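
The catch blocks in Examples 81 and 82 share one idiom: rethrow a SemanticException unchanged, wrap anything else so callers see a single checked type. A standalone sketch of that shape, with a local exception class standing in for Hive's SemanticException:

public class WrapOrRethrowSketch {
    static class SemanticException extends Exception {
        SemanticException(String message, Throwable cause) { super(message, cause); }
    }

    // Hypothetical reader that fails with an arbitrary runtime exception.
    static String readMember() throws Exception {
        throw new IllegalStateException("payload truncated");
    }

    static String readMemberOrWrap() throws SemanticException {
        try {
            return readMember();
        } catch (Exception e) {
            // Preserve an already-typed SemanticException; wrap everything else.
            if (!(e instanceof SemanticException)) {
                throw new SemanticException("Error reading message members", e);
            }
            throw (SemanticException) e;
        }
    }

    public static void main(String[] args) {
        try {
            readMemberOrWrap();
        } catch (SemanticException e) {
            System.out.println(e.getMessage() + " <- " + e.getCause());
        }
    }
}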

Example 83 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class DropConstraintHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload());
    final String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    final String actualTblName = msg.getTable();
    final TableName tName = HiveTableName.ofNullable(actualTblName, actualDbName);
    String constraintName = msg.getConstraint();
    AlterTableDropConstraintDesc dropConstraintsDesc = new AlterTableDropConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraintName);
    Task<DDLWork> dropConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    context.log.debug("Added drop constrain task : {}:{}", dropConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return Collections.singletonList(dropConstraintsTask);
}
Also used : HiveTableName(org.apache.hadoop.hive.ql.parse.HiveTableName) TableName(org.apache.hadoop.hive.common.TableName) AlterTableDropConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.drop.AlterTableDropConstraintDesc) DropConstraintMessage(org.apache.hadoop.hive.metastore.messaging.DropConstraintMessage) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork)
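
Every handler in this set follows one skeleton: build an immutable descriptor for the operation, wrap it in a DDLWork together with read/write entity sets and replication metadata, obtain a Task from TaskFactory, and return it as a singleton list. A compile-and-run toy model of that shape, using hypothetical stand-in types rather than the real Hive API:

import java.util.Collections;
import java.util.List;
import java.util.Set;

public class DdlTaskShapeSketch {
    // Stand-ins for a *Desc, DDLWork, and Task<DDLWork>.
    record DropConstraintDesc(String table, String constraintName) {}
    record Work(Set<String> readEntities, Set<String> writeEntities, Object desc) {}
    record Task(int id, Work work) {}

    static int nextTaskId = 0;

    // Stand-in for TaskFactory.get(work, conf).
    static Task taskFor(Work work) {
        return new Task(nextTaskId++, work);
    }

    public static void main(String[] args) {
        DropConstraintDesc desc = new DropConstraintDesc("replica_db.t", "pk_t");
        Task task = taskFor(new Work(Set.of(), Set.of("replica_db.t"), desc));
        // Handlers return the scheduled task as a one-element list.
        List<Task> tasks = Collections.singletonList(task);
        System.out.println(tasks);
    }
}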

Example 84 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class DropFunctionHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    DropFunctionMessage msg = deserializer.getDropFunctionMessage(context.dmd.getPayload());
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    String qualifiedFunctionName = FunctionUtils.qualifyFunctionName(msg.getFunctionName(), actualDbName);
    // When the load is invoked via the Scheduler's executor route, the function's resources are
    // not on the classpath. Processing the drop-function event then tries to unregister the
    // function, which throws ClassNotFoundException in that case.
    // Obtaining the FunctionInfo object from FunctionRegistry adds the function's resource URLs to the UDFClassLoader.
    FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(qualifiedFunctionName);
    DropFunctionDesc desc = new DropFunctionDesc(qualifiedFunctionName, false, context.eventOnlyReplicationSpec());
    Task<DDLWork> dropFunctionTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, desc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    context.log.debug("Added drop function task : {}:{}", dropFunctionTask.getId(), desc.getName());
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
    return Collections.singletonList(dropFunctionTask);
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) DropFunctionDesc(org.apache.hadoop.hive.ql.ddl.function.drop.DropFunctionDesc) FunctionInfo(org.apache.hadoop.hive.ql.exec.FunctionInfo) DropFunctionMessage(org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage)
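
The getFunctionInfo call above is made purely for its side effect: resolving the function pulls its resource JARs into the session class loader, so the subsequent unregister step does not fail with ClassNotFoundException. A standalone sketch of that resolve-for-side-effect shape, with a hypothetical registry standing in for FunctionRegistry:

import java.util.HashMap;
import java.util.Map;

public class ResolveForSideEffectSketch {
    static final Map<String, Object> loadedResources = new HashMap<>();

    // Hypothetical resolver: looking up a name also performs setup work
    // (here, populating a cache; in Hive, registering resource URLs with
    // the class loader).
    static Object getFunctionInfo(String qualifiedName) {
        return loadedResources.computeIfAbsent(qualifiedName, n -> new Object());
    }

    public static void main(String[] args) {
        // Return value deliberately ignored; only the side effect matters.
        getFunctionInfo("replica_db.my_udf");
        System.out.println(loadedResources.containsKey("replica_db.my_udf")); // true
    }
}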

Example 85 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class DropTableHandler, method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    String actualDbName;
    String actualTblName;
    if (context.dmd.getDumpType() == DumpType.EVENT_RENAME_DROP_TABLE) {
        AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
        actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
        actualTblName = msg.getTable();
    } else {
        DropTableMessage msg = deserializer.getDropTableMessage(context.dmd.getPayload());
        actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
        actualTblName = msg.getTable();
    }
    DropTableDesc dropTableDesc = new DropTableDesc(actualDbName + "." + actualTblName, true, true, context.eventOnlyReplicationSpec(), false);
    Task<DDLWork> dropTableTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropTableDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    context.log.debug("Added drop tbl task : {}:{}", dropTableTask.getId(), dropTableDesc.getTableName());
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
    return Collections.singletonList(dropTableTask);
}
Also used : DropTableMessage(org.apache.hadoop.hive.metastore.messaging.DropTableMessage) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableMessage(org.apache.hadoop.hive.metastore.messaging.AlterTableMessage) DropTableDesc(org.apache.hadoop.hive.ql.ddl.table.drop.DropTableDesc)
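
Example 85 branches on the dump type because a rename appears to be replayed on the target as a drop of the old table: for EVENT_RENAME_DROP_TABLE the payload is an alter-table message that still carries the name to drop, while a plain drop event carries a drop-table message. A standalone sketch of that dispatch, with hypothetical stand-ins for the two Hive message types:

public class DumpTypeDispatchSketch {
    enum DumpType { EVENT_DROP_TABLE, EVENT_RENAME_DROP_TABLE }

    // Stand-ins for the two deserialized message types.
    record AlterTableMsg(String db, String table) {}
    record DropTableMsg(String db, String table) {}

    // Mirrors the handler's branch: a rename event reads the alter message,
    // a plain drop reads the drop message; overrideDb models context.dbName.
    static String tableToDrop(DumpType type, AlterTableMsg alter, DropTableMsg drop,
                              String overrideDb) {
        String db;
        String table;
        if (type == DumpType.EVENT_RENAME_DROP_TABLE) {
            db = overrideDb != null ? overrideDb : alter.db();
            table = alter.table();
        } else {
            db = overrideDb != null ? overrideDb : drop.db();
            table = drop.table();
        }
        return db + "." + table;
    }

    public static void main(String[] args) {
        AlterTableMsg alter = new AlterTableMsg("src_db", "old_name");
        System.out.println(tableToDrop(DumpType.EVENT_RENAME_DROP_TABLE, alter, null, "replica_db"));
        // -> replica_db.old_name
    }
}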

Aggregations

Types used together with DDLWork across these code examples, with occurrence counts:

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 61
Table (org.apache.hadoop.hive.ql.metadata.Table): 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 24
TableName (org.apache.hadoop.hive.common.TableName): 23
Test (org.junit.Test): 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 21
ArrayList (java.util.ArrayList): 18
Path (org.apache.hadoop.fs.Path): 15
HashMap (java.util.HashMap): 14
Database (org.apache.hadoop.hive.metastore.api.Database): 12
Task (org.apache.hadoop.hive.ql.exec.Task): 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Tree (org.antlr.runtime.tree.Tree): 10
HashSet (java.util.HashSet): 9
Context (org.apache.hadoop.hive.ql.Context): 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc): 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc): 8