Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class TruncatePartitionHandler, method handle:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload());
  final TableName tName = TableName.fromString(msg.getTable(), null,
      context.isDbNameEmpty() ? msg.getDB() : context.dbName);
  Map<String, String> partSpec = new LinkedHashMap<>();
  org.apache.hadoop.hive.metastore.api.Table tblObj;
  try {
    tblObj = msg.getTableObj();
    Iterator<String> afterIterator = msg.getPtnObjAfter().getValuesIterator();
    for (FieldSchema fs : tblObj.getPartitionKeys()) {
      partSpec.put(fs.getName(), afterIterator.next());
    }
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  TruncateTableDesc truncateTableDesc = new TruncateTableDesc(tName, partSpec, context.eventOnlyReplicationSpec());
  truncateTableDesc.setWriteId(msg.getWriteId());
  Task<DDLWork> truncatePtnTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()),
      context.hiveConf);
  context.log.debug("Added truncate ptn task : {}:{}:{}",
      truncatePtnTask.getId(), truncateTableDesc.getTableName(), truncateTableDesc.getWriteId());
  updatedMetadata.set(context.dmd.getEventTo().toString(), tName.getDb(), tName.getTable(), partSpec);
  try {
    return ReplUtils.addChildTask(truncatePtnTask);
  } catch (Exception e) {
    throw new SemanticException(e.getMessage());
  }
}
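The loop in the try block above pairs the table's ordered partition keys with the after-image partition values carried by the event, building the partition spec for the truncate. Below is a minimal standalone sketch of that pairing, using plain strings in place of Hive's FieldSchema and message objects; the helper class and method names are hypothetical, for illustration only.

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Minimal sketch (hypothetical helper, not part of the Hive codebase):
// pair ordered partition key names with the corresponding values.
public class PartSpecSketch {

  static Map<String, String> buildPartSpec(List<String> partitionKeys, List<String> partitionValues) {
    Map<String, String> partSpec = new LinkedHashMap<>();   // preserves partition-key order
    Iterator<String> valueIt = partitionValues.iterator();
    for (String key : partitionKeys) {
      partSpec.put(key, valueIt.next());   // assumes one value per key, in the same order
    }
    return partSpec;
  }

  public static void main(String[] args) {
    System.out.println(buildPartSpec(List.of("ds", "hr"), List.of("2024-01-01", "08")));
    // prints {ds=2024-01-01, hr=08}
  }
}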
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class AddPrimaryKeyHandler, method handle:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
  List<SQLPrimaryKey> pks;
  try {
    pks = msg.getPrimaryKeys();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (pks.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
  final String actualTblName = pks.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLPrimaryKey pk : pks) {
    pk.setTable_db(actualDbName);
    pk.setTable_name(actualTblName);
  }
  Constraints constraints = new Constraints(pks, null, null, null, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()),
      context.hiveConf);
  tasks.add(addConstraintsTask);
context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class DropConstraintHandler, method handle:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload());
  final String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  final String actualTblName = msg.getTable();
  final TableName tName = HiveTableName.ofNullable(actualTblName, actualDbName);
  String constraintName = msg.getConstraint();
  AlterTableDropConstraintDesc dropConstraintsDesc =
      new AlterTableDropConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraintName);
  Task<DDLWork> dropConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()),
      context.hiveConf);
context.log.debug("Added drop constrain task : {}:{}", dropConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(dropConstraintsTask);
}
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class DropFunctionHandler, method handle:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  DropFunctionMessage msg = deserializer.getDropFunctionMessage(context.dmd.getPayload());
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  String qualifiedFunctionName = FunctionUtils.qualifyFunctionName(msg.getFunctionName(), actualDbName);
  // When the load is invoked via Scheduler's executor route, the function resources will not be
  // there in classpath. Processing drop function event tries to unregister the function resulting
  // in ClassNotFoundException being thrown in such case.
  // Obtaining FunctionInfo object from FunctionRegistry will add the function's resources URLs to UDFClassLoader.
  FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(qualifiedFunctionName);
  DropFunctionDesc desc = new DropFunctionDesc(qualifiedFunctionName, false, context.eventOnlyReplicationSpec());
  Task<DDLWork> dropFunctionTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, desc, true,
          context.getDumpDirectory(), context.getMetricCollector()),
      context.hiveConf);
  context.log.debug("Added drop function task : {}:{}", dropFunctionTask.getId(), desc.getName());
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
  return Collections.singletonList(dropFunctionTask);
}
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class DropTableHandler, method handle:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  String actualDbName;
  String actualTblName;
  if (context.dmd.getDumpType() == DumpType.EVENT_RENAME_DROP_TABLE) {
    AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
    actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    actualTblName = msg.getTable();
  } else {
    DropTableMessage msg = deserializer.getDropTableMessage(context.dmd.getPayload());
    actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    actualTblName = msg.getTable();
  }
  DropTableDesc dropTableDesc = new DropTableDesc(actualDbName + "." + actualTblName, true, true,
      context.eventOnlyReplicationSpec(), false);
  Task<DDLWork> dropTableTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, dropTableDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()),
      context.hiveConf);
  context.log.debug("Added drop tbl task : {}:{}", dropTableTask.getId(), dropTableDesc.getTableName());
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
  return Collections.singletonList(dropTableTask);
}
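Across all five examples the shape is the same: deserialize the event payload, build a DDL descriptor, wrap it in a DDLWork together with the read/write entity sets, dump directory, and metric collector, create a task with TaskFactory.get, record the replicated state via updatedMetadata.set, and return the resulting task(s). Below is a minimal, self-contained sketch of that event-to-task dispatch pattern; the ReplayTask, Event, and EventHandler types and the event-type strings are simplified stand-ins for illustration, not Hive's actual Task, DumpMetaData, or MessageHandler API.

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical, simplified model of the handler pattern shown above.
public class EventHandlerSketch {

  // Stand-in for a task produced to replay an event on the target cluster.
  record ReplayTask(String description) { }

  // Stand-in for one replication event read from the dump.
  record Event(String type, String payload) { }

  // Each handler turns one event into the tasks that replay it.
  interface EventHandler {
    List<ReplayTask> handle(Event event);
  }

  // Dispatch table: event type -> handler (type names are illustrative).
  private static final Map<String, EventHandler> HANDLERS = new HashMap<>();
  static {
    HANDLERS.put("TRUNCATE_PARTITION", e -> List.of(new ReplayTask("truncate partition " + e.payload())));
    HANDLERS.put("DROP_TABLE", e -> List.of(new ReplayTask("drop table " + e.payload())));
    HANDLERS.put("DROP_FUNCTION", e -> List.of(new ReplayTask("drop function " + e.payload())));
  }

  static List<ReplayTask> dispatch(Event event) {
    EventHandler handler = HANDLERS.get(event.type());
    return handler == null ? Collections.emptyList() : handler.handle(event);   // unknown events produce no work
  }

  public static void main(String[] args) {
    // A drop-table event becomes a single replay task.
    System.out.println(dispatch(new Event("DROP_TABLE", "default.sales")));
  }
}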