Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class AddNotNullConstraintHandler, method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  AddNotNullConstraintMessage msg = deserializer.getAddNotNullConstraintMessage(context.dmd.getPayload());
  List<SQLNotNullConstraint> nns = null;
  try {
    nns = msg.getNotNullConstraints();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
  if (nns.isEmpty()) {
    return tasks;
  }
  String actualDbName = context.isDbNameEmpty() ? nns.get(0).getTable_db() : context.dbName;
  String actualTblName = context.isTableNameEmpty() ? nns.get(0).getTable_name() : context.tableName;
  for (SQLNotNullConstraint nn : nns) {
    nn.setTable_db(actualDbName);
    nn.setTable_name(actualTblName);
  }
  AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName,
      new ArrayList<SQLPrimaryKey>(), new ArrayList<SQLForeignKey>(), new ArrayList<SQLUniqueConstraint>(),
      nns, new ArrayList<SQLDefaultConstraint>(), new ArrayList<SQLCheckConstraint>(),
      context.eventOnlyReplicationSpec());
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
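Every handler in this collection ends the same way: build an event-specific descriptor, wrap it in a DDLWork together with the read/write entity sets, and obtain a Task from TaskFactory. Before that, this handler retargets each replicated constraint at the database and table resolved on the destination side. As a minimal sketch, that retargeting step could be pulled into a standalone helper; the class and method names below are hypothetical, and only the SQLNotNullConstraint setters already used above are assumed.

import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;

final class ConstraintRetarget {
  private ConstraintRetarget() {
  }

  // Point every replicated NOT NULL constraint at the destination database/table,
  // exactly as the loop in the handler above does.
  static void retarget(List<SQLNotNullConstraint> constraints, String dbName, String tblName) {
    for (SQLNotNullConstraint nn : constraints) {
      nn.setTable_db(dbName);
      nn.setTable_name(tblName);
    }
  }
}

With such a helper, the handler's loop would reduce to a single call like ConstraintRetarget.retarget(nns, actualDbName, actualTblName).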
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class AlterDatabaseHandler, method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  if (!context.isTableNameEmpty()) {
    throw new SemanticException("Alter Database are not supported for table-level replication");
  }
  AlterDatabaseMessage msg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  try {
    Database oldDb = msg.getDbObjBefore();
    Database newDb = msg.getDbObjAfter();
    AlterDatabaseDesc alterDbDesc;
    if ((oldDb.getOwnerType() == newDb.getOwnerType()) && oldDb.getOwnerName().equalsIgnoreCase(newDb.getOwnerName())) {
      // If owner information is unchanged, then DB properties would've changed
      Map<String, String> newDbProps = new HashMap<>();
      Map<String, String> dbProps = newDb.getParameters();
      for (Map.Entry<String, String> entry : dbProps.entrySet()) {
        String key = entry.getKey();
        // Ignore the keys which are local to source warehouse
        if (key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX)
            || key.equals(ReplicationSpec.KEY.CURR_STATE_ID.toString())) {
          continue;
        }
        newDbProps.put(key, entry.getValue());
      }
      alterDbDesc = new AlterDatabaseDesc(actualDbName, newDbProps, context.eventOnlyReplicationSpec());
    } else {
      alterDbDesc = new AlterDatabaseDesc(actualDbName, new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()),
          context.eventOnlyReplicationSpec());
    }
    Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, alterDbDesc));
    context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), actualDbName);
    // Only database object is updated
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
    return Collections.singletonList(alterDbTask);
  } catch (Exception e) {
    throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
  }
}
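The property-copying branch above drops keys that only make sense in the source warehouse before building the AlterDatabaseDesc. Below is a minimal sketch of that filter as a reusable helper; the prefix and state-id key are taken as parameters rather than imported, since in the handler they come from Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX and ReplicationSpec.KEY.CURR_STATE_ID, and the helper name itself is made up.

import java.util.HashMap;
import java.util.Map;

final class DbPropsFilter {
  private DbPropsFilter() {
  }

  // Copy the replicated database's parameters, skipping keys that are
  // bookkeeping local to the source warehouse and should not be replicated.
  static Map<String, String> stripSourceLocalKeys(Map<String, String> dbProps,
                                                  String bootstrapDumpStatePrefix,
                                                  String currStateIdKey) {
    Map<String, String> filtered = new HashMap<>();
    for (Map.Entry<String, String> entry : dbProps.entrySet()) {
      String key = entry.getKey();
      if (key.startsWith(bootstrapDumpStatePrefix) || key.equals(currStateIdKey)) {
        continue;
      }
      filtered.put(key, entry.getValue());
    }
    return filtered;
  }
}

In the handler, the loop would then become a single call such as DbPropsFilter.stripSourceLocalKeys(newDb.getParameters(), Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX, ReplicationSpec.KEY.CURR_STATE_ID.toString()).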
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class DropDatabaseHandler, method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  DropDatabaseMessage msg = deserializer.getDropDatabaseMessage(context.dmd.getPayload());
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  DropDatabaseDesc desc = new DropDatabaseDesc(actualDbName, true, context.eventOnlyReplicationSpec());
  Task<? extends Serializable> dropDBTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc));
  context.log.info("Added drop database task : {}:{}", dropDBTask.getId(), desc.getDatabaseName());
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
  return Collections.singletonList(dropDBTask);
}
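Note that this handler passes empty read/write entity sets to DDLWork instead of the inherited readEntitySet/writeEntitySet the other handlers use. A standalone sketch of the same task construction follows; it is illustrative only, with a default ReplicationSpec standing in for context.eventOnlyReplicationSpec() (an assumption, since the real handler always uses the event's replication scope), and the class and method names are hypothetical.

import java.io.Serializable;
import java.util.HashSet;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;

final class DropDatabaseTaskSketch {
  private DropDatabaseTaskSketch() {
  }

  static Task<? extends Serializable> build(String dbName) {
    // ifExists = true, matching the handler; the no-arg ReplicationSpec is a placeholder.
    DropDatabaseDesc desc = new DropDatabaseDesc(dbName, true, new ReplicationSpec());
    // Empty entity sets, as in the handler above.
    DDLWork work = new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc);
    return TaskFactory.get(work);
  }
}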
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class DropPartitionHandler, method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  try {
    DropPartitionMessage msg = deserializer.getDropPartitionMessage(context.dmd.getPayload());
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName;
    Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = genPartSpecs(new Table(msg.getTableObj()), msg.getPartitions());
    if (partSpecs.size() > 0) {
      DropTableDesc dropPtnDesc = new DropTableDesc(actualDbName + "." + actualTblName, partSpecs, null, true,
          context.eventOnlyReplicationSpec());
      Task<DDLWork> dropPtnTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc));
      context.log.debug("Added drop ptn task : {}:{},{}", dropPtnTask.getId(), dropPtnDesc.getTableName(), msg.getPartitions());
      updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
      return Collections.singletonList(dropPtnTask);
    } else {
      throw new SemanticException("DROP PARTITION EVENT does not return any part descs for event message :" + context.dmd.getPayload());
    }
  } catch (Exception e) {
    throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
  }
}
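Three of the handlers shown here repeat the same catch-block idiom: rethrow a SemanticException as-is, otherwise wrap the cause with a fixed message. A sketch of that idiom as a shared helper is below; the class and method names are hypothetical, and only SemanticException itself is assumed.

import org.apache.hadoop.hive.ql.parse.SemanticException;

final class MessageErrors {
  private MessageErrors() {
  }

  // Preserve an existing SemanticException, otherwise wrap the cause
  // with the same message the handlers above use.
  static SemanticException asSemanticException(Exception e) {
    return (e instanceof SemanticException)
        ? (SemanticException) e
        : new SemanticException("Error reading message members", e);
  }
}

A catch block would then reduce to throw MessageErrors.asSemanticException(e);.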
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class RenamePartitionHandler, method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload());
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName;
  Map<String, String> newPartSpec = new LinkedHashMap<>();
  Map<String, String> oldPartSpec = new LinkedHashMap<>();
  String tableName = actualDbName + "." + actualTblName;
  try {
    Table tblObj = msg.getTableObj();
    Iterator<String> beforeIterator = msg.getPtnObjBefore().getValuesIterator();
    Iterator<String> afterIterator = msg.getPtnObjAfter().getValuesIterator();
    for (FieldSchema fs : tblObj.getPartitionKeys()) {
      oldPartSpec.put(fs.getName(), beforeIterator.next());
      newPartSpec.put(fs.getName(), afterIterator.next());
    }
  } catch (Exception e) {
    throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
  }
  RenamePartitionDesc renamePtnDesc = new RenamePartitionDesc(tableName, oldPartSpec, newPartSpec, context.eventOnlyReplicationSpec());
  Task<DDLWork> renamePtnTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, renamePtnDesc));
  context.log.debug("Added rename ptn task : {}:{}->{}", renamePtnTask.getId(), oldPartSpec, newPartSpec);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, newPartSpec);
  return Collections.singletonList(renamePtnTask);
}
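The loop above zips the table's partition keys with the before/after partition values to build ordered spec maps. A minimal sketch of that step as a helper follows, assuming only the FieldSchema accessor already used in the handler; the class and method names are invented for illustration.

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.FieldSchema;

final class PartSpecs {
  private PartSpecs() {
  }

  // Pair each partition key with the next partition value, preserving key order.
  static Map<String, String> zip(List<FieldSchema> partitionKeys, Iterator<String> values) {
    Map<String, String> spec = new LinkedHashMap<>();
    for (FieldSchema fs : partitionKeys) {
      spec.put(fs.getName(), values.next());
    }
    return spec;
  }
}

With such a helper, oldPartSpec and newPartSpec would each be built by one call, e.g. PartSpecs.zip(tblObj.getPartitionKeys(), beforeIterator).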