use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
the class AddForeignKeyHandler method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
  List<SQLForeignKey> fks = null;
  try {
    fks = msg.getForeignKeys();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
  if (fks.isEmpty()) {
    return tasks;
  }
  String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
  String actualTblName = context.isTableNameEmpty() ? fks.get(0).getFktable_name() : context.tableName;
  for (SQLForeignKey fk : fks) {
    // Remap the parent (pk-side) db only if it matched the fk-side db at the
    // source. Otherwise, keep the original parent db name.
    if (fk.getPktable_db().equals(fk.getFktable_db())) {
      fk.setPktable_db(actualDbName);
    }
    fk.setFktable_db(actualDbName);
    fk.setFktable_name(actualTblName);
  }
  AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(), fks,
      new ArrayList<SQLUniqueConstraint>(), context.eventOnlyReplicationSpec());
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
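AddForeignKeyHandler has one wrinkle the other add-* handlers lack: the referenced (pk-side) table's database is remapped only when it matched the fk-side database at the source. A standalone sketch of just that rule, with the class, method, and parameter names chosen here purely for illustration:

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

// Sketch of the remapping rule above: the parent (pk-side) db is rewritten to
// the target db only when parent and child tables lived in the same database
// at the source; cross-db references keep their original parent db.
final class FkRemapSketch {
  static void remap(SQLForeignKey fk, String targetDb, String targetTbl) {
    if (fk.getPktable_db().equals(fk.getFktable_db())) {
      fk.setPktable_db(targetDb);
    }
    fk.setFktable_db(targetDb);
    fk.setFktable_name(targetTbl);
  }
}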
use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
the class AddPrimaryKeyHandler method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
  List<SQLPrimaryKey> pks = null;
  try {
    pks = msg.getPrimaryKeys();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
  if (pks.isEmpty()) {
    return tasks;
  }
  String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
  String actualTblName = context.isTableNameEmpty() ? pks.get(0).getTable_name() : context.tableName;
  for (SQLPrimaryKey pk : pks) {
    pk.setTable_db(actualDbName);
    pk.setTable_name(actualTblName);
  }
  AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, pks, new ArrayList<SQLForeignKey>(),
      new ArrayList<SQLUniqueConstraint>(), context.eventOnlyReplicationSpec());
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
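The catch block above repeats verbatim across these handlers, and RenameTableHandler (further down) condenses the same logic into a ternary. A hypothetical utility capturing the shared rethrow pattern:

import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical utility (not part of Hive): return a caught exception as a
// SemanticException, preserving it when it already is one, wrapping otherwise,
// exactly as each handler's catch block does.
final class RethrowSketch {
  static SemanticException asSemanticException(Exception e) {
    return (e instanceof SemanticException)
        ? (SemanticException) e
        : new SemanticException("Error reading message members", e);
  }
}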
use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
the class AddUniqueConstraintHandler method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
  List<SQLUniqueConstraint> uks = null;
  try {
    uks = msg.getUniqueConstraints();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
  if (uks.isEmpty()) {
    return tasks;
  }
  String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
  String actualTblName = context.isTableNameEmpty() ? uks.get(0).getTable_name() : context.tableName;
  for (SQLUniqueConstraint uk : uks) {
    uk.setTable_db(actualDbName);
    uk.setTable_name(actualTblName);
  }
  AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(),
      new ArrayList<SQLForeignKey>(), uks, context.eventOnlyReplicationSpec());
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
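Taken together, the three add-* handlers call the same five-argument AlterTableDesc constructor and differ only in which constraint list is populated; the other two are passed as empty lists. A sketch making that symmetry explicit, with hypothetical factory names:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

// Hypothetical factories mirroring the three constructor calls above:
// exactly one constraint list is non-empty in each case.
final class AddConstraintDescSketch {
  static AlterTableDesc forPrimaryKeys(String tbl, List<SQLPrimaryKey> pks, ReplicationSpec spec) {
    return new AlterTableDesc(tbl, pks, new ArrayList<SQLForeignKey>(), new ArrayList<SQLUniqueConstraint>(), spec);
  }

  static AlterTableDesc forForeignKeys(String tbl, List<SQLForeignKey> fks, ReplicationSpec spec) {
    return new AlterTableDesc(tbl, new ArrayList<SQLPrimaryKey>(), fks, new ArrayList<SQLUniqueConstraint>(), spec);
  }

  static AlterTableDesc forUniqueConstraints(String tbl, List<SQLUniqueConstraint> uks, ReplicationSpec spec) {
    return new AlterTableDesc(tbl, new ArrayList<SQLPrimaryKey>(), new ArrayList<SQLForeignKey>(), uks, spec);
  }
}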
use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
the class DropConstraintHandler method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload());
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName;
  String constraintName = msg.getConstraint();
  AlterTableDesc dropConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, constraintName, context.eventOnlyReplicationSpec());
  Task<DDLWork> dropConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc));
  context.log.debug("Added drop constraint task : {}:{}", dropConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(dropConstraintsTask);
}
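By contrast, DropConstraintHandler uses the overload that takes the qualified table name, the constraint name, and the replication spec. A minimal sketch of that call shape, with a hypothetical helper name:

import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

// Hypothetical helper mirroring the drop-constraint call above.
final class DropConstraintDescSketch {
  static AlterTableDesc dropConstraintDesc(String qualifiedTable, String constraintName, ReplicationSpec spec) {
    return new AlterTableDesc(qualifiedTable, constraintName, spec);
  }
}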
use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
the class RenameTableHandler method handle.
@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
  if (!context.isTableNameEmpty()) {
    throw new SemanticException("RENAMES of tables are not supported for table-level replication");
  }
  try {
    String oldDbName = msg.getTableObjBefore().getDbName();
    String newDbName = msg.getTableObjAfter().getDbName();
    if (!context.isDbNameEmpty()) {
      // When loading into a specific db, a rename cannot cross databases:
      // oldDbName and newDbName must be the same.
      if (!oldDbName.equalsIgnoreCase(newDbName)) {
        throw new SemanticException("Cannot replicate an event renaming a table across" + " databases into a db level load " + oldDbName + "->" + newDbName);
      } else {
        // Both were the same, and can be replaced by the new db we're loading into.
        oldDbName = context.dbName;
        newDbName = context.dbName;
      }
    }
    String oldName = StatsUtils.getFullyQualifiedTableName(oldDbName, msg.getTableObjBefore().getTableName());
    String newName = StatsUtils.getFullyQualifiedTableName(newDbName, msg.getTableObjAfter().getTableName());
    AlterTableDesc renameTableDesc = new AlterTableDesc(oldName, newName, false, context.eventOnlyReplicationSpec());
    Task<DDLWork> renameTableTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, renameTableDesc));
    context.log.debug("Added rename table task : {}:{}->{}", renameTableTask.getId(), oldName, newName);
    // oldDbName and newDbName *will* be the same if we're here, and updatedMetadata
    // assumes as much. If that should ever change, this will need reworking.
    updatedMetadata.set(context.dmd.getEventTo().toString(), newDbName, msg.getTableObjAfter().getTableName(), null);
    return Collections.singletonList(renameTableTask);
  } catch (Exception e) {
    throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
  }
}
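RenameTableHandler uses yet another overload, taking the old and new fully qualified names plus a boolean flag, passed as false above; reading the surrounding Hive code suggests the flag distinguishes view renames, though that is an inference rather than something this snippet states. A hypothetical helper mirroring the call:

import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

// Hypothetical helper mirroring the rename call above; the boolean is passed
// as false for plain tables, matching the handler.
final class RenameDescSketch {
  static AlterTableDesc renameDesc(String oldQualifiedName, String newQualifiedName, ReplicationSpec spec) {
    return new AlterTableDesc(oldQualifiedName, newQualifiedName, false, spec);
  }
}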