Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The handle method of the AddUniqueConstraintHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
  List<SQLUniqueConstraint> uks;
  try {
    uks = msg.getUniqueConstraints();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (uks.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
  final String actualTblName = uks.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLUniqueConstraint uk : uks) {
    uk.setTable_db(actualDbName);
    uk.setTable_name(actualTblName);
  }
  Constraints constraints = new Constraints(null, null, null, uks, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
      context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
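All of the constraint handlers in this section open with the same try/catch idiom: rethrow a SemanticException unchanged, wrap anything else. A minimal sketch of that idiom factored into a reusable helper; the readOrWrap name is hypothetical and not part of Hive:

import java.util.concurrent.Callable;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical helper, not in Hive: unwrap-or-wrap exactly as the inline
// try/catch blocks in these handlers do.
static <T> T readOrWrap(Callable<T> reader) throws SemanticException {
  try {
    return reader.call();
  } catch (Exception e) {
    if (e instanceof SemanticException) {
      throw (SemanticException) e;
    }
    throw new SemanticException("Error reading message members", e);
  }
}

// A handler body could then read:
// List<SQLUniqueConstraint> uks = readOrWrap(msg::getUniqueConstraints);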
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The handle method of the CreateDatabaseHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  MetaData metaData;
  try {
    FileSystem fs = FileSystem.get(new Path(context.location).toUri(), context.hiveConf);
    metaData = EximUtil.readMetaData(fs, new Path(context.location, EximUtil.METADATA_NAME));
  } catch (IOException e) {
    throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
  }
  Database db = metaData.getDatabase();
  String destinationDBName = context.dbName == null ? db.getName() : context.dbName;
  CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, null, true, db.getParameters());
  Task<DDLWork> createDBTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true,
      context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  if (!db.getParameters().isEmpty()) {
    AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationDBName, db.getParameters(),
        context.eventOnlyReplicationSpec());
    Task<DDLWork> alterDbProperties = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true,
        context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    createDBTask.addDependentTask(alterDbProperties);
  }
  if (StringUtils.isNotEmpty(db.getOwnerName())) {
    AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName,
        new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec());
    Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner, true,
        context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    createDBTask.addDependentTask(alterDbTask);
  }
  updatedMetadata.set(context.dmd.getEventTo().toString(), destinationDBName, null, null);
  return Collections.singletonList(createDBTask);
}
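Every TaskFactory.get call in this section wraps its descriptor in a DDLWork built from the same five replication arguments. A sketch of that repetition pulled into one place; the replicationDdlTask name is hypothetical, and it assumes DDLWork's replication constructor accepts any DDLDesc (the calls above pass several different descriptor types):

import java.util.HashSet;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;

// Hypothetical helper, not in Hive: one place for the DDLWork boilerplate
// (empty entity sets, replication flag, dump directory, metric collector).
// Context here is the handler Context type used throughout this section.
static Task<DDLWork> replicationDdlTask(Context context, DDLDesc desc) {
  return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc, true,
      context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
}

With that helper, the three constructions above reduce to replicationDdlTask(context, createDatabaseDesc) and so on, while addDependentTask still sequences the two optional ALTER DATABASE steps after the CREATE DATABASE task.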
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The handle method of the RenameTableHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
  try {
    Table tableObjBefore = msg.getTableObjBefore();
    Table tableObjAfter = msg.getTableObjAfter();
    String oldDbName = tableObjBefore.getDbName();
    String newDbName = tableObjAfter.getDbName();
    if (!context.isDbNameEmpty()) {
      // For a db-level load, the old and new db names must be the same.
      if (!oldDbName.equalsIgnoreCase(newDbName)) {
        throw new SemanticException("Cannot replicate an event renaming a table across"
            + " databases into a db level load " + oldDbName + "->" + newDbName);
      } else {
        // Both were the same, and can be replaced by the new db we're loading into.
        oldDbName = context.dbName;
        newDbName = context.dbName;
      }
    }
    TableName oldName = TableName.fromString(tableObjBefore.getTableName(), null, oldDbName);
    TableName newName = TableName.fromString(tableObjAfter.getTableName(), null, newDbName);
    ReplicationSpec replicationSpec = context.eventOnlyReplicationSpec();
    AlterTableRenameDesc renameTableDesc = new AlterTableRenameDesc(oldName, replicationSpec, false, newName.getNotEmptyDbTable());
    renameTableDesc.setWriteId(msg.getWriteId());
    Task<DDLWork> renameTableTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, renameTableDesc, true,
        context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    context.log.debug("Added rename table task : {}:{}->{}", renameTableTask.getId(),
        oldName.getNotEmptyDbTable(), newName.getNotEmptyDbTable());
    // oldDbName and newDbName *will* be the same if we're here; if that should
    // ever change, this will need reworking.
    updatedMetadata.set(context.dmd.getEventTo().toString(), newDbName, tableObjAfter.getTableName(), null);
    return ReplUtils.addChildTask(renameTableTask);
  } catch (Exception e) {
    throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
  }
}
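The db-remap rule above is easy to state on its own: a warehouse-level load keeps the event's db names, while a db-level load accepts only same-db renames and redirects them into the target db. A standalone sketch of that rule, using the hypothetical resolveDb helper name:

import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical helper, not in Hive: targetDb is null/empty for a
// warehouse-level load and set for a db-level load.
static String resolveDb(String oldDb, String newDb, String targetDb) throws SemanticException {
  if (targetDb == null || targetDb.isEmpty()) {
    return newDb; // warehouse-level load: keep the event's db name
  }
  if (!oldDb.equalsIgnoreCase(newDb)) {
    // A cross-database rename cannot be folded into a single target db.
    throw new SemanticException("Cannot replicate an event renaming a table across"
        + " databases into a db level load " + oldDb + "->" + newDb);
  }
  return targetDb; // both sides collapse into the db we're loading into
}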
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The handle method of the AddCheckConstraintHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddCheckConstraintMessage msg = deserializer.getAddCheckConstraintMessage(context.dmd.getPayload());
  List<SQLCheckConstraint> ccs;
  try {
    ccs = msg.getCheckConstraints();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (ccs.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? ccs.get(0).getTable_db() : context.dbName;
  final String actualTblName = ccs.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLCheckConstraint ck : ccs) {
    ck.setTable_db(actualDbName);
    ck.setTable_name(actualTblName);
  }
  Constraints constraints = new Constraints(null, null, null, null, null, ccs);
  AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
      context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
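The only differences between this handler and the unique-constraint one are the message type and which positional slot of the Constraints constructor is filled. Judging from the three constructor calls in this section, the slot order appears to be (primary keys, foreign keys, not-null, unique, default, check); a sketch of hypothetical factory methods, not part of Hive, that make those positions explicit:

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;

// Hypothetical factories; the slot order is inferred from the calls above.
static Constraints ofUniqueConstraints(List<SQLUniqueConstraint> uks) {
  return new Constraints(null, null, null, uks, null, null);
}

static Constraints ofCheckConstraints(List<SQLCheckConstraint> ccs) {
  return new Constraints(null, null, null, null, null, ccs);
}

static Constraints ofForeignKeys(List<SQLForeignKey> fks) {
  return new Constraints(null, fks, null, null, null, null);
}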
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The handle method of the AddForeignKeyHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
  List<SQLForeignKey> fks;
  try {
    fks = msg.getForeignKeys();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (fks.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
  final String actualTblName = fks.get(0).getFktable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLForeignKey fk : fks) {
    // If the parent table lives in the same database as the foreign-key table,
    // remap it to the destination db as well. Otherwise, keep its db name.
    if (fk.getPktable_db().equals(fk.getFktable_db())) {
      fk.setPktable_db(actualDbName);
    }
    fk.setFktable_db(actualDbName);
    fk.setFktable_name(actualTblName);
  }
  Constraints constraints = new Constraints(null, fks, null, null, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
      context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
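The loop above encodes the one subtlety of replicating foreign keys: the child (FK) side always moves to the destination database, while the parent (PK) side moves only if it lived in the same source database as the child. A standalone sketch of that rule; remapForeignKey is a hypothetical name, not a Hive method:

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

// Hypothetical helper mirroring the remapping loop above.
static void remapForeignKey(SQLForeignKey fk, String destDbName, String destTblName) {
  if (fk.getPktable_db().equals(fk.getFktable_db())) {
    // Parent and child were co-located, so the parent follows the child.
    fk.setPktable_db(destDbName);
  }
  // The child side is always rewritten to the destination table.
  fk.setFktable_db(destDbName);
  fk.setFktable_name(destTblName);
}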