Use of org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage in project hive by apache.
The class DbNotificationListener, method onAddUniqueConstraint.
/**
 * @param addUniqueConstraintEvent add unique constraint event
 * @throws MetaException
 */
@Override
public void onAddUniqueConstraint(AddUniqueConstraintEvent addUniqueConstraintEvent) throws MetaException {
  List<SQLUniqueConstraint> cols = addUniqueConstraintEvent.getUniqueConstraintCols();
  if (cols.size() > 0) {
    AddUniqueConstraintMessage msg = MessageBuilder.getInstance()
        .buildAddUniqueConstraintMessage(addUniqueConstraintEvent.getUniqueConstraintCols());
    NotificationEvent event = new NotificationEvent(0, now(), EventType.ADD_UNIQUECONSTRAINT.toString(),
        msgEncoder.getSerializer().serialize(msg));
    event.setCatName(cols.get(0).isSetCatName() ? cols.get(0).getCatName() : DEFAULT_CATALOG_NAME);
    event.setDbName(cols.get(0).getTable_db());
    event.setTableName(cols.get(0).getTable_name());
    process(event, addUniqueConstraintEvent);
  }
}
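For reference, a minimal sketch (not taken from the Hive source) of building the same payload by hand. The class name, method name, and sample table/column values are made up, and the no-arg SQLUniqueConstraint constructor with Thrift setters is an assumption; only buildAddUniqueConstraintMessage and MessageSerializer.serialize mirror the listener above, with the serializer assumed to be the one obtained from msgEncoder.getSerializer().

// Hypothetical helper, not part of Hive: builds one single-column unique
// constraint and serializes it the same way the listener above does.
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage;
import org.apache.hadoop.hive.metastore.messaging.MessageBuilder;
import org.apache.hadoop.hive.metastore.messaging.MessageSerializer;

public class UniqueConstraintPayloadSketch {
  static String buildPayload(MessageSerializer serializer) {
    // Thrift-generated bean: populate the fields the listener reads back
    // (catalog, db, table) plus the column the constraint covers. The values
    // here are sample data, not anything from the Hive source.
    SQLUniqueConstraint uk = new SQLUniqueConstraint();
    uk.setCatName("hive");
    uk.setTable_db("sample_db");
    uk.setTable_name("sample_table");
    uk.setColumn_name("id");
    uk.setKey_seq(1);
    uk.setUk_name("uk_sample_table_id");
    List<SQLUniqueConstraint> cols = Collections.singletonList(uk);
    // Same calls onAddUniqueConstraint uses to produce the event payload.
    AddUniqueConstraintMessage msg =
        MessageBuilder.getInstance().buildAddUniqueConstraintMessage(cols);
    return serializer.serialize(msg);
  }
}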
Use of org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage in project hive by apache.
The class LoadConstraint, method isUniqueConstraintsAlreadyLoaded.
private boolean isUniqueConstraintsAlreadyLoaded(String uksMsgString) throws Exception {
  AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(uksMsgString);
  List<SQLUniqueConstraint> uksInMsg = msg.getUniqueConstraints();
  if (uksInMsg.isEmpty()) {
    return true;
  }
  String dbName = StringUtils.isBlank(dbNameToLoadIn) ? uksInMsg.get(0).getTable_db() : dbNameToLoadIn;
  List<SQLUniqueConstraint> uks;
  try {
    uks = context.hiveDb.getUniqueConstraintList(dbName, uksInMsg.get(0).getTable_name());
  } catch (NoSuchObjectException e) {
    return false;
  }
  return CollectionUtils.isNotEmpty(uks);
}
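The read path is the inverse of the listener's write path. A minimal sketch, assuming a MessageDeserializer instance like the deserializer field used above; the class and method names are hypothetical.

// Hypothetical reader, not part of Hive: recovers the constraint list from a
// serialized payload using the same call LoadConstraint and
// AddUniqueConstraintHandler use.
import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage;
import org.apache.hadoop.hive.metastore.messaging.MessageDeserializer;

public class UniqueConstraintPayloadReader {
  static List<SQLUniqueConstraint> read(String payload, MessageDeserializer deserializer) {
    AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(payload);
    // An empty list here is what makes isUniqueConstraintsAlreadyLoaded
    // return true without querying the metastore.
    return msg.getUniqueConstraints();
  }
}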
Use of org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage in project hive by apache.
The class AddUniqueConstraintHandler, method handle.
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
  List<SQLUniqueConstraint> uks;
  try {
    uks = msg.getUniqueConstraints();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (uks.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
  final String actualTblName = uks.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLUniqueConstraint uk : uks) {
    uk.setTable_db(actualDbName);
    uk.setTable_name(actualTblName);
  }
  Constraints constraints = new Constraints(null, null, null, uks, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
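Both LoadConstraint and this handler rewrite the deserialized constraints to point at the target database before any work is created. A minimal sketch of that shared pattern, with a hypothetical targetDb parameter standing in for dbNameToLoadIn / context.dbName; every API call below appears in the snippets above.

// Hypothetical sketch, not part of Hive: retargets the constraints in a
// payload at the database being loaded into, as the handler does above.
import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage;
import org.apache.hadoop.hive.metastore.messaging.MessageDeserializer;

public class UniqueConstraintRetarget {
  static List<SQLUniqueConstraint> retarget(String payload, String targetDb,
      MessageDeserializer deserializer) {
    AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(payload);
    List<SQLUniqueConstraint> uks = msg.getUniqueConstraints();
    if (uks.isEmpty() || targetDb == null || targetDb.isEmpty()) {
      return uks; // nothing to rewrite, or keep the source database name
    }
    String tblName = uks.get(0).getTable_name();
    for (SQLUniqueConstraint uk : uks) {
      uk.setTable_db(targetDb);   // same rewrite the handler performs above
      uk.setTable_name(tblName);
    }
    return uks;
  }
}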