Use of org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc in project hive by apache.
The handle method of the AddNotNullConstraintHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddNotNullConstraintMessage msg = deserializer.getAddNotNullConstraintMessage(context.dmd.getPayload());
  List<SQLNotNullConstraint> nns;
  try {
    nns = msg.getNotNullConstraints();
  } catch (SemanticException e) {
    throw e;
  } catch (Exception e) {
    throw new SemanticException("Error reading message members", e);
  }

  if (nns.isEmpty()) {
    return new ArrayList<>();
  }

  // Retarget the constraint rows from the source database to the database being replicated into.
  final String actualDbName = context.isDbNameEmpty() ? nns.get(0).getTable_db() : context.dbName;
  final String actualTblName = nns.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLNotNullConstraint nn : nns) {
    nn.setTable_db(actualDbName);
    nn.setTable_name(actualTblName);
  }

  Constraints constraints = new Constraints(null, null, nns, null, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
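The only substantive difference between the five handlers in this listing is which positional slot of the Constraints constructor receives a non-null list. Inferred from the call sites alone (the parameter names below are illustrative, not taken from the source), the slots line up as follows; no real call site here fills more than one:

new Constraints(
    pks,   // slot 1: SQLPrimaryKey list     -- AddPrimaryKeyHandler
    null,  // slot 2: foreign keys           -- not exercised in this listing
    nns,   // slot 3: SQLNotNullConstraint   -- AddNotNullConstraintHandler
    uks,   // slot 4: SQLUniqueConstraint    -- AddUniqueConstraintHandler
    dcs,   // slot 5: SQLDefaultConstraint   -- AddDefaultConstraintHandler
    ccs);  // slot 6: SQLCheckConstraint     -- AddCheckConstraintHandler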
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc in project hive by apache.
The handle method of the AddDefaultConstraintHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddDefaultConstraintMessage msg = deserializer.getAddDefaultConstraintMessage(context.dmd.getPayload());
  List<SQLDefaultConstraint> dcs;
  try {
    dcs = msg.getDefaultConstraints();
  } catch (SemanticException e) {
    throw e;
  } catch (Exception e) {
    throw new SemanticException("Error reading message members", e);
  }

  if (dcs.isEmpty()) {
    return new ArrayList<>();
  }

  final String actualDbName = context.isDbNameEmpty() ? dcs.get(0).getTable_db() : context.dbName;
  final String actualTblName = dcs.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLDefaultConstraint dc : dcs) {
    dc.setTable_db(actualDbName);
    dc.setTable_name(actualTblName);
  }

  Constraints constraints = new Constraints(null, null, null, null, dcs, null);
  AlterTableAddConstraintDesc addConstraintsDesc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
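Every handler repeats the same read-then-wrap dance when pulling members off the replication message: pass a SemanticException through untouched, wrap anything else. A hypothetical helper that would capture the pattern (the MessageReader interface and readMessageMember name are invented for illustration and are not part of the Hive source):

import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical helper: funnels any checked exception thrown by a message
// getter into SemanticException, letting SemanticException pass through.
@FunctionalInterface
interface MessageReader<T> {
  T read() throws Exception;
}

static <T> T readMessageMember(MessageReader<T> reader) throws SemanticException {
  try {
    return reader.read();
  } catch (SemanticException e) {
    throw e;
  } catch (Exception e) {
    throw new SemanticException("Error reading message members", e);
  }
}

// Usage inside a handler:
//   List<SQLDefaultConstraint> dcs = readMessageMember(msg::getDefaultConstraints);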
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc in project hive by apache.
The handle method of the AddPrimaryKeyHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
  List<SQLPrimaryKey> pks;
  try {
    pks = msg.getPrimaryKeys();
  } catch (SemanticException e) {
    throw e;
  } catch (Exception e) {
    throw new SemanticException("Error reading message members", e);
  }

  if (pks.isEmpty()) {
    return new ArrayList<>();
  }

  final String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
  final String actualTblName = pks.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLPrimaryKey pk : pks) {
    pk.setTable_db(actualDbName);
    pk.setTable_name(actualTblName);
  }

  Constraints constraints = new Constraints(pks, null, null, null, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
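Each handler resolves the target table through TableName.fromString before rewriting the constraint rows. As these call sites use it, the first argument is the bare table name and the remaining two supply defaults for the catalog and database; a small usage sketch under that assumption (the values are made up):

// Assumption: fromString(name, defaultCatalog, defaultDatabase) fills in
// whatever parts the name itself does not carry.
TableName t = TableName.fromString("orders", null, "sales_db");
// t now identifies the table "orders" in database "sales_db".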
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc in project hive by apache.
The handle method of the AddUniqueConstraintHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
  List<SQLUniqueConstraint> uks;
  try {
    uks = msg.getUniqueConstraints();
  } catch (SemanticException e) {
    throw e;
  } catch (Exception e) {
    throw new SemanticException("Error reading message members", e);
  }

  if (uks.isEmpty()) {
    return new ArrayList<>();
  }

  final String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
  final String actualTblName = uks.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLUniqueConstraint uk : uks) {
    uk.setTable_db(actualDbName);
    uk.setTable_name(actualTblName);
  }

  Constraints constraints = new Constraints(null, null, null, uks, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc in project hive by apache.
The handle method of the AddCheckConstraintHandler class:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddCheckConstraintMessage msg = deserializer.getAddCheckConstraintMessage(context.dmd.getPayload());
  List<SQLCheckConstraint> ccs;
  try {
    ccs = msg.getCheckConstraints();
  } catch (SemanticException e) {
    throw e;
  } catch (Exception e) {
    throw new SemanticException("Error reading message members", e);
  }

  if (ccs.isEmpty()) {
    return new ArrayList<>();
  }

  final String actualDbName = context.isDbNameEmpty() ? ccs.get(0).getTable_db() : context.dbName;
  final String actualTblName = ccs.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLCheckConstraint ck : ccs) {
    ck.setTable_db(actualDbName);
    ck.setTable_name(actualTblName);
  }

  Constraints constraints = new Constraints(null, null, null, null, null, ccs);
  AlterTableAddConstraintDesc addConstraintsDesc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
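Everything after the message read is structurally identical across the five handlers. A hypothetical consolidation is sketched below; the addConstraintTasks name and its functional parameters are invented for illustration, while readEntitySet, writeEntitySet and updatedMetadata are fields of the surrounding handler class, as in the listings above:

import java.util.function.BiConsumer;
import java.util.function.Function;

// Hypothetical shared skeleton, not part of the Hive source.
private <T> List<Task<?>> addConstraintTasks(Context context, List<T> rows,
    Function<T, String> dbOf, Function<T, String> tableOf,
    BiConsumer<T, String> setDb, BiConsumer<T, String> setTable,
    Function<List<T>, Constraints> wrap) throws SemanticException {
  if (rows.isEmpty()) {
    return new ArrayList<>();
  }
  String actualDbName = context.isDbNameEmpty() ? dbOf.apply(rows.get(0)) : context.dbName;
  String actualTblName = tableOf.apply(rows.get(0));
  TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (T row : rows) {
    setDb.accept(row, actualDbName);
    setTable.accept(row, actualTblName);
  }
  AlterTableAddConstraintDesc desc =
      new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), wrap.apply(rows));
  Task<DDLWork> task = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, desc, true,
          context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  context.log.debug("Added add constraints task : {}:{}", task.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(task);
}

// The check-constraint handler, for instance, would then reduce to:
//   return addConstraintTasks(context, ccs,
//       SQLCheckConstraint::getTable_db, SQLCheckConstraint::getTable_name,
//       SQLCheckConstraint::setTable_db, SQLCheckConstraint::setTable_name,
//       list -> new Constraints(null, null, null, null, null, list));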