Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.
The class AlterTableAddConstraintAnalyzer, method analyzeCommand.
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
  // TODO CAT - for now always use the default catalog. Eventually will want to see if
  // the user specified a catalog
  List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
  List<SQLForeignKey> foreignKeys = new ArrayList<>();
  List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
  List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
  ASTNode constraintNode = (ASTNode) command.getChild(0);
  switch (constraintNode.getToken().getType()) {
    case HiveParser.TOK_UNIQUE:
      ConstraintsUtils.processUniqueConstraints(tableName, constraintNode, uniqueConstraints);
      break;
    case HiveParser.TOK_PRIMARY_KEY:
      ConstraintsUtils.processPrimaryKeys(tableName, constraintNode, primaryKeys);
      break;
    case HiveParser.TOK_FOREIGN_KEY:
      ConstraintsUtils.processForeignKeys(tableName, constraintNode, foreignKeys);
      break;
    case HiveParser.TOK_CHECK_CONSTRAINT:
      ConstraintsUtils.processCheckConstraints(tableName, constraintNode, null, checkConstraints, command, ctx.getTokenRewriteStream());
      break;
    default:
      throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintNode.getToken().getText()));
  }
  Constraints constraints = new Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, checkConstraints);
  AlterTableAddConstraintDesc desc = new AlterTableAddConstraintDesc(tableName, null, constraints);
  Table table = getTable(tableName);
  if (AcidUtils.isTransactionalTable(table)) {
    setAcidDdlDesc(desc);
  }
  addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
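The switch above only sorts the parsed constraint into one of the four lists; it is the Constraints container that carries them to the DDL task. The sketch below illustrates the slot order used by its constructor, as inferred from the calls in this section (primary keys, foreign keys, not-null, unique, default, check). The class and method names in it are invented for illustration.

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;

public final class UniqueConstraintSlotSketch {
  // Mirrors the TOK_UNIQUE branch: only the unique-constraint slot is filled;
  // the analyzer above passes empty lists or null for the unused slots.
  static Constraints uniqueOnly(List<SQLUniqueConstraint> uniqueConstraints) {
    return new Constraints(null, null, null, uniqueConstraints, null, null);
  }
}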
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.
The class AlterTableChangeColumnAnalyzer, method analyzeCommand.
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
  // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
  String oldColumnName = command.getChild(0).getText().toLowerCase();
  String newColumnName = command.getChild(1).getText().toLowerCase();
  String newType = getTypeStringFromAST((ASTNode) command.getChild(2));
  Table table = getTable(tableName);
  SkewedInfo skewInfo = table.getTTable().getSd().getSkewedInfo();
  if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColumnName)) {
    throw new SemanticException(oldColumnName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
  }
  String newComment = null;
  boolean first = false;
  String flagCol = null;
  boolean isCascade = false;
  ASTNode constraintChild = null;
  for (int i = 3; i < command.getChildCount(); i++) {
    ASTNode child = (ASTNode) command.getChild(i);
    switch (child.getToken().getType()) {
      case HiveParser.StringLiteral:
        newComment = unescapeSQLString(child.getText());
        break;
      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
        flagCol = unescapeIdentifier(child.getChild(0).getText());
        break;
      case HiveParser.KW_FIRST:
        first = true;
        break;
      case HiveParser.TOK_CASCADE:
        isCascade = true;
        break;
      case HiveParser.TOK_RESTRICT:
        break;
      default:
        constraintChild = child;
    }
  }
  Constraints constraints = getConstraints(tableName, command, newColumnName, table, constraintChild);
  AlterTableChangeColumnDesc desc = new AlterTableChangeColumnDesc(tableName, partitionSpec, isCascade, constraints, unescapeIdentifier(oldColumnName), unescapeIdentifier(newColumnName), newType, newComment, first, flagCol);
  if (AcidUtils.isTransactionalTable(table)) {
    // Note: we might actually need it only when certain changes (e.g. name or type?) are made.
    setAcidDdlDesc(desc);
  }
  addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
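As a reading aid, the statements below line up with the grammar comment and the token loop above; the table and column names are made up and the constants are purely illustrative.

public final class ChangeColumnStatementExamples {
  // StringLiteral child -> newComment
  static final String WITH_COMMENT =
      "ALTER TABLE t CHANGE COLUMN old_c new_c INT COMMENT 'renamed column'";
  // TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION child -> flagCol = "other_c"
  static final String AFTER_COLUMN =
      "ALTER TABLE t CHANGE COLUMN old_c new_c INT AFTER other_c";
  // KW_FIRST child -> first = true
  static final String MOVE_FIRST =
      "ALTER TABLE t CHANGE COLUMN old_c new_c INT FIRST";
  // TOK_CASCADE child -> isCascade = true; TOK_RESTRICT is accepted and ignored
  static final String WITH_CASCADE =
      "ALTER TABLE t CHANGE COLUMN old_c new_c INT CASCADE";
}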
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.
The class AddNotNullConstraintHandler, method handle.
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddNotNullConstraintMessage msg = deserializer.getAddNotNullConstraintMessage(context.dmd.getPayload());
  List<SQLNotNullConstraint> nns;
  try {
    nns = msg.getNotNullConstraints();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (nns.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? nns.get(0).getTable_db() : context.dbName;
  final String actualTblName = nns.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLNotNullConstraint nn : nns) {
    nn.setTable_db(actualDbName);
    nn.setTable_name(actualTblName);
  }
  Constraints constraints = new Constraints(null, null, nns, null, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
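The loop above points every replicated constraint at the database and table resolved on the target before the descriptor is built. A minimal sketch of that step, using only the setters shown in the handler; the helper class and its name are assumptions:

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;

public final class NotNullRetargetSketch {
  // Retargets each constraint from the replication message at the target db/table,
  // then packs the list into the not-null slot of Constraints.
  static Constraints retarget(List<SQLNotNullConstraint> nns, String db, String table) {
    for (SQLNotNullConstraint nn : nns) {
      nn.setTable_db(db);
      nn.setTable_name(table);
    }
    return new Constraints(null, null, nns, null, null, null);
  }
}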
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.
The class AddDefaultConstraintHandler, method handle.
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddDefaultConstraintMessage msg = deserializer.getAddDefaultConstraintMessage(context.dmd.getPayload());
  List<SQLDefaultConstraint> dcs;
  try {
    dcs = msg.getDefaultConstraints();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (dcs.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? dcs.get(0).getTable_db() : context.dbName;
  final String actualTblName = dcs.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLDefaultConstraint dc : dcs) {
    dc.setTable_db(actualDbName);
    dc.setTable_name(actualTblName);
  }
  Constraints constraints = new Constraints(null, null, null, null, dcs, null);
  AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
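This handler differs from the not-null one only in the message accessor and in which slot of Constraints it fills (the fifth, for default constraints). The sketch below shows how the table name and descriptor seen above fit together outside a replication run; the import path of AlterTableAddConstraintDesc and all names are assumptions, and a null replication spec stands in for context.eventOnlyReplicationSpec():

import java.util.Collections;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;
import org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc;

public final class DefaultConstraintDescSketch {
  static AlterTableAddConstraintDesc build(SQLDefaultConstraint dc) throws Exception {
    // Resolve the qualified table name the same way the handler does.
    TableName tName = TableName.fromString(dc.getTable_name(), null, dc.getTable_db());
    Constraints constraints = new Constraints(null, null, null, null, Collections.singletonList(dc), null);
    // Null replication spec: a plain DDL descriptor rather than a replication event.
    return new AlterTableAddConstraintDesc(tName, null, constraints);
  }
}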
Use of org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints in project hive by apache.
The class AddPrimaryKeyHandler, method handle.
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
  List<SQLPrimaryKey> pks;
  try {
    pks = msg.getPrimaryKeys();
  } catch (Exception e) {
    if (!(e instanceof SemanticException)) {
      throw new SemanticException("Error reading message members", e);
    } else {
      throw (SemanticException) e;
    }
  }
  List<Task<?>> tasks = new ArrayList<Task<?>>();
  if (pks.isEmpty()) {
    return tasks;
  }
  final String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
  final String actualTblName = pks.get(0).getTable_name();
  final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
  for (SQLPrimaryKey pk : pks) {
    pk.setTable_db(actualDbName);
    pk.setTable_name(actualTblName);
  }
  Constraints constraints = new Constraints(pks, null, null, null, null, null);
  AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
  Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
  tasks.add(addConstraintsTask);
  context.log.debug("Added add constraints task : {}:{}", addConstraintsTask.getId(), actualTblName);
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  return Collections.singletonList(addConstraintsTask);
}
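To round out the slot map, the primary-key handler fills the first position of Constraints. A minimal sketch with invented names; only setTable_db and setTable_name appear in the handler above, so the column and key-sequence setters are assumptions about the Thrift-generated SQLPrimaryKey class:

import java.util.Collections;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;

public final class PrimaryKeySlotSketch {
  static Constraints singleColumnPk(String db, String table, String column) {
    SQLPrimaryKey pk = new SQLPrimaryKey();
    pk.setTable_db(db);
    pk.setTable_name(table);
    pk.setColumn_name(column); // assumed Thrift setter
    pk.setKey_seq(1);          // assumed Thrift setter; 1-based position within the key
    // Slot order observed in this section: PK, FK, NOT NULL, UNIQUE, DEFAULT, CHECK.
    return new Constraints(Collections.singletonList(pk), null, null, null, null, null);
  }
}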