Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache: class ObjectStore, method addNotNullConstraints.
private List<SQLNotNullConstraint> addNotNullConstraints(List<SQLNotNullConstraint> nns, boolean retrieveCD)
    throws InvalidObjectException, MetaException {
  List<MConstraint> cstrs = new ArrayList<>();
  String constraintName;
  for (SQLNotNullConstraint nn : nns) {
    final String catName = normalizeIdentifier(nn.getCatName());
    final String tableDB = normalizeIdentifier(nn.getTable_db());
    final String tableName = normalizeIdentifier(nn.getTable_name());
    final String columnName = normalizeIdentifier(nn.getColumn_name());
    // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
    // For instance, this is the case when we are creating the table.
    AttachedMTableInfo nParentTable = getMTable(catName, tableDB, tableName, retrieveCD);
    MTable parentTable = nParentTable.mtbl;
    if (parentTable == null) {
      throw new InvalidObjectException("Parent table not found: " + tableName);
    }
    MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
    int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
    if (parentIntegerIndex == -1) {
      if (parentTable.getPartitionKeys() != null) {
        parentCD = null;
        parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
      }
      if (parentIntegerIndex == -1) {
        throw new InvalidObjectException("Parent column not found: " + columnName);
      }
    }
    if (nn.getNn_name() == null) {
      constraintName = generateConstraintName(parentTable, tableDB, tableName, columnName, "nn");
    } else {
      constraintName = normalizeIdentifier(nn.getNn_name());
      if (constraintNameAlreadyExists(parentTable, constraintName)) {
        String fqConstraintName = String.format("%s.%s.%s", parentTable.getDatabase().getName(),
            parentTable.getTableName(), constraintName);
        throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
      }
    }
    int enableValidateRely = (nn.isEnable_cstr() ? 4 : 0) + (nn.isValidate_cstr() ? 2 : 0) + (nn.isRely_cstr() ? 1 : 0);
    MConstraint muk = new MConstraint(constraintName,
        1, // Not null constraint should reference a single column
        MConstraint.NOT_NULL_CONSTRAINT, null, null, enableValidateRely,
        parentTable, null, parentCD, null, null, parentIntegerIndex);
    cstrs.add(muk);
    // Add normalized identifiers back to the result
    nn.setCatName(catName);
    nn.setTable_db(tableDB);
    nn.setTable_name(tableName);
    nn.setColumn_name(columnName);
    nn.setNn_name(constraintName);
  }
  pm.makePersistentAll(cstrs);
  return nns;
}
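For reference, the enableValidateRely value computed near the end of the loop packs the three constraint characteristics into a single int (bit 2 = ENABLE, bit 1 = VALIDATE, bit 0 = RELY). Below is a minimal, hypothetical sketch of how a caller might build an SQLNotNullConstraint and reproduce that packing; the catalog, database, table and column names are made up for illustration.

import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;

public class NotNullConstraintSketch {

  // Mirrors the expression in addNotNullConstraints: 4*ENABLE + 2*VALIDATE + 1*RELY.
  static int packEnableValidateRely(SQLNotNullConstraint nn) {
    return (nn.isEnable_cstr() ? 4 : 0) + (nn.isValidate_cstr() ? 2 : 0) + (nn.isRely_cstr() ? 1 : 0);
  }

  public static void main(String[] args) {
    SQLNotNullConstraint nn = new SQLNotNullConstraint();
    nn.setCatName("hive");            // illustrative catalog name
    nn.setTable_db("default");        // illustrative database
    nn.setTable_name("employees");    // illustrative table
    nn.setColumn_name("emp_id");      // illustrative column
    nn.setNn_name(null);              // null lets the metastore generate a constraint name
    nn.setEnable_cstr(false);
    nn.setValidate_cstr(false);
    nn.setRely_cstr(true);            // the optimizer may rely on the constraint
    System.out.println(packEnableValidateRely(nn));  // prints 1
  }
}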
Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache: class ReplDumpTask, method dumpConstraintMetadata.
private void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot) throws Exception {
  try {
    Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME);
    Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
    Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
    Hive db = getHive();
    List<SQLPrimaryKey> pks = db.getPrimaryKeyList(dbName, tblName);
    List<SQLForeignKey> fks = db.getForeignKeyList(dbName, tblName);
    List<SQLUniqueConstraint> uks = db.getUniqueConstraintList(dbName, tblName);
    List<SQLNotNullConstraint> nns = db.getNotNullConstraintList(dbName, tblName);
    if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) {
      try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
        ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf);
        serializer.writeTo(jsonWriter, null);
      }
    }
    if (fks != null && !fks.isEmpty()) {
      try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
        ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf);
        serializer.writeTo(jsonWriter, null);
      }
    }
  } catch (NoSuchObjectException e) {
    // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
    // Just log a debug message and skip it.
    LOG.debug(e.getMessage());
  }
}
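As a usage note, the constraint lists dumped above come from the Hive metadata client. A minimal sketch of listing a table's NOT NULL constraints through the same Hive.getNotNullConstraintList(...) call, assuming a configured session; the database and table names are placeholders.

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class ListNotNullConstraintsSketch {
  public static void main(String[] args) throws Exception {
    Hive db = Hive.get();  // uses the HiveConf of the current session
    List<SQLNotNullConstraint> nns = db.getNotNullConstraintList("default", "employees");
    for (SQLNotNullConstraint nn : nns) {
      System.out.println(nn.getNn_name() + " on " + nn.getTable_name() + "." + nn.getColumn_name());
    }
  }
}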
Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache: class BaseSemanticAnalyzer, method getColumns.
/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the constraint lists (primary keys, foreign keys, unique,
 * not null, default and check constraints) if any are declared.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream,
    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
    List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints,
    List<SQLCheckConstraint> checkConstraints) throws SemanticException {
  List<FieldSchema> colList = new ArrayList<FieldSchema>();
  Tree parent = ast.getParent();
  for (int i = 0; i < ast.getChildCount(); i++) {
    FieldSchema col = new FieldSchema();
    ASTNode child = (ASTNode) ast.getChild(i);
    switch (child.getToken().getType()) {
      case HiveParser.TOK_UNIQUE: {
        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
        processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], child, uniqueConstraints);
      }
      break;
      case HiveParser.TOK_PRIMARY_KEY: {
        if (!primaryKeys.isEmpty()) {
          throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
              "Cannot exist more than one primary key definition for the same table"));
        }
        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
        processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys);
      }
      break;
      case HiveParser.TOK_FOREIGN_KEY: {
        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
        processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys);
      }
      break;
      default:
        Tree grandChild = child.getChild(0);
        if (grandChild != null) {
          String name = grandChild.getText();
          if (lowerCase) {
            name = name.toLowerCase();
          }
          checkColumnName(name);
          // child 0 is the name of the column
          col.setName(unescapeIdentifier(name));
          // child 1 is the type of the column
          ASTNode typeChild = (ASTNode) (child.getChild(1));
          col.setType(getTypeStringFromAST(typeChild));
          // child 2 is the optional comment of the column
          // child 3 is the optional constraint
          ASTNode constraintChild = null;
          if (child.getChildCount() == 4) {
            col.setComment(unescapeSQLString(child.getChild(2).getText()));
            constraintChild = (ASTNode) child.getChild(3);
          } else if (child.getChildCount() == 3
              && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
            col.setComment(unescapeSQLString(child.getChild(2).getText()));
          } else if (child.getChildCount() == 3) {
            constraintChild = (ASTNode) child.getChild(2);
          }
          if (constraintChild != null) {
            String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
            // Process column constraint
            switch (constraintChild.getToken().getType()) {
              case HiveParser.TOK_CHECK_CONSTRAINT:
                processCheckConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), checkConstraints, typeChild, tokenRewriteStream);
                break;
              case HiveParser.TOK_DEFAULT_VALUE:
                processDefaultConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), defaultConstraints, typeChild);
                break;
              case HiveParser.TOK_NOT_NULL:
                processNotNullConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), notNullConstraints);
                break;
              case HiveParser.TOK_UNIQUE:
                processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), uniqueConstraints);
                break;
              case HiveParser.TOK_PRIMARY_KEY:
                if (!primaryKeys.isEmpty()) {
                  throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
                      "Cannot exist more than one primary key definition for the same table"));
                }
                processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), primaryKeys);
                break;
              case HiveParser.TOK_FOREIGN_KEY:
                processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, foreignKeys);
                break;
              default:
                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
                    constraintChild.getToken().getText()));
            }
          }
        }
        colList.add(col);
        break;
    }
  }
  return colList;
}
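To see how the NOT NULL list is populated from a column declared with NOT NULL, here is a hedged sketch of how a caller would use this overload: the constraint lists are passed in empty and filled as out-parameters. The helper name is made up, and it is assumed that ast is the column-list node of a parsed CREATE TABLE statement and tokens the rewrite stream from the same parse.

import java.util.ArrayList;
import java.util.List;
import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class GetColumnsUsageSketch {

  // ast and tokens are supplied by the caller from the parsed DDL statement.
  static List<SQLNotNullConstraint> collectNotNullConstraints(ASTNode ast, TokenRewriteStream tokens)
      throws SemanticException {
    List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
    List<SQLForeignKey> foreignKeys = new ArrayList<>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    List<SQLNotNullConstraint> notNullConstraints = new ArrayList<>();
    List<SQLDefaultConstraint> defaultConstraints = new ArrayList<>();
    List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
    BaseSemanticAnalyzer.getColumns(ast, true, tokens, primaryKeys, foreignKeys,
        uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
    return notNullConstraints;  // one entry per column declared NOT NULL
  }
}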
Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache: class DDLSemanticAnalyzer, method analyzeAlterTableRenameCol.
private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast, HashMap<String, String> partSpec)
    throws SemanticException {
  String newComment = null;
  boolean first = false;
  String flagCol = null;
  boolean isCascade = false;
  // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
  String oldColName = ast.getChild(0).getText();
  String newColName = ast.getChild(1).getText();
  String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
  ASTNode constraintChild = null;
  int childCount = ast.getChildCount();
  for (int i = 3; i < childCount; i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    switch (child.getToken().getType()) {
      case HiveParser.StringLiteral:
        newComment = unescapeSQLString(child.getText());
        break;
      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
        flagCol = unescapeIdentifier(child.getChild(0).getText());
        break;
      case HiveParser.KW_FIRST:
        first = true;
        break;
      case HiveParser.TOK_CASCADE:
        isCascade = true;
        break;
      case HiveParser.TOK_RESTRICT:
        break;
      default:
        constraintChild = (ASTNode) child;
    }
  }
  List<SQLPrimaryKey> primaryKeys = null;
  List<SQLForeignKey> foreignKeys = null;
  List<SQLUniqueConstraint> uniqueConstraints = null;
  List<SQLNotNullConstraint> notNullConstraints = null;
  List<SQLDefaultConstraint> defaultConstraints = null;
  List<SQLCheckConstraint> checkConstraints = null;
  if (constraintChild != null) {
    // Process column constraint
    switch (constraintChild.getToken().getType()) {
      case HiveParser.TOK_CHECK_CONSTRAINT:
        checkConstraints = new ArrayList<>();
        processCheckConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            checkConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
        break;
      case HiveParser.TOK_DEFAULT_VALUE:
        defaultConstraints = new ArrayList<>();
        processDefaultConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            defaultConstraints, (ASTNode) ast.getChild(2));
        break;
      case HiveParser.TOK_NOT_NULL:
        notNullConstraints = new ArrayList<>();
        processNotNullConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            notNullConstraints);
        break;
      case HiveParser.TOK_UNIQUE:
        uniqueConstraints = new ArrayList<>();
        processUniqueConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName),
            uniqueConstraints);
        break;
      case HiveParser.TOK_PRIMARY_KEY:
        primaryKeys = new ArrayList<>();
        processPrimaryKeys(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), primaryKeys);
        break;
      case HiveParser.TOK_FOREIGN_KEY:
        foreignKeys = new ArrayList<>();
        processForeignKeys(qualified[0], qualified[1], constraintChild, foreignKeys);
        break;
      default:
        throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
    }
  }
  /* Validate the operation of renaming a column name. */
  Table tab = getTable(qualified);
  if (checkConstraints != null && !checkConstraints.isEmpty()) {
    validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
  }
  if (tab.getTableType() == TableType.EXTERNAL_TABLE
      && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg(
        "Constraints are disallowed with External tables. " + "Only RELY is allowed."));
  }
  SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
  if ((null != skewInfo) && (null != skewInfo.getSkewedColNames())
      && skewInfo.getSkewedColNames().contains(oldColName)) {
    throw new SemanticException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
  }
  String tblName = getDotName(qualified);
  AlterTableDesc alterTblDesc;
  if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null
      && defaultConstraints == null && checkConstraints == null) {
    alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
        unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade);
  } else {
    alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
        unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade,
        primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
  }
  addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
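The hasEnabledOrValidatedConstraints check above is what restricts external tables to RELY-only constraints. Below is an illustrative re-implementation of that idea for NOT NULL constraints only (not the actual DDLSemanticAnalyzer helper): anything ENABLEd or VALIDATEd is rejected, while a pure RELY constraint passes.

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;

final class ExternalTableConstraintCheckSketch {

  // Returns true when any NOT NULL constraint is ENABLEd or VALIDATEd, i.e. anything
  // stronger than RELY, which external tables are not allowed to carry.
  static boolean hasEnabledOrValidatedNotNull(List<SQLNotNullConstraint> nns) {
    if (nns == null) {
      return false;
    }
    for (SQLNotNullConstraint nn : nns) {
      if (nn.isEnable_cstr() || nn.isValidate_cstr()) {
        return true;
      }
    }
    return false;
  }
}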
Use of org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint in project hive by apache: class BaseSemanticAnalyzer, method getColumns (newer overload that also takes a Configuration).
/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the constraint lists (primary keys, foreign keys, unique,
 * not null, default and check constraints) if any are declared.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream,
    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
    List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints,
    List<SQLCheckConstraint> checkConstraints, Configuration conf) throws SemanticException {
  List<FieldSchema> colList = new ArrayList<FieldSchema>();
  Tree parent = ast.getParent();
  for (int i = 0; i < ast.getChildCount(); i++) {
    FieldSchema col = new FieldSchema();
    ASTNode child = (ASTNode) ast.getChild(i);
    switch (child.getToken().getType()) {
      case HiveParser.TOK_UNIQUE: {
        final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0),
            MetaStoreUtils.getDefaultCatalog(conf));
        // TODO CAT - for now always use the default catalog. Eventually will want to see if
        // the user specified a catalog
        ConstraintsUtils.processUniqueConstraints(tName, child, uniqueConstraints);
      }
      break;
      case HiveParser.TOK_PRIMARY_KEY: {
        if (!primaryKeys.isEmpty()) {
          throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
              "Cannot exist more than one primary key definition for the same table"));
        }
        final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
        ConstraintsUtils.processPrimaryKeys(tName, child, primaryKeys);
      }
      break;
      case HiveParser.TOK_FOREIGN_KEY: {
        final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
        ConstraintsUtils.processForeignKeys(tName, child, foreignKeys);
      }
      break;
      case HiveParser.TOK_CHECK_CONSTRAINT: {
        final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0),
            MetaStoreUtils.getDefaultCatalog(conf));
        // TODO CAT - for now always use the default catalog. Eventually will want to see if
        // the user specified a catalog
        ConstraintsUtils.processCheckConstraints(tName, child, null, checkConstraints, null, tokenRewriteStream);
      }
      break;
      default:
        Tree grandChild = child.getChild(0);
        if (grandChild != null) {
          String name = grandChild.getText();
          if (lowerCase) {
            name = name.toLowerCase();
          }
          checkColumnName(name);
          // child 0 is the name of the column
          col.setName(unescapeIdentifier(name));
          // child 1 is the type of the column
          ASTNode typeChild = (ASTNode) (child.getChild(1));
          col.setType(getTypeStringFromAST(typeChild));
          // child 2 is the optional comment of the column
          // child 3 is the optional constraint
          ASTNode constraintChild = null;
          if (child.getChildCount() == 4) {
            col.setComment(unescapeSQLString(child.getChild(2).getText()));
            constraintChild = (ASTNode) child.getChild(3);
          } else if (child.getChildCount() == 3
              && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
            col.setComment(unescapeSQLString(child.getChild(2).getText()));
          } else if (child.getChildCount() == 3) {
            constraintChild = (ASTNode) child.getChild(2);
          }
          if (constraintChild != null) {
            final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0),
                MetaStoreUtils.getDefaultCatalog(conf));
            // Process column constraint
            switch (constraintChild.getToken().getType()) {
              case HiveParser.TOK_CHECK_CONSTRAINT:
                ConstraintsUtils.processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                    checkConstraints, typeChild, tokenRewriteStream);
                break;
              case HiveParser.TOK_DEFAULT_VALUE:
                ConstraintsUtils.processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                    defaultConstraints, typeChild, tokenRewriteStream);
                break;
              case HiveParser.TOK_NOT_NULL:
                ConstraintsUtils.processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                    notNullConstraints);
                break;
              case HiveParser.TOK_UNIQUE:
                ConstraintsUtils.processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                    uniqueConstraints);
                break;
              case HiveParser.TOK_PRIMARY_KEY:
                if (!primaryKeys.isEmpty()) {
                  throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
                      "Cannot exist more than one primary key definition for the same table"));
                }
                ConstraintsUtils.processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()),
                    primaryKeys);
                break;
              case HiveParser.TOK_FOREIGN_KEY:
                ConstraintsUtils.processForeignKeys(tName, constraintChild, foreignKeys);
                break;
              default:
                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
                    constraintChild.getToken().getText()));
            }
          }
        }
        colList.add(col);
        break;
    }
  }
  return colList;
}
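This overload differs from the earlier one mainly in that it resolves a TableName against a default catalog taken from the configuration. A minimal sketch of that resolution step, using the same MetaStoreUtils.getDefaultCatalog(conf) call seen above; the import path for MetaStoreUtils and the use of a fresh Configuration are assumptions for illustration, as real callers pass in the session's HiveConf.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;

public class DefaultCatalogSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // The default catalog is typically "hive" unless overridden in the metastore config.
    System.out.println(MetaStoreUtils.getDefaultCatalog(conf));
  }
}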