Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
The class TestDefaultConstraint, method getNoSuchCatalog.
@Test
public void getNoSuchCatalog() throws TException {
  DefaultConstraintsRequest rqst = new DefaultConstraintsRequest("nosuch",
      testTables[0].getDbName(), testTables[0].getTableName());
  List<SQLDefaultConstraint> dv = client.getDefaultConstraints(rqst);
  Assert.assertTrue(dv.isEmpty());
}
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
The class TestDefaultConstraint, method getNoSuchDb.
@Test
public void getNoSuchDb() throws TException {
  DefaultConstraintsRequest rqst = new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME,
      "nosuch", testTables[0].getTableName());
  List<SQLDefaultConstraint> dv = client.getDefaultConstraints(rqst);
  Assert.assertTrue(dv.isEmpty());
}
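By contrast, a lookup against an existing table returns the stored constraints. The snippet below is a minimal sketch, assuming a table "tbl" in database "db" under the default catalog that actually carries a DEFAULT constraint; the table and database names are hypothetical, and the accessors are assumed to follow the Thrift-generated getter names for the SQLDefaultConstraint fields visible in the constructor call further down:

DefaultConstraintsRequest rqst = new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, "db", "tbl");
List<SQLDefaultConstraint> dcs = client.getDefaultConstraints(rqst);
for (SQLDefaultConstraint dc : dcs) {
  // e.g. "price DEFAULT 0.0" -- column name and default value as stored in the metastore
  System.out.println(dc.getColumn_name() + " DEFAULT " + dc.getDefault_value());
}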
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
The class ObjectStore, method getDefaultConstraintsViaJdo.
private List<SQLDefaultConstraint> getDefaultConstraintsViaJdo(String catName, String dbName, String tblName) {
  boolean commited = false;
  List<SQLDefaultConstraint> defaultConstraints = null;
  Query query = null;
  try {
    openTransaction();
    query = pm.newQuery(MConstraint.class,
        "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&"
            + " parentTable.database.catalogName == catName &&"
            + " constraintType == MConstraint.DEFAULT_CONSTRAINT");
    query.declareParameters("java.lang.String tbl_name, java.lang.String db_name, java.lang.String catName");
    Collection<?> constraints = (Collection<?>) query.execute(tblName, dbName, catName);
    pm.retrieveAll(constraints);
    defaultConstraints = new ArrayList<>();
    for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
      MConstraint currConstraint = (MConstraint) i.next();
      List<MFieldSchema> cols = currConstraint.getParentColumn() != null
          ? currConstraint.getParentColumn().getCols()
          : currConstraint.getParentTable().getPartitionKeys();
      int enableValidateRely = currConstraint.getEnableValidateRely();
      boolean enable = (enableValidateRely & 4) != 0;
      boolean validate = (enableValidateRely & 2) != 0;
      boolean rely = (enableValidateRely & 1) != 0;
      defaultConstraints.add(new SQLDefaultConstraint(catName, dbName, tblName,
          cols.get(currConstraint.getParentIntegerIndex()).getName(),
          currConstraint.getDefaultValue(), currConstraint.getConstraintName(),
          enable, validate, rely));
    }
    commited = commitTransaction();
  } finally {
    rollbackAndCleanup(commited, query);
  }
  return defaultConstraints;
}
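The enableValidateRely field packs the three constraint traits into a single integer: bit 2 (value 4) is ENABLE, bit 1 (value 2) is VALIDATE, and bit 0 (value 1) is RELY, exactly as the masks above decode it. A minimal encoding sketch follows; the helper name is hypothetical, not part of ObjectStore:

// Hypothetical inverse of the decoding above: pack the three flags into one int.
static int encodeEnableValidateRely(boolean enable, boolean validate, boolean rely) {
  return (enable ? 4 : 0) | (validate ? 2 : 0) | (rely ? 1 : 0);
}
// encodeEnableValidateRely(true, false, true) == 5: ENABLE and RELY set, VALIDATE clear.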
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
The class BaseSemanticAnalyzer, method getColumns.
/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populates the constraint lists (primary key, foreign key, unique,
 * not-null, default, and check constraints) if any are present.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase,
    TokenRewriteStream tokenRewriteStream, List<SQLPrimaryKey> primaryKeys,
    List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
    List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints,
    List<SQLCheckConstraint> checkConstraints) throws SemanticException {
  List<FieldSchema> colList = new ArrayList<FieldSchema>();
  Tree parent = ast.getParent();
  for (int i = 0; i < ast.getChildCount(); i++) {
    FieldSchema col = new FieldSchema();
    ASTNode child = (ASTNode) ast.getChild(i);
    switch (child.getToken().getType()) {
      case HiveParser.TOK_UNIQUE: {
        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
        processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], child, uniqueConstraints);
      }
      break;
      case HiveParser.TOK_PRIMARY_KEY: {
        if (!primaryKeys.isEmpty()) {
          throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
              "Cannot exist more than one primary key definition for the same table"));
        }
        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
        processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys);
      }
      break;
      case HiveParser.TOK_FOREIGN_KEY: {
        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
        processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys);
      }
      break;
      default:
        Tree grandChild = child.getChild(0);
        if (grandChild != null) {
          String name = grandChild.getText();
          if (lowerCase) {
            name = name.toLowerCase();
          }
          checkColumnName(name);
          // child 0 is the name of the column
          col.setName(unescapeIdentifier(name));
          // child 1 is the type of the column
          ASTNode typeChild = (ASTNode) (child.getChild(1));
          col.setType(getTypeStringFromAST(typeChild));
          // child 2 is the optional comment of the column
          // child 3 is the optional constraint
          ASTNode constraintChild = null;
          if (child.getChildCount() == 4) {
            col.setComment(unescapeSQLString(child.getChild(2).getText()));
            constraintChild = (ASTNode) child.getChild(3);
          } else if (child.getChildCount() == 3
              && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
            col.setComment(unescapeSQLString(child.getChild(2).getText()));
          } else if (child.getChildCount() == 3) {
            constraintChild = (ASTNode) child.getChild(2);
          }
          if (constraintChild != null) {
            String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
            // Process column constraint
            switch (constraintChild.getToken().getType()) {
              case HiveParser.TOK_CHECK_CONSTRAINT:
                processCheckConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), checkConstraints, typeChild, tokenRewriteStream);
                break;
              case HiveParser.TOK_DEFAULT_VALUE:
                processDefaultConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), defaultConstraints, typeChild);
                break;
              case HiveParser.TOK_NOT_NULL:
                processNotNullConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), notNullConstraints);
                break;
              case HiveParser.TOK_UNIQUE:
                processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), uniqueConstraints);
                break;
              case HiveParser.TOK_PRIMARY_KEY:
                if (!primaryKeys.isEmpty()) {
                  throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
                      "Cannot exist more than one primary key definition for the same table"));
                }
                processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
                    ImmutableList.of(col.getName()), primaryKeys);
                break;
              case HiveParser.TOK_FOREIGN_KEY:
                processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, foreignKeys);
                break;
              default:
                throw new SemanticException(
                    ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
            }
          }
        }
        colList.add(col);
        break;
    }
  }
  return colList;
}
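The branching on child.getChildCount() above is what separates a trailing column comment from a trailing constraint. As a rough map, with illustrative column definitions (assumed shapes, not taken from Hive's grammar tests):

// 2 children [name, type]:                         id INT
// 3 children, third is a StringLiteral:            id INT COMMENT 'key'
// 3 children, third is a constraint node:          id INT DEFAULT 0
// 4 children [name, type, comment, constraint]:    id INT COMMENT 'key' DEFAULT 0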
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
The class DDLSemanticAnalyzer, method analyzeAlterTableRenameCol.
private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast,
    HashMap<String, String> partSpec) throws SemanticException {
  String newComment = null;
  boolean first = false;
  String flagCol = null;
  boolean isCascade = false;
  // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
  String oldColName = ast.getChild(0).getText();
  String newColName = ast.getChild(1).getText();
  String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
  ASTNode constraintChild = null;
  int childCount = ast.getChildCount();
  for (int i = 3; i < childCount; i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    switch (child.getToken().getType()) {
      case HiveParser.StringLiteral:
        newComment = unescapeSQLString(child.getText());
        break;
      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
        flagCol = unescapeIdentifier(child.getChild(0).getText());
        break;
      case HiveParser.KW_FIRST:
        first = true;
        break;
      case HiveParser.TOK_CASCADE:
        isCascade = true;
        break;
      case HiveParser.TOK_RESTRICT:
        break;
      default:
        constraintChild = (ASTNode) child;
    }
  }
  List<SQLPrimaryKey> primaryKeys = null;
  List<SQLForeignKey> foreignKeys = null;
  List<SQLUniqueConstraint> uniqueConstraints = null;
  List<SQLNotNullConstraint> notNullConstraints = null;
  List<SQLDefaultConstraint> defaultConstraints = null;
  List<SQLCheckConstraint> checkConstraints = null;
  if (constraintChild != null) {
    // Process column constraint
    switch (constraintChild.getToken().getType()) {
      case HiveParser.TOK_CHECK_CONSTRAINT:
        checkConstraints = new ArrayList<>();
        processCheckConstraints(qualified[0], qualified[1], constraintChild,
            ImmutableList.of(newColName), checkConstraints, (ASTNode) ast.getChild(2),
            this.ctx.getTokenRewriteStream());
        break;
      case HiveParser.TOK_DEFAULT_VALUE:
        defaultConstraints = new ArrayList<>();
        processDefaultConstraints(qualified[0], qualified[1], constraintChild,
            ImmutableList.of(newColName), defaultConstraints, (ASTNode) ast.getChild(2));
        break;
      case HiveParser.TOK_NOT_NULL:
        notNullConstraints = new ArrayList<>();
        processNotNullConstraints(qualified[0], qualified[1], constraintChild,
            ImmutableList.of(newColName), notNullConstraints);
        break;
      case HiveParser.TOK_UNIQUE:
        uniqueConstraints = new ArrayList<>();
        processUniqueConstraints(qualified[0], qualified[1], constraintChild,
            ImmutableList.of(newColName), uniqueConstraints);
        break;
      case HiveParser.TOK_PRIMARY_KEY:
        primaryKeys = new ArrayList<>();
        processPrimaryKeys(qualified[0], qualified[1], constraintChild,
            ImmutableList.of(newColName), primaryKeys);
        break;
      case HiveParser.TOK_FOREIGN_KEY:
        foreignKeys = new ArrayList<>();
        processForeignKeys(qualified[0], qualified[1], constraintChild, foreignKeys);
        break;
      default:
        throw new SemanticException(
            ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
    }
  }
  /* Validate the operation of renaming a column name. */
  Table tab = getTable(qualified);
  if (checkConstraints != null && !checkConstraints.isEmpty()) {
    validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
  }
  if (tab.getTableType() == TableType.EXTERNAL_TABLE
      && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg(
        "Constraints are disallowed with External tables. Only RELY is allowed."));
  }
  SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
  if ((null != skewInfo) && (null != skewInfo.getSkewedColNames())
      && skewInfo.getSkewedColNames().contains(oldColName)) {
    throw new SemanticException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
  }
  String tblName = getDotName(qualified);
  AlterTableDesc alterTblDesc;
  if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null
      && notNullConstraints == null && defaultConstraints == null && checkConstraints == null) {
    alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
        unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade);
  } else {
    alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName),
        unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade,
        primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints,
        checkConstraints);
  }
  addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
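The leading comment in the method spells out the accepted statement shape. A few illustrative HiveQL statements that would exercise the branches above; the table and column names are hypothetical:

// col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
String[] examples = {
    "ALTER TABLE t CHANGE COLUMN old_c new_c INT",                    // plain rename/retype
    "ALTER TABLE t CHANGE COLUMN old_c new_c INT COMMENT 'renamed'",  // StringLiteral branch
    "ALTER TABLE t CHANGE COLUMN old_c new_c INT DEFAULT 0",          // constraintChild branch
    "ALTER TABLE t CHANGE COLUMN old_c new_c INT AFTER other_c",      // CHANGECOL_AFTER_POSITION branch
    "ALTER TABLE t CHANGE COLUMN old_c new_c INT FIRST CASCADE"       // KW_FIRST and TOK_CASCADE branches
};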