Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
From the class BaseSemanticAnalyzer, method getColumns:
/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the primaryKeys and foreignKeys if any.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream,
    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
    List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints,
    List<SQLCheckConstraint> checkConstraints, Configuration conf) throws SemanticException {
  List<FieldSchema> colList = new ArrayList<FieldSchema>();
  Tree parent = ast.getParent();
  for (int i = 0; i < ast.getChildCount(); i++) {
    FieldSchema col = new FieldSchema();
    ASTNode child = (ASTNode) ast.getChild(i);
    switch (child.getToken().getType()) {
    case HiveParser.TOK_UNIQUE: {
      final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0),
          MetaStoreUtils.getDefaultCatalog(conf));
      // TODO CAT - for now always use the default catalog. Eventually will want to see if
      // the user specified a catalog
      ConstraintsUtils.processUniqueConstraints(tName, child, uniqueConstraints);
    }
      break;
    case HiveParser.TOK_PRIMARY_KEY: {
      if (!primaryKeys.isEmpty()) {
        throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
            "Cannot exist more than one primary key definition for the same table"));
      }
      final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
      ConstraintsUtils.processPrimaryKeys(tName, child, primaryKeys);
    }
      break;
    case HiveParser.TOK_FOREIGN_KEY: {
      final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
      ConstraintsUtils.processForeignKeys(tName, child, foreignKeys);
    }
      break;
    case HiveParser.TOK_CHECK_CONSTRAINT: {
      final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0),
          MetaStoreUtils.getDefaultCatalog(conf));
      // TODO CAT - for now always use the default catalog. Eventually will want to see if
      // the user specified a catalog
      ConstraintsUtils.processCheckConstraints(tName, child, null, checkConstraints, null, tokenRewriteStream);
    }
      break;
    default:
      Tree grandChild = child.getChild(0);
      if (grandChild != null) {
        String name = grandChild.getText();
        if (lowerCase) {
          name = name.toLowerCase();
        }
        checkColumnName(name);
        // child 0 is the name of the column
        col.setName(unescapeIdentifier(name));
        // child 1 is the type of the column
        ASTNode typeChild = (ASTNode) (child.getChild(1));
        col.setType(getTypeStringFromAST(typeChild));
        // child 2 is the optional comment of the column
        // child 3 is the optional constraint
        ASTNode constraintChild = null;
        if (child.getChildCount() == 4) {
          col.setComment(unescapeSQLString(child.getChild(2).getText()));
          constraintChild = (ASTNode) child.getChild(3);
        } else if (child.getChildCount() == 3
            && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
          col.setComment(unescapeSQLString(child.getChild(2).getText()));
        } else if (child.getChildCount() == 3) {
          constraintChild = (ASTNode) child.getChild(2);
        }
        if (constraintChild != null) {
          final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0),
              MetaStoreUtils.getDefaultCatalog(conf));
          // Process column constraint
          switch (constraintChild.getToken().getType()) {
          case HiveParser.TOK_CHECK_CONSTRAINT:
            ConstraintsUtils.processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                checkConstraints, typeChild, tokenRewriteStream);
            break;
          case HiveParser.TOK_DEFAULT_VALUE:
            ConstraintsUtils.processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                defaultConstraints, typeChild, tokenRewriteStream);
            break;
          case HiveParser.TOK_NOT_NULL:
            ConstraintsUtils.processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                notNullConstraints);
            break;
          case HiveParser.TOK_UNIQUE:
            ConstraintsUtils.processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()),
                uniqueConstraints);
            break;
          case HiveParser.TOK_PRIMARY_KEY:
            if (!primaryKeys.isEmpty()) {
              throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
                  "Cannot exist more than one primary key definition for the same table"));
            }
            ConstraintsUtils.processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()), primaryKeys);
            break;
          case HiveParser.TOK_FOREIGN_KEY:
            ConstraintsUtils.processForeignKeys(tName, constraintChild, foreignKeys);
            break;
          default:
            throw new SemanticException(
                ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
          }
        }
      }
      colList.add(col);
      break;
    }
  }
  return colList;
}
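
For orientation, here is a minimal caller sketch under stated assumptions: columnListNode is the TOK_TABCOLLIST node of a CREATE TABLE statement, ctx exposes the parser's TokenRewriteStream, and conf is the active Configuration; all three names are illustrative rather than taken from the Hive source. The method returns the column schemas and fills the constraint lists as side effects.

// Hypothetical caller sketch; the empty lists are populated by getColumns.
List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
List<SQLForeignKey> foreignKeys = new ArrayList<>();
List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
List<SQLNotNullConstraint> notNullConstraints = new ArrayList<>();
List<SQLDefaultConstraint> defaultConstraints = new ArrayList<>();
List<SQLCheckConstraint> checkConstraints = new ArrayList<>();

List<FieldSchema> cols = BaseSemanticAnalyzer.getColumns(columnListNode, true,
    ctx.getTokenRewriteStream(), primaryKeys, foreignKeys, uniqueConstraints,
    notNullConstraints, defaultConstraints, checkConstraints, conf);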
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
From the class Hive, method getTableConstraints:
public TableConstraintsInfo getTableConstraints(String dbName, String tblName, boolean fetchReliable,
    boolean fetchEnabled, long tableId) throws HiveException {
  PerfLogger perfLogger = SessionState.getPerfLogger();
  perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS);
  try {
    ValidWriteIdList validWriteIdList = getValidWriteIdList(dbName, tblName);
    AllTableConstraintsRequest request = new AllTableConstraintsRequest(dbName, tblName, getDefaultCatalog(conf));
    request.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.writeToString() : null);
    request.setTableId(tableId);
    SQLAllTableConstraints tableConstraints = getMSC().getAllTableConstraints(request);
    // Keep only constraints whose RELY flag is set when reliable constraints were requested.
    if (fetchReliable && tableConstraints != null) {
      if (CollectionUtils.isNotEmpty(tableConstraints.getPrimaryKeys())) {
        tableConstraints.setPrimaryKeys(tableConstraints.getPrimaryKeys().stream()
            .filter(SQLPrimaryKey::isRely_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getForeignKeys())) {
        tableConstraints.setForeignKeys(tableConstraints.getForeignKeys().stream()
            .filter(SQLForeignKey::isRely_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getUniqueConstraints())) {
        tableConstraints.setUniqueConstraints(tableConstraints.getUniqueConstraints().stream()
            .filter(SQLUniqueConstraint::isRely_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getNotNullConstraints())) {
        tableConstraints.setNotNullConstraints(tableConstraints.getNotNullConstraints().stream()
            .filter(SQLNotNullConstraint::isRely_cstr).collect(Collectors.toList()));
      }
    }
    // Keep only ENABLEd CHECK and DEFAULT constraints when enabled constraints were requested.
    if (fetchEnabled && tableConstraints != null) {
      if (CollectionUtils.isNotEmpty(tableConstraints.getCheckConstraints())) {
        tableConstraints.setCheckConstraints(tableConstraints.getCheckConstraints().stream()
            .filter(SQLCheckConstraint::isEnable_cstr).collect(Collectors.toList()));
      }
      if (CollectionUtils.isNotEmpty(tableConstraints.getDefaultConstraints())) {
        tableConstraints.setDefaultConstraints(tableConstraints.getDefaultConstraints().stream()
            .filter(SQLDefaultConstraint::isEnable_cstr).collect(Collectors.toList()));
      }
    }
    return new TableConstraintsInfo(
        new PrimaryKeyInfo(tableConstraints.getPrimaryKeys(), tblName, dbName),
        new ForeignKeyInfo(tableConstraints.getForeignKeys(), tblName, dbName),
        new UniqueConstraint(tableConstraints.getUniqueConstraints(), tblName, dbName),
        new DefaultConstraint(tableConstraints.getDefaultConstraints(), tblName, dbName),
        new CheckConstraint(tableConstraints.getCheckConstraints()),
        new NotNullConstraint(tableConstraints.getNotNullConstraints(), tblName, dbName));
  } catch (Exception e) {
    throw new HiveException(e);
  } finally {
    perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_TABLE_CONSTRAINTS, "HS2-cache");
  }
}
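
A hedged usage sketch follows; Hive.get with a HiveConf is the standard accessor, but the database name, table name, and tableId value here are placeholders. Passing fetchReliable=true keeps only constraints marked RELY (typically what the optimizer can act on), while fetchEnabled=true keeps only ENABLEd CHECK and DEFAULT constraints.

// Hypothetical usage sketch, assuming hiveConf is a configured HiveConf
// and tableId identifies the target table (placeholder values).
Hive db = Hive.get(hiveConf);
TableConstraintsInfo info = db.getTableConstraints(
    "default",  // database name (placeholder)
    "orders",   // table name (placeholder)
    true,       // fetchReliable: drop constraints without RELY
    true,       // fetchEnabled: drop CHECK/DEFAULT constraints without ENABLE
    42L);       // tableId (placeholder)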
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
From the class DbNotificationListener, method onAddDefaultConstraint:
/**
 * @param addDefaultConstraintEvent add default constraint event
 * @throws MetaException
 */
@Override
public void onAddDefaultConstraint(AddDefaultConstraintEvent addDefaultConstraintEvent) throws MetaException {
  List<SQLDefaultConstraint> cols = addDefaultConstraintEvent.getDefaultConstraintCols();
  if (cols.size() > 0) {
    AddDefaultConstraintMessage colsInMsg = MessageBuilder.getInstance().buildAddDefaultConstraintMessage(cols);
    NotificationEvent event = new NotificationEvent(0, now(), EventType.ADD_DEFAULTCONSTRAINT.toString(),
        msgEncoder.getSerializer().serialize(colsInMsg));
    event.setCatName(cols.get(0).isSetCatName() ? cols.get(0).getCatName() : DEFAULT_CATALOG_NAME);
    event.setDbName(cols.get(0).getTable_db());
    event.setTableName(cols.get(0).getTable_name());
    process(event, addDefaultConstraintEvent);
  }
}
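
For comparison, here is a minimal sketch of a custom MetaStoreEventListener handling the same event; the class name and log output are hypothetical, and the getters used are the ones exercised elsewhere on this page. Such a listener would typically be registered through the metastore's event-listener configuration.

// Hypothetical listener sketch reacting to newly added DEFAULT constraints.
public class DefaultConstraintAuditListener extends MetaStoreEventListener {

  public DefaultConstraintAuditListener(Configuration conf) {
    super(conf);
  }

  @Override
  public void onAddDefaultConstraint(AddDefaultConstraintEvent event) throws MetaException {
    // Log each added DEFAULT constraint; the output format is illustrative.
    for (SQLDefaultConstraint dc : event.getDefaultConstraintCols()) {
      System.out.println("DEFAULT added: " + dc.getTable_db() + "." + dc.getTable_name()
          + "." + dc.getColumn_name() + " = " + dc.getDefault_value());
    }
  }
}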
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
From the class NonCatCallsWithCatalog, method defaultConstraints:
@Test
public void defaultConstraints() throws TException {
  String constraintName = "ocdv";
  // Table in non 'hive' catalog
  List<SQLDefaultConstraint> dv = new SQLDefaultConstraintBuilder()
      .onTable(testTables[2])
      .addColumn("test_col1")
      .setConstraintName(constraintName)
      .setDefaultVal("empty")
      .build(conf);
  client.addDefaultConstraint(dv);
  DefaultConstraintsRequest rqst = new DefaultConstraintsRequest(testTables[2].getCatName(),
      testTables[2].getDbName(), testTables[2].getTableName());
  List<SQLDefaultConstraint> fetched = client.getDefaultConstraints(rqst);
  Assert.assertEquals(1, fetched.size());
  Assert.assertEquals(expectedCatalog(), fetched.get(0).getCatName());
  Assert.assertEquals(testTables[2].getDbName(), fetched.get(0).getTable_db());
  Assert.assertEquals(testTables[2].getTableName(), fetched.get(0).getTable_name());
  Assert.assertEquals("test_col1", fetched.get(0).getColumn_name());
  Assert.assertEquals("empty", fetched.get(0).getDefault_value());
  Assert.assertEquals(constraintName, fetched.get(0).getDc_name());
  Assert.assertTrue(fetched.get(0).isEnable_cstr());
  Assert.assertFalse(fetched.get(0).isValidate_cstr());
  Assert.assertFalse(fetched.get(0).isRely_cstr());
  Assert.assertEquals(testTables[2].getCatName(), fetched.get(0).getCatName());
  client.dropConstraint(testTables[2].getDbName(), testTables[2].getTableName(), constraintName);
  rqst = new DefaultConstraintsRequest(testTables[2].getCatName(), testTables[2].getDbName(),
      testTables[2].getTableName());
  fetched = client.getDefaultConstraints(rqst);
  Assert.assertTrue(fetched.isEmpty());
}
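
Outside the test harness, the same client calls could look like the sketch below; the catalog, database, and table names are placeholders, and client is assumed to be a connected IMetaStoreClient.

// Hypothetical standalone sketch of fetching and printing default constraints.
DefaultConstraintsRequest req = new DefaultConstraintsRequest("hive", "mydb", "mytable");
for (SQLDefaultConstraint dc : client.getDefaultConstraints(req)) {
  System.out.println(dc.getColumn_name() + " DEFAULT '" + dc.getDefault_value() + "'"
      + (dc.isEnable_cstr() ? " ENABLE" : " DISABLE"));
}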
Use of org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint in project hive by apache.
From the class MetaStoreDirectSql, method getDefaultConstraints:
public List<SQLDefaultConstraint> getDefaultConstraints(String catName, String db_name, String tbl_name)
    throws MetaException {
  List<SQLDefaultConstraint> ret = new ArrayList<SQLDefaultConstraint>();
  String queryText = "SELECT " + DBS + ".\"NAME\", " + TBLS + ".\"TBL_NAME\","
      + "CASE WHEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" IS NOT NULL THEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" "
      + "ELSE " + PARTITION_KEYS + ".\"PKEY_NAME\" END, "
      + "" + KEY_CONSTRAINTS + ".\"CONSTRAINT_NAME\", " + KEY_CONSTRAINTS + ".\"ENABLE_VALIDATE_RELY\", "
      + "" + KEY_CONSTRAINTS + ".\"DEFAULT_VALUE\" "
      + " from " + TBLS + " "
      + " INNER JOIN " + KEY_CONSTRAINTS + " ON " + TBLS + ".\"TBL_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_TBL_ID\" "
      + " INNER JOIN " + DBS + " ON " + TBLS + ".\"DB_ID\" = " + DBS + ".\"DB_ID\" "
      + " LEFT OUTER JOIN " + COLUMNS_V2 + " ON " + COLUMNS_V2 + ".\"CD_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_CD_ID\" AND "
      + " " + COLUMNS_V2 + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" "
      + " LEFT OUTER JOIN " + PARTITION_KEYS + " ON " + TBLS + ".\"TBL_ID\" = " + PARTITION_KEYS + ".\"TBL_ID\" AND "
      + " " + PARTITION_KEYS + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" "
      + " WHERE " + KEY_CONSTRAINTS + ".\"CONSTRAINT_TYPE\" = " + MConstraint.DEFAULT_CONSTRAINT + " AND"
      + " " + DBS + ".\"CTLG_NAME\" = ? AND"
      + (db_name == null ? "" : " " + DBS + ".\"NAME\" = ? AND")
      + (tbl_name == null ? "" : " " + TBLS + ".\"TBL_NAME\" = ? ");
  queryText = queryText.trim();
  if (queryText.endsWith("AND")) {
    queryText = queryText.substring(0, queryText.length() - 3);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("getDefaultConstraints: directsql : " + queryText);
  }
  List<String> pms = new ArrayList<>();
  pms.add(catName);
  if (db_name != null) {
    pms.add(db_name);
  }
  if (tbl_name != null) {
    pms.add(tbl_name);
  }
  try (QueryWrapper queryParams = new QueryWrapper(pm.newQuery("javax.jdo.query.SQL", queryText))) {
    List<Object[]> sqlResult = MetastoreDirectSqlUtils.ensureList(executeWithArray(queryParams, pms.toArray(), queryText));
    if (!sqlResult.isEmpty()) {
      // Result columns: 0 = db name, 1 = table name, 2 = column/partition key name,
      // 3 = constraint name, 4 = ENABLE_VALIDATE_RELY bits, 5 = default value.
      for (Object[] line : sqlResult) {
        int enableValidateRely = MetastoreDirectSqlUtils.extractSqlInt(line[4]);
        boolean enable = (enableValidateRely & 4) != 0;
        boolean validate = (enableValidateRely & 2) != 0;
        boolean rely = (enableValidateRely & 1) != 0;
        SQLDefaultConstraint currConstraint = new SQLDefaultConstraint(catName,
            MetastoreDirectSqlUtils.extractSqlString(line[0]),
            MetastoreDirectSqlUtils.extractSqlString(line[1]),
            MetastoreDirectSqlUtils.extractSqlString(line[2]),
            MetastoreDirectSqlUtils.extractSqlString(line[5]),
            MetastoreDirectSqlUtils.extractSqlString(line[3]),
            enable, validate, rely);
        ret.add(currConstraint);
      }
    }
    return ret;
  }
}
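
The ENABLE_VALIDATE_RELY column packs all three constraint flags into a single integer, which the loop above unpacks with bit masks. A self-contained illustration of that decoding (the helper name is hypothetical):

// ENABLE_VALIDATE_RELY bit packing: value 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
static boolean[] decodeEnableValidateRely(int packed) {
  boolean enable = (packed & 4) != 0;
  boolean validate = (packed & 2) != 0;
  boolean rely = (packed & 1) != 0;
  return new boolean[] { enable, validate, rely };
}
// Example: decodeEnableValidateRely(5) -> { true, false, true } (ENABLE + RELY).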