Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in the Apache Hive project.
From the class ConstraintsUtils, method processForeignKeys.
/**
 * Translates a {@code TOK_FOREIGN_KEY} AST node into {@link SQLForeignKey} metastore objects,
 * one per referencing/referenced column pair, and appends them to {@code foreignKeys}.
 *
 * @param tableName   qualified name of the table declaring the constraint (the FK side)
 * @param node        the TOK_FOREIGN_KEY node produced by the parser
 * @param foreignKeys output list that the parsed keys are appended to
 * @throws SemanticException if an unsupported spec (ENABLE/VALIDATE) is used, or if the two
 *                           column lists have different lengths
 */
public static void processForeignKeys(TableName tableName, ASTNode node, List<SQLForeignKey> foreignKeys) throws SemanticException {
// The ANTLR grammar produces one of two tree shapes:
//   ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
//     -- 7 children, when the user names the constraint
//   ^(TOK_FOREIGN_KEY $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
//     -- 6 children, when the constraint is anonymous
String constraintName = null;
boolean enable = true;
boolean validate = true;
boolean rely = false;
int fkColListIndex = -1;
for (int childIdx = 0; childIdx < node.getChildCount(); childIdx++) {
ASTNode child = (ASTNode) node.getChild(childIdx);
int tokenType = child.getToken().getType();
if (tokenType == HiveParser.TOK_CONSTRAINT_NAME) {
constraintName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText().toLowerCase());
} else if (tokenType == HiveParser.TOK_ENABLE) {
enable = true;
// ENABLE implies VALIDATE unless an explicit validate spec token follows.
validate = true;
} else if (tokenType == HiveParser.TOK_DISABLE) {
enable = false;
// DISABLE implies NOVALIDATE unless an explicit validate spec token follows.
validate = false;
} else if (tokenType == HiveParser.TOK_VALIDATE) {
validate = true;
} else if (tokenType == HiveParser.TOK_NOVALIDATE) {
validate = false;
} else if (tokenType == HiveParser.TOK_RELY) {
rely = true;
} else if (tokenType == HiveParser.TOK_TABCOLNAME && fkColListIndex == -1) {
// The first TOK_TABCOLNAME child holds the referencing (FK-side) column list.
fkColListIndex = childIdx;
}
}
if (enable) {
throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("ENABLE feature not supported yet. Please use DISABLE instead."));
}
if (validate) {
throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("VALIDATE feature not supported yet. Please use NOVALIDATE instead."));
}
// Per the grammar, the children are laid out as: fkCols, parent table name, parent cols.
int parentTableIndex = fkColListIndex + 1;
int pkColListIndex = parentTableIndex + 1;
if (node.getChild(fkColListIndex).getChildCount() != node.getChild(pkColListIndex).getChildCount()) {
throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(" The number of foreign key columns should be same as number of parent key columns "));
}
TableName parentTblName = BaseSemanticAnalyzer.getQualifiedTableName((ASTNode) node.getChild(parentTableIndex));
for (int colIdx = 0; colIdx < node.getChild(fkColListIndex).getChildCount(); colIdx++) {
Tree fkColNode = node.getChild(fkColListIndex).getChild(colIdx);
Tree pkColNode = node.getChild(pkColListIndex).getChild(colIdx);
BaseSemanticAnalyzer.checkColumnName(fkColNode.getText());
SQLForeignKey sqlForeignKey = new SQLForeignKey();
sqlForeignKey.setFktable_db(tableName.getDb());
sqlForeignKey.setFktable_name(tableName.getTable());
sqlForeignKey.setFkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(fkColNode.getText().toLowerCase()));
sqlForeignKey.setPktable_db(parentTblName.getDb());
sqlForeignKey.setPktable_name(parentTblName.getTable());
sqlForeignKey.setPkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(pkColNode.getText().toLowerCase()));
// Key sequence numbers are 1-based.
sqlForeignKey.setKey_seq(colIdx + 1);
sqlForeignKey.setFk_name(constraintName);
sqlForeignKey.setEnable_cstr(enable);
sqlForeignKey.setValidate_cstr(validate);
sqlForeignKey.setRely_cstr(rely);
foreignKeys.add(sqlForeignKey);
}
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in the Apache Hive project.
From the class AddForeignKeyHandler, method handle.
/**
 * Builds the DDL task that re-creates the foreign key constraints carried by an
 * ADD_FOREIGNKEY replication event.
 *
 * Fixes: the original built an intermediate {@code tasks} list and added the task to it,
 * but then returned {@code Collections.singletonList(...)} on the success path, leaving the
 * list mutation as dead code; also corrects the "constrains" typo in the debug log message.
 *
 * @param context replication message context carrying the serialized event payload
 * @return an empty (mutable) list when the event has no foreign keys, otherwise a
 *         singleton list with the add-constraint task
 * @throws SemanticException if the message payload cannot be deserialized
 */
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
List<SQLForeignKey> fks;
try {
fks = msg.getForeignKeys();
} catch (Exception e) {
// Rethrow SemanticExceptions untouched; wrap everything else so the cause is preserved.
if (e instanceof SemanticException) {
throw (SemanticException) e;
}
throw new SemanticException("Error reading message members", e);
}
// Nothing to do for an event that carries no foreign keys.
if (fks.isEmpty()) {
return new ArrayList<>();
}
final String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
final String actualTblName = fks.get(0).getFktable_name();
final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
for (SQLForeignKey fk : fks) {
// Remap the PK side only when it lived in the same source db as the FK side.
// Otherwise, keep db name
if (fk.getPktable_db().equals(fk.getFktable_db())) {
fk.setPktable_db(actualDbName);
}
fk.setFktable_db(actualDbName);
fk.setFktable_name(actualTblName);
}
Constraints constraints = new Constraints(null, fks, null, null, null, null);
AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
context.log.debug("Added add constraint task : {}:{}", addConstraintsTask.getId(), actualTblName);
updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
return Collections.singletonList(addConstraintsTask);
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in the Apache Hive project.
From the class LoadConstraint, method isForeignKeysAlreadyLoaded.
/**
 * Checks whether the foreign keys carried by the given serialized AddForeignKeyMessage
 * are already present in the target warehouse.
 *
 * @param fksMsgString serialized AddForeignKeyMessage payload
 * @return true if the message carries no foreign keys, or the target table already has
 *         at least one foreign key; false if the target table does not exist yet
 * @throws Exception if the message cannot be deserialized or the metastore call fails
 */
private boolean isForeignKeysAlreadyLoaded(String fksMsgString) throws Exception {
AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(fksMsgString);
List<SQLForeignKey> msgForeignKeys = msg.getForeignKeys();
if (msgForeignKeys.isEmpty()) {
// An empty message has nothing left to load.
return true;
}
SQLForeignKey firstKey = msgForeignKeys.get(0);
// Prefer the configured target db; fall back to the db recorded in the message.
String targetDbName = StringUtils.isBlank(dbNameToLoadIn) ? firstKey.getFktable_db() : dbNameToLoadIn;
try {
List<SQLForeignKey> existingKeys = context.hiveDb.getForeignKeyList(targetDbName, firstKey.getFktable_name());
return CollectionUtils.isNotEmpty(existingKeys);
} catch (NoSuchObjectException e) {
// Target table does not exist yet, so its constraints cannot have been loaded.
return false;
}
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in the Apache Hive project.
From the class MetaStoreDirectSql, method getForeignKeys.
public List<SQLForeignKey> getForeignKeys(String catName, String parent_db_name, String parent_tbl_name, String foreign_db_name, String foreign_tbl_name) throws MetaException {
List<SQLForeignKey> ret = new ArrayList<>();
String queryText = "SELECT \"D2\".\"NAME\", \"T2\".\"TBL_NAME\", " + "CASE WHEN \"C2\".\"COLUMN_NAME\" IS NOT NULL THEN \"C2\".\"COLUMN_NAME\" " + "ELSE \"P2\".\"PKEY_NAME\" END, " + "" + DBS + ".\"NAME\", " + TBLS + ".\"TBL_NAME\", " + "CASE WHEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" IS NOT NULL THEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" " + "ELSE " + PARTITION_KEYS + ".\"PKEY_NAME\" END, " + "" + KEY_CONSTRAINTS + ".\"POSITION\", " + KEY_CONSTRAINTS + ".\"UPDATE_RULE\", " + KEY_CONSTRAINTS + ".\"DELETE_RULE\", " + "" + KEY_CONSTRAINTS + ".\"CONSTRAINT_NAME\" , \"KEY_CONSTRAINTS2\".\"CONSTRAINT_NAME\", " + KEY_CONSTRAINTS + ".\"ENABLE_VALIDATE_RELY\" " + " from " + TBLS + " " + " INNER JOIN " + KEY_CONSTRAINTS + " ON " + TBLS + ".\"TBL_ID\" = " + KEY_CONSTRAINTS + ".\"CHILD_TBL_ID\" " + " INNER JOIN " + KEY_CONSTRAINTS + " \"KEY_CONSTRAINTS2\" ON \"KEY_CONSTRAINTS2\".\"PARENT_TBL_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_TBL_ID\" " + " AND \"KEY_CONSTRAINTS2\".\"PARENT_CD_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_CD_ID\" AND " + " \"KEY_CONSTRAINTS2\".\"PARENT_INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " INNER JOIN " + DBS + " ON " + TBLS + ".\"DB_ID\" = " + DBS + ".\"DB_ID\" " + " INNER JOIN " + TBLS + " \"T2\" ON " + KEY_CONSTRAINTS + ".\"PARENT_TBL_ID\" = \"T2\".\"TBL_ID\" " + " INNER JOIN " + DBS + " \"D2\" ON \"T2\".\"DB_ID\" = \"D2\".\"DB_ID\" " + " LEFT OUTER JOIN " + COLUMNS_V2 + " ON " + COLUMNS_V2 + ".\"CD_ID\" = " + KEY_CONSTRAINTS + ".\"CHILD_CD_ID\" AND " + " " + COLUMNS_V2 + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"CHILD_INTEGER_IDX\" " + " LEFT OUTER JOIN " + PARTITION_KEYS + " ON " + TBLS + ".\"TBL_ID\" = " + PARTITION_KEYS + ".\"TBL_ID\" AND " + " " + PARTITION_KEYS + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"CHILD_INTEGER_IDX\" " + " LEFT OUTER JOIN " + COLUMNS_V2 + " \"C2\" ON \"C2\".\"CD_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_CD_ID\" AND " + " \"C2\".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " 
LEFT OUTER JOIN " + PARTITION_KEYS + " \"P2\" ON \"P2\".\"TBL_ID\" = " + TBLS + ".\"TBL_ID\" AND " + " \"P2\".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " WHERE " + KEY_CONSTRAINTS + ".\"CONSTRAINT_TYPE\" = " + MConstraint.FOREIGN_KEY_CONSTRAINT + " AND \"KEY_CONSTRAINTS2\".\"CONSTRAINT_TYPE\" = " + MConstraint.PRIMARY_KEY_CONSTRAINT + " AND" + " " + DBS + ".\"CTLG_NAME\" = ? AND" + (foreign_db_name == null ? "" : " " + DBS + ".\"NAME\" = ? AND") + (foreign_tbl_name == null ? "" : " " + TBLS + ".\"TBL_NAME\" = ? AND") + (parent_tbl_name == null ? "" : " \"T2\".\"TBL_NAME\" = ? AND") + (parent_db_name == null ? "" : " \"D2\".\"NAME\" = ?");
queryText = queryText.trim();
if (queryText.endsWith("AND")) {
queryText = queryText.substring(0, queryText.length() - 3);
}
List<String> pms = new ArrayList<String>();
pms.add(catName);
if (foreign_db_name != null) {
pms.add(foreign_db_name);
}
if (foreign_tbl_name != null) {
pms.add(foreign_tbl_name);
}
if (parent_tbl_name != null) {
pms.add(parent_tbl_name);
}
if (parent_db_name != null) {
pms.add(parent_db_name);
}
try (QueryWrapper queryParams = new QueryWrapper(pm.newQuery("javax.jdo.query.SQL", queryText))) {
List<Object[]> sqlResult = MetastoreDirectSqlUtils.ensureList(executeWithArray(queryParams, pms.toArray(), queryText));
if (!sqlResult.isEmpty()) {
for (Object[] line : sqlResult) {
int enableValidateRely = MetastoreDirectSqlUtils.extractSqlInt(line[11]);
boolean enable = (enableValidateRely & 4) != 0;
boolean validate = (enableValidateRely & 2) != 0;
boolean rely = (enableValidateRely & 1) != 0;
SQLForeignKey currKey = new SQLForeignKey(MetastoreDirectSqlUtils.extractSqlString(line[0]), MetastoreDirectSqlUtils.extractSqlString(line[1]), MetastoreDirectSqlUtils.extractSqlString(line[2]), MetastoreDirectSqlUtils.extractSqlString(line[3]), MetastoreDirectSqlUtils.extractSqlString(line[4]), MetastoreDirectSqlUtils.extractSqlString(line[5]), MetastoreDirectSqlUtils.extractSqlInt(line[6]), MetastoreDirectSqlUtils.extractSqlInt(line[7]), MetastoreDirectSqlUtils.extractSqlInt(line[8]), MetastoreDirectSqlUtils.extractSqlString(line[9]), MetastoreDirectSqlUtils.extractSqlString(line[10]), enable, validate, rely);
currKey.setCatName(catName);
ret.add(currKey);
}
}
return ret;
}
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in the Apache Hive project.
From the class TestObjectStore, method testTableOps.
/**
 * Test table operations: create/alter/list tables plus a primary-key/foreign-key
 * constraint round-trip (add, retrieve from both sides, drop) through the ObjectStore.
 *
 * Fixes: the original issued the exact same getForeignKeys(PK-side) query twice in a row
 * (once into {@code foreignKeys}, once into {@code fks}) and then null-checked a result
 * already asserted to have size 1; the duplicate query and dead null-check are removed.
 */
@Test
public void testTableOps() throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
Database db1 = new DatabaseBuilder().setName(DB1).setDescription("description").setLocation("locationurl").build(conf);
objectStore.createDatabase(db1);
StorageDescriptor sd1 = new StorageDescriptor(ImmutableList.of(new FieldSchema("pk_col", "double", null)), "location", null, null, false, 0, new SerDeInfo("SerDeName", "serializationLib", null), null, null, null);
HashMap<String, String> params = new HashMap<>();
params.put("EXTERNAL", "false");
Table tbl1 = new Table(TABLE1, DB1, "owner", 1, 2, 3, sd1, null, params, null, null, "MANAGED_TABLE");
objectStore.createTable(tbl1);
List<String> tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
Assert.assertEquals(1, tables.size());
Assert.assertEquals(TABLE1, tables.get(0));
StorageDescriptor sd2 = new StorageDescriptor(ImmutableList.of(new FieldSchema("fk_col", "double", null)), "location", null, null, false, 0, new SerDeInfo("SerDeName", "serializationLib", null), null, null, null);
Table newTbl1 = new Table("new" + TABLE1, DB1, "owner", 1, 2, 3, sd2, null, params, null, null, "MANAGED_TABLE");
// Change different fields and verify they were altered
newTbl1.setOwner("role1");
newTbl1.setOwnerType(PrincipalType.ROLE);
objectStore.alterTable(DEFAULT_CATALOG_NAME, DB1, TABLE1, newTbl1, null);
tables = objectStore.getTables(DEFAULT_CATALOG_NAME, DB1, "new*");
Assert.assertEquals(1, tables.size());
Assert.assertEquals("new" + TABLE1, tables.get(0));
// Verify fields were altered during the alterTable operation
Table alteredTable = objectStore.getTable(DEFAULT_CATALOG_NAME, DB1, "new" + TABLE1);
Assert.assertEquals("Owner of table was not altered", newTbl1.getOwner(), alteredTable.getOwner());
Assert.assertEquals("Owner type of table was not altered", newTbl1.getOwnerType(), alteredTable.getOwnerType());
// Re-create TABLE1 (the alter above renamed it away) so both tables exist.
objectStore.createTable(tbl1);
tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
Assert.assertEquals(2, tables.size());
// No constraints yet.
List<SQLForeignKey> foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, DB1, TABLE1, null, null);
Assert.assertEquals(0, foreignKeys.size());
SQLPrimaryKey pk = new SQLPrimaryKey(DB1, TABLE1, "pk_col", 1, "pk_const_1", false, false, false);
pk.setCatName(DEFAULT_CATALOG_NAME);
objectStore.addPrimaryKeys(ImmutableList.of(pk));
// NOTE(review): unlike pk, fk's catalog name is never set here; addForeignKeys
// presumably defaults it to the table's catalog — confirm.
SQLForeignKey fk = new SQLForeignKey(DB1, TABLE1, "pk_col", DB1, "new" + TABLE1, "fk_col", 1, 0, 0, "fk_const_1", "pk_const_1", false, false, false);
objectStore.addForeignKeys(ImmutableList.of(fk));
// Retrieve from PK side
foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, null, null, DB1, "new" + TABLE1);
Assert.assertEquals(1, foreignKeys.size());
// Drop the constraint using the keys just fetched (no need to re-run the same query).
for (SQLForeignKey fkcol : foreignKeys) {
objectStore.dropConstraint(fkcol.getCatName(), fkcol.getFktable_db(), fkcol.getFktable_name(), fkcol.getFk_name());
}
// Retrieve from FK side
foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, DB1, TABLE1, null, null);
Assert.assertEquals(0, foreignKeys.size());
// Retrieve from PK side
foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, null, null, DB1, "new" + TABLE1);
Assert.assertEquals(0, foreignKeys.size());
// Clean up both tables and the database.
objectStore.dropTable(DEFAULT_CATALOG_NAME, DB1, TABLE1);
tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
Assert.assertEquals(1, tables.size());
objectStore.dropTable(DEFAULT_CATALOG_NAME, DB1, "new" + TABLE1);
tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
Assert.assertEquals(0, tables.size());
objectStore.dropDatabase(db1.getCatalogName(), DB1);
}
End of aggregated SQLForeignKey usage examples.