Example 36 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

the class ConstraintsUtils method processForeignKeys.

public static void processForeignKeys(TableName tableName, ASTNode node, List<SQLForeignKey> foreignKeys) throws SemanticException {
    // The ANTLR grammar looks like:
    // 1.  KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user specifies the constraint name (i.e. child.getChildCount() == 7)
    // 2.  KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $fkCols  $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user does not specify the constraint name (i.e. child.getChildCount() == 6)
    String constraintName = null;
    // Defaults before scanning the child tokens: ENABLE and VALIDATE (both of
    // which are rejected below unless overridden) and NORELY.
    boolean enable = true;
    boolean validate = true;
    boolean rely = false;
    int fkIndex = -1;
    for (int i = 0; i < node.getChildCount(); i++) {
        ASTNode grandChild = (ASTNode) node.getChild(i);
        int type = grandChild.getToken().getType();
        if (type == HiveParser.TOK_CONSTRAINT_NAME) {
            constraintName = BaseSemanticAnalyzer.unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase());
        } else if (type == HiveParser.TOK_ENABLE) {
            enable = true;
            // validate is true by default if we enable the constraint
            validate = true;
        } else if (type == HiveParser.TOK_DISABLE) {
            enable = false;
            // validate is false by default if we disable the constraint
            validate = false;
        } else if (type == HiveParser.TOK_VALIDATE) {
            validate = true;
        } else if (type == HiveParser.TOK_NOVALIDATE) {
            validate = false;
        } else if (type == HiveParser.TOK_RELY) {
            rely = true;
        } else if (type == HiveParser.TOK_TABCOLNAME && fkIndex == -1) {
            fkIndex = i;
        }
    }
    if (enable) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("ENABLE feature not supported yet. " + "Please use DISABLE instead."));
    }
    if (validate) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("VALIDATE feature not supported yet. " + "Please use NOVALIDATE instead."));
    }
    // Per the grammar above, the children are laid out as: FK column list,
    // parent table name, parent column list.
    int ptIndex = fkIndex + 1;
    int pkIndex = ptIndex + 1;
    if (node.getChild(fkIndex).getChildCount() != node.getChild(pkIndex).getChildCount()) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(" The number of foreign key columns should be same as number of parent key columns "));
    }
    TableName parentTblName = BaseSemanticAnalyzer.getQualifiedTableName((ASTNode) node.getChild(ptIndex));
    for (int j = 0; j < node.getChild(fkIndex).getChildCount(); j++) {
        SQLForeignKey sqlForeignKey = new SQLForeignKey();
        sqlForeignKey.setFktable_db(tableName.getDb());
        sqlForeignKey.setFktable_name(tableName.getTable());
        Tree fkgrandChild = node.getChild(fkIndex).getChild(j);
        BaseSemanticAnalyzer.checkColumnName(fkgrandChild.getText());
        sqlForeignKey.setFkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(fkgrandChild.getText().toLowerCase()));
        sqlForeignKey.setPktable_db(parentTblName.getDb());
        sqlForeignKey.setPktable_name(parentTblName.getTable());
        Tree pkgrandChild = node.getChild(pkIndex).getChild(j);
        sqlForeignKey.setPkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(pkgrandChild.getText().toLowerCase()));
        sqlForeignKey.setKey_seq(j + 1);
        sqlForeignKey.setFk_name(constraintName);
        sqlForeignKey.setEnable_cstr(enable);
        sqlForeignKey.setValidate_cstr(validate);
        sqlForeignKey.setRely_cstr(rely);
        foreignKeys.add(sqlForeignKey);
    }
}
Also used : TableName(org.apache.hadoop.hive.common.TableName) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Tree(org.antlr.runtime.tree.Tree) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
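
For reference, below is a minimal sketch of the object the loop above builds for a single-column FOREIGN KEY ... DISABLE NOVALIDATE constraint. The table, column, and constraint names are hypothetical, not from the Hive sources; only the setters mirror the code above.

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

public class ForeignKeySketch {
    public static SQLForeignKey sampleKey() {
        SQLForeignKey fk = new SQLForeignKey();
        fk.setFktable_db("sales_db");         // child (referencing) side, hypothetical names
        fk.setFktable_name("orders");
        fk.setFkcolumn_name("customer_id");
        fk.setPktable_db("sales_db");         // parent (referenced) side
        fk.setPktable_name("customers");
        fk.setPkcolumn_name("id");
        fk.setKey_seq(1);                     // 1-based position within a composite key
        fk.setFk_name("fk_orders_customers"); // stays null when no constraint name is given
        fk.setEnable_cstr(false);             // only DISABLE passes the checks above
        fk.setValidate_cstr(false);           // only NOVALIDATE passes the checks above
        fk.setRely_cstr(false);
        return fk;
    }
}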

Example 37 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

the class AddForeignKeyHandler method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
    List<SQLForeignKey> fks;
    try {
        fks = msg.getForeignKeys();
    } catch (SemanticException e) {
        // Rethrow semantic errors as-is; wrap anything else.
        throw e;
    } catch (Exception e) {
        throw new SemanticException("Error reading message members", e);
    }
    List<Task<?>> tasks = new ArrayList<Task<?>>();
    if (fks.isEmpty()) {
        return tasks;
    }
    final String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
    final String actualTblName = fks.get(0).getFktable_name();
    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
    for (SQLForeignKey fk : fks) {
        // If the parent table lives in the same database as the child table, remap
        // it to the target database as well; otherwise keep its original db name.
        if (fk.getPktable_db().equals(fk.getFktable_db())) {
            fk.setPktable_db(actualDbName);
        }
        fk.setFktable_db(actualDbName);
        fk.setFktable_name(actualTblName);
    }
    // Only the foreign key slot of the Constraints holder is populated here.
    Constraints constraints = new Constraints(null, fks, null, null, null, null);
    AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(tName, context.eventOnlyReplicationSpec(), constraints);
    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
    return tasks;
}
Also used : AddForeignKeyMessage(org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage) Task(org.apache.hadoop.hive.ql.exec.Task) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) TableName(org.apache.hadoop.hive.common.TableName) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableAddConstraintDesc(org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
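
The per-key loop above applies one remapping rule during replication: the child (FK) side always moves to the target database, while the parent side moves only if it was co-located with the child. A standalone sketch of that rule, with a hypothetical helper name:

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

final class FkDbRemapSketch {
    // Hypothetical helper isolating the remapping rule from handle() above.
    static void remapToTargetDb(SQLForeignKey fk, String targetDb, String targetTbl) {
        if (fk.getPktable_db().equals(fk.getFktable_db())) {
            fk.setPktable_db(targetDb); // parent was co-located, so it follows the child
        }
        fk.setFktable_db(targetDb);     // the child side always moves to the target db
        fk.setFktable_name(targetTbl);
    }
}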

Example 38 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

the class LoadConstraint method isForeignKeysAlreadyLoaded.

private boolean isForeignKeysAlreadyLoaded(String fksMsgString) throws Exception {
    AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(fksMsgString);
    List<SQLForeignKey> fksInMsg = msg.getForeignKeys();
    if (fksInMsg.isEmpty()) {
        // An empty message carries no keys, so there is nothing left to load.
        return true;
    }
    String dbName = StringUtils.isBlank(dbNameToLoadIn) ? fksInMsg.get(0).getFktable_db() : dbNameToLoadIn;
    List<SQLForeignKey> fks;
    try {
        fks = context.hiveDb.getForeignKeyList(dbName, fksInMsg.get(0).getFktable_name());
    } catch (NoSuchObjectException e) {
        // The target table does not exist yet, so the keys cannot have been loaded.
        return false;
    }
    return CollectionUtils.isNotEmpty(fks);
}
Also used : AddForeignKeyMessage(org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
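
The method encodes a three-way idempotency check used during REPL LOAD. A minimal, hypothetical condensation of that decision, treating the table-missing case as a null list:

import java.util.List;

final class FkLoadCheckSketch {
    // Hypothetical condensation of isForeignKeysAlreadyLoaded(): an empty event is
    // a no-op, and the keys count as loaded once any FK exists on the target table.
    static boolean alreadyLoaded(List<?> keysInMessage, List<?> keysOnTarget) {
        if (keysInMessage.isEmpty()) {
            return true; // nothing to replay
        }
        return keysOnTarget != null && !keysOnTarget.isEmpty();
    }
}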

Example 39 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

the class MetaStoreDirectSql method getForeignKeys.

public List<SQLForeignKey> getForeignKeys(String catName, String parent_db_name, String parent_tbl_name, String foreign_db_name, String foreign_tbl_name) throws MetaException {
    List<SQLForeignKey> ret = new ArrayList<>();
    String queryText = "SELECT  \"D2\".\"NAME\", \"T2\".\"TBL_NAME\", " + "CASE WHEN \"C2\".\"COLUMN_NAME\" IS NOT NULL THEN \"C2\".\"COLUMN_NAME\" " + "ELSE \"P2\".\"PKEY_NAME\" END, " + "" + DBS + ".\"NAME\", " + TBLS + ".\"TBL_NAME\", " + "CASE WHEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" IS NOT NULL THEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" " + "ELSE " + PARTITION_KEYS + ".\"PKEY_NAME\" END, " + "" + KEY_CONSTRAINTS + ".\"POSITION\", " + KEY_CONSTRAINTS + ".\"UPDATE_RULE\", " + KEY_CONSTRAINTS + ".\"DELETE_RULE\", " + "" + KEY_CONSTRAINTS + ".\"CONSTRAINT_NAME\" , \"KEY_CONSTRAINTS2\".\"CONSTRAINT_NAME\", " + KEY_CONSTRAINTS + ".\"ENABLE_VALIDATE_RELY\" " + " from " + TBLS + " " + " INNER JOIN " + KEY_CONSTRAINTS + " ON " + TBLS + ".\"TBL_ID\" = " + KEY_CONSTRAINTS + ".\"CHILD_TBL_ID\" " + " INNER JOIN " + KEY_CONSTRAINTS + " \"KEY_CONSTRAINTS2\" ON \"KEY_CONSTRAINTS2\".\"PARENT_TBL_ID\"  = " + KEY_CONSTRAINTS + ".\"PARENT_TBL_ID\" " + " AND \"KEY_CONSTRAINTS2\".\"PARENT_CD_ID\"  = " + KEY_CONSTRAINTS + ".\"PARENT_CD_ID\" AND " + " \"KEY_CONSTRAINTS2\".\"PARENT_INTEGER_IDX\"  = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " INNER JOIN " + DBS + " ON " + TBLS + ".\"DB_ID\" = " + DBS + ".\"DB_ID\" " + " INNER JOIN " + TBLS + " \"T2\" ON  " + KEY_CONSTRAINTS + ".\"PARENT_TBL_ID\" = \"T2\".\"TBL_ID\" " + " INNER JOIN " + DBS + " \"D2\" ON \"T2\".\"DB_ID\" = \"D2\".\"DB_ID\" " + " LEFT OUTER JOIN " + COLUMNS_V2 + "  ON " + COLUMNS_V2 + ".\"CD_ID\" = " + KEY_CONSTRAINTS + ".\"CHILD_CD_ID\" AND " + " " + COLUMNS_V2 + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"CHILD_INTEGER_IDX\" " + " LEFT OUTER JOIN " + PARTITION_KEYS + " ON " + TBLS + ".\"TBL_ID\" = " + PARTITION_KEYS + ".\"TBL_ID\" AND " + " " + PARTITION_KEYS + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"CHILD_INTEGER_IDX\" " + " LEFT OUTER JOIN " + COLUMNS_V2 + " \"C2\" ON \"C2\".\"CD_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_CD_ID\" AND " + " \"C2\".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " LEFT OUTER JOIN " + PARTITION_KEYS + " \"P2\" ON \"P2\".\"TBL_ID\" = " + TBLS + ".\"TBL_ID\" AND " + " \"P2\".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " WHERE " + KEY_CONSTRAINTS + ".\"CONSTRAINT_TYPE\" = " + MConstraint.FOREIGN_KEY_CONSTRAINT + " AND \"KEY_CONSTRAINTS2\".\"CONSTRAINT_TYPE\" = " + MConstraint.PRIMARY_KEY_CONSTRAINT + " AND" + " " + DBS + ".\"CTLG_NAME\" = ? AND" + (foreign_db_name == null ? "" : " " + DBS + ".\"NAME\" = ? AND") + (foreign_tbl_name == null ? "" : " " + TBLS + ".\"TBL_NAME\" = ? AND") + (parent_tbl_name == null ? "" : " \"T2\".\"TBL_NAME\" = ? AND") + (parent_db_name == null ? "" : " \"D2\".\"NAME\" = ?");
    queryText = queryText.trim();
    // If none of the optional filters were appended, drop the dangling "AND".
    if (queryText.endsWith("AND")) {
        queryText = queryText.substring(0, queryText.length() - 3);
    }
    List<String> pms = new ArrayList<String>();
    pms.add(catName);
    if (foreign_db_name != null) {
        pms.add(foreign_db_name);
    }
    if (foreign_tbl_name != null) {
        pms.add(foreign_tbl_name);
    }
    if (parent_tbl_name != null) {
        pms.add(parent_tbl_name);
    }
    if (parent_db_name != null) {
        pms.add(parent_db_name);
    }
    try (QueryWrapper queryParams = new QueryWrapper(pm.newQuery("javax.jdo.query.SQL", queryText))) {
        List<Object[]> sqlResult = MetastoreDirectSqlUtils.ensureList(executeWithArray(queryParams, pms.toArray(), queryText));
        if (!sqlResult.isEmpty()) {
            for (Object[] line : sqlResult) {
                // ENABLE_VALIDATE_RELY packs three flags into one int:
                // bit value 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
                int enableValidateRely = MetastoreDirectSqlUtils.extractSqlInt(line[11]);
                boolean enable = (enableValidateRely & 4) != 0;
                boolean validate = (enableValidateRely & 2) != 0;
                boolean rely = (enableValidateRely & 1) != 0;
                SQLForeignKey currKey = new SQLForeignKey(
                    MetastoreDirectSqlUtils.extractSqlString(line[0]),  // pktable_db
                    MetastoreDirectSqlUtils.extractSqlString(line[1]),  // pktable_name
                    MetastoreDirectSqlUtils.extractSqlString(line[2]),  // pkcolumn_name
                    MetastoreDirectSqlUtils.extractSqlString(line[3]),  // fktable_db
                    MetastoreDirectSqlUtils.extractSqlString(line[4]),  // fktable_name
                    MetastoreDirectSqlUtils.extractSqlString(line[5]),  // fkcolumn_name
                    MetastoreDirectSqlUtils.extractSqlInt(line[6]),     // key_seq
                    MetastoreDirectSqlUtils.extractSqlInt(line[7]),     // update_rule
                    MetastoreDirectSqlUtils.extractSqlInt(line[8]),     // delete_rule
                    MetastoreDirectSqlUtils.extractSqlString(line[9]),  // fk_name
                    MetastoreDirectSqlUtils.extractSqlString(line[10]), // pk_name
                    enable, validate, rely);
                currKey.setCatName(catName);
                ret.add(currKey);
            }
        }
        return ret;
    }
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
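
A self-contained sketch of the ENABLE_VALIDATE_RELY decoding above, runnable on its own; the class name is hypothetical:

public final class ConstraintFlags {
    public final boolean enable, validate, rely;

    // Same bit layout as read in getForeignKeys() above:
    // 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
    public ConstraintFlags(int enableValidateRely) {
        this.enable = (enableValidateRely & 4) != 0;
        this.validate = (enableValidateRely & 2) != 0;
        this.rely = (enableValidateRely & 1) != 0;
    }

    public static void main(String[] args) {
        ConstraintFlags f = new ConstraintFlags(5); // ENABLE + RELY
        System.out.println(f.enable + " " + f.validate + " " + f.rely); // true false true
    }
}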

Example 40 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

the class TestObjectStore method testTableOps.

/**
 * Test table operations: create, alter, and drop tables plus foreign key constraints.
 */
@Test
public void testTableOps() throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
    Database db1 = new DatabaseBuilder().setName(DB1).setDescription("description").setLocation("locationurl").build(conf);
    objectStore.createDatabase(db1);
    StorageDescriptor sd1 = new StorageDescriptor(ImmutableList.of(new FieldSchema("pk_col", "double", null)), "location", null, null, false, 0, new SerDeInfo("SerDeName", "serializationLib", null), null, null, null);
    HashMap<String, String> params = new HashMap<>();
    params.put("EXTERNAL", "false");
    Table tbl1 = new Table(TABLE1, DB1, "owner", 1, 2, 3, sd1, null, params, null, null, "MANAGED_TABLE");
    objectStore.createTable(tbl1);
    List<String> tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
    Assert.assertEquals(1, tables.size());
    Assert.assertEquals(TABLE1, tables.get(0));
    StorageDescriptor sd2 = new StorageDescriptor(ImmutableList.of(new FieldSchema("fk_col", "double", null)), "location", null, null, false, 0, new SerDeInfo("SerDeName", "serializationLib", null), null, null, null);
    Table newTbl1 = new Table("new" + TABLE1, DB1, "owner", 1, 2, 3, sd2, null, params, null, null, "MANAGED_TABLE");
    // Change different fields and verify they were altered
    newTbl1.setOwner("role1");
    newTbl1.setOwnerType(PrincipalType.ROLE);
    objectStore.alterTable(DEFAULT_CATALOG_NAME, DB1, TABLE1, newTbl1, null);
    tables = objectStore.getTables(DEFAULT_CATALOG_NAME, DB1, "new*");
    Assert.assertEquals(1, tables.size());
    Assert.assertEquals("new" + TABLE1, tables.get(0));
    // Verify fields were altered during the alterTable operation
    Table alteredTable = objectStore.getTable(DEFAULT_CATALOG_NAME, DB1, "new" + TABLE1);
    Assert.assertEquals("Owner of table was not altered", newTbl1.getOwner(), alteredTable.getOwner());
    Assert.assertEquals("Owner type of table was not altered", newTbl1.getOwnerType(), alteredTable.getOwnerType());
    objectStore.createTable(tbl1);
    tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
    Assert.assertEquals(2, tables.size());
    List<SQLForeignKey> foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, DB1, TABLE1, null, null);
    Assert.assertEquals(0, foreignKeys.size());
    SQLPrimaryKey pk = new SQLPrimaryKey(DB1, TABLE1, "pk_col", 1, "pk_const_1", false, false, false);
    pk.setCatName(DEFAULT_CATALOG_NAME);
    objectStore.addPrimaryKeys(ImmutableList.of(pk));
    // Constructor args: PK db/table/column, FK db/table/column, key_seq,
    // update_rule, delete_rule, FK name, PK name, enable/validate/rely flags.
    SQLForeignKey fk = new SQLForeignKey(DB1, TABLE1, "pk_col", DB1, "new" + TABLE1, "fk_col", 1, 0, 0, "fk_const_1", "pk_const_1", false, false, false);
    objectStore.addForeignKeys(ImmutableList.of(fk));
    // Retrieve from the PK (parent) side and drop the constraint
    foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, null, null, DB1, "new" + TABLE1);
    Assert.assertEquals(1, foreignKeys.size());
    for (SQLForeignKey fkcol : foreignKeys) {
        objectStore.dropConstraint(fkcol.getCatName(), fkcol.getFktable_db(), fkcol.getFktable_name(), fkcol.getFk_name());
    }
    // Retrieve from FK side
    foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, DB1, TABLE1, null, null);
    Assert.assertEquals(0, foreignKeys.size());
    // Retrieve from PK side
    foreignKeys = objectStore.getForeignKeys(DEFAULT_CATALOG_NAME, null, null, DB1, "new" + TABLE1);
    Assert.assertEquals(0, foreignKeys.size());
    objectStore.dropTable(DEFAULT_CATALOG_NAME, DB1, TABLE1);
    tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
    Assert.assertEquals(1, tables.size());
    objectStore.dropTable(DEFAULT_CATALOG_NAME, DB1, "new" + TABLE1);
    tables = objectStore.getAllTables(DEFAULT_CATALOG_NAME, DB1);
    Assert.assertEquals(0, tables.size());
    objectStore.dropDatabase(db1.getCatalogName(), DB1);
}
Also used : SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) TestHiveMetaStore.createSourceTable(org.apache.hadoop.hive.metastore.TestHiveMetaStore.createSourceTable) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) MetastoreUnitTest(org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest) Test(org.junit.Test)
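
One detail the test does not exercise: a composite foreign key is represented as one SQLForeignKey per column, all sharing the same fk_name and numbered by key_seq, which is exactly what the loop in Example 36 produces. A minimal sketch with hypothetical table and column names:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

public class CompositeFkSketch {
    // Build the per-column key objects for a two-column composite constraint,
    // mirroring the loop in ConstraintsUtils.processForeignKeys (Example 36).
    public static List<SQLForeignKey> compositeKey() {
        String[] fkCols = { "order_db_id", "order_id" }; // hypothetical child columns
        String[] pkCols = { "db_id", "id" };             // hypothetical parent columns
        List<SQLForeignKey> keys = new ArrayList<>();
        for (int i = 0; i < fkCols.length; i++) {
            SQLForeignKey fk = new SQLForeignKey();
            fk.setFktable_db("db1");
            fk.setFktable_name("orders");
            fk.setFkcolumn_name(fkCols[i]);
            fk.setPktable_db("db1");
            fk.setPktable_name("customers");
            fk.setPkcolumn_name(pkCols[i]);
            fk.setKey_seq(i + 1);            // 1-based column position
            fk.setFk_name("fk_composite_1"); // all columns share the constraint name
            keys.add(fk);
        }
        return keys;
    }
}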

Aggregations

SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey) 46
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) 28
Test (org.junit.Test) 20
Table (org.apache.hadoop.hive.metastore.api.Table) 19
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) 15
ArrayList (java.util.ArrayList) 13
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) 13
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) 13
SQLForeignKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder) 13
SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder) 13
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) 12
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest) 11
ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) 9
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 6
IOException (java.io.IOException) 5
HashMap (java.util.HashMap) 5
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException) 5
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 5
TApplicationException (org.apache.thrift.TApplicationException) 5
Tree (org.antlr.runtime.tree.Tree) 3