Search in sources :

Example 51 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class ObjectStore, method dropTable:

@Override
public boolean dropTable(String dbName, String tableName) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException {
    boolean committed = false;
    try {
        openTransaction();
        MTable mtbl = getMTable(dbName, tableName);
        pm.retrieve(mtbl);
        if (mtbl != null) {
            // Grants must be deleted before the table row they reference.
            List<MTablePrivilege> tablePrivs = listAllTableGrants(dbName, tableName);
            if (tablePrivs != null && !tablePrivs.isEmpty()) {
                pm.deletePersistentAll(tablePrivs);
            }
            List<MTableColumnPrivilege> tableColPrivs = listTableAllColumnGrants(dbName, tableName);
            if (tableColPrivs != null && !tableColPrivs.isEmpty()) {
                pm.deletePersistentAll(tableColPrivs);
            }
            List<MPartitionPrivilege> partPrivs = this.listTableAllPartitionGrants(dbName, tableName);
            if (partPrivs != null && !partPrivs.isEmpty()) {
                pm.deletePersistentAll(partPrivs);
            }
            List<MPartitionColumnPrivilege> partColPrivs = listTableAllPartitionColumnGrants(dbName, tableName);
            if (partColPrivs != null && !partColPrivs.isEmpty()) {
                pm.deletePersistentAll(partColPrivs);
            }
            // Column statistics may legitimately be absent; treat that as a no-op.
            try {
                deleteTableColumnStatistics(dbName, tableName, null);
            } catch (NoSuchObjectException e) {
                LOG.info("Found no table level column statistics associated with db " + dbName + " table " + tableName + " record to delete");
            }
            // null constraint name means: drop every constraint on this table.
            List<MConstraint> constraints = listAllTableConstraintsWithOptionalConstraintName(dbName, tableName, null);
            if (constraints != null && !constraints.isEmpty()) {
                pm.deletePersistentAll(constraints);
            }
            preDropStorageDescriptor(mtbl.getSd());
            // Finally remove the table object itself.
            pm.deletePersistentAll(mtbl);
        }
        committed = commitTransaction();
    } finally {
        if (!committed) {
            rollbackTransaction();
        }
    }
    return committed;
}
Also used : MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) MPartitionColumnPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege) MTable(org.apache.hadoop.hive.metastore.model.MTable) MPartitionPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionPrivilege) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MTablePrivilege(org.apache.hadoop.hive.metastore.model.MTablePrivilege) MTableColumnPrivilege(org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege)

Example 52 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeDropIndex:

/**
 * Analyzes a DROP INDEX statement: validates that the table and index exist
 * (unless IF EXISTS was given or the config says to ignore non-existent
 * objects), records the table as a read entity, and queues the drop task.
 *
 * @param ast the DROP INDEX AST node; child 0 is the index name, child 1 the table name
 * @throws SemanticException if the table/index is missing and errors are not suppressed,
 *         or if the index lookup fails for a reason other than non-existence
 */
private void analyzeDropIndex(ASTNode ast) throws SemanticException {
    String indexName = unescapeIdentifier(ast.getChild(0).getText());
    String tableName = getUnescapedName((ASTNode) ast.getChild(1));
    boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
    // we want to signal an error if the index doesn't exist and we're
    // configured not to ignore this
    boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
    Table tbl = getTable(tableName, false);
    if (throwException && tbl == null) {
        throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
    }
    try {
        // Existence probe only — the returned Index is intentionally discarded.
        // (Previously assigned to an unused local variable.)
        db.getIndex(tableName, indexName);
    } catch (HiveException e) {
        // Any failure other than "index does not exist" is a hard error.
        if (!(e.getCause() instanceof NoSuchObjectException)) {
            throw new SemanticException(ErrorMsg.CANNOT_DROP_INDEX.getMsg("dropping index"), e);
        }
        if (throwException) {
            throw new SemanticException(ErrorMsg.INVALID_INDEX.getMsg(indexName));
        }
    }
    if (tbl != null) {
        inputs.add(new ReadEntity(tbl));
    }
    DropIndexDesc dropIdxDesc = new DropIndexDesc(indexName, tableName, throwException);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropIdxDesc), conf));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) DropIndexDesc(org.apache.hadoop.hive.ql.plan.DropIndexDesc) HiveIndex(org.apache.hadoop.hive.ql.index.HiveIndex) Index(org.apache.hadoop.hive.metastore.api.Index) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)

Example 53 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class HBaseStore, method dropPartitions:

@Override
public void dropPartitions(String dbName, String tblName, List<String> partNames) throws MetaException, NoSuchObjectException {
    boolean committed = false;
    openTransaction();
    try {
        // Drop each named partition in turn; any failure aborts the whole batch.
        for (String partName : partNames) {
            dropPartition(dbName, tblName, partNameToVals(partName));
        }
        committed = true;
    } catch (Exception e) {
        // Thrift-generated exceptions carry no cause, so the original
        // exception is preserved via the log and its message only.
        LOG.error("Unable to drop partitions", e);
        throw new NoSuchObjectException("Failure dropping partitions, " + e.getMessage());
    } finally {
        commitOrRoleBack(committed);
    }
}
Also used : NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException) TException(org.apache.thrift.TException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)

Example 54 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class HBaseStore, method getRole:

@Override
public Role getRole(String roleName) throws NoSuchObjectException {
    boolean committed = false;
    openTransaction();
    try {
        Role found = getHBase().getRole(roleName);
        // A null result means the role simply does not exist in the store.
        if (found == null) {
            throw new NoSuchObjectException("Unable to find role " + roleName);
        }
        committed = true;
        return found;
    } catch (IOException e) {
        // Storage-level failures are surfaced as NoSuchObjectException per the
        // method's declared contract; the root cause goes to the log.
        LOG.error("Unable to get role", e);
        throw new NoSuchObjectException("Error reading table " + e.getMessage());
    } finally {
        commitOrRoleBack(committed);
    }
}
Also used : Role(org.apache.hadoop.hive.metastore.api.Role) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) IOException(java.io.IOException)

Example 55 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class HBaseStore, method getPartitionsByExprInternal:

/**
 * Evaluates a partition-pruning expression tree against HBase by translating it
 * into one or more scan plans, running each scan, and merging the (possibly
 * overlapping) results into {@code result}.
 *
 * @param dbName   database name; normalized before use
 * @param tblName  table name; normalized before use
 * @param exprTree parsed filter expression over the table's partition keys
 * @param maxParts cap on the number of partitions returned
 *                 (NOTE(review): a non-positive value appears to mean
 *                 "no limit", since the equality check below can then never
 *                 trigger — confirm against callers)
 * @param result   output list the matched partitions are appended to
 * @return true if the generated plan fully covered the expression; false if it
 *         contained conditions the planner could not push down, in which case
 *         the caller must post-filter
 * @throws NoSuchObjectException if the table does not exist
 * @throws MetaException on HBase scan failures
 */
private boolean getPartitionsByExprInternal(String dbName, String tblName, ExpressionTree exprTree, short maxParts, List<Partition> result) throws MetaException, NoSuchObjectException {
    dbName = HiveStringUtils.normalizeIdentifier(dbName);
    tblName = HiveStringUtils.normalizeIdentifier(tblName);
    Table table = getTable(dbName, tblName);
    if (table == null) {
        throw new NoSuchObjectException("Unable to find table " + dbName + "." + tblName);
    }
    // general hbase filter plan from expression tree
    PlanResult planRes = HBaseFilterPlanUtil.getFilterPlan(exprTree, table.getPartitionKeys());
    if (LOG.isDebugEnabled()) {
        LOG.debug("Hbase Filter Plan generated : " + planRes.plan);
    }
    // results from scans need to be merged as there can be overlapping results between
    // the scans. Use a map of list of partition values to partition for this.
    Map<List<String>, Partition> mergedParts = new HashMap<List<String>, Partition>();
    for (ScanPlan splan : planRes.plan.getPlans()) {
        try {
            // -1 max results: each individual scan is unbounded; the cap is
            // enforced on the merged map below instead.
            List<Partition> parts = getHBase().scanPartitions(dbName, tblName, splan.getStartRowSuffix(dbName, tblName, table.getPartitionKeys()), splan.getEndRowSuffix(dbName, tblName, table.getPartitionKeys()), splan.getFilter(table.getPartitionKeys()), -1);
            boolean reachedMax = false;
            for (Partition part : parts) {
                // Keyed by partition values, so duplicates across scans collapse.
                mergedParts.put(part.getValues(), part);
                // Size grows by at most one per put, so equality is sufficient here.
                if (mergedParts.size() == maxParts) {
                    reachedMax = true;
                    break;
                }
            }
            if (reachedMax) {
                break;
            }
        } catch (IOException e) {
            LOG.error("Unable to get partitions", e);
            throw new MetaException("Error scanning partitions" + tableNameForErrorMsg(dbName, tblName) + ": " + e);
        }
    }
    for (Entry<List<String>, Partition> mp : mergedParts.entrySet()) {
        result.add(mp.getValue());
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Matched partitions " + result);
    }
    // Signals whether any part of the expression could not be handled by the
    // plan, i.e. whether a superset of the matching partitions is
    // being returned
    return !planRes.hasUnsupportedCondition;
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) PlanResult(org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.PlanResult) ScanPlan(org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) List(java.util.List) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Aggregations

NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)93 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)56 TException (org.apache.thrift.TException)42 Table (org.apache.hadoop.hive.metastore.api.Table)33 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)31 Partition (org.apache.hadoop.hive.metastore.api.Partition)31 ArrayList (java.util.ArrayList)29 IOException (java.io.IOException)23 AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException)23 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)22 UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException)15 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)13 Database (org.apache.hadoop.hive.metastore.api.Database)11 InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException)11 HCatException (org.apache.hive.hcatalog.common.HCatException)11 Path (org.apache.hadoop.fs.Path)10 SQLException (java.sql.SQLException)8 List (java.util.List)7 Query (javax.jdo.Query)7 ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException)7