
Example 11 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

From the class ObjectStore, method convertToMFunction.

private MFunction convertToMFunction(Function func) throws InvalidObjectException {
    if (func == null) {
        return null;
    }
    MDatabase mdb = null;
    try {
        mdb = getMDatabase(func.getDbName());
    } catch (NoSuchObjectException e) {
        LOG.error(StringUtils.stringifyException(e));
        throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist.");
    }
    MFunction mfunc = new MFunction(func.getFunctionName(), mdb, func.getClassName(),
            func.getOwnerName(), func.getOwnerType().name(), func.getCreateTime(),
            func.getFunctionType().getValue(), convertToMResourceUriList(func.getResourceUris()));
    return mfunc;
}
Also used : MDatabase(org.apache.hadoop.hive.metastore.model.MDatabase) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MFunction(org.apache.hadoop.hive.metastore.model.MFunction)
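
For context, here is a minimal, hypothetical caller (not part of the Hive sources) showing how the NoSuchObjectException caught inside convertToMFunction surfaces to users: a missing database is reported as an InvalidObjectException. It assumes ObjectStore.createFunction(Function) as the entry point that runs this conversion; the wrapper method and its logging are illustrative only.

import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class FunctionConversionSketch {

    // Illustrative wrapper: registers a function and reports the "database missing" case.
    static void registerFunction(ObjectStore store, Function func) throws MetaException {
        try {
            store.createFunction(func);
        } catch (InvalidObjectException e) {
            // Reached when func.getDbName() does not resolve to an existing database.
            System.err.println("Cannot register " + func.getFunctionName() + ": " + e.getMessage());
        }
    }
}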

Example 12 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

From the class ObjectStore, method getMPartitionColumnStatistics.

private List<MPartitionColumnStatistics> getMPartitionColumnStatistics(Table table, List<String> partNames, List<String> colNames, QueryWrapper queryWrapper) throws NoSuchObjectException, MetaException {
    boolean committed = false;
    try {
        openTransaction();
        // We are not going to verify SD for each partition. Just verify for the table.
        validateTableCols(table, colNames);
        Query query = queryWrapper.query = pm.newQuery(MPartitionColumnStatistics.class);
        String paramStr = "java.lang.String t1, java.lang.String t2";
        String filter = "tableName == t1 && dbName == t2 && (";
        Object[] params = new Object[colNames.size() + partNames.size() + 2];
        int i = 0;
        params[i++] = table.getTableName();
        params[i++] = table.getDbName();
        int firstI = i;
        for (String s : partNames) {
            filter += ((i == firstI) ? "" : " || ") + "partitionName == p" + i;
            paramStr += ", java.lang.String p" + i;
            params[i++] = s;
        }
        filter += ") && (";
        firstI = i;
        for (String s : colNames) {
            filter += ((i == firstI) ? "" : " || ") + "colName == c" + i;
            paramStr += ", java.lang.String c" + i;
            params[i++] = s;
        }
        filter += ")";
        query.setFilter(filter);
        query.declareParameters(paramStr);
        query.setOrdering("partitionName ascending");
        @SuppressWarnings("unchecked") List<MPartitionColumnStatistics> result = (List<MPartitionColumnStatistics>) query.executeWithArray(params);
        pm.retrieveAll(result);
        committed = commitTransaction();
        return result;
    } catch (Exception ex) {
        LOG.error("Error retrieving statistics via jdo", ex);
        if (ex instanceof MetaException) {
            throw (MetaException) ex;
        }
        throw new MetaException(ex.getMessage());
    } finally {
        if (!committed) {
            rollbackTransaction();
            return Lists.newArrayList();
        }
    }
}
Also used : Query(javax.jdo.Query) MStringList(org.apache.hadoop.hive.metastore.model.MStringList) ArrayList(java.util.ArrayList) List(java.util.List) LinkedList(java.util.LinkedList) MPartitionColumnStatistics(org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) JDOException(javax.jdo.JDOException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) MissingTableException(org.datanucleus.store.rdbms.exceptions.MissingTableException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) JDOCanRetryException(javax.jdo.JDOCanRetryException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) JDODataStoreException(javax.jdo.JDODataStoreException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
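
The dynamic JDOQL filter construction above is the trickiest part of this method, so here is a small, self-contained sketch (plain Java, not Hive code) of the same pattern: each value gets a numbered named parameter appended to both the filter expression and the parameter declaration string, and the positional params array lines up with those numbers.

import java.util.Arrays;
import java.util.List;

public class JdoqlFilterSketch {
    public static void main(String[] args) {
        List<String> partNames = Arrays.asList("ds=2017-01-01", "ds=2017-01-02");
        StringBuilder filter = new StringBuilder("tableName == t1 && dbName == t2 && (");
        StringBuilder paramStr = new StringBuilder("java.lang.String t1, java.lang.String t2");
        // t1 and t2 already occupy parameter slots 0 and 1, so value parameters start at 2.
        int i = 2;
        int firstI = i;
        for (String ignored : partNames) {
            filter.append(i == firstI ? "" : " || ").append("partitionName == p").append(i);
            paramStr.append(", java.lang.String p").append(i);
            i++;
        }
        filter.append(")");
        // Prints: tableName == t1 && dbName == t2 && (partitionName == p2 || partitionName == p3)
        System.out.println(filter);
        // Prints: java.lang.String t1, java.lang.String t2, java.lang.String p2, java.lang.String p3
        System.out.println(paramStr);
    }
}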

Example 13 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

From the class ObjectStore, method grantRole.

@Override
public boolean grantRole(Role role, String userName, PrincipalType principalType, String grantor, PrincipalType grantorType, boolean grantOption) throws MetaException, NoSuchObjectException, InvalidObjectException {
    boolean success = false;
    boolean commited = false;
    try {
        openTransaction();
        MRoleMap roleMap = null;
        try {
            roleMap = this.getMSecurityUserRoleMap(userName, principalType, role.getRoleName());
        } catch (Exception e) {
            // Ignored: no existing role map for this principal means the grant is new;
            // a non-null roleMap below signals a duplicate grant and is rejected.
        }
        if (roleMap != null) {
            throw new InvalidObjectException("Principal " + userName + " already has the role " + role.getRoleName());
        }
        if (principalType == PrincipalType.ROLE) {
            validateRole(userName);
        }
        MRole mRole = getMRole(role.getRoleName());
        long now = System.currentTimeMillis() / 1000;
        MRoleMap roleMember = new MRoleMap(userName, principalType.toString(), mRole, (int) now, grantor, grantorType.toString(), grantOption);
        pm.makePersistent(roleMember);
        commited = commitTransaction();
        success = true;
    } finally {
        if (!commited) {
            rollbackTransaction();
        }
    }
    return success;
}
Also used : MRole(org.apache.hadoop.hive.metastore.model.MRole) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MRoleMap(org.apache.hadoop.hive.metastore.model.MRoleMap) JDOException(javax.jdo.JDOException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) MissingTableException(org.datanucleus.store.rdbms.exceptions.MissingTableException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) JDOCanRetryException(javax.jdo.JDOCanRetryException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) JDODataStoreException(javax.jdo.JDODataStoreException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException)
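
A hypothetical caller for this method might look like the sketch below (not Hive code): an already-granted role surfaces as InvalidObjectException, while a missing role comes back as NoSuchObjectException from getRole. The wrapper name, grantor values, and error handling are assumptions for illustration.

import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.Role;

public class GrantRoleSketch {

    // Illustrative helper: grant roleName to user unless the grant already exists.
    static void grantIfAbsent(ObjectStore store, String roleName, String user) throws MetaException {
        try {
            // Throws NoSuchObjectException if the role itself does not exist.
            Role role = store.getRole(roleName);
            store.grantRole(role, user, PrincipalType.USER, "admin", PrincipalType.USER, true);
        } catch (InvalidObjectException e) {
            // Thrown by grantRole above when the principal already holds the role.
            System.out.println(user + " already has role " + roleName);
        } catch (NoSuchObjectException e) {
            System.err.println("Role " + roleName + " does not exist: " + e.getMessage());
        }
    }
}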

Example 14 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

From the class TestObjectStore, method dropAllStoreObjects.

public static void dropAllStoreObjects(RawStore store) throws MetaException, InvalidObjectException, InvalidInputException {
    try {
        Deadline.registerIfNot(100000);
        List<Function> funcs = store.getAllFunctions();
        for (Function func : funcs) {
            store.dropFunction(func.getDbName(), func.getFunctionName());
        }
        List<String> dbs = store.getAllDatabases();
        for (int i = 0; i < dbs.size(); i++) {
            String db = dbs.get(i);
            List<String> tbls = store.getAllTables(db);
            for (String tbl : tbls) {
                List<Index> indexes = store.getIndexes(db, tbl, 100);
                for (Index index : indexes) {
                    store.dropIndex(db, tbl, index.getIndexName());
                }
            }
            for (String tbl : tbls) {
                Deadline.startTimer("getPartition");
                List<Partition> parts = store.getPartitions(db, tbl, 100);
                for (Partition part : parts) {
                    store.dropPartition(db, tbl, part.getValues());
                }
                // Find any constraints and drop them
                Set<String> constraints = new HashSet<>();
                List<SQLPrimaryKey> pk = store.getPrimaryKeys(db, tbl);
                if (pk != null) {
                    for (SQLPrimaryKey pkcol : pk) {
                        constraints.add(pkcol.getPk_name());
                    }
                }
                List<SQLForeignKey> fks = store.getForeignKeys(null, null, db, tbl);
                if (fks != null) {
                    for (SQLForeignKey fkcol : fks) {
                        constraints.add(fkcol.getFk_name());
                    }
                }
                for (String constraint : constraints) {
                    store.dropConstraint(db, tbl, constraint);
                }
                store.dropTable(db, tbl);
            }
            store.dropDatabase(db);
        }
        List<String> roles = store.listRoleNames();
        for (String role : roles) {
            store.removeRole(role);
        }
    } catch (NoSuchObjectException e) {
        // Ignored: cleanup is best-effort, so objects that were never created or were
        // already removed do not fail the teardown.
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Index(org.apache.hadoop.hive.metastore.api.Index) Function(org.apache.hadoop.hive.metastore.api.Function) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) HashSet(java.util.HashSet)
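
A hedged sketch (not from the Hive sources) of how a cleanup helper like this is typically wired into a test class follows; the class and setup details are illustrative, and it assumes the sketch lives in the same package as TestObjectStore so the static helper is reachable.

package org.apache.hadoop.hive.metastore;

import org.junit.After;

public class ObjectStoreCleanupSketch {

    // Assumed to be created and configured in a @Before method, omitted here.
    private ObjectStore objectStore;

    @After
    public void tearDown() throws Exception {
        // dropAllStoreObjects swallows NoSuchObjectException internally, so a partially
        // populated or already-clean store does not fail the teardown.
        TestObjectStore.dropAllStoreObjects(objectStore);
    }
}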

Example 15 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

From the class TestObjectStore, method testDirectSqlErrorMetrics.

@Test
public void testDirectSqlErrorMetrics() throws Exception {
    HiveConf conf = new HiveConf();
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
    conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
    MetricsFactory.init(conf);
    CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
    objectStore.new GetDbHelper("foo", null, true, true) {

        @Override
        protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
            return null;
        }

        @Override
        protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
            return null;
        }
    }.run(false);
    String json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DIRECTSQL_ERRORS, "");
    objectStore.new GetDbHelper("foo", null, true, true) {

        @Override
        protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
            throw new RuntimeException();
        }

        @Override
        protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
            return null;
        }
    }.run(false);
    json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DIRECTSQL_ERRORS, 1);
}
Also used : CodahaleMetrics(org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics) Database(org.apache.hadoop.hive.metastore.api.Database) HiveConf(org.apache.hadoop.hive.conf.HiveConf) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)
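
The two anonymous GetDbHelper subclasses above exercise ObjectStore's try-direct-SQL-then-fall-back-to-JDO machinery, and the test asserts that a RuntimeException from the SQL path increments the DIRECTSQL_ERRORS counter. The purely illustrative class below (none of these names exist in Hive) sketches the shape of that pattern under those assumptions.

import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;

public class DirectSqlFallbackSketch<T> {

    // Stands in for the DIRECTSQL_ERRORS metric the test checks via verifyMetricsJson.
    private static final AtomicLong errorCounter = new AtomicLong();

    public T run(Supplier<T> sqlResult, Supplier<T> jdoResult) {
        try {
            // Fast path, analogous to getSqlResult in the test above.
            return sqlResult.get();
        } catch (RuntimeException e) {
            // Count the failure, then fall back to the JDO path (getJdoResult).
            errorCounter.incrementAndGet();
            return jdoResult.get();
        }
    }

    public static long errors() {
        return errorCounter.get();
    }
}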

Aggregations

NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 93
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 56
TException (org.apache.thrift.TException): 42
Table (org.apache.hadoop.hive.metastore.api.Table): 33
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 31
Partition (org.apache.hadoop.hive.metastore.api.Partition): 31
ArrayList (java.util.ArrayList): 29
IOException (java.io.IOException): 23
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 23
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 22
UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException): 15
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 13
Database (org.apache.hadoop.hive.metastore.api.Database): 11
InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException): 11
HCatException (org.apache.hive.hcatalog.common.HCatException): 11
Path (org.apache.hadoop.fs.Path): 10
SQLException (java.sql.SQLException): 8
List (java.util.List): 7
Query (javax.jdo.Query): 7
ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException): 7