Search in sources :

Example 96 with NoSuchObjectException

use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The method dumpConstraintMetadata of the class ReplDumpTask.

/**
 * Dumps the table-level constraint metadata for {@code dbName.tblName} under
 * {@code dbRoot} as part of a replication bootstrap dump. Primary-key, unique
 * and not-null constraints are serialized into one "common" constraints file,
 * while foreign-key constraints go into a separate file; a file is written only
 * when the corresponding constraint list is non-empty.
 *
 * @param dbName database of the table whose constraints are dumped
 * @param tblName table whose constraints are dumped
 * @param dbRoot dump root directory of the database; the constraints directory
 *               is created beneath it
 * @throws Exception on any failure other than the table disappearing mid-dump
 */
private void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot) throws Exception {
    try {
        Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME);
        // PK/UK/NN constraints share one file; FK constraints get their own file.
        Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
        Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
        Hive db = getHive();
        List<SQLPrimaryKey> pks = db.getPrimaryKeyList(dbName, tblName);
        List<SQLForeignKey> fks = db.getForeignKeyList(dbName, tblName);
        List<SQLUniqueConstraint> uks = db.getUniqueConstraintList(dbName, tblName);
        List<SQLNotNullConstraint> nns = db.getNotNullConstraintList(dbName, tblName);
        if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) {
            try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
        if (fks != null && !fks.isEmpty()) {
            try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
    } catch (NoSuchObjectException e) {
        // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
        // Log at debug level with context and the full exception (getMessage() alone loses the
        // stack trace), then skip this table.
        LOG.debug("Table " + dbName + "." + tblName + " not found while dumping constraints", e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) JsonWriter(org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) Hive(org.apache.hadoop.hive.ql.metadata.Hive) ConstraintsSerializer(org.apache.hadoop.hive.ql.parse.repl.dump.io.ConstraintsSerializer) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)

Example 97 with NoSuchObjectException

use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The method dropPartitions of the class DDLTask.

/**
 * Drops the partitions of {@code tbl} described by {@code dropTbl}.
 *
 * In replication scope, each partition spec is resolved via
 * {@code getPartitionsByExpr} and only partitions that the replication spec
 * allows replacing into are dropped; a missing object simply means there is
 * nothing to drop. Outside replication scope, all matching partitions are
 * dropped in one metastore call (data deleted, ifExists semantics, purge per
 * the descriptor) and each drop is reported and registered as a write entity.
 *
 * @param db      Hive metadata client
 * @param tbl     table whose partitions are being dropped
 * @param dropTbl descriptor carrying the partition specs, replication spec and purge flag
 * @throws HiveException if partition resolution or the drop itself fails
 */
private void dropPartitions(Hive db, Table tbl, DropTableDesc dropTbl) throws HiveException {
    ReplicationSpec replicationSpec = dropTbl.getReplicationSpec();
    if (replicationSpec.isInReplicationScope()) {
        // parameter key values.
        for (DropTableDesc.PartSpec partSpec : dropTbl.getPartSpecs()) {
            List<Partition> partitions = new ArrayList<>();
            try {
                db.getPartitionsByExpr(tbl, partSpec.getPartSpec(), conf, partitions);
                // Only drop partitions the replication event is allowed to replace.
                for (Partition p : Iterables.filter(partitions, replicationSpec.allowEventReplacementInto())) {
                    db.dropPartition(tbl.getDbName(), tbl.getTableName(), p.getValues(), true);
                }
            } catch (NoSuchObjectException ignored) {
                // ignore NSOE because that means there's nothing to drop.
            } catch (Exception e) {
                throw new HiveException(e.getMessage(), e);
            }
        }
        return;
    }
    // ifExists is currently verified in DDLSemanticAnalyzer
    List<Partition> droppedParts = db.dropPartitions(dropTbl.getTableName(), dropTbl.getPartSpecs(), PartitionDropOptions.instance().deleteData(true).ifExists(true).purgeData(dropTbl.getIfPurge()));
    for (Partition partition : droppedParts) {
        console.printInfo("Dropped the partition " + partition.getName());
        // We have already locked the table, don't lock the partitions.
        addIfAbsentByName(new WriteEntity(partition, WriteEntity.WriteType.DDL_NO_LOCK));
    }
}
Also used : Partition(org.apache.hadoop.hive.ql.metadata.Partition) AlterTableExchangePartition(org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition) ReplicationSpec(org.apache.hadoop.hive.ql.parse.ReplicationSpec) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DropTableDesc(org.apache.hadoop.hive.ql.plan.DropTableDesc) ArrayList(java.util.ArrayList) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) URISyntaxException(java.net.URISyntaxException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) SQLException(java.sql.SQLException) FileNotFoundException(java.io.FileNotFoundException) HiveAuthzPluginException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException)

Example 98 with NoSuchObjectException

use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The method testDatabaseLocation of the class TestHiveMetaStore.

@Test
public void testDatabaseLocation() throws Throwable {
    try {
        // Start from a clean slate in case a previous run left the database behind.
        silentDropDatabase(TEST_DB1_NAME);

        // Create a database with an explicit location and verify the round trip.
        String dbLocation = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/_testDB_create_";
        Database db = new Database();
        db.setName(TEST_DB1_NAME);
        db.setLocationUri(dbLocation);
        client.createDatabase(db);

        db = client.getDatabase(TEST_DB1_NAME);
        assertEquals("name of returned db is different from that of inserted db", TEST_DB1_NAME, db.getName());
        assertEquals("location of the returned db is different from that of inserted db", warehouse.getDnsPath(new Path(dbLocation)).toString(), db.getLocationUri());

        // Drop it and confirm it is really gone.
        client.dropDatabase(TEST_DB1_NAME);
        silentDropDatabase(TEST_DB1_NAME);
        boolean missing = false;
        try {
            client.getDatabase(TEST_DB1_NAME);
        } catch (NoSuchObjectException expected) {
            missing = true;
        }
        assertTrue("Database " + TEST_DB1_NAME + " exists ", missing);

        // Point the database location at a plain file; creation must be rejected.
        dbLocation = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/_testDB_file_";
        Path filePath = new Path(dbLocation);
        FileSystem fs = FileSystem.get(filePath.toUri(), conf);
        fs.createNewFile(filePath);
        fs.deleteOnExit(filePath);

        db = new Database();
        db.setName(TEST_DB1_NAME);
        db.setLocationUri(dbLocation);
        boolean creationRejected = false;
        try {
            client.createDatabase(db);
        } catch (MetaException cantCreateDB) {
            System.err.println(cantCreateDB.getMessage());
            creationRejected = true;
        }
        assertTrue("Database creation succeeded even location exists and is a file", creationRejected);

        // The failed create must not leave a database behind.
        missing = false;
        try {
            client.getDatabase(TEST_DB1_NAME);
        } catch (NoSuchObjectException expected) {
            missing = true;
        }
        assertTrue("Database " + TEST_DB1_NAME + " exists when location is specified and is a file", missing);
    } catch (Throwable e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testDatabaseLocation() failed.");
        throw e;
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)

Example 99 with NoSuchObjectException

use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The method testSimpleTypeApi of the class TestHiveMetaStore.

@Test
public void testSimpleTypeApi() throws Exception {
    try {
        // Make sure the type does not linger from an earlier run.
        client.dropType(ColumnType.INT_TYPE_NAME);

        // Create the type and read it back.
        Type intType = new Type();
        intType.setName(ColumnType.INT_TYPE_NAME);
        assertTrue("Unable to create type", client.createType(intType));

        Type fetched = client.getType(ColumnType.INT_TYPE_NAME);
        assertNotNull(fetched);
        assertEquals(intType.getName(), fetched.getName());

        // Drop it again; a subsequent lookup must fail.
        assertTrue("unable to drop type integer", client.dropType(ColumnType.INT_TYPE_NAME));

        boolean thrown = false;
        try {
            client.getType(ColumnType.INT_TYPE_NAME);
        } catch (NoSuchObjectException expected) {
            thrown = true;
        }
        assertTrue("Expected NoSuchObjectException", thrown);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testSimpleTypeApi() failed.");
        throw e;
    }
}
Also used : Type(org.apache.hadoop.hive.metastore.api.Type) ResourceType(org.apache.hadoop.hive.metastore.api.ResourceType) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test)

Example 100 with NoSuchObjectException

use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The method testSimpleFunction of the class TestHiveMetaStore.

/**
 * Exercises the basic metastore function API: create several functions, read
 * them back via getFunction/getAllFunctions/getFunctions (including pattern
 * and nonexistent-name lookups), drop them all, and verify none remain.
 * The database is dropped in the finally block regardless of outcome.
 */
@Test
public void testSimpleFunction() throws Exception {
    String dbName = "test_db";
    String funcName = "test_func";
    String className = "org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper";
    String owner = "test_owner";
    final int N_FUNCTIONS = 5;
    PrincipalType ownerType = PrincipalType.USER;
    int createTime = (int) (System.currentTimeMillis() / 1000);
    FunctionType funcType = FunctionType.JAVA;
    try {
        // Clean slate: remove the test db and every existing function.
        cleanUp(dbName, null, null);
        for (Function f : client.getAllFunctions().getFunctions()) {
            client.dropFunction(f.getDbName(), f.getFunctionName());
        }
        createDb(dbName);
        for (int i = 0; i < N_FUNCTIONS; i++) {
            createFunction(dbName, funcName + "_" + i, className, owner, ownerType, createTime, funcType, null);
        }
        // Try the different getters
        // getFunction()
        Function func = client.getFunction(dbName, funcName + "_0");
        assertEquals("function db name", dbName, func.getDbName());
        assertEquals("function name", funcName + "_0", func.getFunctionName());
        assertEquals("function class name", className, func.getClassName());
        assertEquals("function owner name", owner, func.getOwnerName());
        assertEquals("function owner type", PrincipalType.USER, func.getOwnerType());
        assertEquals("function type", funcType, func.getFunctionType());
        List<ResourceUri> resources = func.getResourceUris();
        assertTrue("function resources", resources == null || resources.size() == 0);
        boolean gotException = false;
        try {
            func = client.getFunction(dbName, "nonexistent_func");
        } catch (NoSuchObjectException e) {
            // expected failure
            gotException = true;
        }
        assertEquals(true, gotException);
        // getAllFunctions()
        GetAllFunctionsResponse response = client.getAllFunctions();
        List<Function> allFunctions = response.getFunctions();
        assertEquals(N_FUNCTIONS, allFunctions.size());
        assertEquals(funcName + "_3", allFunctions.get(3).getFunctionName());
        // getFunctions()
        List<String> funcs = client.getFunctions(dbName, "*_func_*");
        assertEquals(N_FUNCTIONS, funcs.size());
        assertEquals(funcName + "_0", funcs.get(0));
        funcs = client.getFunctions(dbName, "nonexistent_func");
        assertEquals(0, funcs.size());
        // dropFunction()
        for (int i = 0; i < N_FUNCTIONS; i++) {
            client.dropFunction(dbName, funcName + "_" + i);
        }
        // Confirm that the function is now gone
        funcs = client.getFunctions(dbName, funcName);
        assertEquals(0, funcs.size());
        response = client.getAllFunctions();
        allFunctions = response.getFunctions();
        assertEquals(0, allFunctions.size());
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        // Fixed copy-paste bug: the message previously named testConcurrentMetastores().
        System.err.println("testSimpleFunction() failed.");
        throw e;
    } finally {
        silentDropDatabase(dbName);
    }
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Function(org.apache.hadoop.hive.metastore.api.Function) GetAllFunctionsResponse(org.apache.hadoop.hive.metastore.api.GetAllFunctionsResponse) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) Test(org.junit.Test)

Aggregations

NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)144 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)74 TException (org.apache.thrift.TException)55 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)47 Table (org.apache.hadoop.hive.metastore.api.Table)45 Partition (org.apache.hadoop.hive.metastore.api.Partition)44 ArrayList (java.util.ArrayList)42 IOException (java.io.IOException)39 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)36 AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException)30 Test (org.junit.Test)24 Database (org.apache.hadoop.hive.metastore.api.Database)22 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)21 InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException)20 UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException)20 Path (org.apache.hadoop.fs.Path)19 Query (javax.jdo.Query)17 SQLException (java.sql.SQLException)16 ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException)13 InvalidMetaException (com.netflix.metacat.common.server.connectors.exception.InvalidMetaException)13