Example 76 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestDatabases method testAlterDatabaseNoSuchDatabase.

@Test(expected = NoSuchObjectException.class)
public void testAlterDatabaseNoSuchDatabase() throws Exception {
    Database newDatabase = new DatabaseBuilder().setName("test_database_altered").build();
    client.alterDatabase("no_such_database", newDatabase);
}
Also used: DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
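
For contrast, a minimal positive-path sketch, assuming the same test fixture (the IMetaStoreClient field client seen above) and that the client exposes getDatabase(String); the database name and test method name are hypothetical:

@Test
public void alterExistingDatabaseSketch() throws Exception {
    // Create the database first, so the subsequent alter targets an existing object.
    Database original = new DatabaseBuilder().setName("test_database").build();
    client.createDatabase(original);
    // Rebuild under the same name and alter in place; no NoSuchObjectException expected.
    Database altered = new DatabaseBuilder().setName("test_database").build();
    client.alterDatabase("test_database", altered);
    assertEquals("test_database", client.getDatabase("test_database").getName());
}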

Example 77 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestGetPartitions method createDB.

private void createDB(String dbName) throws TException {
    Database db = new DatabaseBuilder().setName(dbName).build();
    client.createDatabase(db);
}
Also used: DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Database(org.apache.hadoop.hive.metastore.api.Database)
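
A matching teardown helper is the natural companion to createDB; a hedged sketch, assuming IMetaStoreClient's three-argument dropDatabase(name, deleteData, ignoreUnknownDb) overload:

private void dropDB(String dbName) throws TException {
    // deleteData = true removes the warehouse directory;
    // ignoreUnknownDb = true keeps teardown idempotent if createDB never ran.
    client.dropDatabase(dbName, true, true);
}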

Example 78 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestListPartitions method createDB.

private void createDB(String dbName) throws TException {
    Database db = new DatabaseBuilder().setName(dbName).build();
    client.createDatabase(db);
}
Also used: DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Database(org.apache.hadoop.hive.metastore.api.Database)

Example 79 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestPermsGrp method testCustomPerms.

public void testCustomPerms() throws Exception {
    String dbName = Warehouse.DEFAULT_DATABASE_NAME;
    String tblName = "simptbl";
    String typeName = "Person";
    try {
        // Let's first test for default permissions; this is the case when the user specified nothing.
        Table tbl = getTable(dbName, tblName, typeName);
        msc.createTable(tbl);
        Database db = Hive.get(hcatConf).getDatabase(dbName);
        Path dfsPath = clientWH.getDefaultTablePath(db, tblName);
        cleanupTbl(dbName, tblName, typeName);
        // Next, the user did specify perms. callHCatCli ends in System.exit; the test
        // harness converts that into ExitException, where status 0 means success.
        try {
            callHCatCli(new String[] { "-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx-wx---" });
            fail();
        } catch (Exception e) {
            assertTrue(e instanceof ExitException);
            assertEquals(0, ((ExitException) e).getStatus());
        }
        }
        dfsPath = clientWH.getDefaultTablePath(db, tblName);
        assertEquals(FsPermission.valueOf("drwx-wx---"), dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission());
        cleanupTbl(dbName, tblName, typeName);
        // The user specified perms in an invalid format.
        hcatConf.set(HCatConstants.HCAT_PERMS, "rwx");
        // Make sure create table fails.
        try {
            callHCatCli(new String[] { "-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx" });
            fail();
        } catch (Exception me) {
            assertTrue(me instanceof ExitException);
        }
        // No physical dir gets created.
        dfsPath = clientWH.getDefaultTablePath(db, tblName);
        try {
            dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
            fail();
        } catch (Exception fnfe) {
            assertTrue(fnfe instanceof FileNotFoundException);
        }
        // And no metadata gets created.
        try {
            msc.getTable(Warehouse.DEFAULT_DATABASE_NAME, tblName);
            fail();
        } catch (Exception e) {
            assertTrue(e instanceof NoSuchObjectException);
            assertEquals("default.simptbl table not found", e.getMessage());
        }
        // test for invalid group name
        hcatConf.set(HCatConstants.HCAT_PERMS, "drw-rw-rw-");
        hcatConf.set(HCatConstants.HCAT_GROUP, "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER");
        try {
            // create table must fail.
            callHCatCli(new String[] { "-e", "create table simptbl (name string) stored as RCFILE", "-p", "rw-rw-rw-", "-g", "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER" });
            fail();
        } catch (Exception me) {
            assertTrue(me instanceof SecurityException);
        }
        try {
            // no metadata should get created.
            msc.getTable(dbName, tblName);
            fail();
        } catch (Exception e) {
            assertTrue(e instanceof NoSuchObjectException);
            assertEquals("default.simptbl table not found", e.getMessage());
        }
        try {
            // neither dir should get created.
            dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
            fail();
        } catch (Exception e) {
            assertTrue(e instanceof FileNotFoundException);
        }
    } catch (Exception e) {
        LOG.error("testCustomPerms failed.", e);
        throw e;
    }
}
Also used: Path(org.apache.hadoop.fs.Path) Table(org.apache.hadoop.hive.metastore.api.Table) Database(org.apache.hadoop.hive.metastore.api.Database) FileNotFoundException(java.io.FileNotFoundException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) ExitException(org.apache.hive.hcatalog.ExitException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) TException(org.apache.thrift.TException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException)
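
The fail()/catch pattern above works because callHCatCli terminates through System.exit, which a test security manager converts into the catchable ExitException named in the import list (getStatus() implies it carries the exit code). A minimal sketch of that trick; the class name, wiring, and the ExitException(int) constructor are assumptions:

private static class NoExitSecurityManager extends SecurityManager {
    @Override
    public void checkPermission(java.security.Permission perm) {
        // Allow everything; we only intercept exits.
    }

    @Override
    public void checkExit(int status) {
        // Turn System.exit(status) into an exception the test can catch and inspect.
        throw new ExitException(status);
    }
}

// Installed once during test setup, e.g.:
// System.setSecurityManager(new NoExitSecurityManager());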

Example 80 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class HCatSemanticAnalyzer method authorizeDDLWork.

@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) throws HiveException {
    // DB operations; none of them are enforced by Hive right now.
    ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
    if (showDatabases != null) {
        authorize(HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(), HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges());
    }
    DropDatabaseDesc dropDb = work.getDropDatabaseDesc();
    if (dropDb != null) {
        Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName());
        if (db != null) {
            // if above returned a null, then the db does not exist - probably a
            // "drop database if exists" clause - don't try to authorize then.
            authorize(db, Privilege.DROP);
        }
    }
    DescDatabaseDesc descDb = work.getDescDatabaseDesc();
    if (descDb != null) {
        Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName());
        authorize(db, Privilege.SELECT);
    }
    SwitchDatabaseDesc switchDb = work.getSwitchDatabaseDesc();
    if (switchDb != null) {
        Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName());
        authorize(db, Privilege.SELECT);
    }
    ShowTablesDesc showTables = work.getShowTblsDesc();
    if (showTables != null) {
        String dbName = showTables.getDbName() == null ? SessionState.get().getCurrentDatabase() : showTables.getDbName();
        authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
    }
    ShowTableStatusDesc showTableStatus = work.getShowTblStatusDesc();
    if (showTableStatus != null) {
        String dbName = showTableStatus.getDbName() == null ? SessionState.get().getCurrentDatabase() : showTableStatus.getDbName();
        authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
    }
    // TODO: add alter database support in HCat
    // Table operations.
    DropTableDesc dropTable = work.getDropTblDesc();
    if (dropTable != null) {
        if (dropTable.getPartSpecs() == null) {
        // drop table is already enforced by Hive. We only check for table level location even if the
        // table is partitioned.
        } else {
            // this is actually an ALTER TABLE DROP PARTITION statement
            for (DropTableDesc.PartSpec partSpec : dropTable.getPartSpecs()) {
                // partitions are not added as write entries in drop partitions in Hive
                Table table = hive.getTable(SessionState.get().getCurrentDatabase(), dropTable.getTableName());
                List<Partition> partitions = null;
                try {
                    partitions = hive.getPartitionsByFilter(table, partSpec.getPartSpec().getExprString());
                } catch (Exception e) {
                    throw new HiveException(e);
                }
                for (Partition part : partitions) {
                    authorize(part, Privilege.DROP);
                }
            }
        }
    }
    AlterTableDesc alterTable = work.getAlterTblDesc();
    if (alterTable != null) {
        Table table = hive.getTable(SessionState.get().getCurrentDatabase(), Utilities.getDbTableName(alterTable.getOldName())[1], false);
        Partition part = null;
        if (alterTable.getPartSpec() != null) {
            part = hive.getPartition(table, alterTable.getPartSpec(), false);
        }
        String newLocation = alterTable.getNewLocation();
        /* HCat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements
         * for the old table/partition location and the new location.
         */
        if (alterTable.getOp() == AlterTableDesc.AlterTableTypes.ALTERLOCATION) {
            if (part != null) {
                // authorize for the old location...
                authorize(part, Privilege.ALTER_DATA);
                // ...and for the new location
                part.setLocation(newLocation);
                authorize(part, Privilege.ALTER_DATA);
            } else {
                // authorize for the old location...
                authorize(table, Privilege.ALTER_DATA);
                // ...and for the new location
                table.getTTable().getSd().setLocation(newLocation);
                authorize(table, Privilege.ALTER_DATA);
            }
        }
    // other alter operations are already supported by Hive
    }
    // we should be careful when authorizing table based on just the
    // table name. If columns have separate authorization domain, it
    // must be honored
    DescTableDesc descTable = work.getDescTblDesc();
    if (descTable != null) {
        String tableName = extractTableName(descTable.getTableName());
        authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
    }
    ShowPartitionsDesc showParts = work.getShowPartsDesc();
    if (showParts != null) {
        String tableName = extractTableName(showParts.getTabName());
        authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
    }
}
Also used: DropDatabaseDesc(org.apache.hadoop.hive.ql.plan.DropDatabaseDesc) Partition(org.apache.hadoop.hive.ql.metadata.Partition) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DescTableDesc(org.apache.hadoop.hive.ql.plan.DescTableDesc) DropTableDesc(org.apache.hadoop.hive.ql.plan.DropTableDesc) SwitchDatabaseDesc(org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc) ShowDatabasesDesc(org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc) HCatException(org.apache.hive.hcatalog.common.HCatException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) ShowPartitionsDesc(org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc) ShowTablesDesc(org.apache.hadoop.hive.ql.plan.ShowTablesDesc) ShowTableStatusDesc(org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc) Database(org.apache.hadoop.hive.metastore.api.Database) DescDatabaseDesc(org.apache.hadoop.hive.ql.plan.DescDatabaseDesc)
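
The show-tables and show-table-status branches above repeat the same null fallback to the session's current database. A hypothetical helper capturing that pattern (not part of HCatSemanticAnalyzer):

private static String effectiveDbName(String dbName) {
    // DDL descs leave the db name null when the statement did not qualify it;
    // fall back to the session's current database in that case.
    return dbName == null ? SessionState.get().getCurrentDatabase() : dbName;
}

// e.g. authorize(cntxt.getHive().getDatabase(effectiveDbName(showTables.getDbName())), Privilege.SELECT);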

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database): 236
Test (org.junit.Test): 107
Table (org.apache.hadoop.hive.metastore.api.Table): 70
ArrayList (java.util.ArrayList): 51
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 39
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 39
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 37
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 36
Partition (org.apache.hadoop.hive.metastore.api.Partition): 35
Path (org.apache.hadoop.fs.Path): 34
IOException (java.io.IOException): 29
HashMap (java.util.HashMap): 27
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 26
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 24
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 23
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 22
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 22
TException (org.apache.thrift.TException): 21
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 20
FileSystem (org.apache.hadoop.fs.FileSystem): 17