Search in sources :

Example 81 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class TestHCatOutputFormat, method initTable.

/**
 * Drops any leftover test table/database, then recreates the test database and a
 * partitioned RCFile table ({@code dbName.tblName}) with one partition column
 * ("colname") and one data column ("data_column"), and finally pre-creates the
 * partition directory "colname=p1" on the table's filesystem.
 *
 * @throws Exception if database/table creation or filesystem access fails
 */
private void initTable() throws Exception {
    // Best-effort cleanup: the table/database may not exist on a fresh run,
    // so failures here are expected and intentionally ignored.
    try {
        client.dropTable(dbName, tblName);
    } catch (Exception ignored) {
        // table did not exist; nothing to clean up
    }
    try {
        client.dropDatabase(dbName);
    } catch (Exception ignored) {
        // database did not exist; nothing to clean up
    }
    client.createDatabase(new Database(dbName, "", null, null));
    // The metastore must have assigned a default location URI to the new database.
    assertNotNull(client.getDatabase(dbName).getLocationUri());
    List<FieldSchema> fields = new ArrayList<FieldSchema>();
    fields.add(new FieldSchema("colname", serdeConstants.STRING_TYPE_NAME, ""));
    Table tbl = new Table();
    tbl.setDbName(dbName);
    tbl.setTableName(tblName);
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(new FieldSchema("data_column", serdeConstants.STRING_TYPE_NAME, "")));
    tbl.setSd(sd);
    sd.setInputFormat(RCFileInputFormat.class.getName());
    sd.setOutputFormat(RCFileOutputFormat.class.getName());
    sd.setParameters(new HashMap<String, String>());
    sd.getParameters().put("test_param_1", "Use this for comments etc");
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tbl.getTableName());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
    // "colname" is the partition key, not a regular column of the storage descriptor.
    tbl.setPartitionKeys(fields);
    Map<String, String> tableParams = new HashMap<String, String>();
    tableParams.put("hcat.testarg", "testArgValue");
    tbl.setParameters(tableParams);
    client.createTable(tbl);
    // Pre-create the "colname=p1" partition directory so tests can write into it.
    Path tblPath = new Path(client.getTable(dbName, tblName).getSd().getLocation());
    assertTrue(tblPath.getFileSystem(hiveConf).mkdirs(new Path(tblPath, "colname=p1")));
}
Also used : Path(org.apache.hadoop.fs.Path) RCFileOutputFormat(org.apache.hadoop.hive.ql.io.RCFileOutputFormat) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) RCFileInputFormat(org.apache.hadoop.hive.ql.io.RCFileInputFormat) Database(org.apache.hadoop.hive.metastore.api.Database)

Example 82 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class ObjectStore, method getJDODatabase.

/**
 * Loads the JDO-backed database object with the given name and converts it into
 * a Thrift {@link Database}.
 *
 * @param name name of the database to look up
 * @return the Thrift Database populated from the stored MDatabase
 * @throws NoSuchObjectException if no database with that name exists
 *         (presumably propagated from getMDatabase — confirm against its contract)
 */
public Database getJDODatabase(String name) throws NoSuchObjectException {
    MDatabase mdb = null;
    boolean commited = false;
    try {
        openTransaction();
        mdb = getMDatabase(name);
        commited = commitTransaction();
    } finally {
        // If the commit did not happen (lookup threw or commit failed),
        // roll the transaction back before leaving.
        if (!commited) {
            rollbackTransaction();
        }
    }
    // Convert the JDO model object into the Thrift API object field by field.
    Database db = new Database();
    db.setName(mdb.getName());
    db.setDescription(mdb.getDescription());
    db.setLocationUri(mdb.getLocationUri());
    db.setParameters(convertMap(mdb.getParameters()));
    db.setOwnerName(mdb.getOwnerName());
    // A blank/empty stored owner type is treated as "no owner type" (null).
    String type = org.apache.commons.lang.StringUtils.defaultIfBlank(mdb.getOwnerType(), null);
    PrincipalType principalType = (type == null) ? null : PrincipalType.valueOf(type);
    db.setOwnerType(principalType);
    return db;
}
Also used : MDatabase(org.apache.hadoop.hive.metastore.model.MDatabase) MDatabase(org.apache.hadoop.hive.metastore.model.MDatabase) Database(org.apache.hadoop.hive.metastore.api.Database) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType)

Example 83 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class SharedCache, method refreshDatabasesInCache.

/**
 * Replaces the entire contents of the database cache with the given list.
 * If the cache has been marked dirty since the list was fetched, the refresh
 * is skipped (the incoming snapshot is stale) and the dirty flag is cleared.
 *
 * @param databases the databases to populate the cache with
 */
public void refreshDatabasesInCache(List<Database> databases) {
    // Acquire the lock *before* the try block: if lock() were inside and failed,
    // the finally block would call unlock() on a lock this thread never acquired.
    cacheLock.writeLock().lock();
    try {
        if (isDatabaseCacheDirty.compareAndSet(true, false)) {
            LOG.debug("Skipping database cache update; the database list we have is dirty.");
            return;
        }
        databaseCache.clear();
        for (Database db : databases) {
            addDatabaseToCache(db);
        }
    } finally {
        cacheLock.writeLock().unlock();
    }
}
Also used : Database(org.apache.hadoop.hive.metastore.api.Database)

Example 84 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class TestHiveMetaStore, method testTableDatabase.

/**
 * Verifies that a table created in a database with an explicit location URI is
 * placed directly under that database location (the table path's parent must
 * equal the database's location URI).
 */
@Test
public void testTableDatabase() throws Exception {
    String dbName = "testDb";
    String tblName_1 = "testTbl_1";
    try {
        silentDropDatabase(dbName);
        Database db = new Database();
        db.setName(dbName);
        // Give the database a non-default location so the containment check is meaningful.
        String dbLocation = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "_testDB_table_create_";
        db.setLocationUri(dbLocation);
        client.createDatabase(db);
        // Re-read so we compare against the location the metastore actually recorded.
        db = client.getDatabase(dbName);
        Table tbl = new TableBuilder().setDbName(dbName).setTableName(tblName_1).addCol("name", ColumnType.STRING_TYPE_NAME).addCol("income", ColumnType.INT_TYPE_NAME).build();
        client.createTable(tbl);
        tbl = client.getTable(dbName, tblName_1);
        Path path = new Path(tbl.getSd().getLocation());
        System.err.println("Table's location " + path + ", Database's location " + db.getLocationUri());
        assertEquals("Table location is not a subset of the database location", path.getParent().toString(), db.getLocationUri());
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testTableDatabase() failed.");
        throw e;
    } finally {
        // Always clean up the test database, pass or fail.
        silentDropDatabase(dbName);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Table(org.apache.hadoop.hive.metastore.api.Table) Database(org.apache.hadoop.hive.metastore.api.Database) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test)

Example 85 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class TestHiveMetaStore, method testRenamePartition.

/**
 * Verifies partition rename round-trips: a partition is renamed to temporary
 * values and back, and after each rename the old name must be gone
 * (NoSuchObjectException) while the new name carries over the partition's
 * parameters, serde parameters, bucket count, and a location derived from the
 * new partition values.
 */
@Test
public void testRenamePartition() throws Throwable {
    try {
        String dbName = "compdb1";
        String tblName = "comptbl1";
        List<String> vals = new ArrayList<>(2);
        vals.add("2011-07-11");
        vals.add("8");
        String part_path = "/ds=2011-07-11/hr=8";
        List<String> tmp_vals = new ArrayList<>(2);
        tmp_vals.add("tmp_2011-07-11");
        tmp_vals.add("-8");
        String part2_path = "/ds=tmp_2011-07-11/hr=-8";
        client.dropTable(dbName, tblName);
        silentDropDatabase(dbName);
        Database db = new Database();
        db.setName(dbName);
        db.setDescription("Rename Partition Test database");
        client.createDatabase(db);
        Table tbl = new TableBuilder().setDbName(dbName).setTableName(tblName).addCol("name", ColumnType.STRING_TYPE_NAME).addCol("income", ColumnType.INT_TYPE_NAME).addPartCol("ds", ColumnType.STRING_TYPE_NAME).addPartCol("hr", ColumnType.INT_TYPE_NAME).build();
        client.createTable(tbl);
        if (isThriftClient) {
            // the createTable() above does not update the location in the 'tbl'
            // object when the client is a thrift client and the code below relies
            // on the location being present in the 'tbl' object - so get the table
            // from the metastore
            tbl = client.getTable(dbName, tblName);
        }
        Partition part = new Partition();
        part.setDbName(dbName);
        part.setTableName(tblName);
        part.setValues(vals);
        part.setParameters(new HashMap<>());
        part.setSd(tbl.getSd().deepCopy());
        part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
        part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
        // Distinctive parameter/serde/bucket values so we can verify they survive renames.
        part.getParameters().put("retention", "10");
        part.getSd().setNumBuckets(12);
        part.getSd().getSerdeInfo().getParameters().put("abc", "1");
        client.add_partition(part);
        // Rename (vals -> tmp_vals): the old partition name must no longer resolve.
        part.setValues(tmp_vals);
        client.renamePartition(dbName, tblName, vals, part);
        boolean exceptionThrown = false;
        try {
            client.getPartition(dbName, tblName, vals);
        } catch (Exception e) {
            assertEquals("partition should not have existed", NoSuchObjectException.class, e.getClass());
            exceptionThrown = true;
        }
        assertTrue("Expected NoSuchObjectException", exceptionThrown);
        Partition part3 = client.getPartition(dbName, tblName, tmp_vals);
        // assertEquals takes (message, expected, actual): expected values go first.
        assertEquals("couldn't rename partition", "10", part3.getParameters().get("retention"));
        assertEquals("couldn't rename partition", "1", part3.getSd().getSerdeInfo().getParameters().get("abc"));
        assertEquals("couldn't rename partition", 12, part3.getSd().getNumBuckets());
        assertEquals("new partition sd matches", tbl.getSd().getLocation() + part2_path, part3.getSd().getLocation());
        // Rename back (tmp_vals -> vals) and repeat the same checks.
        part.setValues(vals);
        client.renamePartition(dbName, tblName, tmp_vals, part);
        exceptionThrown = false;
        try {
            client.getPartition(dbName, tblName, tmp_vals);
        } catch (Exception e) {
            assertEquals("partition should not have existed", NoSuchObjectException.class, e.getClass());
            exceptionThrown = true;
        }
        assertTrue("Expected NoSuchObjectException", exceptionThrown);
        part3 = client.getPartition(dbName, tblName, vals);
        assertEquals("couldn't rename partition", "10", part3.getParameters().get("retention"));
        assertEquals("couldn't rename partition", "1", part3.getSd().getSerdeInfo().getParameters().get("abc"));
        assertEquals("couldn't rename partition", 12, part3.getSd().getNumBuckets());
        assertEquals("new partition sd matches", tbl.getSd().getLocation() + part_path, part3.getSd().getLocation());
        client.dropTable(dbName, tblName);
        client.dropDatabase(dbName);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testRenamePartition() failed.");
        throw e;
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) ArrayList(java.util.ArrayList) Database(org.apache.hadoop.hive.metastore.api.Database) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test)

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database)236 Test (org.junit.Test)107 Table (org.apache.hadoop.hive.metastore.api.Table)70 ArrayList (java.util.ArrayList)51 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)39 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)39 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)37 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)36 Partition (org.apache.hadoop.hive.metastore.api.Partition)35 Path (org.apache.hadoop.fs.Path)34 IOException (java.io.IOException)29 HashMap (java.util.HashMap)27 DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder)26 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)24 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)23 SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo)22 TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)22 TException (org.apache.thrift.TException)21 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)20 FileSystem (org.apache.hadoop.fs.FileSystem)17