Search in sources :

Example 1 with UnknownDBException

use of org.apache.hadoop.hive.metastore.api.UnknownDBException in project hive by apache.

From the class TestHiveMetaStore, method testSimpleTable.

/**
 * End-to-end metastore test for simple (unpartitioned) tables: creates a database,
 * a type, and two tables; verifies round-trip of table metadata, field/schema
 * retrieval, bulk lookup via getTableObjectsByName (including its error cases for
 * null/unknown/empty database names), and drop semantics for managed vs. EXTERNAL
 * tables (a managed table's directory is removed on drop; an external one's is not).
 *
 * @throws Exception any failure is logged to stderr and rethrown
 */
public void testSimpleTable() throws Exception {
    try {
        String dbName = "simpdb";
        String tblName = "simptbl";
        String tblName2 = "simptbl2";
        String typeName = "Person";
        // Start from a clean slate in case a previous run left artifacts behind.
        client.dropTable(dbName, tblName);
        silentDropDatabase(dbName);
        Database db = new Database();
        db.setName(dbName);
        client.createDatabase(db);
        client.dropType(typeName);
        // Register a two-column type whose fields double as the table's columns.
        Type typ1 = new Type();
        typ1.setName(typeName);
        typ1.setFields(new ArrayList<FieldSchema>(2));
        typ1.getFields().add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
        typ1.getFields().add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));
        client.createType(typ1);
        // Build the first (managed) table's descriptor.
        Table tbl = new Table();
        tbl.setDbName(dbName);
        tbl.setTableName(tblName);
        StorageDescriptor sd = new StorageDescriptor();
        tbl.setSd(sd);
        sd.setCols(typ1.getFields());
        sd.setCompressed(false);
        sd.setNumBuckets(1);
        sd.setParameters(new HashMap<String, String>());
        sd.getParameters().put("test_param_1", "Use this for comments etc");
        sd.setBucketCols(new ArrayList<String>(2));
        sd.getBucketCols().add("name");
        sd.setSerdeInfo(new SerDeInfo());
        sd.getSerdeInfo().setName(tbl.getTableName());
        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
        sd.getSerdeInfo().getParameters().put(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
        sd.getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
        sd.setInputFormat(HiveInputFormat.class.getName());
        // BUG FIX: the original called setInputFormat here a second time, clobbering
        // the input format above and leaving the output format unset.
        sd.setOutputFormat(HiveOutputFormat.class.getName());
        // Empty partition-key list => unpartitioned table.
        tbl.setPartitionKeys(new ArrayList<FieldSchema>());
        client.createTable(tbl);
        if (isThriftClient) {
            // the createTable() above does not update the location in the 'tbl'
            // object when the client is a thrift client and the code below relies
            // on the location being present in the 'tbl' object - so get the table
            // from the metastore
            tbl = client.getTable(dbName, tblName);
        }
        // Read the table back and verify every piece of metadata round-tripped.
        Table tbl2 = client.getTable(dbName, tblName);
        assertNotNull(tbl2);
        assertEquals(tbl2.getDbName(), dbName);
        assertEquals(tbl2.getTableName(), tblName);
        assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
        assertEquals(tbl2.getSd().isCompressed(), false);
        assertEquals(tbl2.getSd().getNumBuckets(), 1);
        assertEquals(tbl2.getSd().getLocation(), tbl.getSd().getLocation());
        assertNotNull(tbl2.getSd().getSerdeInfo());
        // NOTE(review): these two lines mutate the local 'sd' (belonging to 'tbl'),
        // not tbl2's descriptor, and appear to be leftover setup — kept for parity.
        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
        sd.getSerdeInfo().getParameters().put(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
        // Repurpose tbl2 as a second, EXTERNAL table at a distinct location.
        tbl2.setTableName(tblName2);
        tbl2.setParameters(new HashMap<String, String>());
        tbl2.getParameters().put("EXTERNAL", "TRUE");
        tbl2.getSd().setLocation(tbl.getSd().getLocation() + "-2");
        // getFields returns only the storage-descriptor columns.
        List<FieldSchema> fieldSchemas = client.getFields(dbName, tblName);
        assertNotNull(fieldSchemas);
        assertEquals(fieldSchemas.size(), tbl.getSd().getCols().size());
        for (FieldSchema fs : tbl.getSd().getCols()) {
            assertTrue(fieldSchemas.contains(fs));
        }
        // getSchema returns columns plus partition keys.
        List<FieldSchema> fieldSchemasFull = client.getSchema(dbName, tblName);
        assertNotNull(fieldSchemasFull);
        assertEquals(fieldSchemasFull.size(), tbl.getSd().getCols().size() + tbl.getPartitionKeys().size());
        for (FieldSchema fs : tbl.getSd().getCols()) {
            assertTrue(fieldSchemasFull.contains(fs));
        }
        for (FieldSchema fs : tbl.getPartitionKeys()) {
            assertTrue(fieldSchemasFull.contains(fs));
        }
        client.createTable(tbl2);
        if (isThriftClient) {
            tbl2 = client.getTable(tbl2.getDbName(), tbl2.getTableName());
        }
        // Verify the second table round-trips as well.
        Table tbl3 = client.getTable(dbName, tblName2);
        assertNotNull(tbl3);
        assertEquals(tbl3.getDbName(), dbName);
        assertEquals(tbl3.getTableName(), tblName2);
        assertEquals(tbl3.getSd().getCols().size(), typ1.getFields().size());
        assertEquals(tbl3.getSd().isCompressed(), false);
        assertEquals(tbl3.getSd().getNumBuckets(), 1);
        assertEquals(tbl3.getSd().getLocation(), tbl2.getSd().getLocation());
        assertEquals(tbl3.getParameters(), tbl2.getParameters());
        fieldSchemas = client.getFields(dbName, tblName2);
        assertNotNull(fieldSchemas);
        assertEquals(fieldSchemas.size(), tbl2.getSd().getCols().size());
        for (FieldSchema fs : tbl2.getSd().getCols()) {
            assertTrue(fieldSchemas.contains(fs));
        }
        fieldSchemasFull = client.getSchema(dbName, tblName2);
        assertNotNull(fieldSchemasFull);
        assertEquals(fieldSchemasFull.size(), tbl2.getSd().getCols().size() + tbl2.getPartitionKeys().size());
        for (FieldSchema fs : tbl2.getSd().getCols()) {
            assertTrue(fieldSchemasFull.contains(fs));
        }
        for (FieldSchema fs : tbl2.getPartitionKeys()) {
            assertTrue(fieldSchemasFull.contains(fs));
        }
        assertEquals("Use this for comments etc", tbl2.getSd().getParameters().get("test_param_1"));
        assertEquals("name", tbl2.getSd().getBucketCols().get(0));
        assertTrue("Partition key list is not empty", (tbl2.getPartitionKeys() == null) || (tbl2.getPartitionKeys().size() == 0));
        //test get_table_objects_by_name functionality
        ArrayList<String> tableNames = new ArrayList<String>();
        tableNames.add(tblName2);
        tableNames.add(tblName);
        tableNames.add(tblName2);
        // Duplicate names in the request must be de-duplicated: two unique tables.
        List<Table> foundTables = client.getTableObjectsByName(dbName, tableNames);
        assertEquals(2, foundTables.size());
        for (Table t : foundTables) {
            if (t.getTableName().equals(tblName2)) {
                assertEquals(t.getSd().getLocation(), tbl2.getSd().getLocation());
            } else {
                assertEquals(t.getTableName(), tblName);
                assertEquals(t.getSd().getLocation(), tbl.getSd().getLocation());
            }
            assertEquals(t.getSd().getCols().size(), typ1.getFields().size());
            assertEquals(t.getSd().isCompressed(), false);
            assertEquals(foundTables.get(0).getSd().getNumBuckets(), 1);
            assertNotNull(t.getSd().getSerdeInfo());
            assertEquals(t.getDbName(), dbName);
        }
        // Unknown table names are silently skipped by the bulk lookup.
        tableNames.add(1, "table_that_doesnt_exist");
        foundTables = client.getTableObjectsByName(dbName, tableNames);
        assertEquals(foundTables.size(), 2);
        // A null table list must raise InvalidOperationException.
        InvalidOperationException ioe = null;
        try {
            foundTables = client.getTableObjectsByName(dbName, null);
        } catch (InvalidOperationException e) {
            ioe = e;
        }
        assertNotNull(ioe);
        assertTrue("Table not found", ioe.getMessage().contains("null tables"));
        // A non-existent database must raise UnknownDBException.
        UnknownDBException udbe = null;
        try {
            foundTables = client.getTableObjectsByName("db_that_doesnt_exist", tableNames);
        } catch (UnknownDBException e) {
            udbe = e;
        }
        assertNotNull(udbe);
        assertTrue("DB not found", udbe.getMessage().contains("not find database db_that_doesnt_exist"));
        // An empty database name must also raise UnknownDBException.
        udbe = null;
        try {
            foundTables = client.getTableObjectsByName("", tableNames);
        } catch (UnknownDBException e) {
            udbe = e;
        }
        assertNotNull(udbe);
        assertTrue("DB not found", udbe.getMessage().contains("is null or empty"));
        // Dropping the managed table removes its directory ...
        FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), hiveConf);
        client.dropTable(dbName, tblName);
        assertFalse(fs.exists(new Path(tbl.getSd().getLocation())));
        // ... while dropping the EXTERNAL table leaves its data in place.
        client.dropTable(dbName, tblName2);
        assertTrue(fs.exists(new Path(tbl2.getSd().getLocation())));
        client.dropType(typeName);
        client.dropDatabase(dbName);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testSimpleTable() failed.");
        throw e;
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Table(org.apache.hadoop.hive.metastore.api.Table) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) LazySimpleSerDe(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) ArrayList(java.util.ArrayList) HiveOutputFormat(org.apache.hadoop.hive.ql.io.HiveOutputFormat) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) HiveInputFormat(org.apache.hadoop.hive.ql.io.HiveInputFormat) Type(org.apache.hadoop.hive.metastore.api.Type) ResourceType(org.apache.hadoop.hive.metastore.api.ResourceType) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException)

Example 2 with UnknownDBException

use of org.apache.hadoop.hive.metastore.api.UnknownDBException in project hive by apache.

From the class ObjectStore, method getTableObjectsByName.

/**
 * Fetches the Table objects for the given table names within a database.
 *
 * <p>Names are normalized before lookup. Table names that do not exist are
 * silently omitted from the result; only a missing/empty database is fatal.
 *
 * @param db database name (normalized internally)
 * @param tbl_names table names to look up
 * @return the tables found, in datastore order
 * @throws UnknownDBException if the database cannot be found
 * @throws MetaException on datastore errors
 */
@Override
public List<Table> getTableObjectsByName(String db, List<String> tbl_names) throws MetaException, UnknownDBException {
    List<Table> results = new ArrayList<Table>();
    boolean success = false;
    Query dbQuery = null;
    Query tblQuery = null;
    try {
        openTransaction();
        db = HiveStringUtils.normalizeIdentifier(db);
        // First confirm the database exists; a bulk table lookup against a
        // missing database must fail loudly rather than return an empty list.
        dbQuery = pm.newQuery(MDatabase.class, "name == db");
        dbQuery.declareParameters("java.lang.String db");
        dbQuery.setUnique(true);
        dbQuery.setResult("name");
        String foundDbName = (String) dbQuery.execute(db);
        if (foundDbName == null || foundDbName.isEmpty()) {
            throw new UnknownDBException("Could not find database " + db);
        }
        // Normalize each requested table name to match how names are stored.
        List<String> normalizedNames = new ArrayList<String>();
        for (String name : tbl_names) {
            normalizedNames.add(HiveStringUtils.normalizeIdentifier(name));
        }
        // Single query for all requested tables in this database.
        tblQuery = pm.newQuery(MTable.class);
        tblQuery.setFilter("database.name == db && tbl_names.contains(tableName)");
        tblQuery.declareParameters("java.lang.String db, java.util.Collection tbl_names");
        Collection mtables = (Collection) tblQuery.execute(db, normalizedNames);
        for (Object mtable : mtables) {
            results.add(convertToTable((MTable) mtable));
        }
        success = commitTransaction();
    } finally {
        if (!success) {
            rollbackTransaction();
        }
        // Release JDO query resources regardless of outcome.
        if (dbQuery != null) {
            dbQuery.closeAll();
        }
        if (tblQuery != null) {
            tblQuery.closeAll();
        }
    }
    return results;
}
Also used : MDatabase(org.apache.hadoop.hive.metastore.model.MDatabase) Table(org.apache.hadoop.hive.metastore.api.Table) MVersionTable(org.apache.hadoop.hive.metastore.model.MVersionTable) MTable(org.apache.hadoop.hive.metastore.model.MTable) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) MTable(org.apache.hadoop.hive.metastore.model.MTable) Query(javax.jdo.Query) ArrayList(java.util.ArrayList) Iterator(java.util.Iterator) Collection(java.util.Collection)

Example 3 with UnknownDBException

use of org.apache.hadoop.hive.metastore.api.UnknownDBException in project hive by apache.

From the class TestMarkPartition, method testMarkingPartitionSet.

public void testMarkingPartitionSet() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException, UnknownDBException, UnknownTableException, InvalidPartitionException, UnknownPartitionException, InterruptedException {
    HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf);
    driver = new Driver(hiveConf);
    driver.run("drop database if exists hive2215 cascade");
    driver.run("create database hive2215");
    driver.run("use hive2215");
    driver.run("drop table if exists tmptbl");
    driver.run("create table tmptbl (a string) partitioned by (b string)");
    driver.run("alter table tmptbl add partition (b='2011')");
    Map<String, String> kvs = new HashMap<String, String>();
    kvs.put("b", "'2011'");
    msc.markPartitionForEvent("hive2215", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
    assert msc.isPartitionMarkedForEvent("hive2215", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
    Thread.sleep(10000);
    assert !msc.isPartitionMarkedForEvent("hive2215", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
    kvs.put("b", "'2012'");
    assert !msc.isPartitionMarkedForEvent("hive2215", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
    try {
        msc.markPartitionForEvent("hive2215", "tmptbl2", kvs, PartitionEventType.LOAD_DONE);
        assert false;
    } catch (Exception e) {
        assert e instanceof UnknownTableException;
    }
    try {
        msc.isPartitionMarkedForEvent("hive2215", "tmptbl2", kvs, PartitionEventType.LOAD_DONE);
        assert false;
    } catch (Exception e) {
        assert e instanceof UnknownTableException;
    }
    kvs.put("a", "'2012'");
    try {
        msc.isPartitionMarkedForEvent("hive2215", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
        assert false;
    } catch (Exception e) {
        assert e instanceof InvalidPartitionException;
    }
}
Also used : UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException) HashMap(java.util.HashMap) Driver(org.apache.hadoop.hive.ql.Driver) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException) TException(org.apache.thrift.TException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) CommandNeedRetryException(org.apache.hadoop.hive.ql.CommandNeedRetryException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException)

Aggregations

UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException)3 ArrayList (java.util.ArrayList)2 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)2 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)2 Table (org.apache.hadoop.hive.metastore.api.Table)2 TException (org.apache.thrift.TException)2 SQLException (java.sql.SQLException)1 Collection (java.util.Collection)1 HashMap (java.util.HashMap)1 Iterator (java.util.Iterator)1 Query (javax.jdo.Query)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 Path (org.apache.hadoop.fs.Path)1 AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException)1 ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException)1 Database (org.apache.hadoop.hive.metastore.api.Database)1 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)1 FunctionType (org.apache.hadoop.hive.metastore.api.FunctionType)1 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)1 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)1