
Example 21 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class TestHiveMetaStoreTimeout, method testResetTimeout.

@Test
public void testResetTimeout() throws Exception {
    HiveMetaStore.TEST_TIMEOUT_VALUE = 5 * 1000;
    String dbName = "db";
    // no timeout before reset
    client.dropDatabase(dbName, true, true);
    Database db = new Database();
    db.setName(dbName);
    try {
        client.createDatabase(db);
    } catch (MetaException e) {
        Assert.fail("should not throw timeout exception: " + e.getMessage());
    }
    client.dropDatabase(dbName, true, true);
    // reset
    client.setMetaConf(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname, "3s");
    // timeout after reset
    try {
        client.createDatabase(db);
        Assert.fail("should throw timeout exception.");
    } catch (MetaException e) {
        Assert.assertTrue("unexpected MetaException", e.getMessage().contains("Timeout when " + "executing method: create_database"));
    }
    // restore
    client.dropDatabase(dbName, true, true);
    client.setMetaConf(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname, "10s");
}
Also used: Database (org.apache.hadoop.hive.metastore.api.Database), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), Test (org.junit.Test)
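
For context, here is a minimal standalone sketch (not from the Hive test suite) exercising the same Database API against a running metastore. The thrift URI and database name are placeholder assumptions.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

public class DatabaseClientSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // Assumed endpoint; point this at your own metastore.
        conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            Database db = new Database();
            db.setName("sketch_db");
            db.setDescription("created by the sketch");
            client.createDatabase(db);
            // deleteData=true, ignoreUnknownDb=true, matching the test above
            client.dropDatabase("sketch_db", true, true);
        } finally {
            client.close();
        }
    }
}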

Example 22 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class DDLTask, method switchDatabase.

/**
 * Switch to a different database.
 *
 * @param db       the Hive metadata handle used to look up the database
 * @param switchDb descriptor carrying the name of the database to switch to
 * @return always returns 0
 * @throws HiveException if the named database does not exist
 */
private int switchDatabase(Hive db, SwitchDatabaseDesc switchDb) throws HiveException {
    String dbName = switchDb.getDatabaseName();
    if (!db.databaseExists(dbName)) {
        throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
    }
    SessionState.get().setCurrentDatabase(dbName);
    // set database specific parameters
    Database database = db.getDatabase(dbName);
    assert (database != null);
    Map<String, String> dbParams = database.getParameters();
    if (dbParams != null) {
        for (HiveConf.ConfVars var : HiveConf.dbVars) {
            String newValue = dbParams.get(var.varname);
            if (newValue != null) {
                LOG.info("Changing " + var.varname + " from " + conf.getVar(var) + " to " + newValue);
                conf.setVar(var, newValue);
            }
        }
    }
    return 0;
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), Database (org.apache.hadoop.hive.metastore.api.Database), ConfVars (org.apache.hadoop.hive.conf.HiveConf.ConfVars), HiveConf (org.apache.hadoop.hive.conf.HiveConf)
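
As a complement, a hedged sketch of attaching database-level parameters through the metastore client so that switchDatabase() above could pick them up on USE. The database name and scratch-dir path are assumptions, and the example relies on the assumption that SCRATCHDIR is among the entries in HiveConf.dbVars.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

public class DbParamsSketch {
    // Stores a db-scoped value for hive.exec.scratchdir (assumed to be one of the
    // vars in HiveConf.dbVars) so switchDatabase() would copy it into the session
    // conf when the database is switched to.
    static void setDbScratchDir(HiveMetaStoreClient client, String dbName) throws Exception {
        Database db = client.getDatabase(dbName);
        Map<String, String> params = new HashMap<String, String>();
        params.put(HiveConf.ConfVars.SCRATCHDIR.varname, "/tmp/" + dbName + "_scratch");
        db.setParameters(params);
        client.alterDatabase(dbName, db);
    }
}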

Example 23 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class DDLTask, method showCreateDatabase.

private int showCreateDatabase(Hive db, DataOutputStream outStream, String databaseName) throws Exception {
    Database database = db.getDatabase(databaseName);
    StringBuilder createDb_str = new StringBuilder();
    createDb_str.append("CREATE DATABASE `").append(database.getName()).append("`\n");
    if (database.getDescription() != null) {
        createDb_str.append("COMMENT\n  '");
        createDb_str.append(HiveStringUtils.escapeHiveCommand(database.getDescription())).append("'\n");
    }
    createDb_str.append("LOCATION\n  '");
    createDb_str.append(database.getLocationUri()).append("'\n");
    String propertiesToString = propertiesToString(database.getParameters(), null);
    if (!propertiesToString.isEmpty()) {
        createDb_str.append("WITH DBPROPERTIES (\n");
        createDb_str.append(propertiesToString).append(")\n");
    }
    outStream.write(createDb_str.toString().getBytes("UTF-8"));
    return 0;
}
Also used: Database (org.apache.hadoop.hive.metastore.api.Database)
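
To exercise every branch of the generated statement, here is a hedged sketch (database name, description, location URI and property are all assumptions) of creating a database whose description, location and properties would all show up in the SHOW CREATE DATABASE text built above.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

public class ShowCreateDatabaseSketch {
    static void createDescribedDatabase(HiveMetaStoreClient client) throws Exception {
        Database db = new Database();
        db.setName("sketch_db");
        db.setDescription("sales data for the sketch");               // emitted as COMMENT
        db.setLocationUri("hdfs:///user/hive/warehouse/sketch_db.db"); // emitted as LOCATION (assumed URI)
        Map<String, String> props = new HashMap<String, String>();
        props.put("owner", "etl");                                     // emitted under WITH DBPROPERTIES
        db.setParameters(props);
        client.createDatabase(db);
    }
}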

Example 24 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class ImportSemanticAnalyzer, method fixLocationInPartSpec.

/**
 * Helper method to set the location properly in partSpec: the import table descriptor's
 * location is used when present, otherwise the existing table's data location, falling
 * back to the default warehouse path for the table; the partition sub-path is appended
 * in every case.
 */
private static void fixLocationInPartSpec(FileSystem fs, ImportTableDesc tblDesc, Table table, Warehouse wh, ReplicationSpec replicationSpec, AddPartitionDesc.OnePartitionDesc partSpec, EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, HiveException, IOException {
    Path tgtPath = null;
    if (tblDesc.getLocation() == null) {
        if (table.getDataLocation() != null) {
            tgtPath = new Path(table.getDataLocation().toString(), Warehouse.makePartPath(partSpec.getPartSpec()));
        } else {
            Database parentDb = x.getHive().getDatabase(tblDesc.getDatabaseName());
            tgtPath = new Path(wh.getTablePath(parentDb, tblDesc.getTableName()), Warehouse.makePartPath(partSpec.getPartSpec()));
        }
    } else {
        tgtPath = new Path(tblDesc.getLocation(), Warehouse.makePartPath(partSpec.getPartSpec()));
    }
    FileSystem tgtFs = FileSystem.get(tgtPath.toUri(), x.getConf());
    checkTargetLocationEmpty(tgtFs, tgtPath, replicationSpec, x);
    partSpec.setLocation(tgtPath.toString());
}
Also used: Path (org.apache.hadoop.fs.Path), FileSystem (org.apache.hadoop.fs.FileSystem), Database (org.apache.hadoop.hive.metastore.api.Database)
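
The partition sub-path appended in every branch above comes from Warehouse.makePartPath. A small sketch of what it produces for a two-column partition spec (the column names and values are made up):

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.hive.metastore.Warehouse;

public class PartPathSketch {
    public static void main(String[] args) throws Exception {
        // LinkedHashMap preserves partition-column order, which determines the path order.
        Map<String, String> spec = new LinkedHashMap<String, String>();
        spec.put("ds", "2024-01-01");
        spec.put("hr", "00");
        // Prints a relative path of the form ds=2024-01-01/hr=00
        System.out.println(Warehouse.makePartPath(spec));
    }
}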

Example 25 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeSwitchDatabase.

private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    Database database = getDatabase(dbName, true);
    ReadEntity dbReadEntity = new ReadEntity(database);
    dbReadEntity.noLockNeeded();
    inputs.add(dbReadEntity);
    SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc), conf));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), Database (org.apache.hadoop.hive.metastore.api.Database), SwitchDatabaseDesc (org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc)
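
This analyzer is driven by a plain USE statement. A hedged JDBC sketch (the HiveServer2 endpoint and database name are assumptions) of issuing the statement that ends up in analyzeSwitchDatabase() and, at execution time, DDLTask.switchDatabase():

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class UseDatabaseSketch {
    public static void main(String[] args) throws Exception {
        // Assumed HiveServer2 endpoint; requires the Hive JDBC driver on the classpath.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
            stmt.execute("USE sketch_db");
        }
    }
}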

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database): 153
Table (org.apache.hadoop.hive.metastore.api.Table): 49
Test (org.junit.Test): 46
ArrayList (java.util.ArrayList): 42
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 30
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 29
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 29
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 28
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 24
Path (org.apache.hadoop.fs.Path): 23
Partition (org.apache.hadoop.hive.metastore.api.Partition): 21
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 18
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 18
HashMap (java.util.HashMap): 17
TException (org.apache.thrift.TException): 17
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 16
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 15
IOException (java.io.IOException): 14
SQLException (java.sql.SQLException): 13
HiveInputFormat (org.apache.hadoop.hive.ql.io.HiveInputFormat): 13