Example 96 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestMetastoreExpr method testPartitionExpr.

public void testPartitionExpr() throws Exception {
    String dbName = "filterdb";
    String tblName = "filtertbl";
    silentDropDatabase(dbName);
    Database db = new Database();
    db.setName(dbName);
    client.createDatabase(db);
    ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
    cols.add(new FieldSchema("c1", serdeConstants.STRING_TYPE_NAME, ""));
    cols.add(new FieldSchema("c2", serdeConstants.INT_TYPE_NAME, ""));
    ArrayList<FieldSchema> partCols = Lists.newArrayList(new FieldSchema("p1", serdeConstants.STRING_TYPE_NAME, ""), new FieldSchema("p2", serdeConstants.INT_TYPE_NAME, ""));
    Table tbl = new Table();
    tbl.setDbName(dbName);
    tbl.setTableName(tblName);
    addSd(cols, tbl);
    tbl.setPartitionKeys(partCols);
    client.createTable(tbl);
    tbl = client.getTable(dbName, tblName);
    addPartition(client, tbl, Lists.newArrayList("p11", "32"), "part1");
    addPartition(client, tbl, Lists.newArrayList("p12", "32"), "part2");
    addPartition(client, tbl, Lists.newArrayList("p13", "31"), "part3");
    addPartition(client, tbl, Lists.newArrayList("p14", "-33"), "part4");
    ExprBuilder e = new ExprBuilder(tblName);
    checkExpr(3, dbName, tblName, e.val(0).intCol("p2").pred(">", 2).build());
    checkExpr(3, dbName, tblName, e.intCol("p2").val(0).pred("<", 2).build());
    checkExpr(1, dbName, tblName, e.intCol("p2").val(0).pred(">", 2).build());
    checkExpr(2, dbName, tblName, e.val(31).intCol("p2").pred("<=", 2).build());
    checkExpr(3, dbName, tblName, e.val("p11").strCol("p1").pred(">", 2).build());
    checkExpr(1, dbName, tblName, e.val("p11").strCol("p1").pred(">", 2).intCol("p2").val(31).pred("<", 2).pred("and", 2).build());
    checkExpr(3, dbName, tblName, e.val(32).val(31).intCol("p2").val(false).pred("between", 4).build());
    // Apply isnull and instr (not supported by pushdown) via name filtering.
    checkExpr(4, dbName, tblName, e.val("p").strCol("p1").fn("instr", TypeInfoFactory.intTypeInfo, 2).val(0).pred("<=", 2).build());
    checkExpr(0, dbName, tblName, e.intCol("p2").pred("isnull", 1).build());
    // Cannot deserialize => throw the specific exception.
    try {
        client.listPartitionsByExpr(dbName, tblName, new byte[] { 'f', 'o', 'o' }, null, (short) -1, new ArrayList<Partition>());
        fail("Should have thrown IncompatibleMetastoreException");
    } catch (IMetaStoreClient.IncompatibleMetastoreException ignore) {
    }
    // Invalid expression => throw some exception, but not incompatible metastore.
    try {
        checkExpr(-1, dbName, tblName, e.val(31).intCol("p3").pred(">", 2).build());
        fail("Should have thrown");
    } catch (IMetaStoreClient.IncompatibleMetastoreException ignore) {
        fail("Should not have thrown IncompatibleMetastoreException");
    } catch (Exception ignore) {
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition), Table(org.apache.hadoop.hive.metastore.api.Table), FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema), Database(org.apache.hadoop.hive.metastore.api.Database), ArrayList(java.util.ArrayList), TException(org.apache.thrift.TException), InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException), NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
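
The addSd helper invoked above is not shown on this page. Below is a minimal sketch of what such a helper typically wires up, using the Thrift-generated metastore API from org.apache.hadoop.hive.metastore.api; the setter names are the real generated API, while the text input/output formats and LazySimpleSerDe are assumed defaults for illustration, not taken from the test.

private static void addSd(List<FieldSchema> cols, Table tbl) {
    StorageDescriptor sd = new StorageDescriptor();
    // Column schema for the table's data files.
    sd.setCols(cols);
    sd.setParameters(new HashMap<String, String>());
    // Assumed plain-text storage formats, purely for illustration.
    sd.setInputFormat("org.apache.hadoop.mapred.TextInputFormat");
    sd.setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
    SerDeInfo serde = new SerDeInfo();
    serde.setName(tbl.getTableName());
    serde.setParameters(new HashMap<String, String>());
    serde.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
    sd.setSerdeInfo(serde);
    tbl.setSd(sd);
}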

Example 97 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestEximReplicationTasks method testDropDb.

@Test
public void testDropDb() throws IOException {
    Database db = new Database();
    db.setName("testdb");
    NotificationEvent event = new NotificationEvent(getEventId(), getTime(), HCatConstants.HCAT_DROP_DATABASE_EVENT, msgFactory.buildDropDatabaseMessage(db).toString());
    event.setDbName(db.getName());
    HCatNotificationEvent hev = new HCatNotificationEvent(event);
    ReplicationTask rtask = ReplicationTask.create(client, hev);
    assertEquals(hev.toString(), rtask.getEvent().toString());
    verifyDropDbReplicationTask(rtask);
}
Also used : NoopReplicationTask(org.apache.hive.hcatalog.api.repl.NoopReplicationTask), ReplicationTask(org.apache.hive.hcatalog.api.repl.ReplicationTask), Database(org.apache.hadoop.hive.metastore.api.Database), HCatNotificationEvent(org.apache.hive.hcatalog.api.HCatNotificationEvent), NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent), Test(org.junit.Test)

Example 98 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestEximReplicationTasks method testCreateDb.

@Test
public void testCreateDb() {
    Database db = new Database();
    db.setName("testdb");
    NotificationEvent event = new NotificationEvent(getEventId(), getTime(), HCatConstants.HCAT_CREATE_DATABASE_EVENT, msgFactory.buildCreateDatabaseMessage(db).toString());
    event.setDbName(db.getName());
    HCatNotificationEvent hev = new HCatNotificationEvent(event);
    ReplicationTask rtask = ReplicationTask.create(client, hev);
    assertEquals(hev.toString(), rtask.getEvent().toString());
    // CREATE DB currently replicated as Noop.
    verifyCreateDbReplicationTask(rtask);
}
Also used : NoopReplicationTask(org.apache.hive.hcatalog.api.repl.NoopReplicationTask), ReplicationTask(org.apache.hive.hcatalog.api.repl.ReplicationTask), Database(org.apache.hadoop.hive.metastore.api.Database), HCatNotificationEvent(org.apache.hive.hcatalog.api.HCatNotificationEvent), NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent), Test(org.junit.Test)
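
Examples 97 and 98 share one pattern: wrap the metastore NotificationEvent in an HCatNotificationEvent and hand it to ReplicationTask.create, which dispatches on the event type (hence the Noop task for CREATE DB). A hedged sketch of how a consumer might walk the resulting task; the method names come from the org.apache.hive.hcatalog.api.repl API, and the printing is purely illustrative.

static void runTask(ReplicationTask rtask) {
    if (!rtask.isActionable()) {
        // Not ready to run, e.g. a task that reports needsStagingDirs()
        // but has not been given staging directory providers yet.
        return;
    }
    for (Command cmd : rtask.getSrcWhCommands()) {
        System.out.println("source warehouse: " + cmd.get());
    }
    for (Command cmd : rtask.getDstWhCommands()) {
        System.out.println("destination warehouse: " + cmd.get());
    }
}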

Example 99 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class MetaStoreDirectSql method getDatabase.

public Database getDatabase(String dbName) throws MetaException {
    Query queryDbSelector = null;
    Query queryDbParams = null;
    try {
        dbName = dbName.toLowerCase();
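        // First query: look up the DBS row for the (lower-cased) database name.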
        String queryTextDbSelector = "select " + "\"DB_ID\", \"NAME\", \"DB_LOCATION_URI\", \"DESC\", " + "\"OWNER_NAME\", \"OWNER_TYPE\" " + "FROM \"DBS\" where \"NAME\" = ? ";
        Object[] params = new Object[] { dbName };
        queryDbSelector = pm.newQuery("javax.jdo.query.SQL", queryTextDbSelector);
        if (LOG.isTraceEnabled()) {
            LOG.trace("getDatabase:query instantiated : " + queryTextDbSelector + " with param [" + params[0] + "]");
        }
        List<Object[]> sqlResult = executeWithArray(queryDbSelector, params, queryTextDbSelector);
        if ((sqlResult == null) || sqlResult.isEmpty()) {
            return null;
        }
        assert (sqlResult.size() == 1);
        if (sqlResult.get(0) == null) {
            return null;
        }
        Object[] dbline = sqlResult.get(0);
        Long dbid = extractSqlLong(dbline[0]);
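        // Second query: fetch the database's parameters by DB_ID.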
        String queryTextDbParams = "select \"PARAM_KEY\", \"PARAM_VALUE\" " + " FROM \"DATABASE_PARAMS\" " + " WHERE \"DB_ID\" = ? " + " AND \"PARAM_KEY\" IS NOT NULL";
        params[0] = dbid;
        queryDbParams = pm.newQuery("javax.jdo.query.SQL", queryTextDbParams);
        if (LOG.isTraceEnabled()) {
            LOG.trace("getDatabase:query2 instantiated : " + queryTextDbParams + " with param [" + params[0] + "]");
        }
        Map<String, String> dbParams = new HashMap<String, String>();
        List<Object[]> sqlResult2 = ensureList(executeWithArray(queryDbParams, params, queryTextDbParams));
        if (!sqlResult2.isEmpty()) {
            for (Object[] line : sqlResult2) {
                dbParams.put(extractSqlString(line[0]), extractSqlString(line[1]));
            }
        }
        Database db = new Database();
        db.setName(extractSqlString(dbline[1]));
        db.setLocationUri(extractSqlString(dbline[2]));
        db.setDescription(extractSqlString(dbline[3]));
        db.setOwnerName(extractSqlString(dbline[4]));
        String type = extractSqlString(dbline[5]);
        db.setOwnerType((null == type || type.trim().isEmpty()) ? null : PrincipalType.valueOf(type));
        db.setParameters(MetaStoreUtils.trimMapNulls(dbParams, convertMapNullsToEmptyStrings));
        if (LOG.isDebugEnabled()) {
            LOG.debug("getDatabase: directsql returning db " + db.getName() + " locn[" + db.getLocationUri() + "] desc [" + db.getDescription() + "] owner [" + db.getOwnerName() + "] ownertype [" + db.getOwnerType() + "]");
        }
        return db;
    } finally {
        if (queryDbSelector != null) {
            queryDbSelector.closeAll();
        }
        if (queryDbParams != null) {
            queryDbParams.closeAll();
        }
    }
}
Also used : Query(javax.jdo.Query), HashMap(java.util.HashMap), MDatabase(org.apache.hadoop.hive.metastore.model.MDatabase), Database(org.apache.hadoop.hive.metastore.api.Database)
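
Note that getDatabase returns null on a miss instead of throwing, so callers translate the miss themselves. A hedged usage sketch; the directSql variable and the surrounding call site are assumptions for illustration.

Database db = directSql.getDatabase("testdb");
if (db == null) {
    // Direct SQL found no row; a caller such as ObjectStore would typically
    // surface this as a NoSuchObjectException rather than pass null along.
    throw new NoSuchObjectException("There is no database named testdb");
}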

Example 100 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class DDLSemanticAnalyzer method analyzeShowCreateDatabase.

private void analyzeShowCreateDatabase(ASTNode ast) throws SemanticException {
    String dbName = getUnescapedName((ASTNode) ast.getChild(0));
    ShowCreateDatabaseDesc showCreateDbDesc = new ShowCreateDatabaseDesc(dbName, ctx.getResFile().toString());
    Database database = getDatabase(dbName);
    inputs.add(new ReadEntity(database));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateDbDesc), conf));
    setFetchTask(createFetchTask(showCreateDbDesc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork), Database(org.apache.hadoop.hive.metastore.api.Database), ShowCreateDatabaseDesc(org.apache.hadoop.hive.ql.plan.ShowCreateDatabaseDesc)
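
For context, this analyzer method services HiveQL of the form SHOW CREATE DATABASE <dbname>. The ReadEntity records the database among the statement's inputs (used for authorization checks), the DDLWork task writes the generated DDL text to the result file obtained from ctx.getResFile(), and the fetch task streams that file back to the client.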

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database): 153
Table (org.apache.hadoop.hive.metastore.api.Table): 49
Test (org.junit.Test): 46
ArrayList (java.util.ArrayList): 42
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 30
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 29
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 29
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 28
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 24
Path (org.apache.hadoop.fs.Path): 23
Partition (org.apache.hadoop.hive.metastore.api.Partition): 21
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 18
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 18
HashMap (java.util.HashMap): 17
TException (org.apache.thrift.TException): 17
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 16
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 15
IOException (java.io.IOException): 14
SQLException (java.sql.SQLException): 13
HiveInputFormat (org.apache.hadoop.hive.ql.io.HiveInputFormat): 13