Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
The class TestMetastoreExpr, method testPartitionExpr.
public void testPartitionExpr() throws Exception {
  String dbName = "filterdb";
  String tblName = "filtertbl";
  silentDropDatabase(dbName);
  Database db = new Database();
  db.setName(dbName);
  client.createDatabase(db);
  ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
  cols.add(new FieldSchema("c1", serdeConstants.STRING_TYPE_NAME, ""));
  cols.add(new FieldSchema("c2", serdeConstants.INT_TYPE_NAME, ""));
  ArrayList<FieldSchema> partCols = Lists.newArrayList(
      new FieldSchema("p1", serdeConstants.STRING_TYPE_NAME, ""),
      new FieldSchema("p2", serdeConstants.INT_TYPE_NAME, ""));
  Table tbl = new Table();
  tbl.setDbName(dbName);
  tbl.setTableName(tblName);
  addSd(cols, tbl);
  tbl.setPartitionKeys(partCols);
  client.createTable(tbl);
  tbl = client.getTable(dbName, tblName);
  addPartition(client, tbl, Lists.newArrayList("p11", "32"), "part1");
  addPartition(client, tbl, Lists.newArrayList("p12", "32"), "part2");
  addPartition(client, tbl, Lists.newArrayList("p13", "31"), "part3");
  addPartition(client, tbl, Lists.newArrayList("p14", "-33"), "part4");
  ExprBuilder e = new ExprBuilder(tblName);
  checkExpr(3, dbName, tblName, e.val(0).intCol("p2").pred(">", 2).build());
  checkExpr(3, dbName, tblName, e.intCol("p2").val(0).pred("<", 2).build());
  checkExpr(1, dbName, tblName, e.intCol("p2").val(0).pred(">", 2).build());
  checkExpr(2, dbName, tblName, e.val(31).intCol("p2").pred("<=", 2).build());
  checkExpr(3, dbName, tblName, e.val("p11").strCol("p1").pred(">", 2).build());
  checkExpr(1, dbName, tblName, e.val("p11").strCol("p1").pred(">", 2).intCol("p2").val(31).pred("<", 2).pred("and", 2).build());
  checkExpr(3, dbName, tblName, e.val(32).val(31).intCol("p2").val(false).pred("between", 4).build());
  // Apply isnull and instr (not supported by pushdown) via name filtering.
  checkExpr(4, dbName, tblName, e.val("p").strCol("p1").fn("instr", TypeInfoFactory.intTypeInfo, 2).val(0).pred("<=", 2).build());
  checkExpr(0, dbName, tblName, e.intCol("p2").pred("isnull", 1).build());
  // Cannot deserialize => throw the specific exception.
  try {
    client.listPartitionsByExpr(dbName, tblName, new byte[] { 'f', 'o', 'o' }, null, (short) -1, new ArrayList<Partition>());
    fail("Should have thrown IncompatibleMetastoreException");
  } catch (IMetaStoreClient.IncompatibleMetastoreException ignore) {
  }
  // Invalid expression => throw some exception, but not incompatible metastore.
  try {
    checkExpr(-1, dbName, tblName, e.val(31).intCol("p3").pred(">", 2).build());
    fail("Should have thrown");
  } catch (IMetaStoreClient.IncompatibleMetastoreException ignore) {
    fail("Should not have thrown IncompatibleMetastoreException");
  } catch (Exception ignore) {
  }
}
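The checkExpr helper used throughout this test is defined elsewhere in TestMetastoreExpr. A minimal sketch of what it plausibly does, assuming Kryo-based expression serialization (the SerializationUtilities call and the assertion message are assumptions, not code from this excerpt):

public void checkExpr(int numParts, String dbName, String tblName,
    ExprNodeGenericFuncDesc expr) throws Exception {
  // Hypothetical sketch: serialize the expression, push it down, count survivors.
  List<Partition> parts = new ArrayList<Partition>();
  client.listPartitionsByExpr(dbName, tblName,
      SerializationUtilities.serializeExpressionToKryo(expr), null, (short) -1, parts);
  assertEquals("Partition check failed: " + expr.getExprString(), numParts, parts.size());
}

Each checkExpr call asserts how many of the four partitions survive the pushed-down filter; for example, p2 > 0 matches part1, part2, and part3 but not part4 (p2 = -33), hence the expected count of 3.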
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
The class TestEximReplicationTasks, method testDropDb.
@Test
public void testDropDb() throws IOException {
  Database db = new Database();
  db.setName("testdb");
  // The message payload must be a drop-database message to match the event type.
  NotificationEvent event = new NotificationEvent(getEventId(), getTime(),
      HCatConstants.HCAT_DROP_DATABASE_EVENT, msgFactory.buildDropDatabaseMessage(db).toString());
  event.setDbName(db.getName());
  HCatNotificationEvent hev = new HCatNotificationEvent(event);
  ReplicationTask rtask = ReplicationTask.create(client, hev);
  assertEquals(hev.toString(), rtask.getEvent().toString());
  verifyDropDbReplicationTask(rtask);
}
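verifyDropDbReplicationTask is another helper from the same test class, not shown here. A plausible sketch, assuming drop-database events map to a DropDatabaseReplicationTask (the assertions below are illustrative, not the actual helper):

private static void verifyDropDbReplicationTask(ReplicationTask rtask) {
  // Hypothetical sketch: a drop-database event should yield a drop-database task.
  assertEquals(DropDatabaseReplicationTask.class, rtask.getClass());
  assertTrue(rtask.isActionable());
}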
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
The class TestEximReplicationTasks, method testCreateDb.
@Test
public void testCreateDb() {
  Database db = new Database();
  db.setName("testdb");
  NotificationEvent event = new NotificationEvent(getEventId(), getTime(),
      HCatConstants.HCAT_CREATE_DATABASE_EVENT, msgFactory.buildCreateDatabaseMessage(db).toString());
  event.setDbName(db.getName());
  HCatNotificationEvent hev = new HCatNotificationEvent(event);
  ReplicationTask rtask = ReplicationTask.create(client, hev);
  assertEquals(hev.toString(), rtask.getEvent().toString());
  // CREATE DB currently replicated as Noop.
  verifyCreateDbReplicationTask(rtask);
}
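Matching the comment above, the create-database verifier presumably only needs to confirm that the task is a no-op. A minimal sketch, assuming a NoopReplicationTask type (illustrative, not the actual helper):

private static void verifyCreateDbReplicationTask(ReplicationTask rtask) {
  // Hypothetical sketch: CREATE DB replication is expected to be a no-op task.
  assertEquals(NoopReplicationTask.class, rtask.getClass());
}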
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
The class MetaStoreDirectSql, method getDatabase.
public Database getDatabase(String dbName) throws MetaException {
  Query queryDbSelector = null;
  Query queryDbParams = null;
  try {
    dbName = dbName.toLowerCase();
    String queryTextDbSelector = "select "
        + "\"DB_ID\", \"NAME\", \"DB_LOCATION_URI\", \"DESC\", "
        + "\"OWNER_NAME\", \"OWNER_TYPE\" "
        + "FROM \"DBS\" where \"NAME\" = ? ";
    Object[] params = new Object[] { dbName };
    queryDbSelector = pm.newQuery("javax.jdo.query.SQL", queryTextDbSelector);
    if (LOG.isTraceEnabled()) {
      LOG.trace("getDatabase:query instantiated : " + queryTextDbSelector + " with param [" + params[0] + "]");
    }
    List<Object[]> sqlResult = executeWithArray(queryDbSelector, params, queryTextDbSelector);
    if ((sqlResult == null) || sqlResult.isEmpty()) {
      return null;
    }
    assert (sqlResult.size() == 1);
    if (sqlResult.get(0) == null) {
      return null;
    }
    Object[] dbline = sqlResult.get(0);
    Long dbid = extractSqlLong(dbline[0]);
    String queryTextDbParams = "select \"PARAM_KEY\", \"PARAM_VALUE\" "
        + "FROM \"DATABASE_PARAMS\" "
        + "WHERE \"DB_ID\" = ? "
        + "AND \"PARAM_KEY\" IS NOT NULL";
    params[0] = dbid;
    queryDbParams = pm.newQuery("javax.jdo.query.SQL", queryTextDbParams);
    if (LOG.isTraceEnabled()) {
      LOG.trace("getDatabase:query2 instantiated : " + queryTextDbParams + " with param [" + params[0] + "]");
    }
    Map<String, String> dbParams = new HashMap<String, String>();
    List<Object[]> sqlResult2 = ensureList(executeWithArray(queryDbParams, params, queryTextDbParams));
    if (!sqlResult2.isEmpty()) {
      for (Object[] line : sqlResult2) {
        dbParams.put(extractSqlString(line[0]), extractSqlString(line[1]));
      }
    }
    Database db = new Database();
    db.setName(extractSqlString(dbline[1]));
    db.setLocationUri(extractSqlString(dbline[2]));
    db.setDescription(extractSqlString(dbline[3]));
    db.setOwnerName(extractSqlString(dbline[4]));
    String type = extractSqlString(dbline[5]);
    db.setOwnerType((null == type || type.trim().isEmpty()) ? null : PrincipalType.valueOf(type));
    db.setParameters(MetaStoreUtils.trimMapNulls(dbParams, convertMapNullsToEmptyStrings));
    if (LOG.isDebugEnabled()) {
      LOG.debug("getDatabase: directsql returning db " + db.getName()
          + " locn[" + db.getLocationUri() + "] desc [" + db.getDescription()
          + "] owner [" + db.getOwnerName() + "] ownertype [" + db.getOwnerType() + "]");
    }
    return db;
  } finally {
    if (queryDbSelector != null) {
      queryDbSelector.closeAll();
    }
    if (queryDbParams != null) {
      queryDbParams.closeAll();
    }
  }
}
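The extractSqlLong and extractSqlString helpers convert raw JDO result objects into typed values. Simplified sketches of how they might behave (the real implementations live elsewhere in MetaStoreDirectSql; the null handling below is an assumption):

private static String extractSqlString(Object value) {
  // Hypothetical sketch: direct SQL may return null or a driver-specific type.
  return (value == null) ? null : value.toString();
}

private static Long extractSqlLong(Object obj) throws MetaException {
  // Hypothetical sketch: numeric columns can come back as various Number subclasses.
  if (obj == null) {
    return null;
  }
  if (!(obj instanceof Number)) {
    throw new MetaException("Expected numeric type but got " + obj.getClass().getName());
  }
  return ((Number) obj).longValue();
}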
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
The class DDLSemanticAnalyzer, method analyzeShowCreateDatabase.
private void analyzeShowCreateDatabase(ASTNode ast) throws SemanticException {
  String dbName = getUnescapedName((ASTNode) ast.getChild(0));
  ShowCreateDatabaseDesc showCreateDbDesc = new ShowCreateDatabaseDesc(dbName, ctx.getResFile().toString());
  Database database = getDatabase(dbName);
  inputs.add(new ReadEntity(database));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateDbDesc), conf));
  setFetchTask(createFetchTask(showCreateDbDesc.getSchema()));
}
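This analyzer backs statements of the form SHOW CREATE DATABASE dbname (for example, SHOW CREATE DATABASE filterdb). The database is registered as a ReadEntity so authorization can verify read access, and the fetch task makes the generated DDL available to the client via the result file.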