Usage of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache: class ReplDumpTask, method dumpConstraintMetadata.
/**
 * Dumps the table-level constraint metadata (primary-key, foreign-key, unique and
 * not-null constraints) for {@code dbName.tblName} into JSON files under the
 * constraints directory of the given database dump root.
 *
 * <p>PK/UK/NN constraints are serialized together into one "common" file; FK
 * constraints go into a separate file, each prefixed by its {@link ConstraintFileType}.
 *
 * @param dbName  database containing the table
 * @param tblName table whose constraints are dumped
 * @param dbRoot  root dump path of the database; files are written under
 *                {@code CONSTRAINTS_ROOT_DIR_NAME} inside it
 * @throws Exception on serialization or filesystem failures (a concurrently
 *                   dropped/renamed table is tolerated, see catch below)
 */
private void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot) throws Exception {
  try {
    Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME);
    // PK/UK/NN share one file; FK constraints get their own file.
    Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
    Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
    Hive db = getHive();
    List<SQLPrimaryKey> pks = db.getPrimaryKeyList(dbName, tblName);
    List<SQLForeignKey> fks = db.getForeignKeyList(dbName, tblName);
    List<SQLUniqueConstraint> uks = db.getUniqueConstraintList(dbName, tblName);
    List<SQLNotNullConstraint> nns = db.getNotNullConstraintList(dbName, tblName);
    if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) {
      try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
        ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf);
        serializer.writeTo(jsonWriter, null);
      }
    }
    if (fks != null && !fks.isEmpty()) {
      try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
        ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf);
        serializer.writeTo(jsonWriter, null);
      }
    }
  } catch (NoSuchObjectException e) {
    // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
    // Log the full exception (not just the message) so the stack trace is not lost at debug level.
    LOG.debug(e.getMessage(), e);
  }
}
Usage of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache: class DDLTask, method dropPartitions.
/**
 * Drops the partitions described by {@code dropTbl} from table {@code tbl}.
 *
 * <p>In replication scope, partitions are resolved per partition-spec expression and
 * only those whose event state allows replacement are dropped; a missing partition
 * is not an error (the drop event may be replayed after the partition is already gone).
 * Outside replication scope, all matching partitions are dropped in one metastore call.
 *
 * @param db      Hive client used for metastore operations
 * @param tbl     table whose partitions are dropped
 * @param dropTbl descriptor carrying the partition specs, replication spec and purge flag
 * @throws HiveException if partition resolution or the drop itself fails
 */
private void dropPartitions(Hive db, Table tbl, DropTableDesc dropTbl) throws HiveException {
  ReplicationSpec replicationSpec = dropTbl.getReplicationSpec();
  if (replicationSpec.isInReplicationScope()) {
    // Replication-scoped drop: filter each spec's partitions by whether the
    // replication event is allowed to replace their current state.
    for (DropTableDesc.PartSpec partSpec : dropTbl.getPartSpecs()) {
      List<Partition> partitions = new ArrayList<>();
      try {
        db.getPartitionsByExpr(tbl, partSpec.getPartSpec(), conf, partitions);
        for (Partition p : Iterables.filter(partitions, replicationSpec.allowEventReplacementInto())) {
          db.dropPartition(tbl.getDbName(), tbl.getTableName(), p.getValues(), true);
        }
      } catch (NoSuchObjectException e) {
        // ignore NSOE because that means there's nothing to drop.
      } catch (Exception e) {
        throw new HiveException(e.getMessage(), e);
      }
    }
    return;
  }
  // ifExists is currently verified in DDLSemanticAnalyzer
  List<Partition> droppedParts = db.dropPartitions(dropTbl.getTableName(), dropTbl.getPartSpecs(),
      PartitionDropOptions.instance().deleteData(true).ifExists(true).purgeData(dropTbl.getIfPurge()));
  for (Partition partition : droppedParts) {
    console.printInfo("Dropped the partition " + partition.getName());
    // We have already locked the table, don't lock the partitions.
    addIfAbsentByName(new WriteEntity(partition, WriteEntity.WriteType.DDL_NO_LOCK));
  }
}
Usage of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache: class TestHiveMetaStore, method testDatabaseLocation.
/**
 * Verifies that an explicitly supplied database location is honored on create,
 * that the database is truly gone after drop, and that creating a database
 * whose location already exists as a plain file is rejected without leaving a
 * database behind.
 */
@Test
public void testDatabaseLocation() throws Throwable {
  try {
    // Start from a clean slate.
    silentDropDatabase(TEST_DB1_NAME);
    String location = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/_testDB_create_";
    Database database = new Database();
    database.setName(TEST_DB1_NAME);
    database.setLocationUri(location);
    client.createDatabase(database);
    database = client.getDatabase(TEST_DB1_NAME);
    assertEquals("name of returned db is different from that of inserted db", TEST_DB1_NAME, database.getName());
    assertEquals("location of the returned db is different from that of inserted db", warehouse.getDnsPath(new Path(location)).toString(), database.getLocationUri());
    client.dropDatabase(TEST_DB1_NAME);
    silentDropDatabase(TEST_DB1_NAME);
    // After the drop, a lookup must raise NoSuchObjectException.
    boolean missing = false;
    try {
      client.getDatabase(TEST_DB1_NAME);
    } catch (NoSuchObjectException e) {
      missing = true;
    }
    assertTrue("Database " + TEST_DB1_NAME + " exists ", missing);
    // Now point the database location at a plain file; creation must fail.
    location = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/_testDB_file_";
    Path filePath = new Path(location);
    FileSystem fileSystem = FileSystem.get(filePath.toUri(), conf);
    fileSystem.createNewFile(filePath);
    fileSystem.deleteOnExit(filePath);
    database = new Database();
    database.setName(TEST_DB1_NAME);
    database.setLocationUri(location);
    boolean creationRejected = false;
    try {
      client.createDatabase(database);
    } catch (MetaException cantCreateDB) {
      System.err.println(cantCreateDB.getMessage());
      creationRejected = true;
    }
    assertTrue("Database creation succeeded even location exists and is a file", creationRejected);
    // The failed create must not have registered the database.
    missing = false;
    try {
      client.getDatabase(TEST_DB1_NAME);
    } catch (NoSuchObjectException e) {
      missing = true;
    }
    assertTrue("Database " + TEST_DB1_NAME + " exists when location is specified and is a file", missing);
  } catch (Throwable e) {
    System.err.println(StringUtils.stringifyException(e));
    System.err.println("testDatabaseLocation() failed.");
    throw e;
  }
}
Usage of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache: class TestHiveMetaStore, method testSimpleTypeApi.
/**
 * Exercises the basic type API: create, fetch and drop of the builtin integer
 * type name, then verifies that fetching the dropped type raises
 * NoSuchObjectException.
 */
@Test
public void testSimpleTypeApi() throws Exception {
  try {
    // Ensure the type is absent before we begin.
    client.dropType(ColumnType.INT_TYPE_NAME);
    Type intType = new Type();
    intType.setName(ColumnType.INT_TYPE_NAME);
    assertTrue("Unable to create type", client.createType(intType));
    Type fetched = client.getType(ColumnType.INT_TYPE_NAME);
    assertNotNull(fetched);
    assertEquals(intType.getName(), fetched.getName());
    assertTrue("unable to drop type integer", client.dropType(ColumnType.INT_TYPE_NAME));
    // Fetching the dropped type must now fail.
    boolean sawNoSuchObject = false;
    try {
      client.getType(ColumnType.INT_TYPE_NAME);
    } catch (NoSuchObjectException e) {
      sawNoSuchObject = true;
    }
    assertTrue("Expected NoSuchObjectException", sawNoSuchObject);
  } catch (Exception e) {
    System.err.println(StringUtils.stringifyException(e));
    System.err.println("testSimpleTypeApi() failed.");
    throw e;
  }
}
Usage of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache: class TestHiveMetaStore, method testSimpleFunction.
/**
 * End-to-end test of the function metastore API: creates several functions,
 * exercises getFunction/getAllFunctions/getFunctions (including a nonexistent
 * lookup, which must raise NoSuchObjectException), then drops them all and
 * confirms nothing remains.
 */
@Test
public void testSimpleFunction() throws Exception {
  String dbName = "test_db";
  String funcName = "test_func";
  String className = "org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper";
  String owner = "test_owner";
  final int N_FUNCTIONS = 5;
  PrincipalType ownerType = PrincipalType.USER;
  int createTime = (int) (System.currentTimeMillis() / 1000);
  FunctionType funcType = FunctionType.JAVA;
  try {
    // Clean out the db and any leftover functions from previous runs.
    cleanUp(dbName, null, null);
    for (Function f : client.getAllFunctions().getFunctions()) {
      client.dropFunction(f.getDbName(), f.getFunctionName());
    }
    createDb(dbName);
    for (int i = 0; i < N_FUNCTIONS; i++) {
      createFunction(dbName, funcName + "_" + i, className, owner, ownerType, createTime, funcType, null);
    }
    // Try the different getters
    // getFunction()
    Function func = client.getFunction(dbName, funcName + "_0");
    assertEquals("function db name", dbName, func.getDbName());
    assertEquals("function name", funcName + "_0", func.getFunctionName());
    assertEquals("function class name", className, func.getClassName());
    assertEquals("function owner name", owner, func.getOwnerName());
    assertEquals("function owner type", PrincipalType.USER, func.getOwnerType());
    assertEquals("function type", funcType, func.getFunctionType());
    List<ResourceUri> resources = func.getResourceUris();
    assertTrue("function resources", resources == null || resources.size() == 0);
    // Looking up a function that does not exist must fail.
    boolean gotException = false;
    try {
      func = client.getFunction(dbName, "nonexistent_func");
    } catch (NoSuchObjectException e) {
      // expected failure
      gotException = true;
    }
    assertTrue("expected NoSuchObjectException for nonexistent function", gotException);
    // getAllFunctions()
    GetAllFunctionsResponse response = client.getAllFunctions();
    List<Function> allFunctions = response.getFunctions();
    assertEquals(N_FUNCTIONS, allFunctions.size());
    assertEquals(funcName + "_3", allFunctions.get(3).getFunctionName());
    // getFunctions()
    List<String> funcs = client.getFunctions(dbName, "*_func_*");
    assertEquals(N_FUNCTIONS, funcs.size());
    assertEquals(funcName + "_0", funcs.get(0));
    funcs = client.getFunctions(dbName, "nonexistent_func");
    assertEquals(0, funcs.size());
    // dropFunction()
    for (int i = 0; i < N_FUNCTIONS; i++) {
      client.dropFunction(dbName, funcName + "_" + i);
    }
    // Confirm that the function is now gone
    funcs = client.getFunctions(dbName, funcName);
    assertEquals(0, funcs.size());
    response = client.getAllFunctions();
    allFunctions = response.getFunctions();
    assertEquals(0, allFunctions.size());
  } catch (Exception e) {
    System.err.println(StringUtils.stringifyException(e));
    // Fixed copy-paste bug: the message previously named testConcurrentMetastores().
    System.err.println("testSimpleFunction() failed.");
    throw e;
  } finally {
    silentDropDatabase(dbName);
  }
}
Aggregations