Use of org.apache.hadoop.hive.ql.metadata.InvalidTableException in project hive by apache.
In class QTestUtil, method clearTablesCreatedDuringTests.
/**
 * Clear out any side effects of running tests
 */
public void clearTablesCreatedDuringTests() throws Exception {
  if (System.getenv(QTEST_LEAVE_FILES) != null) {
    return;
  }
  conf.set("hive.metastore.filter.hook",
      "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl");
  db = Hive.get(conf);
  // Delete any tables other than the source tables,
  // and any databases other than the default database.
  for (String dbName : db.getAllDatabases()) {
    SessionState.get().setCurrentDatabase(dbName);
    for (String tblName : db.getAllTables()) {
      if (!DEFAULT_DATABASE_NAME.equals(dbName) || !srcTables.contains(tblName)) {
        Table tblObj = null;
        try {
          tblObj = db.getTable(tblName);
        } catch (InvalidTableException e) {
          LOG.warn("Trying to drop table " + e.getTableName() + ". But it does not exist.");
          continue;
        }
        // An index table cannot be dropped directly; dropping the base
        // table will automatically drop all of its index tables.
        if (tblObj.isIndexTable()) {
          continue;
        }
        db.dropTable(dbName, tblName, true, true, fsType == FsType.encrypted_hdfs);
      } else {
        // this table is defined in srcTables, drop all indexes on it
        List<Index> indexes = db.getIndexes(dbName, tblName, (short) -1);
        if (indexes != null && indexes.size() > 0) {
          for (Index index : indexes) {
            db.dropIndex(dbName, tblName, index.getIndexName(), true, true);
          }
        }
      }
    }
    if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
      // Drop cascade; functions are dropped by cascade as well.
      db.dropDatabase(dbName, true, true, true);
    }
  }
  // delete remaining directories for external tables (can affect stats for following tests)
  try {
    Path p = new Path(testWarehouse);
    FileSystem fileSystem = p.getFileSystem(conf);
    if (fileSystem.exists(p)) {
      for (FileStatus status : fileSystem.listStatus(p)) {
        if (status.isDirectory() && !srcTables.contains(status.getPath().getName())) {
          fileSystem.delete(status.getPath(), true);
        }
      }
    }
  } catch (IllegalArgumentException e) {
    // ignore.. the warehouse path is sometimes an intentionally invalid URL
  }
  SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);
  List<String> roleNames = db.getAllRoleNames();
  for (String roleName : roleNames) {
    if (!"PUBLIC".equalsIgnoreCase(roleName) && !"ADMIN".equalsIgnoreCase(roleName)) {
      db.dropRole(roleName);
    }
  }
}
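The cleanup above relies on one recurring pattern: probe with getTable and treat InvalidTableException as "nothing to drop". A minimal sketch of that pattern, assuming the same db and LOG members as the snippet; dropTableIfExists is a hypothetical helper name, and the dropTable flag values are assumptions:

// Hypothetical helper distilling the probe-then-drop pattern used above.
private boolean dropTableIfExists(Hive db, String dbName, String tblName) throws HiveException {
  try {
    db.getTable(tblName);  // probe; throws InvalidTableException if the table is gone
  } catch (InvalidTableException e) {
    LOG.warn("Skipping drop of " + e.getTableName() + ": it does not exist.");
    return false;  // nothing to drop
  }
  // deleteData=true, ignoreUnknownTab=true, ifPurge=false (assumed defaults)
  db.dropTable(dbName, tblName, true, true, false);
  return true;
}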
Use of org.apache.hadoop.hive.ql.metadata.InvalidTableException in project hive by apache.
In class DDLSemanticAnalyzer, method analyzeAlterTableDropParts.
private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView) throws SemanticException {
  boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null)
      || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
  // If the drop has to fail on non-existent partitions, we cannot batch expressions.
  // That is because we actually have to check each separate expression for existence.
  // We could do a small optimization for the case where expr has all columns and all
  // operators are equality, if we assume those would always match one partition (which
  // may not be true with legacy, non-normalized column values). This is probably a
  // popular case but that's kinda hacky. Let's not do it for now.
  boolean canGroupExprs = ifExists;
  boolean mustPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
  ReplicationSpec replicationSpec = new ReplicationSpec(ast);
  Table tab = null;
  try {
    tab = getTable(qualified);
  } catch (SemanticException se) {
    if (replicationSpec.isInReplicationScope()
        && ((se.getCause() instanceof InvalidTableException)
            || (se.getMessage().contains(ErrorMsg.INVALID_TABLE.getMsg())))) {
      // In replication scope, the table not existing is not an error.
      // We just return in that case, no drop needed.
      return;
      // TODO : the contains message check is fragile, we should refactor SemanticException to be
      // queryable for error code, and not simply have a message
      // NOTE : IF_EXISTS might also want to invoke this, but there's a good possibility
      // that IF_EXISTS is stricter about table existence, and applies only to the ptn.
      // Therefore, ignoring IF_EXISTS here.
    } else {
      throw se;
    }
  }
  Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = getFullPartitionSpecs(ast, tab, canGroupExprs);
  // nothing to do
  if (partSpecs.isEmpty())
    return;
  validateAlterTableType(tab, AlterTableTypes.DROPPARTITION, expectView);
  ReadEntity re = new ReadEntity(tab);
  re.noLockNeeded();
  inputs.add(re);
  addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists);
  DropTableDesc dropTblDesc = new DropTableDesc(getDotName(qualified), partSpecs,
      expectView ? TableType.VIRTUAL_VIEW : null, mustPurge, replicationSpec);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf));
}
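The replication-scope test in the catch block is a candidate for extraction, since ExportSemanticAnalyzer below applies a near-identical check. A hedged sketch that restates the condition above verbatim; isMissingTableInReplScope is a hypothetical name:

// Hypothetical predicate capturing the check above: in replication scope,
// a missing table surfaces either as an InvalidTableException cause or,
// more fragilely (see the TODO), as an INVALID_TABLE message substring.
private static boolean isMissingTableInReplScope(SemanticException se, ReplicationSpec spec) {
  return spec.isInReplicationScope()
      && ((se.getCause() instanceof InvalidTableException)
          || se.getMessage().contains(ErrorMsg.INVALID_TABLE.getMsg()));
}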
Use of org.apache.hadoop.hive.ql.metadata.InvalidTableException in project hive by apache.
In class HCatSemanticAnalyzerBase, method authorizeTable.
protected void authorizeTable(Hive hive, String tableName, Privilege priv) throws AuthorizationException, HiveException {
  Table table;
  try {
    table = hive.getTable(tableName);
  } catch (InvalidTableException ite) {
    // Table itself doesn't exist in metastore, nothing to validate.
    return;
  }
  authorize(table, priv);
}
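From the caller's perspective, this makes authorization checks tolerant of dropped tables. A hypothetical usage sketch, assuming a Hive handle named hive; the table names are placeholders:

// Tables missing from the metastore are silently skipped by authorizeTable.
for (String name : new String[] { "web_logs", "already_dropped_tbl" }) {
  authorizeTable(hive, name, Privilege.SELECT);
}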
Use of org.apache.hadoop.hive.ql.metadata.InvalidTableException in project hive by apache.
In class ExportSemanticAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
  Tree tableTree = ast.getChild(0);
  Tree toTree = ast.getChild(1);
  if (ast.getChildCount() > 2) {
    replicationSpec = new ReplicationSpec((ASTNode) ast.getChild(2));
  } else {
    replicationSpec = new ReplicationSpec();
  }
  // initialize export path
  String tmpPath = stripQuotes(toTree.getText());
  URI toURI = EximUtil.getValidatedURI(conf, tmpPath);
  // initialize source table/partition
  TableSpec ts;
  try {
    ts = new TableSpec(db, conf, (ASTNode) tableTree, false, true);
  } catch (SemanticException sme) {
    if ((replicationSpec.isInReplicationScope())
        && ((sme.getCause() instanceof InvalidTableException)
            || (sme instanceof Table.ValidationFailureSemanticException))) {
      // If we're in replication scope, it's possible that we're running the export long after
      // the table was dropped, so the table not existing currently or being a different kind of
      // table is not an error - it simply means we should no-op, and let a future export
      // capture the appropriate state
      ts = null;
    } else {
      throw sme;
    }
  }
  // All parsing is done, we're now good to start the export process.
  prepareExport(ast, toURI, ts, replicationSpec, db, conf, ctx, rootTasks, inputs, outputs, LOG);
}
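The drop-tolerant lookup could be factored out along these lines. A minimal sketch, assuming the same db, conf, and replicationSpec members as the snippet; tableSpecOrNull is a hypothetical name:

// Hypothetical helper: resolve a TableSpec, or return null when the table has
// been dropped (or changed kind) while we are in replication scope, in which
// case the export no-ops and a future export captures the appropriate state.
private TableSpec tableSpecOrNull(ASTNode tableTree) throws SemanticException {
  try {
    return new TableSpec(db, conf, tableTree, false, true);
  } catch (SemanticException sme) {
    if (replicationSpec.isInReplicationScope()
        && ((sme.getCause() instanceof InvalidTableException)
            || (sme instanceof Table.ValidationFailureSemanticException))) {
      return null;
    }
    throw sme;
  }
}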
Use of org.apache.hadoop.hive.ql.metadata.InvalidTableException in project hive by apache.
In class SemanticAnalyzer, method validateAnalyzePartialscan.
/**
 * Validate partialscan command
 *
 * @param tree
 * @throws SemanticException
 */
private void validateAnalyzePartialscan(ASTNode tree) throws SemanticException {
  // since this is a partialscan command, the child node holds the actual table name
  String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
  Table tbl;
  try {
    tbl = this.getTableObjectByName(tableName);
  } catch (InvalidTableException e) {
    throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
  } catch (HiveException e) {
    throw new SemanticException(e.getMessage(), e);
  }
  /* A nice error message should be given to the user. */
  if (tbl.isNonNative()) {
    throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_NON_NATIVE.getMsg(tbl.getTableName()));
  }
  /**
   * Partial scan doesn't support external tables.
   */
  if (tbl.getTableType().equals(TableType.EXTERNAL_TABLE)) {
    throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_EXTERNAL_TABLE.getMsg(tbl.getTableName()));
  }
  if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
    throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_AUTOGATHER.getMsg());
  }
}
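The lookup-and-translate idiom in the try/catch above recurs throughout the semantic analyzers: an InvalidTableException becomes the user-facing INVALID_TABLE error, while any other HiveException is wrapped with its own message. A hedged sketch of it as a standalone helper; requireTable is a hypothetical name:

// Hypothetical helper restating the lookup-and-translate pattern above.
private Table requireTable(String tableName) throws SemanticException {
  try {
    return this.getTableObjectByName(tableName);
  } catch (InvalidTableException e) {
    throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
  } catch (HiveException e) {
    throw new SemanticException(e.getMessage(), e);
  }
}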