Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeAlterTableArchive:
private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolean isUnArchive)
    throws SemanticException {
  if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) {
    throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg());
  }
  Table tab = getTable(qualified);
  // partition name to value
  List<Map<String, String>> partSpecs = getPartitionSpecs(tab, ast);
  addTablePartsOutputs(tab, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK);
  validateAlterTableType(tab, AlterTableTypes.ARCHIVE);
  // register the table as a read input for authorization and lineage
  inputs.add(new ReadEntity(tab));
  // (UN)ARCHIVE operates on exactly one partition spec
  if (partSpecs.size() > 1) {
    throw new SemanticException(isUnArchive ? ErrorMsg.UNARCHIVE_ON_MULI_PARTS.getMsg()
        : ErrorMsg.ARCHIVE_ON_MULI_PARTS.getMsg());
  }
  if (partSpecs.size() == 0) {
    throw new SemanticException(ErrorMsg.ARCHIVE_ON_TABLE.getMsg());
  }
  Map<String, String> partSpec = partSpecs.get(0);
  try {
    isValidPrefixSpec(tab, partSpec);
  } catch (HiveException e) {
    throw new SemanticException(e.getMessage(), e);
  }
  AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(getDotName(qualified), partSpec,
      (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc)));
}
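For context, a minimal sketch (not from the Hive source) of reaching this method through org.apache.hadoop.hive.ql.Driver; the table and partition names are hypothetical, and the classic Driver(HiveConf) and run(String) APIs are assumed:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;

public class ArchiveExample {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // hive.archive.enabled is off by default; without it the analyzer
    // throws ARCHIVE_METHODS_DISABLED before doing anything else
    conf.setBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED, true);
    Driver driver = new Driver(conf);
    // exactly one partition spec: none or several fail semantic analysis
    driver.run("ALTER TABLE page_view ARCHIVE PARTITION (ds='2008-04-08')");
  }
}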
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeShowPartitions:
private void analyzeShowPartitions(ASTNode ast) throws SemanticException {
  ShowPartitionsDesc showPartsDesc;
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  List<Map<String, String>> partSpecs = getPartitionSpecs(getTable(tableName), ast);
  // we can only have a single partition spec
  assert (partSpecs.size() <= 1);
  Map<String, String> partSpec = null;
  if (partSpecs.size() > 0) {
    partSpec = partSpecs.get(0);
  }
  validateTable(tableName, null);
  showPartsDesc = new ShowPartitionsDesc(tableName, ctx.getResFile(), partSpec);
  // SHOW PARTITIONS only reads metadata, but the table is still recorded as an input
  inputs.add(new ReadEntity(getTable(tableName)));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showPartsDesc)));
  setFetchTask(createFetchTask(showPartsDesc.getSchema()));
}
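Reusing the hypothetical driver from the first sketch, a statement that reaches this method might look like the following; the optional partial partition spec narrows the listing, and Driver.getResults(List), assumed here from the classic API, returns the fetched rows:

// list partitions matching a partial spec; 'page_view' is hypothetical
driver.run("SHOW PARTITIONS page_view PARTITION (ds='2008-04-08')");
List<String> rows = new ArrayList<>();
// results come from the fetch task created by setFetchTask(...) above
driver.getResults(rows);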
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeSwitchDatabase:
private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());
  Database database = getDatabase(dbName, true);
  ReadEntity dbReadEntity = new ReadEntity(database);
  // switching the current database only touches session state, so no lock is needed
  dbReadEntity.noLockNeeded();
  inputs.add(dbReadEntity);
  SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc)));
}
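The statement behind this path is plain USE; with the hypothetical driver from the first sketch:

// 'sales' is a hypothetical database; only session state changes,
// which is why the ReadEntity above is marked noLockNeeded()
driver.run("USE sales");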
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeAlterTableRenamePart:
private void analyzeAlterTableRenamePart(ASTNode ast, String tblName,
    HashMap<String, String> oldPartSpec) throws SemanticException {
  Table tab = getTable(tblName, true);
  validateAlterTableType(tab, AlterTableTypes.RENAMEPARTITION);
  Map<String, String> newPartSpec = getValidatedPartSpec(tab, (ASTNode) ast.getChild(0), conf, false);
  if (newPartSpec == null) {
    throw new SemanticException("RENAME PARTITION Missing Destination" + ast);
  }
  // the table itself is only read; the affected partitions are registered
  // below as outputs with an exclusive lock
  ReadEntity re = new ReadEntity(tab);
  re.noLockNeeded();
  inputs.add(re);
  List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
  partSpecs.add(oldPartSpec);
  partSpecs.add(newPartSpec);
  addTablePartsOutputs(tab, partSpecs, WriteEntity.WriteType.DDL_EXCLUSIVE);
  RenamePartitionDesc renamePartitionDesc = new RenamePartitionDesc(tblName, oldPartSpec, newPartSpec, null);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), renamePartitionDesc)));
}
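With the same hypothetical driver, the triggering statement names the old spec before RENAME TO and the new spec after it:

driver.run("ALTER TABLE page_view PARTITION (ds='2008-04-08') "
    + "RENAME TO PARTITION (ds='2008-04-09')");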
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeDropTable:
private void analyzeDropTable(ASTNode ast, TableType expectedType) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
  // we want to signal an error if the table/view doesn't exist and we're
  // configured not to fail silently
  boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
  ReplicationSpec replicationSpec = new ReplicationSpec(ast);
  Table tab = getTable(tableName, throwException);
  if (tab != null) {
    // the table is read during analysis and dropped (written exclusively) at execution
    inputs.add(new ReadEntity(tab));
    outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE));
  }
  boolean ifPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
  DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectedType, ifExists, ifPurge, replicationSpec);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc)));
}
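IF EXISTS and PURGE in the statement map one-to-one onto the ifExists and ifPurge flags above; with the hypothetical driver:

// PURGE bypasses the trash directory, so the data is unrecoverable
driver.run("DROP TABLE IF EXISTS page_view PURGE");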