Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in the Apache Hive project.
From the class QTestUtil, method executeTestCommand:
/**
 * Runs a single test-only command (e.g. "crypto ...") through its CommandProcessor.
 *
 * The first whitespace-delimited token of {@code command} is the command name; the
 * remainder is its argument string (a trailing ';' is stripped). Occurrences of
 * ${hiveconf:hive.metastore.warehouse.dir} in the arguments are replaced with the
 * configured warehouse directory, with any scheme/authority prefix such as
 * hdfs://localhost:57145 removed so only the absolute path remains.
 *
 * @param command the full command line to execute
 * @return the processor's response code; non-zero responses are echoed to the
 *         session's out stream
 * @throws RuntimeException if no processor exists for the command name, or if
 *         execution fails (the original failure is preserved as the cause)
 */
private int executeTestCommand(final String command) {
  String commandName = command.trim().split("\\s+")[0];
  String commandArgs = command.trim().substring(commandName.length());
  if (commandArgs.endsWith(";")) {
    commandArgs = StringUtils.chop(commandArgs);
  }
  // Fetch the session once. The original code null-checked SessionState.get() only
  // around setLastCommand(), yet dereferenced it unconditionally both before and
  // after that check - the guard was dead code, so we rely on a non-null session.
  SessionState ss = SessionState.get();
  // We only want the absolute warehouse path: strip a scheme/authority prefix
  // such as hdfs://localhost:57145 before substituting.
  String wareHouseDir =
      ss.getConf().getVar(ConfVars.METASTOREWAREHOUSE).replaceAll("^[a-zA-Z]+://.*?:\\d+", "");
  commandArgs = commandArgs.replaceAll("\\$\\{hiveconf:hive\\.metastore\\.warehouse\\.dir\\}", wareHouseDir);
  ss.setLastCommand(commandName + " " + commandArgs.trim());
  enableTestOnlyCmd(ss.getConf());
  CommandProcessor proc = getTestCommand(commandName);
  if (proc == null) {
    throw new RuntimeException("Could not get CommandProcessor for command: " + commandName);
  }
  try {
    CommandProcessorResponse response = proc.run(commandArgs.trim());
    int rc = response.getResponseCode();
    if (rc != 0) {
      ss.out.println(response);
    }
    return rc;
  } catch (Exception e) {
    // Chain the cause instead of flattening it to getMessage(), so the
    // original stack trace survives.
    throw new RuntimeException("Could not execute test command: " + e.getMessage(), e);
  }
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in the Apache Hive project.
From the class TestHCatLoaderEncryption, method associateEncryptionZoneWithPath:
/**
 * Creates an encryption key and associates an encryption zone with the given path
 * via the test-only "crypto" command processor.
 *
 * The test is assumed away when the current storage format is disabled, and the
 * method is a no-op when no "crypto" processor is available.
 *
 * @param path the path to associate with the new encryption zone
 */
private void associateEncryptionZoneWithPath(String path) throws SQLException, CommandNeedRetryException {
  LOG.info(this.storageFormat + ": associateEncryptionZoneWithPath");
  assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
  enableTestOnlyCmd(SessionState.get().getConf());
  CommandProcessor cryptoProcessor = getTestCommand("crypto");
  if (cryptoProcessor == null) {
    return;
  }
  checkExecutionResponse(cryptoProcessor.run("CREATE_KEY --keyName key_128 --bitLength 128"));
  checkExecutionResponse(cryptoProcessor.run("CREATE_ZONE --keyName key_128 --path " + path));
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in the Apache Hive project.
From the class TestHCatLoaderEncryption, method removeEncryptionZone:
/**
 * Deletes the test encryption key via the test-only "crypto" command processor.
 * A no-op when no "crypto" processor is available.
 */
private void removeEncryptionZone() throws SQLException, CommandNeedRetryException {
  LOG.info(this.storageFormat + ": removeEncryptionZone");
  enableTestOnlyCmd(SessionState.get().getConf());
  CommandProcessor cryptoProcessor = getTestCommand("crypto");
  if (cryptoProcessor != null) {
    checkExecutionResponse(cryptoProcessor.run("DELETE_KEY --keyName key_128"));
  }
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in the Apache Hive project.
From the class ExecuteStatementOperation, method newExecuteStatementOperation:
/**
 * Creates the operation that will execute the given statement: a
 * HiveCommandOperation when the statement's leading token resolves to a Hive
 * command processor, otherwise a SQLOperation.
 *
 * @param parentSession the session issuing the statement
 * @param statement     the statement text
 * @param confOverlay   per-operation configuration overrides
 * @param runAsync      whether to run asynchronously (SQLOperation only)
 * @param queryTimeout  query timeout in seconds (SQLOperation only)
 * @throws HiveSQLException if command-processor resolution fails
 */
public static ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runAsync, long queryTimeout) throws HiveSQLException {
  final String[] tokens = statement.trim().split("\\s+");
  CommandProcessor processor;
  try {
    processor = CommandProcessorFactory.getForHiveCommand(tokens, parentSession.getHiveConf());
  } catch (SQLException e) {
    throw new HiveSQLException(e.getMessage(), e.getSQLState(), e);
  }
  if (processor != null) {
    return new HiveCommandOperation(parentSession, statement, processor, confOverlay);
  }
  // runAsync and queryTimeout make sense only for a SQLOperation.
  return new SQLOperation(parentSession, statement, confOverlay, runAsync, queryTimeout);
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in the Apache Hive project.
From the class CliDriver, method processCmd:
/**
 * Processes a single CLI command and returns its exit status.
 *
 * Dispatch order: "quit"/"exit" close the session and terminate the JVM;
 * "source &lt;file&gt;" executes commands from a script file; a leading '!'
 * runs a shell command; anything else is resolved to a CommandProcessor and
 * run locally.
 *
 * @param cmd the raw command text
 * @return 0 on success, non-zero on failure (does not return for quit/exit)
 */
public int processCmd(String cmd) {
  CliSessionState ss = (CliSessionState) SessionState.get();
  ss.setLastCommand(cmd);
  ss.updateThreadName();
  // Flush the print stream, so it doesn't include output from the last command.
  ss.err.flush();
  String cmd_trimmed = cmd.trim();
  String[] tokens = tokenizeCmd(cmd_trimmed);
  int ret = 0;
  // equalsIgnoreCase is locale-independent; the previous
  // toLowerCase().equals(...) mis-handled "QUIT"/"EXIT" under locales with
  // special casing rules (e.g. Turkish dotless i).
  if (cmd_trimmed.equalsIgnoreCase("quit") || cmd_trimmed.equalsIgnoreCase("exit")) {
    // if we have come this far - either the previous commands
    // are all successful or this is command line. in either case
    // this counts as a successful run
    ss.close();
    System.exit(0);
  } else if (tokens[0].equalsIgnoreCase("source")) {
    String cmd_1 = substituteVariables(ss, getFirstCmd(cmd_trimmed, tokens[0].length()));
    File sourceFile = new File(cmd_1);
    if (!sourceFile.isFile()) {
      console.printError("File: " + cmd_1 + " is not a file.");
      ret = 1;
    } else {
      try {
        ret = processFile(cmd_1);
      } catch (IOException e) {
        console.printError("Failed processing file " + cmd_1 + " " + e.getLocalizedMessage(), stringifyException(e));
        ret = 1;
      }
    }
  } else if (cmd_trimmed.startsWith("!")) {
    String shell_cmd = substituteVariables(ss, cmd_trimmed.substring(1));
    try {
      ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err);
      ret = executor.execute();
      if (ret != 0) {
        console.printError("Command failed with exit code = " + ret);
      }
    } catch (Exception e) {
      console.printError("Exception raised from Shell command " + e.getLocalizedMessage(), stringifyException(e));
      ret = 1;
    }
  } else {
    // local mode: resolve a processor for the leading token and run the command.
    try {
      CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
      ret = processLocalCmd(cmd, proc, ss);
    } catch (SQLException e) {
      console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils.stringifyException(e));
      ret = 1;
    }
  }
  ss.resetThreadName();
  return ret;
}

/**
 * Applies session-variable substitution to {@code cmd}, using the current
 * session's Hive variables. Extracted because processCmd built this identical
 * anonymous VariableSubstitution twice.
 */
private String substituteVariables(CliSessionState ss, String cmd) {
  return new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  }).substitute(ss.getConf(), cmd);
}
Aggregations