Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class CliDriver, method processCmd.
public int processCmd(String cmd) {
  CliSessionState ss = (CliSessionState) SessionState.get();
  ss.setLastCommand(cmd);
  ss.updateThreadName();
  // Flush the print stream, so it doesn't include output from the last command
  ss.err.flush();
  String cmd_trimmed = HiveStringUtils.removeComments(cmd).trim();
  String[] tokens = tokenizeCmd(cmd_trimmed);
  int ret = 0;
  if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
    // if we have come this far - either the previous commands
    // are all successful or this is command line. in either case
    // this counts as a successful run
    ss.close();
    System.exit(0);
  } else if (tokens[0].equalsIgnoreCase("source")) {
    String cmd_1 = getFirstCmd(cmd_trimmed, tokens[0].length());
    cmd_1 = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), cmd_1);
    File sourceFile = new File(cmd_1);
    if (!sourceFile.isFile()) {
      console.printError("File: " + cmd_1 + " is not a file.");
      ret = 1;
    } else {
      try {
        ret = processFile(cmd_1);
      } catch (IOException e) {
        console.printError("Failed processing file " + cmd_1 + " " + e.getLocalizedMessage(), stringifyException(e));
        ret = 1;
      }
    }
  } else if (cmd_trimmed.startsWith("!")) {
    // for shell commands, use unstripped command
    String shell_cmd = cmd.trim().substring(1);
    shell_cmd = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), shell_cmd);
    // shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'";
    try {
      ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err);
      ret = executor.execute();
      if (ret != 0) {
        console.printError("Command failed with exit code = " + ret);
      }
    } catch (Exception e) {
      console.printError("Exception raised from Shell command " + e.getLocalizedMessage(), stringifyException(e));
      ret = 1;
    }
  } else {
    // local mode
    try {
      try (CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf)) {
        if (proc instanceof IDriver) {
          // Let Driver strip comments using sql parser
          ret = processLocalCmd(cmd, proc, ss);
        } else {
          ret = processLocalCmd(cmd_trimmed, proc, ss);
        }
      }
    } catch (SQLException e) {
      console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils.stringifyException(e));
      ret = 1;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  ss.resetThreadName();
  return ret;
}
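The same substitution pattern can be exercised outside the CLI. Below is a minimal, self-contained sketch (not part of the Hive source; the class name, variable map, and query string are invented for illustration) that resolves a ${hivevar:...} reference against a hand-built variable map instead of SessionState:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveVariableSource;
import org.apache.hadoop.hive.conf.VariableSubstitution;

public class SubstitutionSketch {
  public static void main(String[] args) {
    // Illustrative variable map; in the CLI this comes from SessionState.get().getHiveVariables().
    Map<String, String> hiveVars = new HashMap<>();
    hiveVars.put("tbl", "sales");
    String resolved = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return hiveVars;
      }
    }).substitute(new HiveConf(), "SELECT * FROM ${hivevar:tbl}");
    // Expected to print: SELECT * FROM sales
    System.out.println(resolved);
  }
}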
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class CompileProcessor, method parse.
/**
 * Parses the supplied command.
 * @param ss the current SessionState; when non-null, hive variables are substituted into the command
 * @throws CommandProcessorException if the code can not be compiled or the jar can not be made
 */
@VisibleForTesting
void parse(SessionState ss) throws CommandProcessorException {
  if (ss != null) {
    command = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), command);
  }
  if (command == null || command.length() == 0) {
    throw new CommandProcessorException("Command was empty");
  }
  StringBuilder toCompile = new StringBuilder();
  int startPosition = 0;
  int endPosition = -1;
  /* TODO Escape handling may be changed by a follow on.
   * The largest issue is ; which are treated as statement
   * terminators for the cli. Once the cli is fixed this
   * code should be re-investigated
   */
  while (command.charAt(startPosition++) != '`' && startPosition < command.length()) {
  }
  if (startPosition == command.length()) {
    throw new CommandProcessorException(SYNTAX);
  }
  for (int i = startPosition; i < command.length(); i++) {
    if (command.charAt(i) == '\\') {
      toCompile.append(command.charAt(i + 1));
      i = i + 1;
      continue;
    } else if (command.charAt(i) == '`') {
      endPosition = i;
      break;
    } else {
      toCompile.append(command.charAt(i));
    }
  }
  if (endPosition == -1) {
    throw new CommandProcessorException(SYNTAX);
  }
  StringTokenizer st = new StringTokenizer(command.substring(endPosition + 1), " ");
  if (st.countTokens() != 4) {
    throw new CommandProcessorException(SYNTAX);
  }
  String shouldBeAs = st.nextToken();
  if (!shouldBeAs.equalsIgnoreCase(AS)) {
    throw new CommandProcessorException(SYNTAX);
  }
  setLang(st.nextToken());
  if (!lang.equalsIgnoreCase(GROOVY)) {
    throw new CommandProcessorException("Can not compile " + lang + ". Hive can only compile " + GROOVY);
  }
  String shouldBeNamed = st.nextToken();
  if (!shouldBeNamed.equalsIgnoreCase(NAMED)) {
    throw new CommandProcessorException(SYNTAX);
  }
  setNamed(st.nextToken());
  setCode(toCompile.toString());
}
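Read together, the parser expects the code payload wrapped in backticks (with backslash escapes honored inside it), followed by exactly four tokens: AS, the language, NAMED, and a file name. An illustrative command of that shape (the Groovy body and file name are made up for the example):

COMPILE `class Pyth { def hello() { return "hello" } }` AS GROOVY NAMED Pyth.groovy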
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class Compiler, method initialize.
private void initialize(String rawCommand) throws CommandProcessorException {
  perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
  driverState.compilingWithLocking();
  VariableSubstitution variableSubstitution = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  });
  String command = variableSubstitution.substitute(driverContext.getConf(), rawCommand);
  String queryStr = command;
  try {
    // the command should be redacted before logging, to avoid logging sensitive data
    queryStr = HookUtils.redactLogString(driverContext.getConf(), command);
  } catch (Exception e) {
    LOG.warn("WARNING! Query command could not be redacted." + e);
  }
  DriverUtils.checkInterrupted(driverState, driverContext, "at beginning of compilation.", null, null);
  context.setCmd(command);
  driverContext.getQueryDisplay().setQueryStr(queryStr);
  LOG.info("Compiling command(queryId=" + driverContext.getQueryId() + "): " + queryStr);
  driverContext.getConf().setQueryString(queryStr);
  // FIXME: side effect will leave the last query set at the session level
  if (SessionState.get() != null) {
    SessionState.get().getConf().setQueryString(queryStr);
    SessionState.get().setupQueryCurrentTimestamp();
  }
}
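A side note on the substitution step itself (stated as an assumption about standard Hive configuration, not something shown in this snippet): VariableSubstitution is governed by the hive.variable.substitute property, so when that flag is off the raw command is expected to pass through unchanged. A minimal sketch of turning it off on a configuration object:

HiveConf conf = new HiveConf();
// setBoolean is inherited from Hadoop's Configuration; "hive.variable.substitute" is the standard property name.
conf.setBoolean("hive.variable.substitute", false);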
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class SetProcessor, method setConf.
/**
 * @return A console message that is not strong enough to fail the command (e.g. deprecation).
 */
static String setConf(SessionState ss, String varname, String key, String varvalue, boolean register) throws IllegalArgumentException {
  String result = null;
  HiveConf conf = ss.getConf();
  String value = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return ss.getHiveVariables();
    }
  }).substitute(conf, varvalue);
  if (conf.getBoolVar(HiveConf.ConfVars.HIVECONFVALIDATION)) {
    HiveConf.ConfVars confVars = HiveConf.getConfVars(key);
    if (confVars != null) {
      if (!confVars.isType(value)) {
        StringBuilder message = new StringBuilder();
        message.append("'SET ").append(varname).append('=').append(varvalue);
        message.append("' FAILED because ").append(key).append(" expects ");
        message.append(confVars.typeString()).append(" type value.");
        throw new IllegalArgumentException(message.toString());
      }
      String fail = confVars.validate(value);
      if (fail != null) {
        StringBuilder message = new StringBuilder();
        message.append("'SET ").append(varname).append('=').append(varvalue);
        message.append("' FAILED in validation : ").append(fail).append('.');
        throw new IllegalArgumentException(message.toString());
      }
    } else if (!removedConfigs.contains(key) && key.startsWith("hive.")) {
      throw new IllegalArgumentException("hive configuration " + key + " does not exists.");
    }
  }
  conf.verifyAndSet(key, value);
  if (HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname.equals(key)) {
    if (!"spark".equals(value)) {
      ss.closeSparkSession();
    }
    if ("mr".equals(value)) {
      result = HiveConf.generateMrDeprecationWarning();
      LOG.warn(result);
    }
  }
  if (HiveConf.ConfVars.CREATE_TABLE_AS_EXTERNAL.varname.equals(key)) {
    result = HiveConf.generateDeprecationWarning();
    LOG.warn(result);
  }
  if (register) {
    ss.getOverriddenConfigurations().put(key, value);
  }
  return result;
}
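At the CLI, this is the path taken when the value side of a SET command references a hive variable: the substitution above resolves the reference before type checking, validation, and verifyAndSet. An illustrative session (the variable and value are made up for the example):

SET hivevar:engine=tez;
SET hive.execution.engine=${hivevar:engine};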
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class DfsProcessor, method run.
@Override
public CommandProcessorResponse run(String command) throws CommandProcessorException {
  try {
    SessionState ss = SessionState.get();
    command = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), command);
    String[] tokens = splitCmd(command);
    CommandProcessorResponse authErrResp = CommandUtil.authorizeCommand(ss, HiveOperationType.DFS, Arrays.asList(tokens));
    if (authErrResp != null) {
      // there was an authorization issue
      return authErrResp;
    }
    PrintStream oldOut = System.out;
    if (ss != null && ss.out != null) {
      System.setOut(ss.out);
    }
    int ret = dfs.run(tokens);
    System.setOut(oldOut);
    if (ret != 0) {
      console.printError("Command " + command + " failed with exit code = " + ret);
      throw new CommandProcessorException(ret);
    }
    return new CommandProcessorResponse(dfsSchema, null);
  } catch (CommandProcessorException e) {
    throw e;
  } catch (Exception e) {
    console.printError("Exception raised from DFSShell.run " + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils.stringifyException(e));
    throw new CommandProcessorException(1);
  }
}
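Because the substitution runs before splitCmd tokenizes the command and dfs.run executes it, hive variables can be used directly inside dfs commands at the CLI. An illustrative example (the variable name and path are invented):

SET hivevar:staging=/tmp/staging;
dfs -ls ${hivevar:staging};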