Search in sources :

Example 1 with ShellCmdExecutor

Use of org.apache.hadoop.hive.common.cli.ShellCmdExecutor in the project hive by apache.

From the class Commands, the method sh.

/**
 * Handles a BeeLine {@code !sh} command by running the remainder of the line
 * as an operating-system shell command.
 *
 * @param line the raw command line; must begin with the literal prefix "sh"
 * @return {@code true} if the shell command ran and exited with status 0,
 *         {@code false} for null/empty input, a non-"sh" line, a non-zero
 *         exit code, or any exception raised while executing
 */
public boolean sh(String line) {
    // Guard: nothing to do for null or empty input.
    if (line == null || line.length() == 0) {
        return false;
    }
    // Guard: only lines that start with the "sh" keyword are handled here.
    if (!line.startsWith("sh")) {
        return false;
    }
    // Drop the leading "sh" token and any surrounding whitespace.
    line = line.substring("sh".length()).trim();
    if (!beeLine.isBeeLine()) {
        // Hive CLI compatibility mode: perform variable substitution first.
        line = substituteVariables(getHiveConf(false), line.trim());
    }
    try {
        ShellCmdExecutor executor =
            new ShellCmdExecutor(line, beeLine.getOutputStream(), beeLine.getErrorStream());
        int exitCode = executor.execute();
        if (exitCode == 0) {
            return true;
        }
        beeLine.output("Command failed with exit code = " + exitCode);
        return false;
    } catch (Exception e) {
        beeLine.error("Exception raised from Shell command " + e);
        return false;
    }
}
Also used : URISyntaxException(java.net.URISyntaxException) SQLException(java.sql.SQLException) IOException(java.io.IOException) ShellCmdExecutor(org.apache.hadoop.hive.common.cli.ShellCmdExecutor)

Example 2 with ShellCmdExecutor

Use of org.apache.hadoop.hive.common.cli.ShellCmdExecutor in the project hive by apache.

From the class CliDriver, the method processCmd.

/**
 * Dispatches a single Hive CLI command to the appropriate handler:
 * {@code quit}/{@code exit} terminate the session, {@code source <file>}
 * runs a script file, a leading {@code !} runs a shell command, and
 * anything else is routed to a {@link CommandProcessor}.
 *
 * @param cmd the raw command text entered by the user
 * @return 0 on success, non-zero on failure ({@code quit}/{@code exit}
 *         call {@code System.exit(0)} and do not return)
 */
public int processCmd(String cmd) {
    CliSessionState ss = (CliSessionState) SessionState.get();
    ss.setLastCommand(cmd);
    ss.updateThreadName();
    // Flush the print stream, so it doesn't include output from the last command
    ss.err.flush();
    String cmd_trimmed = cmd.trim();
    String[] tokens = tokenizeCmd(cmd_trimmed);
    int ret = 0;
    // equalsIgnoreCase is locale-independent, unlike toLowerCase().equals(...):
    // under e.g. the Turkish default locale "QUIT".toLowerCase() yields "quıt"
    // (dotless i) and would fail to match. It also avoids building two
    // lowercase copies of the command string.
    if (cmd_trimmed.equalsIgnoreCase("quit") || cmd_trimmed.equalsIgnoreCase("exit")) {
        // if we have come this far - either the previous commands
        // are all successful or this is command line. in either case
        // this counts as a successful run
        ss.close();
        System.exit(0);
    } else if (tokens[0].equalsIgnoreCase("source")) {
        // "source <file>": expand Hive variables in the path, then run the file.
        String cmd_1 = getFirstCmd(cmd_trimmed, tokens[0].length());
        cmd_1 = new VariableSubstitution(new HiveVariableSource() {

            @Override
            public Map<String, String> getHiveVariable() {
                return SessionState.get().getHiveVariables();
            }
        }).substitute(ss.getConf(), cmd_1);
        File sourceFile = new File(cmd_1);
        if (!sourceFile.isFile()) {
            console.printError("File: " + cmd_1 + " is not a file.");
            ret = 1;
        } else {
            try {
                ret = processFile(cmd_1);
            } catch (IOException e) {
                console.printError("Failed processing file " + cmd_1 + " " + e.getLocalizedMessage(), stringifyException(e));
                ret = 1;
            }
        }
    } else if (cmd_trimmed.startsWith("!")) {
        // "!<cmd>": expand Hive variables, then run the rest as a shell command.
        String shell_cmd = cmd_trimmed.substring(1);
        shell_cmd = new VariableSubstitution(new HiveVariableSource() {

            @Override
            public Map<String, String> getHiveVariable() {
                return SessionState.get().getHiveVariables();
            }
        }).substitute(ss.getConf(), shell_cmd);
        try {
            ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err);
            ret = executor.execute();
            if (ret != 0) {
                console.printError("Command failed with exit code = " + ret);
            }
        } catch (Exception e) {
            console.printError("Exception raised from Shell command " + e.getLocalizedMessage(), stringifyException(e));
            ret = 1;
        }
    } else {
        // local mode: hand the command to the matching CommandProcessor.
        try {
            CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
            ret = processLocalCmd(cmd, proc, ss);
        } catch (SQLException e) {
            console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
        }
    }
    ss.resetThreadName();
    return ret;
}
Also used : SQLException(java.sql.SQLException) VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) IOException(java.io.IOException) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) FileNotFoundException(java.io.FileNotFoundException) StringUtils.stringifyException(org.apache.hadoop.util.StringUtils.stringifyException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) CommandNeedRetryException(org.apache.hadoop.hive.ql.CommandNeedRetryException) SQLException(java.sql.SQLException) LogInitializationException(org.apache.hadoop.hive.common.LogUtils.LogInitializationException) IOException(java.io.IOException) CommandProcessor(org.apache.hadoop.hive.ql.processors.CommandProcessor) File(java.io.File) Map(java.util.Map) ShellCmdExecutor(org.apache.hadoop.hive.common.cli.ShellCmdExecutor)

Aggregations

IOException (java.io.IOException)2 SQLException (java.sql.SQLException)2 ShellCmdExecutor (org.apache.hadoop.hive.common.cli.ShellCmdExecutor)2 File (java.io.File)1 FileNotFoundException (java.io.FileNotFoundException)1 UnsupportedEncodingException (java.io.UnsupportedEncodingException)1 URISyntaxException (java.net.URISyntaxException)1 Map (java.util.Map)1 CliSessionState (org.apache.hadoop.hive.cli.CliSessionState)1 LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException)1 HiveVariableSource (org.apache.hadoop.hive.conf.HiveVariableSource)1 VariableSubstitution (org.apache.hadoop.hive.conf.VariableSubstitution)1 CommandNeedRetryException (org.apache.hadoop.hive.ql.CommandNeedRetryException)1 CommandProcessor (org.apache.hadoop.hive.ql.processors.CommandProcessor)1 StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException)1