
Example 1 with CommandProcessor

Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in project hive by apache.

Class QTestUtil, method executeTestCommand.

private int executeTestCommand(final String command) {
    String commandName = command.trim().split("\\s+")[0];
    String commandArgs = command.trim().substring(commandName.length());
    if (commandArgs.endsWith(";")) {
        commandArgs = StringUtils.chop(commandArgs);
    }
    // Replace ${hiveconf:hive.metastore.warehouse.dir} with the actual warehouse dir if it exists.
    // We only want the absolute path, so strip the scheme/authority prefix, e.g. hdfs://localhost:57145.
    String wareHouseDir = SessionState.get().getConf().getVar(ConfVars.METASTOREWAREHOUSE).replaceAll("^[a-zA-Z]+://.*?:\\d+", "");
    commandArgs = commandArgs.replaceAll("\\$\\{hiveconf:hive\\.metastore\\.warehouse\\.dir\\}", wareHouseDir);
    if (SessionState.get() != null) {
        SessionState.get().setLastCommand(commandName + " " + commandArgs.trim());
    }
    enableTestOnlyCmd(SessionState.get().getConf());
    try {
        CommandProcessor proc = getTestCommand(commandName);
        if (proc != null) {
            CommandProcessorResponse response = proc.run(commandArgs.trim());
            int rc = response.getResponseCode();
            if (rc != 0) {
                SessionState.get().out.println(response);
            }
            return rc;
        } else {
            throw new RuntimeException("Could not get CommandProcessor for command: " + commandName);
        }
    } catch (Exception e) {
        throw new RuntimeException("Could not execute test command: " + e.getMessage());
    }
}
Also used: CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse), CommandProcessor (org.apache.hadoop.hive.ql.processors.CommandProcessor), SQLException (java.sql.SQLException), IOException (java.io.IOException), BuildException (org.apache.tools.ant.BuildException), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), ParseException (org.apache.hadoop.hive.ql.parse.ParseException), FileNotFoundException (java.io.FileNotFoundException), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)
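
The two replaceAll calls are the only subtle part of this helper. The standalone snippet below (not part of QTestUtil; the warehouse location and command arguments are illustrative values) shows what they do: the first regex strips the scheme, host and port from the configured warehouse location, and the second expands the ${hiveconf:hive.metastore.warehouse.dir} placeholder in a test command's arguments.

public class WarehouseDirSubstitution {
    public static void main(String[] args) {
        // Example value only; in QTestUtil this comes from ConfVars.METASTOREWAREHOUSE.
        String configuredWarehouse = "hdfs://localhost:57145/user/hive/warehouse";
        // Keep only the absolute path, dropping e.g. "hdfs://localhost:57145".
        String wareHouseDir = configuredWarehouse.replaceAll("^[a-zA-Z]+://.*?:\\d+", "");
        // An argument string of the kind the encryption q-tests use; illustrative only.
        String commandArgs = "CREATE_ZONE --keyName key_128 --path ${hiveconf:hive.metastore.warehouse.dir}/encrypted_table";
        commandArgs = commandArgs.replaceAll("\\$\\{hiveconf:hive\\.metastore\\.warehouse\\.dir\\}", wareHouseDir);
        // Prints: CREATE_ZONE --keyName key_128 --path /user/hive/warehouse/encrypted_table
        System.out.println(commandArgs);
    }
}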

Example 2 with CommandProcessor

Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in project hive by apache.

Class TestHCatLoaderEncryption, method associateEncryptionZoneWithPath.

private void associateEncryptionZoneWithPath(String path) throws SQLException, CommandNeedRetryException {
    LOG.info(this.storageFormat + ": associateEncryptionZoneWithPath");
    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
    enableTestOnlyCmd(SessionState.get().getConf());
    CommandProcessor crypto = getTestCommand("crypto");
    if (crypto == null)
        return;
    checkExecutionResponse(crypto.run("CREATE_KEY --keyName key_128 --bitLength 128"));
    checkExecutionResponse(crypto.run("CREATE_ZONE --keyName key_128 --path " + path));
}
Also used: CommandProcessor (org.apache.hadoop.hive.ql.processors.CommandProcessor)
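
checkExecutionResponse is a private helper of TestHCatLoaderEncryption whose body is not shown on this page. A minimal sketch of what it plausibly does, assuming JUnit 4 (org.junit.Assert.assertEquals) and the Hive 1.x/2.x CommandProcessorResponse API, would be a method on the test class such as:

// Hypothetical helper; the real TestHCatLoaderEncryption implementation may differ.
private void checkExecutionResponse(CommandProcessorResponse response) {
    // In Hive 1.x/2.x a response code of 0 means the command succeeded.
    assertEquals("crypto command failed: " + response.getErrorMessage(),
                 0, response.getResponseCode());
}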

Example 3 with CommandProcessor

Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in project hive by apache.

Class TestHCatLoaderEncryption, method removeEncryptionZone.

private void removeEncryptionZone() throws SQLException, CommandNeedRetryException {
    LOG.info(this.storageFormat + ": removeEncryptionZone");
    enableTestOnlyCmd(SessionState.get().getConf());
    CommandProcessor crypto = getTestCommand("crypto");
    if (crypto == null) {
        return;
    }
    checkExecutionResponse(crypto.run("DELETE_KEY --keyName key_128"));
}
Also used: CommandProcessor (org.apache.hadoop.hive.ql.processors.CommandProcessor)

Example 4 with CommandProcessor

Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in project hive by apache.

Class ExecuteStatementOperation, method newExecuteStatementOperation.

public static ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runAsync, long queryTimeout) throws HiveSQLException {
    String[] tokens = statement.trim().split("\\s+");
    CommandProcessor processor = null;
    try {
        processor = CommandProcessorFactory.getForHiveCommand(tokens, parentSession.getHiveConf());
    } catch (SQLException e) {
        throw new HiveSQLException(e.getMessage(), e.getSQLState(), e);
    }
    if (processor == null) {
        // runAsync, queryTimeout makes sense only for a SQLOperation
        return new SQLOperation(parentSession, statement, confOverlay, runAsync, queryTimeout);
    }
    return new HiveCommandOperation(parentSession, statement, processor, confOverlay);
}
Also used: HiveSQLException (org.apache.hive.service.cli.HiveSQLException), SQLException (java.sql.SQLException), CommandProcessor (org.apache.hadoop.hive.ql.processors.CommandProcessor)
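
The null check is the interesting decision here: CommandProcessorFactory.getForHiveCommand only recognizes Hive commands such as set, dfs, add or delete, and returns null for plain SQL, which is what routes a statement to SQLOperation. A small standalone sketch of that decision (the class name and test statements below are ours, not Hive's) looks like this:

import java.sql.SQLException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;

public class DispatchSketch {
    public static void main(String[] args) throws SQLException {
        HiveConf conf = new HiveConf();
        for (String statement : new String[] { "set hive.execution.engine", "SELECT 1" }) {
            String[] tokens = statement.trim().split("\\s+");
            // Non-null for Hive commands (set, dfs, add, ...), null for plain SQL.
            CommandProcessor processor = CommandProcessorFactory.getForHiveCommand(tokens, conf);
            System.out.println(statement + " -> "
                + (processor == null ? "plain SQL (would become a SQLOperation)"
                                     : processor.getClass().getSimpleName()));
        }
    }
}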

Example 5 with CommandProcessor

Use of org.apache.hadoop.hive.ql.processors.CommandProcessor in project hive by apache.

Class CliDriver, method processCmd.

public int processCmd(String cmd) {
    CliSessionState ss = (CliSessionState) SessionState.get();
    ss.setLastCommand(cmd);
    ss.updateThreadName();
    // Flush the print stream, so it doesn't include output from the last command
    ss.err.flush();
    String cmd_trimmed = cmd.trim();
    String[] tokens = tokenizeCmd(cmd_trimmed);
    int ret = 0;
    if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
        // if we have come this far - either the previous commands
        // are all successful or this is command line. in either case
        // this counts as a successful run
        ss.close();
        System.exit(0);
    } else if (tokens[0].equalsIgnoreCase("source")) {
        String cmd_1 = getFirstCmd(cmd_trimmed, tokens[0].length());
        cmd_1 = new VariableSubstitution(new HiveVariableSource() {

            @Override
            public Map<String, String> getHiveVariable() {
                return SessionState.get().getHiveVariables();
            }
        }).substitute(ss.getConf(), cmd_1);
        File sourceFile = new File(cmd_1);
        if (!sourceFile.isFile()) {
            console.printError("File: " + cmd_1 + " is not a file.");
            ret = 1;
        } else {
            try {
                ret = processFile(cmd_1);
            } catch (IOException e) {
                console.printError("Failed processing file " + cmd_1 + " " + e.getLocalizedMessage(), stringifyException(e));
                ret = 1;
            }
        }
    } else if (cmd_trimmed.startsWith("!")) {
        String shell_cmd = cmd_trimmed.substring(1);
        shell_cmd = new VariableSubstitution(new HiveVariableSource() {

            @Override
            public Map<String, String> getHiveVariable() {
                return SessionState.get().getHiveVariables();
            }
        }).substitute(ss.getConf(), shell_cmd);
        // shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'";
        try {
            ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err);
            ret = executor.execute();
            if (ret != 0) {
                console.printError("Command failed with exit code = " + ret);
            }
        } catch (Exception e) {
            console.printError("Exception raised from Shell command " + e.getLocalizedMessage(), stringifyException(e));
            ret = 1;
        }
    } else {
        // local mode
        try {
            CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
            ret = processLocalCmd(cmd, proc, ss);
        } catch (SQLException e) {
            console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
        }
    }
    ss.resetThreadName();
    return ret;
}
Also used: SQLException (java.sql.SQLException), VariableSubstitution (org.apache.hadoop.hive.conf.VariableSubstitution), HiveVariableSource (org.apache.hadoop.hive.conf.HiveVariableSource), IOException (java.io.IOException), CliSessionState (org.apache.hadoop.hive.cli.CliSessionState), FileNotFoundException (java.io.FileNotFoundException), StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException), UnsupportedEncodingException (java.io.UnsupportedEncodingException), CommandNeedRetryException (org.apache.hadoop.hive.ql.CommandNeedRetryException), LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException), CommandProcessor (org.apache.hadoop.hive.ql.processors.CommandProcessor), File (java.io.File), Map (java.util.Map), ShellCmdExecutor (org.apache.hadoop.hive.common.cli.ShellCmdExecutor)
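
The local-mode branch at the end is where CommandProcessorFactory.get comes in: for a non-SQL command it returns a lightweight processor (SetProcessor, DfsProcessor, AddResourceProcessor, ...), while plain SQL yields a Driver. A minimal sketch of that path, assuming a started SessionState and using a set command so no metastore is required, might look like the following (class name ours):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;

public class LocalCmdSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // Processors such as SetProcessor read the current SessionState, so start one.
        SessionState.start(new SessionState(conf));
        String cmd = "set hive.cli.print.header=true";
        String[] tokens = cmd.split("\\s+");
        CommandProcessor proc = CommandProcessorFactory.get(tokens, conf);
        // Like CliDriver.processLocalCmd, pass the command minus its first token.
        CommandProcessorResponse response = proc.run(cmd.substring(tokens[0].length()).trim());
        System.out.println("response code = " + response.getResponseCode());
    }
}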

Aggregations

CommandProcessor (org.apache.hadoop.hive.ql.processors.CommandProcessor): 5
SQLException (java.sql.SQLException): 3
FileNotFoundException (java.io.FileNotFoundException): 2
IOException (java.io.IOException): 2
File (java.io.File): 1
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 1
Map (java.util.Map): 1
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 1
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException): 1
ShellCmdExecutor (org.apache.hadoop.hive.common.cli.ShellCmdExecutor): 1
HiveVariableSource (org.apache.hadoop.hive.conf.HiveVariableSource): 1
VariableSubstitution (org.apache.hadoop.hive.conf.VariableSubstitution): 1
CommandNeedRetryException (org.apache.hadoop.hive.ql.CommandNeedRetryException): 1
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException): 1
ParseException (org.apache.hadoop.hive.ql.parse.ParseException): 1
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 1
CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse): 1
StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException): 1
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 1
BuildException (org.apache.tools.ant.BuildException): 1