Example 1 with OptionsProcessor

Use of org.apache.hadoop.hive.cli.OptionsProcessor in the SQLWindowing project by hbutani.

The run method of the class WindowingHiveCliDriver:

public static int run(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
        return 1;
    }
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    boolean logInitFailed = false;
    String logInitDetailMessage;
    try {
        logInitDetailMessage = LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        logInitFailed = true;
        logInitDetailMessage = e.getMessage();
    }
    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.info = new PrintStream(System.err, true, "UTF-8");
        ss.err = new CachingPrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        return 3;
    }
    if (!oproc.process_stage2(ss)) {
        return 2;
    }
    if (!ss.getIsSilent()) {
        if (logInitFailed) {
            System.err.println(logInitDetailMessage);
        } else {
            SessionState.getConsole().printInfo(logInitDetailMessage);
        }
    }
    // set all properties specified via command line
    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
        ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
    }
    SessionState.start(ss);
    // connect to Hive Server
    if (ss.getHost() != null) {
        ss.connect();
        if (ss.isRemoteMode()) {
            prompt = "[" + ss.getHost() + ':' + ss.getPort() + "] " + prompt;
            char[] spaces = new char[prompt.length()];
            Arrays.fill(spaces, ' ');
            prompt2 = new String(spaces);
        }
    }
    // CLI remote mode is a thin client: only load auxJars in local mode
    if (!ss.isRemoteMode() && !ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }
    WindowingHiveCliDriver cli = new WindowingHiveCliDriver();
    cli.setHiveVariables(oproc.getHiveVariables());
    // use the specified database if specified
    cli.processSelectDatabase(ss);
    // Execute -i init files (always in silent mode)
    cli.processInitFiles(ss);
    cli.setupWindowing();
    if (ss.execString != null) {
        return cli.processLine(ss.execString);
    }
    try {
        if (ss.fileName != null) {
            return cli.processFile(ss.fileName);
        }
    } catch (FileNotFoundException e) {
        System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        return 3;
    }
    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);
    for (Completor completor : getCommandCompletor()) {
        reader.addCompletor(completor);
    }
    String line;
    final String HISTORYFILE = ".hivehistory";
    String historyDirectory = System.getProperty("user.home");
    try {
        if ((new File(historyDirectory)).exists()) {
            String historyFile = historyDirectory + File.separator + HISTORYFILE;
            reader.setHistory(new History(new File(historyFile)));
        } else {
            System.err.println("WARNING: Directory for Hive history file: " + historyDirectory + " does not exist.   History will not be available during this session.");
        }
    } catch (Exception e) {
        System.err.println("WARNING: Encountered an error while trying to initialize Hive's " + "history file.  History will not be available during this session.");
        System.err.println(e.getMessage());
    }
    int ret = 0;
    String prefix = "";
    String curDB = getFormattedDb(conf, ss);
    String curPrompt = prompt + curDB;
    String dbSpaces = spacesForString(curDB);
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
        if (!prefix.equals("")) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            ret = cli.processLine(line, true);
            prefix = "";
            curDB = getFormattedDb(conf, ss);
            curPrompt = prompt + curDB;
            dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
        } else {
            prefix = prefix + line;
            curPrompt = prompt2 + dbSpaces;
            continue;
        }
    }
    ss.close();
    return ret;
}
Also used: CliSessionState (org.apache.hadoop.hive.cli.CliSessionState), SessionState (org.apache.hadoop.hive.ql.session.SessionState), PrintStream (java.io.PrintStream), CachingPrintStream (org.apache.hadoop.hive.common.io.CachingPrintStream), ConsoleReader (jline.ConsoleReader), Completor (jline.Completor), History (jline.History), FileNotFoundException (java.io.FileNotFoundException), UnsupportedEncodingException (java.io.UnsupportedEncodingException), LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException), WindowingException (com.sap.hadoop.windowing.WindowingException), OptionsProcessor (org.apache.hadoop.hive.cli.OptionsProcessor), HiveConf (org.apache.hadoop.hive.conf.HiveConf), Map (java.util.Map), File (java.io.File)
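
The example above follows the two-stage OptionsProcessor pattern: process_stage1 parses the generic command-line switches before log4j or the session is touched, and process_stage2 applies the session-level options to a CliSessionState. Below is a minimal sketch of that pattern distilled from the example, omitting the interactive loop and log4j setup. The class name OptionsProcessorSketch and method name bootstrap are illustrative only, not part of the Hive API, and the error-stream handling is simplified.

import java.io.PrintStream;
import java.util.Map;

import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.cli.OptionsProcessor;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class OptionsProcessorSketch {

    public static int bootstrap(String[] args) throws Exception {
        OptionsProcessor oproc = new OptionsProcessor();
        // Stage 1: parse the generic switches (e.g. -hiveconf, -hivevar)
        // before any other Hive classes read the configuration.
        if (!oproc.process_stage1(args)) {
            return 1;
        }
        CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
        ss.in = System.in;
        ss.out = new PrintStream(System.out, true, "UTF-8");
        // The example above wraps System.err in a CachingPrintStream;
        // a plain PrintStream keeps this sketch short.
        ss.err = new PrintStream(System.err, true, "UTF-8");
        // Stage 2: apply the session-level options (e.g. -e, -f, -S) to the session state.
        if (!oproc.process_stage2(ss)) {
            return 2;
        }
        // Copy any command-line property overrides into the session's HiveConf,
        // mirroring the loop over ss.cmdProperties in run() above.
        HiveConf conf = ss.getConf();
        for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
            conf.set((String) item.getKey(), (String) item.getValue());
        }
        SessionState.start(ss);
        return 0;
    }
}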

Example 2 with OptionsProcessor

Use of org.apache.hadoop.hive.cli.OptionsProcessor in the hive project by apache.

The run method of the class CliDriver:

public int run(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
        return 1;
    }
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    boolean logInitFailed = false;
    String logInitDetailMessage;
    try {
        logInitDetailMessage = LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        logInitFailed = true;
        logInitDetailMessage = e.getMessage();
    }
    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.info = new PrintStream(System.err, true, "UTF-8");
        ss.err = new CachingPrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        return 3;
    }
    if (!oproc.process_stage2(ss)) {
        return 2;
    }
    if (!ss.getIsSilent()) {
        if (logInitFailed) {
            System.err.println(logInitDetailMessage);
        } else {
            SessionState.getConsole().printInfo(logInitDetailMessage);
        }
    }
    // set all properties specified via command line
    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
        ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
    }
    // read prompt configuration and substitute variables.
    prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
    prompt = new VariableSubstitution(new HiveVariableSource() {

        @Override
        public Map<String, String> getHiveVariable() {
            return SessionState.get().getHiveVariables();
        }
    }).substitute(conf, prompt);
    prompt2 = spacesForString(prompt);
    if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC)) {
        // Start the session in a fire-and-forget manner. When the asynchronously initialized parts of
        // the session are needed, the corresponding getters and other methods will wait as needed.
        SessionState.beginStart(ss, console);
    } else {
        SessionState.start(ss);
    }
    ss.updateThreadName();
    // execute cli driver work
    try {
        return executeDriver(ss, conf, oproc);
    } finally {
        ss.resetThreadName();
        ss.close();
    }
}
Also used: SessionState (org.apache.hadoop.hive.ql.session.SessionState), CliSessionState (org.apache.hadoop.hive.cli.CliSessionState), PrintStream (java.io.PrintStream), CachingPrintStream (org.apache.hadoop.hive.common.io.CachingPrintStream), VariableSubstitution (org.apache.hadoop.hive.conf.VariableSubstitution), HiveVariableSource (org.apache.hadoop.hive.conf.HiveVariableSource), UnsupportedEncodingException (java.io.UnsupportedEncodingException), LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException), OptionsProcessor (org.apache.hadoop.hive.cli.OptionsProcessor), HiveConf (org.apache.hadoop.hive.conf.HiveConf), Map (java.util.Map)
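
Compared with the SQLWindowing example, this version also reads the CLI prompt from HiveConf.ConfVars.CLIPROMPT and passes it through VariableSubstitution, so ${...} references in the prompt are resolved against the current session's Hive variables. The following is a minimal sketch of that substitution step in isolation, using the same API calls as the example above; it assumes a session has already been started, and the class name PromptSubstitutionSketch and method name resolvePrompt are illustrative.

import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveVariableSource;
import org.apache.hadoop.hive.conf.VariableSubstitution;
import org.apache.hadoop.hive.ql.session.SessionState;

public class PromptSubstitutionSketch {

    // Expands variable references in the configured CLI prompt against the
    // current session's Hive variables, mirroring the prompt setup in
    // CliDriver.run above.
    public static String resolvePrompt(HiveConf conf) {
        String rawPrompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
        return new VariableSubstitution(new HiveVariableSource() {
            @Override
            public Map<String, String> getHiveVariable() {
                return SessionState.get().getHiveVariables();
            }
        }).substitute(conf, rawPrompt);
    }
}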

Aggregations

PrintStream (java.io.PrintStream): 2 examples
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 2 examples
Map (java.util.Map): 2 examples
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 2 examples
OptionsProcessor (org.apache.hadoop.hive.cli.OptionsProcessor): 2 examples
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException): 2 examples
CachingPrintStream (org.apache.hadoop.hive.common.io.CachingPrintStream): 2 examples
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 2 examples
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 2 examples
WindowingException (com.sap.hadoop.windowing.WindowingException): 1 example
File (java.io.File): 1 example
FileNotFoundException (java.io.FileNotFoundException): 1 example
Completor (jline.Completor): 1 example
ConsoleReader (jline.ConsoleReader): 1 example
History (jline.History): 1 example
HiveVariableSource (org.apache.hadoop.hive.conf.HiveVariableSource): 1 example
VariableSubstitution (org.apache.hadoop.hive.conf.VariableSubstitution): 1 example