Search in sources :

Example 6 with HiveVariableSource

Use of org.apache.hadoop.hive.conf.HiveVariableSource in the Apache Hive project.

From the class ColumnStatsSemanticAnalyzer, method genRewrittenQuery.

/**
 * Generates the rewritten "select compute_stats(...)" query used to gather
 * column statistics for the analyzed table.
 *
 * @param colNames         columns to compute statistics for
 * @param conf             session configuration (supplies the NDV algorithm)
 * @param partSpec         partition spec used to build the partition filter clause
 * @param isPartitionStats whether partition-level statistics are being collected
 * @return the rewritten query text, with hive variables substituted
 * @throws SemanticException if the FM-sketch bit-vector count cannot be determined
 */
private String genRewrittenQuery(List<String> colNames, HiveConf conf, Map<String, String> partSpec, boolean isPartitionStats) throws SemanticException {
    StringBuilder rewrittenQueryBuilder = new StringBuilder("select ");
    // The NDV algorithm is loop-invariant: read it once instead of on every column.
    String func = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_STATS_NDV_ALGO).toLowerCase();
    for (int i = 0; i < colNames.size(); i++) {
        if (i > 0) {
            rewrittenQueryBuilder.append(" , ");
        }
        rewrittenQueryBuilder.append("compute_stats(`");
        rewrittenQueryBuilder.append(escapeBackTicks(colNames.get(i)));
        rewrittenQueryBuilder.append("`, '" + func + "'");
        if ("fm".equals(func)) {
            // The FM sketch additionally needs the number of bit vectors.
            int numBitVectors = 0;
            try {
                numBitVectors = HiveStatsUtils.getNumBitVectorsForNDVEstimation(conf);
            } catch (Exception e) {
                // Preserve the original cause instead of flattening it to a message string.
                throw new SemanticException(e.getMessage(), e);
            }
            rewrittenQueryBuilder.append(", " + numBitVectors);
        }
        rewrittenQueryBuilder.append(')');
    }
    if (isPartitionStats) {
        for (FieldSchema fs : tbl.getPartCols()) {
            // Escape partition column names the same way regular columns are escaped above.
            rewrittenQueryBuilder.append(" , `" + escapeBackTicks(fs.getName()) + "`");
        }
    }
    rewrittenQueryBuilder.append(" from `");
    rewrittenQueryBuilder.append(tbl.getDbName());
    rewrittenQueryBuilder.append("`.");
    rewrittenQueryBuilder.append("`" + tbl.getTableName() + "`");
    isRewritten = true;
    // Restrict to the requested partitions when gathering partition-level stats.
    if (isPartitionStats) {
        rewrittenQueryBuilder.append(genPartitionClause(partSpec));
    }
    String rewrittenQuery = rewrittenQueryBuilder.toString();
    // Resolve ${hivevar:...}-style references against the current session's variables.
    rewrittenQuery = new VariableSubstitution(new HiveVariableSource() {

        @Override
        public Map<String, String> getHiveVariable() {
            return SessionState.get().getHiveVariables();
        }
    }).substitute(conf, rewrittenQuery);
    return rewrittenQuery;
}
Also used : VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) Map(java.util.Map) IOException(java.io.IOException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)

Example 7 with HiveVariableSource

use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.

the class ColumnStatsSemanticAnalyzer method genRewrittenQuery.

/**
 * Builds the rewritten statistics query of the form
 * "select compute_stats(`c0` , N ) , ... from `db`.`table`", optionally
 * projecting partition columns and appending a partition filter clause,
 * then substitutes hive variables before returning.
 *
 * @param colNames         columns to compute statistics for
 * @param numBitVectors    bit-vector count passed to compute_stats
 * @param partSpec         partition spec used to build the partition filter clause
 * @param isPartitionStats whether partition-level statistics are being collected
 * @return the rewritten query text, with hive variables substituted
 */
private String genRewrittenQuery(List<String> colNames, int numBitVectors, Map<String, String> partSpec, boolean isPartitionStats) throws SemanticException {
    StringBuilder sb = new StringBuilder("select ");
    boolean first = true;
    for (String colName : colNames) {
        if (!first) {
            sb.append(" , ");
        }
        first = false;
        sb.append("compute_stats(`").append(colName).append("` , ").append(numBitVectors).append(" )");
    }
    if (isPartitionStats) {
        // Project the partition columns as well for partition-level statistics.
        for (FieldSchema partCol : tbl.getPartCols()) {
            sb.append(" , `").append(partCol.getName()).append("`");
        }
    }
    sb.append(" from `").append(tbl.getDbName()).append("`.").append("`").append(tbl.getTableName()).append("`");
    isRewritten = true;
    // Restrict to the requested partitions when gathering partition-level stats.
    if (isPartitionStats) {
        sb.append(genPartitionClause(partSpec));
    }
    // Resolve ${hivevar:...}-style references against the current session's variables.
    return new VariableSubstitution(new HiveVariableSource() {

        @Override
        public Map<String, String> getHiveVariable() {
            return SessionState.get().getHiveVariables();
        }
    }).substitute(conf, sb.toString());
}
Also used : VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) Map(java.util.Map)

Example 8 with HiveVariableSource

use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.

From the class AbstractCoreBlobstoreCliDriver, method setupUniqueTestPath.

/**
 * Generates a unique test path for this particular CliDriver in the following form:
 *   ${test.blobstore.path}/CoreBlobstore[Negative]CliDriver/20160101.053046.332-{random number 000-999}
 * 20160101.053046.332 represents the current datetime:
 *   {year}{month}{day}.{hour}{minute}{second}.{millisecond}
 * Random integer 000-999 included to avoid collisions when two test runs are started at the same millisecond with
 *  the same ${test.blobstore.path} (possible if test runs are controlled by an automated system)
 */
private void setupUniqueTestPath() {
    // Resolve ${...} references in the configured base path; no hive variables are
    // available at this point, so the variable source returns null.
    String testBlobstorePath = new VariableSubstitution(new HiveVariableSource() {

        @Override
        public Map<String, String> getHiveVariable() {
            return null;
        }
    }).substitute(new HiveConf(), qt.getConf().get(HCONF_TEST_BLOBSTORE_PATH));
    testBlobstorePath = HiveTestEnvSetup.ensurePathEndsInSlash(testBlobstorePath);
    // name of child class
    testBlobstorePath += HiveTestEnvSetup.ensurePathEndsInSlash(this.getClass().getSimpleName());
    // nextInt(1000) yields 0-999 inclusive, matching the documented range; the previous
    // (int) (Math.random() * 999) could never produce 999. ThreadLocalRandom also avoids
    // contention on the shared Math.random() generator.
    String uid = new SimpleDateFormat("yyyyMMdd.HHmmss.SSS").format(Calendar.getInstance().getTime()) + "-" + String.format("%03d", java.util.concurrent.ThreadLocalRandom.current().nextInt(1000));
    testBlobstorePathUnique = testBlobstorePath + uid;
    // Mask the generated path in query output so recorded test results stay comparable.
    qt.getQOutProcessor().addPatternWithMaskComment(testBlobstorePathUnique, String.format("### %s ###", HCONF_TEST_BLOBSTORE_PATH));
}
Also used : VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) HiveConf(org.apache.hadoop.hive.conf.HiveConf) SimpleDateFormat(java.text.SimpleDateFormat)

Example 9 with HiveVariableSource

use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.

the class CliDriver method run.

/**
 * Entry point for the Hive CLI: processes command-line options, initializes
 * logging and session streams, applies command-line properties, builds the
 * prompt, starts the session, and delegates to executeDriver.
 *
 * @param args raw command-line arguments
 * @return 0 on success; 1 or 2 for option-processing failures; 3 when UTF-8
 *         print streams cannot be created; otherwise the driver's exit code
 * @throws Exception propagated from driver execution
 */
public int run(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
        return 1;
    }
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    boolean logInitFailed = false;
    String logInitDetailMessage;
    try {
        logInitDetailMessage = LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        // Remember the failure; it is surfaced later only if the session is not silent.
        logInitFailed = true;
        logInitDetailMessage = e.getMessage();
    }
    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        // Force UTF-8 on all session streams regardless of the platform default charset.
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.info = new PrintStream(System.err, true, "UTF-8");
        ss.err = new CachingPrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        return 3;
    }
    if (!oproc.process_stage2(ss)) {
        return 2;
    }
    // Report how log initialization went, unless the session was asked to be silent.
    if (!ss.getIsSilent()) {
        if (logInitFailed) {
            System.err.println(logInitDetailMessage);
        } else {
            SessionState.getConsole().printInfo(logInitDetailMessage);
        }
    }
    // set all properties specified via command line
    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
        ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
    }
    // read prompt configuration and substitute variables.
    prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
    prompt = new VariableSubstitution(new HiveVariableSource() {

        @Override
        public Map<String, String> getHiveVariable() {
            return SessionState.get().getHiveVariables();
        }
    }).substitute(conf, prompt);
    prompt2 = spacesForString(prompt);
    if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC)) {
        // Start the session in a fire-and-forget manner. When the asynchronously initialized parts of
        // the session are needed, the corresponding getters and other methods will wait as needed.
        SessionState.beginStart(ss, console);
    } else {
        SessionState.start(ss);
    }
    ss.updateThreadName();
    // Create views registry
    HiveMaterializedViewsRegistry.get().init();
    // execute cli driver work
    try {
        return executeDriver(ss, conf, oproc);
    } finally {
        // Always restore the thread name and close the session, even when the driver fails.
        ss.resetThreadName();
        ss.close();
    }
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) PrintStream(java.io.PrintStream) VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) UnsupportedEncodingException(java.io.UnsupportedEncodingException) OptionsProcessor(org.apache.hadoop.hive.cli.OptionsProcessor) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) LogInitializationException(org.apache.hadoop.hive.common.LogUtils.LogInitializationException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Map(java.util.Map)

Example 10 with HiveVariableSource

use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.

the class CliDriver method processCmd.

/**
 * Dispatches a single CLI command: "quit"/"exit" terminate the process,
 * "source &lt;file&gt;" replays a script file, a leading "!" runs a shell
 * command, and anything else is routed to the matching CommandProcessor.
 * Variable substitution is applied to source paths and shell commands.
 *
 * @param cmd the raw command text as entered by the user
 * @return 0 on success, non-zero on failure (does not return for quit/exit)
 */
public int processCmd(String cmd) {
    CliSessionState ss = (CliSessionState) SessionState.get();
    ss.setLastCommand(cmd);
    ss.updateThreadName();
    // Flush the print stream, so it doesn't include output from the last command
    ss.err.flush();
    String cmd_trimmed = HiveStringUtils.removeComments(cmd).trim();
    String[] tokens = tokenizeCmd(cmd_trimmed);
    int ret = 0;
    if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
        // if we have come this far - either the previous commands
        // are all successful or this is command line. in either case
        // this counts as a successful run
        ss.close();
        System.exit(0);
    } else if (tokens[0].equalsIgnoreCase("source")) {
        // "source <path>": substitute hive variables in the path, then replay the file.
        String cmd_1 = getFirstCmd(cmd_trimmed, tokens[0].length());
        cmd_1 = new VariableSubstitution(new HiveVariableSource() {

            @Override
            public Map<String, String> getHiveVariable() {
                return SessionState.get().getHiveVariables();
            }
        }).substitute(ss.getConf(), cmd_1);
        File sourceFile = new File(cmd_1);
        if (!sourceFile.isFile()) {
            console.printError("File: " + cmd_1 + " is not a file.");
            ret = 1;
        } else {
            try {
                ret = processFile(cmd_1);
            } catch (IOException e) {
                console.printError("Failed processing file " + cmd_1 + " " + e.getLocalizedMessage(), stringifyException(e));
                ret = 1;
            }
        }
    } else if (cmd_trimmed.startsWith("!")) {
        // for shell commands, use unstripped command
        String shell_cmd = cmd.trim().substring(1);
        shell_cmd = new VariableSubstitution(new HiveVariableSource() {

            @Override
            public Map<String, String> getHiveVariable() {
                return SessionState.get().getHiveVariables();
            }
        }).substitute(ss.getConf(), shell_cmd);
        // shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'";
        try {
            ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err);
            ret = executor.execute();
            if (ret != 0) {
                console.printError("Command failed with exit code = " + ret);
            }
        } catch (Exception e) {
            console.printError("Exception raised from Shell command " + e.getLocalizedMessage(), stringifyException(e));
            ret = 1;
        }
    } else {
        // local mode
        // try-with-resources closes the processor once the command completes.
        try {
            try (CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf)) {
                if (proc instanceof IDriver) {
                    // Let Driver strip comments using sql parser
                    ret = processLocalCmd(cmd, proc, ss);
                } else {
                    ret = processLocalCmd(cmd_trimmed, proc, ss);
                }
            }
        } catch (SQLException e) {
            console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    ss.resetThreadName();
    return ret;
}
Also used : SQLException(java.sql.SQLException) VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) IOException(java.io.IOException) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) FileNotFoundException(java.io.FileNotFoundException) StringUtils.stringifyException(org.apache.hadoop.util.StringUtils.stringifyException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) SQLException(java.sql.SQLException) LogInitializationException(org.apache.hadoop.hive.common.LogUtils.LogInitializationException) IOException(java.io.IOException) CommandProcessor(org.apache.hadoop.hive.ql.processors.CommandProcessor) IDriver(org.apache.hadoop.hive.ql.IDriver) File(java.io.File) Map(java.util.Map) ShellCmdExecutor(org.apache.hadoop.hive.common.cli.ShellCmdExecutor)

Aggregations

HiveVariableSource (org.apache.hadoop.hive.conf.HiveVariableSource)14 VariableSubstitution (org.apache.hadoop.hive.conf.VariableSubstitution)14 Map (java.util.Map)10 SessionState (org.apache.hadoop.hive.ql.session.SessionState)5 HiveConf (org.apache.hadoop.hive.conf.HiveConf)4 IOException (java.io.IOException)3 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)3 MetadataTypedColumnsetSerDe.defaultNullString (org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.defaultNullString)3 PrintStream (java.io.PrintStream)2 UnsupportedEncodingException (java.io.UnsupportedEncodingException)2 CliSessionState (org.apache.hadoop.hive.cli.CliSessionState)2 LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException)2 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)2 VisibleForTesting (com.google.common.annotations.VisibleForTesting)1 ImmutableMap (com.google.common.collect.ImmutableMap)1 File (java.io.File)1 FileNotFoundException (java.io.FileNotFoundException)1 SQLException (java.sql.SQLException)1 SimpleDateFormat (java.text.SimpleDateFormat)1 HashMap (java.util.HashMap)1