Use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.
Class ColumnStatsSemanticAnalyzer, method genRewrittenQuery (variant that reads the NDV algorithm from HiveConf):
private String genRewrittenQuery(List<String> colNames, HiveConf conf,
    Map<String, String> partSpec, boolean isPartitionStats) throws SemanticException {
  StringBuilder rewrittenQueryBuilder = new StringBuilder("select ");
  for (int i = 0; i < colNames.size(); i++) {
    if (i > 0) {
      rewrittenQueryBuilder.append(" , ");
    }
    // Choose the NDV estimation algorithm ('fm' or 'hll') from the configuration.
    String func = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_STATS_NDV_ALGO).toLowerCase();
    rewrittenQueryBuilder.append("compute_stats(`");
    rewrittenQueryBuilder.append(escapeBackTicks(colNames.get(i)));
    rewrittenQueryBuilder.append("`, '" + func + "'");
    if ("fm".equals(func)) {
      // The Flajolet-Martin estimator additionally needs the number of bit vectors.
      int numBitVectors = 0;
      try {
        numBitVectors = HiveStatsUtils.getNumBitVectorsForNDVEstimation(conf);
      } catch (Exception e) {
        throw new SemanticException(e.getMessage());
      }
      rewrittenQueryBuilder.append(", " + numBitVectors);
    }
    rewrittenQueryBuilder.append(')');
  }
  if (isPartitionStats) {
    // Also project the partition columns so statistics can be computed per partition.
    for (FieldSchema fs : tbl.getPartCols()) {
      rewrittenQueryBuilder.append(" , `" + fs.getName() + "`");
    }
  }
  rewrittenQueryBuilder.append(" from `");
  rewrittenQueryBuilder.append(tbl.getDbName());
  rewrittenQueryBuilder.append("`.");
  rewrittenQueryBuilder.append("`" + tbl.getTableName() + "`");
  isRewritten = true;
  // Restrict the query to the requested partitions, if any.
  if (isPartitionStats) {
    rewrittenQueryBuilder.append(genPartitionClause(partSpec));
  }
  String rewrittenQuery = rewrittenQueryBuilder.toString();
  // Resolve ${hivevar:...} and other variable references against the current session.
  rewrittenQuery = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  }).substitute(conf, rewrittenQuery);
  return rewrittenQuery;
}
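For a hypothetical unpartitioned table `default`.`src` with columns key and value and hive.stats.ndv.algo=hll, this method would build select compute_stats(`key`, 'hll') , compute_stats(`value`, 'hll') from `default`.`src` before variable substitution. Every usage on this page shares that final step: wrap a HiveVariableSource in a VariableSubstitution and apply it to a string. The following minimal sketch shows the step in isolation; the class name VariableSubstitutionSketch and the fixed variable map are illustrative stand-ins for SessionState.get().getHiveVariables(), not part of Hive:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveVariableSource;
import org.apache.hadoop.hive.conf.VariableSubstitution;

public class VariableSubstitutionSketch {
  public static void main(String[] args) {
    // Fixed map standing in for the session's hive variables.
    final Map<String, String> vars = new HashMap<>();
    vars.put("db", "default");

    // Resolves ${hivevar:db} against the map above; variable substitution
    // (hive.variable.substitute) is enabled by default.
    String query = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return vars;
      }
    }).substitute(new HiveConf(), "select compute_stats(`key`, 'hll') from ${hivevar:db}.src");

    // Prints: select compute_stats(`key`, 'hll') from default.src
    System.out.println(query);
  }
}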
Use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.
Class ColumnStatsSemanticAnalyzer, method genRewrittenQuery (variant that takes numBitVectors directly):
private String genRewrittenQuery(List<String> colNames, int numBitVectors,
    Map<String, String> partSpec, boolean isPartitionStats) throws SemanticException {
  StringBuilder rewrittenQueryBuilder = new StringBuilder("select ");
  String rewrittenQuery;
  for (int i = 0; i < colNames.size(); i++) {
    if (i > 0) {
      rewrittenQueryBuilder.append(" , ");
    }
    rewrittenQueryBuilder.append("compute_stats(`");
    rewrittenQueryBuilder.append(colNames.get(i));
    rewrittenQueryBuilder.append("` , ");
    rewrittenQueryBuilder.append(numBitVectors);
    rewrittenQueryBuilder.append(" )");
  }
  if (isPartitionStats) {
    // Also project the partition columns so statistics can be computed per partition.
    for (FieldSchema fs : tbl.getPartCols()) {
      rewrittenQueryBuilder.append(" , `" + fs.getName() + "`");
    }
  }
  rewrittenQueryBuilder.append(" from `");
  rewrittenQueryBuilder.append(tbl.getDbName());
  rewrittenQueryBuilder.append("`.");
  rewrittenQueryBuilder.append("`" + tbl.getTableName() + "`");
  isRewritten = true;
  // Restrict the query to the requested partitions, if any.
  if (isPartitionStats) {
    rewrittenQueryBuilder.append(genPartitionClause(partSpec));
  }
  rewrittenQuery = rewrittenQueryBuilder.toString();
  rewrittenQuery = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  }).substitute(conf, rewrittenQuery);
  return rewrittenQuery;
}
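For illustration, with colNames = ["key"], numBitVectors = 16, and a table `default`.`src`, this variant builds select compute_stats(`key` , 16 ) from `default`.`src` before variable substitution. Unlike the variant above, it does not escape backticks in column names.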
Use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.
Class AbstractCoreBlobstoreCliDriver, method setupUniqueTestPath:
/**
 * Generates a unique test path for this particular CliDriver in the following form:
 * ${test.blobstore.path}/CoreBlobstore[Negative]CliDriver/20160101.053046.332-{random number 000-999}
 * 20160101.053046.332 represents the current datetime:
 * {year}{month}{day}.{hour}{minute}{second}.{millisecond}
 * A random integer 000-999 is included to avoid collisions when two test runs start at the
 * same millisecond with the same ${test.blobstore.path} (possible if test runs are controlled
 * by an automated system).
 */
private void setupUniqueTestPath() {
  // The HiveVariableSource returns null here, so no session-level hive variables are supplied;
  // substitution can still draw on the configuration, system properties, and the environment.
  String testBlobstorePath = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return null;
    }
  }).substitute(new HiveConf(), qt.getConf().get(HCONF_TEST_BLOBSTORE_PATH));
  testBlobstorePath = HiveTestEnvSetup.ensurePathEndsInSlash(testBlobstorePath);
  // Append the name of the concrete child class.
  testBlobstorePath += HiveTestEnvSetup.ensurePathEndsInSlash(this.getClass().getSimpleName());
  String uid = new SimpleDateFormat("yyyyMMdd.HHmmss.SSS").format(Calendar.getInstance().getTime())
      + "-" + String.format("%03d", (int) (Math.random() * 999));
  testBlobstorePathUnique = testBlobstorePath + uid;
  qt.getQOutProcessor().addPatternWithMaskComment(testBlobstorePathUnique,
      String.format("### %s ###", HCONF_TEST_BLOBSTORE_PATH));
}
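For example, a CoreBlobstoreCliDriver run started at 2016-01-01 05:30:46.332 with test.blobstore.path set to s3a://bucket/tests (an illustrative URI) would yield something like s3a://bucket/tests/CoreBlobstoreCliDriver/20160101.053046.332-042; the call to addPatternWithMaskComment then registers that unique path so it can be masked in the recorded query output.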
Use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.
Class CliDriver, method run:
public int run(String[] args) throws Exception {
  OptionsProcessor oproc = new OptionsProcessor();
  if (!oproc.process_stage1(args)) {
    return 1;
  }
  // NOTE: It is critical to do this here so that log4j is reinitialized
  // before any of the other core hive classes are loaded
  boolean logInitFailed = false;
  String logInitDetailMessage;
  try {
    logInitDetailMessage = LogUtils.initHiveLog4j();
  } catch (LogInitializationException e) {
    logInitFailed = true;
    logInitDetailMessage = e.getMessage();
  }
  CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
  ss.in = System.in;
  try {
    ss.out = new PrintStream(System.out, true, "UTF-8");
    ss.info = new PrintStream(System.err, true, "UTF-8");
    ss.err = new CachingPrintStream(System.err, true, "UTF-8");
  } catch (UnsupportedEncodingException e) {
    return 3;
  }
  if (!oproc.process_stage2(ss)) {
    return 2;
  }
  if (!ss.getIsSilent()) {
    if (logInitFailed) {
      System.err.println(logInitDetailMessage);
    } else {
      SessionState.getConsole().printInfo(logInitDetailMessage);
    }
  }
  // set all properties specified via command line
  HiveConf conf = ss.getConf();
  for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
    conf.set((String) item.getKey(), (String) item.getValue());
    ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
  }
  // read prompt configuration and substitute variables.
  prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
  prompt = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  }).substitute(conf, prompt);
  prompt2 = spacesForString(prompt);
  if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC)) {
    // Start the session in a fire-and-forget manner. When the asynchronously initialized parts of
    // the session are needed, the corresponding getters and other methods will wait as needed.
    SessionState.beginStart(ss, console);
  } else {
    SessionState.start(ss);
  }
  ss.updateThreadName();
  // Create views registry
  HiveMaterializedViewsRegistry.get().init();
  // execute cli driver work
  try {
    return executeDriver(ss, conf, oproc);
  } finally {
    ss.resetThreadName();
    ss.close();
  }
}
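The prompt substitution above is what allows variables inside hive.cli.prompt. A minimal sketch of that behavior, assuming a made-up prompt value and a fixed variable map in place of the live session (the class name PromptSubstitutionSketch is illustrative):

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveVariableSource;
import org.apache.hadoop.hive.conf.VariableSubstitution;

public class PromptSubstitutionSketch {
  public static void main(String[] args) {
    final Map<String, String> hiveVars = new HashMap<>();
    hiveVars.put("env", "dev");

    HiveConf conf = new HiveConf();
    // Illustrative prompt; the real value comes from the user's configuration.
    conf.setVar(HiveConf.ConfVars.CLIPROMPT, "hive-${hivevar:env}");

    String prompt = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return hiveVars;
      }
    }).substitute(conf, conf.getVar(HiveConf.ConfVars.CLIPROMPT));

    // Prints: hive-dev
    System.out.println(prompt);
  }
}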
Use of org.apache.hadoop.hive.conf.HiveVariableSource in project hive by apache.
Class CliDriver, method processCmd:
public int processCmd(String cmd) {
  CliSessionState ss = (CliSessionState) SessionState.get();
  ss.setLastCommand(cmd);
  ss.updateThreadName();
  // Flush the print stream so it doesn't include output from the last command
  ss.err.flush();
  String cmd_trimmed = HiveStringUtils.removeComments(cmd).trim();
  String[] tokens = tokenizeCmd(cmd_trimmed);
  int ret = 0;
  if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
    // If we have come this far, either the previous commands were all successful or
    // this is the command line; in either case this counts as a successful run.
    ss.close();
    System.exit(0);
  } else if (tokens[0].equalsIgnoreCase("source")) {
    String cmd_1 = getFirstCmd(cmd_trimmed, tokens[0].length());
    // Substitute variables in the script path before resolving the file.
    cmd_1 = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), cmd_1);
    File sourceFile = new File(cmd_1);
    if (!sourceFile.isFile()) {
      console.printError("File: " + cmd_1 + " is not a file.");
      ret = 1;
    } else {
      try {
        ret = processFile(cmd_1);
      } catch (IOException e) {
        console.printError("Failed processing file " + cmd_1 + " " + e.getLocalizedMessage(),
            stringifyException(e));
        ret = 1;
      }
    }
  } else if (cmd_trimmed.startsWith("!")) {
    // for shell commands, use the unstripped command
    String shell_cmd = cmd.trim().substring(1);
    shell_cmd = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), shell_cmd);
    // shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'";
    try {
      ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err);
      ret = executor.execute();
      if (ret != 0) {
        console.printError("Command failed with exit code = " + ret);
      }
    } catch (Exception e) {
      console.printError("Exception raised from Shell command " + e.getLocalizedMessage(),
          stringifyException(e));
      ret = 1;
    }
  } else {
    // local mode
    try {
      try (CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf)) {
        if (proc instanceof IDriver) {
          // Let Driver strip comments using sql parser
          ret = processLocalCmd(cmd, proc, ss);
        } else {
          ret = processLocalCmd(cmd_trimmed, proc, ss);
        }
      }
    } catch (SQLException e) {
      console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(),
          org.apache.hadoop.util.StringUtils.stringifyException(e));
      ret = 1;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  ss.resetThreadName();
  return ret;
}
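For instance, after set hivevar:qdir=/tmp/queries; in a session, the command source ${hivevar:qdir}/init.sql; expands to /tmp/queries/init.sql before processFile is invoked, and !echo ${hivevar:qdir} expands the same way before the shell command runs (the paths here are illustrative).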