Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
The class TestReOptimization, method createDriver.
private static IDriver createDriver(String strategies) {
  HiveConf conf = env_setup.getTestCtx().hiveConf;
  conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ENABLED, true);
  conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STRATEGIES, strategies);
  conf.setBoolVar(ConfVars.HIVE_EXPLAIN_USER, true);
  conf.set("zzz", "1");
  conf.set("reexec.overlay.zzz", "2000");
  //
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  HiveConf.setVar(conf, HiveConf.ConfVars.POSTEXECHOOKS, OperatorStatsReaderHook.class.getName());
  SessionState.start(conf);
  IDriver driver = DriverFactory.newDriver(conf);
  return driver;
}
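For context, a minimal sketch of how a test in this class might use the helper; the strategy string and the query are illustrative assumptions (the tu/tv tables are created in the beforeClass method shown below), not taken from the snippet itself.
// Illustrative only: strategy names and query are assumptions.
IDriver driver = createDriver("overlay,reoptimize");
int ret = driver.run("select count(*) from tu join tv on (tu.id_uv = tv.id_uv)").getResponseCode();
assertEquals("Query with re-execution enabled should succeed", 0, ret);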
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
The class TestReOptimization, method beforeClass.
@BeforeClass
public static void beforeClass() throws Exception {
  IDriver driver = createDriver("");
  dropTables(driver);
  String[] cmds = {
      // @formatter:off
      "create table tu(id_uv int,id_uw int,u int)",
      "create table tv(id_uv int,v int)",
      "create table tw(id_uw int,w int)",
      "insert into tu values (10,10,10),(1,1,1),(2,2,2),(3,3,3),(4,4,4),(5,5,5),(6,6,6)",
      "insert into tv values (10,10),(1,1),(2,2),(3,3)",
      "insert into tw values (10,10),(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8),(9,9)"
      // @formatter:on
  };
  for (String cmd : cmds) {
    int ret = driver.run(cmd).getResponseCode();
    assertEquals("Checking command success", 0, ret);
  }
}
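A matching teardown is not shown above; a minimal sketch, assuming the same dropTables helper used in beforeClass, might look like this.
@AfterClass
public static void afterClass() throws Exception {
  // Reuse the helper to remove the tu, tv and tw tables created in beforeClass.
  IDriver driver = createDriver("");
  dropTables(driver);
}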
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
The class TestHiveHistory, method testSimpleQuery.
/**
* Check history file output for this query.
*/
public void testSimpleQuery() {
  new LineageInfo();
  try {
    // before any of the other core hive classes are loaded
    try {
      LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
      // ignore; logging is not essential for this test
    }
    HiveConf hconf = new HiveConf(SessionState.class);
    hconf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true);
    CliSessionState ss = new CliSessionState(hconf);
    ss.in = System.in;
    try {
      ss.out = new PrintStream(System.out, true, "UTF-8");
      ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      System.exit(3);
    }
    SessionState.start(ss);
    String cmd = "select a.key+1 from src a";
    IDriver d = DriverFactory.newDriver(conf);
    int ret = d.run(cmd).getResponseCode();
    if (ret != 0) {
      fail("Failed");
    }
    HiveHistoryViewer hv = new HiveHistoryViewer(SessionState.get().getHiveHistory().getHistFileName());
    Map<String, QueryInfo> jobInfoMap = hv.getJobInfoMap();
    Map<String, TaskInfo> taskInfoMap = hv.getTaskInfoMap();
    if (jobInfoMap.size() != 1) {
      fail("jobInfo Map size not 1");
    }
    if (taskInfoMap.size() != 1) {
      fail("taskInfo Map size not 1");
    }
    cmd = (String) jobInfoMap.keySet().toArray()[0];
    QueryInfo ji = jobInfoMap.get(cmd);
    if (!ji.hm.get(Keys.QUERY_NUM_TASKS.name()).equals("1")) {
      fail("Wrong number of tasks");
    }
  } catch (Exception e) {
    e.printStackTrace();
    fail("Failed");
  }
}
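The test only verifies the task count; a hedged sketch of drilling into the single recorded task, following the same pattern used for jobInfoMap above (the specific Keys constant to read is an assumption).
// Illustrative only: which history key to read is an assumption.
String taskKey = (String) taskInfoMap.keySet().toArray()[0];
TaskInfo ti = taskInfoMap.get(taskKey);
// ti.hm holds the key/value pairs recorded for the task in the history file.
System.out.println(ti.hm.get(Keys.TASK_ID.name()));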
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
The class HCatDataCheckUtil, method instantiateDriver.
public static IDriver instantiateDriver(MiniCluster cluster) {
  HiveConf hiveConf = new HiveConf(HCatDataCheckUtil.class);
  for (Entry e : cluster.getProperties().entrySet()) {
    hiveConf.set(e.getKey().toString(), e.getValue().toString());
  }
  hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
  LOG.debug("Hive conf : {}", hiveConf.getAllProperties());
  IDriver driver = DriverFactory.newDriver(hiveConf);
  SessionState.start(new CliSessionState(hiveConf));
  return driver;
}
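A hedged usage sketch from a test's point of view; the table name and DDL are illustrative, and the MiniCluster instance is assumed to come from the surrounding test setup.
// Illustrative only: table name and statement are assumptions.
IDriver driver = HCatDataCheckUtil.instantiateDriver(cluster);
int ret = driver.run("create table hcat_check_table (a int, b string)").getResponseCode();
assertEquals("Table creation through the driver should succeed", 0, ret);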
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
The class CliDriver, method processCmd.
public int processCmd(String cmd) {
  CliSessionState ss = (CliSessionState) SessionState.get();
  ss.setLastCommand(cmd);
  ss.updateThreadName();
  // Flush the print stream, so it doesn't include output from the last command
  ss.err.flush();
  String cmd_trimmed = HiveStringUtils.removeComments(cmd).trim();
  String[] tokens = tokenizeCmd(cmd_trimmed);
  int ret = 0;
  if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
    // if we have come this far - either the previous commands
    // are all successful or this is command line. in either case
    // this counts as a successful run
    ss.close();
    System.exit(0);
  } else if (tokens[0].equalsIgnoreCase("source")) {
    String cmd_1 = getFirstCmd(cmd_trimmed, tokens[0].length());
    cmd_1 = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), cmd_1);
    File sourceFile = new File(cmd_1);
    if (!sourceFile.isFile()) {
      console.printError("File: " + cmd_1 + " is not a file.");
      ret = 1;
    } else {
      try {
        ret = processFile(cmd_1);
      } catch (IOException e) {
        console.printError("Failed processing file " + cmd_1 + " " + e.getLocalizedMessage(),
            stringifyException(e));
        ret = 1;
      }
    }
  } else if (cmd_trimmed.startsWith("!")) {
    // for shell commands, use unstripped command
    String shell_cmd = cmd.trim().substring(1);
    shell_cmd = new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), shell_cmd);
    // shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'";
    try {
      ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err);
      ret = executor.execute();
      if (ret != 0) {
        console.printError("Command failed with exit code = " + ret);
      }
    } catch (Exception e) {
      console.printError("Exception raised from Shell command " + e.getLocalizedMessage(),
          stringifyException(e));
      ret = 1;
    }
  } else {
    // local mode
    try {
      try (CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf)) {
        if (proc instanceof IDriver) {
          // Let Driver strip comments using sql parser
          ret = processLocalCmd(cmd, proc, ss);
        } else {
          ret = processLocalCmd(cmd_trimmed, proc, ss);
        }
      }
    } catch (SQLException e) {
      console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(),
          org.apache.hadoop.util.StringUtils.stringifyException(e));
      ret = 1;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  ss.resetThreadName();
  return ret;
}
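A minimal sketch of how the non-quit branches above are exercised; the command strings are illustrative, and it is assumed that a CliSessionState has already been started via SessionState.start before processCmd is called.
// Illustrative only: commands are assumptions; a CliSessionState must already be active.
CliDriver cli = new CliDriver();
int rc;
rc = cli.processCmd("show tables");          // handled by an IDriver via processLocalCmd
rc = cli.processCmd("!echo hello");          // handled by ShellCmdExecutor
rc = cli.processCmd("source /tmp/init.hql"); // handled by processFile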