Use of org.apache.hadoop.hive.ql.processors.SetProcessor in project hive by apache.
The example below is the processCmd method of the HCatCli class.
import java.io.IOException;
import java.util.ArrayList;

import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.processors.DfsProcessor;
import org.apache.hadoop.hive.ql.processors.SetProcessor;
import org.apache.hadoop.hive.ql.session.SessionState;

// HCatDriver and sysExit(...) are defined alongside this method in HCatCli.
private static int processCmd(String cmd) {
  SessionState ss = SessionState.get();
  long start = System.currentTimeMillis();
  cmd = cmd.trim();
  String firstToken = cmd.split("\\s+")[0].trim();
  // "set" and "dfs" commands bypass the driver and go to dedicated processors.
  if (firstToken.equalsIgnoreCase("set")) {
    return new SetProcessor().run(cmd.substring(firstToken.length()).trim()).getResponseCode();
  } else if (firstToken.equalsIgnoreCase("dfs")) {
    return new DfsProcessor(ss.getConf()).run(cmd.substring(firstToken.length()).trim()).getResponseCode();
  }
  // Everything else is handed to the HCatalog driver.
  HCatDriver driver = new HCatDriver();
  int ret = driver.run(cmd).getResponseCode();
  if (ret != 0) {
    driver.close();
    sysExit(ss, ret);
  }
  // Drain the result rows and print them to the session's output stream.
  ArrayList<String> res = new ArrayList<String>();
  try {
    while (driver.getResults(res)) {
      for (String r : res) {
        ss.out.println(r);
      }
      res.clear();
    }
  } catch (IOException e) {
    ss.err.println("Failed with exception " + e.getClass().getName() + ":" + e.getMessage() + "\n"
      + org.apache.hadoop.util.StringUtils.stringifyException(e));
    ret = 1;
  } catch (CommandNeedRetryException e) {
    ss.err.println("Failed with exception " + e.getClass().getName() + ":" + e.getMessage() + "\n"
      + org.apache.hadoop.util.StringUtils.stringifyException(e));
    ret = 1;
  }
  // Closing the driver can itself fail; surface that only if the run succeeded.
  int cret = driver.close();
  if (ret == 0) {
    ret = cret;
  }
  long end = System.currentTimeMillis();
  if (end > start) {
    double timeTaken = (end - start) / 1000.0;
    ss.err.println("Time taken: " + timeTaken + " seconds");
  }
  return ret;
}
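For reference, here is a minimal, self-contained sketch of the "set" branch above used on its own. It assumes the same pre-Hive-2.0 API as the snippet (SetProcessor.run returning a CommandProcessorResponse with getResponseCode); the class name SetProcessorExample and the property being set are illustrative placeholders, not part of the original code.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.processors.SetProcessor;
import org.apache.hadoop.hive.ql.session.SessionState;

// Illustrative class name; not part of the Hive codebase.
public class SetProcessorExample {
  public static void main(String[] args) throws Exception {
    // Command processors read and write through the thread's SessionState,
    // so one must be started before SetProcessor.run(...) is called.
    SessionState.start(new HiveConf());
    // Equivalent to the "set ..." branch in processCmd: the command string
    // is the remainder after the "set" token has been stripped off.
    int rc = new SetProcessor().run("hive.exec.parallel=true").getResponseCode();
    System.out.println("SetProcessor returned " + rc);
  }
}

Note that processCmd dispatches on the first whitespace-delimited token, so SetProcessor and DfsProcessor never see the "set"/"dfs" keyword itself, only the argument that follows it.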