Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
From class TestHiveHistory, method testHiveHistoryConfigEnabled:
/**
 * Check if HiveHistoryImpl class is returned when hive history is enabled.
 *
 * @throws Exception if session startup or history retrieval fails
 */
public void testHiveHistoryConfigEnabled() throws Exception {
    // History must be enabled on the conf BEFORE the session starts so the
    // SessionState factory instantiates the real HiveHistoryImpl.
    HiveConf conf = new HiveConf(SessionState.class);
    conf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true);
    SessionState ss = new CliSessionState(conf);
    SessionState.start(ss);
    HiveHistory hHistory = ss.getHiveHistory();
    // Fix: JUnit's assertEquals contract is (message, expected, actual).
    // The original passed the actual class as "expected", producing a
    // backwards failure message ("expected <actual> but was <expected>").
    assertEquals("checking hive history class when history is enabled",
        HiveHistoryImpl.class, hHistory.getClass());
}
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
From class QTestUtil, method startSessionState:
// Starts a fresh CliSessionState for a test run, optionally carrying over the
// Tez or Spark session from the previous SessionState to avoid expensive
// cluster session restarts. Forces the "mr" execution engine during startup
// and restores the original engine setting before returning.
// NOTE(review): statement order matters here — sessions are detached from the
// old state (set to null) before oldSs.close() so close() does not tear them
// down; do not reorder.
private CliSessionState startSessionState(boolean canReuseSession) throws IOException {
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
// Temporarily switch to "mr" for session startup; restored below.
String execEngine = conf.get("hive.execution.engine");
conf.set("hive.execution.engine", "mr");
CliSessionState ss = createSessionState();
assert ss != null;
ss.in = System.in;
ss.out = System.out;
// stderr is deliberately routed to stdout so test output is interleaved.
ss.err = System.out;
SessionState oldSs = SessionState.get();
if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
// Copy the tezSessionState from the old CliSessionState.
tezSessionState = oldSs.getTezSession();
ss.setTezSession(tezSessionState);
// Detach from the old state first so oldSs.close() won't shut it down.
oldSs.setTezSession(null);
oldSs.close();
}
if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
// Spark sessions are always reused (no canReuseSession check), unlike Tez.
// NOTE(review): asymmetry with the Tez branch above — confirm intentional.
sparkSession = oldSs.getSparkSession();
ss.setSparkSession(sparkSession);
oldSs.setSparkSession(null);
oldSs.close();
}
// Close the old session's output stream unless it is the shared System.out.
if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
oldSs.out.close();
}
SessionState.start(ss);
isSessionStateStarted = true;
// Restore the caller's execution engine now that the session is up.
conf.set("hive.execution.engine", execEngine);
return ss;
}
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
From class VerifyCachingPrintStreamHook, method run:
/**
 * Test hook that verifies the session error stream is a CachingPrintStream.
 * On failure-hook invocations it dumps the cached error output between
 * "Begin cached logs." / "End cached logs." markers; on all other invocations
 * it writes a single sentinel line to the error stream.
 */
public void run(HookContext hookContext) {
    SessionState session = SessionState.get();
    assert (session.err instanceof CachingPrintStream);
    // Non-failure hooks just emit the sentinel line and return early.
    if (hookContext.getHookType() != HookType.ON_FAILURE_HOOK) {
        session.err.println("TEST, this should only appear once in the log.");
        return;
    }
    assert (session.err instanceof CachingPrintStream);
    session.out.println("Begin cached logs.");
    // Replay everything the CachingPrintStream captured onto stdout.
    for (String cachedLine : ((CachingPrintStream) session.err).getOutput()) {
        session.out.println(cachedLine);
    }
    session.out.println("End cached logs.");
}
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
From class AddResourceProcessor, method run:
/**
 * Executes an ADD command: substitutes Hive variables in the command text,
 * validates the resource type token, authorizes the operation, and registers
 * the listed resources with the current session.
 *
 * @param command the raw command text (resource type followed by one or more values)
 * @return a response with code 0 on success, 1 on usage error, or an
 *         error response when authorization or resource registration fails
 */
@Override
public CommandProcessorResponse run(String command) {
    SessionState ss = SessionState.get();
    // Expand ${...} variable references against the live session variables.
    command = new VariableSubstitution(new HiveVariableSource() {
        @Override
        public Map<String, String> getHiveVariable() {
            return SessionState.get().getHiveVariables();
        }
    }).substitute(ss.getConf(), command);
    String[] tokens = command.split("\\s+");
    // Need a resource type plus at least one value; short-circuit so the
    // type lookup is skipped when there are too few tokens.
    SessionState.ResourceType resourceType =
        tokens.length < 2 ? null : SessionState.find_resource_type(tokens[0]);
    if (resourceType == null) {
        console.printError("Usage: add [" + StringUtils.join(SessionState.ResourceType.values(), "|") + "] <value> [<value>]*");
        return new CommandProcessorResponse(1);
    }
    CommandProcessorResponse authErrResp =
        CommandUtil.authorizeCommand(ss, HiveOperationType.ADD, Arrays.asList(tokens));
    if (authErrResp != null) {
        // there was an authorization issue
        return authErrResp;
    }
    try {
        // Everything after the type token is a resource value.
        ss.add_resources(resourceType, Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
    } catch (Exception e) {
        return CommandProcessorResponse.create(e);
    }
    return new CommandProcessorResponse(0);
}
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
From class CommandProcessorFactory, method getForHiveCommandInternal:
/**
 * Resolves a tokenized Hive command to its CommandProcessor implementation.
 * Returns null when the tokens do not name a recognized Hive command (or for
 * "reload function", which is handled as regular SQL), and enforces the
 * security command whitelist before dispatching.
 *
 * @param cmd tokenized command; cmd[0] is the command name
 * @param conf Hive configuration; a default HiveConf is created when null
 * @param testOnly whether test-only commands are eligible
 * @return the matching processor, or null when the command is not a Hive command
 * @throws SQLException when the command is not whitelisted, or the crypto
 *         processor cannot be constructed
 */
public static CommandProcessor getForHiveCommandInternal(String[] cmd, HiveConf conf, boolean testOnly) throws SQLException {
    HiveCommand hiveCommand = HiveCommand.find(cmd, testOnly);
    if (hiveCommand == null || isBlank(cmd[0])) {
        return null;
    }
    if (conf == null) {
        conf = new HiveConf();
    }
    // Normalize the configured whitelist into a lower-cased, trimmed set.
    Set<String> allowedCommands = new HashSet<String>();
    String whitelist = conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST);
    for (String allowed : whitelist.split(",")) {
        allowedCommands.add(allowed.toLowerCase().trim());
    }
    if (!allowedCommands.contains(cmd[0].trim().toLowerCase())) {
        throw new SQLException("Insufficient privileges to execute " + cmd[0], "42000");
    }
    // special handling for SQL "reload function"
    if (cmd.length > 1 && "reload".equalsIgnoreCase(cmd[0]) && "function".equalsIgnoreCase(cmd[1])) {
        return null;
    }
    switch(hiveCommand) {
        case SET:
            return new SetProcessor();
        case RESET:
            return new ResetProcessor();
        case DFS:
            // NOTE(review): uses the current session's conf, not the conf
            // parameter; assumes a session has been started.
            SessionState session = SessionState.get();
            return new DfsProcessor(session.getConf());
        case ADD:
            return new AddResourceProcessor();
        case LIST:
            return new ListResourceProcessor();
        case DELETE:
            return new DeleteResourceProcessor();
        case COMPILE:
            return new CompileProcessor();
        case RELOAD:
            return new ReloadProcessor();
        case CRYPTO:
            try {
                return new CryptoProcessor(SessionState.get().getHdfsEncryptionShim(), conf);
            } catch (HiveException e) {
                throw new SQLException("Fail to start the command processor due to the exception: ", e);
            }
        default:
            throw new AssertionError("Unknown HiveCommand " + hiveCommand);
    }
}
Aggregations