Usage of org.apache.hadoop.hive.ql.session.SessionState in the Apache Hive project:
class TestHiveAuthorizerShowFilters, method beforeTest.
@BeforeClass
public static void beforeTest() throws Exception {
  conf = new HiveConf();
  // Install the mocked authorizer so the test can observe authorization calls.
  conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
  conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
  conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
  // Disable doAs and concurrency support; neither is needed for these tests.
  conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
  // Give this test class its own metastore location to avoid clashes.
  UtilsForTest.setNewDerbyDbLocation(conf, TestHiveAuthorizerShowFilters.class.getSimpleName());
  SessionState sessionState = SessionState.start(conf);
  sessionState.applyAuthorizationPolicy();
  driver = DriverFactory.newDriver(conf);
  // Create the fixture tables and databases the tests filter on.
  runCmd("create table " + tableName1 + " (i int, j int, k string) partitioned by (city string, `date` string) ");
  runCmd("create table " + tableName2 + "(i int)");
  runCmd("create database " + dbName1);
  runCmd("create database " + dbName2);
}
Usage of org.apache.hadoop.hive.ql.session.SessionState in the Apache Hive project:
class QTestUtil, method executeDiffCommand.
/**
 * Runs the system {@code diff} utility on the given expected/actual result files and
 * returns the process result.
 *
 * @param inFileName       path of the expected-output file
 * @param outFileName      path of the actual-output file
 * @param ignoreWhiteSpace if true, pass {@code -b} to diff so whitespace-amount changes are ignored
 * @param sortResults      if true, compare line-sorted copies of both files instead of the originals
 * @return the result of executing the diff command
 * @throws Exception if sorting or executing the diff process fails
 */
private static QTestProcessExecResult executeDiffCommand(String inFileName, String outFileName, boolean ignoreWhiteSpace, boolean sortResults) throws Exception {
  if (sortResults) {
    // sort will try to open the output file in write mode on windows. We need to
    // close it first.
    SessionState ss = SessionState.get();
    if (ss != null && ss.out != null && ss.out != System.out) {
      ss.out.close();
    }
    String inSorted = inFileName + SORT_SUFFIX;
    String outSorted = outFileName + SORT_SUFFIX;
    sortFiles(inFileName, inSorted);
    sortFiles(outFileName, outSorted);
    // From here on, compare the sorted copies.
    inFileName = inSorted;
    outFileName = outSorted;
  }
  ArrayList<String> diffCommandArgs = new ArrayList<String>();
  diffCommandArgs.add("diff");
  // Text file comparison
  diffCommandArgs.add("-a");
  // Ignore changes in the amount of white space
  if (ignoreWhiteSpace) {
    diffCommandArgs.add("-b");
  }
  // Add files to compare to the arguments list
  diffCommandArgs.add(getQuotedString(inFileName));
  diffCommandArgs.add(getQuotedString(outFileName));
  try {
    return executeCmd(diffCommandArgs);
  } finally {
    // Clean up the sorted temp copies even when executeCmd throws;
    // previously they leaked on the exception path.
    if (sortResults) {
      new File(inFileName).delete();
      new File(outFileName).delete();
    }
  }
}
Usage of org.apache.hadoop.hive.ql.session.SessionState in the Apache Hive project:
class QTestUtil, method startSessionState.
/**
 * Creates and starts a fresh CliSessionState wired to System.in/out, transferring the
 * Tez or Spark session from any previously active SessionState when reuse is allowed.
 *
 * @param canReuseSession if true and the cluster is Tez-based, move the old Tez session
 *                        into the new SessionState instead of discarding it
 * @return the started CliSessionState
 * @throws IOException if closing the previous session fails
 */
private CliSessionState startSessionState(boolean canReuseSession) throws IOException {
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
  // Force MR while bootstrapping the session; the original engine is restored below.
  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  CliSessionState ss = new CliSessionState(conf);
  // (removed dead `assert ss != null` — a constructor result can never be null)
  ss.in = System.in;
  ss.out = System.out;
  ss.err = System.out;
  SessionState oldSs = SessionState.get();
  if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
    // Copy the tezSessionState from the old CliSessionState.
    TezSessionState tezSessionState = oldSs.getTezSession();
    ss.setTezSession(tezSessionState);
    oldSs.setTezSession(null);
    oldSs.close();
  }
  if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
    // Hand the Spark session over to the new state before closing the old one.
    sparkSession = oldSs.getSparkSession();
    ss.setSparkSession(sparkSession);
    oldSs.setSparkSession(null);
    oldSs.close();
  }
  if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  if (oldSs != null) {
    // NOTE(review): on the Tez/Spark paths this is the second close() of oldSs;
    // presumably SessionState.close() is idempotent — confirm before relying on it.
    oldSs.close();
  }
  SessionState.start(ss);
  isSessionStateStarted = true;
  // Restore the caller's configured execution engine.
  conf.set("hive.execution.engine", execEngine);
  return ss;
}
Usage of org.apache.hadoop.hive.ql.session.SessionState in the Apache Hive project:
class HiveCommandOperation, method readResults.
/**
 * Reads the temporary results for non-Hive (non-Driver) commands to the
 * resulting List of strings.
 * @param nLines number of lines read at once. If it is <= 0, then read all lines.
 */
private List<String> readResults(int nLines) throws HiveSQLException {
  if (resultReader == null) {
    // Lazily open the session's temp output file on the first call.
    SessionState sessionState = getParentSession().getSessionState();
    File tmp = sessionState.getTmpOutputFile();
    try {
      // NOTE(review): FileReader presumably uses the platform charset here — confirm.
      resultReader = new BufferedReader(new FileReader(tmp));
    } catch (FileNotFoundException e) {
      LOG.error("File " + tmp + " not found. ", e);
      throw new HiveSQLException(e);
    }
  }
  List<String> results = new ArrayList<String>();
  int linesRead = 0;
  // nLines <= 0 means "read everything"; otherwise stop after nLines lines.
  while (nLines <= 0 || linesRead < nLines) {
    String line;
    try {
      line = resultReader.readLine();
    } catch (IOException e) {
      LOG.error("Reading temp results encountered an exception: ", e);
      throw new HiveSQLException(e);
    }
    if (line == null) {
      // reached the end of the result file
      break;
    }
    results.add(line);
    ++linesRead;
  }
  return results;
}
Usage of org.apache.hadoop.hive.ql.session.SessionState in the Apache Hive project:
class SQLOperation, method cleanup.
/**
 * Transitions this operation to the given terminal state and releases its resources:
 * interrupts any async background task, closes/destroys the driver, deletes the
 * session's temp output/error files, and shuts down the timeout watchdog.
 *
 * @param state the terminal state to set before cleanup
 * @throws HiveSQLException if setting the state fails
 */
private synchronized void cleanup(OperationState state) throws HiveSQLException {
  setState(state);
  // a cancel request is sent.
  if (shouldRunAsync() && state != OperationState.CANCELED && state != OperationState.TIMEDOUT) {
    Future<?> backgroundHandle = getBackgroundHandle();
    if (backgroundHandle != null) {
      boolean success = backgroundHandle.cancel(true);
      String queryId = queryState.getQueryId();
      if (success) {
        LOG.info("The running operation has been successfully interrupted: " + queryId);
      } else {
        // Fix: this was `else if (state == OperationState.CANCELED)`, which is
        // unreachable — the enclosing `if` already excludes CANCELED — so a
        // failed cancel was never logged.
        LOG.info("The running operation could not be cancelled, typically because it has already completed normally: " + queryId);
      }
    }
  }
  if (driver != null) {
    driver.close();
    driver.destroy();
  }
  driver = null;
  SessionState ss = SessionState.get();
  if (ss == null) {
    LOG.warn("Operation seems to be in invalid state, SessionState is null");
  } else {
    ss.deleteTmpOutputFile();
    ss.deleteTmpErrOutputFile();
  }
  // Shutdown the timeout thread if any, while closing this operation
  if ((timeoutExecutor != null) && (state != OperationState.TIMEDOUT) && (state.isTerminal())) {
    timeoutExecutor.shutdownNow();
  }
}
Aggregations