Use of org.apache.hive.service.cli.OperationHandle in project hive by apache.
The class KillQueryImpl, method killQuery.
@Override
public void killQuery(String queryId, String errMsg) throws HiveException {
  try {
    Operation operation = operationManager.getOperationByQueryId(queryId);
    if (operation == null) {
      LOG.info("Query not found: " + queryId);
    } else {
      OperationHandle handle = operation.getHandle();
      operationManager.cancelOperation(handle, errMsg);
    }
  } catch (HiveSQLException e) {
    throw new HiveException(e);
  }
}
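The same cancellation path is reachable from a client that already holds the OperationHandle: CLIServiceClient.cancelOperation ends up in OperationManager.cancelOperation, which is what killQuery calls once it has resolved the query id to a handle. A minimal sketch, assuming an already-connected CLIServiceClient named client, an open SessionHandle named sessHandle, and a made-up table name (all hypothetical here):

// Start a statement and keep its handle.
OperationHandle opHandle = client.executeStatement(sessHandle,
    "SELECT COUNT(*) FROM big_table", new HashMap<String, String>());
// Cancel it from the client side; on the server this reaches OperationManager.cancelOperation,
// the same method used above once the query id has been resolved to a handle.
client.cancelOperation(opHandle);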
Use of org.apache.hive.service.cli.OperationHandle in project hive by apache.
The class ThriftCliServiceTestWithCookie, method testExecuteStatement.
/**
 * Test synchronous query execution
 * @throws Exception
 */
@Test
public void testExecuteStatement() throws Exception {
  Map<String, String> opConf = new HashMap<String, String>();
  // Open a new client session
  SessionHandle sessHandle = client.openSession(USERNAME, PASSWORD, opConf);
  // Session handle should not be null
  assertNotNull("Session handle should not be null", sessHandle);
  // Change lock manager to embedded mode
  String queryString = "SET hive.lock.manager=" + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager";
  client.executeStatement(sessHandle, queryString, opConf);
  // Drop the table if it exists
  queryString = "DROP TABLE IF EXISTS TEST_EXEC_THRIFT";
  client.executeStatement(sessHandle, queryString, opConf);
  // Create a test table
  queryString = "CREATE TABLE TEST_EXEC_THRIFT(ID STRING)";
  client.executeStatement(sessHandle, queryString, opConf);
  // Execute another query
  queryString = "SELECT ID+1 FROM TEST_EXEC_THRIFT";
  OperationHandle opHandle = client.executeStatement(sessHandle, queryString, opConf);
  assertNotNull(opHandle);
  OperationStatus opStatus = client.getOperationStatus(opHandle, false);
  assertNotNull(opStatus);
  OperationState state = opStatus.getState();
  // Expect query to be completed now
  assertEquals("Query should be finished", OperationState.FINISHED, state);
  // Cleanup
  queryString = "DROP TABLE TEST_EXEC_THRIFT";
  client.executeStatement(sessHandle, queryString, opConf);
  client.closeSession(sessHandle);
}
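The test only checks the operation state; the same OperationHandle can also be used to pull the result rows back before closing it. A short follow-on sketch using the same client and handle (standard CLIServiceClient calls; the row handling is illustrative):

// Fetch the rows produced by the SELECT and print the first column.
RowSet rowSet = client.fetchResults(opHandle);
for (Object[] row : rowSet) {
  System.out.println(row[0]);
}
// Release the server-side resources held by the operation.
client.closeOperation(opHandle);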
Use of org.apache.hive.service.cli.OperationHandle in project cdap by caskdata.
The class BaseHiveExploreService, method getTypeInfo.
@Override
public QueryHandle getTypeInfo() throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    Map<String, String> sessionConf = startSession();
    try {
      sessionHandle = openHiveSession(sessionConf);
      operationHandle = cliService.getTypeInfo(sessionHandle);
      QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, "", "");
      LOG.trace("Retrieving type info");
      return handle;
    } catch (Throwable e) {
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", ""));
      throw e;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable e) {
    throw new ExploreException(e);
  }
}
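Outside the CDAP wrapper, the OperationHandle returned by getTypeInfo can be consumed directly through the same CLIService instance. A minimal sketch, assuming an open SessionHandle and the cliService field used above (the column position comment follows the JDBC GetTypeInfo layout):

// Ask Hive for its type metadata and read the rows straight back.
OperationHandle typeInfoHandle = cliService.getTypeInfo(sessionHandle);
RowSet rows = cliService.fetchResults(typeInfoHandle);
for (Object[] row : rows) {
  // In the JDBC-style GetTypeInfo result set, the first column is TYPE_NAME.
  System.out.println(row[0]);
}
cliService.closeOperation(typeInfoHandle);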
Use of org.apache.hive.service.cli.OperationHandle in project cdap by caskdata.
The class BaseHiveExploreService, method fetchNextResults.
@SuppressWarnings("unchecked")
private List<QueryResult> fetchNextResults(QueryHandle handle, int size)
    throws HiveSQLException, ExploreException, HandleNotFoundException {
  startAndWait();
  Lock nextLock = getActiveOperationInfo(handle).getNextLock();
  nextLock.lock();
  try {
    // Fetch results from Hive
    LOG.trace("Getting results for handle {}", handle);
    OperationHandle operationHandle = getOperationHandle(handle);
    if (operationHandle.hasResultSet()) {
      return doFetchNextResults(operationHandle, FetchOrientation.FETCH_NEXT, size);
    } else {
      return Collections.emptyList();
    }
  } catch (Exception e) {
    throw Throwables.propagate(e);
  } finally {
    nextLock.unlock();
  }
}
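doFetchNextResults is a CDAP-internal, Hive-version-specific helper that is not shown here; in terms of the Hive API it amounts to a fetchResults call on the OperationHandle. A hedged sketch of what such a helper could look like (the QueryResult conversion is simplified and hypothetical):

private List<QueryResult> doFetchNextResults(OperationHandle handle, FetchOrientation orientation,
                                             int size) throws HiveSQLException {
  // Pull up to 'size' rows of query output from Hive for this operation.
  RowSet rowSet = cliService.fetchResults(handle, orientation, size, FetchType.QUERY_OUTPUT);
  List<QueryResult> results = new ArrayList<QueryResult>();
  for (Object[] row : rowSet) {
    // Wrapping each row in a QueryResult is a simplification of what CDAP actually does.
    results.add(new QueryResult(Arrays.asList(row)));
  }
  return results;
}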
Use of org.apache.hive.service.cli.OperationHandle in project cdap by caskdata.
The class BaseHiveExploreService, method deleteNamespace.
@Override
public QueryHandle deleteNamespace(NamespaceId namespace) throws ExploreException, SQLException {
  startAndWait();
  String customHiveDatabase;
  try {
    customHiveDatabase = namespaceQueryAdmin.get(namespace).getConfig().getHiveDatabase();
  } catch (Exception e) {
    throw new ExploreException(String.format("Failed to get namespace meta for the namespace %s", namespace));
  }
  if (Strings.isNullOrEmpty(customHiveDatabase)) {
    // no custom hive database was given for this namespace so we need to delete it
    try {
      SessionHandle sessionHandle = null;
      OperationHandle operationHandle = null;
      Map<String, String> sessionConf = startSession();
      String database = getHiveDatabase(namespace.getNamespace());
      try {
        sessionHandle = openHiveSession(sessionConf);
        String statement = String.format("DROP DATABASE IF EXISTS %s", database);
        operationHandle = executeAsync(sessionHandle, statement);
        QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, statement, database);
        LOG.info("Deleting database {} with handle {}", database, handle);
        return handle;
      } catch (Throwable e) {
        closeInternal(getQueryHandle(sessionConf),
                      new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
        throw e;
      }
    } catch (HiveSQLException e) {
      throw getSqlException(e);
    } catch (Throwable e) {
      throw new ExploreException(e);
    }
  } else {
    // a custom hive database was provided for this namespace so we don't need to delete it
    LOG.debug("Custom hive database {} found for namespace {}. Skipping delete.", customHiveDatabase, namespace);
    return QueryHandle.NO_OP;
  }
}
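executeAsync submits the DROP DATABASE statement without waiting for it, so the returned QueryHandle only means the operation was started. If a caller needed to block until the database is actually gone, it could poll the OperationHandle's status; a minimal sketch with a hypothetical waitForFinish helper (the two-argument getOperationStatus matches the client call in the test above; older Hive releases take only the handle):

private void waitForFinish(OperationHandle operationHandle)
    throws HiveSQLException, InterruptedException, ExploreException {
  // Poll Hive until the asynchronous statement leaves its non-terminal states.
  OperationStatus status = cliService.getOperationStatus(operationHandle, false);
  while (status.getState() == OperationState.INITIALIZED
      || status.getState() == OperationState.PENDING
      || status.getState() == OperationState.RUNNING) {
    TimeUnit.MILLISECONDS.sleep(200);
    status = cliService.getOperationStatus(operationHandle, false);
  }
  if (status.getState() != OperationState.FINISHED) {
    throw new ExploreException("DROP DATABASE did not finish: " + status.getState());
  }
}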