Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.
The class SQLOperation, method prepare.
/**
 * Compile the query and extract metadata.
 * @param queryState
 * @throws HiveSQLException
 */
public void prepare(QueryState queryState) throws HiveSQLException {
  setState(OperationState.RUNNING);
  try {
    driver = new Driver(queryState, getParentSession().getUserName());
    // queryTimeout == 0 means no timeout
    if (queryTimeout > 0) {
      timeoutExecutor = new ScheduledThreadPoolExecutor(1);
      Runnable timeoutTask = new Runnable() {
        @Override
        public void run() {
          try {
            LOG.info("Query timed out after: " + queryTimeout + " seconds. Cancelling the execution now.");
            SQLOperation.this.cancel(OperationState.TIMEDOUT);
          } catch (HiveSQLException e) {
            LOG.error("Error cancelling the query after timeout: " + queryTimeout + " seconds", e);
          } finally {
            // Stop the timeout executor; its single one-shot task has run.
            timeoutExecutor.shutdown();
          }
        }
      };
      timeoutExecutor.schedule(timeoutTask, queryTimeout, TimeUnit.SECONDS);
    }
    sqlOpDisplay.setQueryDisplay(driver.getQueryDisplay());
    // Set the operation handle information in Driver, so that Thrift API users
    // can use the operation handle they receive to look up query information in
    // Yarn ATS.
    String guid64 = Base64.encodeBase64URLSafeString(
        getHandle().getHandleIdentifier().toTHandleIdentifier().getGuid()).trim();
    driver.setOperationId(guid64);
    // In HiveServer2 mode, we are not able to retry in the FetchTask case,
    // when calling fetch queries, since execute() has already returned.
    // For now, we disable the retry attempts.
    driver.setTryCount(Integer.MAX_VALUE);
    response = driver.compileAndRespond(statement);
    if (0 != response.getResponseCode()) {
      throw toSQLException("Error while compiling statement", response);
    }
    mResultSchema = driver.getSchema();
    // "explain" is an exception for now
    if (driver.getPlan().getFetchTask() != null) {
      // Schema has to be set
      if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) {
        throw new HiveSQLException("Error compiling query: Schema and FieldSchema "
            + "should be set when query plan has a FetchTask");
      }
      resultSchema = new TableSchema(mResultSchema);
      setHasResultSet(true);
    } else {
      setHasResultSet(false);
    }
    // TODO: explain should use a FetchTask for reading
    for (Task<? extends Serializable> task : driver.getPlan().getRootTasks()) {
      if (task.getClass() == ExplainTask.class) {
        resultSchema = new TableSchema(mResultSchema);
        setHasResultSet(true);
        break;
      }
    }
  } catch (HiveSQLException e) {
    setState(OperationState.ERROR);
    throw e;
  } catch (Throwable e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException("Error running query: " + e.toString(), e);
  }
}
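The query-timeout handling above boils down to one pattern: schedule a one-shot cancellation task on a single-thread ScheduledThreadPoolExecutor and shut the executor down once it has fired. Below is a minimal, self-contained sketch of that pattern; the Query class and its cancel() method are hypothetical stand-ins for SQLOperation and its cancel(OperationState) call, not Hive APIs.

import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class QueryTimeoutSketch {

  static class Query {
    private volatile boolean cancelled;
    void cancel() { cancelled = true; }          // stand-in for SQLOperation.cancel(...)
    boolean isCancelled() { return cancelled; }
  }

  public static void main(String[] args) throws InterruptedException {
    final Query query = new Query();
    final long queryTimeout = 2;                 // seconds; 0 would mean "no timeout"
    final ScheduledThreadPoolExecutor timeoutExecutor = new ScheduledThreadPoolExecutor(1);

    timeoutExecutor.schedule(new Runnable() {
      @Override
      public void run() {
        try {
          System.out.println("Query timed out after " + queryTimeout + " seconds; cancelling.");
          query.cancel();
        } finally {
          // One-shot task: release the executor's thread once cancellation has been attempted.
          timeoutExecutor.shutdown();
        }
      }
    }, queryTimeout, TimeUnit.SECONDS);

    Thread.sleep(3000);
    System.out.println("Cancelled? " + query.isCancelled());   // prints: Cancelled? true
  }
}

Shutting the executor down in the finally block mirrors what the Hive code does: the timeout task runs at most once, so keeping the thread alive afterwards would only leak it.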
Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.
The class SQLOperation, method getTaskStatus.
@Override
public String getTaskStatus() throws HiveSQLException {
  if (driver != null) {
    List<QueryDisplay.TaskDisplay> statuses = driver.getQueryDisplay().getTaskDisplays();
    if (statuses != null) {
      ByteArrayOutputStream out = null;
      try {
        ObjectMapper mapper = new ObjectMapper();
        out = new ByteArrayOutputStream();
        mapper.writeValue(out, statuses);
        return out.toString("UTF-8");
      } catch (JsonGenerationException e) {
        throw new HiveSQLException(e);
      } catch (JsonMappingException e) {
        throw new HiveSQLException(e);
      } catch (IOException e) {
        throw new HiveSQLException(e);
      } finally {
        if (out != null) {
          try {
            out.close();
          } catch (IOException e) {
            throw new HiveSQLException(e);
          }
        }
      }
    }
  }
  // Driver not initialized
  return null;
}
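getTaskStatus() returns the task list as a UTF-8 JSON string produced by Jackson's ObjectMapper. The sketch below shows that round trip in isolation, serializing a list of simple status beans and mapping the string back on the caller's side. TaskStatus is a hypothetical stand-in for QueryDisplay.TaskDisplay, and the sketch uses the current com.fasterxml Jackson 2.x packages, whereas the Hive code above may be built against the older org.codehaus.jackson artifacts.

import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.List;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TaskStatusJsonSketch {

  public static class TaskStatus {
    public String taskId;     // public fields keep the Jackson mapping trivial
    public String state;
    public TaskStatus() { }
    public TaskStatus(String taskId, String state) { this.taskId = taskId; this.state = state; }
  }

  public static void main(String[] args) throws Exception {
    List<TaskStatus> statuses =
        Arrays.asList(new TaskStatus("Stage-1", "FINISHED"), new TaskStatus("Stage-2", "RUNNING"));

    // Server side: write the list to an in-memory stream, as getTaskStatus() does.
    ObjectMapper mapper = new ObjectMapper();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    mapper.writeValue(out, statuses);
    String json = out.toString("UTF-8");
    System.out.println(json);   // e.g. [{"taskId":"Stage-1","state":"FINISHED"},...]

    // Client side: map the JSON back into a typed list.
    List<TaskStatus> parsed = mapper.readValue(json, new TypeReference<List<TaskStatus>>() { });
    System.out.println(parsed.size() + " task(s), first state: " + parsed.get(0).state);
  }
}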
Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.
The class SQLOperation, method runQuery.
private void runQuery() throws HiveSQLException {
  try {
    OperationState opState = getStatus().getState();
    // The operation may have been cancelled by another thread.
    if (opState.isTerminal()) {
      LOG.info("Not running the query. Operation is already in terminal state: " + opState
          + ", perhaps cancelled due to query timeout or by another thread.");
      return;
    }
    // In HiveServer2 mode, we are not able to retry in the FetchTask case,
    // when calling fetch queries, since execute() has already returned.
    // For now, we disable the retry attempts.
    driver.setTryCount(Integer.MAX_VALUE);
    response = driver.run();
    if (0 != response.getResponseCode()) {
      throw toSQLException("Error while processing statement", response);
    }
  } catch (Throwable e) {
    /*
     * If the operation was cancelled by another thread, or the execution timed out, Driver#run
     * may return a non-zero response code. We simply return if the operation state is
     * CANCELED, TIMEDOUT or CLOSED; otherwise we throw an exception.
     */
    if ((getStatus().getState() == OperationState.CANCELED)
        || (getStatus().getState() == OperationState.TIMEDOUT)
        || (getStatus().getState() == OperationState.CLOSED)) {
      LOG.warn("Ignore exception in terminal state", e);
      return;
    }
    setState(OperationState.ERROR);
    if (e instanceof HiveSQLException) {
      throw (HiveSQLException) e;
    } else {
      throw new HiveSQLException("Error running query: " + e.toString(), e);
    }
  }
  setState(OperationState.FINISHED);
}
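The catch block above uses a common wrap-or-rethrow idiom: an exception that is already the caller-facing checked type propagates unchanged, and everything else is wrapped so callers only ever deal with one exception type. A minimal sketch of that idiom follows; QueryFailedException and runQuery(Callable) are hypothetical, not Hive APIs.

import java.util.concurrent.Callable;

public class WrapOrRethrowSketch {

  static class QueryFailedException extends Exception {
    QueryFailedException(String message, Throwable cause) { super(message, cause); }
  }

  static void runQuery(Callable<Void> body) throws QueryFailedException {
    try {
      body.call();
    } catch (Throwable e) {
      if (e instanceof QueryFailedException) {
        throw (QueryFailedException) e;        // already the caller-facing type: rethrow as-is
      }
      throw new QueryFailedException("Error running query: " + e, e);  // wrap everything else
    }
  }

  public static void main(String[] args) {
    try {
      runQuery(new Callable<Void>() {
        @Override
        public Void call() {
          throw new IllegalStateException("simulated driver failure");
        }
      });
    } catch (QueryFailedException e) {
      System.out.println(e.getMessage() + " (caused by " + e.getCause().getClass().getSimpleName() + ")");
    }
  }
}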
Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.
The class SQLOperation, method runInternal.
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.PENDING);
  boolean runAsync = shouldRunAsync();
  final boolean asyncPrepare = runAsync
      && HiveConf.getBoolVar(queryState.getConf(), HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_ASYNC_COMPILE);
  if (!asyncPrepare) {
    prepare(queryState);
  }
  if (!runAsync) {
    runQuery();
  } else {
    // Pass ThreadLocals from the foreground (handler) thread to the background thread so that:
    // 1) the ThreadLocal Hive object is set in the background thread,
    // 2) the metastore client in Hive is associated with the right user, and
    // 3) the current UGI is used by the metastore when the metastore runs in embedded mode.
    Runnable work = new BackgroundWork(getCurrentUGI(), parentSession.getSessionHive(),
        SessionState.getPerfLogger(), SessionState.get(), asyncPrepare);
    try {
      // This submit blocks if no background threads are available to run this operation.
      Future<?> backgroundHandle = getParentSession().submitBackgroundOperation(work);
      setBackgroundHandle(backgroundHandle);
    } catch (RejectedExecutionException rejected) {
      setState(OperationState.ERROR);
      throw new HiveSQLException("The background threadpool cannot accept"
          + " new task for execution, please retry the operation", rejected);
    }
  }
}
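Two things are happening in the async branch: per-request context that lives in ThreadLocals on the handler thread is handed to the background task, and a rejected submission is surfaced as a retryable error. The sketch below illustrates both under simplified assumptions; RequestContext, ServiceBusyException, and submitBackgroundOperation here are hypothetical stand-ins, not the Hive BackgroundWork or session APIs.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BackgroundWorkSketch {

  static class ServiceBusyException extends Exception {
    ServiceBusyException(String message, Throwable cause) { super(message, cause); }
  }

  // Per-request context normally visible only on the thread that received the request.
  static final ThreadLocal<String> REQUEST_CONTEXT = new ThreadLocal<String>();

  // Small bounded pool: submissions beyond queue capacity are rejected, as in HiveServer2.
  static final ThreadPoolExecutor POOL =
      new ThreadPoolExecutor(1, 1, 0L, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(2));

  static Future<?> submitBackgroundOperation(final Runnable body) throws ServiceBusyException {
    // Capture the context on the foreground (handler) thread...
    final String capturedContext = REQUEST_CONTEXT.get();
    try {
      return POOL.submit(new Runnable() {
        @Override
        public void run() {
          // ...and re-install it on the background thread before doing the work.
          REQUEST_CONTEXT.set(capturedContext);
          try {
            body.run();
          } finally {
            REQUEST_CONTEXT.remove();
          }
        }
      });
    } catch (RejectedExecutionException rejected) {
      throw new ServiceBusyException("The background threadpool cannot accept new tasks,"
          + " please retry the operation", rejected);
    }
  }

  public static void main(String[] args) throws Exception {
    REQUEST_CONTEXT.set("user=alice, session=42");
    Future<?> handle = submitBackgroundOperation(new Runnable() {
      @Override
      public void run() {
        System.out.println("Running with context: " + REQUEST_CONTEXT.get());
      }
    });
    handle.get();   // wait for completion, as a caller polling the operation status would
    POOL.shutdown();
  }
}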
Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.
The class HiveSessionImpl, method getTableTypes.
@Override
public OperationHandle getTableTypes() throws HiveSQLException {
  acquire(true, true);
  OperationManager operationManager = getOperationManager();
  GetTableTypesOperation operation = operationManager.newGetTableTypesOperation(getSession());
  OperationHandle opHandle = operation.getHandle();
  try {
    operation.run();
    addOpHandle(opHandle);
    return opHandle;
  } catch (HiveSQLException e) {
    operationManager.closeOperation(opHandle);
    throw e;
  } finally {
    release(true, true);
  }
}
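getTableTypes() follows the same template as the other HiveSessionImpl metadata calls: acquire the session, run the operation, close its handle if the run fails, and always release the session. Below is a compilable sketch of that template under hypothetical types; Session, Operation, and Handle stand in for the Hive session, operation, and OperationHandle classes.

public class SessionOperationSketch {

  interface Handle { }

  interface Operation {
    Handle getHandle();
    void run() throws Exception;
  }

  interface Session {
    void acquire();
    void release();
    void closeOperation(Handle handle);
  }

  static Handle runOperation(Session session, Operation operation) throws Exception {
    session.acquire();                       // mirrors acquire(true, true)
    Handle handle = operation.getHandle();
    try {
      operation.run();
      return handle;                         // success: hand the handle back to the client
    } catch (Exception e) {
      session.closeOperation(handle);        // failure: clean up the half-started operation
      throw e;
    } finally {
      session.release();                     // mirrors release(true, true), on every path
    }
  }
}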