Usage of io.prestosql.queryeditorui.protocol.JobSessionContext in project hetu-core by openLooKeng.
Example 1: the class ExecutionClient, method runQuery.
public List<UUID> runQuery(final ExecutionRequest request, final String user, final Duration timeout, HttpServletRequest servletRequest) {
    // Splits a possibly multi-statement submission into individual statements,
    // queues one Job per statement, and returns the generated job UUIDs in order.
    String submittedText = request.getQuery();
    JobSessionContext session = request.getSessionContext();
    // Fall back to an empty property map when no session context (or no properties) was supplied.
    Map<String, String> sessionProperties =
            (session == null || session.getProperties() == null) ? ImmutableMap.of() : session.getProperties();
    QueryRunner runner = queryRunnerFactory.create(user, request.getDefaultConnector(), request.getDefaultSchema(), sessionProperties);
    QueryExecutionAuthorizer auth = new QueryExecutionAuthorizer(user, request.getDefaultConnector(), request.getDefaultSchema());
    // When multiple statements are submitted together, split them and execute in sequence.
    List<String> statements = QUERY_SPLITTER.splitToList(submittedText);
    URI originUri = URI.create(servletRequest.getRequestURL().toString());
    // Capacity matches the statement count, so offer() below always succeeds.
    BlockingQueue<Job> pending = new ArrayBlockingQueue<>(statements.size());
    ImmutableList.Builder<UUID> jobIds = ImmutableList.builder();
    for (String statement : statements) {
        final UUID jobId = UUID.randomUUID();
        Job queued = new Job(user, statement, jobId, persistentJobOutputFactory.create(null, jobId), null, JobState.QUEUED, Collections.emptyList(), null, null, null);
        jobIds.add(queued.getUuid());
        pending.offer(queued);
    }
    scheduleExecution(timeout, runner, auth, pending, originUri);
    return jobIds.build();
}
Usage of io.prestosql.queryeditorui.protocol.JobSessionContext in project hetu-core by openLooKeng.
Example 2: the class Execution, method doExecute.
/**
 * Runs this Execution's job end to end: builds the output sink, authorizes the
 * query, streams results through the query client, persists the output, and
 * returns the finished Job.
 *
 * @return the job with final state, stats and output location populated
 * @throws ExecutionFailureException on any failure (parse error, authorization
 *         denial, output-size overflow, cancellation, timeout, or FAILED state)
 */
private Job doExecute() throws ExecutionFailureException {
// Only the first statement is executed here; the splitter yields at least one element.
final String userQuery = QUERY_SPLITTER.splitToList(getJob().getQuery()).get(0);
final JobOutputBuilder outputBuilder;
// Seed with placeholder stats so the job always has a stats object even before execution starts.
job.setQueryStats(createNoOpQueryStats());
try {
outputBuilder = outputBuilderFactory.forJob(job);
} catch (IOException e) {
throw new ExecutionFailureException(job, "Could not create output builder for job", e);
} catch (InvalidQueryException e) {
throw new ExecutionFailureException(job, e.getMessage(), e);
}
final Persistor persistor = persistorFactory.getPersistor(job, job.getOutput());
// Give the output a chance to rewrite the query (e.g. wrap it) before execution.
final String query = job.getOutput().processQuery(userQuery);
if (!persistor.canPersist(authorizer)) {
throw new ExecutionFailureException(job, "Not authorized to create tables", null);
}
final Set<Table> tables = new HashSet<>();
try {
// Static analysis of the statement to find referenced tables for the read-authorization check.
tables.addAll(authorizer.tablesUsedByQuery(query));
} catch (ParsingException e) {
// Record the parse position on the job before failing, so the UI can highlight it.
job.setError(new QueryError(e.getMessage(), null, -1, null, Optional.empty(), null, new ErrorLocation(e.getLineNumber(), e.getColumnNumber()), null));
throw new ExecutionFailureException(job, "Invalid query, could not parse", e);
}
if (!authorizer.isAuthorizedRead(tables)) {
job.setQueryStats(createNoOpQueryStats());
throw new ExecutionFailureException(job, "Cannot access tables", null);
}
// Capture the effective client session on the job for later inspection/replay.
JobSessionContext jobSessionContext = JobSessionContext.buildFromClient(queryRunner.getSession());
job.setSessionContext(jobSessionContext);
QueryClient queryClient = new QueryClient(queryRunner, timeout, query);
try {
// Callback invoked once per result page while the query advances; it mutates
// `job` and `outputBuilder` as a side effect and always returns null.
queryClient.executeWith((client) -> {
if (client == null) {
return null;
}
QueryStatusInfo statusInfo = client.currentStatusInfo();
QueryData data = client.currentData();
List<Column> resultColumns = null;
JobState jobState = null;
QueryError queryError = null;
QueryStats queryStats = null;
// `isCancelled` is checked inside the callback so cancellation takes effect between pages.
if (isCancelled) {
throw new ExecutionFailureException(job, "Query was cancelled", null);
}
if (statusInfo.getError() != null) {
queryError = statusInfo.getError();
jobState = JobState.FAILED;
}
// Fetch server-side stats only while the query has not already failed.
if ((statusInfo.getInfoUri() != null) && (jobState != JobState.FAILED)) {
BasicQueryInfo queryInfo = queryInfoClient.from(statusInfo.getInfoUri(), statusInfo.getId());
if (queryInfo != null) {
queryStats = queryInfo.getQueryStats();
}
}
// Rewrite the absolute info URI into a relative "query.html?..." link, set only once.
// NOTE(review): substring(indexOf(...)) assumes the path always contains "query.html" — confirm.
if (statusInfo.getInfoUri() != null && job.getInfoUri() == null) {
URI infoUri = statusInfo.getInfoUri();
String path = infoUri.getPath();
path = path.substring(path.indexOf("query.html"));
infoUri = URI.create(path + "?" + infoUri.getQuery());
job.setInfoUri(infoUri);
}
// Stats-derived state intentionally overwrites the FAILED value set above when both are present.
if (statusInfo.getStats() != null) {
jobState = JobState.fromStatementState(statusInfo.getStats().getState());
}
try {
if (statusInfo.getColumns() != null) {
resultColumns = statusInfo.getColumns();
outputBuilder.addColumns(resultColumns);
}
if (data.getData() != null) {
// Copy the page before appending: the client's data list may be reused on advance.
List<List<Object>> resultsData = ImmutableList.copyOf(data.getData());
for (List<Object> row : resultsData) {
outputBuilder.addRow(row);
}
}
} catch (FileTooLargeException e) {
throw new ExecutionFailureException(job, "Output file exceeded maximum configured filesize", e);
}
// Rate-limited job-info update (presumably throttled — see rlUpdateJobInfo).
rlUpdateJobInfo(tables, resultColumns, queryStats, jobState, queryError);
return null;
});
} catch (QueryTimeOutException e) {
throw new ExecutionFailureException(job, format("Query exceeded maximum execution time of %s minutes", Duration.millis(e.getElapsedMs()).getStandardMinutes()), e);
}
// After the page loop, pull final stats/state once more and apply them unconditionally.
QueryStatusInfo finalResults = queryClient.finalResults();
if (finalResults != null && finalResults.getInfoUri() != null) {
BasicQueryInfo queryInfo = queryInfoClient.from(finalResults.getInfoUri(), finalResults.getId());
if (queryInfo != null) {
updateJobInfo(null, null, queryInfo.getQueryStats(), JobState.fromStatementState(finalResults.getStats().getState()), finalResults.getError());
}
}
if (job.getState() != JobState.FAILED) {
// Persist the accumulated output; a null location means the persistor had nowhere to store it.
URI location = persistor.persist(outputBuilder, job);
if (location != null) {
job.getOutput().setLocation(location);
}
} else {
// Error details were already attached to the job above, hence the null message/cause here.
throw new ExecutionFailureException(job, null, null);
}
return getJob();
}
Aggregations of JobSessionContext usages.