Use of io.prestosql.client.ErrorLocation in project hetu-core by openlookeng.
The class CubeConsole, method createCubeCommand.
/**
 * Process the CREATE CUBE query.
 *
 * @param query query
 * @param queryRunner queryRunner
 * @param outputFormat outputFormat
 * @param schemaChanged schemaChanged
 * @param usePager usePager
 * @param showProgress showProgress
 * @param terminal terminal
 * @param out out
 * @param errorChannel errorChannel
 * @return true if the CREATE CUBE query was processed successfully, false otherwise
 */
public boolean createCubeCommand(String query, QueryRunner queryRunner, ClientOptions.OutputFormat outputFormat, Runnable schemaChanged, boolean usePager, boolean showProgress, Terminal terminal, PrintStream out, PrintStream errorChannel) {
    boolean success = true;
    SqlParser parser = new SqlParser();
    QualifiedName cubeName = null;
    try {
        CreateCube createCube = (CreateCube) parser.createStatement(query, new ParsingOptions(ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE));
        cubeName = createCube.getCubeName();
        QualifiedName sourceTableName = createCube.getSourceTableName();
        String whereClause = createCube.getWhere().get().toString();
        Set<FunctionCall> aggregations = createCube.getAggregations();
        List<Identifier> groupingSet = createCube.getGroupingSet();
        List<Property> properties = createCube.getProperties();
        boolean notExists = createCube.isNotExists();
        CreateCube modifiedCreateCube = new CreateCube(cubeName, sourceTableName, groupingSet, aggregations, notExists, properties, Optional.empty(), createCube.getSourceFilter().orElse(null));
        String queryCreateCube = SqlFormatter.formatSql(modifiedCreateCube, Optional.empty());
        if (!console.runQuery(queryRunner, queryCreateCube, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
            return false;
        }
        // check whether the create cube expression can be processed
        if (isSupportedExpression(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
            if (createCube.getWhere().get() instanceof BetweenPredicate) {
                // process the between predicate in the create cube query where clause
                success = processBetweenPredicate(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel, parser);
            }
            if (createCube.getWhere().get() instanceof ComparisonExpression) {
                // process the comparison expression in the create cube query where clause
                success = processComparisonExpression(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel, parser);
            }
        } else {
            // if the create cube query cannot be processed with multiple inserts, only a single insert is run internally
            String queryInsert = String.format(INSERT_INTO_CUBE_STRING, cubeName, whereClause);
            success = console.runQuery(queryRunner, queryInsert, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
        if (!success) {
            // roll back the unsuccessful create cube query
            String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
            console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
    } catch (ParsingException e) {
        if (cubeName != null) {
            // roll back the unsuccessful create cube query
            String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
            console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
        System.out.println(e.getMessage());
        Query.renderErrorLocation(query, new ErrorLocation(e.getLineNumber(), e.getColumnNumber()), errorChannel);
        success = false;
    } catch (Exception e) {
        if (cubeName != null) {
            // roll back the unsuccessful create cube query
            String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
            console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
        // add a blank line after the progress bar
        System.out.println();
        System.out.println(e.getMessage());
        success = false;
    }
    return success;
}
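The catch block above converts the parser's 1-based line and column offsets into an ErrorLocation before rendering the error under the offending token. A minimal standalone sketch of that pattern follows; the ErrorLocationExample class name and the sample query are illustrative, while the parser and client calls are the same ones used above.

import io.prestosql.client.ErrorLocation;
import io.prestosql.sql.parser.ParsingException;
import io.prestosql.sql.parser.ParsingOptions;
import io.prestosql.sql.parser.SqlParser;

public final class ErrorLocationExample {
    public static void main(String[] args) {
        // deliberately malformed query: the parenthesis is never closed
        String query = "SELECT * FROM orders WHERE (price > 10";
        try {
            new SqlParser().createStatement(query, new ParsingOptions(ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE));
        } catch (ParsingException e) {
            // ParsingException exposes 1-based line/column offsets that map directly onto ErrorLocation
            ErrorLocation location = new ErrorLocation(e.getLineNumber(), e.getColumnNumber());
            System.err.printf("error at line %d, column %d: %s%n",
                    location.getLineNumber(), location.getColumnNumber(), e.getMessage());
        }
    }
}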
Use of io.prestosql.client.ErrorLocation in project hetu-core by openlookeng.
The class Execution, method doExecute.
private Job doExecute() throws ExecutionFailureException {
    final String userQuery = QUERY_SPLITTER.splitToList(getJob().getQuery()).get(0);
    final JobOutputBuilder outputBuilder;
    job.setQueryStats(createNoOpQueryStats());
    try {
        outputBuilder = outputBuilderFactory.forJob(job);
    } catch (IOException e) {
        throw new ExecutionFailureException(job, "Could not create output builder for job", e);
    } catch (InvalidQueryException e) {
        throw new ExecutionFailureException(job, e.getMessage(), e);
    }
    final Persistor persistor = persistorFactory.getPersistor(job, job.getOutput());
    final String query = job.getOutput().processQuery(userQuery);
    if (!persistor.canPersist(authorizer)) {
        throw new ExecutionFailureException(job, "Not authorized to create tables", null);
    }
    // determine which tables the query touches; a parse failure is recorded on the job
    // as a QueryError carrying the ErrorLocation of the offending token
    final Set<Table> tables = new HashSet<>();
    try {
        tables.addAll(authorizer.tablesUsedByQuery(query));
    } catch (ParsingException e) {
        job.setError(new QueryError(e.getMessage(), null, -1, null, Optional.empty(), null, new ErrorLocation(e.getLineNumber(), e.getColumnNumber()), null));
        throw new ExecutionFailureException(job, "Invalid query, could not parse", e);
    }
    if (!authorizer.isAuthorizedRead(tables)) {
        job.setQueryStats(createNoOpQueryStats());
        throw new ExecutionFailureException(job, "Cannot access tables", null);
    }
    JobSessionContext jobSessionContext = JobSessionContext.buildFromClient(queryRunner.getSession());
    job.setSessionContext(jobSessionContext);
    QueryClient queryClient = new QueryClient(queryRunner, timeout, query);
    try {
        // poll the statement client, forwarding state, stats, errors and result rows to the job
        queryClient.executeWith((client) -> {
            if (client == null) {
                return null;
            }
            QueryStatusInfo statusInfo = client.currentStatusInfo();
            QueryData data = client.currentData();
            List<Column> resultColumns = null;
            JobState jobState = null;
            QueryError queryError = null;
            QueryStats queryStats = null;
            if (isCancelled) {
                throw new ExecutionFailureException(job, "Query was cancelled", null);
            }
            // a reported error moves the job to the FAILED state
            if (statusInfo.getError() != null) {
                queryError = statusInfo.getError();
                jobState = JobState.FAILED;
            }
            if ((statusInfo.getInfoUri() != null) && (jobState != JobState.FAILED)) {
                BasicQueryInfo queryInfo = queryInfoClient.from(statusInfo.getInfoUri(), statusInfo.getId());
                if (queryInfo != null) {
                    queryStats = queryInfo.getQueryStats();
                }
            }
            if (statusInfo.getInfoUri() != null && job.getInfoUri() == null) {
                URI infoUri = statusInfo.getInfoUri();
                String path = infoUri.getPath();
                path = path.substring(path.indexOf("query.html"));
                infoUri = URI.create(path + "?" + infoUri.getQuery());
                job.setInfoUri(infoUri);
            }
            if (statusInfo.getStats() != null) {
                jobState = JobState.fromStatementState(statusInfo.getStats().getState());
            }
            try {
                // append newly received columns and rows to the output builder
                if (statusInfo.getColumns() != null) {
                    resultColumns = statusInfo.getColumns();
                    outputBuilder.addColumns(resultColumns);
                }
                if (data.getData() != null) {
                    List<List<Object>> resultsData = ImmutableList.copyOf(data.getData());
                    for (List<Object> row : resultsData) {
                        outputBuilder.addRow(row);
                    }
                }
            } catch (FileTooLargeException e) {
                throw new ExecutionFailureException(job, "Output file exceeded maximum configured filesize", e);
            }
            rlUpdateJobInfo(tables, resultColumns, queryStats, jobState, queryError);
            return null;
        });
    } catch (QueryTimeOutException e) {
        throw new ExecutionFailureException(job, format("Query exceeded maximum execution time of %s minutes", Duration.millis(e.getElapsedMs()).getStandardMinutes()), e);
    }
    QueryStatusInfo finalResults = queryClient.finalResults();
    if (finalResults != null && finalResults.getInfoUri() != null) {
        BasicQueryInfo queryInfo = queryInfoClient.from(finalResults.getInfoUri(), finalResults.getId());
        if (queryInfo != null) {
            updateJobInfo(null, null, queryInfo.getQueryStats(), JobState.fromStatementState(finalResults.getStats().getState()), finalResults.getError());
        }
    }
    // persist the collected output only when the query did not fail
    if (job.getState() != JobState.FAILED) {
        URI location = persistor.persist(outputBuilder, job);
        if (location != null) {
            job.getOutput().setLocation(location);
        }
    } else {
        throw new ExecutionFailureException(job, null, null);
    }
    return getJob();
}
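In doExecute, a ParsingException is likewise surfaced as a QueryError whose ErrorLocation records where parsing failed. A hedged sketch of how a consumer of the failed job might render that stored location follows; the JobErrorRenderer helper is illustrative, and only the io.prestosql.client getters used below are assumed.

import io.prestosql.client.ErrorLocation;
import io.prestosql.client.QueryError;

final class JobErrorRenderer {
    // Formats a failed job's error message, appending the parse position when one was recorded.
    static String describe(QueryError error) {
        ErrorLocation location = error.getErrorLocation();
        if (location == null) {
            return error.getMessage();
        }
        return String.format("%s (line %d, column %d)",
                error.getMessage(), location.getLineNumber(), location.getColumnNumber());
    }
}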