Use of org.apache.hyracks.api.dataset.IHyracksDataset in the Apache asterixdb project.
The class RestApiServlet, method doHandle.
/**
 * Parses, validates and executes the given query, writing results (or a JSON
 * error payload) through the session output.
 *
 * @param response       HTTP response whose status code is set here
 * @param query          raw statement text submitted by the client
 * @param sessionOutput  sink for result / error output
 * @param resultDelivery how results are delivered (immediate, deferred, ...)
 * @throws JsonProcessingException if serializing the error response fails
 */
private void doHandle(IServletResponse response, String query, SessionOutput sessionOutput, ResultDelivery resultDelivery) throws JsonProcessingException {
    try {
        // Optimistically report success; downgraded below on failure.
        response.setStatus(HttpResponseStatus.OK);
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        // Double-checked lazy initialization of the shared dataset client:
        // cheap unsynchronized read first, then re-check under the ctx lock.
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        // Parse, validate and execute the submitted statements.
        List<Statement> statements = parserFactory.createParser(query).parse();
        validate(statements);
        MetadataManager.INSTANCE.init();
        IStatementExecutor executor = statementExecutorFactory.create(appCtx, statements, sessionOutput, compilationProvider, componentProvider);
        executor.compileAndExecute(hcc, hds, resultDelivery, null, new IStatementExecutor.Stats());
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        // Parse-level failure: emit a structured JSON error document.
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, pe.getMessage(), pe);
        String errorMessage = ResultUtil.buildParseExceptionMessage(pe, query);
        ObjectNode errorResp = ResultUtil.getErrorResponse(2, errorMessage, "", ResultUtil.extractFullStackTrace(pe));
        sessionOutput.out().write(new ObjectMapper().writeValueAsString(errorResp));
    } catch (Exception e) {
        // Anything else: log and let the shared handler format the error.
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        ResultUtil.apiErrorHandler(sessionOutput.out(), e);
    }
}
Use of org.apache.hyracks.api.dataset.IHyracksDataset in the Apache asterixdb project.
The class QueryStatusApiServlet, method get.
/**
 * Reports the status of an asynchronously submitted query, identified by the
 * result handle encoded in the request path. Responds 400 for an unparseable
 * handle, 404 when no result record exists, otherwise 200 with a JSON body
 * containing the status and, on success, a URL for fetching the result.
 */
@Override
protected void get(IServletRequest request, IServletResponse response) throws Exception {
    final String strHandle = localPath(request);
    final ResultHandle handle = ResultHandle.parse(strHandle);
    if (handle == null) {
        // The path did not contain a well-formed handle -> client error.
        response.setStatus(HttpResponseStatus.BAD_REQUEST);
        return;
    }
    final ResultReader resultReader = new ResultReader(getHyracksDataset(), handle.getJobId(), handle.getResultSetId());
    final DatasetJobRecord.Status resultReaderStatus = resultReader.getStatus();
    if (resultReaderStatus == null) {
        LOGGER.log(Level.INFO, "No results for: \"" + strHandle + "\"");
        response.setStatus(HttpResponseStatus.NOT_FOUND);
        return;
    }
    final ResultStatus resultStatus = resultStatus(resultReaderStatus);
    final Exception ex = extractException(resultReaderStatus);
    // Build the JSON body in memory first, then emit it in a single write.
    final StringWriter buffer = new StringWriter();
    final PrintWriter resultWriter = new PrintWriter(buffer);
    HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
    resultWriter.print("{\n");
    // "hasMore" tells printStatus whether another JSON field follows.
    final boolean hasMore = (ex != null) || ResultStatus.SUCCESS == resultStatus;
    ResultUtil.printStatus(resultWriter, resultStatus, hasMore);
    if (ResultStatus.SUCCESS == resultStatus) {
        // Point the client at the companion "result" endpoint for this handle.
        final String servletPath = servletPath(request).replace("status", "result");
        printHandle(resultWriter, "http://" + host(request) + servletPath + strHandle, false);
    } else if (ex != null) {
        ResultUtil.printError(resultWriter, ex, false);
    }
    resultWriter.print("}\n");
    resultWriter.flush();
    response.setStatus(HttpResponseStatus.OK);
    response.writer().print(buffer.toString());
    if (response.writer().checkError()) {
        LOGGER.warning("Error flushing output writer");
    }
}
Use of org.apache.hyracks.api.dataset.IHyracksDataset in the Apache asterixdb project.
The class AbstractIntegrationTest, method readResults.
/**
 * Reads all result tuples of the given job/result-set and returns each tuple
 * decoded as a String (platform default charset, matching prior behavior).
 *
 * Fixes over the previous version: the stream was closed inside the loop's
 * finally block and then reused on the next iteration (it only worked because
 * close() happened to be a no-op); the return value of read() was ignored, so
 * a short read would have decoded uninitialized trailing bytes.
 *
 * @param spec        job specification (supplies the frame size)
 * @param jobId       id of the job whose results to read
 * @param resultSetId id of the result set within the job
 * @return one String per result tuple, in read order
 * @throws Exception on any read/decode failure
 */
protected List<String> readResults(JobSpecification spec, JobId jobId, ResultSetId resultSetId) throws Exception {
    int nReaders = 1;
    IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor();
    IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
    IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, resultSetId);
    List<String> resultRecords = new ArrayList<>();
    FrameManager resultDisplayFrameMgr = new FrameManager(spec.getFrameSize());
    VSizeFrame frame = new VSizeFrame(resultDisplayFrameMgr);
    ByteBufferInputStream bbis = new ByteBufferInputStream();
    try {
        // read() refills the frame; <= 0 signals end of the result stream.
        while (reader.read(frame) > 0) {
            frameTupleAccessor.reset(frame.getBuffer());
            for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                int start = frameTupleAccessor.getTupleStartOffset(tIndex);
                int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
                bbis.setByteBuffer(frame.getBuffer(), start);
                byte[] recordBytes = new byte[length];
                // Honor the actual number of bytes read (-1 => none available).
                int numRead = bbis.read(recordBytes, 0, length);
                resultRecords.add(new String(recordBytes, 0, Math.max(numRead, 0)));
            }
        }
    } finally {
        // Close exactly once, after all frames have been consumed.
        bbis.close();
    }
    return resultRecords;
}
Use of org.apache.hyracks.api.dataset.IHyracksDataset in the Apache asterixdb project.
The class AbstractQueryApiServlet, method getHyracksDataset.
/**
 * Returns the shared {@code IHyracksDataset} cached in the servlet context,
 * lazily creating it on first use.
 *
 * The whole check-then-create sequence runs under the {@code ctx} lock, so a
 * single null check suffices; the previous version re-read
 * {@code HYRACKS_DATASET_ATTR} a second time inside the same synchronized
 * block, which was dead code (nothing could change it between the two reads).
 *
 * @return the cached or newly created dataset client
 * @throws Exception if obtaining the client connection fails
 */
protected IHyracksDataset getHyracksDataset() throws Exception {
    synchronized (ctx) { // NOSONAR
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            hds = new HyracksDataset(getHyracksClientConnection(), appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
            ctx.put(HYRACKS_DATASET_ATTR, hds);
        }
        return hds;
    }
}
Use of org.apache.hyracks.api.dataset.IHyracksDataset in the Apache asterixdb project.
The class ApiServlet, method post.
/**
 * Handles a web-UI query submission: parses the statements in the selected
 * language (AQL or SQL++), executes them immediately, and writes HTML-formatted
 * results plus timing to the response writer.
 *
 * Fix: {@code OutputFormat.valueOf(null)} throws {@code NullPointerException},
 * not {@code IllegalArgumentException}, so a request without an
 * "output-format" parameter previously escaped the catch below and aborted the
 * whole request. A missing parameter now falls back to CLEAN_JSON.
 */
@Override
protected void post(IServletRequest request, IServletResponse response) {
    // Query language: explicit AQL, otherwise default to SQL++.
    ILangCompilationProvider compilationProvider = "AQL".equals(request.getParameter("query-language")) ? aqlCompilationProvider : sqlppCompilationProvider;
    IParserFactory parserFactory = compilationProvider.getParserFactory();
    // Output format.
    PrintWriter out = response.writer();
    OutputFormat format;
    boolean csvAndHeader = false;
    String output = request.getParameter("output-format");
    try {
        // Guard against a missing parameter: valueOf(null) would throw NPE,
        // which the IllegalArgumentException handler below does not catch.
        format = output == null ? OutputFormat.CLEAN_JSON : OutputFormat.valueOf(output);
    } catch (IllegalArgumentException e) {
        LOGGER.log(Level.INFO, output + ": unsupported output-format, using " + OutputFormat.CLEAN_JSON + " instead", e);
        // Default output format
        format = OutputFormat.CLEAN_JSON;
    }
    String query = request.getParameter("query");
    String wrapperArray = request.getParameter("wrapper-array");
    String printExprParam = request.getParameter("print-expr-tree");
    String printRewrittenExprParam = request.getParameter("print-rewritten-expr-tree");
    String printLogicalPlanParam = request.getParameter("print-logical-plan");
    String printOptimizedLogicalPlanParam = request.getParameter("print-optimized-logical-plan");
    String printJob = request.getParameter("print-job");
    String executeQuery = request.getParameter("execute-query");
    try {
        response.setStatus(HttpResponseStatus.OK);
        HttpUtil.setContentType(response, ContentType.TEXT_HTML, Encoding.UTF8);
    } catch (IOException e) {
        // Cannot even set the content type: give up on this request.
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        return;
    }
    try {
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        // Double-checked lazy initialization of the shared dataset client.
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        IParser parser = parserFactory.createParser(query);
        List<Statement> aqlStatements = parser.parse();
        // Configure session output flags from the request parameters.
        SessionConfig sessionConfig = new SessionConfig(format, true, isSet(executeQuery), true);
        sessionConfig.set(SessionConfig.FORMAT_HTML, true);
        sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csvAndHeader);
        sessionConfig.set(SessionConfig.FORMAT_WRAPPER_ARRAY, isSet(wrapperArray));
        sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam), isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam), isSet(printJob));
        SessionOutput sessionOutput = new SessionOutput(sessionConfig, out);
        MetadataManager.INSTANCE.init();
        IStatementExecutor translator = statementExectorFactory.create(appCtx, aqlStatements, sessionOutput, compilationProvider, componentProvider);
        double duration;
        long startTime = System.currentTimeMillis();
        translator.compileAndExecute(hcc, hds, IStatementExecutor.ResultDelivery.IMMEDIATE, null, new IStatementExecutor.Stats());
        long endTime = System.currentTimeMillis();
        duration = (endTime - startTime) / 1000.00;
        out.println(HTML_STATEMENT_SEPARATOR);
        out.println("<PRE>Duration of all jobs: " + duration + " sec</PRE>");
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        // Parse-level failure: render a web-UI friendly parse error.
        GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
        ResultUtil.webUIParseExceptionHandler(out, pe, query);
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        ResultUtil.webUIErrorHandler(out, e);
    }
}
Aggregations