Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
Class FilePartition, method get.
@Override
protected void get(IServletRequest request, IServletResponse response) {
    response.setStatus(HttpResponseStatus.OK);
    try {
        HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        response.writer().write(e.toString());
        return;
    }
    PrintWriter out = response.writer();
    try {
        ObjectMapper om = new ObjectMapper();
        ObjectNode jsonResponse = om.createObjectNode();
        String dataverseName = request.getParameter("dataverseName");
        String datasetName = request.getParameter("datasetName");
        if (dataverseName == null || datasetName == null) {
            jsonResponse.put("error", "Parameter dataverseName or datasetName is null.");
            out.write(jsonResponse.toString());
            return;
        }
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        // Metadata transaction begins.
        MetadataManager.INSTANCE.init();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // Retrieves file splits of the dataset.
        MetadataProvider metadataProvider = new MetadataProvider(appCtx, null, new StorageComponentProvider());
        try {
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
            if (dataset == null) {
                jsonResponse.put("error",
                        "Dataset " + datasetName + " does not exist in dataverse " + dataverseName);
                out.write(jsonResponse.toString());
                out.flush();
                return;
            }
            boolean temp = dataset.getDatasetDetails().isTemp();
            // The primary index shares the dataset's name, so this returns the dataset's file splits.
            FileSplit[] fileSplits = metadataProvider.splitsForIndex(mdTxnCtx, dataset, datasetName);
            ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
                    dataset.getItemTypeName());
            List<List<String>> primaryKeys = dataset.getPrimaryKeys();
            // Builds a comma-separated list of the primary-key fields.
            StringBuilder pkStrBuf = new StringBuilder();
            for (List<String> keys : primaryKeys) {
                for (String key : keys) {
                    pkStrBuf.append(key).append(",");
                }
            }
            // Drops the trailing comma.
            pkStrBuf.delete(pkStrBuf.length() - 1, pkStrBuf.length());
            // Constructs the returned json object.
            formResponseObject(jsonResponse, fileSplits, recordType, pkStrBuf.toString(), temp,
                    hcc.getNodeControllerInfos());
            // Flushes the cached contents of the dataset to the file system.
            FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseName, datasetName, datasetName);
            // Metadata transaction commits.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            // Writes file splits.
            out.write(jsonResponse.toString());
        } finally {
            metadataProvider.getLocks().unlock();
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failure handling a request", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        out.write(e.toString());
    } finally {
        out.flush();
    }
}
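For orientation, a minimal client-side sketch of how such an endpoint might be exercised over plain HTTP follows. The /connector path, the 19002 port, and the Default/Customers names are illustrative assumptions, not values taken from the snippet above.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class FilePartitionClient {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint and port; adjust to wherever the servlet is actually mounted.
        URL url = new URL("http://localhost:19002/connector?dataverseName=Default&datasetName=Customers");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                // The body is the JSON object built by formResponseObject: file splits,
                // record type, primary keys, and node controller information.
                System.out.println(line);
            }
        }
    }
}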
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
Class ApiServlet, method post.
@Override
protected void post(IServletRequest request, IServletResponse response) {
    // Query language: AQL when requested explicitly, SQL++ otherwise.
    ILangCompilationProvider compilationProvider = "AQL".equals(request.getParameter("query-language"))
            ? aqlCompilationProvider : sqlppCompilationProvider;
    IParserFactory parserFactory = compilationProvider.getParserFactory();
    // Output format.
    PrintWriter out = response.writer();
    OutputFormat format;
    boolean csvAndHeader = false;
    String output = request.getParameter("output-format");
    try {
        // Enum.valueOf throws IllegalArgumentException for unknown names and
        // NullPointerException when the parameter is absent.
        format = OutputFormat.valueOf(output);
    } catch (IllegalArgumentException | NullPointerException e) {
        LOGGER.log(Level.INFO, output + ": unsupported output-format, using " + OutputFormat.CLEAN_JSON
                + " instead", e);
        // Default output format.
        format = OutputFormat.CLEAN_JSON;
    }
    String query = request.getParameter("query");
    String wrapperArray = request.getParameter("wrapper-array");
    String printExprParam = request.getParameter("print-expr-tree");
    String printRewrittenExprParam = request.getParameter("print-rewritten-expr-tree");
    String printLogicalPlanParam = request.getParameter("print-logical-plan");
    String printOptimizedLogicalPlanParam = request.getParameter("print-optimized-logical-plan");
    String printJob = request.getParameter("print-job");
    String executeQuery = request.getParameter("execute-query");
    try {
        response.setStatus(HttpResponseStatus.OK);
        HttpUtil.setContentType(response, ContentType.TEXT_HTML, Encoding.UTF8);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        return;
    }
    try {
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            // Double-checked locking: create the dataset client lazily, once, and cache it in the context.
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(),
                            ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        IParser parser = parserFactory.createParser(query);
        List<Statement> statements = parser.parse();
        SessionConfig sessionConfig = new SessionConfig(format, true, isSet(executeQuery), true);
        sessionConfig.set(SessionConfig.FORMAT_HTML, true);
        sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csvAndHeader);
        sessionConfig.set(SessionConfig.FORMAT_WRAPPER_ARRAY, isSet(wrapperArray));
        sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam), isSet(printLogicalPlanParam),
                isSet(printOptimizedLogicalPlanParam), isSet(printJob));
        SessionOutput sessionOutput = new SessionOutput(sessionConfig, out);
        MetadataManager.INSTANCE.init();
        IStatementExecutor translator = statementExecutorFactory.create(appCtx, statements, sessionOutput,
                compilationProvider, componentProvider);
        long startTime = System.currentTimeMillis();
        translator.compileAndExecute(hcc, hds, IStatementExecutor.ResultDelivery.IMMEDIATE, null,
                new IStatementExecutor.Stats());
        long endTime = System.currentTimeMillis();
        double duration = (endTime - startTime) / 1000.00;
        out.println(HTML_STATEMENT_SEPARATOR);
        out.println("<PRE>Duration of all jobs: " + duration + " sec</PRE>");
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
        ResultUtil.webUIParseExceptionHandler(out, pe, query);
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        ResultUtil.webUIErrorHandler(out, e);
    }
}
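The snippet relies on an isSet(...) helper that is not shown here. A plausible reconstruction, assuming the servlet treats a boolean request parameter as set only when it is literally "true" (the real helper lives elsewhere in ApiServlet and may differ in detail):

// Hypothetical reconstruction of the isSet(...) helper used above.
private static boolean isSet(String requestParameter) {
    // Null-safe: a parameter counts as set only when present and equal to "true".
    return "true".equals(requestParameter);
}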
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
Class QueryCancellationServlet, method delete.
@Override
protected void delete(IServletRequest request, IServletResponse response) throws IOException {
    // Gets the parameter client_context_id from the request.
    String clientContextId = request.getParameter(CLIENT_CONTEXT_ID);
    if (clientContextId == null) {
        response.setStatus(HttpResponseStatus.BAD_REQUEST);
        return;
    }
    // Retrieves the corresponding Hyracks job id.
    IStatementExecutorContext runningQueries =
            (IStatementExecutorContext) ctx.get(ServletConstants.RUNNING_QUERIES_ATTR);
    IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(ServletConstants.HYRACKS_CONNECTION_ATTR);
    JobId jobId = runningQueries.getJobIdFromClientContextId(clientContextId);
    if (jobId == null) {
        // Response: NOT FOUND.
        response.setStatus(HttpResponseStatus.NOT_FOUND);
        return;
    }
    try {
        // Cancels the ongoing job.
        hcc.cancelJob(jobId);
        // Removes the cancelled query from the map of running queries.
        runningQueries.removeJobIdFromClientContextId(clientContextId);
        // Response: OK.
        response.setStatus(HttpResponseStatus.OK);
    } catch (Exception e) {
        if (LOGGER.isLoggable(Level.WARNING)) {
            LOGGER.log(Level.WARNING, e.getMessage(), e);
        }
        // Response: INTERNAL SERVER ERROR.
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
    }
}
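A minimal sketch of a client issuing the cancellation request follows. The host, port, path, and client context id are assumptions for illustration; the status codes in the comment mirror the branches of the method above.

import java.net.HttpURLConnection;
import java.net.URL;

public class CancelQueryExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical host, port, path, and id; substitute your deployment's values.
        URL url = new URL("http://localhost:19002/admin/requests/running?client_context_id=abc-123");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("DELETE");
        // 200 = cancelled, 400 = missing parameter, 404 = no such running query, 500 = cancellation failed.
        System.out.println("HTTP status: " + conn.getResponseCode());
        conn.disconnect();
    }
}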
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
Class RebalanceApiServlet, method rebalanceDataset.
// Rebalances a given dataset.
private void rebalanceDataset(String dataverseName, String datasetName, String[] targetNodes) throws Exception {
    IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
    MetadataProvider metadataProvider = new MetadataProvider(appCtx, null, new StorageComponentProvider());
    // The LinkedHashSet drops duplicate node names while preserving the caller's ordering.
    RebalanceUtil.rebalance(dataverseName, datasetName, new LinkedHashSet<>(Arrays.asList(targetNodes)),
            metadataProvider, hcc);
}
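As noted in the comment above, the LinkedHashSet wrapper drops duplicate node names while keeping the order the caller listed them in. A tiny self-contained demo of that behavior (the node names are made up):

import java.util.Arrays;
import java.util.LinkedHashSet;

public class TargetNodesDemo {
    public static void main(String[] args) {
        // Duplicates collapse, but the first-seen order of the nodes is kept.
        String[] targetNodes = { "nc1", "nc2", "nc1", "nc3" };
        System.out.println(new LinkedHashSet<>(Arrays.asList(targetNodes)));
        // Prints: [nc1, nc2, nc3]
    }
}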
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
Class CCApplication, method setupJSONAPIServer.
protected HttpServer setupJSONAPIServer(ExternalProperties externalProperties) throws Exception {
    HttpServer jsonAPIServer =
            new HttpServer(webManager.getBosses(), webManager.getWorkers(), externalProperties.getAPIServerPort());
    IHyracksClientConnection hcc = getHcc();
    jsonAPIServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
    jsonAPIServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, appCtx);
    jsonAPIServer.setAttribute(ServletConstants.EXECUTOR_SERVICE_ATTR,
            ccServiceCtx.getControllerService().getExecutor());
    jsonAPIServer.setAttribute(ServletConstants.RUNNING_QUERIES_ATTR, statementExecutorCtx);
    jsonAPIServer.setAttribute(ServletConstants.SERVICE_CONTEXT_ATTR, ccServiceCtx);
    // AQL REST APIs.
    addServlet(jsonAPIServer, Servlets.AQL_QUERY);
    addServlet(jsonAPIServer, Servlets.AQL_UPDATE);
    addServlet(jsonAPIServer, Servlets.AQL_DDL);
    addServlet(jsonAPIServer, Servlets.AQL);
    // SQL++ REST APIs.
    addServlet(jsonAPIServer, Servlets.SQLPP_QUERY);
    addServlet(jsonAPIServer, Servlets.SQLPP_UPDATE);
    addServlet(jsonAPIServer, Servlets.SQLPP_DDL);
    addServlet(jsonAPIServer, Servlets.SQLPP);
    // Other APIs.
    addServlet(jsonAPIServer, Servlets.QUERY_STATUS);
    addServlet(jsonAPIServer, Servlets.QUERY_RESULT);
    addServlet(jsonAPIServer, Servlets.QUERY_SERVICE);
    addServlet(jsonAPIServer, Servlets.QUERY_AQL);
    addServlet(jsonAPIServer, Servlets.RUNNING_REQUESTS);
    addServlet(jsonAPIServer, Servlets.CONNECTOR);
    addServlet(jsonAPIServer, Servlets.SHUTDOWN);
    addServlet(jsonAPIServer, Servlets.VERSION);
    addServlet(jsonAPIServer, Servlets.CLUSTER_STATE);
    addServlet(jsonAPIServer, Servlets.REBALANCE);
    // Must not precede the add of CLUSTER_STATE.
    addServlet(jsonAPIServer, Servlets.CLUSTER_STATE_NODE_DETAIL);
    // Must not precede the add of CLUSTER_STATE.
    addServlet(jsonAPIServer, Servlets.CLUSTER_STATE_CC_DETAIL);
    addServlet(jsonAPIServer, Servlets.DIAGNOSTICS);
    return jsonAPIServer;
}
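The setAttribute calls above are the producer half of the ctx.get(...) lookups in the earlier servlets: the application publishes shared singletons, such as the IHyracksClientConnection, on the server at startup, and each servlet reads them back from its context map on every request. A minimal stand-alone sketch of that pattern, using a plain map and placeholder objects as stand-ins (none of AsterixDB's actual classes are involved):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class AttributePatternDemo {
    private static final String HYRACKS_CONNECTION_ATTR = "hyracks.connection";

    public static void main(String[] args) {
        // Stand-in for the server-wide attribute map exposed to servlets.
        ConcurrentMap<String, Object> ctx = new ConcurrentHashMap<>();
        // Application side: publish the shared connection object once at startup.
        Object hcc = new Object(); // stand-in for an IHyracksClientConnection
        ctx.put(HYRACKS_CONNECTION_ATTR, hcc);
        // Servlet side: each request handler casts the attribute back out.
        Object fromServlet = ctx.get(HYRACKS_CONNECTION_ATTR);
        System.out.println("Same instance shared with servlets: " + (fromServlet == hcc));
    }
}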