Search in sources :

Example 21 with IHyracksClientConnection

use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.

The class FilePartition, method get:

@Override
protected void get(IServletRequest request, IServletResponse response) {
    // Returns, as JSON, the file partitions (splits) of a dataset identified by the
    // "dataverseName" and "datasetName" request parameters, flushing the dataset first
    // so the on-disk files are current.
    response.setStatus(HttpResponseStatus.OK);
    try {
        HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        response.writer().write(e.toString());
        return;
    }
    PrintWriter out = response.writer();
    try {
        ObjectMapper om = new ObjectMapper();
        ObjectNode jsonResponse = om.createObjectNode();
        String dataverseName = request.getParameter("dataverseName");
        String datasetName = request.getParameter("datasetName");
        if (dataverseName == null || datasetName == null) {
            // Both parameters are required; report which style of failure occurred.
            jsonResponse.put("error", "Parameter dataverseName or datasetName is null.");
            out.write(jsonResponse.toString());
            return;
        }
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        // Metadata transaction begins.
        MetadataManager.INSTANCE.init();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // Retrieves file splits of the dataset.
        MetadataProvider metadataProvider = new MetadataProvider(appCtx, null, new StorageComponentProvider());
        // NOTE(review): if an exception is thrown below, mdTxnCtx is never aborted, only
        // the locks are released — confirm whether MetadataManager tolerates abandoned
        // transactions or an explicit abort should be added here.
        try {
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
            if (dataset == null) {
                jsonResponse.put("error", "Dataset " + datasetName + " does not exist in " + "dataverse " + dataverseName);
                out.write(jsonResponse.toString());
                out.flush();
                return;
            }
            boolean temp = dataset.getDatasetDetails().isTemp();
            FileSplit[] fileSplits = metadataProvider.splitsForIndex(mdTxnCtx, dataset, datasetName);
            ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
            List<List<String>> primaryKeys = dataset.getPrimaryKeys();
            // Builds a comma-separated list of primary-key field names.
            StringBuilder pkStrBuf = new StringBuilder();
            for (List<String> keys : primaryKeys) {
                for (String key : keys) {
                    pkStrBuf.append(key).append(",");
                }
            }
            // Trim the trailing comma. Guarded: the unconditional delete(len-1, len) would
            // throw StringIndexOutOfBoundsException when there are no primary-key fields.
            if (pkStrBuf.length() > 0) {
                pkStrBuf.setLength(pkStrBuf.length() - 1);
            }
            // Constructs the returned json object.
            formResponseObject(jsonResponse, fileSplits, recordType, pkStrBuf.toString(), temp, hcc.getNodeControllerInfos());
            // Flush the cached contents of the dataset to file system.
            FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseName, datasetName, datasetName);
            // Metadata transaction commits.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            // Writes file splits.
            out.write(jsonResponse.toString());
        } finally {
            metadataProvider.getLocks().unlock();
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failure handling a request", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        out.write(e.toString());
    } finally {
        out.flush();
    }
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Dataset(org.apache.asterix.metadata.entities.Dataset) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) StorageComponentProvider(org.apache.asterix.file.StorageComponentProvider) IOException(java.io.IOException) FileSplit(org.apache.hyracks.api.io.FileSplit) IOException(java.io.IOException) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) List(java.util.List) ARecordType(org.apache.asterix.om.types.ARecordType) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) PrintWriter(java.io.PrintWriter)

Example 22 with IHyracksClientConnection

use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.

The class ApiServlet, method post:

@Override
protected void post(IServletRequest request, IServletResponse response) {
    // Parses, compiles, and executes the query in the "query" parameter, writing an
    // HTML-formatted result (plus optional plan/job printouts) to the response.
    // Query language
    ILangCompilationProvider compilationProvider = "AQL".equals(request.getParameter("query-language")) ? aqlCompilationProvider : sqlppCompilationProvider;
    IParserFactory parserFactory = compilationProvider.getParserFactory();
    // Output format.
    PrintWriter out = response.writer();
    OutputFormat format;
    boolean csvAndHeader = false;
    String output = request.getParameter("output-format");
    try {
        // Enum.valueOf(null) throws NullPointerException, not IllegalArgumentException,
        // so a missing "output-format" parameter must be handled explicitly or the NPE
        // escapes this catch and surfaces as a generic failure.
        format = output == null ? OutputFormat.CLEAN_JSON : OutputFormat.valueOf(output);
    } catch (IllegalArgumentException e) {
        LOGGER.log(Level.INFO, output + ": unsupported output-format, using " + OutputFormat.CLEAN_JSON + " instead", e);
        // Default output format
        format = OutputFormat.CLEAN_JSON;
    }
    String query = request.getParameter("query");
    String wrapperArray = request.getParameter("wrapper-array");
    String printExprParam = request.getParameter("print-expr-tree");
    String printRewrittenExprParam = request.getParameter("print-rewritten-expr-tree");
    String printLogicalPlanParam = request.getParameter("print-logical-plan");
    String printOptimizedLogicalPlanParam = request.getParameter("print-optimized-logical-plan");
    String printJob = request.getParameter("print-job");
    String executeQuery = request.getParameter("execute-query");
    try {
        response.setStatus(HttpResponseStatus.OK);
        HttpUtil.setContentType(response, ContentType.TEXT_HTML, Encoding.UTF8);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        return;
    }
    try {
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            // Lazily create the shared dataset client; double-checked under the ctx lock
            // so concurrent requests create it at most once.
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        IParser parser = parserFactory.createParser(query);
        List<Statement> aqlStatements = parser.parse();
        SessionConfig sessionConfig = new SessionConfig(format, true, isSet(executeQuery), true);
        sessionConfig.set(SessionConfig.FORMAT_HTML, true);
        sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csvAndHeader);
        sessionConfig.set(SessionConfig.FORMAT_WRAPPER_ARRAY, isSet(wrapperArray));
        sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam), isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam), isSet(printJob));
        SessionOutput sessionOutput = new SessionOutput(sessionConfig, out);
        MetadataManager.INSTANCE.init();
        IStatementExecutor translator = statementExectorFactory.create(appCtx, aqlStatements, sessionOutput, compilationProvider, componentProvider);
        double duration;
        long startTime = System.currentTimeMillis();
        translator.compileAndExecute(hcc, hds, IStatementExecutor.ResultDelivery.IMMEDIATE, null, new IStatementExecutor.Stats());
        long endTime = System.currentTimeMillis();
        duration = (endTime - startTime) / 1000.00;
        out.println(HTML_STATEMENT_SEPARATOR);
        out.println("<PRE>Duration of all jobs: " + duration + " sec</PRE>");
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        // Parse-level failures are rendered as a user-facing parse error, not a 500.
        GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
        ResultUtil.webUIParseExceptionHandler(out, pe, query);
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        ResultUtil.webUIErrorHandler(out, e);
    }
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) SessionConfig(org.apache.asterix.translator.SessionConfig) AsterixException(org.apache.asterix.common.exceptions.AsterixException) ILangCompilationProvider(org.apache.asterix.compiler.provider.ILangCompilationProvider) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IParserFactory(org.apache.asterix.lang.common.base.IParserFactory) PrintWriter(java.io.PrintWriter) Statement(org.apache.asterix.lang.common.base.Statement) OutputFormat(org.apache.asterix.translator.SessionConfig.OutputFormat) TokenMgrError(org.apache.asterix.lang.aql.parser.TokenMgrError) IOException(java.io.IOException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IOException(java.io.IOException) IStatementExecutor(org.apache.asterix.translator.IStatementExecutor) HyracksDataset(org.apache.hyracks.client.dataset.HyracksDataset) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) SessionOutput(org.apache.asterix.translator.SessionOutput) IParser(org.apache.asterix.lang.common.base.IParser)

Example 23 with IHyracksClientConnection

use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.

The class QueryCancellationServlet, method delete:

@Override
protected void delete(IServletRequest request, IServletResponse response) throws IOException {
    // Cancels the running query identified by the client_context_id request parameter.
    final String contextId = request.getParameter(CLIENT_CONTEXT_ID);
    if (contextId == null) {
        // Missing required parameter.
        response.setStatus(HttpResponseStatus.BAD_REQUEST);
        return;
    }
    // Map the client context id to the Hyracks job that is executing the query.
    final IStatementExecutorContext activeQueries =
            (IStatementExecutorContext) ctx.get(ServletConstants.RUNNING_QUERIES_ATTR);
    final IHyracksClientConnection connection =
            (IHyracksClientConnection) ctx.get(ServletConstants.HYRACKS_CONNECTION_ATTR);
    final JobId targetJob = activeQueries.getJobIdFromClientContextId(contextId);
    if (targetJob == null) {
        // No such query is currently running.
        response.setStatus(HttpResponseStatus.NOT_FOUND);
        return;
    }
    try {
        // Ask the cluster controller to cancel the job, then forget the mapping.
        connection.cancelJob(targetJob);
        activeQueries.removeJobIdFromClientContextId(contextId);
        response.setStatus(HttpResponseStatus.OK);
    } catch (Exception e) {
        if (LOGGER.isLoggable(Level.WARNING)) {
            LOGGER.log(Level.WARNING, e.getMessage(), e);
        }
        // Cancellation failed for some internal reason.
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
    }
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) IStatementExecutorContext(org.apache.asterix.translator.IStatementExecutorContext) JobId(org.apache.hyracks.api.job.JobId) IOException(java.io.IOException)

Example 24 with IHyracksClientConnection

use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.

The class RebalanceApiServlet, method rebalanceDataset:

// Rebalances a given dataset onto the given target nodes.
private void rebalanceDataset(String dataverseName, String datasetName, String[] targetNodes) throws Exception {
    final IHyracksClientConnection connection = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
    final MetadataProvider provider = new MetadataProvider(appCtx, null, new StorageComponentProvider());
    // LinkedHashSet dedupes the node list while keeping the caller's ordering.
    final LinkedHashSet<String> nodes = new LinkedHashSet<>(Arrays.asList(targetNodes));
    RebalanceUtil.rebalance(dataverseName, datasetName, nodes, provider, connection);
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) StorageComponentProvider(org.apache.asterix.file.StorageComponentProvider)

Example 25 with IHyracksClientConnection

use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.

The class CCApplication, method setupJSONAPIServer:

// Builds the JSON API HTTP server: wires the shared attributes and registers every
// REST servlet. Registration order is significant for the CLUSTER_STATE family.
protected HttpServer setupJSONAPIServer(ExternalProperties externalProperties) throws Exception {
    final HttpServer apiServer =
            new HttpServer(webManager.getBosses(), webManager.getWorkers(), externalProperties.getAPIServerPort());
    // Shared attributes consumed by the servlets via their context.
    final IHyracksClientConnection hyracksConnection = getHcc();
    apiServer.setAttribute(HYRACKS_CONNECTION_ATTR, hyracksConnection);
    apiServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, appCtx);
    apiServer.setAttribute(ServletConstants.EXECUTOR_SERVICE_ATTR, ccServiceCtx.getControllerService().getExecutor());
    apiServer.setAttribute(ServletConstants.RUNNING_QUERIES_ATTR, statementExecutorCtx);
    apiServer.setAttribute(ServletConstants.SERVICE_CONTEXT_ATTR, ccServiceCtx);
    // AQL REST APIs.
    addServlet(apiServer, Servlets.AQL_QUERY);
    addServlet(apiServer, Servlets.AQL_UPDATE);
    addServlet(apiServer, Servlets.AQL_DDL);
    addServlet(apiServer, Servlets.AQL);
    // SQL++ REST APIs.
    addServlet(apiServer, Servlets.SQLPP_QUERY);
    addServlet(apiServer, Servlets.SQLPP_UPDATE);
    addServlet(apiServer, Servlets.SQLPP_DDL);
    addServlet(apiServer, Servlets.SQLPP);
    // Remaining APIs.
    addServlet(apiServer, Servlets.QUERY_STATUS);
    addServlet(apiServer, Servlets.QUERY_RESULT);
    addServlet(apiServer, Servlets.QUERY_SERVICE);
    addServlet(apiServer, Servlets.QUERY_AQL);
    addServlet(apiServer, Servlets.RUNNING_REQUESTS);
    addServlet(apiServer, Servlets.CONNECTOR);
    addServlet(apiServer, Servlets.SHUTDOWN);
    addServlet(apiServer, Servlets.VERSION);
    addServlet(apiServer, Servlets.CLUSTER_STATE);
    addServlet(apiServer, Servlets.REBALANCE);
    // Must not precede the add of CLUSTER_STATE.
    addServlet(apiServer, Servlets.CLUSTER_STATE_NODE_DETAIL);
    // Must not precede the add of CLUSTER_STATE.
    addServlet(apiServer, Servlets.CLUSTER_STATE_CC_DETAIL);
    addServlet(apiServer, Servlets.DIAGNOSTICS);
    return apiServer;
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) HttpServer(org.apache.hyracks.http.server.HttpServer)

Aggregations

IHyracksClientConnection (org.apache.hyracks.api.client.IHyracksClientConnection)30 HyracksConnection (org.apache.hyracks.api.client.HyracksConnection)13 JobId (org.apache.hyracks.api.job.JobId)13 JobSpecification (org.apache.hyracks.api.job.JobSpecification)11 CmdLineParser (org.kohsuke.args4j.CmdLineParser)10 ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode)8 PrintWriter (java.io.PrintWriter)8 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)7 IOException (java.io.IOException)7 Test (org.junit.Test)4 HashMap (java.util.HashMap)3 AsterixException (org.apache.asterix.common.exceptions.AsterixException)3 IServletRequest (org.apache.hyracks.http.api.IServletRequest)3 IServletResponse (org.apache.hyracks.http.api.IServletResponse)3 ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode)2 FullHttpRequest (io.netty.handler.codec.http.FullHttpRequest)2 ByteArrayOutputStream (java.io.ByteArrayOutputStream)2 Date (java.util.Date)2 Map (java.util.Map)2 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)2