
Example 1 with QueryStats

Use of net.opentsdb.stats.QueryStats in project opentsdb by OpenTSDB.

In class TestHttpJsonSerializer, method validateTestQuery.

/**
   * Helper to validate (set) the time series query
   * @param data_query The query to validate
   */
private void validateTestQuery(final TSQuery data_query) {
    data_query.validateAndSetQuery();
    data_query.setQueryStats(new QueryStats(remote, data_query, null));
}
Also used: QueryStats (net.opentsdb.stats.QueryStats)
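
For context, here is a minimal sketch of how a test might drive this helper end to end, assuming the stock TSQuery/TSSubQuery setters and the three-argument QueryStats constructor used above; the class name, metric, time range, and remote address are illustrative only, not taken from the test class:

import java.net.InetSocketAddress;
import java.util.ArrayList;

import net.opentsdb.core.TSQuery;
import net.opentsdb.core.TSSubQuery;
import net.opentsdb.stats.QueryStats;

public class QueryStatsSketch {

    /** Builds a validated query with stats attached (illustrative values). */
    public static TSQuery buildValidatedQuery() {
        final TSSubQuery sub = new TSSubQuery();
        // hypothetical metric and aggregator
        sub.setMetric("sys.cpu.user");
        sub.setAggregator("sum");

        final TSQuery data_query = new TSQuery();
        data_query.setStart("1h-ago");
        final ArrayList<TSSubQuery> subs = new ArrayList<TSSubQuery>(1);
        subs.add(sub);
        data_query.setQueries(subs);

        // Validate first, then attach the stats object, mirroring the helper above.
        data_query.validateAndSetQuery();
        final InetSocketAddress remote = new InetSocketAddress("127.0.0.1", 4242);
        data_query.setQueryStats(new QueryStats(remote, data_query, null));
        return data_query;
    }
}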

Example 2 with QueryStats

Use of net.opentsdb.stats.QueryStats in project opentsdb by OpenTSDB.

In class QueryExecutor, method execute.

/**
   * Execute the RPC and serialize the response
   * @param query The HTTP query to parse and return results to
   */
public void execute(final HttpQuery query) {
    http_query = query;
    final QueryStats query_stats = new QueryStats(query.getRemoteAddress(), ts_query, query.getHeaders());
    ts_query.setQueryStats(query_stats);
    /**
     * Sends the serialized results to the caller. This should be the very
     * last callback executed.
     */
    class CompleteCB implements Callback<Object, ChannelBuffer> {

        @Override
        public Object call(final ChannelBuffer cb) throws Exception {
            query.sendReply(cb);
            return null;
        }
    }
    /**
     * After all of the queries have run and we have data (or not), we
     * need to compile the iterators.
     * This class could probably be improved:
     * First we iterate over the results AND for each result, iterate over
     * the expressions, giving a time synced iterator to each expression that 
     * needs the result set.
     * THEN we iterate over the expressions again and build a DAG to determine
     * if any of the expressions require the output of another expression. If so
     * then we add the expressions to the proper parent and compile them in
     * order.
     * After all of that we're ready to start serializing and iterating
     * over the results.
     */
    class QueriesCB implements Callback<Object, ArrayList<DataPoints[]>> {

        public Object call(final ArrayList<DataPoints[]> query_results) throws Exception {
            for (int i = 0; i < query_results.size(); i++) {
                final TSSubQuery sub = ts_query.getQueries().get(i);
                Iterator<Entry<String, TSSubQuery>> it = sub_queries.entrySet().iterator();
                while (it.hasNext()) {
                    final Entry<String, TSSubQuery> entry = it.next();
                    if (entry.getValue().equals(sub)) {
                        sub_query_results.put(entry.getKey(), query_results.get(i));
                        for (final ExpressionIterator ei : expressions.values()) {
                            if (ei.getVariableNames().contains(entry.getKey())) {
                                final TimeSyncedIterator tsi = new TimeSyncedIterator(entry.getKey(), sub.getFilterTagKs(), query_results.get(i));
                                final NumericFillPolicy fill = fills.get(entry.getKey());
                                if (fill != null) {
                                    tsi.setFillPolicy(fill);
                                }
                                ei.addResults(entry.getKey(), tsi);
                                if (LOG.isDebugEnabled()) {
                                    LOG.debug("Added results for " + entry.getKey() + " to " + ei.getId());
                                }
                            }
                        }
                    }
                }
            }
            // handle nested expressions
            final DirectedAcyclicGraph<String, DefaultEdge> graph = new DirectedAcyclicGraph<String, DefaultEdge>(DefaultEdge.class);
            for (final Entry<String, ExpressionIterator> eii : expressions.entrySet()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(String.format("Expression entry key is %s, value is %s", eii.getKey(), eii.getValue().toString()));
                    LOG.debug(String.format("Time to loop through the variable names " + "for %s", eii.getKey()));
                }
                if (!graph.containsVertex(eii.getKey())) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Adding vertex " + eii.getKey());
                    }
                    graph.addVertex(eii.getKey());
                }
                for (final String var : eii.getValue().getVariableNames()) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format("var is %s", var));
                    }
                    final ExpressionIterator ei = expressions.get(var);
                    if (ei != null) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(String.format("The expression iterator for %s is %s", var, ei.toString()));
                        }
                        // TODO - really ought to calculate this earlier
                        if (eii.getKey().equals(var)) {
                            throw new IllegalArgumentException("Self referencing expression found: " + eii.getKey());
                        }
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Nested expression detected. " + eii.getKey() + " depends on " + var);
                        }
                        if (!graph.containsVertex(eii.getKey())) {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("Added vertex " + eii.getKey());
                            }
                            graph.addVertex(eii.getKey());
                        } else if (LOG.isDebugEnabled()) {
                            LOG.debug("Already contains vertex " + eii.getKey());
                        }
                        if (!graph.containsVertex(var)) {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("Added vertex " + var);
                            }
                            graph.addVertex(var);
                        } else if (LOG.isDebugEnabled()) {
                            LOG.debug("Already contains vertex " + var);
                        }
                        try {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("Added Edge " + eii.getKey() + " - " + var);
                            }
                            graph.addDagEdge(eii.getKey(), var);
                        } catch (CycleFoundException cfe) {
                            throw new IllegalArgumentException("Circular reference found: " + eii.getKey(), cfe);
                        }
                    } else if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format("The expression iterator for %s is null", var));
                    }
                }
            }
            // compile all of the expressions
            final long intersect_start = DateTime.currentTimeMillis();
            final Integer expressionLength = expressions.size();
            final ExpressionIterator[] compile_stack = new ExpressionIterator[expressionLength];
            final TopologicalOrderIterator<String, DefaultEdge> it = new TopologicalOrderIterator<String, DefaultEdge>(graph);
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("Expressions Size is %d", expressionLength));
                LOG.debug(String.format("Topology Iterator %s", it.toString()));
            }
            int i = 0;
            while (it.hasNext()) {
                String next = it.next();
                if (LOG.isDebugEnabled()) {
                    LOG.debug(String.format("Expression: %s", next));
                }
                ExpressionIterator ei = expressions.get(next);
                if (ei == null) {
                    LOG.error(String.format("The expression iterator for %s is null", next));
                } else if (LOG.isDebugEnabled()) {
                    LOG.debug(String.format("Expression Iterator: %s", ei.toString()));
                }
                compile_stack[i] = ei;
                if (LOG.isDebugEnabled()) {
                    LOG.debug(String.format("Added expression %s to compile_stack[%d]", next, i));
                }
                i++;
            }
            if (i != expressionLength) {
                throw new IOException(String.format("Internal Error: Fewer " + "expressions were added to the compile stack than " + "expressions.size (%d instead of %d)", i, expressionLength));
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("compile stack length: %d", compile_stack.length));
            }
            for (int x = compile_stack.length - 1; x >= 0; x--) {
                if (compile_stack[x] == null) {
                    throw new NullPointerException(String.format("Item %d in " + "compile_stack[] is null", x));
                }
                // look for and add expressions
                for (final String var : compile_stack[x].getVariableNames()) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format("Looking for variable %s for %s", var, compile_stack[x].getId()));
                    }
                    ExpressionIterator source = expressions.get(var);
                    if (source != null) {
                        compile_stack[x].addResults(var, source.getCopy());
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(String.format("Adding expression %s to %s", source.getId(), compile_stack[x].getId()));
                        }
                    }
                }
                compile_stack[x].compile();
                if (LOG.isDebugEnabled()) {
                    LOG.debug(String.format("Successfully compiled %s", compile_stack[x].getId()));
                }
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Finished compilations in " + (DateTime.currentTimeMillis() - intersect_start) + " ms");
            }
            return serialize().addCallback(new CompleteCB()).addErrback(new ErrorCB());
        }
    }
    /**
     * Callback executed after we have resolved the metric, tag names and tag
     * values to their respective UIDs. This callback then runs the actual 
     * queries and fetches their results.
     */
    class BuildCB implements Callback<Deferred<Object>, net.opentsdb.core.Query[]> {

        @Override
        public Deferred<Object> call(final net.opentsdb.core.Query[] queries) {
            final ArrayList<Deferred<DataPoints[]>> deferreds = new ArrayList<Deferred<DataPoints[]>>(queries.length);
            for (final net.opentsdb.core.Query query : queries) {
                deferreds.add(query.runAsync());
            }
            return Deferred.groupInOrder(deferreds).addCallback(new QueriesCB()).addErrback(new ErrorCB());
        }
    }
    // TODO - only run the ones that will be involved in an output. Folks WILL
    // ask for stuff they don't need.... *sigh*
    ts_query.buildQueriesAsync(tsdb).addCallback(new BuildCB()).addErrback(new ErrorCB());
}
Also used: ExpressionIterator (net.opentsdb.query.expression.ExpressionIterator), Query (net.opentsdb.query.pojo.Query), TSQuery (net.opentsdb.core.TSQuery), TSSubQuery (net.opentsdb.core.TSSubQuery), Deferred (com.stumbleupon.async.Deferred), ArrayList (java.util.ArrayList), TopologicalOrderIterator (org.jgrapht.traverse.TopologicalOrderIterator), DataPoints (net.opentsdb.core.DataPoints), TimeSyncedIterator (net.opentsdb.query.expression.TimeSyncedIterator), DirectedAcyclicGraph (org.jgrapht.experimental.dag.DirectedAcyclicGraph), ChannelBuffer (org.jboss.netty.buffer.ChannelBuffer), Entry (java.util.Map.Entry), CycleFoundException (org.jgrapht.experimental.dag.DirectedAcyclicGraph.CycleFoundException), DefaultEdge (org.jgrapht.graph.DefaultEdge), IOException (java.io.IOException), DataPoint (net.opentsdb.core.DataPoint), ExpressionDataPoint (net.opentsdb.query.expression.ExpressionDataPoint), Callback (com.stumbleupon.async.Callback), QueryStats (net.opentsdb.stats.QueryStats), NumericFillPolicy (net.opentsdb.query.expression.NumericFillPolicy)
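
The core of QueriesCB above is the dependency handling for nested expressions: each expression id becomes a vertex, an edge is added from an expression to each variable that is itself an expression, addDagEdge rejects cycles, and the topological order fixes the compile order. Below is a minimal, self-contained sketch of that pattern with the same JGraphT types, assuming the 0.9-era org.jgrapht.experimental.dag API the snippet imports; the expression ids are made up:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph.CycleFoundException;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.TopologicalOrderIterator;

public class ExpressionDagSketch {

    public static void main(final String[] args) throws CycleFoundException {
        final DirectedAcyclicGraph<String, DefaultEdge> graph =
            new DirectedAcyclicGraph<String, DefaultEdge>(DefaultEdge.class);

        // Hypothetical expression ids: "outer" consumes the output of "inner".
        graph.addVertex("outer");
        graph.addVertex("inner");
        // addDagEdge throws CycleFoundException if the edge would create a cycle,
        // which is what the code above turns into an IllegalArgumentException.
        graph.addDagEdge("outer", "inner");

        // The topological order lists dependents before their dependencies here
        // ("outer", then "inner"), so compiling the list in reverse, as the
        // compile_stack loop does, compiles "inner" before "outer".
        final List<String> order = new ArrayList<String>();
        final TopologicalOrderIterator<String, DefaultEdge> it =
            new TopologicalOrderIterator<String, DefaultEdge>(graph);
        while (it.hasNext()) {
            order.add(it.next());
        }
        Collections.reverse(order);
        System.out.println("compile order: " + order);   // [inner, outer]
    }
}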

Example 3 with QueryStats

Use of net.opentsdb.stats.QueryStats in project opentsdb by OpenTSDB.

In class QueryRpc, method handleQuery.

/**
   * Processing for a data point query
   * @param tsdb The TSDB to which we belong
   * @param query The HTTP query to parse and respond to
   * @param allow_expressions Whether or not expressions should be parsed
   * (based on the endpoint)
   */
private void handleQuery(final TSDB tsdb, final HttpQuery query, final boolean allow_expressions) {
    final long start = DateTime.currentTimeMillis();
    final TSQuery data_query;
    final List<ExpressionTree> expressions;
    if (query.method() == HttpMethod.POST) {
        switch(query.apiVersion()) {
            case 0:
            case 1:
                data_query = query.serializer().parseQueryV1();
                break;
            default:
                query_invalid.incrementAndGet();
                throw new BadRequestException(HttpResponseStatus.NOT_IMPLEMENTED, "Requested API version not implemented", "Version " + query.apiVersion() + " is not implemented");
        }
        expressions = null;
    } else {
        expressions = new ArrayList<ExpressionTree>();
        data_query = parseQuery(tsdb, query, expressions);
    }
    if (query.getAPIMethod() == HttpMethod.DELETE && tsdb.getConfig().getBoolean("tsd.http.query.allow_delete")) {
        data_query.setDelete(true);
    }
    // validate and then compile the queries
    try {
        LOG.debug(data_query.toString());
        data_query.validateAndSetQuery();
    } catch (Exception e) {
        throw new BadRequestException(HttpResponseStatus.BAD_REQUEST, e.getMessage(), data_query.toString(), e);
    }
    // if the user tried this query multiple times from the same IP and src port
    // they'll be rejected on subsequent calls
    final QueryStats query_stats = new QueryStats(query.getRemoteAddress(), data_query, query.getPrintableHeaders());
    data_query.setQueryStats(query_stats);
    query.setStats(query_stats);
    final int nqueries = data_query.getQueries().size();
    final ArrayList<DataPoints[]> results = new ArrayList<DataPoints[]>(nqueries);
    final List<Annotation> globals = new ArrayList<Annotation>();
    /** This has to be attached to callbacks or we may never respond to clients */
    class ErrorCB implements Callback<Object, Exception> {

        public Object call(final Exception e) throws Exception {
            Throwable ex = e;
            try {
                LOG.error("Query exception: ", e);
                if (ex instanceof DeferredGroupException) {
                    ex = e.getCause();
                    while (ex != null && ex instanceof DeferredGroupException) {
                        ex = ex.getCause();
                    }
                    if (ex == null) {
                        LOG.error("The deferred group exception didn't have a cause???");
                    }
                }
                if (ex instanceof RpcTimedOutException) {
                    query_stats.markSerialized(HttpResponseStatus.REQUEST_TIMEOUT, ex);
                    query.badRequest(new BadRequestException(HttpResponseStatus.REQUEST_TIMEOUT, ex.getMessage()));
                    query_exceptions.incrementAndGet();
                } else if (ex instanceof HBaseException) {
                    query_stats.markSerialized(HttpResponseStatus.FAILED_DEPENDENCY, ex);
                    query.badRequest(new BadRequestException(HttpResponseStatus.FAILED_DEPENDENCY, ex.getMessage()));
                    query_exceptions.incrementAndGet();
                } else if (ex instanceof QueryException) {
                    query_stats.markSerialized(((QueryException) ex).getStatus(), ex);
                    query.badRequest(new BadRequestException(((QueryException) ex).getStatus(), ex.getMessage()));
                    query_exceptions.incrementAndGet();
                } else if (ex instanceof BadRequestException) {
                    query_stats.markSerialized(((BadRequestException) ex).getStatus(), ex);
                    query.badRequest((BadRequestException) ex);
                    query_invalid.incrementAndGet();
                } else if (ex instanceof NoSuchUniqueName) {
                    query_stats.markSerialized(HttpResponseStatus.BAD_REQUEST, ex);
                    query.badRequest(new BadRequestException(ex));
                    query_invalid.incrementAndGet();
                } else {
                    query_stats.markSerialized(HttpResponseStatus.INTERNAL_SERVER_ERROR, ex);
                    query.badRequest(new BadRequestException(ex));
                    query_exceptions.incrementAndGet();
                }
            } catch (RuntimeException ex2) {
                LOG.error("Exception thrown during exception handling", ex2);
                query_stats.markSerialized(HttpResponseStatus.INTERNAL_SERVER_ERROR, ex2);
                query.sendReply(HttpResponseStatus.INTERNAL_SERVER_ERROR, ex2.getMessage().getBytes());
                query_exceptions.incrementAndGet();
            }
            return null;
        }
    }
    /**
     * After all of the queries have run, we get the results in the order given
     * and dump the results into an array
     */
    class QueriesCB implements Callback<Object, ArrayList<DataPoints[]>> {

        public Object call(final ArrayList<DataPoints[]> query_results) throws Exception {
            if (allow_expressions) {
                // process each of the expressions into a new list, then merge it
                // with the original. This avoids possible recursion loops.
                final List<DataPoints[]> expression_results = new ArrayList<DataPoints[]>(expressions.size());
                // let exceptions bubble up
                for (final ExpressionTree expression : expressions) {
                    expression_results.add(expression.evaluate(query_results));
                }
                results.addAll(expression_results);
            } else {
                results.addAll(query_results);
            }
            /** Simply returns the buffer once serialization is complete and logs it */
            class SendIt implements Callback<Object, ChannelBuffer> {

                public Object call(final ChannelBuffer buffer) throws Exception {
                    query.sendReply(buffer);
                    query_success.incrementAndGet();
                    return null;
                }
            }
            switch(query.apiVersion()) {
                case 0:
                case 1:
                    query.serializer().formatQueryAsyncV1(data_query, results, globals).addCallback(new SendIt()).addErrback(new ErrorCB());
                    break;
                default:
                    query_invalid.incrementAndGet();
                    throw new BadRequestException(HttpResponseStatus.NOT_IMPLEMENTED, "Requested API version not implemented", "Version " + query.apiVersion() + " is not implemented");
            }
            return null;
        }
    }
    /**
     * Callback executed after we have resolved the metric, tag names and tag
     * values to their respective UIDs. This callback then runs the actual 
     * queries and fetches their results.
     */
    class BuildCB implements Callback<Deferred<Object>, Query[]> {

        @Override
        public Deferred<Object> call(final Query[] queries) {
            final ArrayList<Deferred<DataPoints[]>> deferreds = new ArrayList<Deferred<DataPoints[]>>(queries.length);
            for (final Query query : queries) {
                deferreds.add(query.runAsync());
            }
            return Deferred.groupInOrder(deferreds).addCallback(new QueriesCB());
        }
    }
    /** Handles storing the global annotations after fetching them */
    class GlobalCB implements Callback<Object, List<Annotation>> {

        public Object call(final List<Annotation> annotations) throws Exception {
            globals.addAll(annotations);
            return data_query.buildQueriesAsync(tsdb).addCallback(new BuildCB());
        }
    }
    // if global annotations are requested, fetch those first, then build and
    // run the queries when that fetch completes
    if (!data_query.getNoAnnotations() && data_query.getGlobalAnnotations()) {
        Annotation.getGlobalAnnotations(tsdb, data_query.startTime() / 1000, data_query.endTime() / 1000).addCallback(new GlobalCB()).addErrback(new ErrorCB());
    } else {
        data_query.buildQueriesAsync(tsdb).addCallback(new BuildCB()).addErrback(new ErrorCB());
    }
}
Also used: Query (net.opentsdb.core.Query), TSUIDQuery (net.opentsdb.meta.TSUIDQuery), TSQuery (net.opentsdb.core.TSQuery), TSSubQuery (net.opentsdb.core.TSSubQuery), Deferred (com.stumbleupon.async.Deferred), ArrayList (java.util.ArrayList), DataPoints (net.opentsdb.core.DataPoints), ChannelBuffer (org.jboss.netty.buffer.ChannelBuffer), DeferredGroupException (com.stumbleupon.async.DeferredGroupException), ExpressionTree (net.opentsdb.query.expression.ExpressionTree), List (java.util.List), RpcTimedOutException (org.hbase.async.RpcTimedOutException), QueryException (net.opentsdb.core.QueryException), IOException (java.io.IOException), HBaseException (org.hbase.async.HBaseException), IncomingDataPoint (net.opentsdb.core.IncomingDataPoint), Annotation (net.opentsdb.meta.Annotation), Callback (com.stumbleupon.async.Callback), QueryStats (net.opentsdb.stats.QueryStats), NoSuchUniqueName (net.opentsdb.uid.NoSuchUniqueName)
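
One detail of ErrorCB worth isolating is how a DeferredGroupException is peeled back to its root cause before being mapped to an HTTP status. Below is a minimal sketch of that unwrap loop using only the com.stumbleupon.async type imported above; the class and helper names are made up:

import com.stumbleupon.async.DeferredGroupException;

public class CauseUnwrapSketch {

    /** Hypothetical helper: walks past nested DeferredGroupExceptions to the root cause. */
    static Throwable rootCause(final Exception e) {
        Throwable ex = e;
        if (ex instanceof DeferredGroupException) {
            ex = e.getCause();
            while (ex != null && ex instanceof DeferredGroupException) {
                ex = ex.getCause();
            }
            if (ex == null) {
                // mirror the defensive logging in ErrorCB: a group exception
                // should always carry a cause, but fall back to the original
                return e;
            }
        }
        return ex;
    }
}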

Example 4 with QueryStats

Use of net.opentsdb.stats.QueryStats in project opentsdb by OpenTSDB.

In class HttpJsonSerializer, method formatQueryAsyncV1.

/**
   * Format the results from a timeseries data query
   * @param data_query The TSQuery object used to fetch the results
   * @param results The data fetched from storage
   * @param globals An optional list of global annotation objects
   * @return A Deferred<ChannelBuffer> object to pass on to the caller
   * @throws IOException if serialization failed
   * @since 2.2
   */
public Deferred<ChannelBuffer> formatQueryAsyncV1(final TSQuery data_query, final List<DataPoints[]> results, final List<Annotation> globals) throws IOException {
    final long start = DateTime.currentTimeMillis();
    final boolean as_arrays = this.query.hasQueryStringParam("arrays");
    final String jsonp = this.query.getQueryStringParam("jsonp");
    // buffers and an array list to store the deferreds
    final ChannelBuffer response = ChannelBuffers.dynamicBuffer();
    final OutputStream output = new ChannelBufferOutputStream(response);
    // too bad an inner class can't modify a primitive. This is a workaround
    final List<Boolean> timeout_flag = new ArrayList<Boolean>(1);
    timeout_flag.add(false);
    // start with JSONp if we're told to
    if (jsonp != null && !jsonp.isEmpty()) {
        output.write((jsonp + "(").getBytes(query.getCharset()));
    }
    // start the JSON generator and write the opening array
    final JsonGenerator json = JSON.getFactory().createGenerator(output);
    json.writeStartArray();
    /**
     * Every individual data point set (the result of a query and possibly a
     * group by) will initiate an asynchronous metric/tag UID to name resolution
     * and then print to the buffer.
     * NOTE that because this is asynchronous, the order of results is
     * indeterminate.
     */
    class DPsResolver implements Callback<Deferred<Object>, Object> {

        /** Has to be final to be shared with the nested classes */
        final StringBuilder metric = new StringBuilder(256);

        /** Resolved tags */
        final Map<String, String> tags = new HashMap<String, String>();

        /** Resolved aggregated tags */
        final List<String> agg_tags = new ArrayList<String>();

        /** A list storing the metric and tag resolve calls */
        final List<Deferred<Object>> resolve_deferreds = new ArrayList<Deferred<Object>>();

        /** The data points to serialize */
        final DataPoints dps;

        /** Starting time in nanos when we sent the UID resolution queries off */
        long uid_start;

        public DPsResolver(final DataPoints dps) {
            this.dps = dps;
        }

        /** Resolves the metric UID to a name */
        class MetricResolver implements Callback<Object, String> {

            public Object call(final String metric) throws Exception {
                DPsResolver.this.metric.append(metric);
                return null;
            }
        }

        /** Resolves the tag UIDs to a key/value string set */
        class TagResolver implements Callback<Object, Map<String, String>> {

            public Object call(final Map<String, String> tags) throws Exception {
                DPsResolver.this.tags.putAll(tags);
                return null;
            }
        }

        /** Resolves aggregated tags */
        class AggTagResolver implements Callback<Object, List<String>> {

            public Object call(final List<String> tags) throws Exception {
                DPsResolver.this.agg_tags.addAll(tags);
                return null;
            }
        }

        /** After the metric and tags have been resolved, this will print the
       * results to the output buffer in the proper format.
       */
        class WriteToBuffer implements Callback<Object, ArrayList<Object>> {

            final DataPoints dps;

            /**
         * Default ctor that takes a data point set
         * @param dps Datapoints to print
         */
            public WriteToBuffer(final DataPoints dps) {
                this.dps = dps;
            }

            /**
         * Handles writing the data to the output buffer. The results of the
         * deferreds don't matter as they will be stored in the class final
         * variables.
         */
            public Object call(final ArrayList<Object> deferreds) throws Exception {
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.UID_TO_STRING_TIME, (DateTime.nanoTime() - uid_start));
                final long local_serialization_start = DateTime.nanoTime();
                final TSSubQuery orig_query = data_query.getQueries().get(dps.getQueryIndex());
                json.writeStartObject();
                json.writeStringField("metric", metric.toString());
                json.writeFieldName("tags");
                json.writeStartObject();
                if (dps.getTags() != null) {
                    for (Map.Entry<String, String> tag : tags.entrySet()) {
                        json.writeStringField(tag.getKey(), tag.getValue());
                    }
                }
                json.writeEndObject();
                json.writeFieldName("aggregateTags");
                json.writeStartArray();
                if (dps.getAggregatedTags() != null) {
                    for (String atag : agg_tags) {
                        json.writeString(atag);
                    }
                }
                json.writeEndArray();
                if (data_query.getShowQuery()) {
                    json.writeObjectField("query", orig_query);
                }
                if (data_query.getShowTSUIDs()) {
                    json.writeFieldName("tsuids");
                    json.writeStartArray();
                    final List<String> tsuids = dps.getTSUIDs();
                    Collections.sort(tsuids);
                    for (String tsuid : tsuids) {
                        json.writeString(tsuid);
                    }
                    json.writeEndArray();
                }
                if (!data_query.getNoAnnotations()) {
                    final List<Annotation> annotations = dps.getAnnotations();
                    if (annotations != null) {
                        Collections.sort(annotations);
                        json.writeArrayFieldStart("annotations");
                        for (Annotation note : annotations) {
                            json.writeObject(note);
                        }
                        json.writeEndArray();
                    }
                    if (globals != null && !globals.isEmpty()) {
                        Collections.sort(globals);
                        json.writeArrayFieldStart("globalAnnotations");
                        for (Annotation note : globals) {
                            json.writeObject(note);
                        }
                        json.writeEndArray();
                    }
                }
                // now the fun stuff, dump the data and time just the iteration over
                // the data points
                final long dps_start = DateTime.nanoTime();
                json.writeFieldName("dps");
                long counter = 0;
                // default is to write a map, otherwise write arrays
                if (!timeout_flag.get(0) && as_arrays) {
                    json.writeStartArray();
                    for (final DataPoint dp : dps) {
                        if (dp.timestamp() < data_query.startTime() || dp.timestamp() > data_query.endTime()) {
                            continue;
                        }
                        final long timestamp = data_query.getMsResolution() ? dp.timestamp() : dp.timestamp() / 1000;
                        json.writeStartArray();
                        json.writeNumber(timestamp);
                        if (dp.isInteger()) {
                            json.writeNumber(dp.longValue());
                        } else {
                            // Report missing intervals as null or NaN.
                            final double value = dp.doubleValue();
                            if (Double.isNaN(value) && orig_query.fillPolicy() == FillPolicy.NULL) {
                                json.writeNull();
                            } else {
                                json.writeNumber(dp.doubleValue());
                            }
                        }
                        json.writeEndArray();
                        ++counter;
                    }
                    json.writeEndArray();
                } else if (!timeout_flag.get(0)) {
                    json.writeStartObject();
                    for (final DataPoint dp : dps) {
                        if (dp.timestamp() < (data_query.startTime()) || dp.timestamp() > (data_query.endTime())) {
                            continue;
                        }
                        final long timestamp = data_query.getMsResolution() ? dp.timestamp() : dp.timestamp() / 1000;
                        if (dp.isInteger()) {
                            json.writeNumberField(Long.toString(timestamp), dp.longValue());
                        } else {
                            // Report missing intervals as null or NaN.
                            final double value = dp.doubleValue();
                            if (Double.isNaN(value) && orig_query.fillPolicy() == FillPolicy.NULL) {
                                json.writeNumberField(Long.toString(timestamp), null);
                            } else {
                                json.writeNumberField(Long.toString(timestamp), dp.doubleValue());
                            }
                        }
                        ++counter;
                    }
                    json.writeEndObject();
                } else {
                    // skipping data points all together due to timeout
                    json.writeStartObject();
                    json.writeEndObject();
                }
                final long agg_time = DateTime.nanoTime() - dps_start;
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.AGGREGATION_TIME, agg_time);
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.AGGREGATED_SIZE, counter);
                // yeah, it's a little early but we need to dump it out with the results.
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.SERIALIZATION_TIME, DateTime.nanoTime() - local_serialization_start);
                if (!timeout_flag.get(0) && data_query.getShowStats()) {
                    int query_index = (dps == null) ? -1 : dps.getQueryIndex();
                    QueryStats stats = data_query.getQueryStats();
                    if (query_index >= 0) {
                        json.writeFieldName("stats");
                        final Map<String, Object> s = stats.getQueryStats(query_index, false);
                        if (s != null) {
                            json.writeObject(s);
                        } else {
                            json.writeStringField("ERROR", "NO STATS FOUND");
                        }
                    }
                }
                // close the results for this particular query
                json.writeEndObject();
                return null;
            }
        }

        /**
       * When called, initiates a resolution of metric and tag UIDs to names, 
       * then prints to the output buffer once they are completed.
       */
        public Deferred<Object> call(final Object obj) throws Exception {
            this.uid_start = DateTime.nanoTime();
            resolve_deferreds.add(dps.metricNameAsync().addCallback(new MetricResolver()));
            resolve_deferreds.add(dps.getTagsAsync().addCallback(new TagResolver()));
            resolve_deferreds.add(dps.getAggregatedTagsAsync().addCallback(new AggTagResolver()));
            return Deferred.group(resolve_deferreds).addCallback(new WriteToBuffer(dps));
        }
    }
    // We want the serializer to execute serially so we need to create a callback
    // chain so that when one DPsResolver is finished, it triggers the next to
    // start serializing.
    final Deferred<Object> cb_chain = new Deferred<Object>();
    for (DataPoints[] separate_dps : results) {
        for (DataPoints dps : separate_dps) {
            try {
                cb_chain.addCallback(new DPsResolver(dps));
            } catch (Exception e) {
                throw new RuntimeException("Unexpected error durring resolution", e);
            }
        }
    }
    /** Final callback to close out the JSON array and return our results */
    class FinalCB implements Callback<ChannelBuffer, Object> {

        public ChannelBuffer call(final Object obj) throws Exception {
            // Call this here so we roll up sub metrics into a summary. It's not
            // completely accurate, of course, because we still have to write the
            // summary and close the writer. But it's close.
            data_query.getQueryStats().markSerializationSuccessful();
            // TODO - yeah, I've heard this sucks, we need to figure out a better way.
            if (data_query.getShowSummary()) {
                final QueryStats stats = data_query.getQueryStats();
                json.writeStartObject();
                json.writeFieldName("statsSummary");
                json.writeObject(stats.getStats(true, true));
                json.writeEndObject();
            }
            // IMPORTANT: make sure to close the JSON array and the generator
            json.writeEndArray();
            json.close();
            if (jsonp != null && !jsonp.isEmpty()) {
                output.write(")".getBytes());
            }
            return response;
        }
    }
    // trigger the callback chain here
    cb_chain.callback(null);
    return cb_chain.addCallback(new FinalCB());
}
Also used: ChannelBufferOutputStream (org.jboss.netty.buffer.ChannelBufferOutputStream), OutputStream (java.io.OutputStream), Deferred (com.stumbleupon.async.Deferred), ArrayList (java.util.ArrayList), DataPoints (net.opentsdb.core.DataPoints), ChannelBuffer (org.jboss.netty.buffer.ChannelBuffer), DataPoint (net.opentsdb.core.DataPoint), IncomingDataPoint (net.opentsdb.core.IncomingDataPoint), JsonGenerator (com.fasterxml.jackson.core.JsonGenerator), List (java.util.List), TSSubQuery (net.opentsdb.core.TSSubQuery), QueryException (net.opentsdb.core.QueryException), IOException (java.io.IOException), Annotation (net.opentsdb.meta.Annotation), Callback (com.stumbleupon.async.Callback), QueryStats (net.opentsdb.stats.QueryStats), HashMap (java.util.HashMap), Map (java.util.Map), TreeMap (java.util.TreeMap)
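
The serialization above relies on a deliberate trick: a single unfired Deferred (cb_chain) acts as the head of a chain, each DPsResolver is appended as a callback, and only then is the chain fired, so the resolvers serialize one result set at a time instead of racing each other. Below is a minimal sketch of that pattern with plain strings standing in for DataPoints; the names are illustrative:

import java.util.Arrays;
import java.util.List;

import com.stumbleupon.async.Callback;
import com.stumbleupon.async.Deferred;

public class SerialChainSketch {

    public static void main(final String[] args) throws Exception {
        final StringBuilder buffer = new StringBuilder();
        final List<String> work = Arrays.asList("first", "second", "third");

        // Unfired Deferred acting as the head of the chain.
        final Deferred<Object> cb_chain = new Deferred<Object>();
        for (final String item : work) {
            cb_chain.addCallback(new Callback<Object, Object>() {
                @Override
                public Object call(final Object ignored) throws Exception {
                    // Each step runs only after the previous callback returned,
                    // so writes land in the buffer in order.
                    buffer.append(item).append('\n');
                    return null;
                }
            });
        }

        // Final callback returns the accumulated result, like FinalCB returning
        // the ChannelBuffer once the JSON array has been closed.
        final Deferred<String> done = cb_chain.addCallback(
            new Callback<String, Object>() {
                @Override
                public String call(final Object ignored) throws Exception {
                    return buffer.toString();
                }
            });

        cb_chain.callback(null);           // fire the chain
        System.out.print(done.join());     // all callbacks already ran inline
    }
}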

Aggregations

QueryStats (net.opentsdb.stats.QueryStats): 4 uses
Callback (com.stumbleupon.async.Callback): 3 uses
Deferred (com.stumbleupon.async.Deferred): 3 uses
IOException (java.io.IOException): 3 uses
ArrayList (java.util.ArrayList): 3 uses
DataPoints (net.opentsdb.core.DataPoints): 3 uses
TSSubQuery (net.opentsdb.core.TSSubQuery): 3 uses
ChannelBuffer (org.jboss.netty.buffer.ChannelBuffer): 3 uses
List (java.util.List): 2 uses
DataPoint (net.opentsdb.core.DataPoint): 2 uses
IncomingDataPoint (net.opentsdb.core.IncomingDataPoint): 2 uses
QueryException (net.opentsdb.core.QueryException): 2 uses
TSQuery (net.opentsdb.core.TSQuery): 2 uses
Annotation (net.opentsdb.meta.Annotation): 2 uses
JsonGenerator (com.fasterxml.jackson.core.JsonGenerator): 1 use
DeferredGroupException (com.stumbleupon.async.DeferredGroupException): 1 use
OutputStream (java.io.OutputStream): 1 use
HashMap (java.util.HashMap): 1 use
Map (java.util.Map): 1 use
Entry (java.util.Map.Entry): 1 use