Search in sources:

Example 11 with ChannelBufferOutputStream

use of org.jboss.netty.buffer.ChannelBufferOutputStream in project jstorm by alibaba.

the class MessageBatch method buffer.

/**
 * create a buffer containing the encoding of this batch
 */
@Override
public ChannelBuffer buffer() throws Exception {
    ChannelBufferOutputStream bout = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(encodedLength));
    for (Object msg : msgs) {
        if (msg instanceof TaskMessage) {
            writeTaskMessage(bout, (TaskMessage) msg);
        } else {
            // LOG.debug("Write one non-TaskMessage {}", msg );
            ((ControlMessage) msg).write(bout);
        }
    }
    // add an END_OF_BATCH indicator
    ControlMessage.EOB_MESSAGE.write(bout);
    // LOG.debug("ControlMessage.EOB_MESSAGE " );
    bout.close();
    return bout.buffer();
}
Also used : ChannelBufferOutputStream(org.jboss.netty.buffer.ChannelBufferOutputStream) TaskMessage(backtype.storm.messaging.TaskMessage)

Example 12 with ChannelBufferOutputStream

use of org.jboss.netty.buffer.ChannelBufferOutputStream in project storm by nathanmarz.

the class MessageBatch method buffer.

/**
 * create a buffer containing the encoding of this batch
 */
ChannelBuffer buffer() throws Exception {
    ChannelBufferOutputStream bout = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(encoded_length));
    for (Object msg : msgs) {
        if (msg instanceof TaskMessage) {
            writeTaskMessage(bout, (TaskMessage) msg);
        } else {
            ((ControlMessage) msg).write(bout);
        }
    }
    // add an END_OF_BATCH indicator
    ControlMessage.EOB_MESSAGE.write(bout);
    bout.close();
    return bout.buffer();
}
Also used : ChannelBufferOutputStream(org.jboss.netty.buffer.ChannelBufferOutputStream) TaskMessage(backtype.storm.messaging.TaskMessage)
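
Both MessageBatch implementations (Examples 11 and 12) rely on the same pattern: ChannelBufferOutputStream wraps a pre-sized ChannelBuffer and exposes DataOutput-style primitives, so a frame can be assembled with write*() calls and the backing buffer retrieved afterwards via buffer(). A minimal, hedged sketch of that pattern follows; FrameEncoder and its (id, payload) layout are hypothetical and not code from either project.

import java.io.IOException;

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferOutputStream;
import org.jboss.netty.buffer.ChannelBuffers;

// Hypothetical helper illustrating the shared pattern: pre-size a buffer,
// write primitives through the stream, return the buffer that backs it.
public final class FrameEncoder {

    public static ChannelBuffer encode(short id, byte[] payload) throws IOException {
        int length = 2 + 4 + payload.length; // short id + int length prefix + payload
        ChannelBufferOutputStream out = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(length));
        out.writeShort(id);           // 2 bytes
        out.writeInt(payload.length); // 4 bytes, lets the reader size its array
        out.write(payload);
        out.close();
        return out.buffer();          // the ChannelBuffer the stream wrote into
    }
}

Pre-sizing with directBuffer(length), as both projects do, avoids any reallocation while writing; the cost is that the caller must know the encoded length up front.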

Example 13 with ChannelBufferOutputStream

use of org.jboss.netty.buffer.ChannelBufferOutputStream in project adbcj by mheath.

the class Handler method handleDownstream.

public void handleDownstream(ChannelHandlerContext context, ChannelEvent event) throws Exception {
    if (!(event instanceof MessageEvent)) {
        context.sendDownstream(event);
        return;
    }
    MessageEvent e = (MessageEvent) event;
    if (!(e.getMessage() instanceof ClientRequest)) {
        context.sendDownstream(event);
        return;
    }
    ChannelBuffer buffer = ChannelBuffers.buffer(1024);
    ChannelBufferOutputStream out = new ChannelBufferOutputStream(buffer);
    encoder.encode((ClientRequest) e.getMessage(), out);
    Channels.write(context, e.getFuture(), buffer);
}
Also used : ChannelBufferOutputStream(org.jboss.netty.buffer.ChannelBufferOutputStream) MessageEvent(org.jboss.netty.channel.MessageEvent) ClientRequest(org.adbcj.mysql.codec.ClientRequest) ChannelBuffer(org.jboss.netty.buffer.ChannelBuffer)
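
The adbcj handler allocates a fixed 1024-byte buffer, which is only safe as long as every encoded ClientRequest fits in that capacity; a larger request would fail when the stream writes past the end of the buffer. When the encoded size is not known in advance, a dynamic buffer can grow as the stream writes to it. The sketch below shows that variant under stated assumptions: StreamEncoder and encodeToDynamicBuffer are hypothetical names, not part of adbcj.

import java.io.OutputStream;

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferOutputStream;
import org.jboss.netty.buffer.ChannelBuffers;

// Hypothetical callback type for this sketch; adbcj's real encoder takes
// ClientRequest objects instead.
interface StreamEncoder {
    void encode(OutputStream out) throws Exception;
}

final class DynamicEncoding {

    static ChannelBuffer encodeToDynamicBuffer(StreamEncoder encoder) throws Exception {
        // starts at 1024 bytes but grows automatically if the encoder writes more
        ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(1024);
        ChannelBufferOutputStream out = new ChannelBufferOutputStream(buffer);
        encoder.encode(out);
        out.close();
        return buffer;
    }
}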

Example 14 with ChannelBufferOutputStream

use of org.jboss.netty.buffer.ChannelBufferOutputStream in project opentsdb by OpenTSDB.

the class HttpJsonSerializer method formatQueryAsyncV1.

/**
 * Format the results from a timeseries data query
 * @param data_query The TSQuery object used to fetch the results
 * @param results The data fetched from storage
 * @param globals An optional list of global annotation objects
 * @return A Deferred<ChannelBuffer> object to pass on to the caller
 * @throws IOException if serialization failed
 * @since 2.2
 */
public Deferred<ChannelBuffer> formatQueryAsyncV1(final TSQuery data_query, final List<DataPoints[]> results, final List<Annotation> globals) throws IOException {
    final long start = DateTime.currentTimeMillis();
    final boolean as_arrays = this.query.hasQueryStringParam("arrays");
    final String jsonp = this.query.getQueryStringParam("jsonp");
    // buffers and an array list to store the deferreds
    final ChannelBuffer response = ChannelBuffers.dynamicBuffer();
    final OutputStream output = new ChannelBufferOutputStream(response);
    // too bad an inner class can't modify a primitive. This is a workaround
    final List<Boolean> timeout_flag = new ArrayList<Boolean>(1);
    timeout_flag.add(false);
    // start with JSONp if we're told to
    if (jsonp != null && !jsonp.isEmpty()) {
        output.write((jsonp + "(").getBytes(query.getCharset()));
    }
    // start the JSON generator and write the opening array
    final JsonGenerator json = JSON.getFactory().createGenerator(output);
    json.writeStartArray();
    /**
     * Every individual data point set (the result of a query and possibly a
     * group by) will initiate an asynchronous metric/tag UID to name resolution
     * and then print to the buffer.
     * NOTE that because this is asynchronous, the order of results is
     * indeterminate.
     */
    class DPsResolver implements Callback<Deferred<Object>, Object> {

        /**
         * Has to be final to be shared with the nested classes
         */
        final StringBuilder metric = new StringBuilder(256);

        /**
         * Resolved tags
         */
        final Map<String, String> tags = new HashMap<String, String>();

        /**
         * Resolved aggregated tags
         */
        final List<String> agg_tags = new ArrayList<String>();

        /**
         * A list storing the metric and tag resolve calls
         */
        final List<Deferred<Object>> resolve_deferreds = new ArrayList<Deferred<Object>>();

        /**
         * The data points to serialize
         */
        final DataPoints dps;

        /**
         * Starting time in nanos when we sent the UID resolution queries off
         */
        long uid_start;

        public DPsResolver(final DataPoints dps) {
            this.dps = dps;
        }

        /**
         * Resolves the metric UID to a name
         */
        class MetricResolver implements Callback<Object, String> {

            public Object call(final String metric) throws Exception {
                DPsResolver.this.metric.append(metric);
                return null;
            }
        }

        /**
         * Resolves the tag UIDs to a key/value string set
         */
        class TagResolver implements Callback<Object, Map<String, String>> {

            public Object call(final Map<String, String> tags) throws Exception {
                DPsResolver.this.tags.putAll(tags);
                return null;
            }
        }

        /**
         * Resolves aggregated tags
         */
        class AggTagResolver implements Callback<Object, List<String>> {

            public Object call(final List<String> tags) throws Exception {
                DPsResolver.this.agg_tags.addAll(tags);
                return null;
            }
        }

        /**
         * After the metric and tags have been resolved, this will print the
         * results to the output buffer in the proper format.
         */
        class WriteToBuffer implements Callback<Object, ArrayList<Object>> {

            final DataPoints dps;

            /**
             * Default ctor that takes a data point set
             * @param dps Datapoints to print
             */
            public WriteToBuffer(final DataPoints dps) {
                this.dps = dps;
            }

            /**
             * Handles writing the data to the output buffer. The results of the
             * deferreds don't matter as they will be stored in the class final
             * variables.
             */
            public Object call(final ArrayList<Object> deferreds) throws Exception {
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.UID_TO_STRING_TIME, (DateTime.nanoTime() - uid_start));
                final long local_serialization_start = DateTime.nanoTime();
                final TSSubQuery orig_query = data_query.getQueries().get(dps.getQueryIndex());
                json.writeStartObject();
                json.writeStringField("metric", metric.toString());
                json.writeFieldName("tags");
                json.writeStartObject();
                if (dps.getTags() != null) {
                    for (Map.Entry<String, String> tag : tags.entrySet()) {
                        json.writeStringField(tag.getKey(), tag.getValue());
                    }
                }
                json.writeEndObject();
                json.writeFieldName("aggregateTags");
                json.writeStartArray();
                if (dps.getAggregatedTags() != null) {
                    for (String atag : agg_tags) {
                        json.writeString(atag);
                    }
                }
                json.writeEndArray();
                if (data_query.getShowQuery()) {
                    json.writeObjectField("query", orig_query);
                }
                if (data_query.getShowTSUIDs()) {
                    json.writeFieldName("tsuids");
                    json.writeStartArray();
                    final List<String> tsuids = dps.getTSUIDs();
                    Collections.sort(tsuids);
                    for (String tsuid : tsuids) {
                        json.writeString(tsuid);
                    }
                    json.writeEndArray();
                }
                if (!data_query.getNoAnnotations()) {
                    final List<Annotation> annotations = dps.getAnnotations();
                    if (annotations != null) {
                        Collections.sort(annotations);
                        json.writeArrayFieldStart("annotations");
                        for (Annotation note : annotations) {
                            long ts = note.getStartTime();
                            if ((ts & Const.SECOND_MASK) == 0) {
                                ts *= 1000;
                            }
                            if (ts < data_query.startTime() || ts > data_query.endTime()) {
                                continue;
                            }
                            json.writeObject(note);
                        }
                        json.writeEndArray();
                    }
                    if (globals != null && !globals.isEmpty()) {
                        Collections.sort(globals);
                        json.writeArrayFieldStart("globalAnnotations");
                        for (Annotation note : globals) {
                            long ts = note.getStartTime();
                            if ((ts & Const.SECOND_MASK) == 0) {
                                ts *= 1000;
                            }
                            if (ts < data_query.startTime() || ts > data_query.endTime()) {
                                continue;
                            }
                            json.writeObject(note);
                        }
                        json.writeEndArray();
                    }
                }
                // now the fun stuff, dump the data and time just the iteration over
                // the data points
                final long dps_start = DateTime.nanoTime();
                json.writeFieldName("dps");
                long counter = 0;
                // default is to write a map, otherwise write arrays
                if (!timeout_flag.get(0) && as_arrays) {
                    json.writeStartArray();
                    for (final DataPoint dp : dps) {
                        if (dp.timestamp() < data_query.startTime() || dp.timestamp() > data_query.endTime()) {
                            continue;
                        }
                        final long timestamp = data_query.getMsResolution() ? dp.timestamp() : dp.timestamp() / 1000;
                        json.writeStartArray();
                        json.writeNumber(timestamp);
                        if (dp.isInteger()) {
                            json.writeNumber(dp.longValue());
                        } else {
                            // Report missing intervals as null or NaN.
                            final double value = dp.doubleValue();
                            if (Double.isNaN(value) && orig_query.fillPolicy() == FillPolicy.NULL) {
                                json.writeNull();
                            } else {
                                json.writeNumber(dp.doubleValue());
                            }
                        }
                        json.writeEndArray();
                        ++counter;
                    }
                    json.writeEndArray();
                } else if (!timeout_flag.get(0)) {
                    json.writeStartObject();
                    for (final DataPoint dp : dps) {
                        if (dp.timestamp() < (data_query.startTime()) || dp.timestamp() > (data_query.endTime())) {
                            continue;
                        }
                        final long timestamp = data_query.getMsResolution() ? dp.timestamp() : dp.timestamp() / 1000;
                        if (dp.isInteger()) {
                            json.writeNumberField(Long.toString(timestamp), dp.longValue());
                        } else {
                            // Report missing intervals as null or NaN.
                            final double value = dp.doubleValue();
                            if (Double.isNaN(value) && orig_query.fillPolicy() == FillPolicy.NULL) {
                                json.writeNumberField(Long.toString(timestamp), null);
                            } else {
                                json.writeNumberField(Long.toString(timestamp), dp.doubleValue());
                            }
                        }
                        ++counter;
                    }
                    json.writeEndObject();
                } else {
                    // skipping data points all together due to timeout
                    json.writeStartObject();
                    json.writeEndObject();
                }
                final long agg_time = DateTime.nanoTime() - dps_start;
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.AGGREGATION_TIME, agg_time);
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.AGGREGATED_SIZE, counter);
                // yeah, it's a little early but we need to dump it out with the results.
                data_query.getQueryStats().addStat(dps.getQueryIndex(), QueryStat.SERIALIZATION_TIME, DateTime.nanoTime() - local_serialization_start);
                if (!timeout_flag.get(0) && data_query.getShowStats()) {
                    int query_index = (dps == null) ? -1 : dps.getQueryIndex();
                    QueryStats stats = data_query.getQueryStats();
                    if (query_index >= 0) {
                        json.writeFieldName("stats");
                        final Map<String, Object> s = stats.getQueryStats(query_index, false);
                        if (s != null) {
                            json.writeObject(s);
                        } else {
                            json.writeStringField("ERROR", "NO STATS FOUND");
                        }
                    }
                }
                // close the results for this particular query
                json.writeEndObject();
                return null;
            }
        }

        /**
         * When called, initiates a resolution of metric and tag UIDs to names,
         * then prints to the output buffer once they are completed.
         */
        public Deferred<Object> call(final Object obj) throws Exception {
            this.uid_start = DateTime.nanoTime();
            resolve_deferreds.add(dps.metricNameAsync().addCallback(new MetricResolver()));
            resolve_deferreds.add(dps.getTagsAsync().addCallback(new TagResolver()));
            resolve_deferreds.add(dps.getAggregatedTagsAsync().addCallback(new AggTagResolver()));
            return Deferred.group(resolve_deferreds).addCallback(new WriteToBuffer(dps));
        }
    }
    // We want the serializer to execute serially so we need to create a callback
    // chain so that when one DPsResolver is finished, it triggers the next to
    // start serializing.
    final Deferred<Object> cb_chain = new Deferred<Object>();
    for (DataPoints[] separate_dps : results) {
        for (DataPoints dps : separate_dps) {
            try {
                cb_chain.addCallback(new DPsResolver(dps));
            } catch (Exception e) {
                throw new RuntimeException("Unexpected error during resolution", e);
            }
        }
    }
    /**
     * Final callback to close out the JSON array and return our results
     */
    class FinalCB implements Callback<ChannelBuffer, Object> {

        public ChannelBuffer call(final Object obj) throws Exception {
            // Call this here so we rollup sub metrics into a summary. It's not
            // completely accurate, of course, because we still have to write the
            // summary and close the writer. But it's close.
            data_query.getQueryStats().markSerializationSuccessful();
            // TODO - yeah, I've heard this sucks, we need to figure out a better way.
            if (data_query.getShowSummary()) {
                final QueryStats stats = data_query.getQueryStats();
                json.writeStartObject();
                json.writeFieldName("statsSummary");
                json.writeObject(stats.getStats(true, true));
                json.writeEndObject();
            }
            // IMPORTANT Make sure to close the JSON array and the generator
            json.writeEndArray();
            json.close();
            if (jsonp != null && !jsonp.isEmpty()) {
                output.write(")".getBytes());
            }
            return response;
        }
    }
    // trigger the callback chain here
    cb_chain.callback(null);
    return cb_chain.addCallback(new FinalCB());
}
Also used : ChannelBufferOutputStream(org.jboss.netty.buffer.ChannelBufferOutputStream) OutputStream(java.io.OutputStream) Deferred(com.stumbleupon.async.Deferred) ArrayList(java.util.ArrayList) DataPoints(net.opentsdb.core.DataPoints) ChannelBuffer(org.jboss.netty.buffer.ChannelBuffer) DataPoint(net.opentsdb.core.DataPoint) RollUpDataPoint(net.opentsdb.rollup.RollUpDataPoint) IncomingDataPoint(net.opentsdb.core.IncomingDataPoint) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) ArrayList(java.util.ArrayList) List(java.util.List) ChannelBufferOutputStream(org.jboss.netty.buffer.ChannelBufferOutputStream) TSSubQuery(net.opentsdb.core.TSSubQuery) QueryException(net.opentsdb.core.QueryException) IOException(java.io.IOException) Annotation(net.opentsdb.meta.Annotation) Callback(com.stumbleupon.async.Callback) QueryStats(net.opentsdb.stats.QueryStats) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap)
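
The OpenTSDB serializer shows the main reason to combine the two classes: a Jackson JsonGenerator writes directly into a growing ChannelBuffer through a ChannelBufferOutputStream, so the JSON response is never materialized as an intermediate String or byte array before it is handed to Netty. A reduced, hedged sketch of that streaming setup follows; it uses a plain JsonFactory instead of OpenTSDB's internal JSON helper, and the data shape is hypothetical.

import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferOutputStream;
import org.jboss.netty.buffer.ChannelBuffers;

final class JsonResponseSketch {

    // Stream a set of (metric -> value) entries straight into a ChannelBuffer;
    // the buffer can then be written to the channel without another copy.
    static ChannelBuffer serialize(Map<String, Double> points) throws IOException {
        ChannelBuffer response = ChannelBuffers.dynamicBuffer();
        OutputStream output = new ChannelBufferOutputStream(response);
        JsonGenerator json = new JsonFactory().createGenerator(output);
        json.writeStartArray();
        for (Map.Entry<String, Double> point : points.entrySet()) {
            json.writeStartObject();
            json.writeStringField("metric", point.getKey());
            json.writeNumberField("value", point.getValue());
            json.writeEndObject();
        }
        json.writeEndArray();
        json.close(); // flushes the generator's internal buffer into the ChannelBuffer
        return response;
    }
}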

Example 15 with ChannelBufferOutputStream

use of org.jboss.netty.buffer.ChannelBufferOutputStream in project jstorm by alibaba.

the class TaskMessage method buffer.

/**
 * create a buffer containing the encoding of this message
 */
@Override
public ChannelBuffer buffer() throws Exception {
    int payloadLen = 0;
    if (_message != null)
        payloadLen = _message.length;
    // 8-byte header: type (short) + task id (short) + payload length (int)
    int totalLen = 8 + payloadLen;
    ChannelBufferOutputStream bout = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(totalLen));
    bout.writeShort(_type);
    if (_task > Short.MAX_VALUE)
        throw new RuntimeException("Task ID should not exceed " + Short.MAX_VALUE);
    bout.writeShort((short) _task);
    bout.writeInt(payloadLen);
    if (payloadLen > 0)
        bout.write(_message);
    bout.close();
    return bout.buffer();
}
Also used : ChannelBufferOutputStream(org.jboss.netty.buffer.ChannelBufferOutputStream)
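
The fixed 8-byte header written here (type, task id, payload length) can be read back with the stream's counterpart, ChannelBufferInputStream, which implements DataInput. The sketch below is a hedged illustration of such a reader, not jstorm's actual decoder; TaskMessageReader is a hypothetical name.

import java.io.IOException;

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferInputStream;

// Hypothetical reader for the frame layout produced above.
final class TaskMessageReader {

    static byte[] readPayload(ChannelBuffer buffer) throws IOException {
        ChannelBufferInputStream in = new ChannelBufferInputStream(buffer);
        short type = in.readShort();   // mirrors bout.writeShort(_type)
        short task = in.readShort();   // mirrors bout.writeShort((short) _task)
        int payloadLen = in.readInt(); // mirrors bout.writeInt(payloadLen)
        byte[] payload = new byte[payloadLen];
        if (payloadLen > 0) {
            in.readFully(payload);
        }
        return payload;
    }
}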

Aggregations

ChannelBufferOutputStream (org.jboss.netty.buffer.ChannelBufferOutputStream) 19
ChannelBuffer (org.jboss.netty.buffer.ChannelBuffer) 12
IOException (java.io.IOException) 5
OutputStream (java.io.OutputStream) 3
TaskMessage (backtype.storm.messaging.TaskMessage) 2
JsonGenerator (com.fasterxml.jackson.core.JsonGenerator) 2
Callback (com.stumbleupon.async.Callback) 2
Deferred (com.stumbleupon.async.Deferred) 2
PrintWriter (java.io.PrintWriter) 2
ClosedChannelException (java.nio.channels.ClosedChannelException) 2
ArrayList (java.util.ArrayList) 2
TSSubQuery (net.opentsdb.core.TSSubQuery) 2
MessageEvent (org.jboss.netty.channel.MessageEvent) 2
DbusEventV1Factory (com.linkedin.databus.core.DbusEventV1Factory) 1
StreamEventsArgs (com.linkedin.databus.core.StreamEventsArgs) 1
HttpException (com.nabalive.framework.web.exception.HttpException) 1
WritableByteChannel (java.nio.channels.WritableByteChannel) 1
HashMap (java.util.HashMap) 1
List (java.util.List) 1
Map (java.util.Map) 1