
Example 1 with StreamEventData

Use of co.cask.cdap.api.stream.StreamEventData in project cdap by caskdata.

The class StreamEventDataCodec, method decode.

/**
   * Decodes from the given {@link Decoder} to reconstruct a {@link StreamEventData}.
   * The set of headers provided is used as the default set of headers.
   *
   * @param decoder the decoder to read data from
   * @param defaultHeaders A map of headers available by default.
   * @return A new instance of {@link co.cask.cdap.api.stream.StreamEventData}.
   * @throws IOException If there is any IO error during decoding.
   */
public static StreamEventData decode(Decoder decoder, Map<String, String> defaultHeaders) throws IOException {
    // Reads the body
    ByteBuffer body = decoder.readBytes();
    // Reads the number of header entries in the first block
    int len = decoder.readInt();
    // A special optimization for the case where there is no event header.
    if (len == 0) {
        return new StreamEventData(defaultHeaders, body);
    }
    // Start from a copy of the default headers so that decoded headers override defaults with the same key.
    Map<String, String> headers = defaultHeaders.isEmpty() ? Maps.<String, String>newHashMap() : Maps.newHashMap(defaultHeaders);
    // Headers are encoded in blocks; a block length of zero terminates the map.
    do {
        for (int i = 0; i < len; i++) {
            String key = decoder.readString();
            // The value is a nullable string: 0 means a string follows, anything else means the value is null.
            String value = decoder.readInt() == 0 ? decoder.readString() : (String) decoder.readNull();
            headers.put(key, value);
        }
        len = decoder.readInt();
    } while (len != 0);
    return new StreamEventData(headers, body);
}
Also used : StreamEventData(co.cask.cdap.api.stream.StreamEventData) ByteBuffer(java.nio.ByteBuffer)
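
The header-merge behavior of decode can be illustrated in isolation. The sketch below is not part of the CDAP sources; it only mimics the copy-then-overwrite logic from decode above, using the same Guava Maps helper, and the header names "origin" and "content.type" are hypothetical.

import com.google.common.collect.Maps;

import java.util.Map;

public class HeaderMergeSketch {

    public static void main(String[] args) {
        // Default headers supplied by the caller, as in decode(decoder, defaultHeaders).
        Map<String, String> defaultHeaders = Maps.newHashMap();
        defaultHeaders.put("origin", "gateway");
        defaultHeaders.put("content.type", "text/plain");

        // Start from a copy of the defaults, exactly as decode() does when defaultHeaders is non-empty.
        Map<String, String> headers = defaultHeaders.isEmpty()
                ? Maps.<String, String>newHashMap()
                : Maps.newHashMap(defaultHeaders);

        // A header decoded from the stream overrides the default with the same key.
        headers.put("content.type", "application/json");

        // Prints both keys, with content.type now application/json (iteration order may vary).
        System.out.println(headers);
    }
}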

Example 2 with StreamEventData

Use of co.cask.cdap.api.stream.StreamEventData in project cdap by caskdata.

The class ConcurrentStreamWriterTestBase, method createWriterTask.

private Runnable createWriterTask(final StreamId streamId, final ConcurrentStreamWriter streamWriter, final int threadId, final int msgCount, final int batchSize, final CountDownLatch startLatch, final CountDownLatch completion) {
    return new Runnable() {

        @Override
        public void run() {
            try {
                startLatch.await();
                if (batchSize == 1) {
                    // Write events one by one
                    for (int j = 0; j < msgCount; j++) {
                        ByteBuffer body = Charsets.UTF_8.encode("Message " + j + " from " + threadId);
                        streamWriter.enqueue(streamId, ImmutableMap.<String, String>of(), body);
                    }
                } else {
                    // Writes events in batches of the given batch size
                    final AtomicInteger written = new AtomicInteger(0);
                    final MutableStreamEventData data = new MutableStreamEventData();
                    while (written.get() < msgCount) {
                        streamWriter.enqueue(streamId, new AbstractIterator<StreamEventData>() {

                            int count = 0;

                            @Override
                            protected StreamEventData computeNext() {
                                // Keep returning messages until this batch has "batchSize" messages or all messages have been written
                                if (written.get() >= msgCount || count == batchSize) {
                                    return endOfData();
                                }
                                ByteBuffer body = Charsets.UTF_8.encode("Message " + written.get() + " from " + threadId);
                                count++;
                                written.incrementAndGet();
                                return data.setBody(body);
                            }
                        });
                    }
                }
            } catch (Exception e) {
                LOG.error("Failed to write", e);
            } finally {
                completion.countDown();
            }
        }
    };
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) StreamEventData(co.cask.cdap.api.stream.StreamEventData) ByteBuffer(java.nio.ByteBuffer) IOException(java.io.IOException)
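
The batching trick in the test, producing at most batchSize events per enqueue call from a shared counter, can be sketched on its own with Guava's AbstractIterator. The names msgCount, batchSize, and written mirror the test, but the sketch is self-contained, prints strings instead of enqueueing stream events, and is not part of the CDAP sources.

import com.google.common.collect.AbstractIterator;

import java.util.Iterator;
import java.util.concurrent.atomic.AtomicInteger;

public class BatchIteratorSketch {

    public static void main(String[] args) {
        final int msgCount = 10;
        final int batchSize = 4;
        final AtomicInteger written = new AtomicInteger(0);

        // Each pass of the outer loop consumes one bounded batch, just like each enqueue in createWriterTask.
        while (written.get() < msgCount) {
            Iterator<String> batch = new AbstractIterator<String>() {

                int count = 0;

                @Override
                protected String computeNext() {
                    // Stop after batchSize items, or once the overall message count is reached.
                    if (written.get() >= msgCount || count == batchSize) {
                        return endOfData();
                    }
                    count++;
                    return "Message " + written.getAndIncrement();
                }
            };
            while (batch.hasNext()) {
                System.out.println(batch.next());
            }
            // With msgCount = 10 and batchSize = 4 this yields batches of 4, 4, and 2 messages.
            System.out.println("-- end of batch --");
        }
    }
}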

Example 3 with StreamEventData

Use of co.cask.cdap.api.stream.StreamEventData in project cdap by caskdata.

The class StreamEventCodec, method decodePayload.

/**
   * Reverse of {@link #encodePayload(StreamEvent)}.
   *
   * @param payload The byte array containing the queue payload.
   * @return A {@link StreamEvent} reconstructed from payload.
   */
public StreamEvent decodePayload(byte[] payload) {
    ByteBuffer buffer = ByteBuffer.wrap(payload);
    SchemaHash schemaHash = new SchemaHash(buffer);
    Preconditions.checkArgument(schemaHash.equals(STREAM_EVENT_SCHEMA.getSchemaHash()), "Schema from payload not matching StreamEvent schema.");
    Decoder decoder = new BinaryDecoder(new ByteBufferInputStream(buffer));
    try {
        StreamEventData data = StreamEventDataCodec.decode(decoder);
        // Read the timestamp
        long timestamp = decoder.readLong();
        return new StreamEvent(data, timestamp);
    } catch (IOException e) {
        // This should never happen; if it does, something is very wrong.
        throw Throwables.propagate(e);
    }
}
Also used : SchemaHash(co.cask.cdap.api.data.schema.SchemaHash) StreamEvent(co.cask.cdap.api.flow.flowlet.StreamEvent) ByteBufferInputStream(co.cask.common.io.ByteBufferInputStream) IOException(java.io.IOException) BinaryDecoder(co.cask.cdap.common.io.BinaryDecoder) Decoder(co.cask.cdap.common.io.Decoder) StreamEventData(co.cask.cdap.api.stream.StreamEventData) ByteBuffer(java.nio.ByteBuffer)
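
A round trip through the codec can be sketched as follows. This is not taken from the CDAP sources: it assumes StreamEventCodec has a no-argument constructor, that encodePayload(StreamEvent), referenced in the Javadoc above, returns the byte[] that decodePayload accepts, and that the event body is readable via getBody(); the import package for StreamEventCodec and the header name "origin" are likewise assumptions.

import co.cask.cdap.api.flow.flowlet.StreamEvent;
import co.cask.cdap.api.stream.StreamEventData;
import co.cask.cdap.data.stream.StreamEventCodec;  // package is an assumption

import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;

public class CodecRoundTripSketch {

    public static void main(String[] args) {
        StreamEventCodec codec = new StreamEventCodec();  // assumed no-arg constructor

        StreamEventData data = new StreamEventData(
                ImmutableMap.of("origin", "sketch"),        // hypothetical header
                Charsets.UTF_8.encode("hello, stream"));    // event body
        StreamEvent event = new StreamEvent(data, System.currentTimeMillis());

        // Encode to the queue payload and decode it back; the schema hash check in
        // decodePayload() guards against payloads written with a different schema.
        byte[] payload = codec.encodePayload(event);        // assumed return type
        StreamEvent decoded = codec.decodePayload(payload);

        System.out.println(Charsets.UTF_8.decode(decoded.getBody()));  // assumed accessor
    }
}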

Aggregations

StreamEventData (co.cask.cdap.api.stream.StreamEventData) 3
ByteBuffer (java.nio.ByteBuffer) 3
IOException (java.io.IOException) 2
SchemaHash (co.cask.cdap.api.data.schema.SchemaHash) 1
StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent) 1
BinaryDecoder (co.cask.cdap.common.io.BinaryDecoder) 1
Decoder (co.cask.cdap.common.io.Decoder) 1
ByteBufferInputStream (co.cask.common.io.ByteBufferInputStream) 1
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 1