Example 1 with Slice

Use of com.datatorrent.netlet.util.Slice in project apex-core by apache.

From the class BufferServerSubscriberTest, method testEmergencySinks:

@Test
public void testEmergencySinks() throws InterruptedException {
    final List<Object> list = new ArrayList<>();
    final StreamCodec<Object> myserde = new StreamCodec<Object>() {

        @Override
        public Object fromByteArray(Slice fragment) {
            if (fragment.offset == 0 && fragment.length == fragment.buffer.length) {
                return fragment.buffer;
            } else {
                return Arrays.copyOfRange(fragment.buffer, fragment.offset, fragment.offset + fragment.length);
            }
        }

        @Override
        public Slice toByteArray(Object o) {
            return new Slice((byte[]) o, 0, ((byte[]) o).length);
        }

        @Override
        public int getPartition(Object o) {
            return 0;
        }
    };
    Sink<Object> unbufferedSink = new Sink<Object>() {

        @Override
        public void put(Object tuple) {
            list.add(tuple);
        }

        @Override
        public int getCount(boolean reset) {
            return 0;
        }
    };
    BufferServerSubscriber bss = new BufferServerSubscriber("subscriber", 5) {

        {
            serde = myserde;
        }

        @Override
        public void suspendRead() {
            logger.debug("read suspended");
        }

        @Override
        public void resumeRead() {
            logger.debug("read resumed");
        }
    };
    SweepableReservoir reservoir = bss.acquireReservoir("unbufferedSink", 3);
    reservoir.setSink(unbufferedSink);
    int i = 0;
    while (i++ < 10) {
        Slice fragment = myserde.toByteArray(new byte[] { (byte) i });
        byte[] buffer = PayloadTuple.getSerializedTuple(myserde.getPartition(i), fragment);
        bss.onMessage(buffer, 0, buffer.length);
    }
    reservoir.sweep(); /* 4 make it to the reservoir */
    reservoir.sweep(); /* we consume the 4; and 4 more make it to the reservoir */
    Assert.assertEquals("4 received", 4, list.size());
    reservoir.sweep(); /* 8 consumed + 2 more make it to the reservoir */
    reservoir.sweep(); /* consume 2 more */
    Assert.assertEquals("10 received", 10, list.size());
}
Also used: SweepableReservoir (com.datatorrent.stram.engine.SweepableReservoir), Sink (com.datatorrent.api.Sink), Slice (com.datatorrent.netlet.util.Slice), ArrayList (java.util.ArrayList), StreamCodec (com.datatorrent.api.StreamCodec), Test (org.junit.Test)
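
The codec above returns the backing array directly when the Slice spans it completely, and copies only the referenced sub-range otherwise. A minimal stand-alone sketch of that zero-copy decision, using only the Slice fields shown above (the class and method names here are illustrative, not part of apex-core):

import java.util.Arrays;
import com.datatorrent.netlet.util.Slice;

public class SliceCopyDemo {

    // Return the bytes the Slice refers to, copying only when it is a sub-range.
    static byte[] toBytes(Slice fragment) {
        if (fragment.offset == 0 && fragment.length == fragment.buffer.length) {
            // The Slice covers the whole backing array: hand it back without copying.
            return fragment.buffer;
        }
        return Arrays.copyOfRange(fragment.buffer, fragment.offset, fragment.offset + fragment.length);
    }

    public static void main(String[] args) {
        byte[] payload = { 1, 2, 3, 4, 5 };
        Slice whole = new Slice(payload, 0, payload.length);
        Slice middle = new Slice(payload, 1, 3);
        // true: same array, no copy
        System.out.println(toBytes(whole) == payload);
        // [2, 3, 4]: only the referenced window is copied out
        System.out.println(Arrays.toString(toBytes(middle)));
    }
}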

Example 2 with Slice

Use of com.datatorrent.netlet.util.Slice in project apex-core by apache.

From the class BufferServerSubscriber, method onMessage:

@Override
public void onMessage(byte[] buffer, int offset, int length) {
    Slice f;
    if (freeFragments.isEmpty()) {
        f = new Slice(buffer, offset, length);
    } else {
        f = freeFragments.pollUnsafe();
        f.buffer = buffer;
        f.offset = offset;
        f.length = length;
    }
    if (!offeredFragments.offer(f)) {
        synchronized (backlog) {
            if (!suspended) {
                suspendRead();
                suspended = true;
            }
            int newsize = offeredFragments.capacity() == MAX_SENDBUFFER_SIZE ? offeredFragments.capacity() : offeredFragments.capacity() << 1;
            backlog.add(offeredFragments = new CircularBuffer<>(newsize));
            offeredFragments.add(f);
        }
    }
}
Also used: CircularBuffer (com.datatorrent.netlet.util.CircularBuffer), Slice (com.datatorrent.netlet.util.Slice)
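
When the current buffer rejects the fragment, onMessage suspends reading, switches to a larger buffer whose capacity doubles until MAX_SENDBUFFER_SIZE is reached, and queues the new buffer on the backlog. A simplified sketch of that grow-by-doubling idea, using java.util.ArrayDeque as a stand-in for CircularBuffer (class name, initial capacity, and the cap value are assumptions for illustration):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;

// Illustrative only: models the "switch to a bigger buffer and queue it" idea from onMessage.
public class BacklogSketch<T> {

    // Stand-in for MAX_SENDBUFFER_SIZE; the real constant lives in BufferServerSubscriber.
    static final int MAX_CAPACITY = 64 * 1024;

    private final List<ArrayDeque<T>> backlog = new ArrayList<>();
    private ArrayDeque<T> offeredFragments = new ArrayDeque<>();
    private int capacity = 1024;
    private boolean suspended;

    public synchronized void offer(T fragment) {
        if (offeredFragments.size() < capacity) {
            offeredFragments.add(fragment);
            return;
        }
        // Current buffer is full: pause intake (the real code calls suspendRead())
        // and start a new buffer, doubling its capacity until the cap is reached.
        suspended = true;
        capacity = capacity == MAX_CAPACITY ? capacity : capacity << 1;
        offeredFragments = new ArrayDeque<>(capacity);
        backlog.add(offeredFragments);
        offeredFragments.add(fragment);
    }
}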

Example 3 with Slice

Use of com.datatorrent.netlet.util.Slice in project apex-core by apache.

From the class DefaultStatefulStreamCodec, method toDataStatePair:

@Override
public DataStatePair toDataStatePair(T o) {
    data.setPosition(0);
    writeClassAndObject(data, o);
    if (!pairs.isEmpty()) {
        state.setPosition(0);
        for (ClassIdPair cip : pairs) {
            writeClassAndObject(state, cip);
        }
        pairs.clear();
        dataStatePair.state = new Slice(state.getBuffer(), 0, state.position());
    } else {
        dataStatePair.state = null;
    }
    dataStatePair.data = new Slice(data.getBuffer(), 0, data.position());
    return dataStatePair;
}
Also used: Slice (com.datatorrent.netlet.util.Slice)
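
The codec reuses a fixed Kryo Output as a scratch buffer and then wraps the bytes written so far in a Slice that points at the Output's backing array up to its current position, so no copy is made. A small hedged sketch of just that wrapping step, assuming the Kryo version bundled with apex-core (class name and buffer size are arbitrary):

import com.datatorrent.netlet.util.Slice;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Output;

public class OutputToSliceDemo {

    public static void main(String[] args) {
        Kryo kryo = new Kryo();
        // Fixed-size scratch buffer; 4096 is an arbitrary size for this sketch.
        Output data = new Output(4096);
        // Reuse the buffer from the start, as the codec does before each tuple.
        data.setPosition(0);
        kryo.writeClassAndObject(data, "hello");
        // Wrap the written prefix of the backing array; no copy is made.
        Slice slice = new Slice(data.getBuffer(), 0, data.position());
        System.out.println(slice.length + " bytes serialized");
    }
}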

Example 4 with Slice

Use of com.datatorrent.netlet.util.Slice in project apex-core by apache.

From the class FSEventRecorder, method writeEvent:

public void writeEvent(StramEvent event) throws Exception {
    LOG.debug("Writing event {} to the storage", event.getType());
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    bos.write((event.getTimestamp() + ":").getBytes());
    bos.write((event.getType() + ":").getBytes());
    @SuppressWarnings("unchecked") Map<String, String> data = BeanUtils.describe(event);
    data.remove("timestamp");
    data.remove("class");
    data.remove("type");
    Slice f = streamCodec.toByteArray(data);
    bos.write(f.buffer, f.offset, f.length);
    bos.write("\n".getBytes());
    storage.writeDataItem(bos.toByteArray(), true);
    if (numSubscribers > 0) {
        LOG.debug("Publishing event {} through websocket to gateway", event.getType());
        EventsAgent.EventInfo eventInfo = new EventsAgent.EventInfo();
        eventInfo.id = event.getId();
        eventInfo.timestamp = event.getTimestamp();
        eventInfo.type = event.getType();
        eventInfo.data = data;
        eventInfo.data.remove("id");
        wsClient.publish(pubSubTopic, eventInfo);
    }
}
Also used: Slice (com.datatorrent.netlet.util.Slice), ByteArrayOutputStream (java.io.ByteArrayOutputStream), EventsAgent (com.datatorrent.stram.client.EventsAgent)
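
Because a Slice is only a window into a possibly larger buffer, the recorder writes exactly f.buffer from f.offset for f.length bytes, framed by a prefix and a trailing newline. A compact sketch of that write path (the helper name and sample values are illustrative):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import com.datatorrent.netlet.util.Slice;

public class SliceWriteDemo {

    // Write one newline-terminated record: "<prefix>:" followed by the Slice payload.
    static void writeRecord(OutputStream out, String prefix, Slice f) throws IOException {
        out.write((prefix + ":").getBytes(StandardCharsets.UTF_8));
        // Only the referenced window of the backing buffer is written.
        out.write(f.buffer, f.offset, f.length);
        out.write('\n');
    }

    public static void main(String[] args) throws IOException {
        byte[] backing = "xxxpayloadxxx".getBytes(StandardCharsets.UTF_8);
        // A view on the middle of a larger buffer, as a pooled Slice typically is.
        Slice f = new Slice(backing, 3, 7);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        writeRecord(bos, "1597000000000", f);
        System.out.print(bos.toString(StandardCharsets.UTF_8.name()));
    }
}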

Example 5 with Slice

Use of com.datatorrent.netlet.util.Slice in project apex-core by apache.

From the class FSStatsRecorder, method recordContainers:

@Override
public void recordContainers(Map<String, StreamingContainerAgent> containerMap, long timestamp) throws IOException {
    for (Map.Entry<String, StreamingContainerAgent> entry : containerMap.entrySet()) {
        StreamingContainerAgent sca = entry.getValue();
        ContainerInfo containerInfo = sca.getContainerInfo();
        if (!containerInfo.state.equals("ACTIVE")) {
            continue;
        }
        int containerIndex;
        if (!knownContainers.containsKey(entry.getKey())) {
            containerIndex = knownContainers.size();
            knownContainers.put(entry.getKey(), containerIndex);
            Map<String, Object> fieldMap = extractRecordFields(containerInfo, "meta");
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            Slice f = streamCodec.toByteArray(fieldMap);
            bos.write((String.valueOf(containerIndex) + ":").getBytes());
            bos.write(f.buffer, f.offset, f.length);
            bos.write("\n".getBytes());
            queue.add(new WriteOperation(containersStorage, bos.toByteArray(), true));
        } else {
            containerIndex = knownContainers.get(entry.getKey());
        }
        Map<String, Object> fieldMap = extractRecordFields(containerInfo, "stats");
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Slice f = streamCodec.toByteArray(fieldMap);
        bos.write((String.valueOf(containerIndex) + ":").getBytes());
        bos.write((String.valueOf(timestamp) + ":").getBytes());
        bos.write(f.buffer, f.offset, f.length);
        bos.write("\n".getBytes());
        queue.add(new WriteOperation(containersStorage, bos.toByteArray(), false));
    }
}
Also used: Slice (com.datatorrent.netlet.util.Slice), ContainerInfo (com.datatorrent.stram.webapp.ContainerInfo), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), HashMap (java.util.HashMap), Map (java.util.Map)
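
Each container id is assigned a stable numeric index the first time it is seen; the metadata record is written only then, while later stats records reuse the cached index. A minimal sketch of that first-seen indexing with a plain HashMap (class and method names are illustrative):

import java.util.HashMap;
import java.util.Map;

public class ContainerIndexDemo {

    private static final Map<String, Integer> knownContainers = new HashMap<>();

    // Return the existing index for the id, or assign the next free one on first sight.
    static int indexFor(String containerId) {
        return knownContainers.computeIfAbsent(containerId, id -> knownContainers.size());
    }

    public static void main(String[] args) {
        System.out.println(indexFor("container_01")); // 0 (new: this is when the meta record is written)
        System.out.println(indexFor("container_02")); // 1
        System.out.println(indexFor("container_01")); // 0 (already known: stats record only)
    }
}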

Aggregations

Slice (com.datatorrent.netlet.util.Slice): 114
Test (org.junit.Test): 65
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 10
Input (com.esotericsoftware.kryo.io.Input): 9
IOException (java.io.IOException): 6
Map (java.util.Map): 5
ArrayList (java.util.ArrayList): 4
HashMap (java.util.HashMap): 4
CountDownLatch (java.util.concurrent.CountDownLatch): 4
BufferSlice (org.apache.apex.malhar.lib.utils.serde.BufferSlice): 4
Path (org.apache.hadoop.fs.Path): 4
ObjectMapperString (com.datatorrent.common.util.ObjectMapperString): 3
ExecutionException (java.util.concurrent.ExecutionException): 3
SerializationBuffer (org.apache.apex.malhar.lib.utils.serde.SerializationBuffer): 3
StringSerde (org.apache.apex.malhar.lib.utils.serde.StringSerde): 3
Attribute (com.datatorrent.api.Attribute): 2
OperatorContext (com.datatorrent.api.Context.OperatorContext): 2
Output (com.esotericsoftware.kryo.io.Output): 2
RandomAccessFile (java.io.RandomAccessFile): 2
Serializable (java.io.Serializable): 2