Search in sources :

Example 1 with GridUnsafeDataInput

Use of org.apache.ignite.internal.util.io.GridUnsafeDataInput in the Apache Ignite project.

From the class HadoopConcurrentHashMultimapSelftest, method check:

/**
 * Checks multimap contents against expectations: first by iterating the task input,
 * then by visiting raw off-heap key/value bytes.
 *
 * @param m Multimap under test.
 * @param mm Multimap holding the expected key/value pairs.
 * @param vis Multimap that collects the visitor's results.
 * @param taskCtx Task context used to open the input.
 * @throws Exception On error.
 */
private void check(HadoopConcurrentHashMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis, HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    try {
        Map<Integer, Collection<Integer>> mmm = mm.asMap();
        int keys = 0;
        while (in.next()) {
            keys++;
            IntWritable k = (IntWritable) in.key();
            assertNotNull(k);
            // addFirst reverses the iteration order of values — presumably they come back LIFO;
            // the comparison against 'exp' below confirms the restored order matches insertion order.
            Deque<Integer> vs = new LinkedList<>();
            Iterator<?> it = in.values();
            while (it.hasNext())
                vs.addFirst(((IntWritable) it.next()).get());
            Collection<Integer> exp = mmm.get(k.get());
            assertEquals(exp, vs);
        }
        assertEquals(mmm.size(), keys);
        assertEquals(m.keys(), keys);
        X.println("keys: " + keys + " cap: " + m.capacity());
        // Check visitor: read each 4-byte key/value directly from off-heap memory.
        final byte[] buf = new byte[4];
        final GridDataInput dataInput = new GridUnsafeDataInput();
        m.visit(false, new HadoopConcurrentHashMultimap.Visitor() {

            /** Reusable key holder, populated by the latest onKey() call. */
            IntWritable key = new IntWritable();

            /** Reusable value holder. */
            IntWritable val = new IntWritable();

            @Override
            public void onKey(long keyPtr, int keySize) {
                read(keyPtr, keySize, key);
            }

            @Override
            public void onValue(long valPtr, int valSize) {
                read(valPtr, valSize, val);
                // Values are associated with the most recently visited key.
                vis.put(key.get(), val.get());
            }

            /** Copies {@code size} bytes from off-heap {@code ptr} onto the heap and deserializes into {@code w}. */
            private void read(long ptr, int size, Writable w) {
                assert size == 4 : size;
                GridUnsafe.copyOffheapHeap(ptr, buf, GridUnsafe.BYTE_ARR_OFF, size);
                dataInput.bytes(buf, size);
                try {
                    w.readFields(dataInput);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        });
        //        X.println("vis: " + vis);
        assertEquals(mm, vis);
    }
    finally {
        // Close the input even when an assertion above fails, so the task input is always released.
        // (Previously close() was the last statement and was skipped on any assertion failure.)
        in.close();
    }
}
Also used : HadoopTaskInput(org.apache.ignite.internal.processors.hadoop.HadoopTaskInput) Writable(org.apache.hadoop.io.Writable) IntWritable(org.apache.hadoop.io.IntWritable) IOException(java.io.IOException) LinkedList(java.util.LinkedList) GridUnsafeDataInput(org.apache.ignite.internal.util.io.GridUnsafeDataInput) Collection(java.util.Collection) GridDataInput(org.apache.ignite.internal.util.io.GridDataInput) HadoopConcurrentHashMultimap(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimap) IntWritable(org.apache.hadoop.io.IntWritable)

Example 2 with GridUnsafeDataInput

use of org.apache.ignite.internal.util.io.GridUnsafeDataInput in project ignite by apache.

From the class HadoopSkipListSelfTest, method check:

/**
 * Checks skip-list multimap contents against expectations: first by iterating the task input
 * (verifying keys arrive in strictly ascending order), then by visiting raw off-heap bytes.
 *
 * @param m The multimap under test.
 * @param mm The multimap storing expectations.
 * @param vis The multimap to store visitor results.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */
private void check(HadoopMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis, HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    try {
        Map<Integer, Collection<Integer>> mmm = mm.asMap();
        int keys = 0;
        // Keys must come back strictly ascending (enforced by the assertTrue below).
        int prevKey = Integer.MIN_VALUE;
        while (in.next()) {
            keys++;
            IntWritable k = (IntWritable) in.key();
            assertNotNull(k);
            assertTrue(k.get() > prevKey);
            prevKey = k.get();
            // addFirst reverses the iteration order of values — presumably they come back LIFO;
            // the comparison against 'exp' below confirms the restored order matches insertion order.
            Deque<Integer> vs = new LinkedList<>();
            Iterator<?> it = in.values();
            while (it.hasNext())
                vs.addFirst(((IntWritable) it.next()).get());
            Collection<Integer> exp = mmm.get(k.get());
            assertEquals(exp, vs);
        }
        assertEquals(mmm.size(), keys);
        //!        assertEquals(m.keys(), keys);
        // Check visitor: read each 4-byte key/value directly from off-heap memory.
        final byte[] buf = new byte[4];
        final GridDataInput dataInput = new GridUnsafeDataInput();
        m.visit(false, new HadoopMultimap.Visitor() {

            /** Reusable key holder, populated by the latest onKey() call. */
            IntWritable key = new IntWritable();

            /** Reusable value holder. */
            IntWritable val = new IntWritable();

            @Override
            public void onKey(long keyPtr, int keySize) {
                read(keyPtr, keySize, key);
            }

            @Override
            public void onValue(long valPtr, int valSize) {
                read(valPtr, valSize, val);
                // Values are associated with the most recently visited key.
                vis.put(key.get(), val.get());
            }

            /** Copies {@code size} bytes from off-heap {@code ptr} onto the heap and deserializes into {@code w}. */
            private void read(long ptr, int size, Writable w) {
                assert size == 4 : size;
                GridUnsafe.copyOffheapHeap(ptr, buf, GridUnsafe.BYTE_ARR_OFF, size);
                dataInput.bytes(buf, size);
                try {
                    w.readFields(dataInput);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        });
        //        X.println("vis: " + vis);
        assertEquals(mm, vis);
    }
    finally {
        // Close the input even when an assertion above fails, so the task input is always released.
        // (Previously close() was the last statement and was skipped on any assertion failure.)
        in.close();
    }
}
Also used : HadoopTaskInput(org.apache.ignite.internal.processors.hadoop.HadoopTaskInput) HadoopMultimap(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap) Writable(org.apache.hadoop.io.Writable) IntWritable(org.apache.hadoop.io.IntWritable) IOException(java.io.IOException) LinkedList(java.util.LinkedList) GridUnsafeDataInput(org.apache.ignite.internal.util.io.GridUnsafeDataInput) Collection(java.util.Collection) GridDataInput(org.apache.ignite.internal.util.io.GridDataInput) IntWritable(org.apache.hadoop.io.IntWritable)

Example 3 with GridUnsafeDataInput

use of org.apache.ignite.internal.util.io.GridUnsafeDataInput in project ignite by apache.

From the class OptimizedObjectStreamSelfTest, method testReadLine:

/**
 * Tests {@code readLine()} against a buffer mixing {@code \n}, {@code \r\n} and a bare
 * {@code \r} terminator.
 *
 * @throws Exception If failed.
 */
public void testReadLine() throws Exception {
    OptimizedObjectInputStream in = new OptimizedObjectInputStream(new GridUnsafeDataInput());
    // Explicit charset instead of the platform default; the content is ASCII, so "UTF-8"
    // yields byte-for-byte the same input on any platform/JDK.
    byte[] bytes = "line1\nline2\r\nli\rne3\nline4".getBytes("UTF-8");
    in.in().bytes(bytes, bytes.length);
    assertEquals("line1", in.readLine());
    assertEquals("line2", in.readLine());
    // NOTE(review): the bare '\r' inside "li\rne3" is evidently not treated as a line
    // terminator by this readLine() implementation (unlike java.io.DataInput.readLine) —
    // "line3" is the expected result. Confirm against OptimizedObjectInputStream.readLine.
    assertEquals("line3", in.readLine());
    assertEquals("line4", in.readLine());
}
Also used : OptimizedObjectInputStream(org.apache.ignite.internal.marshaller.optimized.OptimizedObjectInputStream) GridUnsafeDataInput(org.apache.ignite.internal.util.io.GridUnsafeDataInput)

Example 4 with GridUnsafeDataInput

use of org.apache.ignite.internal.util.io.GridUnsafeDataInput in project ignite by apache.

From the class HadoopShuffleJob, method onShuffleMessage:

/**
 * Handles an incoming shuffle message: records it in the performance counter, merges the
 * message's key/value pairs into the local multimap for the target reducer, then either
 * defers or immediately sends an acknowledgement depending on {@code embedded} mode.
 *
 * @param src Source.
 * @param msg Message.
 * @throws IgniteCheckedException Exception.
 */
public void onShuffleMessage(T src, HadoopShuffleMessage msg) throws IgniteCheckedException {
    assert msg.buffer() != null;
    assert msg.offset() > 0;
    HadoopTaskContext taskCtx = locReducersCtx.get(msg.reducer()).get();
    // Record message arrival for shuffle performance accounting.
    HadoopPerformanceCounter perfCntr = HadoopPerformanceCounter.getCounter(taskCtx.counters(), null);
    perfCntr.onShuffleMessage(msg.reducer(), U.currentTimeMillis());
    HadoopMultimap map = getOrCreateMap(locMaps, msg.reducer());
    // Add data from message to the map. The adder is closed when the try block exits.
    try (HadoopMultimap.Adder adder = map.startAdding(taskCtx)) {
        final GridUnsafeDataInput dataInput = new GridUnsafeDataInput();
        // 'val' is a movable window over the message buffer; no per-value copies are made.
        final UnsafeValue val = new UnsafeValue(msg.buffer());
        msg.visit(new HadoopShuffleMessage.Visitor() {

            /** Key most recently added; reused by onValue() until the next onKey() call. */
            private HadoopMultimap.Key key;

            @Override
            public void onKey(byte[] buf, int off, int len) throws IgniteCheckedException {
                // Point the reusable data input at the key bytes — arguments look like
                // (array, start, limit = off + len); TODO confirm GridUnsafeDataInput.bytes contract.
                dataInput.bytes(buf, off, off + len);
                key = adder.addKey(dataInput, key);
            }

            @Override
            public void onValue(byte[] buf, int off, int len) {
                // Attach the value to the last key seen by repositioning the shared window.
                val.off = off;
                val.size = len;
                key.add(val);
            }
        });
    }
    if (embedded) {
        // No immediate response.
        if (localShuffleState(src).onShuffleMessage())
            sendFinishResponse(src, msg.jobId());
    } else
        // Response for every message.
        io.apply(src, new HadoopShuffleAck(msg.id(), msg.jobId()));
}
Also used : HadoopMultimap(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap) HadoopPerformanceCounter(org.apache.ignite.internal.processors.hadoop.counter.HadoopPerformanceCounter) GridUnsafeDataInput(org.apache.ignite.internal.util.io.GridUnsafeDataInput) IgniteCheckedException(org.apache.ignite.IgniteCheckedException) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext)

Example 5 with GridUnsafeDataInput

use of org.apache.ignite.internal.util.io.GridUnsafeDataInput in project ignite by apache.

From the class GridByteArrayList, method inputStream:

/**
 * Creates an input stream over this list's contents.
 *
 * @return Input stream based on this byte array list.
 */
public InputStream inputStream() {
    final GridUnsafeDataInput dataIn = new GridUnsafeDataInput();

    // Expose only the first 'size' bytes of the backing array as stream content.
    dataIn.bytes(data, size);

    return dataIn;
}
Also used : GridUnsafeDataInput(org.apache.ignite.internal.util.io.GridUnsafeDataInput)

Aggregations

GridUnsafeDataInput (org.apache.ignite.internal.util.io.GridUnsafeDataInput)5 IOException (java.io.IOException)2 Collection (java.util.Collection)2 LinkedList (java.util.LinkedList)2 IntWritable (org.apache.hadoop.io.IntWritable)2 Writable (org.apache.hadoop.io.Writable)2 HadoopTaskInput (org.apache.ignite.internal.processors.hadoop.HadoopTaskInput)2 HadoopMultimap (org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap)2 GridDataInput (org.apache.ignite.internal.util.io.GridDataInput)2 IgniteCheckedException (org.apache.ignite.IgniteCheckedException)1 OptimizedObjectInputStream (org.apache.ignite.internal.marshaller.optimized.OptimizedObjectInputStream)1 HadoopTaskContext (org.apache.ignite.internal.processors.hadoop.HadoopTaskContext)1 HadoopPerformanceCounter (org.apache.ignite.internal.processors.hadoop.counter.HadoopPerformanceCounter)1 HadoopConcurrentHashMultimap (org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimap)1