Search in sources :

Example 1 with HadoopJobInfo

use of org.apache.ignite.internal.processors.hadoop.HadoopJobInfo in project ignite by apache.

The class HadoopConcurrentHashMultimapSelftest defines the method testMapSimple.

/**
 * Single-threaded smoke test: writes random key/value pairs into a
 * {@code HadoopConcurrentHashMultimap} and verifies its contents against a
 * reference Guava {@code Multimap} after every write.
 *
 * @throws Exception If failed.
 */
public void testMapSimple() throws Exception {
    GridUnsafeMemory mem = new GridUnsafeMemory(0);

    Random rnd = new Random();

    // Randomized capacity: 16, 32 or 64 buckets.
    int mapSize = 16 << rnd.nextInt(3);

    HadoopJobInfo job = new JobInfo();

    HadoopTaskContext taskCtx = new TaskContext();

    HadoopConcurrentHashMultimap m = new HadoopConcurrentHashMultimap(job, mem, mapSize);

    HadoopConcurrentHashMultimap.Adder a = m.startAdding(taskCtx);

    // 'mm' mirrors the expected contents; 'vis' tracks entries visited by check().
    Multimap<Integer, Integer> mm = ArrayListMultimap.create();
    Multimap<Integer, Integer> vis = ArrayListMultimap.create();

    for (int i = 0, vals = 4 * mapSize + rnd.nextInt(25); i < vals; i++) {
        int key = rnd.nextInt(mapSize);
        int val = rnd.nextInt();

        a.write(new IntWritable(key), new IntWritable(val));
        mm.put(key, val);

        X.println("k: " + key + " v: " + val);

        // Close the adder to flush written data before verification, then reopen it.
        a.close();

        check(m, mm, vis, taskCtx);

        a = m.startAdding(taskCtx);
    }

    a.close();

    X.println("Alloc: " + mem.allocatedSize());

    m.close();

    // Closing the map must release all off-heap memory it allocated.
    assertEquals(0, mem.allocatedSize());
}
Also used : HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) Random(java.util.Random) GridRandom(org.apache.ignite.internal.util.GridRandom) HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) HadoopConcurrentHashMultimap(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimap) GridUnsafeMemory(org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory) IntWritable(org.apache.hadoop.io.IntWritable)

Example 2 with HadoopJobInfo

use of org.apache.ignite.internal.processors.hadoop.HadoopJobInfo in project ignite by apache.

The class HadoopConcurrentHashMultimapSelftest defines the method testMultiThreaded.

/**
 * Hammers the concurrent multimap with random writes from 3..29 threads,
 * mirroring every write into a reference {@code ConcurrentMap}, then verifies
 * that the multimap's contents exactly match the reference. Repeated 20 times
 * with a fresh map each round.
 *
 * @throws Exception if failed.
 */
public void testMultiThreaded() throws Exception {
    GridUnsafeMemory mem = new GridUnsafeMemory(0);
    X.println("___ Started");
    Random rnd = new GridRandom();
    for (int i = 0; i < 20; i++) {
        HadoopJobInfo job = new JobInfo();
        final HadoopTaskContext taskCtx = new TaskContext();
        // Intentionally tiny initial capacity (16) to force heavy rehash contention.
        final HadoopConcurrentHashMultimap m = new HadoopConcurrentHashMultimap(job, mem, 16);
        // Reference map recording every value written by every thread.
        final ConcurrentMap<Integer, Collection<Integer>> mm = new ConcurrentHashMap<>();
        X.println("___ MT");
        multithreaded(new Callable<Object>() {

            @Override
            public Object call() throws Exception {
                X.println("___ TH in");
                // Each thread uses its own RNG and reusable writable buffers.
                Random rnd = new GridRandom();
                IntWritable key = new IntWritable();
                IntWritable val = new IntWritable();
                HadoopMultimap.Adder a = m.startAdding(taskCtx);
                for (int i = 0; i < 50000; i++) {
                    int k = rnd.nextInt(32000);
                    int v = rnd.nextInt();
                    key.set(k);
                    val.set(v);
                    a.write(key, val);
                    // Record the write in the reference map; losers of the
                    // putIfAbsent race fall back to the winner's queue.
                    Collection<Integer> list = mm.get(k);
                    if (list == null) {
                        list = new ConcurrentLinkedQueue<>();
                        Collection<Integer> old = mm.putIfAbsent(k, list);
                        if (old != null)
                            list = old;
                    }
                    list.add(v);
                }
                a.close();
                X.println("___ TH out");
                return null;
            }
        }, 3 + rnd.nextInt(27));
        X.println("___ Check: " + m.capacity());
        assertEquals(mm.size(), m.keys());
        // With 50k writes per thread over 32k keys, the map must have grown well past 32000.
        assertTrue(m.capacity() > 32000);
        // Drain the multimap, removing each key from the reference map as it is verified.
        HadoopTaskInput in = m.input(taskCtx);
        while (in.next()) {
            IntWritable key = (IntWritable) in.key();
            Iterator<?> valsIter = in.values();
            Collection<Integer> vals = mm.remove(key.get());
            assertNotNull(vals);
            while (valsIter.hasNext()) {
                IntWritable val = (IntWritable) valsIter.next();
                assertTrue(vals.remove(val.get()));
            }
            // Every reference value for this key must have been matched exactly once.
            assertTrue(vals.isEmpty());
        }
        in.close();
        m.close();
        // All off-heap memory must be released after close.
        assertEquals(0, mem.allocatedSize());
    }
}
Also used : HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopTaskInput(org.apache.ignite.internal.processors.hadoop.HadoopTaskInput) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) IOException(java.io.IOException) GridRandom(org.apache.ignite.internal.util.GridRandom) Random(java.util.Random) GridRandom(org.apache.ignite.internal.util.GridRandom) HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) Collection(java.util.Collection) HadoopConcurrentHashMultimap(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimap) GridUnsafeMemory(org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) IntWritable(org.apache.hadoop.io.IntWritable)

Example 3 with HadoopJobInfo

use of org.apache.ignite.internal.processors.hadoop.HadoopJobInfo in project ignite by apache.

The class HadoopSkipListSelfTest defines the method testMapSimple.

/**
 * Single-threaded smoke test: writes random key/value pairs into a
 * {@code HadoopSkipList} and verifies its contents against a reference Guava
 * {@code Multimap} after every write.
 *
 * @throws Exception On error.
 */
public void testMapSimple() throws Exception {
    GridUnsafeMemory mem = new GridUnsafeMemory(0);

    Random rnd = new Random();

    // Randomized size factor: 16 << [0..5], i.e. 16 to 512.
    int mapSize = 16 << rnd.nextInt(6);

    HadoopJobInfo job = new JobInfo();

    HadoopTaskContext taskCtx = new TaskContext();

    HadoopMultimap m = new HadoopSkipList(job, mem);

    HadoopMultimap.Adder a = m.startAdding(taskCtx);

    // 'mm' mirrors the expected contents; 'vis' tracks entries visited by check().
    Multimap<Integer, Integer> mm = ArrayListMultimap.create();
    Multimap<Integer, Integer> vis = ArrayListMultimap.create();

    for (int i = 0, vals = 4 * mapSize + rnd.nextInt(25); i < vals; i++) {
        int key = rnd.nextInt(mapSize);
        int val = rnd.nextInt();

        a.write(new IntWritable(key), new IntWritable(val));
        mm.put(key, val);

        X.println("k: " + key + " v: " + val);

        // Close the adder to flush written data before verification, then reopen it.
        a.close();

        check(m, mm, vis, taskCtx);

        a = m.startAdding(taskCtx);
    }

    a.close();

    X.println("Alloc: " + mem.allocatedSize());

    m.close();

    // Closing the skip list must release all off-heap memory it allocated.
    assertEquals(0, mem.allocatedSize());
}
Also used : HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) HadoopMultimap(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap) Random(java.util.Random) GridRandom(org.apache.ignite.internal.util.GridRandom) HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopSkipList(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopSkipList) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) GridUnsafeMemory(org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory) IntWritable(org.apache.hadoop.io.IntWritable)

Example 4 with HadoopJobInfo

use of org.apache.ignite.internal.processors.hadoop.HadoopJobInfo in project ignite by apache.

The class HadoopSkipListSelfTest defines the method testMultiThreaded.

/**
 * Hammers the skip list with concurrent random writes from 3..29 threads,
 * then verifies its contents — and strictly ascending key order — against a
 * reference {@code ConcurrentMap}. Repeated 20 times with a fresh list.
 *
 * @throws Exception if failed.
 */
public void testMultiThreaded() throws Exception {
    GridUnsafeMemory mem = new GridUnsafeMemory(0);

    X.println("___ Started");

    Random outerRnd = new GridRandom();

    for (int iter = 0; iter < 20; iter++) {
        HadoopJobInfo job = new JobInfo();

        final HadoopTaskContext taskCtx = new TaskContext();

        final HadoopMultimap multimap = new HadoopSkipList(job, mem);

        // Reference map mirroring every concurrent write.
        final ConcurrentMap<Integer, Collection<Integer>> refMap = new ConcurrentHashMap<>();

        X.println("___ MT");

        multithreaded(new Callable<Object>() {
            @Override public Object call() throws Exception {
                X.println("___ TH in");

                // Per-thread RNG and reusable writable buffers.
                Random rnd = new GridRandom();

                IntWritable curKey = new IntWritable();
                IntWritable curVal = new IntWritable();

                HadoopMultimap.Adder adder = multimap.startAdding(taskCtx);

                for (int j = 0; j < 50000; j++) {
                    int k = rnd.nextInt(32000);
                    int v = rnd.nextInt();

                    curKey.set(k);
                    curVal.set(v);

                    adder.write(curKey, curVal);

                    // Mirror the write; losers of the putIfAbsent race adopt the winner's queue.
                    Collection<Integer> q = refMap.get(k);

                    if (q == null) {
                        Collection<Integer> fresh = new ConcurrentLinkedQueue<>();

                        Collection<Integer> existing = refMap.putIfAbsent(k, fresh);

                        q = existing == null ? fresh : existing;
                    }

                    q.add(v);
                }

                adder.close();

                X.println("___ TH out");

                return null;
            }
        }, 3 + outerRnd.nextInt(27));

        // Drain the skip list, removing each verified key from the reference map.
        HadoopTaskInput input = multimap.input(taskCtx);

        int lastKey = Integer.MIN_VALUE;

        while (input.next()) {
            IntWritable key = (IntWritable) input.key();

            // Skip list iteration must yield keys in strictly ascending order.
            assertTrue(key.get() > lastKey);

            lastKey = key.get();

            Iterator<?> valIter = input.values();

            Collection<Integer> expected = refMap.remove(key.get());

            assertNotNull(expected);

            while (valIter.hasNext())
                assertTrue(expected.remove(((IntWritable) valIter.next()).get()));

            // Every reference value for this key must have been matched exactly once.
            assertTrue(expected.isEmpty());
        }

        input.close();

        multimap.close();

        // All off-heap memory must be released after close.
        assertEquals(0, mem.allocatedSize());
    }
}
Also used : HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopTaskInput(org.apache.ignite.internal.processors.hadoop.HadoopTaskInput) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) HadoopMultimap(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap) IOException(java.io.IOException) GridRandom(org.apache.ignite.internal.util.GridRandom) Random(java.util.Random) GridRandom(org.apache.ignite.internal.util.GridRandom) HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) HadoopSkipList(org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopSkipList) HadoopTaskContext(org.apache.ignite.internal.processors.hadoop.HadoopTaskContext) Collection(java.util.Collection) GridUnsafeMemory(org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) IntWritable(org.apache.hadoop.io.IntWritable)

Example 5 with HadoopJobInfo

use of org.apache.ignite.internal.processors.hadoop.HadoopJobInfo in project ignite by apache.

The class HadoopFileSystemCounterWriterDelegateImpl defines the method write.

/**
 * {@inheritDoc}
 *
 * Writes the job's performance counters to a per-job file on the Hadoop file
 * system, one "name:value" line per event. The target directory is taken from
 * the job property {@code COUNTER_WRITER_DIR_PROPERTY} (with the user macro
 * substituted) or falls back to {@code DEFAULT_COUNTER_WRITER_DIR}.
 */
public void write(HadoopJobEx job, HadoopCounters cntrs) throws IgniteCheckedException {
    Configuration hadoopCfg = HadoopUtils.safeCreateConfiguration();
    final HadoopJobInfo jobInfo = job.info();
    final HadoopJobId jobId = job.id();
    // Copy all job properties into the Hadoop configuration.
    // NOTE(review): unchecked cast — presumably jobInfo is always a HadoopDefaultJobInfo here; confirm against callers.
    for (Map.Entry<String, String> e : ((HadoopDefaultJobInfo) jobInfo).properties().entrySet()) hadoopCfg.set(e.getKey(), e.getValue());
    String user = jobInfo.user();
    user = IgfsUtils.fixUserName(user);
    String dir = jobInfo.property(IgniteHadoopFileSystemCounterWriter.COUNTER_WRITER_DIR_PROPERTY);
    if (dir == null)
        dir = DEFAULT_COUNTER_WRITER_DIR;
    // Final path: <dir with USER_MACRO replaced>/<jobId>.
    Path jobStatPath = new Path(new Path(dir.replace(USER_MACRO, user)), jobId.toString());
    HadoopPerformanceCounter perfCntr = HadoopPerformanceCounter.getCounter(cntrs, null);
    try {
        hadoopCfg.set(MRJobConfig.USER_NAME, user);
        FileSystem fs = ((HadoopV2Job) job).fileSystem(jobStatPath.toUri(), hadoopCfg);
        fs.mkdirs(jobStatPath);
        // try-with-resources closes (and flushes) the stream; note PrintStream
        // swallows I/O errors rather than throwing them.
        try (PrintStream out = new PrintStream(fs.create(new Path(jobStatPath, IgniteHadoopFileSystemCounterWriter.PERFORMANCE_COUNTER_FILE_NAME)))) {
            // Emit one "eventName:value" line per recorded performance event.
            for (T2<String, Long> evt : perfCntr.evts()) {
                out.print(evt.get1());
                out.print(':');
                out.println(evt.get2().toString());
            }
            out.flush();
        }
    } catch (IOException e) {
        // Wrap file-system failures in the Ignite checked exception, preserving the cause.
        throw new IgniteCheckedException(e);
    }
}
Also used : HadoopJobInfo(org.apache.ignite.internal.processors.hadoop.HadoopJobInfo) Path(org.apache.hadoop.fs.Path) PrintStream(java.io.PrintStream) Configuration(org.apache.hadoop.conf.Configuration) HadoopPerformanceCounter(org.apache.ignite.internal.processors.hadoop.counter.HadoopPerformanceCounter) IOException(java.io.IOException) HadoopJobId(org.apache.ignite.internal.processors.hadoop.HadoopJobId) IgniteCheckedException(org.apache.ignite.IgniteCheckedException) HadoopV2Job(org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Job) FileSystem(org.apache.hadoop.fs.FileSystem) Map(java.util.Map)

Aggregations

HadoopJobInfo (org.apache.ignite.internal.processors.hadoop.HadoopJobInfo)6 Random (java.util.Random)4 IntWritable (org.apache.hadoop.io.IntWritable)4 HadoopTaskContext (org.apache.ignite.internal.processors.hadoop.HadoopTaskContext)4 GridRandom (org.apache.ignite.internal.util.GridRandom)4 GridUnsafeMemory (org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory)4 IOException (java.io.IOException)3 Collection (java.util.Collection)2 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)2 ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue)2 IgniteCheckedException (org.apache.ignite.IgniteCheckedException)2 HadoopTaskInput (org.apache.ignite.internal.processors.hadoop.HadoopTaskInput)2 HadoopConcurrentHashMultimap (org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimap)2 HadoopMultimap (org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap)2 HadoopSkipList (org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopSkipList)2 PrintStream (java.io.PrintStream)1 Map (java.util.Map)1 Configuration (org.apache.hadoop.conf.Configuration)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 Path (org.apache.hadoop.fs.Path)1