
Example 1 with Pair

Use of com.tdunning.plume.Pair in project Plume by tdunning.

From the class LogParseTest, method parseGroupSort:

@Test
public void parseGroupSort() throws IOException {
    Plume p = new LocalPlume();
    PCollection<String> logs = p.readResourceFile("log.txt");
    PTable<String, Event> events = logs.map(new DoFn<String, Pair<String, Event>>() {

        @Override
        public void process(String logLine, EmitFn<Pair<String, Event>> emitter) {
            Event e = new Event(logLine);
            emitter.emit(new Pair<String, Event>(e.getName(), e));
        }
    }, Plume.tableOf(strings(), strings()));
    PTable<String, Iterable<Event>> byName = events.groupByKey(new Ordering<Event>() {
    });
}
Also used : Plume(com.tdunning.plume.Plume) LocalPlume(com.tdunning.plume.local.eager.LocalPlume) Pair(com.tdunning.plume.Pair) Test(org.junit.Test)
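
For reference, Pair in these examples is Plume's simple key/value holder. A minimal standalone sketch (with made-up values) of the two construction styles that appear throughout this page, plus the getKey()/getValue() accessors:

import com.tdunning.plume.Pair;

// Minimal sketch: both ways of building a Pair seen in these examples, and the
// accessors used to read it back. The class name and values are illustrative.
public class PairSketch {
    public static void main(String[] args) {
        Pair<String, Integer> a = new Pair<String, Integer>("events", 3);
        Pair<String, Integer> b = Pair.create("events", 3);
        System.out.println(a.getKey() + " -> " + a.getValue());
        System.out.println(b.getKey() + " -> " + b.getValue());
    }
}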

Example 2 with Pair

Use of com.tdunning.plume.Pair in project Plume by tdunning.

From the class LocalExecutor, method execute:

/**
   * Executes a one-output flow and materializes its result.
   *
   * @param <T> the element type of the output collection
   * @param output the lazy collection whose deferred operations should be evaluated
   * @return an iterable over the materialized contents of the collection
   */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <T> Iterable<T> execute(LazyCollection<T> output) {
    if (output.isMaterialized()) {
        // nothing else to execute
        return output.getData();
    } else {
        DeferredOp op = output.getDeferredOp();
        final List<T> result = Lists.newArrayList();
        // Flatten op
        if (op instanceof Flatten) {
            Flatten<T> flatten = (Flatten<T>) op;
            for (PCollection<T> col : flatten.getOrigins()) {
                Iterable<T> res = execute((LazyCollection<T>) col);
                result.addAll(Lists.newArrayList(res));
            }
            // done with it
            return result;
        }
        Iterable parent;
        EmitFn<T> emitter = new EmitFn<T>() {

            @Override
            public void emit(T v) {
                result.add(v);
            }
        };
        // ParallelDo
        if (op instanceof ParallelDo) {
            ParallelDo pDo = (ParallelDo) op;
            parent = execute((LazyCollection) pDo.getOrigin());
            for (Object obj : parent) {
                pDo.getFunction().process(obj, emitter);
            }
        // MultipleParallelDo -> parallel operations that read the same collection
        // In this version of executor, we will only compute the current collection, not its neighbors
        } else if (op instanceof MultipleParallelDo) {
            MultipleParallelDo mPDo = (MultipleParallelDo) op;
            parent = execute((LazyCollection) mPDo.getOrigin());
            // get the function that corresponds to this collection
            DoFn function = (DoFn) mPDo.getDests().get(output);
            for (Object obj : parent) {
                function.process(obj, emitter);
            }
        // GroupByKey
        } else if (op instanceof GroupByKey) {
            GroupByKey gBK = (GroupByKey) op;
            parent = execute(gBK.getOrigin());
            Map<Object, List> groupMap = Maps.newHashMap();
            // Perform in-memory group by operation
            for (Object obj : parent) {
                Pair p = (Pair) obj;
                List list = groupMap.get(p.getKey());
                if (list == null) {
                    list = new ArrayList();
                }
                list.add(p.getValue());
                groupMap.put(p.getKey(), list);
            }
            for (Map.Entry<Object, List> entry : groupMap.entrySet()) {
                result.add((T) new Pair(entry.getKey(), entry.getValue()));
            }
        }
        return result;
    }
}
Also used : ParallelDo(com.tdunning.plume.local.lazy.op.ParallelDo) MultipleParallelDo(com.tdunning.plume.local.lazy.op.MultipleParallelDo) GroupByKey(com.tdunning.plume.local.lazy.op.GroupByKey) Flatten(com.tdunning.plume.local.lazy.op.Flatten) ArrayList(java.util.ArrayList) DeferredOp(com.tdunning.plume.local.lazy.op.DeferredOp) DoFn(com.tdunning.plume.DoFn) EmitFn(com.tdunning.plume.EmitFn) List(java.util.List) Map(java.util.Map) Pair(com.tdunning.plume.Pair)
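
The GroupByKey branch above is just an in-memory multimap build: every incoming Pair is folded into a map from key to value list, and each entry is then re-emitted as a Pair of (key, values). A self-contained sketch of that step using only java.util types and made-up data (no Plume classes involved):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Standalone sketch of the in-memory group-by performed in the GroupByKey branch.
// The input pairs are illustrative; computeIfAbsent replaces the null-check idiom
// used in the executor but has the same effect.
public class GroupBySketch {
    public static void main(String[] args) {
        List<Map.Entry<String, Integer>> pairs = List.of(
                Map.entry("a", 1), Map.entry("b", 2), Map.entry("a", 3));
        Map<String, List<Integer>> grouped = new HashMap<>();
        for (Map.Entry<String, Integer> p : pairs) {
            grouped.computeIfAbsent(p.getKey(), k -> new ArrayList<>()).add(p.getValue());
        }
        // Prints {a=[1, 3], b=[2]}
        System.out.println(grouped);
    }
}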

Example 3 with Pair

Use of com.tdunning.plume.Pair in project Plume by tdunning.

From the class MSCRMapper, method map:

@SuppressWarnings("unchecked")
protected void map(WritableComparable key, WritableComparable value, final Mapper<WritableComparable, WritableComparable, PlumeObject, PlumeObject>.Context context) throws IOException, InterruptedException {
    LazyCollection<?> l = null;
    FileSplit fS = FileInputSplitWrapper.getFileInputSplit(context);
    // Get LazyCollection for this input (according to FileSplit)
    for (PCollection<?> input : mscr.getInputs()) {
        LazyCollection<?> thisL = (LazyCollection<?>) input;
        if (thisL.getFile() == null) {
            // Convention for intermediate results
            thisL.setFile(tmpFolder + "/" + thisL.getPlumeId());
        }
        if (fS.getPath().toString().startsWith(thisL.getFile()) || fS.getPath().toString().startsWith("file:" + thisL.getFile())) {
            l = thisL;
            break;
        }
    }
    if (l == null) {
        throw new RuntimeException("Unable to match input split with any MSCR input");
    }
    // If this collection is a table -> process Pair, otherwise process value
    PCollectionType type = l.getType();
    Object toProcess = value;
    if (type instanceof PTableType) {
        toProcess = Pair.create(key, value);
    }
    for (DeferredOp op : l.getDownOps()) {
        if (op instanceof MultipleParallelDo) {
            MultipleParallelDo mPDo = ((MultipleParallelDo) op);
            for (Object entry : mPDo.getDests().entrySet()) {
                Map.Entry<PCollection, DoFn> en = (Map.Entry<PCollection, DoFn>) entry;
                LazyCollection<?> lCol = (LazyCollection<?>) en.getKey();
                DeferredOp childOp = null;
                if (lCol.getDownOps() != null && lCol.getDownOps().size() > 0) {
                    childOp = lCol.getDownOps().get(0);
                }
                final Integer channel;
                if (childOp != null && childOp instanceof Flatten) {
                    channel = mscr.getNumberedChannels().get(((Flatten) childOp).getDest());
                } else if (childOp != null && childOp instanceof GroupByKey) {
                    channel = mscr.getNumberedChannels().get(((GroupByKey) childOp).getOrigin());
                } else {
                    // bypass channel?
                    channel = mscr.getNumberedChannels().get(en.getKey());
                }
                if (channel == null) {
                    // This is not for this MSCR - just skip it
                    return;
                }
                // Call parallelDo function
                en.getValue().process(toProcess, new EmitFn() {

                    @Override
                    public void emit(Object v) {
                        try {
                            if (v instanceof Pair) {
                                Pair p = (Pair) v;
                                context.write(new PlumeObject((WritableComparable) p.getKey(), channel), new PlumeObject((WritableComparable) p.getValue(), channel));
                            } else {
                                context.write(new PlumeObject((WritableComparable) v, channel), new PlumeObject((WritableComparable) v, channel));
                            }
                        } catch (Exception e) {
                            // TODO How to report this
                            e.printStackTrace();
                        }
                    }
                });
            }
        } else {
            if (op instanceof Flatten) {
                l = (LazyCollection) ((Flatten) op).getDest();
            }
            int channel = mscr.getNumberedChannels().get(l);
            if (toProcess instanceof Pair) {
                context.write(new PlumeObject(key, channel), new PlumeObject(value, channel));
            } else {
                context.write(new PlumeObject(value, channel), new PlumeObject(value, channel));
            }
        }
    }
}
Also used : MultipleParallelDo(com.tdunning.plume.local.lazy.op.MultipleParallelDo) GroupByKey(com.tdunning.plume.local.lazy.op.GroupByKey) PlumeObject(com.tdunning.plume.local.lazy.MapRedExecutor.PlumeObject) PTableType(com.tdunning.plume.types.PTableType) Flatten(com.tdunning.plume.local.lazy.op.Flatten) PCollectionType(com.tdunning.plume.types.PCollectionType) FileSplit(org.apache.hadoop.mapreduce.lib.input.FileSplit) DeferredOp(com.tdunning.plume.local.lazy.op.DeferredOp) IOException(java.io.IOException) PCollection(com.tdunning.plume.PCollection) DoFn(com.tdunning.plume.DoFn) EmitFn(com.tdunning.plume.EmitFn) Map(java.util.Map) Pair(com.tdunning.plume.Pair)

Example 4 with Pair

Use of com.tdunning.plume.Pair in project Plume by tdunning.

From the class MSCRReducer, method reduce:

@SuppressWarnings("unchecked")
protected void reduce(final PlumeObject arg0, java.lang.Iterable<PlumeObject> values, Reducer<PlumeObject, PlumeObject, NullWritable, NullWritable>.Context arg2) throws IOException, InterruptedException {
    PCollection col = mscr.getChannelByNumber().get(arg0.sourceId);
    OutputChannel oC = mscr.getOutputChannels().get(col);
    if (oC.reducer != null) {
        // apply reducer
        ParallelDo pDo = oC.reducer;
        // TODO how to check / report this
        DoFn reducer = pDo.getFunction();
        List<WritableComparable> vals = Lists.newArrayList();
        for (PlumeObject val : values) {
            vals.add(val.obj);
        }
        reducer.process(Pair.create(arg0.obj, vals), new EmitFn() {

            @Override
            public void emit(Object v) {
                try {
                    if (v instanceof Pair) {
                        Pair p = (Pair) v;
                        mos.write(arg0.sourceId + "", p.getKey(), p.getValue());
                    } else {
                        mos.write(arg0.sourceId + "", NullWritable.get(), (WritableComparable) v);
                    }
                } catch (Exception e) {
                    // TODO How to report this
                    e.printStackTrace();
                }
            }
        });
    } else {
        // direct writing - write all key, value pairs
        for (PlumeObject val : values) {
            if (oC.output instanceof PTable) {
                mos.write(arg0.sourceId + "", arg0.obj, val.obj);
            } else {
                mos.write(arg0.sourceId + "", NullWritable.get(), val.obj);
            }
        }
    }
}
Also used : ParallelDo(com.tdunning.plume.local.lazy.op.ParallelDo) PlumeObject(com.tdunning.plume.local.lazy.MapRedExecutor.PlumeObject) IOException(java.io.IOException) PTable(com.tdunning.plume.PTable) PCollection(com.tdunning.plume.PCollection) DoFn(com.tdunning.plume.DoFn) EmitFn(com.tdunning.plume.EmitFn) WritableComparable(org.apache.hadoop.io.WritableComparable) OutputChannel(com.tdunning.plume.local.lazy.MSCR.OutputChannel) Pair(com.tdunning.plume.Pair)
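
Note that MSCRReducer hands the reducer function a single Pair built with Pair.create(key, values), so reducer-side DoFns see a (key, collected values) pair. A hedged sketch of a function with that shape; the Text/IntWritable element types and the summing logic are illustrative assumptions, not part of the project:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

import com.tdunning.plume.DoFn;
import com.tdunning.plume.EmitFn;
import com.tdunning.plume.Pair;

// Hypothetical reducer-side function: process() receives the (key, values) Pair
// that MSCRReducer builds above and emits one (key, sum) Pair per key.
public class SumReducerSketch {
    static final DoFn<Pair<Text, Iterable<IntWritable>>, Pair<Text, IntWritable>> SUM =
            new DoFn<Pair<Text, Iterable<IntWritable>>, Pair<Text, IntWritable>>() {

                @Override
                public void process(Pair<Text, Iterable<IntWritable>> in,
                                    EmitFn<Pair<Text, IntWritable>> emitter) {
                    int total = 0;
                    for (IntWritable v : in.getValue()) {
                        total += v.get();
                    }
                    emitter.emit(Pair.create(in.getKey(), new IntWritable(total)));
                }
            };
}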

Example 5 with Pair

Use of com.tdunning.plume.Pair in project Plume by tdunning.

From the class MapReduceTest, method mapReduceAndCombine:

@Test
public void mapReduceAndCombine() {
    List<Pair<Integer, String>> words = Lists.newArrayList();
    Multiset<String> ref = HashMultiset.create();
    int k = 0;
    Random gen = new Random();
    for (String letter : "abcdefghij".split("")) {
        // add 2^k of this letter
        for (int i = 0; i < (1 << k); i++) {
            words.add(Pair.create(gen.nextInt(), letter));
            ref.add(letter);
        }
        k++;
    }
    Reducer<String, Integer, Integer> r = new Reducer<String, Integer, Integer>() {

        @Override
        public void reduce(String key, Iterable<Integer> values, Collector<String, Integer> out) {
            int sum = 0;
            for (Integer value : values) {
                sum += value;
            }
            out.collect(key, sum);
        }
    };
    MapReduce<Integer, String, String, Integer, Integer> mr = new MapReduceBuilder<Integer, String, String, Integer, Integer>().map(new Mapper<Integer, String, String, Integer>() {

        @Override
        public void map(Integer key, String value, Collector<String, Integer> out) {
            out.collect(value, 1);
        }
    }).reduce(r).combine(r).build();
    Iterable<Pair<String, Integer>> out = mr.run(words);
    for (Pair<String, Integer> pair : out) {
        assertEquals(ref.count(pair.getKey()), pair.getValue().intValue());
    }
}
Also used : Random(java.util.Random) Pair(com.tdunning.plume.Pair) Test(org.junit.Test)
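
The assertions above walk the output pairs directly; when counts need to be looked up by key, the Iterable<Pair<String, Integer>> returned by mr.run(words) can first be collected into a plain Map. A small hypothetical helper (not part of the project):

import java.util.HashMap;
import java.util.Map;

import com.tdunning.plume.Pair;

// Hypothetical helper: collect (word, count) pairs, such as the output of
// mr.run(words) in the test above, into a Map keyed by word.
public class PairCollector {
    public static Map<String, Integer> toMap(Iterable<Pair<String, Integer>> pairs) {
        Map<String, Integer> counts = new HashMap<>();
        for (Pair<String, Integer> pair : pairs) {
            counts.put(pair.getKey(), pair.getValue());
        }
        return counts;
    }
}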

Aggregations

Pair (com.tdunning.plume.Pair): 7 usages
Test (org.junit.Test): 4 usages
DoFn (com.tdunning.plume.DoFn): 3 usages
EmitFn (com.tdunning.plume.EmitFn): 3 usages
Random (java.util.Random): 3 usages
PCollection (com.tdunning.plume.PCollection): 2 usages
PlumeObject (com.tdunning.plume.local.lazy.MapRedExecutor.PlumeObject): 2 usages
DeferredOp (com.tdunning.plume.local.lazy.op.DeferredOp): 2 usages
Flatten (com.tdunning.plume.local.lazy.op.Flatten): 2 usages
GroupByKey (com.tdunning.plume.local.lazy.op.GroupByKey): 2 usages
MultipleParallelDo (com.tdunning.plume.local.lazy.op.MultipleParallelDo): 2 usages
ParallelDo (com.tdunning.plume.local.lazy.op.ParallelDo): 2 usages
IOException (java.io.IOException): 2 usages
Map (java.util.Map): 2 usages
Function (com.google.common.base.Function): 1 usage
PTable (com.tdunning.plume.PTable): 1 usage
Plume (com.tdunning.plume.Plume): 1 usage
LocalPlume (com.tdunning.plume.local.eager.LocalPlume): 1 usage
OutputChannel (com.tdunning.plume.local.lazy.MSCR.OutputChannel): 1 usage
PCollectionType (com.tdunning.plume.types.PCollectionType): 1 usage