Search in sources:

Example 1 with Pair

use of org.apache.commons.lang3.tuple.Pair in project hive by apache.

From the class MapJoinOperator, the method completeInitializationOp.

/**
 * Completes operator initialization from an optional cached pair of map-join
 * hash-table containers and their matching serdes.
 *
 * <p>If {@code os} carries a cached pair, the cached tables are reused unless
 * any container has spilled to disk, in which case the hash table is reloaded
 * from scratch. The exec context's input paths are reset in either case so the
 * map operator re-initializes properly.
 *
 * @param os optional state; when non-empty, {@code os[0]} is a
 *           {@code Pair<MapJoinTableContainer[], MapJoinTableContainerSerDe[]>}
 * @throws HiveException if reloading the hash table fails
 */
@SuppressWarnings("unchecked")
@Override
protected void completeInitializationOp(Object[] os) throws HiveException {
    if (os.length != 0) {
        Pair<MapJoinTableContainer[], MapJoinTableContainerSerDe[]> pair =
                (Pair<MapJoinTableContainer[], MapJoinTableContainerSerDe[]>) os[0];
        // A cached table is unusable once any container has spilled; stop scanning
        // at the first spill (the original `spilled || hasSpill()` short-circuited
        // the same way once spilled became true).
        boolean spilled = false;
        for (MapJoinTableContainer container : pair.getLeft()) {
            if (container != null && container.hasSpill()) {
                spilled = true;
                break;
            }
        }
        if (spilled) {
            // we can't use the cached table because it has spilled.
            loadHashTable(getExecContext(), MapredContext.get());
        } else {
            if (LOG.isDebugEnabled()) {
                // Build the message with StringBuilder instead of String += in a loop.
                StringBuilder msg = new StringBuilder("Using tables from cache: [");
                for (MapJoinTableContainer c : pair.getLeft()) {
                    msg.append((c == null) ? "null" : c.getClass().getSimpleName()).append(", ");
                }
                LOG.debug(msg.append(']').toString());
            }
            // let's use the table from the cache.
            mapJoinTables = pair.getLeft();
            mapJoinTableSerdes = pair.getRight();
        }
        hashTblInitedOnce = true;
    }
    if (this.getExecContext() != null) {
        // reset exec context so that initialization of the map operator happens
        // properly
        this.getExecContext().setLastInputPath(null);
        this.getExecContext().setCurrentInputPath(null);
    }
}
Also used : MapJoinTableContainerSerDe(org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe) MapJoinTableContainer(org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer) Pair(org.apache.commons.lang3.tuple.Pair) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) ObjectPair(org.apache.hadoop.hive.common.ObjectPair)

Example 2 with Pair

use of org.apache.commons.lang3.tuple.Pair in project hive by apache.

From the class Copy, the method runFromLocal.

/**
   * Run COPY FROM LOCAL statement: copies one or more local files to a
   * (presumably HDFS) target via {@code FileSystem.copyFromLocalFile}.
   *
   * @param ctx parse-tree context for the COPY FROM LOCAL statement
   * @return 0 on success, 1 on failure, 2 when no source files were found;
   *         the same value is also recorded via {@code exec.setHostCode}
   */
public Integer runFromLocal(HplsqlParser.Copy_from_local_stmtContext ctx) {
    trace(ctx, "COPY FROM LOCAL");
    initFileOptions(ctx.copy_file_option());
    // key: local source path; value: (relative path under the source root, file size
    // in bytes) — presumably filled in by createLocalFileList; TODO confirm.
    HashMap<String, Pair<String, Long>> srcFiles = new HashMap<String, Pair<String, Long>>();
    String src = evalPop(ctx.copy_source(0)).toString();
    String dest = evalPop(ctx.copy_target()).toString();
    int srcItems = ctx.copy_source().size();
    // Expand every source expression into the flat srcFiles map.
    for (int i = 0; i < srcItems; i++) {
        createLocalFileList(srcFiles, evalPop(ctx.copy_source(i)).toString(), null);
    }
    if (info) {
        info(ctx, "Files to copy: " + srcFiles.size() + " (" + Utils.formatSizeInBytes(srcSizeInBytes) + ")");
    }
    // Nothing to copy: host code 2, matching the return value below.
    if (srcFiles.size() == 0) {
        exec.setHostCode(2);
        return 2;
    }
    timer.start();
    File file = new File();
    FileSystem fs = null;
    int succeed = 0;
    int failed = 0;
    long copiedSize = 0;
    try {
        fs = file.createFs();
        // With multiple source files, each one must be placed under the target
        // directory; with a single file the target may itself be the file name.
        boolean multi = false;
        if (srcFiles.size() > 1) {
            multi = true;
        }
        for (Map.Entry<String, Pair<String, Long>> i : srcFiles.entrySet()) {
            try {
                Path s = new Path(i.getKey());
                Path d = null;
                if (multi) {
                    String relativePath = i.getValue().getLeft();
                    if (relativePath == null) {
                        d = new Path(dest, s.getName());
                    } else {
                        d = new Path(dest, relativePath + Path.SEPARATOR + s.getName());
                    }
                } else {
                    // Path to file is specified (can be relative), so treat target as a file name (hadoop fs -put behavior)
                    if (srcItems == 1 && i.getKey().endsWith(src)) {
                        d = new Path(dest);
                    } else // Source directory is specified, so treat the target as a directory 
                    {
                        d = new Path(dest + Path.SEPARATOR + s.getName());
                    }
                }
                fs.copyFromLocalFile(delete, overwrite, s, d);
                succeed++;
                long size = i.getValue().getRight();
                copiedSize += size;
                if (info) {
                    info(ctx, "Copied: " + file.resolvePath(d) + " (" + Utils.formatSizeInBytes(size) + ")");
                }
            } catch (IOException e) {
                // Per-file failure: count it, and keep going only when the
                // IGNORE option was set; otherwise rethrow to the outer handler.
                failed++;
                if (!ignore) {
                    throw e;
                }
            }
        }
    } catch (IOException e) {
        exec.signal(e);
        exec.setHostCode(1);
        return 1;
    } finally {
        // Summary and host-code accounting run on every exit path,
        // including the early `return 1` above (finally overwrites its host code
        // with 0/1 based on the failed counter; the returned value stays 1).
        long elapsed = timer.stop();
        if (info) {
            info(ctx, "COPY completed: " + succeed + " succeed, " + failed + " failed, " + timer.format() + ", " + Utils.formatSizeInBytes(copiedSize) + ", " + Utils.formatBytesPerSec(copiedSize, elapsed));
        }
        if (failed == 0) {
            exec.setHostCode(0);
        } else {
            exec.setHostCode(1);
        }
        file.close();
    }
    return 0;
}
Also used : Path(org.apache.hadoop.fs.Path) HashMap(java.util.HashMap) IOException(java.io.IOException) FileSystem(org.apache.hadoop.fs.FileSystem) HashMap(java.util.HashMap) Map(java.util.Map) Pair(org.apache.commons.lang3.tuple.Pair)

Example 3 with Pair

use of org.apache.commons.lang3.tuple.Pair in project deeplearning4j by deeplearning4j.

From the class FirstIterationFunctionAdapter, the method call.

/**
 * Consumes (sentence, cumulative word count) tuples in batches of up to
 * {@code batchSize}, training each batch {@code iterations} times, and
 * finally returns the accumulated word-vector entries.
 */
@Override
public Iterable<Map.Entry<VocabWord, INDArray>> call(Iterator<Tuple2<List<VocabWord>, Long>> pairIter) {
    while (pairIter.hasNext()) {
        // Drain up to batchSize tuples from the iterator into a local batch.
        List<Pair<List<VocabWord>, Long>> sentenceBatch = new ArrayList<>();
        while (pairIter.hasNext() && sentenceBatch.size() < batchSize) {
            Tuple2<List<VocabWord>, Long> tuple = pairIter.next();
            sentenceBatch.add(Pair.of(tuple._1(), tuple._2()));
        }
        // Train over the same batch for the configured number of iterations.
        for (int iter = 0; iter < iterations; iter++) {
            for (Pair<List<VocabWord>, Long> sentence : sentenceBatch) {
                List<VocabWord> words = sentence.getLeft();
                Long cumSumCount = sentence.getRight();
                // Alpha decays linearly with training progress, floored at minAlpha.
                double sentenceAlpha = Math.max(minAlpha,
                        alpha - (alpha - minAlpha) * (cumSumCount / (double) totalWordCount));
                trainSentence(words, sentenceAlpha);
            }
        }
    }
    return indexSyn0VecMap.entrySet();
}
Also used : AtomicLong(java.util.concurrent.atomic.AtomicLong) VocabWord(org.deeplearning4j.models.word2vec.VocabWord) Pair(org.apache.commons.lang3.tuple.Pair)

Example 4 with Pair

use of org.apache.commons.lang3.tuple.Pair in project deeplearning4j by deeplearning4j.

From the class SecondIterationFunctionAdapter, the method call.

/**
 * Initializes shared vocab (and, when negative sampling is enabled, negative
 * sampling) state, then trains batches of (sentence, cumulative word count)
 * tuples and returns this worker's split of the vocabulary vectors.
 */
@Override
public Iterable<Entry<VocabWord, INDArray>> call(Iterator<Tuple2<List<VocabWord>, Long>> pairIter) {
    this.vocabHolder = VocabHolder.getInstance();
    this.vocabHolder.setSeed(seed, vectorLength);
    if (negative > 0) {
        negativeHolder = NegativeHolder.getInstance();
        negativeHolder.initHolder(vocab, expTable, this.vectorLength);
    }
    while (pairIter.hasNext()) {
        // Drain up to batchSize tuples from the iterator into a local batch.
        List<Pair<List<VocabWord>, Long>> sentenceBatch = new ArrayList<>();
        while (pairIter.hasNext() && sentenceBatch.size() < batchSize) {
            Tuple2<List<VocabWord>, Long> tuple = pairIter.next();
            sentenceBatch.add(Pair.of(tuple._1(), tuple._2()));
        }
        // Train over the same batch for the configured number of iterations.
        for (int iter = 0; iter < iterations; iter++) {
            for (Pair<List<VocabWord>, Long> sentence : sentenceBatch) {
                List<VocabWord> words = sentence.getLeft();
                Long cumSumCount = sentence.getRight();
                // Alpha decays linearly with training progress, floored at minAlpha.
                double sentenceAlpha = Math.max(minAlpha,
                        alpha - (alpha - minAlpha) * (cumSumCount / (double) totalWordCount));
                trainSentence(words, sentenceAlpha);
            }
        }
    }
    return vocabHolder.getSplit(vocab);
}
Also used : ArrayList(java.util.ArrayList) AtomicLong(java.util.concurrent.atomic.AtomicLong) VocabWord(org.deeplearning4j.models.word2vec.VocabWord) ArrayList(java.util.ArrayList) List(java.util.List) Pair(org.apache.commons.lang3.tuple.Pair)

Example 5 with Pair

use of org.apache.commons.lang3.tuple.Pair in project PneumaticCraft by MineMaarten.

From the class PneumaticRecipeRegistry, the method getSingleStack.

/**
 * Resolves a recipe ingredient to a concrete ItemStack.
 *
 * @param o either an {@code ItemStack} (returned as-is) or a
 *          {@code Pair<String, Integer>} of ore-dictionary name and stack size
 * @return the resolved stack; for a pair, a copy of the first ore-dictionary
 *         entry with its size set from the pair's value
 */
public static ItemStack getSingleStack(Object o) {
    if (o instanceof ItemStack) {
        return (ItemStack) o;
    } else {
        // Parameterized cast instead of a raw (Pair) cast; the unchecked warning is
        // suppressed at the narrowest possible scope. The cast is still unverifiable
        // at runtime — callers are trusted to pass only the two documented shapes.
        @SuppressWarnings("unchecked")
        Pair<String, Integer> pair = (Pair<String, Integer>) o;
        // NOTE(review): assumes the ore dictionary has at least one entry for this
        // name — get(0) throws IndexOutOfBoundsException otherwise; TODO confirm.
        ItemStack s = OreDictionaryHelper.getOreDictEntries(pair.getKey()).get(0);
        s = s.copy();
        s.stackSize = pair.getValue();
        return s;
    }
}
Also used : ItemStack(net.minecraft.item.ItemStack) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) Pair(org.apache.commons.lang3.tuple.Pair)

Aggregations

Pair (org.apache.commons.lang3.tuple.Pair)111 ArrayList (java.util.ArrayList)98 Mutable (org.apache.commons.lang3.mutable.Mutable)97 LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)87 ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression)86 VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression)75 ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator)73 ImmutablePair (org.apache.commons.lang3.tuple.ImmutablePair)63 Pair (org.apache.hyracks.algebricks.common.utils.Pair)62 MutableObject (org.apache.commons.lang3.mutable.MutableObject)42 List (java.util.List)35 HashMap (java.util.HashMap)34 AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator)32 ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression)30 Collectors (java.util.stream.Collectors)29 ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan)29 AbstractFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression)29 GbyVariableExpressionPair (org.apache.asterix.lang.common.expression.GbyVariableExpressionPair)27 HashSet (java.util.HashSet)25 File (java.io.File)24