Search in sources:

Example 6 with RuntimeInterruptedException

use of edu.stanford.nlp.util.RuntimeInterruptedException in project CoreNLP by stanfordnlp.

From the class InterruptibleMulticoreWrapper, method joinWithTimeout:

/**
 * Shuts down the thread pool, returns when finished.
 * <p>
 * If {@code timeout} is positive, then {@code join} waits at
 * most {@code timeout} milliseconds for threads to finish.  If
 * any fail to finish in that time, the threadpool is shut down
 * via {@code shutdownNow}.
 * After that, {@code join} continues to wait for the
 * interrupted threads to finish, so if jobs do not obey
 * interruptions, they can continue indefinitely regardless of the
 * timeout.
 *
 * @return a list of jobs which had never been started if
 * {@code timeout} was reached, or an empty list if that did not
 * happen.
 */
public List<I> joinWithTimeout() {
    // A non-positive timeout means "wait indefinitely": delegate to join().
    if (timeout <= 0) {
        join();
        return new ArrayList<>();
    }
    // Make blocking calls to the last processes that are running
    if (!threadPool.isShutdown()) {
        try {
            List<I> leftover = null;
            int i;
            // Wait up to `timeout` ms for each worker to signal idle.  If any
            // worker fails to show up in time, cancel the remaining work.
            for (i = nThreads; i > 0; --i) {
                if (idleProcessors.poll(timeout, TimeUnit.MILLISECONDS) == null) {
                    leftover = shutdownNow();
                    break;
                }
            }
            // i > 0 here iff we broke out above, so leftover is non-null in
            // the branch below.
            // so join() can guarantee the threads are finished
            if (i > 0) {
                // Block for the remaining idle signals.  Presumably each
                // never-started job in `leftover` corresponds to a worker
                // that will not signal idle — TODO confirm this invariant.
                for (; i > leftover.size(); --i) {
                    idleProcessors.take();
                }
                return leftover;
            } else {
                threadPool.shutdown();
                // Sanity check. The threadpool should be done after iterating over
                // the processors.
                // NOTE(review): the boolean result of awaitTermination is
                // ignored, so a timeout here passes silently.
                threadPool.awaitTermination(10, TimeUnit.SECONDS);
            }
        } catch (InterruptedException e) {
            // Preserve the cause; callers treat interruption as unchecked.
            throw new RuntimeInterruptedException(e);
        }
    }
    // Normal completion: nothing was left unstarted.
    return new ArrayList<>();
}
Also used : RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException) ArrayList(java.util.ArrayList) RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException)

Example 7 with RuntimeInterruptedException

use of edu.stanford.nlp.util.RuntimeInterruptedException in project CoreNLP by stanfordnlp.

From the class LexicalizedParserQuery, method parseInternal:

/**
 * Parse a sentence represented as a List of tokens.
 * The text must already have been tokenized and
 * normalized into tokens that are appropriate to the treebank
 * which was used to train the parser.  The tokens can be of
 * multiple types, and the list items need not be homogeneous as to type
 * (in particular, only some words might be given tags):
 * <ul>
 * <li>If a token implements HasWord, then the word to be parsed is
 * given by its word() value.</li>
 * <li>If a token implements HasTag and the tag() value is not
 * null or the empty String, then the parser is strongly advised to assign
 * a part of speech tag that <i>begins</i> with this String.</li>
 * </ul>
 *
 * @param sentence The sentence to parse
 * @return true Iff the sentence was accepted by the grammar
 * @throws UnsupportedOperationException If the Sentence is too long or
 *                                       of zero length or the parse
 *                                       otherwise fails for resource reasons
 */
private boolean parseInternal(List<? extends HasWord> sentence) {
    // Reset all per-parse status flags before doing any work.
    parseSucceeded = false;
    parseNoMemory = false;
    parseUnparsable = false;
    parseSkipped = false;
    parseFallback = false;
    whatFailed = null;
    addedPunct = false;
    originalSentence = sentence;
    int length = sentence.size();
    if (length == 0) {
        parseSkipped = true;
        throw new UnsupportedOperationException("Can't parse a zero-length sentence!");
    }
    // Build a working copy (sentenceB); the caller's list is never mutated.
    List<HasWord> sentenceB;
    if (op.wordFunction != null) {
        // Deep-copy each token so wordFunction can rewrite words without
        // touching the caller's token objects.
        sentenceB = Generics.newArrayList();
        for (HasWord word : originalSentence) {
            if (word instanceof Label) {
                Label label = (Label) word;
                Label newLabel = label.labelFactory().newLabel(label);
                if (newLabel instanceof HasWord) {
                    sentenceB.add((HasWord) newLabel);
                } else {
                    throw new AssertionError("This should have been a HasWord");
                }
            } else if (word instanceof HasTag) {
                // Preserve any supplied tag hint in the copy.
                TaggedWord tw = new TaggedWord(word.word(), ((HasTag) word).tag());
                sentenceB.add(tw);
            } else {
                sentenceB.add(new Word(word.word()));
            }
        }
        // Apply the word-normalization function to the copies.
        for (HasWord word : sentenceB) {
            word.setWord(op.wordFunction.apply(word.word()));
        }
    } else {
        sentenceB = new ArrayList<>(sentence);
    }
    if (op.testOptions.addMissingFinalPunctuation) {
        addedPunct = addSentenceFinalPunctIfNeeded(sentenceB, length);
    }
    // Length check uses the original token count, before any punctuation or
    // boundary symbol is appended.
    if (length > op.testOptions.maxLength) {
        parseSkipped = true;
        throw new UnsupportedOperationException("Sentence too long: length " + length);
    }
    TreePrint treePrint = getTreePrint();
    PrintWriter pwOut = op.tlpParams.pw();
    // Insert the boundary symbol
    if (sentence.get(0) instanceof CoreLabel) {
        // Match the token type of the input so downstream consumers see a
        // homogeneous list.
        CoreLabel boundary = new CoreLabel();
        boundary.setWord(Lexicon.BOUNDARY);
        boundary.setValue(Lexicon.BOUNDARY);
        boundary.setTag(Lexicon.BOUNDARY_TAG);
        // 1-based indexing used in the parser
        boundary.setIndex(sentence.size() + 1);
        sentenceB.add(boundary);
    } else {
        sentenceB.add(new TaggedWord(Lexicon.BOUNDARY, Lexicon.BOUNDARY_TAG));
    }
    // Check for external interruption between the expensive parsing stages.
    if (Thread.interrupted()) {
        throw new RuntimeInterruptedException();
    }
    if (op.doPCFG) {
        if (!pparser.parse(sentenceB)) {
            // PCFG stage failed; parseSucceeded is still false here.
            return parseSucceeded;
        }
        if (op.testOptions.verbose) {
            pwOut.println("PParser output");
            // getBestPCFGParse(false).pennPrint(pwOut); // with scores on nodes
            // without scores on nodes
            treePrint.printTree(getBestPCFGParse(false), pwOut);
        }
    }
    if (Thread.interrupted()) {
        throw new RuntimeInterruptedException();
    }
    if (op.doDep && !op.testOptions.useFastFactored) {
        if (!dparser.parse(sentenceB)) {
            return parseSucceeded;
        }
        // so don't calculate the best parse twice!
        if (op.testOptions.verbose) {
            pwOut.println("DParser output");
            treePrint.printTree(dparser.getBestParse(), pwOut);
        }
    }
    if (Thread.interrupted()) {
        throw new RuntimeInterruptedException();
    }
    if (op.doPCFG && op.doDep) {
        // Combined stage: this is the only branch that sets parseSucceeded.
        if (!bparser.parse(sentenceB)) {
            return parseSucceeded;
        } else {
            parseSucceeded = true;
        }
    }
    // NOTE(review): when only one of doPCFG/doDep is enabled, this returns
    // true without setting parseSucceeded — confirm that is intended.
    return true;
}
Also used : HasWord(edu.stanford.nlp.ling.HasWord) HasWord(edu.stanford.nlp.ling.HasWord) TaggedWord(edu.stanford.nlp.ling.TaggedWord) Word(edu.stanford.nlp.ling.Word) RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException) Label(edu.stanford.nlp.ling.Label) CoreLabel(edu.stanford.nlp.ling.CoreLabel) HasTag(edu.stanford.nlp.ling.HasTag) TreePrint(edu.stanford.nlp.trees.TreePrint) TreePrint(edu.stanford.nlp.trees.TreePrint) ParserConstraint(edu.stanford.nlp.parser.common.ParserConstraint) CoreLabel(edu.stanford.nlp.ling.CoreLabel) TaggedWord(edu.stanford.nlp.ling.TaggedWord) PrintWriter(java.io.PrintWriter)

Example 8 with RuntimeInterruptedException

use of edu.stanford.nlp.util.RuntimeInterruptedException in project CoreNLP by stanfordnlp.

From the class LogConditionalObjectiveFunction, method rvfcalculate:

/**
 * Calculate conditional likelihood for datasets with real-valued features.
 * Currently this can calculate CL only (no support for SCL).
 * TODO: sum-conditional obj. fun. with RVFs.
 */
protected void rvfcalculate(double[] x) {
    value = 0.0;
    // Empirical (observed) feature counts form the constant part of the
    // gradient; computed lazily once and cached in derivativeNumerator.
    // This is only calculated once per training run, not worth the effort to multi-thread properly
    if (derivativeNumerator == null) {
        derivativeNumerator = new double[x.length];
        for (int d = 0; d < data.length; d++) {
            final int[] features = data[d];
            final double[] vals = values[d];
            for (int f = 0; f < features.length; f++) {
                // Flattened (feature, gold-label) index into the weights.
                int i = indexOf(features[f], labels[d]);
                if (dataWeights == null) {
                    derivativeNumerator[i] -= vals[f];
                } else {
                    derivativeNumerator[i] -= dataWeights[d] * vals[f];
                }
            }
        }
    }
    // Start the gradient from the cached empirical-count term.
    copy(derivative, derivativeNumerator);
    if (parallelGradientCalculation && threads > 1) {
        // Launch several threads (reused out of our fixed pool) to handle the computation
        // NOTE(review): despite the comment above, fresh Thread objects are
        // created on each call rather than reused from a pool.
        @SuppressWarnings("unchecked") RVFDerivativeCalculation[] runnables = (RVFDerivativeCalculation[]) Array.newInstance(RVFDerivativeCalculation.class, threads);
        CountDownLatch latch = new CountDownLatch(threads);
        for (int i = 0; i < threads; i++) {
            runnables[i] = new RVFDerivativeCalculation(threads, i, x, derivative.length, latch);
            new Thread(runnables[i]).start();
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            throw new RuntimeInterruptedException(e);
        }
        // Merge the per-thread partial values and gradients.
        for (int i = 0; i < threads; i++) {
            value += runnables[i].localValue;
            for (int j = 0; j < derivative.length; j++) {
                derivative[j] += runnables[i].localDerivative[j];
            }
        }
    } else {
        // Do the calculation locally on this thread
        double[] sums = new double[numClasses];
        double[] probs = new double[numClasses];
        for (int d = 0; d < data.length; d++) {
            final int[] features = data[d];
            final double[] vals = values[d];
            // activation
            Arrays.fill(sums, 0.0);
            for (int f = 0; f < features.length; f++) {
                final int feature = features[f];
                final double val = vals[f];
                for (int c = 0; c < numClasses; c++) {
                    int i = indexOf(feature, c);
                    sums[c] += x[i] * val;
                }
            }
            // expectation (slower routine replaced by fast way)
            // double total = Double.NEGATIVE_INFINITY;
            // for (int c=0; c<numClasses; c++) {
            // total = SloppyMath.logAdd(total, sums[c]);
            // }
            // it is faster to split these two loops. More striding
            double total = ArrayMath.logSum(sums);
            for (int c = 0; c < numClasses; c++) {
                // Softmax probability of class c for this datum.
                probs[c] = Math.exp(sums[c] - total);
                if (dataWeights != null) {
                    probs[c] *= dataWeights[d];
                }
            }
            // Expected feature counts under the model: the positive part of
            // the gradient.
            for (int f = 0; f < features.length; f++) {
                final int feature = features[f];
                final double val = vals[f];
                for (int c = 0; c < numClasses; c++) {
                    int i = indexOf(feature, c);
                    derivative[i] += probs[c] * val;
                }
            }
            // Accumulate the negative log-likelihood of the gold label.
            double dV = sums[labels[d]] - total;
            if (dataWeights != null) {
                dV *= dataWeights[d];
            }
            value -= dV;
        }
    }
    // Add the regularization (prior) term to both value and gradient.
    value += prior.compute(x, derivative);
}
Also used : RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException) CountDownLatch(java.util.concurrent.CountDownLatch) RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException)

Example 9 with RuntimeInterruptedException

use of edu.stanford.nlp.util.RuntimeInterruptedException in project CoreNLP by stanfordnlp.

From the class LogConditionalObjectiveFunction, method calculateStochasticUpdate:

/**
 * Performs one stochastic-gradient update of {@code x} in place over the
 * given batch, scaled by {@code gain}, and returns the accumulated
 * objective value (negative log conditional likelihood of the batch).
 *
 * @param x      the weight vector, updated in place
 * @param xscale scale factor applied to the weights when computing scores
 * @param batch  indices of the data points making up this batch
 * @param gain   learning-rate multiplier for the update
 * @return the objective value accumulated over the batch
 */
@Override
public double calculateStochasticUpdate(double[] x, double xscale, int[] batch, double gain) {
    value = 0.0;
    if (parallelGradientCalculation && threads > 1) {
        // Parallelism only pays off with enough examples per CPU; otherwise
        // permanently disable it for the remainder of this run.
        int examplesPerProcessor = 50;
        if (batch.length <= Runtime.getRuntime().availableProcessors() * examplesPerProcessor) {
            log.info("\n\n***************");
            log.info("CONFIGURATION ERROR: YOUR BATCH SIZE DOESN'T MEET PARALLEL MINIMUM SIZE FOR PERFORMANCE");
            log.info("Batch size: " + batch.length);
            log.info("CPUS: " + Runtime.getRuntime().availableProcessors());
            log.info("Minimum batch size per CPU: " + examplesPerProcessor);
            log.info("MINIMIM BATCH SIZE ON THIS MACHINE: " + (Runtime.getRuntime().availableProcessors() * examplesPerProcessor));
            log.info("TURNING OFF PARALLEL GRADIENT COMPUTATION");
            log.info("***************\n");
            parallelGradientCalculation = false;
        }
    }
    if (parallelGradientCalculation && threads > 1) {
        // Launch several threads (reused out of our fixed pool) to handle the computation
        @SuppressWarnings("unchecked") CLBatchDerivativeCalculation[] runnables = (CLBatchDerivativeCalculation[]) Array.newInstance(CLBatchDerivativeCalculation.class, threads);
        CountDownLatch latch = new CountDownLatch(threads);
        for (int i = 0; i < threads; i++) {
            runnables[i] = new CLBatchDerivativeCalculation(threads, i, batch, x, x.length, latch);
            new Thread(runnables[i]).start();
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            throw new RuntimeInterruptedException(e);
        }
        // Merge partial values and apply each thread's gradient to x.
        for (int i = 0; i < threads; i++) {
            value += runnables[i].localValue;
            for (int j = 0; j < x.length; j++) {
                x[j] += runnables[i].localDerivative[j] * xscale * gain;
            }
        }
    } else {
        double[] sums = new double[numClasses];
        double[] probs = new double[numClasses];
        for (int m : batch) {
            // Sets the index based on the current batch
            int[] features = data[m];
            // activation
            Arrays.fill(sums, 0.0);
            for (int c = 0; c < numClasses; c++) {
                for (int f = 0; f < features.length; f++) {
                    int i = indexOf(features[f], c);
                    if (values != null) {
                        sums[c] += x[i] * xscale * values[m][f];
                    } else {
                        // Binary features: the feature value is implicitly 1.
                        sums[c] += x[i] * xscale;
                    }
                }
            }
            // Empirical counts: push weights toward the gold label's features.
            for (int f = 0; f < features.length; f++) {
                int i = indexOf(features[f], labels[m]);
                double v = (values != null) ? values[m][f] : 1;
                double delta = (dataWeights != null) ? dataWeights[m] * v : v;
                x[i] += delta * gain;
            }
            double total = ArrayMath.logSum(sums);
            // Expected counts: push weights away from every label in
            // proportion to the model's probability of that label.
            for (int c = 0; c < numClasses; c++) {
                probs[c] = Math.exp(sums[c] - total);
                if (dataWeights != null) {
                    probs[c] *= dataWeights[m];
                }
                for (int f = 0; f < features.length; f++) {
                    int i = indexOf(features[f], c);
                    double v = (values != null) ? values[m][f] : 1;
                    double delta = probs[c] * v;
                    x[i] -= delta * gain;
                }
            }
            // Accumulate negative log-likelihood of the gold label.
            double dV = sums[labels[m]] - total;
            if (dataWeights != null) {
                dV *= dataWeights[m];
            }
            value -= dV;
        }
    }
    return value;
}
Also used : RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException) CountDownLatch(java.util.concurrent.CountDownLatch) RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException)

Example 10 with RuntimeInterruptedException

use of edu.stanford.nlp.util.RuntimeInterruptedException in project CoreNLP by stanfordnlp.

From the class LogConditionalObjectiveFunction, method calculateCLbatch:

/**
 * Computes the negative log conditional likelihood over the full data set
 * for binary-valued features, accumulating the result into {@code value}
 * and the gradient into {@code derivative}.
 *
 * @param x the current weight vector (read only)
 */
private void calculateCLbatch(double[] x) {
    value = 0.0;
    // The empirical-count half of the gradient never changes between calls,
    // so it is computed lazily once and cached in derivativeNumerator.
    if (derivativeNumerator == null) {
        derivativeNumerator = new double[x.length];
        for (int datum = 0; datum < data.length; datum++) {
            double weight = (dataWeights == null) ? 1.0 : dataWeights[datum];
            for (int feature : data[datum]) {
                derivativeNumerator[indexOf(feature, labels[datum])] -= weight;
            }
        }
    }
    // Seed the gradient with the cached empirical counts.
    copy(derivative, derivativeNumerator);
    if (parallelGradientCalculation && threads > 1) {
        // Fan the expectation computation out across worker threads, then
        // merge their partial results.
        @SuppressWarnings("unchecked") CLBatchDerivativeCalculation[] workers = (CLBatchDerivativeCalculation[]) Array.newInstance(CLBatchDerivativeCalculation.class, threads);
        CountDownLatch doneSignal = new CountDownLatch(threads);
        for (int t = 0; t < threads; t++) {
            workers[t] = new CLBatchDerivativeCalculation(threads, t, null, x, derivative.length, doneSignal);
            new Thread(workers[t]).start();
        }
        try {
            doneSignal.await();
        } catch (InterruptedException e) {
            throw new RuntimeInterruptedException(e);
        }
        for (CLBatchDerivativeCalculation worker : workers) {
            value += worker.localValue;
            for (int j = 0; j < derivative.length; j++) {
                derivative[j] += worker.localDerivative[j];
            }
        }
    } else {
        // Single-threaded path.
        double[] activations = new double[numClasses];
        double[] classProbs = new double[numClasses];
        for (int datum = 0; datum < data.length; datum++) {
            Arrays.fill(activations, 0.0);
            int[] featuresArr = data[datum];
            // Class scores: sum of the weights of each active feature.
            for (int feature : featuresArr) {
                for (int c = 0; c < numClasses; c++) {
                    activations[c] += x[indexOf(feature, c)];
                }
            }
            // Normalize in log space, then exponentiate to probabilities.
            double logZ = ArrayMath.logSum(activations);
            for (int c = 0; c < numClasses; c++) {
                classProbs[c] = Math.exp(activations[c] - logZ);
                if (dataWeights != null) {
                    classProbs[c] *= dataWeights[datum];
                }
            }
            // Expected feature counts: the positive half of the gradient.
            for (int feature : featuresArr) {
                for (int c = 0; c < numClasses; c++) {
                    derivative[indexOf(feature, c)] += classProbs[c];
                }
            }
            // Accumulate the negative log-likelihood of the gold label.
            double logLik = activations[labels[datum]] - logZ;
            if (dataWeights != null) {
                logLik *= dataWeights[datum];
            }
            value -= logLik;
        }
    }
    // Add the regularization (prior) term to both value and gradient.
    value += prior.compute(x, derivative);
}
Also used : RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException) CountDownLatch(java.util.concurrent.CountDownLatch) RuntimeInterruptedException(edu.stanford.nlp.util.RuntimeInterruptedException)

Aggregations

RuntimeInterruptedException (edu.stanford.nlp.util.RuntimeInterruptedException)14 List (java.util.List)3 CountDownLatch (java.util.concurrent.CountDownLatch)3 CoreAnnotations (edu.stanford.nlp.ling.CoreAnnotations)2 CoreLabel (edu.stanford.nlp.ling.CoreLabel)2 ParserConstraint (edu.stanford.nlp.parser.common.ParserConstraint)2 ClassicCounter (edu.stanford.nlp.stats.ClassicCounter)2 Pair (edu.stanford.nlp.util.Pair)2 ArrayList (java.util.ArrayList)2 HashMap (java.util.HashMap)2 Map (java.util.Map)2 MentionType (edu.stanford.nlp.coref.data.Dictionaries.MentionType)1 Compressor (edu.stanford.nlp.coref.statistical.Compressor)1 DocumentExamples (edu.stanford.nlp.coref.statistical.DocumentExamples)1 Example (edu.stanford.nlp.coref.statistical.Example)1 HasTag (edu.stanford.nlp.ling.HasTag)1 HasWord (edu.stanford.nlp.ling.HasWord)1 Label (edu.stanford.nlp.ling.Label)1 TaggedWord (edu.stanford.nlp.ling.TaggedWord)1 Word (edu.stanford.nlp.ling.Word)1