Example 1 with FeatureVectorBuffer

Use of edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer in project cogcomp-nlp by CogComp.

The class LabeledDepFeatureGenerator, method getLabeledEdgeFeatures:

public FeatureVectorBuffer getLabeledEdgeFeatures(int head, int dep, DepInst sent, String deprel) {
    FeatureVectorBuffer feat = featureVectorBufferFromFeature(getLabeledEdgeFeatureSet(head, dep, sent, deprel));
    // Math.pow(2, 0) == 1: shift every feature index in this block by one
    feat.shift((int) Math.pow(2, 0));
    return feat;
}
Also used : FeatureVectorBuffer(edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer)
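
The shift of 2^0 = 1 above offsets this labeled-edge feature block so it can coexist with other feature blocks inside one joint vector. The following is a minimal, hypothetical sketch of that composition pattern, not the project's actual getCombineEdgeFeatures: the buffer arguments and the LABELED_BLOCK_OFFSET constant are assumptions.

import edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer;
import edu.illinois.cs.cogcomp.sl.util.IFeatureVector;

class FeatureBlockSketch {
    // Hypothetical offset separating the two feature blocks in the joint index space.
    static final int LABELED_BLOCK_OFFSET = 1 << 20;

    // Merge an unshifted feature block and a labeled-edge block into one joint feature vector.
    static IFeatureVector combine(FeatureVectorBuffer unlabeledEdge, FeatureVectorBuffer labeledEdge) {
        FeatureVectorBuffer joint = new FeatureVectorBuffer();
        joint.addFeature(unlabeledEdge.toFeatureVector(false), 0);                  // block starting at index 0
        joint.addFeature(labeledEdge.toFeatureVector(false), LABELED_BLOCK_OFFSET); // block shifted past the first
        return joint.toFeatureVector();
    }
}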

Example 2 with FeatureVectorBuffer

Use of edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer in project cogcomp-nlp by CogComp.

The class LabeledChuLiuEdmondsDecoder, method predictLabel:

private String predictLabel(int head, int node, DepInst ins, WeightVector weight) {
    // a head of -1 marks an unassigned/invalid head; valid heads range over 0..n (0 is the artificial root)
    if (head == -1)
        throw new IllegalArgumentException("Invalid arc, head must be positive!");
    String rel = null;
    float max = Float.NEGATIVE_INFINITY;
    String keyPOS = ins.strPos[head] + " " + ins.strPos[node];
    Set<String> candidates = new HashSet<>();
    // restrict candidates to relations previously observed for this (head POS, child POS) pair
    if (deprelDict.get(keyPOS) != null)
        candidates.addAll(deprelDict.get(keyPOS));
    if (candidates.size() == 1)
        return candidates.iterator().next();
    else if (candidates.isEmpty()) {
        // fall back to the P relation for punctuation POS tags; otherwise consider every relation
        if (keyPOS.contains("."))
            return "P";
        candidates.addAll(ALL_RELS);
    }
    for (String candidate : candidates) {
        FeatureVectorBuffer edgefv = depfeat.getLabeledEdgeFeatures(head, node, ins, candidate);
        float decision = weight.dotProduct(edgefv.toFeatureVector(false));
        if (decision > max) {
            rel = candidate;
            max = decision;
        }
    }
    return rel;
}
Also used : FeatureVectorBuffer(edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer)
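
The deprelDict lookup above prunes the label candidates to relations that have been seen with the given head/child POS pair. How that dictionary is filled is not shown in this snippet; the sketch below is one hypothetical way to build such a POS-pair-to-relations map from observed arcs (it is not the project's initialization code).

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class DeprelDictSketch {
    // "headPOS childPOS" -> dependency relations observed with that POS pair.
    final Map<String, Set<String>> deprelDict = new HashMap<>();

    // Record one observed arc; calling this over a training treebank yields the pruning dictionary.
    void observe(String headPos, String childPos, String deprel) {
        String key = headPos + " " + childPos;
        deprelDict.computeIfAbsent(key, k -> new HashSet<>()).add(deprel);
    }
}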

Example 3 with FeatureVectorBuffer

Use of edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer in project cogcomp-nlp by CogComp.

The class CommaSequenceFeatureGenerator, method getFeatureVector:

/**
     * This function returns a feature vector \Phi(x,y) based on an instance-structure pair.
     * 
     * @return Feature Vector \Phi(x,y), where x is the input instance and y is the output structure
     */
@Override
public IFeatureVector getFeatureVector(IInstance x, IStructure y) {
    // lexicon should have been completely built while reading the problem instances itself
    assert !lexicon.isAllowNewFeatures();
    CommaSequence commaSequence = (CommaSequence) x;
    CommaLabelSequence commaLabelSequence = (CommaLabelSequence) y;
    FeatureVectorBuffer fv = new FeatureVectorBuffer();
    int len = commaSequence.sortedCommas.size();
    /*
     * for (Comma comma : commaSequence.sortedCommas) {
     *     FeatureVector lbjFeatureVector = lbjExtractor.classify(comma);
     *     for (int i = 0; i < lbjFeatureVector.featuresSize(); i++) {
     *         String emittedFeatureString = lbjFeatureVector.getFeature(i).toString();
     *         lexicon.addFeature(emittedFeatureString);
     *         fv.addFeature(lexicon.getFeatureId(emittedFeatureString), 1);
     *     }
     * }
     *
     * String startLabel = commaLabelSequence.commaLabels.get(0);
     * lexicon.addFeature(startLabel);
     * fv.addFeature(lexicon.getFeatureId(startLabel), 1);
     *
     * for (int i = 1; i < commaLabelSequence.commaLabels.size(); i++) {
     *     String previousLabel = commaLabelSequence.commaLabels.get(i - 1);
     *     String currentLabel = commaLabelSequence.commaLabels.get(i);
     *     String transitionFeatureString = previousLabel + "---" + currentLabel;
     *     lexicon.addFeature(transitionFeatureString);
     *     fv.addFeature(lexicon.getFeatureId(transitionFeatureString), 1);
     * }
     */
    int[] tags = commaLabelSequence.labelIds;
    IFeatureVector[] baseFeatures = commaSequence.baseFeatures;
    int numOfEmissionFeatures = lexicon.getNumOfFeature();
    int numOfLabels = lexicon.getNumOfLabels();
    // add emission features: each token's base features, offset by numOfEmissionFeatures * its label id
    for (int i = 0; i < len; i++) {
        fv.addFeature(baseFeatures[i], numOfEmissionFeatures * tags[i]);
    }
    // add prior feature
    int emissionOffset = numOfEmissionFeatures * numOfLabels;
    fv.addFeature(emissionOffset + tags[0], 1.0f);
    // add transition features: one indicator per (previous label, current label) pair
    int priorEmissionOffset = emissionOffset + numOfLabels;
    for (int i = 1; i < len; i++) {
        // TODO: a label id of 0 can't be allowed here, since the product tags[i - 1] * numOfLabels would be 0
        fv.addFeature(priorEmissionOffset + (tags[i - 1] * numOfLabels + tags[i]), 1.0f);
    }
    return fv.toFeatureVector();
}
Also used : FeatureVectorBuffer(edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer) IFeatureVector(edu.illinois.cs.cogcomp.sl.util.IFeatureVector)
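
The indices built above follow a three-block layout: an emission block, then one prior slot per label, then the label-transition indicators. The helper below is a small standalone sketch of that index arithmetic (it is not part of the project); it reproduces the offsets used in the method above.

class SequenceFeatureIndexSketch {
    // Index layout assumed above, with E emission features and L labels:
    //   [0, E*L)                 emission block:   base feature id + E * labelId
    //   [E*L, E*L + L)           prior block:      one slot per start label
    //   [E*L + L, E*L + L + L*L) transition block: prevLabel * L + currLabel
    static int transitionFeatureIndex(int numEmissionFeatures, int numLabels, int prevLabel, int currLabel) {
        int priorOffset = numEmissionFeatures * numLabels; // start of the prior block
        int transitionOffset = priorOffset + numLabels;    // start of the transition block
        return transitionOffset + prevLabel * numLabels + currLabel;
    }
}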

Example 4 with FeatureVectorBuffer

Use of edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer in project cogcomp-nlp by CogComp.

The class LabeledChuLiuEdmondsDecoder, method getLossAugmentedBestStructure:

@Override
public IStructure getLossAugmentedBestStructure(WeightVector weight, IInstance ins, IStructure goldStructure) throws Exception {
    DepInst sent = (DepInst) ins;
    DepStruct gold = goldStructure != null ? (DepStruct) goldStructure : null;
    // edgeScore[i][j] score of edge from head i to modifier j
    // i (head) varies from 0..n, while j (token idx) varies over 1..n
    double[][] edgeScore = new double[sent.size() + 1][sent.size() + 1];
    String[][] edgeLabel = new String[sent.size() + 1][sent.size() + 1];
    initEdge(edgeScore, edgeLabel);
    for (int head = 0; head <= sent.size(); head++) {
        for (int j = 1; j <= sent.size(); j++) {
            if (head == j) {
                edgeScore[head][j] = Double.NEGATIVE_INFINITY;
                continue;
            }
            String deprel = predictLabel(head, j, sent, weight);
            edgeLabel[head][j] = deprel;
            FeatureVectorBuffer edgefv = depfeat.getCombineEdgeFeatures(head, j, sent, deprel);
            // edge from head i to modifier j
            edgeScore[head][j] = weight.dotProduct(edgefv.toFeatureVector(false));
            if (gold != null) {
                // incur loss if the head or label disagrees with gold
                if (gold.heads[j] != head || !deprel.equals(gold.deprels[j]))
                    edgeScore[head][j] += 1.0f;
            }
        }
    }
    return LabeledChuLiuEdmonds(edgeScore, edgeLabel);
}
Also used : FeatureVectorBuffer(edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer)
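
The loss augmentation above adds a unit (Hamming-style) penalty to every candidate edge that disagrees with the gold arc, either in its head or in its label. A minimal sketch of that adjustment, factored out of the loop and expressed over plain arrays (this helper is not part of the project):

class LossAugmentationSketch {
    // Loss-augmented score of proposing arc head -> j with label deprel, given gold heads/labels.
    static double lossAugmentedScore(double modelScore, int head, int j, String deprel,
                                     int[] goldHeads, String[] goldDeprels) {
        if (goldHeads == null)
            return modelScore; // no gold structure available: plain (un-augmented) inference
        boolean wrongArc = goldHeads[j] != head || !deprel.equals(goldDeprels[j]);
        return wrongArc ? modelScore + 1.0 : modelScore;
    }
}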

Example 5 with FeatureVectorBuffer

Use of edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer in project cogcomp-nlp by CogComp.

The class LabeledDepFeatureGenerator, method featureVectorBufferFromFeature:

private FeatureVectorBuffer featureVectorBufferFromFeature(Set<Feature> features) {
    Map<String, Float> featureMap = new HashMap<>();
    // keep only features already registered in the lexicon; unseen features are dropped
    for (Feature f : features) {
        if (lm.containFeature(f.getName()))
            featureMap.put(f.getName(), f.getValue());
    }
    SparseFeatureVector sfv = (SparseFeatureVector) lm.convertToFeatureVector(featureMap);
    return new FeatureVectorBuffer(sfv);
}
Also used : SparseFeatureVector(edu.illinois.cs.cogcomp.sl.util.SparseFeatureVector) HashMap(java.util.HashMap) FeatureVectorBuffer(edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer) Feature(edu.illinois.cs.cogcomp.edison.features.Feature) DiscreteFeature(edu.illinois.cs.cogcomp.edison.features.DiscreteFeature)
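
Taken together, Examples 1, 2, and 5 describe one scoring path: edison Features are filtered through the lexicon into a FeatureVectorBuffer (Example 5), shifted into the labeled-edge block (Example 1), and scored against the weight vector (Example 2). Below is a hedged sketch of that final scoring step; the project-specific types LabeledDepFeatureGenerator and DepInst are assumed to be on the classpath and their imports are omitted here.

import edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer;
import edu.illinois.cs.cogcomp.sl.util.WeightVector;

class EdgeScoringSketch {
    // Score one labeled candidate arc (head -> node, labeled deprel) under the current weights.
    static float scoreEdge(LabeledDepFeatureGenerator featureGen, WeightVector weight,
                           int head, int node, DepInst sentence, String deprel) {
        FeatureVectorBuffer buffer = featureGen.getLabeledEdgeFeatures(head, node, sentence, deprel);
        return weight.dotProduct(buffer.toFeatureVector(false));
    }
}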

Aggregations

FeatureVectorBuffer (edu.illinois.cs.cogcomp.sl.util.FeatureVectorBuffer): 6
DiscreteFeature (edu.illinois.cs.cogcomp.edison.features.DiscreteFeature): 1
Feature (edu.illinois.cs.cogcomp.edison.features.Feature): 1
IFeatureVector (edu.illinois.cs.cogcomp.sl.util.IFeatureVector): 1
SparseFeatureVector (edu.illinois.cs.cogcomp.sl.util.SparseFeatureVector): 1
HashMap (java.util.HashMap): 1