Use of de.bwaldvogel.liblinear.FeatureNode in project Tribuo by Oracle.
From the class LibLinearRegressionModel, method predict.
@Override
public Prediction<Regressor> predict(Example<Regressor> example) {
    FeatureNode[] nodes = LibLinearTrainer.exampleToNodes(example, featureIDMap, null);
    // exampleToNodes always appends a bias node, so a length of exactly 1
    // means the example contributed no real features.
    if (nodes.length == 1) {
        throw new IllegalArgumentException("No features found in Example " + example.toString());
    }
    double[] decisionValues = new double[models.get(0).getNrClass()];
    double[] outputs = new double[models.size()];
    // One liblinear model per regression dimension; `mapping` reorders the
    // per-model outputs into the dimension order used by `dimensionNames`.
    for (int modelIdx = 0; modelIdx < outputs.length; modelIdx++) {
        outputs[mapping[modelIdx]] = Linear.predictValues(models.get(modelIdx), nodes, decisionValues);
    }
    Regressor result = new Regressor(dimensionNames, outputs);
    // nodes.length - 1 excludes the bias node from the used-feature count.
    return new Prediction<>(result, nodes.length - 1, example);
}
Use of de.bwaldvogel.liblinear.FeatureNode in project Tribuo by Oracle.
From the class LibLinearTrainer, method exampleToNodes.
/**
 * Converts a Tribuo {@link Example} into a liblinear {@code FeatureNode} array, appending a bias feature.
 * <p>
 * If two features map to the same id (e.g., due to feature hashing or another
 * id-remapping mechanism) their values are summed into a single node.
 * @param example The input example.
 * @param featureIDMap The feature id map containing the example's indices.
 * @param features A reusable buffer; if null a fresh array list is used internally.
 * @param <T> The output type.
 * @return The features in liblinear's representation, sorted by index.
 */
public static <T extends Output<T>> FeatureNode[] exampleToNodes(Example<T> example, ImmutableFeatureMap featureIDMap, List<FeatureNode> features) {
    // The bias always lives one past the largest possible feature index.
    int biasIndex = featureIDMap.size() + 1;
    List<FeatureNode> buffer = (features == null) ? new ArrayList<>() : features;
    buffer.clear();
    int lastId = -1;
    for (Feature feature : example) {
        int id = featureIDMap.getID(feature.getName());
        if (id > lastId) {
            // Common path: ids arrive in increasing order, append at the end.
            lastId = id;
            buffer.add(new FeatureNode(id + 1, feature.getValue()));
        } else if (id > -1) {
            // Out-of-order id: a collision with an earlier feature's id.
            int pos = Util.binarySearch(buffer, id + 1, FeatureNode::getIndex);
            if (pos < 0) {
                // Collision id not yet in the buffer; insert at its sorted position.
                buffer.add(-(pos + 1), new FeatureNode(id + 1, feature.getValue()));
            } else {
                // Collision id already present; accumulate the values.
                FeatureNode existing = buffer.get(pos);
                existing.setValue(existing.getValue() + feature.getValue());
            }
        }
        // id == -1: feature unknown to the map, silently dropped.
    }
    // liblinear expects an explicit bias feature with value 1.0.
    buffer.add(new FeatureNode(biasIndex, 1.0));
    return buffer.toArray(new FeatureNode[0]);
}
Use of de.bwaldvogel.liblinear.FeatureNode in project Talismane by joliciel-informatique.
From the class LinearSVMModelTrainer, method addFeatureResult.
void addFeatureResult(String featureName, double value, Map<Integer, Feature> featureList, TObjectIntMap<String> featureIndexMap, TIntIntMap featureCountMap, CountingInfo countingInfo) {
    // Look up (or allocate) the numeric index for this feature name.
    // NOTE(review): the missing-key check relies on the map's no-entry value
    // being negative — confirm the TObjectIntMap was built with a negative
    // noEntryValue, since Trove's default is 0.
    int index = featureIndexMap.get(featureName);
    if (index < 0) {
        index = countingInfo.currentFeatureIndex++;
        featureIndexMap.put(featureName, index);
    }
    if (cutoff > 1) {
        // Track occurrence counts so rare features can be pruned at the cutoff.
        int count = featureCountMap.get(index) + 1;
        if (count == cutoff)
            countingInfo.featureCountOverCutoff++;
        featureCountMap.put(index, count);
    }
    // Repeated additions of the same feature accumulate into one node.
    Feature existing = featureList.get(index);
    if (existing == null) {
        featureList.put(index, new FeatureNode(index, value));
    } else {
        FeatureNode node = (FeatureNode) existing;
        node.setValue(node.getValue() + value);
    }
}
Use of de.bwaldvogel.liblinear.FeatureNode in project Talismane by joliciel-informatique.
From the class LinearSVMUtils, method prepareData.
public static List<Feature> prepareData(List<FeatureResult<?>> featureResults, TObjectIntMap<String> featureIndexMap) {
    List<Feature> nodes = new ArrayList<Feature>(featureResults.size());
    for (FeatureResult<?> result : featureResults) {
        if (result.getOutcome() instanceof List) {
            // String-collection feature: emit one weighted node per outcome string.
            @SuppressWarnings("unchecked")
            FeatureResult<List<WeightedOutcome<String>>> collectionResult = (FeatureResult<List<WeightedOutcome<String>>>) result;
            for (WeightedOutcome<String> outcome : collectionResult.getOutcome()) {
                int index = featureIndexMap.get(result.getTrainingName() + "|" + result.getTrainingOutcome(outcome.getOutcome()));
                // Only features seen at training time have an index (>= 0).
                if (index >= 0) {
                    nodes.add(new FeatureNode(index, outcome.getWeight()));
                }
            }
        } else {
            // Scalar feature: doubles keep their value, everything else gets 1.0.
            double value = 1.0;
            if (result.getOutcome() instanceof Double) {
                @SuppressWarnings("unchecked")
                FeatureResult<Double> doubleResult = (FeatureResult<Double>) result;
                value = doubleResult.getOutcome().doubleValue();
            }
            int index = featureIndexMap.get(result.getTrainingName());
            // Only features seen at training time have an index (>= 0).
            if (index >= 0) {
                nodes.add(new FeatureNode(index, value));
            }
        }
    }
    return nodes;
}
Use of de.bwaldvogel.liblinear.FeatureNode in project Palladian.
From the class LibLinearLearner, method makeInstance.
/**
 * Converts a {@code FeatureVector} into a liblinear feature array.
 * <p>
 * Only non-null {@code NumericValue} entries with a known index are kept, and
 * values that are effectively zero are skipped to keep the instance sparse
 * (liblinear treats absent features as zero). Indices are shifted to be
 * 1-based as liblinear requires, and the result is sorted ascending by index.
 *
 * @param featureLabelIndices Mapping from feature name to its 0-based index.
 * @param featureVector The input feature vector.
 * @param bias The bias value; a negative value means no bias term is added.
 * @return The liblinear features, sorted ascending by index.
 */
static de.bwaldvogel.liblinear.Feature[] makeInstance(Map<String, Integer> featureLabelIndices, FeatureVector featureVector, double bias) {
    List<de.bwaldvogel.liblinear.Feature> features = new ArrayList<>();
    for (VectorEntry<String, Value> vectorEntry : featureVector) {
        Value value = vectorEntry.value();
        Integer featureIndex = featureLabelIndices.get(vectorEntry.key());
        if (featureIndex != null && !value.isNull() && value instanceof NumericValue) {
            double floatValue = ((NumericValue) value).getDouble();
            // Skip (near-)zero values; sparse instances omit zero features.
            if (Math.abs(floatValue) < 2 * Float.MIN_VALUE) {
                continue;
            }
            features.add(new FeatureNode(featureIndex + 1, /* 1-indexed */
                    floatValue));
        }
    }
    if (bias >= 0) {
        // The bias term occupies the index just past the last real feature.
        features.add(new FeatureNode(featureLabelIndices.size() + 1, bias));
    }
    // liblinear requires features ordered by ascending index.
    features.sort(Comparator.comparingInt(de.bwaldvogel.liblinear.Feature::getIndex));
    return features.toArray(new de.bwaldvogel.liblinear.Feature[0]);
}
Aggregations