Use of com.oracle.labs.mlrg.olcut.util.Pair in the Oracle Tribuo project:
class MultinomialNaiveBayesModel, method getExcuse.
/**
 * Generates an excuse for this example by reporting, for each output label, the
 * model's per-feature word probability for every feature of the example that is
 * present in the feature map. Unknown features are skipped.
 *
 * @param example The example to explain.
 * @return An excuse wrapping the prediction and the per-label feature scores.
 */
@Override
public Optional<Excuse<Label>> getExcuse(Example<Label> example) {
    Map<String, List<Pair<String, Double>>> perLabelScores = new HashMap<>();
    for (Pair<Integer, Label> labelPair : outputIDInfo) {
        List<Pair<String, Double>> featureScores = new ArrayList<>();
        for (Feature feature : example) {
            int featureID = featureIDMap.getID(feature.getName());
            if (featureID <= -1) {
                // Feature unseen at training time - no probability to report.
                continue;
            }
            featureScores.add(new Pair<>(feature.getName(),
                    labelWordProbs.getRow(labelPair.getA()).get(featureID)));
        }
        perLabelScores.put(labelPair.getB().getLabel(), featureScores);
    }
    return Optional.of(new Excuse<>(example, predict(example), perLabelScores));
}
Use of com.oracle.labs.mlrg.olcut.util.Pair in the Oracle Tribuo project:
class MultinomialNaiveBayesModel, method getTopFeatures.
/**
 * Returns the top {@code n} features per label, scored by the label's
 * per-word probability. A negative {@code n} returns all features.
 *
 * @param n The number of features to return per label (negative for all).
 * @return A map from label name to its features sorted by descending score.
 */
@Override
public Map<String, List<Pair<String, Double>>> getTopFeatures(int n) {
    // Negative n means "no limit".
    int maxFeatures = n < 0 ? featureIDMap.size() : n;
    Map<String, List<Pair<String, Double>>> topFeatures = new HashMap<>();
    for (Pair<Integer, Label> label : outputIDInfo) {
        // Gather (feature name, probability) for each active element of this label's row.
        List<Pair<String, Double>> features = new ArrayList<>(labelWordProbs.numActiveElements(label.getA()));
        for (VectorTuple vt : labelWordProbs.getRow(label.getA())) {
            features.add(new Pair<>(featureIDMap.get(vt.index).getName(), vt.value));
        }
        // Sort by descending score.
        features.sort(Comparator.comparing(x -> -x.getB()));
        // Clamp against the list size, not featureIDMap.size(): a sparse row may
        // have fewer active elements than maxFeatures, and subList(0, maxFeatures)
        // would throw IndexOutOfBoundsException if maxFeatures exceeded it.
        if (maxFeatures < features.size()) {
            features = features.subList(0, maxFeatures);
        }
        topFeatures.put(label.getB().getLabel(), features);
    }
    return topFeatures;
}
Use of com.oracle.labs.mlrg.olcut.util.Pair in the Oracle Tribuo project:
class LIMEBase, method explainWithSamples.
/**
 * Explains an example by training a sparse surrogate model on data sampled
 * around it, and evaluates how well the surrogate tracks the full model.
 *
 * @param example The example to explain.
 * @return A pair of the explanation and the sampled training data.
 */
protected Pair<LIMEExplanation, List<Example<Regressor>>> explainWithSamples(Example<Label> example) {
    // Query the full model and attach its prediction to the input as a regression target.
    Prediction<Label> fullPrediction = innerModel.predict(example);
    Example<Regressor> labelledExample = new ArrayExample<>(transformOutput(fullPrediction), example, 1.0f);

    // Draw a perturbed dataset around the input example.
    List<Example<Regressor>> sampledData = sampleData(example);

    // Fit the sparse surrogate on the sampled data.
    SparseModel<Regressor> surrogate = trainExplainer(labelledExample, sampledData);

    // Score the surrogate on the samples plus the labelled input itself.
    List<Prediction<Regressor>> surrogatePredictions = new ArrayList<>(surrogate.predict(sampledData));
    surrogatePredictions.add(surrogate.predict(labelledExample));
    RegressionEvaluation evaluation = evaluator.evaluate(surrogate, surrogatePredictions,
            new SimpleDataSourceProvenance("LIMEColumnar sampled data", regressionFactory));

    return new Pair<>(new LIMEExplanation(surrogate, fullPrediction, evaluation), sampledData);
}
Use of com.oracle.labs.mlrg.olcut.util.Pair in the Oracle Tribuo project:
class IndependentRegressionTreeModel, method getExcuse.
/**
 * Generates an excuse for this example by walking each output dimension's
 * tree from its root to a leaf.
 * <p>
 * For each tree, the features tested along the decision path are recorded in
 * visit order and scored with descending values (the first split receives
 * {@code pathLength + 1}, decreasing by one per subsequent split). The
 * per-tree leaf predictions are merged via {@code combine} to produce the
 * excuse's prediction.
 *
 * @param example The example to explain.
 * @return An excuse, or {@link Optional#empty} when the example contains no
 *         features known to this model.
 */
@Override
public Optional<Excuse<Regressor>> getExcuse(Example<Regressor> example) {
    SparseVector vec = SparseVector.createSparseVector(example, featureIDMap, false);
    if (vec.numActiveElements() == 0) {
        // No known features - nothing to explain.
        return Optional.empty();
    }
    List<String> list = new ArrayList<>();
    List<Prediction<Regressor>> predList = new ArrayList<>();
    Map<String, List<Pair<String, Double>>> map = new HashMap<>();
    for (Map.Entry<String, Node<Regressor>> e : roots.entrySet()) {
        // 'list' is reused across trees; drop the previous tree's path.
        list.clear();
        //
        // Ensures we handle collisions correctly
        Node<Regressor> oldNode = e.getValue();
        Node<Regressor> curNode = e.getValue();
        // Trailing-pointer descent: oldNode lags curNode by one step, so when
        // getNextNode returns null, oldNode holds the last non-null node (the leaf).
        while (curNode != null) {
            oldNode = curNode;
            if (oldNode instanceof SplitNode) {
                SplitNode<?> node = (SplitNode<?>) curNode;
                // Record the feature tested at this split, in path order.
                list.add(featureIDMap.get(node.getFeatureID()).getName());
            }
            curNode = oldNode.getNextNode(vec);
        }
        //
        // oldNode must be a LeafNode.
        predList.add(((LeafNode<Regressor>) oldNode).getPrediction(vec.numActiveElements(), example));
        // Score path features by position: earlier splits get larger values,
        // starting at list.size() + 1 and decreasing by one per feature.
        List<Pair<String, Double>> pairs = new ArrayList<>();
        int i = list.size() + 1;
        for (String s : list) {
            pairs.add(new Pair<>(s, i + 0.0));
            i--;
        }
        map.put(e.getKey(), pairs);
    }
    Prediction<Regressor> combinedPrediction = combine(predList);
    return Optional.of(new Excuse<>(example, combinedPrediction, map));
}
Use of com.oracle.labs.mlrg.olcut.util.Pair in the Oracle Tribuo project:
class LibLinearRegressionModel, method innerGetExcuse.
/**
 * Builds an excuse for an example from precomputed feature weights.
 * <p>
 * The public methods copy the weight array when they call
 * model.getFeatureWeights, so this inner method accepts the weights directly
 * to avoid repeating that copy in getExcuses.
 * <p>
 * If it becomes a problem then we could cache the feature weights in the
 * model.
 *
 * @param e The example.
 * @param allFeatureWeights The feature weights.
 * @return An excuse for this example.
 */
@Override
protected Excuse<Regressor> innerGetExcuse(Example<Regressor> e, double[][] allFeatureWeights) {
    Prediction<Regressor> prediction = predict(e);
    Map<String, List<Pair<String, Double>>> weightMap = new HashMap<>();
    for (int dim = 0; dim < allFeatureWeights.length; dim++) {
        List<Pair<String, Double>> dimScores = new ArrayList<>();
        for (Feature feature : e) {
            int featureID = featureIDMap.getID(feature.getName());
            if (featureID <= -1) {
                // Feature unknown to the model - contributes nothing.
                continue;
            }
            // Contribution of this feature to this output dimension.
            double contribution = allFeatureWeights[dim][featureID] * feature.getValue();
            dimScores.add(new Pair<>(feature.getName(), contribution));
        }
        // Order contributions from largest to smallest.
        dimScores.sort((first, second) -> Double.compare(second.getB(), first.getB()));
        weightMap.put(dimensionNames[mapping[dim]], dimScores);
    }
    return new Excuse<>(e, prediction, weightMap);
}
Aggregations