Use of com.airbnb.aerosolve.core.util.FloatVector in project aerosolve by airbnb.
The class LowRankLinearModel, method scoreItem.
// In the binary case this is just the score for class 0.
// Ideally use a binary model for binary classification.
@Override
public float scoreItem(FeatureVector combinedItem) {
  // Not supported.
  assert (false);
  Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
  FloatVector sum = scoreFlatFeature(flatFeatures);
  return sum.values[0];
}
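A minimal caller-side sketch, not part of aerosolve: since the comment above says the binary score is just the score for class 0, a caller can read that value through scoreItemMulticlass instead of tripping the assertion in scoreItem. The helper name class0Score, the package paths in the imports, and the Thrift-generated getScore() accessor on MulticlassScoringResult are assumptions of this sketch.
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.models.LowRankLinearModel;

public class Class0ScoreSketch {
  // Hypothetical helper: returns what scoreItem would return with JVM
  // assertions disabled, without relying on the unsupported code path.
  public static double class0Score(LowRankLinearModel model, FeatureVector item) {
    return model.scoreItemMulticlass(item).get(0).getScore();
  }
}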
Use of com.airbnb.aerosolve.core.util.FloatVector in project aerosolve by airbnb.
The class LowRankLinearModel, method scoreItemMulticlass.
public ArrayList<MulticlassScoringResult> scoreItemMulticlass(FeatureVector combinedItem) {
  ArrayList<MulticlassScoringResult> results = new ArrayList<>();
  Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
  FloatVector sum = scoreFlatFeature(flatFeatures);
  // The i-th component of the projected vector is the score for the i-th label.
  for (int i = 0; i < labelDictionary.size(); i++) {
    MulticlassScoringResult result = new MulticlassScoringResult();
    result.setLabel(labelDictionary.get(i).getLabel());
    result.setScore(sum.values[i]);
    results.add(result);
  }
  return results;
}
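A short usage sketch, assuming a loaded LowRankLinearModel and a populated FeatureVector (both supplied by the caller), the package paths in the imports, and the Thrift-generated getLabel()/getScore() getters; it picks the top-scoring label from the results produced above.
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.MulticlassScoringResult;
import com.airbnb.aerosolve.core.models.LowRankLinearModel;

public class BestLabelSketch {
  // Hypothetical helper: returns the label with the highest score, or null
  // if the model produced no results for this item.
  public static String bestLabel(LowRankLinearModel model, FeatureVector item) {
    MulticlassScoringResult best = null;
    for (MulticlassScoringResult r : model.scoreItemMulticlass(item)) {
      if (best == null || r.getScore() > best.getScore()) {
        best = r;
      }
    }
    return best == null ? null : best.getLabel();
  }
}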
Use of com.airbnb.aerosolve.core.util.FloatVector in project aerosolve by airbnb.
The class LowRankLinearModel, method projectEmbeddingToLabel.
public FloatVector projectEmbeddingToLabel(FloatVector fvProjection) {
  int dim = labelDictionary.size();
  FloatVector sum = new FloatVector(dim);
  // compute the projection from D-dim joint space to label space
  for (int i = 0; i < dim; i++) {
    String labelKey = labelDictionary.get(i).getLabel();
    FloatVector labelVector = labelWeightVector.get(labelKey);
    if (labelVector != null) {
      float val = labelVector.dot(fvProjection);
      sum.set(i, val);
    }
  }
  return sum;
}
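A self-contained sketch of the projection above using only FloatVector operations that appear on this page; the dimensions and numbers are made up. Each label score is a single dot product between that label's weight vector and the joint-space embedding.
import com.airbnb.aerosolve.core.util.FloatVector;

public class LabelProjectionSketch {
  public static void main(String[] args) {
    // A 2-dimensional joint-space embedding.
    FloatVector embedding = new FloatVector(2);
    embedding.set(0, 1.0f);
    embedding.set(1, 2.0f);
    // Weight vector for one label.
    FloatVector labelWeights = new FloatVector(2);
    labelWeights.set(0, 0.5f);
    labelWeights.set(1, -0.5f);
    // Same operation as labelVector.dot(fvProjection) above:
    // 1.0 * 0.5 + 2.0 * (-0.5) = -0.5
    float score = labelWeights.dot(embedding);
    System.out.println(score);
  }
}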
Use of com.airbnb.aerosolve.core.util.FloatVector in project aerosolve by airbnb.
The class MlpModel, method projectInputLayer.
public FloatVector projectInputLayer(Map<String, Map<String, Double>> flatFeatures, Double dropout) {
  // Compute the projection from the input feature space to the first hidden layer,
  // or to the output layer if there is no hidden layer.
  // Output: fvProjection is a FloatVector holding the activations of the first layer after the input.
  int outputNodeNum = layerNodeNumber.get(0);
  FloatVector fvProjection = layerActivations.get(0);
  if (fvProjection == null) {
    fvProjection = new FloatVector(outputNodeNum);
    layerActivations.put(0, fvProjection);
  } else {
    // Recompute the activation every time we do forward propagation.
    fvProjection.setConstant(0.0f);
  }
  for (Map.Entry<String, Map<String, Double>> entry : flatFeatures.entrySet()) {
    Map<String, FloatVector> family = inputLayerWeights.get(entry.getKey());
    if (family != null) {
      for (Map.Entry<String, Double> feature : entry.getValue().entrySet()) {
        FloatVector vec = family.get(feature.getKey());
        if (vec != null) {
          if (dropout > 0.0 && Math.random() < dropout) {
            continue;
          }
          // Each surviving feature contributes value * weightVector.
          fvProjection.multiplyAdd(feature.getValue().floatValue(), vec);
        }
      }
    }
  }
  if (dropout > 0.0 && dropout < 1.0) {
    // Inverted dropout: rescale the surviving activations so their expected value is unchanged.
    fvProjection.scale(1.0f / (1.0f - dropout.floatValue()));
  }
  // Add the bias for the first hidden layer or output layer.
  fvProjection.add(bias.get(0));
  applyActivation(fvProjection, activationFunction.get(0));
  return fvProjection;
}
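A runnable sketch, with made-up numbers, of the two numeric steps above: each surviving feature contributes value * weightVector via multiplyAdd, and when 0 < dropout < 1 the accumulated activations are rescaled by 1 / (1 - dropout) so their expected value matches the no-dropout case. Only FloatVector operations already used on this page appear here.
import com.airbnb.aerosolve.core.util.FloatVector;

public class InputLayerSketch {
  public static void main(String[] args) {
    float dropout = 0.5f;
    FloatVector activation = new FloatVector(3);
    // One feature with value 2.0 and weight vector [1, 0, -1] survived dropout.
    FloatVector weights = new FloatVector(3);
    weights.set(0, 1.0f);
    weights.set(1, 0.0f);
    weights.set(2, -1.0f);
    activation.multiplyAdd(2.0f, weights);     // activation = [2, 0, -2]
    // Inverted-dropout rescaling, as in the method above.
    activation.scale(1.0f / (1.0f - dropout)); // activation = [4, 0, -4]
    System.out.println(activation.get(0) + " " + activation.get(1) + " " + activation.get(2));
  }
}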
Use of com.airbnb.aerosolve.core.util.FloatVector in project aerosolve by airbnb.
The class MlpModel, method projectHiddenLayer.
public FloatVector projectHiddenLayer(int hiddenLayerId, Double dropout) {
  int outputLayerId = hiddenLayerId + 1;
  int outputDim = layerNodeNumber.get(outputLayerId);
  FloatVector output = layerActivations.get(outputLayerId);
  if (output == null) {
    output = new FloatVector(outputDim);
    layerActivations.put(outputLayerId, output);
  } else {
    output.setConstant(0.0f);
  }
  FloatVector input = layerActivations.get(hiddenLayerId);
  ArrayList<FloatVector> weights = hiddenLayerWeights.get(hiddenLayerId);
  for (int i = 0; i < input.length(); i++) {
    if (dropout > 0.0 && Math.random() < dropout) {
      continue;
    }
    // Accumulate input[i] times the outgoing weight vector of input node i:
    // a matrix-vector product built up one column at a time.
    output.multiplyAdd(input.get(i), weights.get(i));
  }
  if (dropout > 0.0 && dropout < 1.0) {
    // Inverted dropout: rescale the surviving contributions.
    output.scale(1.0f / (1.0f - dropout.floatValue()));
  }
  output.multiplyAdd(1.0f, bias.get(outputLayerId));
  applyActivation(output, activationFunction.get(outputLayerId));
  return output;
}
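The hidden-layer loop above is a matrix-vector product written column by column: each input node i contributes input[i] times its outgoing weight vector. A self-contained sketch with made-up numbers, using only FloatVector operations that appear on this page:
import com.airbnb.aerosolve.core.util.FloatVector;

public class HiddenLayerSketch {
  public static void main(String[] args) {
    // Two input nodes feeding two output nodes.
    FloatVector input = new FloatVector(2);
    input.set(0, 1.0f);
    input.set(1, 3.0f);
    FloatVector w0 = new FloatVector(2); // outgoing weights of input node 0
    w0.set(0, 1.0f);
    w0.set(1, 2.0f);
    FloatVector w1 = new FloatVector(2); // outgoing weights of input node 1
    w1.set(0, 0.0f);
    w1.set(1, -1.0f);
    FloatVector output = new FloatVector(2);
    // Same accumulation as output.multiplyAdd(input.get(i), weights.get(i)) above:
    output.multiplyAdd(input.get(0), w0); // output = [1, 2]
    output.multiplyAdd(input.get(1), w1); // output = [1, -1]
    System.out.println(output.get(0) + " " + output.get(1));
  }
}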