Use of structures._SparseFeature in project IR_Base by Linda-sunshine.
Class MTCLinAdaptWithHDP, method gradientByFunc.
@Override
protected void gradientByFunc(_AdaptStruct u, _Doc review, double weight, double[] g) {
    _Review r = (_Review) review;
    _HDPThetaStar theta = r.getHDPThetaStar();
    // feature index
    int n, k, s;
    int cIndex = theta.getIndex();
    if (cIndex < 0 || cIndex >= m_kBar)
        System.err.println("Error, cannot find the theta star!");
    int offset = m_dim * 2 * cIndex, offsetSup = m_dim * 2 * m_kBar;
    double[] Au = theta.getModel();
    double delta = (review.getYLabel() - logit(review.getSparse(), r)) * weight;
    // Bias term for individual user.
    // a[0] = w_s0*x0; x0=1
    g[offset] -= delta * getSupWeights(0);
    // b[0]
    g[offset + m_dim] -= delta;
    // Bias term for super user.
    // a_s[0] = a_i0*w_g0*x_d0
    g[offsetSup] -= delta * Au[0] * m_gWeights[0];
    // b_s[0] = a_i0*x_d0
    g[offsetSup + m_dimSup] -= delta * Au[0];
    // Traverse all the feature dimensions to calculate the gradient for both the individual user and the super user.
    for (_SparseFeature fv : review.getSparse()) {
        n = fv.getIndex() + 1;
        k = m_featureGroupMap[n];
        // w_si*x_di
        g[offset + k] -= delta * getSupWeights(n) * fv.getValue();
        // x_di
        g[offset + m_dim + k] -= delta * fv.getValue();
        s = m_featureGroupMap4SupUsr[n];
        // a_i*w_gi*x_di
        g[offsetSup + s] -= delta * Au[k] * m_gWeights[n] * fv.getValue();
        // a_i*x_di
        g[offsetSup + m_dimSup + s] -= delta * Au[k] * fv.getValue();
    }
}
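The method above follows the usual logistic-regression pattern: for label y in {0, 1} and score s, d(log-likelihood)/ds = y - sigmoid(s), so every parameter's gradient is this shared "delta" times that parameter's coefficient in the score. Below is a minimal, self-contained sketch of that pattern for a single scaling-and-shifting level; all names (GradientSketch, wGlobal, group, ...) are hypothetical, not IR_Base code.

// Sketch: score = sum_i (a[g(i)] * wGlobal[i] + b[g(i)]) * x[i], with x[0] = 1 as bias.
public class GradientSketch {
    static double sigmoid(double s) { return 1.0 / (1.0 + Math.exp(-s)); }

    static double score(double[] a, double[] b, double[] wGlobal, int[] group, double[] x) {
        double s = 0;
        for (int i = 0; i < x.length; i++)
            s += (a[group[i]] * wGlobal[i] + b[group[i]]) * x[i];
        return s;
    }

    // Accumulate the negative log-likelihood gradient into gA/gB, mirroring the
    // "g[...] -= delta * coefficient" updates in gradientByFunc above.
    static void accumulateGradient(int y, double weight, double[] a, double[] b,
                                   double[] wGlobal, int[] group, double[] x,
                                   double[] gA, double[] gB) {
        double delta = (y - sigmoid(score(a, b, wGlobal, group, x))) * weight;
        for (int i = 0; i < x.length; i++) {
            int k = group[i];
            gA[k] -= delta * wGlobal[i] * x[i]; // d score / d a[k] = wGlobal[i] * x[i]
            gB[k] -= delta * x[i];              // d score / d b[k] = x[i]
        }
    }
}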
Use of structures._SparseFeature in project IR_Base by Linda-sunshine.
Class _HDPAdaptStruct, method evaluate.
@Override
public double evaluate(_Doc doc) {
    _Review r = (_Review) doc;
    double prob = 0, sum = 0;
    double[] probs = r.getCluPosterior();
    int n, m, k;
    // not adaptation based
    if (m_dim == 0) {
        for (k = 0; k < probs.length; k++) {
            // need to be fixed: here we assumed binary classification
            sum = Utils.dotProduct(CLRWithHDP.m_hdpThetaStars[k].getModel(), doc.getSparse(), 0);
            if (MTCLRWithHDP.m_supWeights != null && MTCLRWithHDP.m_q != 0)
                sum += CLRWithDP.m_q * Utils.dotProduct(MTCLRWithHDP.m_supWeights, doc.getSparse(), 0);
            // to maintain numerical precision, compute the expectation in log space as well
            if (k == 0)
                prob = probs[k] + Math.log(Utils.logistic(sum));
            else
                prob = Utils.logSum(prob, probs[k] + Math.log(Utils.logistic(sum)));
        }
    } else {
        double[] As;
        for (k = 0; k < probs.length; k++) {
            As = CLRWithHDP.m_hdpThetaStars[k].getModel();
            // Bias term: w_s0*a0+b0.
            sum = As[0] * CLinAdaptWithHDP.m_supWeights[0] + As[m_dim];
            for (_SparseFeature fv : doc.getSparse()) {
                n = fv.getIndex() + 1;
                m = m_featureGroupMap[n];
                sum += (As[m] * CLinAdaptWithHDP.m_supWeights[n] + As[m_dim + m]) * fv.getValue();
            }
            // to maintain numerical precision, compute the expectation in log space as well
            if (k == 0)
                prob = probs[k] + Math.log(Utils.logistic(sum));
            else
                prob = Utils.logSum(prob, probs[k] + Math.log(Utils.logistic(sum)));
        }
    }
    // accumulate the prediction results during the sampling procedure
    doc.m_pCount++;
    // >0.5?1:0;
    doc.m_prob += Math.exp(prob);
    return prob;
}
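The cluster-averaged prediction is accumulated in log space: prob = log( sum_k exp(logPosterior[k]) * sigmoid(score_k) ), built up pairwise with a numerically stable log-sum. Below is a minimal sketch of that accumulation with hypothetical names; it mirrors the role Utils.logSum plays above but is not the IR_Base implementation.

public class LogSpaceExpectation {
    // Stable log(e^a + e^b): factor out the larger exponent.
    static double logSum(double logA, double logB) {
        double hi = Math.max(logA, logB), lo = Math.min(logA, logB);
        return hi + Math.log1p(Math.exp(lo - hi));
    }

    static double sigmoid(double s) { return 1.0 / (1.0 + Math.exp(-s)); }

    // logPosterior[k] = log p(cluster k | user), score[k] = linear score under cluster k.
    static double logExpectedPositiveProb(double[] logPosterior, double[] score) {
        double logProb = 0;
        for (int k = 0; k < logPosterior.length; k++) {
            double term = logPosterior[k] + Math.log(sigmoid(score[k]));
            logProb = (k == 0) ? term : logSum(logProb, term);
        }
        return logProb; // caller exponentiates, as evaluate() does via Math.exp(prob)
    }
}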
Use of structures._SparseFeature in project IR_Base by Linda-sunshine.
Class _HDPAdaptStruct, method evaluateG.
// Evaluate the performance of the global part.
public double evaluateG(_Doc doc) {
_Review r = (_Review) doc;
double prob = 0, sum = 0;
double[] probs = r.getCluPosterior();
int n, k;
for (k = 0; k < probs.length; k++) {
// As = CLRWithHDP.m_hdpThetaStars[k].getModel();
// Bias term: w_s0*a0+b0.
sum = CLinAdaptWithHDP.m_supWeights[0];
for (_SparseFeature fv : doc.getSparse()) {
n = fv.getIndex() + 1;
sum += CLinAdaptWithHDP.m_supWeights[n] * fv.getValue();
}
// to maintain numerical precision, compute the expectation in log space as well
if (k == 0)
prob = probs[k] + Math.log(Utils.logistic(sum));
else
prob = Utils.logSum(prob, probs[k] + Math.log(Utils.logistic(sum)));
}
// accumulate the prediction results during sampling procedure
doc.m_pCount_g++;
// >0.5?1:0;
doc.m_prob_g += Math.exp(prob);
return prob;
}
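Note that the score in evaluateG does not depend on k, so if the cluster posteriors are normalized the log-space loop reduces to log(logistic(sum)). The only indexing subtlety is the bias: global weight index 0 is the bias, so a sparse feature index i maps to weight index i + 1, exactly as n = fv.getIndex() + 1 does above. A minimal sketch of that shifted sparse dot product, with hypothetical names:

public class SparseScore {
    // weights[0] is the bias; sparse index i addresses weights[i + 1].
    static double scoreWithBias(double[] weights, int[] indices, double[] values) {
        double sum = weights[0];                        // bias term w_g0 * 1
        for (int j = 0; j < indices.length; j++)
            sum += weights[indices[j] + 1] * values[j]; // shift past the bias slot
        return sum;
    }
}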
Use of structures._SparseFeature in project IR_Base by Linda-sunshine.
Class CLinAdaptWithMMB, method logit.
@Override
protected double logit(_SparseFeature[] fvs, _Review r) {
    int k, n;
    double[] Au = r.getHDPThetaStar().getModel();
    // Bias term: w_s0*a0+b0.
    double sum = Au[0] * m_gWeights[0] + Au[m_dim];
    for (_SparseFeature fv : fvs) {
        n = fv.getIndex() + 1;
        k = m_featureGroupMap[n];
        sum += (Au[k] * m_gWeights[n] + Au[m_dim + k]) * fv.getValue();
    }
    return Utils.logistic(sum);
}
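The logit personalizes the global model group-wise: each feature group k rescales the global weight by Au[k] and adds an offset Au[m_dim + k]. Below is a self-contained worked example of that computation with hypothetical names and made-up numbers; it is only an illustration of the indexing, not the IR_Base API.

public class PersonalizedLogit {
    static double logit(double[] a, int dim, double[] wGlobal, int[] groupMap,
                        int[] idx, double[] val) {
        double sum = a[0] * wGlobal[0] + a[dim];          // bias: w_g0*a_0 + b_0
        for (int j = 0; j < idx.length; j++) {
            int n = idx[j] + 1;                           // shift past the bias slot
            int k = groupMap[n];
            sum += (a[k] * wGlobal[n] + a[dim + k]) * val[j];
        }
        return 1.0 / (1.0 + Math.exp(-sum));
    }

    public static void main(String[] args) {
        // Two feature groups over three features, plus the bias at weight index 0.
        double[] wGlobal = {0.5, 1.0, -2.0, 0.3};
        int[] groupMap   = {0, 0, 1, 1};                  // index 0 unused (bias handled separately)
        int dim = 2;                                      // two groups => a holds [a_0, a_1, b_0, b_1]
        double[] a = {1.2, 0.8, 0.1, -0.1};
        System.out.println(logit(a, dim, wGlobal, groupMap,
                                 new int[]{0, 2}, new double[]{1.0, 0.5}));
    }
}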
Use of structures._SparseFeature in project IR_Base by Linda-sunshine.
Class CLinAdaptWithMMB, method gradientByFunc.
@Override
protected void gradientByFunc(_AdaptStruct u, _Doc review, double weight, double[] g) {
    _Review r = (_Review) review;
    // feature index
    int n, k;
    int cIndex = r.getHDPThetaStar().getIndex();
    if (cIndex < 0 || cIndex >= m_kBar)
        System.err.println("Error, cannot find the theta star!");
    int offset = m_dim * 2 * cIndex;
    double delta = (review.getYLabel() - logit(review.getSparse(), r)) * weight;
    // Bias term for individual user.
    // a[0] = w_s0*x0; x0=1
    g[offset] -= delta * m_gWeights[0];
    // b[0]
    g[offset + m_dim] -= delta;
    // Traverse all the feature dimensions to calculate the gradient for the individual user.
    for (_SparseFeature fv : review.getSparse()) {
        n = fv.getIndex() + 1;
        k = m_featureGroupMap[n];
        // w_si*x_di
        g[offset + k] -= delta * m_gWeights[n] * fv.getValue();
        // x_di
        g[offset + m_dim + k] -= delta * fv.getValue();
    }
}
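A quick way to sanity-check a gradient of this form is a central finite difference against the weighted negative log-likelihood. The standalone sketch below (hypothetical names, not IR_Base code) does this for plain logistic regression, whose analytic gradient has the same -(y - p) * weight * coefficient shape as the updates above.

public class GradCheckSketch {
    static double sigmoid(double s) { return 1.0 / (1.0 + Math.exp(-s)); }

    // Weighted negative log-likelihood of one (x, y) under score = theta . x.
    static double loss(double[] theta, double[] x, int y, double weight) {
        double s = 0;
        for (int i = 0; i < x.length; i++) s += theta[i] * x[i];
        double p = sigmoid(s);
        return -weight * (y * Math.log(p) + (1 - y) * Math.log(1 - p));
    }

    // Analytic gradient entry: -(y - p) * weight * x[i].
    static double analyticGrad(double[] theta, double[] x, int y, double weight, int i) {
        double s = 0;
        for (int j = 0; j < x.length; j++) s += theta[j] * x[j];
        return -(y - sigmoid(s)) * weight * x[i];
    }

    static double[] copyWith(double[] a, int i, double v) {
        double[] b = a.clone(); b[i] = v; return b;
    }

    public static void main(String[] args) {
        double[] theta = {0.3, -1.2, 0.7}, x = {1.0, 0.5, -2.0};
        int y = 1, i = 1; double eps = 1e-6, weight = 2.0;
        double up = loss(copyWith(theta, i, theta[i] + eps), x, y, weight);
        double down = loss(copyWith(theta, i, theta[i] - eps), x, y, weight);
        System.out.println("numeric:  " + (up - down) / (2 * eps));
        System.out.println("analytic: " + analyticGrad(theta, x, y, weight, i));
    }
}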