Use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine: class CLinAdaptWithDP, method loadUsers.
@Override
public void loadUsers(ArrayList<_User> userList) {
    m_userList = new ArrayList<_AdaptStruct>();
    // Init each user with a DP adaptation structure.
    for (_User user : userList)
        m_userList.add(new _DPAdaptStruct(user, m_dim));
    // Personalized weights share the dimension of the global weights.
    m_pWeights = new double[m_gWeights.length];
}
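The loadUsers overrides in this project all follow the same pattern: allocate a fresh list, wrap every _User in the adaptation structure the model needs, and pre-allocate the personalized weight vector. The sketch below re-creates that pattern with minimal stand-in types (User, AdaptStruct, DPAdaptStruct and the field names are simplified placeholders, not the IR_Base classes), so the shape of the override is visible without the rest of the framework.

import java.util.ArrayList;

// Minimal stand-ins for the IR_Base types; field names are illustrative only.
class User { final String id; User(String id) { this.id = id; } }

class AdaptStruct { final User user; AdaptStruct(User u) { this.user = u; } }

class DPAdaptStruct extends AdaptStruct {
    final double[] clusterScale;           // per-dimension scaling, sized by the model dimension
    DPAdaptStruct(User u, int dim) { super(u); this.clusterScale = new double[dim]; }
}

public class LoadUsersSketch {
    ArrayList<AdaptStruct> m_userList;
    double[] m_gWeights = new double[10];  // global model weights (placeholder size)
    double[] m_pWeights;                   // personalized weights, same length as the global ones
    int m_dim = 5;

    // Same shape as CLinAdaptWithDP.loadUsers: one struct per user, then the weight buffer.
    public void loadUsers(ArrayList<User> userList) {
        m_userList = new ArrayList<AdaptStruct>();
        for (User user : userList)
            m_userList.add(new DPAdaptStruct(user, m_dim));
        m_pWeights = new double[m_gWeights.length];
    }

    public static void main(String[] args) {
        ArrayList<User> users = new ArrayList<User>();
        users.add(new User("u1"));
        users.add(new User("u2"));
        LoadUsersSketch model = new LoadUsersSketch();
        model.loadUsers(users);
        System.out.println("loaded " + model.m_userList.size() + " users");
    }
}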
Use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine: class CLRWithHDP, method loadUsers.
@Override
public void loadUsers(ArrayList<_User> userList) {
    m_userList = new ArrayList<_AdaptStruct>();
    for (_User user : userList)
        m_userList.add(new _HDPAdaptStruct(user));
    m_pWeights = new double[m_gWeights.length];
}
Use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine: class CLRWithMMB, method loadUsers.
@Override
public void loadUsers(ArrayList<_User> userList) {
    m_userList = new ArrayList<_AdaptStruct>();
    for (_User user : userList)
        m_userList.add(new _MMBAdaptStruct(user));
    m_pWeights = new double[m_gWeights.length];
    // Pairwise indicator: which theta-star is assigned to each user-user edge.
    m_indicator = new _HDPThetaStar[m_userList.size()][m_userList.size()];
}
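Beyond the shared loadUsers pattern, CLRWithMMB also allocates m_indicator, an n-by-n matrix recording the theta-star associated with each pair of users, so its memory footprint grows quadratically with the number of users. The sketch below illustrates that pairwise bookkeeping with a hypothetical ThetaStar placeholder; treating edges as undirected (storing the assignment symmetrically) is an assumption for the sketch, not something confirmed by the snippet above.

// Hypothetical placeholder for the cluster parameter attached to an edge.
class ThetaStar { final int id; ThetaStar(int id) { this.id = id; } }

public class EdgeIndicatorSketch {
    private final ThetaStar[][] indicator;   // indicator[i][j]: component assigned to edge (i, j)

    EdgeIndicatorSketch(int numUsers) {
        // Quadratic in the number of users, which is why it is allocated once in loadUsers.
        indicator = new ThetaStar[numUsers][numUsers];
    }

    // Assumed undirected edges: store the assignment symmetrically.
    void assign(int i, int j, ThetaStar theta) {
        indicator[i][j] = theta;
        indicator[j][i] = theta;
    }

    ThetaStar get(int i, int j) { return indicator[i][j]; }

    public static void main(String[] args) {
        EdgeIndicatorSketch sketch = new EdgeIndicatorSketch(3);
        sketch.assign(0, 2, new ThetaStar(7));
        System.out.println(sketch.get(2, 0).id);   // prints 7
    }
}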
Use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine: class CLRWithMMB, method trainTrace.
@Override
public double trainTrace(String data, int iter) {
    m_numberOfIterations = iter;
    m_thinning = 1;
    System.out.print(String.format("[Info]Joint Sampling for all zero edges: %b\n", m_jointAll));
    System.out.print(toString());

    double delta = 0, lastLikelihood = 0, curLikelihood = 0;
    double likelihoodX = 0, likelihoodY = 0;
    int count = 0;
    double likelihoodE = 0;
    // double[] likelihoodE;

    // Clear user performance, init cluster assignment, assign each review to one cluster.
    init();
    initThetaStars_Edges_Joint();
    sanityCheck();

    // Burn-in period for documents.
    while (count++ < m_burnIn) {
        calculate_E_step();
        calculate_M_step();
    }

    try {
        String traceFile = String.format("%s_iter_%d_burnin_%d_thin_%d_%b_%d.txt", data, iter, m_burnIn, m_thinning, m_jointAll, System.currentTimeMillis());
        PrintWriter writer = new PrintWriter(new File(traceFile));
        // EM iterations.
        for (int i = 0; i < m_numberOfIterations; i++) {
            // Cluster assignment; thinning reduces auto-correlation.
            calculate_E_step();
            likelihoodY = calculate_M_step();

            // Accumulate the likelihood components.
            likelihoodX = accumulateLikelihoodX();
            // likelihoodE = accumulateDecomposedLikelihoodEMMB();
            // likelihoodE[3] = (m_MNL[2]/2)*Math.log(1-m_rho);
            likelihoodE = accumulateLikelihoodEMMB();
            likelihoodE += (m_MNL[2] / 2) * Math.log(1 - m_rho);
            // curLikelihood = likelihoodY + likelihoodX + likelihoodE[0] + likelihoodE[1] + likelihoodE[3];
            curLikelihood = likelihoodY + likelihoodX + likelihoodE;
            delta = (lastLikelihood - curLikelihood) / curLikelihood;

            // Evaluate the model.
            if (i % m_thinning == 0) {
                evaluateModel();
                test();
                for (_AdaptStruct u : m_userList)
                    u.getPerfStat().clear();
            }
            // writer.write(String.format("%.5f\t%.5f\t%.5f\t%.5f\t%d\t%.5f\t%.5f\n", likelihoodE[0], likelihoodE[1], likelihoodE[2], likelihoodE[3], m_kBar, m_perf[0], m_perf[1]));
            writer.write(String.format("%.5f\t%.5f\t%.5f\t%.5f\t%d\t%.5f\t%.5f\n", likelihoodY, likelihoodX, likelihoodE, delta, m_kBar, m_perf[0], m_perf[1]));
            System.out.print(String.format("\n[Info]Step %d: likelihood: %.4f, Delta_likelihood: %.3f\n", i, curLikelihood, delta));

            if (Math.abs(delta) < m_converge)
                break;
            lastLikelihood = curLikelihood;
        }
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    // Do not miss the last sample.
    evaluateModel();
    return curLikelihood;
}
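trainTrace combines a burn-in phase, alternating E/M steps, per-iteration trace logging, and a relative-change convergence test on the joint likelihood. The skeleton below isolates just that control flow; calculateEStep, calculateMStep, and evaluate are hypothetical stubs standing in for the project's samplers and evaluation code, and the stub likelihood values are arbitrary.

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;

public class TraceLoopSketch {
    int burnIn = 10, maxIter = 100, thinning = 1;
    double converge = 1e-4;

    // Stub steps; in IR_Base these correspond to calculate_E_step, calculate_M_step, and the evaluation calls.
    void calculateEStep() { }
    double calculateMStep() { return -1000.0 / (1 + Math.random()); }   // placeholder log-likelihood
    void evaluate() { }

    double train(String traceFile) throws IOException {
        double last = 0, cur = 0, delta = 0;

        // Burn-in: run the sampler without recording anything.
        for (int b = 0; b < burnIn; b++) {
            calculateEStep();
            calculateMStep();
        }

        try (PrintWriter writer = new PrintWriter(new File(traceFile))) {
            for (int i = 0; i < maxIter; i++) {
                calculateEStep();
                cur = calculateMStep();
                delta = (last - cur) / cur;          // relative change in likelihood

                if (i % thinning == 0)               // thinning reduces auto-correlation
                    evaluate();

                writer.printf("%d\t%.5f\t%.5f%n", i, cur, delta);
                if (Math.abs(delta) < converge)
                    break;
                last = cur;
            }
        }
        evaluate();                                   // keep the estimate from the last sample
        return cur;
    }

    public static void main(String[] args) throws IOException {
        System.out.println(new TraceLoopSketch().train("trace.txt"));
    }
}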
Use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine: class CoRegLR, method loadUsers.
@Override
public void loadUsers(ArrayList<_User> userList) {
    int vSize = m_featureSize + 1;

    // step 1: create space
    m_userList = new ArrayList<_AdaptStruct>();
    for (int i = 0; i < userList.size(); i++) {
        _User user = userList.get(i);
        m_userList.add(new _CoRegLRAdaptStruct(user, i, vSize, m_topK));
    }

    // step 2: allocate the shared weight vector -- huge space consumption
    _CoRegLRAdaptStruct.sharedW = new double[vSize * m_userList.size()];

    // step 3: construct neighborhood graph
    constructNeighborhood(m_sType);
}
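CoRegLR packs every user's weight vector into one flat sharedW array of length vSize * numUsers, which is the "huge space consumption" the comment warns about. Under the assumption that user i's block simply starts at offset i * vSize (the usual convention for this layout, not verified against _CoRegLRAdaptStruct), the indexing looks like the sketch below.

public class SharedWeightSketch {
    final int vSize;          // featureSize + 1 (bias term included)
    final double[] sharedW;   // all users' weights, laid out back to back

    SharedWeightSketch(int numUsers, int featureSize) {
        this.vSize = featureSize + 1;
        this.sharedW = new double[vSize * numUsers];
    }

    // Assumed layout: user i owns the contiguous block [i * vSize, (i + 1) * vSize).
    double getWeight(int userIndex, int featureIndex) {
        return sharedW[userIndex * vSize + featureIndex];
    }

    void setWeight(int userIndex, int featureIndex, double value) {
        sharedW[userIndex * vSize + featureIndex] = value;
    }

    public static void main(String[] args) {
        SharedWeightSketch w = new SharedWeightSketch(3, 4);   // 3 users, 4 features plus bias
        w.setWeight(2, 0, 0.5);                                 // first weight of the third user
        System.out.println(w.getWeight(2, 0));
    }
}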