Use of LBFGS.LBFGS.ExceptionWithIflag in project IR_Base by Linda-sunshine.
Class CoRegLR, method train():
// this is batch training over each individual user
@Override
public double train() {
    int[] iflag = { 0 }, iprint = { -1, 3 };
    double fValue, oldFValue = Double.MAX_VALUE;
    int vSize = (m_featureSize + 1) * m_userList.size(), displayCount = 0;
    double oldMag = 0;
    initLBFGS();
    init();
    try {
        do {
            fValue = 0;
            // initialize gradient
            Arrays.fill(m_g, 0);
            // accumulate function values and gradients from each user
            for (_AdaptStruct user : m_userList) {
                fValue += calculateFuncValue(user);
                calculateGradients(user);
            }
            // added by Lin: stop LBFGS early once the gradient magnitude stabilizes
            double curMag = gradientTest();
            if (Math.abs(oldMag - curMag) < 0.1)
                break;
            oldMag = curMag;
            if (m_displayLv == 2) {
                System.out.println("Fvalue is " + fValue);
            } else if (m_displayLv == 1) {
                if (fValue < oldFValue)
                    System.out.print("o");
                else
                    System.out.print("x");
                if (++displayCount % 100 == 0)
                    System.out.println();
            }
            oldFValue = fValue;
            // in the training process, the shared weight vector sharedW is updated
            LBFGS.lbfgs(vSize, 5, _CoRegLRAdaptStruct.getSharedW(), fValue, m_g, false, m_diag, iprint, 1e-3, 1e-16, iflag);
        } while (iflag[0] != 0);
        System.out.println();
    } catch (ExceptionWithIflag e) {
        e.printStackTrace();
    }
    setPersonalizedModel();
    return oldFValue;
}
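The loop above follows the usual reverse-communication pattern of the Java L-BFGS port: the caller computes the objective value and gradient, hands both to LBFGS.lbfgs, and repeats until iflag returns to zero. The stand-alone sketch below illustrates the same pattern on a toy quadratic. It assumes the package layout implied by the snippets (package LBFGS, class LBFGS, nested ExceptionWithIflag) and the argument order of the classic Java port of Nocedal's L-BFGS (n, m, x, f, g, diagco, diag, iprint, eps, xtol, iflag), which matches the calls in these examples; the class name LbfgsDriverSketch and the toy objective are illustrative only.

import java.util.Arrays;
import LBFGS.LBFGS;
import LBFGS.LBFGS.ExceptionWithIflag;

// Minimal driver sketch: minimize f(x) = sum_i (x_i - 1)^2 with the same
// reverse-communication loop the train() methods use.
public class LbfgsDriverSketch {
    public static void main(String[] args) {
        int n = 10;                      // number of variables (vSize / m_ws.length in the examples)
        double[] x = new double[n];      // optimized in place, like sharedW, m_ws, or sharedA
        double[] g = new double[n];      // gradient buffer, like m_g
        double[] diag = new double[n];   // diagonal workspace, like m_diag
        int[] iprint = { -1, 3 };        // suppress the solver's own printing
        int[] iflag = { 0 };             // 0 on entry; set back to 0 once converged

        try {
            do {
                // the caller evaluates f and g at the current x, exactly as the
                // calculateFuncValue/calculateGradients pair does above
                double f = 0;
                Arrays.fill(g, 0);
                for (int i = 0; i < n; i++) {
                    double d = x[i] - 1.0;
                    f += d * d;
                    g[i] = 2 * d;
                }
                // m = 5 correction pairs, diagco = false, eps = 1e-3, xtol = 1e-16
                LBFGS.lbfgs(n, 5, x, f, g, false, diag, iprint, 1e-3, 1e-16, iflag);
            } while (iflag[0] != 0);     // nonzero means the solver wants another f/g evaluation
        } catch (ExceptionWithIflag e) {
            // thrown when the line search or input checking fails
            e.printStackTrace();
        }
        System.out.println(Arrays.toString(x));
    }
}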
Use of LBFGS.LBFGS.ExceptionWithIflag in project IR_Base by Linda-sunshine.
Class MTRegLR, method train():
// this is batch training over each individual user
@Override
public double train() {
    int[] iflag = { 0 }, iprint = { -1, 3 };
    double fValue, oldFValue = Double.MAX_VALUE;
    int displayCount = 0;
    _AdaptStruct user;
    initLBFGS();
    init();
    try {
        do {
            fValue = 0;
            // initialize gradient
            Arrays.fill(m_g, 0);
            // accumulate function values and gradients from each user
            for (int i = 0; i < m_userList.size(); i++) {
                user = (_AdaptStruct) m_userList.get(i);
                // L + R^1(A_i)
                fValue += calculateFuncValue(user);
                calculateGradients(user);
            }
            if (m_displayLv == 2) {
                System.out.format("Fvalue is %.3f\t", fValue);
                gradientTest();
            } else if (m_displayLv == 1) {
                if (fValue < oldFValue)
                    System.out.print("o");
                else
                    System.out.print("x");
                if (++displayCount % 100 == 0)
                    System.out.println();
            }
            oldFValue = fValue;
            // in the training process, A is updated
            LBFGS.lbfgs(m_ws.length, 6, m_ws, fValue, m_g, false, m_diag, iprint, 1e-3, 1e-16, iflag);
        } while (iflag[0] != 0);
        System.out.println();
    } catch (ExceptionWithIflag e) {
        System.err.println("********lbfgs fails here!******");
        e.printStackTrace();
    }
    setPersonalizedModel();
    return oldFValue;
}
Use of LBFGS.LBFGS.ExceptionWithIflag in project IR_Base by Linda-sunshine.
Class RegLR, method train():
// this is batch training over each individual user
@Override
public double train() {
    int[] iflag = { 0 }, iprint = { -1, 3 };
    double fValue = 0, w[], oldFValue = Double.MAX_VALUE, totalFvalue = 0;
    init();
    // each user's model is optimized independently with its own LBFGS run
    for (_AdaptStruct user : m_userList) {
        initLBFGS();
        iflag[0] = 0;
        try {
            w = user.getUserModel();
            oldFValue = Double.MAX_VALUE;
            do {
                // initialize gradient
                Arrays.fill(m_g, 0);
                fValue = calculateFuncValue(user);
                calculateGradients(user);
                if (m_displayLv == 2) {
                    System.out.println("Fvalue is " + fValue);
                    gradientTest();
                } else if (m_displayLv == 1) {
                    if (fValue < oldFValue)
                        System.out.print("o");
                    else
                        System.out.print("x");
                }
                oldFValue = fValue;
                // in the training process, A is updated
                LBFGS.lbfgs(w.length, 6, w, fValue, m_g, false, m_diag, iprint, 1e-4, 1e-32, iflag);
            } while (iflag[0] != 0);
        } catch (ExceptionWithIflag e) {
            if (m_displayLv > 0)
                System.out.print("X");
            else
                System.out.println("X");
        }
        if (m_displayLv > 0)
            System.out.println();
        totalFvalue += fValue;
    }
    setPersonalizedModel();
    return totalFvalue;
}
Use of LBFGS.LBFGS.ExceptionWithIflag in project IR_Base by Linda-sunshine.
Class CLinAdaptWithKmeans, method train():
@Override
public double train() {
    int[] iflag = { 0 }, iprint = { -1, 3 };
    double fValue = 0, oldFValue = Double.MAX_VALUE;
    int displayCount = 0;
    _LinAdaptStruct user;
    init();
    initLBFGS();
    try {
        do {
            fValue = 0;
            initPerIter();
            // accumulate function values and gradients from each user
            for (int i = 0; i < m_userList.size(); i++) {
                user = (_LinAdaptStruct) m_userList.get(i);
                fValue += calculateFuncValue(user);
                calculateGradients(user);
            }
            // add the Rc/Rg regularization term and its gradient
            fValue += calculateRcRg();
            gradientByRcRg();
            if (m_displayLv == 2) {
                gradientTest();
                System.out.println("Fvalue is " + fValue);
            } else if (m_displayLv == 1) {
                if (fValue < oldFValue)
                    System.out.print("o");
                else
                    System.out.print("x");
                if (++displayCount % 100 == 0)
                    System.out.println();
            }
            oldFValue = fValue;
            // in the training process, A is updated
            LBFGS.lbfgs(m_g.length, 6, _CLinAdaptStruct.sharedA, fValue, m_g, false, m_diag, iprint, 1e-3, 1e-16, iflag);
        } while (iflag[0] != 0);
    } catch (ExceptionWithIflag e) {
        System.out.println("LBFGS fails!!!!");
        e.printStackTrace();
    }
    setPersonalizedModel();
    return oldFValue;
}
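Each handler in these examples only logs the failure. If the bundled port matches the classic Java L-BFGS implementation, ExceptionWithIflag also exposes the offending flag as a public iflag field (conventionally -1 for a line-search failure, -2 for a non-positive diagonal element, -3 for improper inputs), which a handler could report. The helper below is a hypothetical sketch under that assumption, not code from IR_Base.

import LBFGS.LBFGS.ExceptionWithIflag;

// Hypothetical helper: report which L-BFGS failure occurred, assuming the
// classic port's public "iflag" field on ExceptionWithIflag.
public final class LbfgsFailureReport {
    private LbfgsFailureReport() { }

    public static void report(ExceptionWithIflag e) {
        // -1: line search failed; -2: diagonal element not positive; -3: improper inputs
        System.err.println("L-BFGS stopped with iflag = " + e.iflag + ": " + e.getMessage());
    }
}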