Use of edu.neu.ccs.pyramid.optimization.GradientDescent in project pyramid by cheng-li: class LogRiskOptimizer, method updateModelPartial.
private void updateModelPartial() {
    if (logger.isDebugEnabled()) {
        logger.debug("start updateModelPartial()");
    }
    KLLoss klLoss = new KLLoss(crf, dataSet, targets, variance);
    // todo
    Optimizer opt = null;
    switch (optimizer) {
        case "LBFGS":
            opt = new LBFGS(klLoss);
            break;
        case "GD":
            opt = new GradientDescent(klLoss);
            break;
        default:
            throw new IllegalArgumentException("unknown");
    }
    opt.getTerminator().setMaxIteration(10);
    opt.optimize();
    if (logger.isDebugEnabled()) {
        logger.debug("finish updateModelPartial()");
    }
}
Use of edu.neu.ccs.pyramid.optimization.GradientDescent in project pyramid by cheng-li: class NoiseOptimizerLR, method updateModel.
private void updateModel() {
    if (logger.isDebugEnabled()) {
        logger.debug("start updateModel()");
    }
    KLLoss klLoss = new KLLoss(crf, dataSet, targets, variance);
    // todo
    Optimizer opt = null;
    switch (optimizer) {
        case "LBFGS":
            opt = new LBFGS(klLoss);
            break;
        case "GD":
            opt = new GradientDescent(klLoss);
            break;
        default:
            throw new IllegalArgumentException("unknown");
    }
    opt.optimize();
    if (logger.isDebugEnabled()) {
        logger.debug("finish updateModel()");
    }
}
Use of edu.neu.ccs.pyramid.optimization.GradientDescent in project pyramid by cheng-li: class NoiseOptimizerLR, method updateModelPartial.
private void updateModelPartial(int modelIterations) {
    if (logger.isDebugEnabled()) {
        logger.debug("start updateModelPartial()");
    }
    KLLoss klLoss = new KLLoss(crf, dataSet, targets, variance);
    // todo
    Optimizer opt = null;
    switch (optimizer) {
        case "LBFGS":
            opt = new LBFGS(klLoss);
            break;
        case "GD":
            opt = new GradientDescent(klLoss);
            break;
        default:
            throw new IllegalArgumentException("unknown");
    }
    opt.getTerminator().setMaxIteration(modelIterations);
    opt.optimize();
    if (logger.isDebugEnabled()) {
        logger.debug("finish updateModelPartial()");
    }
}
Use of edu.neu.ccs.pyramid.optimization.GradientDescent in project pyramid by cheng-li: class NoiseOptimizer, method updateModel.
private void updateModel() {
    if (logger.isDebugEnabled()) {
        logger.debug("start updateModel()");
    }
    KLLoss klLoss = new KLLoss(crf, dataSet, targets, variance);
    // todo
    Optimizer opt = null;
    switch (optimizer) {
        case "LBFGS":
            opt = new LBFGS(klLoss);
            break;
        case "GD":
            opt = new GradientDescent(klLoss);
            break;
        default:
            throw new IllegalArgumentException("unknown");
    }
    opt.optimize();
    if (logger.isDebugEnabled()) {
        logger.debug("finish updateModel()");
    }
}
Use of edu.neu.ccs.pyramid.optimization.GradientDescent in project pyramid by cheng-li: class NoiseOptimizer, method updateModelPartial.
private void updateModelPartial(int modelIterations) {
    if (logger.isDebugEnabled()) {
        logger.debug("start updateModelPartial()");
    }
    KLLoss klLoss = new KLLoss(crf, dataSet, targets, variance);
    // todo
    Optimizer opt = null;
    switch (optimizer) {
        case "LBFGS":
            opt = new LBFGS(klLoss);
            break;
        case "GD":
            opt = new GradientDescent(klLoss);
            break;
        default:
            throw new IllegalArgumentException("unknown");
    }
    opt.getTerminator().setMaxIteration(modelIterations);
    opt.optimize();
    if (logger.isDebugEnabled()) {
        logger.debug("finish updateModelPartial()");
    }
}
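All five methods above repeat the same pattern: build a loss function, pick LBFGS or GradientDescent based on a string, optionally cap the iteration count on the terminator, and run optimize(). Below is a minimal sketch of a hypothetical helper that factors out that selection logic. It uses only calls that appear in the snippets above; the assumption that the loss is passed as Optimizable.ByGradientValue (the gradient/value interface the LBFGS and GradientDescent constructors accept) is mine, not stated in the source.

import edu.neu.ccs.pyramid.optimization.GradientDescent;
import edu.neu.ccs.pyramid.optimization.LBFGS;
import edu.neu.ccs.pyramid.optimization.Optimizable;
import edu.neu.ccs.pyramid.optimization.Optimizer;

// Hypothetical helper (not part of pyramid) that mirrors the repeated switch
// in the snippets above. "loss" stands in for the KLLoss that the original
// methods construct from crf, dataSet, targets, and variance.
public class OptimizerFactory {

    static Optimizer create(String optimizer, Optimizable.ByGradientValue loss, int maxIteration) {
        Optimizer opt;
        switch (optimizer) {
            case "LBFGS":
                opt = new LBFGS(loss);
                break;
            case "GD":
                opt = new GradientDescent(loss);
                break;
            default:
                throw new IllegalArgumentException("unknown optimizer: " + optimizer);
        }
        // Cap the number of iterations before running, as updateModelPartial() does.
        opt.getTerminator().setMaxIteration(maxIteration);
        return opt;
    }
}

A caller would then replace the inlined switch with something like Optimizer opt = OptimizerFactory.create(optimizer, klLoss, 10); opt.optimize();, keeping the debug logging around the call as in the original methods.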