}
// Build the Generalized Expectation (GE) objective over the training data and
// expectation constraints, wrapping the classifier's parameters for optimization.
ge = new MaxEntOptimizableByGE(trainingList,constraints,classifier);
ge.setTemperature(temperature);
ge.setGaussianPriorVariance(gaussianPriorVariance);
// L-BFGS optimizer over the GE objective.
opt = new LimitedMemoryBFGS(ge);
logger.fine ("trainingList.size() = "+trainingList.size());
boolean converged;
// Step the optimizer one iteration at a time, up to numIterations, so we can
// stop early as soon as it reports convergence.
for (int i = 0; i < numIterations; i++) {
try {
converged = opt.optimize (1);
} catch (Exception e) {
// NOTE(review): broad catch that treats any optimizer failure (e.g. a
// line-search error) as convergence and continues; consider narrowing the
// exception type and logging the cause instead of printStackTrace().
e.printStackTrace();
logger.info ("Catching exception; saying converged.");
converged = true;
}
if (converged)
break;
}
// When the caller requested effectively unlimited iterations, restart L-BFGS
// once with a cleared gradient history after the first convergence.
if (numIterations == Integer.MAX_VALUE) {
// Run it again because in our and Sam Roweis' experience, BFGS can still
// eke out more likelihood after first convergence by re-running without
// being restricted by its gradient history.
opt = new LimitedMemoryBFGS(ge);
try {
opt.optimize ();
} catch (Exception e) {
// NOTE(review): same broad catch-and-continue pattern as the loop above —
// exceptions are swallowed and treated as convergence.
e.printStackTrace();
logger.info ("Catching exception; saying converged.");