*/
@Test
public void testTrainWithSquaredError() {
// generate training data
DoubleVector[] trainingData = new DenseDoubleVector[] {
new DenseDoubleVector(new double[] { 0, 0, 0 }),
new DenseDoubleVector(new double[] { 0, 1, 1 }),
new DenseDoubleVector(new double[] { 1, 0, 1 }),
new DenseDoubleVector(new double[] { 1, 1, 0 }) };
// set parameters
double learningRate = 0.3;
double regularization = 0.02; // small L2 regularization
double momentum = 0; // no momentum
String squashingFunctionName = "Sigmoid";
String costFunctionName = "SquaredError";
int[] layerSizeArray = new int[] { 2, 5, 1 };
SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
regularization, momentum, squashingFunctionName, costFunctionName,
layerSizeArray);
try {
// train by multiple instances
Random rnd = new Random();
for (int i = 0; i < 100000; ++i) {
DenseDoubleMatrix[] weightUpdates = mlp
.trainByInstance(trainingData[rnd.nextInt(4)]);
mlp.updateWeightMatrices(weightUpdates);
}
// System.out.printf("Weight matrices: %s\n",
// mlp.weightsToString(mlp.getWeightMatrices()));
for (int i = 0; i < trainingData.length; ++i) {
DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
.slice(2);
double expected = trainingData[i].toArray()[2];
double actual = mlp.output(testVec).toArray()[0];
if (expected < 0.5 && actual >= 0.5 || expected >= 0.5 && actual < 0.5) {
Log.info("Neural network failes to lear the XOR.");