Package aima.core.util.math

Examples of aima.core.util.math.Vector


    // NOTE(review): the enclosing method signature was clipped by the page
    // extraction; this is the body of a FeedForwardNeuralNetwork training
    // test that uses a non-zero momentum factor (0.5).
    // example 11.14 of Neural Network Design by Hagan, Demuth and Beale
    // Hidden layer: 2 neurons, each fed by the single network input.
    Matrix hiddenLayerWeightMatrix = new Matrix(2, 1);
    hiddenLayerWeightMatrix.set(0, 0, -0.27);
    hiddenLayerWeightMatrix.set(1, 0, -0.41);

    Vector hiddenLayerBiasVector = new Vector(2);
    hiddenLayerBiasVector.setValue(0, -0.48);
    hiddenLayerBiasVector.setValue(1, -0.13);

    // Single scalar network input of 1.0.
    Vector input = new Vector(1);
    input.setValue(0, 1);

    // Output layer: 1 neuron fed by the 2 hidden-layer activations.
    Matrix outputLayerWeightMatrix = new Matrix(1, 2);
    outputLayerWeightMatrix.set(0, 0, 0.09);
    outputLayerWeightMatrix.set(0, 1, -0.17);

    Vector outputLayerBiasVector = new Vector(1);
    outputLayerBiasVector.setValue(0, 0.48);

    // Error signal to backpropagate through the network.
    Vector error = new Vector(1);
    error.setValue(0, 1.261);

    double learningRate = 0.1;
    double momentumFactor = 0.5;
    FeedForwardNeuralNetwork ffnn = new FeedForwardNeuralNetwork(
        hiddenLayerWeightMatrix, hiddenLayerBiasVector,
        outputLayerWeightMatrix, outputLayerBiasVector);

    // One forward pass followed by one backprop update step.
    ffnn.setTrainingScheme(new BackPropLearning(learningRate,
        momentumFactor));
    ffnn.processInput(input);
    ffnn.processError(error);

    // Post-update weights/biases (tolerance 0.001). These differ from the
    // momentum-free variant of this test later in the file because of the
    // momentum factor of 0.5.
    Matrix finalHiddenLayerWeights = ffnn.getHiddenLayerWeights();
    Assert.assertEquals(-0.2675, finalHiddenLayerWeights.get(0, 0), 0.001);
    Assert.assertEquals(-0.4149, finalHiddenLayerWeights.get(1, 0), 0.001);

    Vector hiddenLayerBias = ffnn.getHiddenLayerBias();
    Assert.assertEquals(-0.4775, hiddenLayerBias.getValue(0), 0.001);
    Assert.assertEquals(-0.1349, hiddenLayerBias.getValue(1), 0.001);

    Matrix finalOutputLayerWeights = ffnn.getOutputLayerWeights();
    Assert.assertEquals(0.1304, finalOutputLayerWeights.get(0, 0), 0.001);
    Assert.assertEquals(-0.1235, finalOutputLayerWeights.get(0, 1), 0.001);

    Vector outputLayerBias = ffnn.getOutputLayerBias();
    Assert.assertEquals(0.6061, outputLayerBias.getValue(0), 0.001);
  }
View Full Code Here


    // lots of tedious tests necessary to ensure nn is fundamentally correct
    // NOTE(review): the enclosing method signature was clipped by the page
    // extraction; this body exercises Layer.feedForward() for both layers of
    // the 1-2-1 network from Hagan, Demuth & Beale example 11.14.
    Matrix weightMatrix1 = new Matrix(2, 1);
    weightMatrix1.set(0, 0, -0.27);
    weightMatrix1.set(1, 0, -0.41);

    Vector biasVector1 = new Vector(2);
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    // Hidden layer uses a log-sigmoid activation.
    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    // Expected hidden-layer activations for input 1.0 (tolerance 0.001).
    Vector expected = new Vector(2);
    expected.setValue(0, 0.321);
    expected.setValue(1, 0.368);

    Vector result1 = layer1.feedForward(inputVector1);
    Assert.assertEquals(expected.getValue(0), result1.getValue(0), 0.001);
    Assert.assertEquals(expected.getValue(1), result1.getValue(1), 0.001);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    // Output layer uses a purely linear activation; it is fed the hidden
    // layer's cached activations from the feedForward call above.
    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    Vector result2 = layer2.feedForward(inputVector2);
    Assert.assertEquals(0.446, result2.getValue(0), 0.001);
  }
View Full Code Here

  // Verifies that the output layer's sensitivity matrix, derived directly
  // from an error vector, matches the value from example 11.14 of Hagan,
  // Demuth & Beale's "Neural Network Design".
  public void testSensitivityMatrixCalculationFromErrorVector() {
    // Hidden layer: 2 log-sigmoid neurons fed by a single input.
    Matrix weightMatrix1 = new Matrix(2, 1);
    weightMatrix1.set(0, 0, -0.27);
    weightMatrix1.set(1, 0, -0.41);

    Vector biasVector1 = new Vector(2);
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    // Output layer: 1 linear neuron fed by the 2 hidden activations.
    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    // Derive the output layer's sensitivity from a known error of 1.261.
    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    Matrix sensitivityMatrix = layer2Sensitivity.getSensitivityMatrix();
    Assert.assertEquals(-2.522, sensitivityMatrix.get(0, 0), 0.0001);
    // NOTE(review): the remainder of this method (at minimum its closing
    // brace) was truncated by the page extraction.
View Full Code Here

  // Verifies that a hidden layer's sensitivity matrix can be backpropagated
  // from the succeeding (output) layer's sensitivity. Same 1-2-1 network as
  // the other tests in this file (Hagan, Demuth & Beale, example 11.14).
  public void testSensitivityMatrixCalculationFromSucceedingLayer() {
    Matrix weightMatrix1 = new Matrix(2, 1);
    weightMatrix1.set(0, 0, -0.27);
    weightMatrix1.set(1, 0, -0.41);

    Vector biasVector1 = new Vector(2);
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    // Output-layer sensitivity from a known error of 1.261 ...
    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    // ... then backpropagated to the hidden layer.
    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);
    // NOTE(review): the assertions on layer1's sensitivity matrix and the
    // method's closing brace were truncated by the page extraction.
View Full Code Here

  // Checks that BackPropLearning forms the weight-update matrices correctly
  // for the 1-2-1 example network. The visible portion only sets up the
  // network and the layer sensitivities; the weight-update assertions were
  // truncated by the page extraction (see note at the end).
  public void testWeightUpdateMatrixesFormedCorrectly() {
    Matrix weightMatrix1 = new Matrix(2, 1);
    weightMatrix1.set(0, 0, -0.27);
    weightMatrix1.set(1, 0, -0.41);

    Vector biasVector1 = new Vector(2);
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    // Sensitivities: output layer from the error vector, hidden layer
    // backpropagated from the output layer.
    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);
    // NOTE(review): the calculateWeightUpdates calls and their assertions,
    // plus the closing brace, were truncated by the page extraction.
View Full Code Here

  public void testBiasUpdateMatrixesFormedCorrectly() {
    Matrix weightMatrix1 = new Matrix(2, 1);
    weightMatrix1.set(0, 0, -0.27);
    weightMatrix1.set(1, 0, -0.41);

    Vector biasVector1 = new Vector(2);
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

    Vector biasUpdateVector2 = BackPropLearning.calculateBiasUpdates(
        layer2Sensitivity, 0.1);
    Assert.assertEquals(0.2522, biasUpdateVector2.getValue(0), 0.001);

    Vector lastBiasUpdateVector2 = layer2.getLastBiasUpdateVector();
    Assert.assertEquals(0.2522, lastBiasUpdateVector2.getValue(0), 0.001);

    Vector penultimateBiasUpdateVector2 = layer2
        .getPenultimateBiasUpdateVector();
    Assert.assertEquals(0.0, penultimateBiasUpdateVector2.getValue(0),
        0.001);

    Vector biasUpdateVector1 = BackPropLearning.calculateBiasUpdates(
        layer1Sensitivity, 0.1);
    Assert.assertEquals(0.00495, biasUpdateVector1.getValue(0), 0.001);
    Assert.assertEquals(-0.00997, biasUpdateVector1.getValue(1), 0.001);

    Vector lastBiasUpdateVector1 = layer1.getLastBiasUpdateVector();

    Assert.assertEquals(0.00495, lastBiasUpdateVector1.getValue(0), 0.001);
    Assert.assertEquals(-0.00997, lastBiasUpdateVector1.getValue(1), 0.001);

    Vector penultimateBiasUpdateVector1 = layer1
        .getPenultimateBiasUpdateVector();
    Assert.assertEquals(0.0, penultimateBiasUpdateVector1.getValue(0),
        0.001);
    Assert.assertEquals(0.0, penultimateBiasUpdateVector1.getValue(1),
        0.001);
  }
View Full Code Here

  public void testWeightsAndBiasesUpdatedCorrectly() {
    Matrix weightMatrix1 = new Matrix(2, 1);
    weightMatrix1.set(0, 0, -0.27);
    weightMatrix1.set(1, 0, -0.41);

    Vector biasVector1 = new Vector(2);
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

    BackPropLearning.calculateWeightUpdates(layer2Sensitivity,
        layer1.getLastActivationValues(), 0.1);

    BackPropLearning.calculateBiasUpdates(layer2Sensitivity, 0.1);

    BackPropLearning.calculateWeightUpdates(layer1Sensitivity,
        inputVector1, 0.1);

    BackPropLearning.calculateBiasUpdates(layer1Sensitivity, 0.1);

    layer2.updateWeights();
    Matrix newWeightMatrix2 = layer2.getWeightMatrix();
    Assert.assertEquals(0.171, newWeightMatrix2.get(0, 0), 0.001);
    Assert.assertEquals(-0.0772, newWeightMatrix2.get(0, 1), 0.001);

    layer2.updateBiases();
    Vector newBiasVector2 = layer2.getBiasVector();
    Assert.assertEquals(0.7322, newBiasVector2.getValue(0), 0.00001);

    layer1.updateWeights();
    Matrix newWeightMatrix1 = layer1.getWeightMatrix();

    Assert.assertEquals(-0.265, newWeightMatrix1.get(0, 0), 0.001);
    Assert.assertEquals(-0.419, newWeightMatrix1.get(1, 0), 0.001);

    layer1.updateBiases();
    Vector newBiasVector1 = layer1.getBiasVector();

    Assert.assertEquals(-0.475, newBiasVector1.getValue(0), 0.001);
    Assert.assertEquals(-0.139, newBiasVector1.getValue(1), 0.001);
  }
View Full Code Here

    // NOTE(review): the enclosing method signature was clipped by the page
    // extraction; this is the body of a FeedForwardNeuralNetwork training
    // test with momentum disabled (momentumFactor = 0.0).
    // example 11.14 of Neural Network Design by Hagan, Demuth and Beale
    // Hidden layer: 2 neurons, each fed by the single network input.
    Matrix hiddenLayerWeightMatrix = new Matrix(2, 1);
    hiddenLayerWeightMatrix.set(0, 0, -0.27);
    hiddenLayerWeightMatrix.set(1, 0, -0.41);

    Vector hiddenLayerBiasVector = new Vector(2);
    hiddenLayerBiasVector.setValue(0, -0.48);
    hiddenLayerBiasVector.setValue(1, -0.13);

    // Single scalar network input of 1.0.
    Vector input = new Vector(1);
    input.setValue(0, 1);

    // Output layer: 1 neuron fed by the 2 hidden-layer activations.
    Matrix outputLayerWeightMatrix = new Matrix(1, 2);
    outputLayerWeightMatrix.set(0, 0, 0.09);
    outputLayerWeightMatrix.set(0, 1, -0.17);

    Vector outputLayerBiasVector = new Vector(1);
    outputLayerBiasVector.setValue(0, 0.48);

    // Error signal to backpropagate through the network.
    Vector error = new Vector(1);
    error.setValue(0, 1.261);

    double learningRate = 0.1;
    double momentumFactor = 0.0;
    FeedForwardNeuralNetwork ffnn = new FeedForwardNeuralNetwork(
        hiddenLayerWeightMatrix, hiddenLayerBiasVector,
        outputLayerWeightMatrix, outputLayerBiasVector);
    // One forward pass followed by one plain-gradient backprop step.
    ffnn.setTrainingScheme(new BackPropLearning(learningRate,
        momentumFactor));
    ffnn.processInput(input);
    ffnn.processError(error);

    // Post-update weights/biases (tolerance 0.001); these match the values
    // asserted in testWeightsAndBiasesUpdatedCorrectly, which performs the
    // same step layer by layer.
    Matrix finalHiddenLayerWeights = ffnn.getHiddenLayerWeights();
    Assert.assertEquals(-0.265, finalHiddenLayerWeights.get(0, 0), 0.001);
    Assert.assertEquals(-0.419, finalHiddenLayerWeights.get(1, 0), 0.001);

    Vector hiddenLayerBias = ffnn.getHiddenLayerBias();
    Assert.assertEquals(-0.475, hiddenLayerBias.getValue(0), 0.001);
    Assert.assertEquals(-0.1399, hiddenLayerBias.getValue(1), 0.001);

    Matrix finalOutputLayerWeights = ffnn.getOutputLayerWeights();
    Assert.assertEquals(0.171, finalOutputLayerWeights.get(0, 0), 0.001);
    Assert.assertEquals(-0.0772, finalOutputLayerWeights.get(0, 1), 0.001);

    Vector outputLayerBias = ffnn.getOutputLayerBias();
    Assert.assertEquals(0.7322, outputLayerBias.getValue(0), 0.001);
  }
View Full Code Here

TOP

Related Classes of aima.core.util.math.Vector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware@gmail.com.