Package aima.core.learning.neural

Examples of aima.core.learning.neural.LayerSensitivity
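LayerSensitivity wraps a Layer and computes its sensitivity matrix for back-propagation: sensitivityMatrixFromErrorMatrix(...) derives the output layer's sensitivities from an error vector, while sensitivityMatrixFromSucceedingLayer(...) back-propagates sensitivities to a preceding (hidden) layer. BackPropLearning then uses these sensitivities when calculating weight and bias updates. The excerpts below appear to come from the library's test suite; each one starts and stops mid-method, so a self-contained sketch that strings the same calls together follows the excerpts.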


    // Tail of a test method: layer1 and layer2 are constructed earlier in the full
    // test (see the later excerpts for the typical two-layer setup).
    // Feed the hidden layer's activations forward through the output layer.
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    // Error on the single output neuron (target - actual output).
    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    // With a purely linear output layer the sensitivity is -2 * error = -2.522.
    Matrix sensitivityMatrix = layer2Sensitivity.getSensitivityMatrix();
    Assert.assertEquals(-2.522, sensitivityMatrix.get(0, 0), 0.0001);
  }


    // Hidden layer: two log-sigmoid neurons fed by a single input. The declarations
    // of weightMatrix1 and biasVector1 appear just before this excerpt in the full test.
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    // Forward pass for a single scalar input.
    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    // Output layer: one purely linear neuron taking the two hidden activations.
    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    // Output-layer sensitivity computed directly from the error vector...
    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    // ...then back-propagated to obtain the hidden layer's sensitivity matrix.
    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);
    Matrix sensitivityMatrix = layer1Sensitivity.getSensitivityMatrix();

    // Same two-layer setup as the previous excerpt; after both sensitivities are
    // computed, it calculates the output layer's weight updates (the arguments of
    // calculateWeightUpdates are cut off at the end of this excerpt).
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

    Matrix weightUpdateMatrix2 = BackPropLearning.calculateWeightUpdates(

    // Same setup again; this excerpt ends with the (truncated) call that calculates
    // the output layer's bias updates via BackPropLearning.calculateBiasUpdates.
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

    Vector biasUpdateVector2 = BackPropLearning.calculateBiasUpdates(

    // Same setup once more, ending with another truncated call to
    // BackPropLearning.calculateWeightUpdates for the output layer.
    biasVector1.setValue(0, -0.48);
    biasVector1.setValue(1, -0.13);

    Layer layer1 = new Layer(weightMatrix1, biasVector1,
        new LogSigActivationFunction());
    LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

    Vector inputVector1 = new Vector(1);
    inputVector1.setValue(0, 1);

    layer1.feedForward(inputVector1);

    Matrix weightMatrix2 = new Matrix(1, 2);
    weightMatrix2.set(0, 0, 0.09);
    weightMatrix2.set(0, 1, -0.17);

    Vector biasVector2 = new Vector(1);
    biasVector2.setValue(0, 0.48);

    Layer layer2 = new Layer(weightMatrix2, biasVector2,
        new PureLinearActivationFunction());
    Vector inputVector2 = layer1.getLastActivationValues();
    layer2.feedForward(inputVector2);

    Vector errorVector = new Vector(1);
    errorVector.setValue(0, 1.261);
    LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
    layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

    layer1Sensitivity
        .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

    BackPropLearning.calculateWeightUpdates(layer2Sensitivity,
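
The excerpts above all begin and end mid-method, so here is a minimal self-contained sketch of the same flow: a two-neuron log-sigmoid hidden layer feeding a one-neuron purely linear output layer, one forward pass, the output-layer sensitivity computed from an error vector, and the hidden-layer sensitivity back-propagated from it. The class name LayerSensitivityDemo, the hidden layer's weight and bias values, and the import locations of Matrix and Vector (assumed here to be aima.core.util.math, as in the aima-java code base) are filled in for illustration only, since the excerpts do not show them; the BackPropLearning update calls are omitted because their argument lists are cut off above.

    import aima.core.learning.neural.Layer;
    import aima.core.learning.neural.LayerSensitivity;
    import aima.core.learning.neural.LogSigActivationFunction;
    import aima.core.learning.neural.PureLinearActivationFunction;
    import aima.core.util.math.Matrix;
    import aima.core.util.math.Vector;

    public class LayerSensitivityDemo {
      public static void main(String[] args) {
        // Hidden layer: 2 log-sigmoid neurons, 1 input.
        // NOTE: these hidden-layer weight/bias values are placeholders for illustration.
        Matrix weightMatrix1 = new Matrix(2, 1);
        weightMatrix1.set(0, 0, -0.27);
        weightMatrix1.set(1, 0, -0.41);
        Vector biasVector1 = new Vector(2);
        biasVector1.setValue(0, -0.48);
        biasVector1.setValue(1, -0.13);
        Layer layer1 = new Layer(weightMatrix1, biasVector1,
            new LogSigActivationFunction());
        LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

        // Output layer: 1 purely linear neuron taking the two hidden activations.
        Matrix weightMatrix2 = new Matrix(1, 2);
        weightMatrix2.set(0, 0, 0.09);
        weightMatrix2.set(0, 1, -0.17);
        Vector biasVector2 = new Vector(1);
        biasVector2.setValue(0, 0.48);
        Layer layer2 = new Layer(weightMatrix2, biasVector2,
            new PureLinearActivationFunction());
        LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);

        // Forward pass: scalar input -> hidden layer -> output layer.
        Vector inputVector1 = new Vector(1);
        inputVector1.setValue(0, 1);
        layer1.feedForward(inputVector1);
        layer2.feedForward(layer1.getLastActivationValues());

        // Output-layer sensitivity from the error vector (error = target - output).
        Vector errorVector = new Vector(1);
        errorVector.setValue(0, 1.261);
        layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

        // Hidden-layer sensitivity, back-propagated from the output layer.
        layer1Sensitivity.sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

        // With a purely linear output layer the single output sensitivity is -2 * error.
        System.out.println("output layer sensitivity: "
            + layer2Sensitivity.getSensitivityMatrix().get(0, 0));
        // layer1Sensitivity.getSensitivityMatrix() now holds the hidden-layer
        // sensitivities, ready for BackPropLearning's weight/bias update calculations.
      }
    }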


