Package: zdenekdrahos.Testing

Source code of zdenekdrahos.Testing.MainBackAlgorithm

package zdenekdrahos.Testing;

import zdenekdrahos.AI.NeuralNetwork.Layers.ILayer;
import zdenekdrahos.AI.NeuralNetwork.Layers.Layer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import zdenekdrahos.AI.ActivationFunctions.*;

public class MainBackAlgorithm {

    static Map<Integer, List<List<Double>>> weights = new HashMap<Integer, List<List<Double>>>();
    // for momentum
    static Map<Integer, List<List<Double>>> previousAdjustment = new HashMap<Integer, List<List<Double>>>();
    static Map<Integer, List<Double>> values = new HashMap<Integer, List<Double>>();
    static Map<Integer, List<Double>> delta = new HashMap<Integer, List<Double>>();
    static double[] input, output;
    static ILayer[] network;
    static double learningRate = 0.75;
    static double momentum = 0.9;
    static double[][][] initWeight = {
        {
            {0.1, -0.1},
            {-0.2, 0.2},
            {0.15, -0.15},
            {-0.05, 0.05}
        },
        {
            {0.1, 0.01, 0.15, 0.21, -0.50}
        }
    };

    public static void main(String[] args) {

        input = new double[]{0};
        output = new double[]{1};

        //input = new double[]{0, 0};
        //output = new double[]{1, 1};

        IActivationFactory factory = new ActivationFactory();
        ILayer inputLayer = new Layer(1, factory.getLinearFunction());
        ILayer hiddenLayer1 = new Layer(4, factory.getHyperbolicTangent());
        ILayer hiddenLayer2 = new Layer(4, factory.getHyperbolicTangent());
        ILayer outputLayer = new Layer(1, factory.getLinearFunction());

        // check
        if (input.length != output.length || input.length != inputLayer.getNeuronsCount() || output.length != outputLayer.getNeuronsCount()) {
            System.exit(1);
        }

        network = new ILayer[]{inputLayer, hiddenLayer1, outputLayer};

        initValues();
        initWeights();
       
        for (int i = 1; i <= 1; i++) {
            System.out.println("Iteration " + i);
            feedforward();
            backPropagation();
            updateWeights();
        }
    }

    private static void initValues() {
        System.out.println("Init values: ");
        for (int layerIndex = 0; layerIndex < network.length; layerIndex++) {
            // input to neu
            int neurons = network[layerIndex].getNeuronsCount();
            List<Double> initValues = new ArrayList<Double>(neurons);
            List<Double> deltaValues = new ArrayList<Double>(neurons);
            System.out.print(layerIndex + " = ");
            for (int neuronIndex = 0; neuronIndex < neurons; neuronIndex++) {
                double w = layerIndex == 0 ? input[neuronIndex] : Double.NaN;
                initValues.add(w);
                System.out.print(w + ", ");
                deltaValues.add(0.0);
            }
            System.out.println();
            values.put(layerIndex, initValues);
            delta.put(layerIndex, deltaValues);
        }
        System.out.println();

    }

    private static void initWeights() {
        // init weights
        Random rand = new Random(System.currentTimeMillis());
        Integer bias = new Integer(1);
        System.out.println("Init weights: ");

        for (int layerIndex = 1; layerIndex < network.length; layerIndex++) {
            // input to neu
            int previousNeurons = network[layerIndex - 1].getNeuronsCount() + 1;
            int currentNeurons = network[layerIndex].getNeuronsCount();
            List<List<Double>> initWeights = new ArrayList<List<Double>>(currentNeurons);
            List<List<Double>> initMomentum = new ArrayList<List<Double>>(currentNeurons);
            System.out.println((layerIndex - 1) + " -> " + layerIndex + " layer");
            for (int neuronIndex = 0; neuronIndex < currentNeurons; neuronIndex++) {
                List<Double> neuronWeights = new ArrayList<Double>(previousNeurons);
                List<Double> momentumValues = new ArrayList<Double>(previousNeurons);
                System.out.print(neuronIndex + ": ");
                for (int prevIndex = 0; prevIndex < previousNeurons; prevIndex++) {
                    double w = rand.nextGaussian() - 0.5;
                    w = initWeight[layerIndex - 1][neuronIndex][prevIndex];
                    neuronWeights.add(w);
                    System.out.print(w + ", ");
                    momentumValues.add(0.0);
                }
                System.out.println();
                initWeights.add(neuronWeights);
                initMomentum.add(momentumValues);
            }
            weights.put(layerIndex, initWeights);
            previousAdjustment.put(layerIndex, initMomentum);
        }
        System.out.println();
    }

    private static void feedforward() {
        List<Double> previousLayerValues, previousLayerWeights;
        System.out.println("Feedforward: ");
        for (int layerIndex = 1; layerIndex < network.length; layerIndex++) {
            previousLayerValues = values.get(layerIndex - 1);
            double sum;
            System.out.println(layerIndex + ".layer:");
            for (int neuronIndex = 0; neuronIndex < network[layerIndex].getNeuronsCount(); neuronIndex++) {
                previousLayerWeights = weights.get(layerIndex).get(neuronIndex);
                System.out.print(neuronIndex + ".neuron = ");
                // bias
                sum = 1 * previousLayerWeights.get(0);
                System.out.printf("%.2f * %.2f + ", previousLayerWeights.get(0), 1.0);
                for (int i = 1; i < previousLayerWeights.size(); i++) {
                    double x = previousLayerValues.get(i - 1);
                    double w = previousLayerWeights.get(i);
                    sum += x * w;
                    System.out.printf("%.2f * %.2f + ", w, x);
                }
                double a = network[layerIndex].getActivationFunction().activate(sum);
                System.out.printf(" = sum = %f -> active = %f\n", sum, a);
                values.get(layerIndex).set(neuronIndex, a);
            }
        }
        System.out.println();
    }

    private static void backPropagation() {
        System.out.println("Back propagation (delta calculation): ");
        // output
        int indexOutput = network.length - 1;
        List<Double> currentDelta = delta.get(indexOutput);
        List<Double> currentValues = values.get(indexOutput);
        System.out.println("Output layer:");
        for (int i = 0; i < output.length; i++) {
            double error = output[i] - currentValues.get(i);
            double derivate = network[indexOutput].getActivationFunction().derivate(currentValues.get(i));
            double deltaValue = error * derivate;
            System.out.printf("%d: (%f - %f) * %f = %f\n", i, output[i], currentValues.get(i), derivate, deltaValue);
            currentDelta.set(i, deltaValue);
        }

        for (int layerIndex = indexOutput - 1; layerIndex > 0; layerIndex--) {
            System.out.println(layerIndex + ".layer");
            currentValues = values.get(layerIndex);
            currentDelta = delta.get(layerIndex);
            int neurons = network[layerIndex].getNeuronsCount();
            for (int neuronIndex = 0; neuronIndex < neurons; neuronIndex++) {
                System.out.print(neuronIndex + ": ");
                double sumDeltaWeight = 0;

                int nextLayer = layerIndex + 1;
                System.out.print("SUM( ");
                for (int i = 0; i < network[nextLayer].getNeuronsCount(); i++) {
                    double tempDelta = delta.get(nextLayer).get(i);
                    double tempWeight = weights.get(nextLayer).get(i).get(neuronIndex + 1);
                    sumDeltaWeight += tempDelta * tempWeight;
                    System.out.print(tempDelta + " * " + tempWeight + ", ");
                }
                System.out.print(" ) * ");

                double derivate = network[layerIndex].getActivationFunction().derivate(currentValues.get(neuronIndex));
                double deltaValue = sumDeltaWeight * derivate;
                System.out.print("derivate (" + currentValues.get(neuronIndex) + ") = ");
                System.out.print(sumDeltaWeight + " * " + derivate);
                System.out.print(" = " + deltaValue);
                currentDelta.set(neuronIndex, deltaValue);
                System.out.println();
            }
        }
        System.out.println();
    }

    private static void updateWeights() {

        System.out.println("Update weights: ");
        for (int layerIndex = network.length - 1; layerIndex > 0; layerIndex--) {
            int currentNeurons = network[layerIndex].getNeuronsCount();
            int previousNeurons = network[layerIndex - 1].getNeuronsCount() + 1;
            System.out.println(layerIndex + " -> " + (layerIndex - 1) + " layer");
            for (int neuronIndex = 0; neuronIndex < currentNeurons; neuronIndex++) {
                double neuronDelta = delta.get(layerIndex).get(neuronIndex);
                System.out.println(neuronIndex + " - delta = " + neuronDelta + ": ");
                for (int prevIndex = 0; prevIndex < previousNeurons; prevIndex++) {
                    double prevNeuronValue = prevIndex == 0 ? 1 : values.get(layerIndex - 1).get(prevIndex - 1);
                    double prevNeuronWeight = weights.get(layerIndex).get(neuronIndex).get(prevIndex);
                    double weightAdjustment = learningRate * neuronDelta * prevNeuronValue;
                    System.out.print(prevIndex + ".previous neuron: ");
                    System.out.printf("alfa(%f) * delta(%f) * value(%f)", learningRate, neuronDelta, prevNeuronValue, weightAdjustment);
                    double prevAdjustment = previousAdjustment.get(layerIndex).get(neuronIndex).get(prevIndex);
                    weightAdjustment += momentum * prevAdjustment;
                    System.out.printf("+  momentum(%f) * previous(%f) = %f -> ", momentum, prevAdjustment, weightAdjustment);
                    double newWeight = prevNeuronWeight + weightAdjustment;
                    previousAdjustment.get(layerIndex).get(neuronIndex).set(prevIndex, weightAdjustment);
                    System.out.printf(" new weight = currenWeight(%f) + %f = %f\n", prevNeuronWeight, weightAdjustment, newWeight);
                    weights.get(layerIndex).get(neuronIndex).set(prevIndex, newWeight);
                }
                System.out.println();
            }
        }
    }
}
TOP

Related Classes of zdenekdrahos.Testing.MainBackAlgorithm

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Corporation. Contact: coftware#gmail.com.