// Source: org.neuroph.samples.intronn.SunSpots

/**
* Copyright 2010 Neuroph Project http://neuroph.sourceforge.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*    http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neuroph.samples.intronn;

import java.text.NumberFormat;
import java.util.Observable;
import java.util.Observer;

import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.learning.SupervisedLearning;
import org.neuroph.core.learning.SupervisedTrainingElement;
import org.neuroph.core.learning.TrainingSet;
import org.neuroph.nnet.MultiLayerPerceptron;
import org.neuroph.nnet.Neuroph;
import org.neuroph.nnet.flat.FlatNetworkPlugin;
import org.neuroph.nnet.learning.LMS;
import org.neuroph.util.TransferFunctionType;

/**
* This example shows how to use Neuroph to predict sunspots. 
*
* It demonstrates two very important machine learning techniques.
*
* First, time-window.  Sunspots are organized into input windows used
* to predict the next level of sunspot activity.  This example uses a
* 30 year window.  Basically, 30 years of sunspot activity is used to
* predict the 31st year.  This 30 year window slides forward, one year
* at a time.
*
* Second is normalization.  The sunspots are normalized into a
* range between 0.1 and 0.9.  This is very close to the actual
* 0 to 1 range of the sigmoid function.  We stay away from the
* extreme edges of this range, thus using 0.1 and 0.9.
*
* This is an example from the book "Introduction to Neural Networks
* for Java" by Jeff Heaton.  This example has been contributed
* to the Neuroph project by Jeff Heaton. 
*
* http://www.heatonresearch.com/book/programming-neural-networks-java-2.html
*
* @author Jeff Heaton (http://www.heatonresearch.com)
*
*/
public class SunSpots implements Observer {

    public final static double[] SUNSPOTS = {
        0.02620.05750.08370.12030.18830.3033
        0.15170.10460.05230.04180.01570.0000
        0.00000.01050.05750.14120.24580.3295
        0.31380.20400.14640.13600.11510.0575
        0.10980.20920.40790.63810.53870.3818
        0.24580.18310.05750.02620.08370.1778
        0.36610.42360.58050.52820.38180.2092
        0.10460.08370.02620.05750.11510.2092
        0.31380.42310.43620.24950.25000.1606
        0.06380.05020.05340.17000.24890.2824
        0.32900.44930.32010.23590.19040.1093
        0.05960.19770.36510.55490.52720.4268
        0.34780.18200.16000.03660.10360.4838
        0.80750.65850.44350.35620.20140.1192
        0.05340.12600.43360.69040.68460.6177
        0.47020.34830.31380.24530.21440.1114
        0.08370.03350.02140.03560.07580.1778
        0.23540.22540.24840.22070.14700.0528
        0.04240.01310.00000.00730.02620.0638
        0.07270.18510.23950.21500.15740.1250
        0.08160.03450.02090.00940.04450.0868
        0.18980.25940.33580.35040.37080.2500
        0.14380.04450.06900.29760.63540.7233
        0.53970.44820.33790.19190.12660.0560
        0.07850.20970.32160.51520.65220.5036
        0.34830.33730.28290.20400.10770.0350
        0.02250.11870.28660.49060.50100.4038
        0.30910.23010.24580.15950.08530.0382
        0.19660.38700.72700.58160.53140.3462
        0.23380.08890.05910.06490.01780.0314
        0.16890.28400.31220.33320.33210.2730
        0.13280.06850.03560.03300.03710.1862
        0.38180.44510.40790.33470.21860.1370
        0.13960.06330.04970.01410.02620.1276
        0.21970.33210.28140.32430.25370.2296
        0.09730.02980.01880.00730.05020.2479
        0.29860.54340.42150.33260.19660.1365
        0.07430.03030.08730.23170.33420.3609
        0.40690.33940.18670.11090.05810.0298
        0.04550.18880.41680.59830.57320.4644
        0.35460.24840.16000.08530.05020.1736
        0.48430.79290.71280.70450.43880.3630
        0.16470.07270.02300.19870.74110.9947
        0.96650.83160.58730.28190.19610.1459
        0.05340.07900.24580.49060.55390.5518
        0.54650.34830.36030.19870.18040.0811
        0.06590.14280.48380.8127
      };
   
    /**
     * Starting year for sunspot data.
     */
  public final static int STARTING_YEAR = 1700;
 
  /**
   * Size of our prediction window.
   */
  public final static int WINDOW_SIZE = 30;
 
  /**
   * Start of training data.
   */
  public final static int TRAIN_START = WINDOW_SIZE;
 
  /**
   * End of training data.
   */
  public final static int TRAIN_END = 259;
 
  /**
   * Beginning of evaluation data.
   */
  public final static int EVALUATE_START = 260;
 
  /**
   * End of evaluation data.
   */
  public final static int EVALUATE_END = SUNSPOTS.length - 1;

  /**
   * This really should be lowered, I am setting it to a level here that will
   * train in under a minute.
   */
  public final static double MAX_ERROR = 0.06;

  /**
   * Normalized sunspots.
   */
  private double[] normalizedSunspots;
 
  /**
   * Closed loop sunspots.  Closed loop means use the neural network output
   * as the input for the next prediction, rather than actual data.
   */
  private double[] closedLoopSunspots;
  private double mean;

  /**
   * Normalize the sunspots.
   * @param lo Low range for normalization.
   * @param hi High range for normalization.
   */
  public void normalizeSunspots(double lo, double hi) {
    double min = Double.MAX_VALUE;
    double max = Double.MIN_VALUE;
    for (int year = 0; year < SUNSPOTS.length; year++) {
      min = Math.min(min, SUNSPOTS[year]);
      max = Math.max(max, SUNSPOTS[year]);
    }

    normalizedSunspots = new double[SUNSPOTS.length];
    closedLoopSunspots = new double[SUNSPOTS.length];

    mean = 0;
    for (int year = 0; year < SUNSPOTS.length; year++) {
      normalizedSunspots[year] = closedLoopSunspots[year] = ((SUNSPOTS[year] - min) / (max - min))
          * (hi - lo) + lo;
      mean += normalizedSunspots[year] / SUNSPOTS.length;
    }
  }

  /**
   * Generate the training data for the training sunspot years.
   * @return The training data.
   */
  public TrainingSet generateTraining() {
    TrainingSet result = new TrainingSet(WINDOW_SIZE, 1);

    for (int year = TRAIN_START; year < TRAIN_END; year++) {
      double[] input = new double[WINDOW_SIZE];
      double[] ideal = new double[1];

      int index = 0;
      for (int i = year - WINDOW_SIZE; i < year; i++) {
        input[index++] = this.normalizedSunspots[i];
      }

      ideal[0] = this.normalizedSunspots[year];

      result.addElement(new SupervisedTrainingElement(input, ideal));
    }
    return result;
  }

  /**
   * Predict sunspots.
   * @param network Neural network to use.
   */
  public void predict(NeuralNetwork network) {
    NumberFormat f = NumberFormat.getNumberInstance();
    f.setMaximumFractionDigits(4);
    f.setMinimumFractionDigits(4);

    System.out.println("Year\tActual\tPredict\tClosed Loop Predict");

    for (int year = EVALUATE_START; year < EVALUATE_END; year++) {
      // calculate based on actual data
      double[] input = new double[WINDOW_SIZE];
      for (int i = 0; i < input.length; i++) {
        input[i] = this.normalizedSunspots[(year - WINDOW_SIZE) + i];
      }

      network.setInput(input);
      network.calculate();

      double[] output = network.getOutput();
      double prediction = output[0];
      this.closedLoopSunspots[year] = prediction;

      // calculate "closed loop", based on predicted data
      for (int i = 0; i < input.length; i++) {
        input[i] = this.closedLoopSunspots[(year - WINDOW_SIZE) + i];
      }

      network.setInput(input);
      network.calculate();
      output = network.getOutput();

      double closedLoopPrediction = output[0];

      // display
      System.out.println((STARTING_YEAR + year) + "\t"
          + f.format(this.normalizedSunspots[year]) + "\t"
          + f.format(prediction) + "\t"
          + f.format(closedLoopPrediction));

    }
  }

  public void run() {
   
    // uncomment the following line to use regular Neuroph (non-flat) processing
    Neuroph.getInstance().setFlattenNetworks(false);
   
    NeuralNetwork network = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, WINDOW_SIZE, 10, 1);
     
    normalizeSunspots(0.1, 0.9);
   
    network.getLearningRule().addObserver(this);
   
    TrainingSet training = generateTraining();
    network.learnInSameThread(training);
    predict(network);
   
    Neuroph.getInstance().shutdown();
  }

  public static void main(String args[]) {
    SunSpots sunspot = new SunSpots();
    sunspot.run();
  }

  @Override
  public void update(Observable arg0, Object arg1) {
    SupervisedLearning rule = (SupervisedLearning)arg0;
    System.out.println( "Training, Network Epoch " + rule.getCurrentIteration() + ", Error:" + rule.getTotalNetworkError());
  }

}
// (End of file. Scraper boilerplate removed.)