Package org.encog.mathutil.randomize

Examples of org.encog.mathutil.randomize.RangeRandomizer
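
RangeRandomizer assigns every weight a uniform random value between a low and a high bound. Below is a minimal, self-contained sketch of the typical call pattern; the class name is ours, and it only uses calls that also appear in the snippets further down this page.

import org.encog.mathutil.randomize.RangeRandomizer;
import org.encog.neural.networks.BasicNetwork;
import org.encog.util.simple.EncogUtility;

public class RangeRandomizerSketch {

  public static void main(final String[] args) {
    // 5-10-15-20 feedforward network, as in the testTracks() snippet below
    BasicNetwork network = EncogUtility.simpleFeedForward(5, 10, 15, 20, true);

    // overwrite every weight with a uniform random value in [-1, 1]
    new RangeRandomizer(-1, 1).randomize(network);

    double[] weights = network.getStructure().getFlat().getWeights();
    System.out.println("first weight after randomizing: " + weights[0]);
  }
}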


  /**
   * Reset the weights. Nguyen-Widrow initialization needs input, hidden and
   * output layers, so networks with fewer than three layers fall back to a
   * simple range randomizer.
   */
  @Override
  public final void reset() {

    if (getLayerCount() < 3) {
      (new RangeRandomizer(-1, 1)).randomize(this);
    } else {
      (new NguyenWidrowRandomizer(-1, 1)).randomize(this);
    }
  }
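
One common way to reach the randomizers above is through reset() rather than calling them directly. A short sketch of building a network by hand and resetting it, assuming the standard Encog 3 BasicLayer and ActivationSigmoid classes (layer sizes are arbitrary):

  // assumes: org.encog.neural.networks.BasicNetwork,
  //          org.encog.neural.networks.layers.BasicLayer,
  //          org.encog.engine.network.activation.ActivationSigmoid
  static BasicNetwork buildAndReset() {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));                      // input
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4));   // hidden
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));  // output
    network.getStructure().finalizeStructure();
    network.reset(); // three layers, so Nguyen-Widrow runs; smaller nets fall back to RangeRandomizer(-1, 1)
    return network;
  }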


      final boolean usePercent, final double percent) {

    final Randomizer d;

    if (useRange) {
      // uniform random weights in [low, high]
      d = new RangeRandomizer(low, high);
    } else {
      // distort the existing weights by the given percent
      d = new Distort(percent);
    }

    // check for errors

    network.addLayer(inputLayer);
    network.addLayer(outputLayer);
    network.getStructure().finalizeStructure();

    // initialize all weights uniformly in [-0.5, 0.5]
    (new RangeRandomizer(-0.5, 0.5)).randomize(network);

    return network;
  }

    return total / SAMPLE_SIZE;
  }

  public static void main(final String[] args) {

    // candidate weight-initialization strategies to compare
    RangeRandomizer rangeRandom = new RangeRandomizer(-1, 1);
    NguyenWidrowRandomizer nwrRandom = new NguyenWidrowRandomizer(-1, 1);
    FanInRandomizer fanRandom = new FanInRandomizer();
    GaussianRandomizer gaussianRandom = new GaussianRandomizer(0, 1); // mean 0, std dev 1

    System.out.println("Error improvement, higher is better.");
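
The rest of that benchmark is not shown here. A rough sketch of how an "error improvement" comparison can be set up follows; this is our own illustration, not the original code, and it assumes Encog's ResilientPropagation trainer and BasicNetwork.calculateError(), reusing the XOR data that appears in the testGenetic() snippet further down.

    // assumes: org.encog.ml.data.MLDataSet, org.encog.ml.data.basic.BasicMLDataSet,
    //          org.encog.neural.networks.training.propagation.resilient.ResilientPropagation
    // error before vs. after a few RPROP iterations, starting from RangeRandomizer weights
    MLDataSet data = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 3, 1, true);
    new RangeRandomizer(-1, 1).randomize(network);

    double before = network.calculateError(data);
    ResilientPropagation train = new ResilientPropagation(network, data);
    for (int i = 0; i < 10; i++) {
      train.iteration();
    }
    System.out.println("improvement: " + (before - network.calculateError(data)));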

  public void testTracks()
  {
    // set every weight to 100, then confirm RangeRandomizer overwrites them all
    BasicNetwork network = EncogUtility.simpleFeedForward(5, 10, 15, 20, true);
    double[] weights = network.getStructure().getFlat().getWeights();
    EngineArray.fill(weights, 100);
    (new RangeRandomizer(-1, 1)).randomize(network);

    for (int i = 0; i < weights.length; i++)
    {
      Assert.assertTrue(weights[i] < 10);
    }

  public void testGenetic() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    CalculateScore score = new TrainingSetScore(trainingData);
    // population of 500, 10% mutation, 25% mating; RangeRandomizer(-1, 1) seeds the initial population
    NeuralGeneticAlgorithm genetic = new NeuralGeneticAlgorithm(network,
        new RangeRandomizer(-1, 1), score, 500, 0.1, 0.25);
    NetworkUtil.testTraining(genetic, 0.00001);
  }

      final double percentToMate = dialog.getPercentToMate().getValue();

      // genetic trainer configured from the workbench dialog; the initial
      // population's weights come from RangeRandomizer(-1, 1)
      CalculateScore score = new TrainingSetScore(trainingData);
      final NeuralGeneticAlgorithm train = new NeuralGeneticAlgorithm(
          (BasicNetwork) file.getObject(),
          new RangeRandomizer(-1, 1), score, populationSize,
          mutationPercent, percentToMate);
      train.setTraining(trainingData);
      startup(file, train, dialog.getMaxError().getValue() / 100.0);
    }

  private void optionRandomize(RandomizeNetworkDialog dialog) {
    Randomizer r = null;

    switch (dialog.getType().getSelectedIndex()) {
    case 0: // Random
      r = new RangeRandomizer(dialog.getLow().getValue(), dialog
          .getHigh().getValue());
      break;
    case 1: // Nguyen-Widrow
      r = new NguyenWidrowRandomizer(dialog.getLow().getValue(), dialog
          .getHigh().getValue());
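
Outside the workbench dialog, the same selection can be written as a small helper. The pick() method below is an illustrative sketch of our own, not part of Encog; it only uses constructors that already appear on this page.

  static Randomizer pick(final int type, final double low, final double high) {
    switch (type) {
    case 0:  return new RangeRandomizer(low, high);         // uniform in [low, high]
    case 1:  return new NguyenWidrowRandomizer(low, high);  // layer-aware initialization
    default: return new GaussianRandomizer(0, 1);           // mean 0, standard deviation 1
    }
  }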

        MLTrainFactory.PROPERTY_MUTATION, false, 0.1);
    final double mate = holder.getDouble(MLTrainFactory.PROPERTY_MATE,
        false, 0.25);

    // genetic trainer built by the factory; mutation defaults to 0.1 and
    // mate to 0.25 when those properties are not supplied
    final MLTrain train = new NeuralGeneticAlgorithm((BasicNetwork) method,
        new RangeRandomizer(-1, 1), score, populationSize, mutation,
        mate);

    return train;
  }
