Package: org.encog.neural.networks.training

Examples of org.encog.neural.networks.training.CalculateScore


 
  // Builds a NEAT population for the XOR problem: the XOR truth table becomes
  // the training data, a TrainingSetScore over it supplies the fitness
  // function, and a step activation centered at 0.5 squashes network output.
  // NOTE(review): this snippet is truncated by the aggregator at the
  // NEATTraining constructor call — the remaining arguments and the return
  // statement are not visible here.
  private NEATPopulation generate()
  {
    // XOR inputs/ideals as an in-memory data set.
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
   
    // Fitness = training-set error of each candidate network.
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    ActivationStep step = new ActivationStep();
    step.setCenter(0.5);  // step threshold at 0.5, suitable for binary XOR output
   
    NEATTraining train = new NEATTraining(
View Full Code Here


    // Verify persisted/default NEAT population parameters survived a round trip.
    Assert.assertEquals(10,pop.getYoungBonusAgeThreshold());
    // NOTE(review): if this is JUnit 4 and getYoungScoreBonus() returns double,
    // the two-argument assertEquals(double, double) overload is deprecated and
    // fails unconditionally — use assertEquals(0.3, pop.getYoungScoreBonus(), delta).
    // TODO confirm the JUnit version and the getter's return type.
    Assert.assertEquals(0.3,pop.getYoungScoreBonus());
   
    // see if the population can actually be used to train
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);   
    CalculateScore score = new TrainingSetScore(trainingSet);
    NEATTraining train = new NEATTraining(score,pop);
    // One iteration as a smoke test — no error threshold is asserted here.
    train.iteration();

  }
View Full Code Here

  }

  // Trains the given network on the given data set; the String parameter is
  // presumably a label for progress reporting — TODO confirm against the full
  // source (this snippet is truncated mid-method by the aggregator).
  // Sets up simulated annealing (start temp 10, stop temp 2, 100 cycles) as an
  // alternate trainer alongside backpropagation (learning rate 1e-6, momentum 0);
  // how the two are combined is not visible in this fragment.
  public static double trainNetwork(final String what,
      final BasicNetwork network, final MLDataSet trainingSet) {
    // train the neural network
    CalculateScore score = new TrainingSetScore(trainingSet);
    final MLTrain trainAlt = new NeuralSimulatedAnnealing(
        network, score, 10, 2, 100);

    final MLTrain trainMain = new Backpropagation(network, trainingSet,0.000001, 0.0);
View Full Code Here

  // Example entry point: evolves a NEAT population (2 inputs, 1 output,
  // population size 1000) to solve XOR, scoring candidates by training-set
  // error and using a step activation (center 0.5) on the output layer.
  // NOTE(review): truncated by the aggregator before the training loop.
  public static void main(final String args[]) {

    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2,1,1000);
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    ActivationStep step = new ActivationStep();
    step.setCenter(0.5);
    pop.setOutputActivationFunction(step);
   
View Full Code Here

  }

  // Trains the given network on the given data set; the String parameter is
  // presumably a label for progress reporting — TODO confirm against the full
  // source (this snippet is truncated mid-method by the aggregator).
  // Simulated annealing (temps 10 -> 2 over 100 cycles) is prepared as an
  // alternate trainer next to backpropagation (learning rate 1e-6, momentum 0).
  public static double trainNetwork(final String what,
      final BasicNetwork network, final MLDataSet trainingSet) {
    // train the neural network
    CalculateScore score = new TrainingSetScore(trainingSet);
    final MLTrain trainAlt = new NeuralSimulatedAnnealing(
        network, score, 10, 2, 100);

    final MLTrain trainMain = new Backpropagation(network, trainingSet,0.000001, 0.0);
View Full Code Here

      // Optionally pull a disk-buffered data set fully into memory before training.
      if (dialog.getLoadToMemory().getValue()) {
        training = ((BufferedNeuralDataSet) training).loadToMemory();
      }

      CalculateScore score = new TrainingSetScore(training);
      NEATTraining train = new NEATTraining(score, pop);

      // Progress tab that drives/visualizes the NEAT training run.
      BasicTrainingProgress tab = new BasicTrainingProgress(train,
          popFile, train.getTraining());
      // NOTE(review): if getMaxError().getValue() returns an integer type,
      // "/ 100" truncates to zero — a sibling fragment in this file divides by
      // 100.0 instead. TODO confirm the return type of getValue().
      tab.setMaxError(dialog.getMaxError().getValue() / 100);
View Full Code Here

    // Launch simulated-annealing training configured from the dialog.
    if (dialog.process()) {
      final double startTemp = dialog.getStartTemp().getValue();
      // FIXME(review): copy-paste bug — stopTemp reads getStartTemp() again,
      // so start and stop temperatures are always equal and the annealing
      // schedule never cools. This should almost certainly be
      // dialog.getStopTemp().getValue(); cannot fix here because the enclosing
      // method is truncated by the aggregator.
      final double stopTemp = dialog.getStartTemp().getValue();
      final int cycles = dialog.getCycles().getValue();

      CalculateScore score = new TrainingSetScore(trainingData);
      final NeuralSimulatedAnnealing train = new NeuralSimulatedAnnealing(
          (BasicNetwork) file.getObject(), score, startTemp,
          stopTemp, cycles);
      train.setTraining(trainingData);
      // Max error entered as a percentage; converted to a fraction here.
      startup(file, train, dialog.getMaxError().getValue() / 100.0);
View Full Code Here

      // Read genetic-algorithm hyperparameters from the dialog.
      final int populationSize = dialog.getPopulationSize().getValue();
      final double mutationPercent = dialog.getMutationPercent()
          .getValue();
      final double percentToMate = dialog.getPercentToMate().getValue();

      // Fitness = training-set error; weights randomized in [-1, 1].
      CalculateScore score = new TrainingSetScore(trainingData);
      final NeuralGeneticAlgorithm train = new NeuralGeneticAlgorithm(
          (BasicNetwork) file.getObject(),
          new RangeRandomizer(-1, 1), score, populationSize,
          mutationPercent, percentToMate);
      train.setTraining(trainingData);
View Full Code Here

    // Factory fragment: simulated annealing only supports BasicNetwork, so
    // reject any other MLMethod up front with a descriptive error.
    if (!(method instanceof BasicNetwork)) {
      throw new TrainingError(
          "Invalid method type, requires BasicNetwork");
    }

    final CalculateScore score = new TrainingSetScore(training);

    // Parse "key=value" training arguments; the start temperature is optional
    // (third argument false) and defaults to 10.
    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final double startTemp = holder.getDouble(
        MLTrainFactory.PROPERTY_TEMPERATURE_START, false, 10);
View Full Code Here

    // Factory fragment: the genetic trainer only supports BasicNetwork, so
    // reject any other MLMethod up front with a descriptive error.
    if (!(method instanceof BasicNetwork)) {
      throw new TrainingError(
          "Invalid method type, requires BasicNetwork");
    }

    final CalculateScore score = new TrainingSetScore(training);

    // Parse "key=value" training arguments; population size is optional
    // (third argument false) and defaults to 5000.
    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final int populationSize = holder.getInt(
        MLTrainFactory.PROPERTY_POPULATION_SIZE, false, 5000);
View Full Code Here

TOP

Related Classes of org.encog.neural.networks.training.CalculateScore

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware#gmail.com.