Package org.encog.neural.networks.training

Examples of org.encog.neural.networks.training.TrainingSetScore

TrainingSetScore implements CalculateScore by evaluating a candidate method against a fixed MLDataSet: the score it returns is the error on that training set, and shouldMinimize() reports true, so lower scores are better. The snippets below show the class being handed to Encog's score-based trainers (simulated annealing, genetic algorithms, NEAT and particle swarm optimization).
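
A minimal sketch of the scoring call itself through the CalculateScore interface (the trainingData and network variables are assumed to be set up as in the tests below):

  CalculateScore score = new TrainingSetScore(trainingData);
  double error = score.calculateScore(network);   // error of this network on the training set
  boolean minimize = score.shouldMinimize();      // true: score-based trainers should minimize it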


  @Test
  public void testAnneal() throws Throwable
  {
    // Score an untrained freeform XOR network with a TrainingSetScore and train it
    // by simulated annealing (start temp 10, stop temp 2, 100 cycles per iteration).
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    FreeformNetwork network = NetworkUtil.createXORFreeformNetworkUntrained();
    CalculateScore score = new TrainingSetScore(trainingData);
    NeuralSimulatedAnnealing anneal = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    NetworkUtil.testTraining(trainingData, anneal, 0.01);
  }
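NetworkUtil.testTraining is a helper from the Encog test suite; as a rough sketch (assuming only the standard MLTrain API, not the helper's actual body), the equivalent score-driven loop looks like this:

  // Illustrative loop only; the real NetworkUtil.testTraining helper may differ.
  MLTrain train = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
  int epoch = 0;
  do {
    train.iteration();   // one annealing iteration, scored by the TrainingSetScore
    epoch++;
  } while (train.getError() > 0.01 && epoch < 100);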


 
  @Test
  public void testGenetic() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    CalculateScore score = new TrainingSetScore(trainingData);
    // The factory creates a fresh, randomized freeform network for each genome;
    // the genetic algorithm ranks genomes with the TrainingSetScore.
    MLMethodGeneticAlgorithm genetic = new MLMethodGeneticAlgorithm(new MethodFactory(){
      @Override
      public MLMethod factor() {
        FreeformNetwork network = NetworkUtil.createXORFreeformNetworkUntrained();
        network.reset();
        return network;
      }}, score, 500);
    // Population size and error threshold are illustrative values completing the truncated snippet.
    NetworkUtil.testTraining(trainingData, genetic, 0.00001);
  }

  public void testSort1() {

    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 100);
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);

    // Genomes with hand-set adjusted scores, used to exercise genome sorting.
    NEATGenome genome1 = new NEATGenome();
    genome1.setAdjustedScore(3.0);
    NEATGenome genome2 = new NEATGenome();
    // ... (remainder of the test is not included in this listing)

  public void testSort2() {

    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 100);
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);

    // Second sorting test over the same NEAT trainer setup.
    NEATGenome genome1 = new NEATGenome();
    genome1.setAdjustedScore(3.0);
    NEATGenome genome2 = new NEATGenome();
    // ... (remainder of the test is not included in this listing)
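Both sort tests only construct the NEAT trainer; a minimal sketch of actually evolving the population against the TrainingSetScore (standard Encog NEAT usage, with the 0.01 stopping error chosen here for illustration) would be:

  NEATPopulation pop = new NEATPopulation(2, 1, 100);
  pop.reset();
  CalculateScore score = new TrainingSetScore(trainingSet);
  EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
  do {
    train.iteration();   // evolve one generation; each genome is scored on the training set
  } while (train.getError() > 0.01);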

    /**
     * Construct a PSO trainer that scores particles with a TrainingSetScore,
     * randomizes them with a NguyenWidrowRandomizer and uses 20 particles,
     * each sharing the same topology as this network.
     * @param network The network to train.
     * @param trainingSet The training set.
     */
    public NeuralPSO(BasicNetwork network, MLDataSet trainingSet)
    {
      this(network, new NguyenWidrowRandomizer(), new TrainingSetScore(trainingSet), 20);
    }
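A short usage sketch for this convenience constructor (the BasicNetwork and the XOR data are assumed to be built as in the tests above; the 0.01 stopping error is arbitrary):

  MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
  NeuralPSO pso = new NeuralPSO(network, trainingSet);   // 20 particles, Nguyen-Widrow init
  do {
    pso.iteration();   // one swarm update, scored by the internal TrainingSetScore
  } while (pso.getError() > 0.01);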

    // Excerpt from a training factory create(...) method: read the optional
    // "particles" argument (default 20) and build a PSO trainer whose fitness
    // function is a TrainingSetScore over the supplied training data.
    final ParamsHolder holder = new ParamsHolder(args);

    final int particles = holder.getInt(
        MLTrainFactory.PROPERTY_PARTICLES, false, 20);

    CalculateScore score = new TrainingSetScore(training);
    Randomizer randomizer = new NguyenWidrowRandomizer();

    final MLTrain train = new NeuralPSO((BasicNetwork) method, randomizer, score, particles);

    return train;
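For context, this create(...) logic is normally reached through MLTrainFactory; a hedged sketch (the TYPE_PSO constant name and the "particles=30" argument string are assumptions, not taken from the snippet above):

  MLTrainFactory trainFactory = new MLTrainFactory();
  // Ask the factory for a PSO trainer over a BasicNetwork and a training set.
  MLTrain train = trainFactory.create(network, trainingSet, MLTrainFactory.TYPE_PSO, "particles=30");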
