package org.encog.neural.networks.training.propagation.resilient

Examples of org.encog.neural.networks.training.propagation.resilient.ResilientPropagation


     
      if( kFold>0 ) {
        trainingData = this.wrapTrainingData(trainingData);
      }
     
      MLTrain train = new ResilientPropagation(
          (ContainsFlat) file.getObject(), trainingData,
          initialUpdate, maxStep);
     
      switch( dialog.getRpropType().getSelectedIndex() )
      {
View Full Code Here


        RPROPConst.DEFAULT_INITIAL_UPDATE);
    final double maxStep = holder.getDouble(
        MLTrainFactory.PROPERTY_MAX_STEP, false,
        RPROPConst.DEFAULT_MAX_STEP);

    return new ResilientPropagation((ContainsFlat) method, training,
        initialUpdate, maxStep);
  }
View Full Code Here

    final MLDataSet trainingSet = new BasicMLDataSet(
        CustomActivation.XOR_INPUT, CustomActivation.XOR_IDEAL);

   
    // train the neural network
    final MLTrain train = new ResilientPropagation(network, trainingSet);
    // reset if improve is less than 1% over 5 cycles
    train.addStrategy(new RequiredImprovementStrategy(5));

    EncogUtility.trainToError(train, 0.01);
   
    EncogUtility.evaluate(network, trainingSet);
View Full Code Here

    MLDataSet trainingData = XOR.createNoisyXORDataSet(10);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
   
    final FoldedDataSet folded = new FoldedDataSet(trainingData);
    final MLTrain train = new ResilientPropagation(network, folded);
    final CrossValidationKFold trainFolded = new CrossValidationKFold(train,4);
   
    EncogUtility.trainToError(trainFolded, 0.2);
   
    XOR.verifyXOR((MLRegression)trainFolded.getMethod(), 0.2);
View Full Code Here

 
  public void jsFunction_create(JSNeuralNetwork network, JSTrainingData data)
  {
    this.network = network;
    this.data = data;
    train = new ResilientPropagation(network.getNetwork(),data.getData());
  }
View Full Code Here

    network.reset();
    new ConsistentRandomizer(-1,1).randomize(network);

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    final MLTrain train = new ResilientPropagation(network, trainingSet);
    //
    int epoch = 1;
    do {
      train.iteration();
      System.out
          .println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while(train.getError() > 0.01 && epoch<5000);
   
   
    EncogUtility.evaluate(network, trainingSet);
  }
View Full Code Here

        if(trainingSet==null){
            trainingSet=(MLDataSet)getNeuralNetworkTrainingData(ndArray);
        }else{
            trainingSet=(MLDataSet)getNeuralNetworkTrainingData(ndArray, trainingSet);
        }
        ResilientPropagation train = new ResilientPropagation(neuralNetwork, trainingSet);
      
        EncogUtility.trainToError(train, ProgramConfig.NEURAL_NETWOR_MIN_ERROR);
    }
View Full Code Here

        if(trainingSet==null){
            trainingSet=(MLDataSet)getNeuralNetworkTrainingData(nd);
        }else{
            trainingSet=(MLDataSet)getNeuralNetworkTrainingData(nd, trainingSet);
        }
        ResilientPropagation train = new ResilientPropagation (neuralNetwork, trainingSet);
      
       
        EncogUtility.trainToError(train, ProgramConfig.NEURAL_NETWOR_MIN_ERROR);
    }
View Full Code Here

        RPROPConst.DEFAULT_INITIAL_UPDATE);
    final double maxStep = holder.getDouble(
        MLTrainFactory.PROPERTY_MAX_STEP, false,
        RPROPConst.DEFAULT_MAX_STEP);

    return new ResilientPropagation((ContainsFlat) method, training,
        initialUpdate, maxStep);
  }
View Full Code Here

   * @param minutes
   *            The number of minutes to train for.
   */
  public static void trainConsole(final BasicNetwork network,
      final MLDataSet trainingSet, final int minutes) {
    final Propagation train = new ResilientPropagation(network, trainingSet);
    train.setThreadCount(0);
    EncogUtility.trainConsole(train, network, trainingSet, minutes);
  }
View Full Code Here

TOP

Related Classes of org.encog.neural.networks.training.propagation.resilient.ResilientPropagation

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.