Package org.encog.ml.train

Examples of org.encog.ml.train.MLTrain
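Every implementation below follows the same MLTrain contract: the trainer wraps a trainable method and a data set, and each call to iteration() performs one training pass. A minimal sketch of that loop (the network construction and the 0.01 error target are illustrative, not taken from any one example below):

  MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
  BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 0, 1, true);

  MLTrain train = new ResilientPropagation(network, trainingData);
  do {
    train.iteration();          // one pass over the training data
  } while (train.getError() > 0.01);
  train.finishTraining();       // release any trainer resources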


  public void testSCG() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);

    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain train = new ScaledConjugateGradient(network, trainingData);
    NetworkUtil.testTraining(trainingData, train, 0.04);
  }
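Unlike backpropagation, scaled conjugate gradient needs no learning-rate or momentum parameters, which is why the constructor above takes only the network and the data set.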


  public void testFoldedTraining() throws Throwable
  {
    MLDataSet trainingData = XOR.createNoisyXORDataSet(10);

    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();

    final FoldedDataSet folded = new FoldedDataSet(trainingData);
    final MLTrain train = new ResilientPropagation(network, folded);
    final CrossValidationKFold trainFolded = new CrossValidationKFold(train, 4);

    EncogUtility.trainToError(trainFolded, 0.2);

    XOR.verifyXOR((MLRegression) trainFolded.getMethod(), 0.2);
  }
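Here FoldedDataSet exposes the noisy data one fold at a time, and CrossValidationKFold rotates the wrapped ResilientPropagation trainer through the four folds, training on the data outside the current fold and validating on the fold that is held out.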

  public void testFreeformBackpropagation() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);

    FreeformNetwork network = NetworkUtil.createXORFreeformNetworkUntrained();

    // 0.7 is the learning rate, 0.9 the momentum
    MLTrain bprop = new FreeformBackPropagation(network, trainingData, 0.7, 0.9);
    NetworkUtil.testTraining(trainingData, bprop, 0.01);
  }

  public void testFreeformResilientPropagation() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);

    FreeformNetwork network = NetworkUtil.createXORFreeformNetworkUntrained();

    MLTrain rprop = new FreeformResilientPropagation(network, trainingData);
    NetworkUtil.testTraining(trainingData, rprop, 0.01);
  }
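A FreeformNetwork differs from BasicNetwork in that its neurons and connections form an arbitrary graph rather than fixed layers; the freeform trainers above are the MLTrain implementations that know how to traverse that structure.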

  public void testEncode() {

    // train (and test) a network
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    FreeformNetwork trainedNetwork = NetworkUtil.createXORFreeformNetworkUntrained();
    MLTrain bprop = new FreeformResilientPropagation(trainedNetwork, trainingData);
    NetworkUtil.testTraining(trainingData, bprop, 0.01);

    trainedNetwork = (FreeformNetwork) bprop.getMethod();

    // allocate space to encode to
    double[] encoded = new double[trainedNetwork.encodedArrayLength()];

    // encode the network's weights into the array
    trainedNetwork.encodeToArray(encoded);
  }
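encodeToArray has a mirror operation, decodeFromArray, on the same MLEncodable interface. A hedged sketch of the round trip, reusing the helpers from the test above:

  // restore the trained weights into a second, identically structured network
  FreeformNetwork network2 = NetworkUtil.createXORFreeformNetworkUntrained();
  network2.decodeFromArray(encoded);

  // the copy should now behave like the trained original
  XOR.verifyXOR(network2, 0.01);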

  public void testBiasActivation() throws Throwable
  {
    // three untrained XOR networks (created here for completeness);
    // only the bias activation differs between them
    BasicNetwork network1 = NetworkUtil.createXORNetworkUntrained();
    BasicNetwork network2 = NetworkUtil.createXORNetworkUntrained();
    BasicNetwork network3 = NetworkUtil.createXORNetworkUntrained();
    network2.setBiasActivation(-1);
    network3.setBiasActivation(0.5);

    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);

    MLTrain rprop1 = new ResilientPropagation(network1, trainingData);
    MLTrain rprop2 = new ResilientPropagation(network2, trainingData);
    MLTrain rprop3 = new ResilientPropagation(network3, trainingData);

    NetworkUtil.testTraining(trainingData, rprop1, 0.03);
    NetworkUtil.testTraining(trainingData, rprop2, 0.01);
    NetworkUtil.testTraining(trainingData, rprop3, 0.01);
  }
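setBiasActivation changes the constant value emitted by the bias neurons (1.0 by default); the test demonstrates that RPROP still converges when that constant is -1 or 0.5.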

  public void testRPROPConvergence() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);

    BasicNetwork network = EncogUtility.simpleFeedForward(2, 5, 7, 1, true);
    (new ConsistentRandomizer(-1, 1)).randomize(network);
    MLTrain rprop = new ResilientPropagation(network, trainingData);
    int iteration = 0;
    do {
      rprop.iteration();
      iteration++;
    } while (iteration < 5000 && rprop.getError() > 0.01);
    Assert.assertTrue(iteration < 40);
  }
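Because ConsistentRandomizer always produces the same starting weights, the number of iterations RPROP needs is deterministic, which is what makes the iteration < 40 assertion safe even though the loop allows up to 5000.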

  public void testAdaline()
  {
    // pattern setup (assumed): a single-layer ADALINE with two inputs and one output
    ADALINEPattern pattern = new ADALINEPattern();
    pattern.setInputNeurons(2);
    pattern.setOutputNeurons(1);
    BasicNetwork network = (BasicNetwork) pattern.generate();

    // train it
    MLDataSet training = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    MLTrain train = new TrainAdaline(network, training, 0.01);
    NetworkUtil.testTraining(training, train, 0.01);
  }
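TrainAdaline's third constructor argument is the learning rate for the delta rule; 0.01 keeps the weight updates small enough for stable convergence.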

    final Stopwatch watch = new Stopwatch();
    try {
      watch.start();

      this.currentJob.createTrainer(this.manager.isSingleThreaded());
      final MLTrain train = this.currentJob.getTrain();
      int iteration = 1;

      while (this.currentJob.shouldContinue()) {
        train.iteration();
        iteration++;
      }
      watch.stop();
    } catch (final Throwable t) {
      this.currentJob.setError(t);
    }
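The shouldContinue() check above comes from the job object; with a bare MLTrain the same kind of stop condition is usually attached as a strategy. A hedged sketch using EndIterationsStrategy from org.encog.ml.train.strategy.end (the 5000/0.01 limits are illustrative):

  MLTrain train = new ResilientPropagation(network, trainingData);
  train.addStrategy(new EndIterationsStrategy(5000)); // hard cap on iterations

  do {
    train.iteration();
  } while (!train.isTrainingDone() && train.getError() > 0.01);
  train.finishTraining();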

  /**
   * Fit a single fold of a k-fold cross-validation.
   * @param k The total number of folds.
   * @param foldNum The number of the current fold.
   * @param fold The data for the current fold.
   */
  private void fitFold(int k, int foldNum, DataFold fold) {
    MLMethod method = this.createMethod();
    MLTrain train = this.createTrainer(method, fold.getTraining());

    if (train.getImplementationType() == TrainingImplementationType.Iterative) {
      SimpleEarlyStoppingStrategy earlyStop = new SimpleEarlyStoppingStrategy(
          fold.getValidation());
      train.addStrategy(earlyStop);

      StringBuilder line = new StringBuilder();
      while (!train.isTrainingDone()) {
        train.iteration();
        line.setLength(0);
        line.append("Fold #");
        line.append(foldNum);
        line.append("/");
        line.append(k);
        line.append(": Iteration #");
        line.append(train.getIteration());
        line.append(", Training Error: ");
        line.append(Format.formatDouble(train.getError(), 8));
        line.append(", Validation Error: ");
        line.append(Format.formatDouble(earlyStop.getValidationError(),
            8));
        this.report.report(k, foldNum, line.toString());
      }
      fold.setScore(earlyStop.getValidationError());
      fold.setMethod(method);
    } else if (train.getImplementationType() == TrainingImplementationType.OnePass) {
      train.iteration();
      double validationError = calculateError(method,
          fold.getValidation());
      this.report.report(k, foldNum,
          "Trained, Training Error: " + train.getError()
              + ", Validation Error: " + validationError);
      fold.setScore(validationError);
      fold.setMethod(method);
    } else {
      throw new EncogError("Unsupported training type for EncogModel: "
          + train.getImplementationType());
    }
  }
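fitFold is internal machinery: it is invoked once per fold by EncogModel's crossvalidate call. A hedged sketch of that public entry point, based on the Encog 3.x VersatileMLDataSet quickstart (the CSV file and column names are placeholders):

  // define a data source and which columns it contains
  VersatileDataSource source = new CSVDataSource(new File("data.csv"), false, CSVFormat.DECIMAL_POINT);
  VersatileMLDataSet data = new VersatileMLDataSet(source);
  data.defineSourceColumn("x1", 0, ColumnType.continuous);
  data.defineSourceColumn("x2", 1, ColumnType.continuous);
  ColumnDefinition output = data.defineSourceColumn("y", 2, ColumnType.continuous);
  data.analyze();
  data.defineSingleOutputOthersInput(output);

  // pick a model type, normalize, and cross-validate over 5 folds
  EncogModel model = new EncogModel(data);
  model.selectMethod(data, MLMethodFactory.TYPE_FEEDFORWARD);
  model.setReport(new ConsoleStatusReportable());
  data.normalize();
  model.holdBackValidation(0.3, true, 1001);
  model.selectTrainingType(data);
  MLRegression best = (MLRegression) model.crossvalidate(5, true);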
