Package org.encog.ml

Examples of org.encog.ml.CalculateScore
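
CalculateScore is the callback that Encog's score-based trainers (NEAT, genetic programming, genetic algorithms, simulated annealing) use to rate a candidate MLMethod; TrainingSetScore, which appears in every snippet below, is the stock implementation that scores a candidate by its error on a data set. The sketch below shows a minimal custom implementation; it assumes the Encog 3.x method set (older releases may lack requireSingleThreaded()) and that the candidate implements MLError, as BasicNetwork does.

    import org.encog.ml.CalculateScore;
    import org.encog.ml.MLError;
    import org.encog.ml.MLMethod;
    import org.encog.ml.data.MLDataSet;

    public class DataSetErrorScore implements CalculateScore {

      private final MLDataSet data;

      public DataSetErrorScore(final MLDataSet data) {
        this.data = data;
      }

      @Override
      public double calculateScore(final MLMethod method) {
        // rate a candidate by its error on the held data set
        return ((MLError) method).calculateError(this.data);
      }

      @Override
      public boolean shouldMinimize() {
        return true; // lower error means a better candidate
      }

      @Override
      public boolean requireSingleThreaded() {
        return false; // scoring keeps no mutable state, so it may run in parallel
      }
    }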


    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2,1,1000);
    pop.setInitialConnectionDensity(1.0);// not required, but speeds training
    pop.reset();

    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
   
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop,score);
   
    do {
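      // hedged completion of the truncated loop, following the shape of Encog's
      // standard XOR NEAT example; the 0.01 error target is an assumption
      train.iteration();
      System.out.println("Epoch #" + train.getIteration() + " Error: " + train.getError());
    } while (train.getError() > 0.01);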


    if (!(method instanceof BasicNetwork)) {
      throw new TrainingError(
          "Invalid method type, requires BasicNetwork");
    }

    final CalculateScore score = new TrainingSetScore(training);

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final double startTemp = holder.getDouble(
        MLTrainFactory.PROPERTY_TEMPERATURE_START, false, 10);
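
    // hedged sketch of how such a factory typically finishes: read the remaining
    // annealing parameters and build the trainer; the PROPERTY_TEMPERATURE_STOP and
    // CYCLES keys are assumptions mirroring PROPERTY_TEMPERATURE_START above
    final double stopTemp = holder.getDouble(
        MLTrainFactory.PROPERTY_TEMPERATURE_STOP, false, 2);
    final int cycles = holder.getInt(MLTrainFactory.CYCLES, false, 100);

    return new NeuralSimulatedAnnealing(
        (BasicNetwork) method, score, startTemp, stopTemp, cycles);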

    if (!(method instanceof MLEncodable)) {
      throw new TrainingError(
          "Invalid method type, requires an encodable MLMethod");
    }

    final CalculateScore score = new TrainingSetScore(training);

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final int populationSize = holder.getInt(
        MLTrainFactory.PROPERTY_POPULATION_SIZE, false, 5000);
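
    // hedged sketch of a typical continuation: hand the score and population size to
    // a score-based genetic trainer; the MLMethodGeneticAlgorithm constructor shape
    // (MethodFactory, CalculateScore, populationSize) is an assumption
    final MLMethodGeneticAlgorithm train = new MLMethodGeneticAlgorithm(
        new MethodFactory() {
          @Override
          public MLMethod factor() {
            // each genome starts from a deep copy of the template method
            return (MLMethod) ObjectCloner.deepCopy(method);
          }
        }, score, populationSize);

    return train;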

  /**
   * @return The newly created trainer.
   */
  public MLTrain create(final MLMethod method,
      final MLDataSet training, final String argsStr) {

    final CalculateScore score = new TrainingSetScore(training);   
    final TrainEA train = NEATUtil.constructNEATTrainer((NEATPopulation)method, score);

    return train;
  }

  public MLTrain create(final MLMethod method,
      final MLDataSet training, final String argsStr) {
   
    PrgPopulation pop = (PrgPopulation)method;
   
    final CalculateScore score = new TrainingSetScore(training);   
    TrainEA train = new TrainEA(pop, score);
    train.getRules().addRewriteRule(new RewriteConstants());
    train.getRules().addRewriteRule(new RewriteAlgebraic());
    train.setCODEC(new PrgCODEC());
    train.addOperation(0.8, new SubtreeCrossover());
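    // hedged continuation: register a mutation operator, enable program speciation,
    // and return the trainer; the SubtreeMutation arguments are assumptions
    train.addOperation(0.1, new SubtreeMutation(pop.getContext(), 4));
    train.setSpeciation(new PrgSpeciation());
    return train;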

 
  private NEATPopulation generate()
  {
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
   
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    ActivationStep step = new ActivationStep();
    step.setCenter(0.5);

    EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(
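        // hedged completion: NEATUtil also offers an overload that builds the
        // population from the score and layer sizes; the sizes here are assumptions,
        // and the elided lines presumably wire the ActivationStep into the population
        score, 2, 1, 10);

    return (NEATPopulation) train.getPopulation();
  }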

    Assert.assertEquals(10,pop.getPopulationSize());
    Assert.assertEquals(0.2, pop.getSurvivalRate(), 0.0001);
   
    // see if the population can actually be used to train
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);   
    CalculateScore score = new TrainingSetScore(trainingSet);
    EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
    train.iteration();

  }

{
  private static double[][] FAKE_DATA = { { 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0 } };

  public void testNPE() throws Exception
  {
    final CalculateScore score = new TrainingSetScore(new BasicMLDataSet(FAKE_DATA, FAKE_DATA));

    // create a new random population and train it
    NEATPopulation pop = new NEATPopulation(FAKE_DATA[0].length, 1, 50);
    pop.reset();
    EvolutionaryAlgorithm training1 = NEATUtil.constructNEATTrainer(pop, score);
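    // hedged continuation: a test named testNPE presumably just needs to run a
    // generation over FAKE_DATA and confirm that scoring does not throw an NPE
    training1.iteration();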

    new PersistNEATPopulation().save(serialized3, training3.getPopulation());
  }

  public void testSaveRead() throws Exception
  {
    final CalculateScore score = new TrainingSetScore(new BasicMLDataSet(FAKE_DATA, FAKE_DATA));
    NEATPopulation pop = new NEATPopulation(FAKE_DATA[0].length, 1, 50);
    pop.reset();
    // create a new random population and train it
    EvolutionaryAlgorithm training1 = NEATUtil.constructNEATTrainer(pop, score);
    training1.iteration();
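    // hedged sketch of the round trip the test name implies, reusing the same
    // PersistNEATPopulation save/read pair seen at the top of this excerpt;
    // the stream names and the final assertion are assumptions
    final ByteArrayOutputStream serialized1 = new ByteArrayOutputStream();
    new PersistNEATPopulation().save(serialized1, training1.getPopulation());
    final NEATPopulation pop2 = (NEATPopulation) new PersistNEATPopulation()
        .read(new ByteArrayInputStream(serialized1.toByteArray()));
    Assert.assertEquals(pop.getPopulationSize(), pop2.getPopulationSize());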

  @Test
  public void testAnneal() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    CalculateScore score = new TrainingSetScore(trainingData);
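    // constructor arguments, on a hedged reading: start temperature 10,
    // stop temperature 2, and 100 annealing cycles per training iteration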
    NeuralSimulatedAnnealing anneal = new NeuralSimulatedAnnealing(network,score,10,2,100);
    NetworkUtil.testTraining(trainingData,anneal,0.01);
  }
