Package: org.encog.neural.som.training.basic

Usage examples of org.encog.neural.som.training.basic.BasicTrainSOM


    // NOTE(review): fragment of a Swing demo constructor — the enclosing
    // method signature and fields are outside this excerpt.
    this.setDefaultCloseOperation(EXIT_ON_CLOSE);
    // Build the SOM and the panel that visualizes the map.
    this.network = createNetwork();
    this.getContentPane().add(map = new MapPanel(this));
    // Gaussian neighborhood function sized to the map dimensions.
    this.gaussian = new NeighborhoodRBF(RBFEnum.Gaussian,MapPanel.WIDTH,
        MapPanel.HEIGHT);
    // Learning rate 0.01; the training set is null here — presumably the
    // data is fed per-pattern during the run loop (TODO confirm in full source).
    this.train = new BasicTrainSOM(this.network, 0.01, null, gaussian);
    train.setForceWinner(false);
    // Train on a background thread; Thread(this) implies this class
    // implements Runnable.
    this.thread = new Thread(this);
    thread.start();
  }
View Full Code Here


   
    // Create the neural network.
    SOM network = new SOM(4,2);
    network.reset();
   
    BasicTrainSOM train = new BasicTrainSOM(
        network,
        0.7,
        training,
        new NeighborhoodSingle());
       
    int iteration = 0;
   
    for(iteration = 0;iteration<=10;iteration++)
    {
      train.iteration();
      System.out.println("Iteration: " + iteration + ", Error:" + train.getError());
    }
   
    MLData data1 = new BasicMLData(SOM_INPUT[0]);
    MLData data2 = new BasicMLData(SOM_INPUT[1]);
    System.out.println("Pattern 1 winner: " + network.winner(data1));
View Full Code Here

    }
    // NOTE(review): fragment of a trainer-factory method — earlier branches
    // mapping other neighborhood names are outside this excerpt.
    if (neighborhoodStr.equalsIgnoreCase("single")) {
      nf = new NeighborhoodSingle();
    }

    // Build the SOM trainer from the parsed factory arguments.
    final BasicTrainSOM result = new BasicTrainSOM((SOM) method,
        learningRate, training, nf);

    // Optional auto-decay: when an iteration count is supplied, the learning
    // rate and neighborhood radius decay from their start to end values over
    // the planned number of iterations.
    if (args.containsKey(MLTrainFactory.PROPERTY_ITERATIONS)) {
      final int plannedIterations = holder.getInt(
          MLTrainFactory.PROPERTY_ITERATIONS, false, 1000);
      final double startRate = holder.getDouble(
          MLTrainFactory.PROPERTY_START_LEARNING_RATE, false, 0.05);
      final double endRate = holder.getDouble(
          MLTrainFactory.PROPERTY_END_LEARNING_RATE, false, 0.05);
      final double startRadius = holder.getDouble(
          MLTrainFactory.PROPERTY_START_RADIUS, false, 10);
      final double endRadius = holder.getDouble(
          MLTrainFactory.PROPERTY_END_RADIUS, false, 1);
      result.setAutoDecay(plannedIterations, startRate, endRate,
          startRadius, endRadius);
    }

    return result;
  }
View Full Code Here

    // Dispatch on the training method the user picked in the workbench.
    if (sel.getSelected() == selectBasicSOM) {
      InputSOM somDialog = new InputSOM();

      // process() presumably returns true when the dialog is accepted —
      // TODO confirm against InputSOM.
      if (somDialog.process()) {
        BasicTrainSOM train = new BasicTrainSOM((SOM) file.getObject(),
            somDialog.getLearningRate().getValue(), trainingData,
            somDialog.getNeighborhoodFunction());
        train.setForceWinner(somDialog.getForceWinner().getValue());
        // Max error is entered as a percentage; convert to a fraction.
        startup(file, train, somDialog.getMaxError().getValue() / 100.0);
      }
    } else if (sel.getSelected() == selectSOMClusterCopy) {
      // Cluster-copy training: a single iteration() call, then prompt the
      // user whether to persist or discard the result.
      SOMClusterCopyTraining train = new SOMClusterCopyTraining(
          (SOM) file.getObject(), trainingData);
      train.iteration();
      if (EncogWorkBench.askQuestion("SOM", "Training done, save?")) {
        file.save();
      } else {
        file.revert();
      }
View Full Code Here

    // Create the neural network.
    // 4-input, 2-output SOM seeded with fixed test weights so the
    // training outcome is reproducible.
    SOM network = new SOM(4,2);   
    network.setWeights(new Matrix(MATRIX_ARRAY));

    // Learning rate 0.4; forceWinner(true) — see BasicTrainSOM docs for
    // the exact semantics of forcing a winner.
    final BasicTrainSOM train = new BasicTrainSOM(network, 0.4,
        training, new NeighborhoodSingle());
    train.setForceWinner(true);
    int iteration = 0;

    // Fixed number of training passes.
    for (iteration = 0; iteration <= 100; iteration++) {
      train.iteration();
    }

    // Build inputs from the shared test patterns (statement truncated in
    // this excerpt).
    final MLData data1 = new BasicMLData(
        TestCompetitive.SOM_INPUT[0]);
    final MLData data2 = new BasicMLData(
View Full Code Here

    }
    // NOTE(review): duplicate excerpt of the trainer-factory method — the
    // leading neighborhood-name branches are outside this excerpt.
    if (neighborhoodStr.equalsIgnoreCase("single")) {
      nf = new NeighborhoodSingle();
    }

    // Build the SOM trainer from the parsed factory arguments.
    final BasicTrainSOM result = new BasicTrainSOM((SOM) method,
        learningRate, training, nf);

    // Optional auto-decay of learning rate and radius over the planned
    // iteration count.
    if (args.containsKey(MLTrainFactory.PROPERTY_ITERATIONS)) {
      final int plannedIterations = holder.getInt(
          MLTrainFactory.PROPERTY_ITERATIONS, false, 1000);
      final double startRate = holder.getDouble(
          MLTrainFactory.PROPERTY_START_LEARNING_RATE, false, 0.05);
      final double endRate = holder.getDouble(
          MLTrainFactory.PROPERTY_END_LEARNING_RATE, false, 0.05);
      final double startRadius = holder.getDouble(
          MLTrainFactory.PROPERTY_START_RADIUS, false, 10);
      final double endRadius = holder.getDouble(
          MLTrainFactory.PROPERTY_END_RADIUS, false, 1);
      result.setAutoDecay(plannedIterations, startRate, endRate,
          startRadius, endRadius);
    }

    return result;
  }
View Full Code Here

    // Create the neural network.
    // Duplicate excerpt: 4-input, 2-output SOM seeded with fixed test
    // weights for a reproducible result.
    SOM network = new SOM(4,2);   
    network.setWeights(new Matrix(MATRIX_ARRAY));

    final BasicTrainSOM train = new BasicTrainSOM(network, 0.4,
        training, new NeighborhoodSingle());
    train.setForceWinner(true);
    int iteration = 0;

    // Fixed number of training passes.
    for (iteration = 0; iteration <= 100; iteration++) {
      train.iteration();
    }

    // Build inputs from the shared test patterns (statement truncated in
    // this excerpt).
    final MLData data1 = new BasicMLData(
        TestCompetitive.SOM_INPUT[0]);
    final MLData data2 = new BasicMLData(
View Full Code Here

TOP

Related Classes of org.encog.neural.som.training.basic.BasicTrainSOM

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware#gmail.com.