Package org.encog.ml.train

Examples of org.encog.ml.train.MLTrain
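Every snippet below follows the same basic pattern: construct a concrete trainer around an MLMethod and a data set, call iteration() until getError() drops below a target, then call finishTraining(). A minimal self-contained sketch of that loop on the XOR problem (the class name is only illustrative):

  import org.encog.ml.data.MLDataSet;
  import org.encog.ml.data.basic.BasicMLDataSet;
  import org.encog.ml.train.MLTrain;
  import org.encog.neural.networks.BasicNetwork;
  import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
  import org.encog.util.simple.EncogUtility;

  public class MLTrainLoopExample {
    public static void main(String[] args) {
      // XOR truth table as the training data.
      double[][] input = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
      double[][] ideal = { {0}, {1}, {1}, {0} };
      MLDataSet trainingData = new BasicMLDataSet(input, ideal);

      // 2-3-1 feedforward network with sigmoid activations.
      BasicNetwork network = EncogUtility.simpleFeedForward(2, 3, 0, 1, false);

      // Any MLTrain implementation (RPROP, LMA, annealing, ...) fits this loop.
      MLTrain train = new ResilientPropagation(network, trainingData);
      int epoch = 1;
      do {
        train.iteration();
        System.out.println("Epoch " + epoch++ + " error=" + train.getError());
      } while (train.getError() > 0.01);
      train.finishTraining();
    }
  }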


    // Register a resettable method with the analyst before training begins.
    if( method instanceof MLResettable ) {
      this.getAnalyst().setMethod(method);
    }

    // Create the trainer named in the script, then optionally wrap it for
    // k-fold cross validation.
    MLTrain train = factory.create(method, trainingSet, type, args);

    if( getKfold() > 0 ) {
      train = new CrossValidationKFold(train, getKfold());
    }
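CrossValidationKFold expects the wrapped trainer to have been built on a FoldedDataSet, so outside the analyst the usual pattern looks roughly like this (reusing the network and trainingData names from the sketch at the top):

    // Wrap the data so the k-fold trainer can rotate through the folds.
    FoldedDataSet folded = new FoldedDataSet(trainingData);
    MLTrain base = new ResilientPropagation(network, folded);

    // The wrapper is itself an MLTrain, so the same iteration loop applies.
    MLTrain kfold = new CrossValidationKFold(base, 4);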


  public boolean executeCommand(final String args) {

    setKfold( obtainCross() );
    final MLDataSet trainingSet = obtainTrainingSet();
    MLMethod method = obtainMethod();
    final MLTrain trainer = createTrainer(method, trainingSet);
   
    if( method instanceof BayesianNetwork ) {
      final String query = getProp().getPropertyString(
          ScriptProperties.ML_CONFIG_QUERY);
      ((BayesianNetwork)method).defineClassificationStructure(query);
    }
   
    EncogLogging.log(EncogLogging.LEVEL_DEBUG, "Beginning training");

    performTraining(trainer, method, trainingSet);

    final String resourceID = getProp().getPropertyString(
        ScriptProperties.ML_CONFIG_MACHINE_LEARNING_FILE);
    final File resourceFile = getAnalyst().getScript().resolveFilename(
        resourceID);
   
    // reload the method
    method = null;
   
    if( trainer instanceof EvolutionaryAlgorithm ) {
      EvolutionaryAlgorithm ea = (EvolutionaryAlgorithm)trainer;
      method = ea.getPopulation();
    }
   
    if( method==null ) {
      method = trainer.getMethod();
    }
       
    EncogDirectoryPersistence.saveObject(resourceFile, method);
    EncogLogging.log(EncogLogging.LEVEL_DEBUG, "save to:" + resourceID);
    trainingSet.close();
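A file saved this way with EncogDirectoryPersistence can be read back through the same class; a short sketch, with a purely illustrative file name:

    // Persist the trained method to an EG file and restore it later.
    File egFile = new File("trained-method.eg");  // illustrative name only
    EncogDirectoryPersistence.saveObject(egFile, method);
    MLMethod restored = (MLMethod) EncogDirectoryPersistence.loadObject(egFile);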

    final double stopTemp = holder.getDouble(
        MLTrainFactory.PROPERTY_TEMPERATURE_STOP, false, 2);

    final int cycles = holder.getInt(MLTrainFactory.CYCLES, false, 100);

    final MLTrain train = new NeuralSimulatedAnnealing(
        (BasicNetwork) method, score, startTemp, stopTemp, cycles);

    return train;
  }
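This create method is normally reached through MLTrainFactory, which parses the argument string into the ParamsHolder seen above. A sketch of requesting the same annealing trainer by type name, with an empty argument string so the defaults shown above (stop temperature 2, 100 cycles) apply:

    MLTrainFactory factory = new MLTrainFactory();
    // Empty args string: the factory falls back to the default parameters.
    MLTrain anneal = factory.create(network, trainingData,
        MLTrainFactory.TYPE_ANNEAL, "");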

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final int populationSize = holder.getInt(
        MLTrainFactory.PROPERTY_POPULATION_SIZE, false, 5000);
   
    MLTrain train = new MLMethodGeneticAlgorithm(new MethodFactory(){
      @Override
      public MLMethod factor() {
        final MLMethod result = (MLMethod) ObjectCloner.deepCopy(method);
        ((MLResettable)result).reset();
        return result;
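The constructor call is cut off above; the full form also takes a score function and the population size parsed from the params. A sketch of the complete call, assuming a TrainingSetScore over the factory's training argument:

    final CalculateScore score = new TrainingSetScore(training);
    final MLTrain train = new MLMethodGeneticAlgorithm(new MethodFactory() {
      @Override
      public MLMethod factor() {
        // Each population member starts as a reset deep copy of the template method.
        final MLMethod result = (MLMethod) ObjectCloner.deepCopy(method);
        ((MLResettable) result).reset();
        return result;
      }
    }, score, populationSize);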

   *            The error level to train to.
   */
  public static void trainToError(final MLMethod method,
      final MLDataSet dataSet, final double error) {

    MLTrain train;

    if (method instanceof SVM) {
      train = new SVMTrain((SVM)method, dataSet);
    } else if (method instanceof FreeformNetwork) {
      train = new FreeformResilientPropagation((FreeformNetwork) method, dataSet);
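Callers therefore need only one line; the helper chooses a suitable trainer for the method type and loops until the requested error is reached. For example, with the network and trainingData from the earlier sketch:

    // Picks RPROP for a plain BasicNetwork and trains until the MSE falls below 1%.
    EncogUtility.trainToError(network, trainingData, 0.01);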

      dataSetFactory.setSignificance(D);
      MLDataSet thisSet = dataSetFactory.getNewDataSet();
      GenericEnsembleML newML = new GenericEnsembleML(
          mlFactory.createML(dataSetFactory.getInputData().getInputSize(),
              dataSetFactory.getInputData().getIdealSize()),
          mlFactory.getLabel());
      do {
        mlFactory.reInit(newML.getMl());
        MLTrain train = trainFactory.getTraining(newML.getMl(), thisSet);
        newML.setTraining(train);
        newML.train(targetAccuracy,verbose);
      } while (newML.getError(testset) > selectionError);
      double newWeight = getWeightedError(newML,thisSet);
      members.add(newML);

  public void testRPROP() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain rprop = new ResilientPropagation(network, trainingData);
    NetworkUtil.testTraining(trainingData,rprop,0.03);
  }

  public void testLMA() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain lma = new LevenbergMarquardtTraining(network, trainingData);
    NetworkUtil.testTraining(trainingData, lma, 0.03);
  }

  public void testBackProp() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();

    MLTrain bprop = new Backpropagation(network, trainingData, 0.7, 0.9);
    NetworkUtil.testTraining(trainingData,bprop,0.01);
  }

  public void testManhattan() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain manhattan = new ManhattanPropagation(network, trainingData, 0.01);
    NetworkUtil.testTraining(trainingData, manhattan, 0.01);
  }
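All four tests drive different trainers through the same MLTrain interface, which is what lets a single helper such as NetworkUtil.testTraining handle them. NetworkUtil is Encog test code, so the following is only a rough approximation of such a helper, with a reset strategy added:

  static void testTraining(MLDataSet data, MLTrain train, double maxError) {
    // The data set is already bound to the trainer; the parameter only mirrors the call sites above.
    // Reset the underlying method if five iterations bring no improvement.
    train.addStrategy(new RequiredImprovementStrategy(5));
    int epoch = 0;
    do {
      train.iteration();
      epoch++;
    } while (epoch < 5000 && train.getError() > maxError);
    if (train.getError() > maxError) {
      throw new AssertionError("Failed to reach error " + maxError);
    }
  }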
