Package org.encog.ml.data.basic

Examples of org.encog.ml.data.basic.BasicMLDataSet$BasicMLIterator
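BasicMLIterator is the iterator that BasicMLDataSet hands out from iterator(); the trainers in the snippets below walk the data set through it. A minimal sketch (not taken from any of the examples on this page) of iterating a BasicMLDataSet directly:

    double[][] XOR_INPUT = { { 0, 0 }, { 1, 0 }, { 0, 1 }, { 1, 1 } };
    double[][] XOR_IDEAL = { { 0 }, { 1 }, { 1 }, { 0 } };

    MLDataSet set = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // The for-each loop goes through BasicMLDataSet$BasicMLIterator,
    // since MLDataSet extends Iterable<MLDataPair>.
    for (MLDataPair pair : set) {
      System.out.println(pair.getInput() + " -> " + pair.getIdeal());
    }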


        ideal[i][j] = input[i][j];
      }
    }

    return new BasicMLDataSet(input, ideal);
  }
View Full Code Here


        //Degenerate training data only provides outputs of 1 or 0 (averaging over all outputs for a given set of inputs would produce something approaching the smooth training data).
        //Smooth training data provides true values for the provided input dimensions.
        create2DSmoothTainingDataGit();

        //Create the training set and train.
        MLDataSet trainingSet = new BasicMLDataSet(INPUT, IDEAL);
        MLTrain train = new SVDTraining(network, trainingSet);

        //SVD is a single step solve
        int epoch = 1;
        do
        {
            train.iteration();
            System.out.println("Epoch #" + epoch + " Error:" + train.getError());
            epoch++;
        } while ((epoch < 1) && (train.getError() > 0.001));

        // test the neural network
        System.out.println("Neural Network Results:");

        //Create a testing array which may be at a higher resolution than the original training data
        set2DTestingArrays(100);
        trainingSet = new BasicMLDataSet(INPUT, IDEAL);

        FileWriter outFile = new FileWriter("results.csv");
        PrintWriter out = new PrintWriter(outFile);
       
       
View Full Code Here
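The fragment above stops right after results.csv is opened. A plausible continuation, sketched here as an assumption rather than the original code, walks the regenerated data set and writes the network's prediction for each point:

    // Sketch only: assumes "network" implements MLRegression (SVDTraining
    // operates on an RBF network in the full example) and that each row
    // carries two input values.
    for (MLDataPair pair : trainingSet) {
      MLData prediction = network.compute(pair.getInput());
      out.println(pair.getInput().getData(0) + ","
          + pair.getInput().getData(1) + ","
          + prediction.getData(0));
    }
    out.close();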

        "O   O",
        " OOO "  } };
 
  public static MLDataSet generateTraining()
  {
    MLDataSet result = new BasicMLDataSet();
    for(int i=0;i<DIGITS.length;i++)
    {     
      BasicMLData ideal = new BasicMLData(DIGITS.length);
     
      // setup input
      MLData input = image2data(DIGITS[i]);
     
      // setup ideal
      for(int j=0;j<DIGITS.length;j++)
      {
        if( j==i )
          ideal.setData(j,1);
        else
          ideal.setData(j,-1);
      }
     
      // add training element
      result.add(input,ideal);
    }
    return result;
  }
View Full Code Here
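A hypothetical usage sketch for the generator above (not part of the original listing): the input and ideal sizes can be read back from the data set itself, so nothing about the character grid needs to be hard-coded when wiring up a network.

    MLDataSet training = generateTraining();

    // 20 hidden neurons is an arbitrary choice; "true" selects TANH
    // activation, which matches the -1/1 ideal values built above.
    BasicNetwork network = EncogUtility.simpleFeedForward(
        training.getInputSize(), 20, 0, training.getIdealSize(), true);

    ResilientPropagation train = new ResilientPropagation(network, training);
    do {
      train.iteration();
    } while (train.getError() > 0.01);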

    network.addLayer(new BasicLayer(new ActivationSigmoid(), false,
        output[0].length));
    network.getStructure().finalizeStructure();
    network.reset();

    MLDataSet trainingSet = new BasicMLDataSet(input, output);

    // train the neural network
    MLTrain train = new Backpropagation(network, trainingSet, 0.7, 0.7);

    Stopwatch sw = new Stopwatch();
View Full Code Here
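The benchmark body is cut off right after the Stopwatch is created. A sketch of how the timing loop might continue, assuming a fixed iteration count (ITERATIONS is a hypothetical constant, not shown in the fragment):

    // Assumed continuation, not the original code: time a fixed number of
    // backpropagation iterations and report the elapsed milliseconds.
    sw.start();
    for (int i = 0; i < ITERATIONS; i++) {
      train.iteration();
    }
    sw.stop();
    return sw.getElapsedMilliseconds();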

  public static long BenchmarkEncogFlat(double[][] input, double[][] output) {
    FlatNetwork network = new FlatNetwork(input[0].length, HIDDEN_COUNT, 0,
        output[0].length, false);
    network.randomize();
    BasicMLDataSet trainingSet = new BasicMLDataSet(input, output);

    TrainFlatNetworkBackPropagation train = new TrainFlatNetworkBackPropagation(
        network, trainingSet, 0.7, 0.7);

    double[] a = new double[2];
View Full Code Here

   * @param idealSize The ideal size, 0 for unsupervised.
   * @return An MLDataSet that holds the contents of the CSV file.
   */
  public static MLDataSet loadCSVTOMemory(CSVFormat format,
      String filename, boolean headers, int inputSize, int idealSize) {
    MLDataSet result = new BasicMLDataSet();
    ReadCSV csv = new ReadCSV(filename, headers, format);
    while (csv.next()) {
      MLData input = null;
      MLData ideal = null;
      int index = 0;

      input = new BasicMLData(inputSize);
      for (int i = 0; i < inputSize; i++) {
        double d = csv.getDouble(index++);
        input.setData(i, d);
      }

      if (idealSize > 0) {
        ideal = new BasicMLData(idealSize);
        for (int i = 0; i < idealSize; i++) {
          double d = csv.getDouble(index++);
          ideal.setData(i, d);
        }
      }

      MLDataPair pair = new BasicMLDataPair(input, ideal);
      result.add(pair);
    }

    return result;
  }
View Full Code Here
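A hypothetical call to the loader above (the file name and column counts are assumptions): two input columns and one ideal column are read into memory, and the resulting BasicMLDataSet is then walked through its iterator.

    MLDataSet set = loadCSVTOMemory(CSVFormat.ENGLISH, "xor.csv", false, 2, 1);
    for (MLDataPair pair : set) {
      System.out.println(pair.getInput() + " -> " + pair.getIdeal());
    }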

          % TemporalXOR.SEQUENCE.length];
      this.ideal[i][0] = TemporalXOR.SEQUENCE[(i + 1)
          % TemporalXOR.SEQUENCE.length];
    }

    return new BasicMLDataSet(this.input, this.ideal);
  }
View Full Code Here

  public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };

 
  public static void main(String[] args)
  {
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 0, 1, false);
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    train.addStrategy(new RequiredImprovementStrategy(5));
   
    System.out.println("Perform initial train.");
View Full Code Here
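The fragment stops at the "Perform initial train." message. In many of the Encog XOR examples the next step delegates the loop to EncogUtility; the continuation below is an assumption about the truncated part, not the original code:

    // Assumed continuation: train until the error drops below 1%,
    // then print the network's output for each training case.
    EncogUtility.trainToError(train, 0.01);
    EncogUtility.evaluate(network, trainingSet);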

    // first, create the machine learning method
    MLMethodFactory methodFactory = new MLMethodFactory();   
    MLMethod method = methodFactory.create(methodName, methodArchitecture, 2, outputNeurons);
   
    // second, create the data set   
    MLDataSet dataSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
   
    // third, create the trainer
    MLTrainFactory trainFactory = new MLTrainFactory();
    MLTrain train = trainFactory.create(method,dataSet,trainerName,trainerArgs);       
    // reset if improve is less than 1% over 5 cycles
View Full Code Here
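The comment on the last line refers to a training strategy that the fragment cuts off. Mirroring the earlier snippet that uses RequiredImprovementStrategy(5), a plausible continuation (an assumption, not the original code) would be:

    // Assumed continuation: reset the method if it fails to improve by at
    // least 1% within 5 cycles. The strategy requires a resettable method,
    // so guard on MLResettable first.
    if (method instanceof MLResettable) {
      train.addStrategy(new RequiredImprovementStrategy(5));
    }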

      final int idealCount, final double min, final double max) {
   
    LinearCongruentialGenerator rand =
      new LinearCongruentialGenerator(seed);
   
    final BasicMLDataSet result = new BasicMLDataSet();
    for (int i = 0; i < count; i++) {
      final MLData inputData = new BasicMLData(inputCount);

      for (int j = 0; j < inputCount; j++) {
        inputData.setData(j, rand.range(min, max));
      }

      final MLData idealData = new BasicMLData(idealCount);

      for (int j = 0; j < idealCount; j++) {
        idealData.setData(j, rand.range(min, max));
      }

      final BasicMLDataPair pair = new BasicMLDataPair(inputData,
          idealData);
      result.add(pair);

    }
    return result;
  }
View Full Code Here
