Package org.encog.ml.data.basic

Examples of org.encog.ml.data.basic.BasicMLDataSet$BasicMLIterator


   * Process the array.
   * @param data The array to process.
   * @return A neural data set that contains the time-series.
   */
  public final MLDataSet process(final double[] data) {
    final MLDataSet result = new BasicMLDataSet();

    final int totalWindowSize = this.inputWindow + this.predictWindow;
    final int stopPoint = data.length - totalWindowSize;

    for (int i = 0; i < stopPoint; i++) {
      final MLData inputData
        = new BasicMLData(this.inputWindow);
      final MLData idealData
        = new BasicMLData(this.predictWindow);

      int index = i;

      // handle input window
      for (int j = 0; j < this.inputWindow; j++) {
        inputData.setData(j, data[index++]);
      }

      // handle predict window
      for (int j = 0; j < this.predictWindow; j++) {
        idealData.setData(j, data[index++]);
      }

      final MLDataPair pair = new BasicMLDataPair(inputData,
          idealData);
      result.add(pair);
    }

    return result;
  }
View Full Code Here


     * @param data The data.
     * @return The data set.
     */
    public MLDataSet process(double[][] data)
    {
        MLDataSet result = new BasicMLDataSet();
        for(double[] doubles : data)
        {
            result.add(processToPair(doubles));
        }
        return result;
    }
View Full Code Here

   * @param idealSize The ideal size, 0 for unsupervised.
   * @return A NeuralDataSet that holds the contents of the CSV file.
   */
  public static MLDataSet loadCSVTOMemory(CSVFormat format,
      String filename, boolean headers, int inputSize, int idealSize) {
    MLDataSet result = new BasicMLDataSet();
    ReadCSV csv = new ReadCSV(filename, headers, format);
    while (csv.next()) {
      MLData input = null;
      MLData ideal = null;
      int index = 0;

      input = new BasicMLData(inputSize);
      for (int i = 0; i < inputSize; i++) {
        double d = csv.getDouble(index++);
        input.setData(i, d);
      }

      if (idealSize > 0) {
        ideal = new BasicMLData(idealSize);
        for (int i = 0; i < idealSize; i++) {
          double d = csv.getDouble(index++);
          ideal.setData(i, d);
        }
      }

      MLDataPair pair = new BasicMLDataPair(input, ideal);
      result.add(pair);
    }

    return result;
  }
View Full Code Here

    if (this.analyst == null) {
      throw new EncogError(
          "Can't normalize yet, file has not been analyzed.");
    }
   
    MLDataSet result = new BasicMLDataSet();
   
    int inputCount = this.analyst.determineInputCount();
    int outputCount = this.analyst.determineOutputCount();
    int totalCount = inputCount+outputCount;
   
    boolean headers = this.analyst.getScript().getProperties()
        .getPropertyBoolean(ScriptProperties.SETUP_CONFIG_INPUT_HEADERS);
   
    final CSVFormat format = this.analyst.getScript().determineFormat();

    CSVHeaders analystHeaders = new CSVHeaders(file, headers,
        format);
   
    ReadCSV csv = new ReadCSV(file.toString(), headers, format);
   
    for (final AnalystField field : analyst.getScript().getNormalize()
        .getNormalizedFields()) {
      field.init();
    }

    TimeSeriesUtil series = new TimeSeriesUtil(analyst,true,
        analystHeaders.getHeaders());
   

    try {
      // write file contents
      while (csv.next()) {

        double[] output = AnalystNormalizeCSV.extractFields(
            this.analyst, analystHeaders, csv, totalCount,
            false);

        if (series.getTotalDepth() > 1) {
          output = series.process(output);
        }

        MLDataPair pair = BasicMLDataPair.createPair(inputCount,outputCount);
        for(int i=0;i<inputCount;i++) {
          pair.getInput().setData(i, output[i]);
        }
        for(int i=0;i<outputCount;i++) {
          pair.getIdeal().setData(i, output[i+inputCount]);
        }
        result.add(pair);
      }
      return result;
    } finally {
      if (csv != null) {
        try {
View Full Code Here

    setExpectInputHeaders(headers);
    setInputFormat(format);

    setAnalyzed(true);
    this.analyst = theAnalyst;
    this.data = new BasicMLDataSet();
    resetStatus();
    int recordCount = 0;

    final int outputLength = this.analyst.determineTotalColumns();
    final ReadCSV csv = new ReadCSV(this.getInputFilename().toString(),
View Full Code Here

  @Override
  public Object read(final InputStream is) {

    final EncogReadHelper in = new EncogReadHelper(is);
    EncogFileSection section;
    final BasicMLDataSet samples = new BasicMLDataSet();
    Map<String, String> networkParams = null;
    PNNKernelType kernel = null;
    PNNOutputMode outmodel = null;
    int inputCount = 0;
    int outputCount = 0;
    double error = 0;
    double[] sigma = null;

    while ((section = in.readNextSection()) != null) {
      if (section.getSectionName().equals("PNN")
          && section.getSubSectionName().equals("PARAMS")) {
        networkParams = section.parseParams();
      }
      if (section.getSectionName().equals("PNN")
          && section.getSubSectionName().equals("NETWORK")) {
        final Map<String, String> params = section.parseParams();
        inputCount = EncogFileSection.parseInt(params,
            PersistConst.INPUT_COUNT);
        outputCount = EncogFileSection.parseInt(params,
            PersistConst.OUTPUT_COUNT);
        kernel = PersistBasicPNN.stringToKernel(params
            .get(PersistConst.KERNEL));
        outmodel = PersistBasicPNN.stringToOutputMode(params
            .get(PersistBasicPNN.PROPERTY_outputMode));
        error = EncogFileSection
            .parseDouble(params, PersistConst.ERROR);
        sigma = section.parseDoubleArray(params,
            PersistConst.SIGMA);
      }
      if (section.getSectionName().equals("PNN")
          && section.getSubSectionName().equals("SAMPLES")) {
        for (final String line : section.getLines()) {
          final List<String> cols = EncogFileSection
              .splitColumns(line);
          int index = 0;
          final MLData inputData = new BasicMLData(inputCount);
          for (int i = 0; i < inputCount; i++) {
            inputData.setData(i,
                CSVFormat.EG_FORMAT.parse(cols.get(index++)));
          }
          final MLData idealData = new BasicMLData(outputCount);
          idealData.setData(0,CSVFormat.EG_FORMAT.parse(cols.get(index++)));
          final MLDataPair pair = new BasicMLDataPair(inputData,idealData);
          samples.add(pair);
        }
      }
    }

    final BasicPNN result = new BasicPNN(kernel, outmodel, inputCount,
View Full Code Here

  public final File EG_FILENAME = TEMP_DIR.createFile("encogtest.eg");
  public final File SERIAL_FILENAME = TEMP_DIR.createFile("encogtest.ser");
 
  private NEATPopulation generate()
  {
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
   
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    ActivationStep step = new ActivationStep();
    step.setCenter(0.5);
View Full Code Here

  {
    Assert.assertEquals(10,pop.getPopulationSize());
    Assert.assertEquals(0.2,pop.getSurvivalRate());
   
    // see if the population can actually be used to train
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);   
    CalculateScore score = new TrainingSetScore(trainingSet);
    EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
    train.iteration();

  }
View Full Code Here

{
  private static double FAKE_DATA[][] = { { 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0 } };

  public void testNPE() throws Exception
  {
    final CalculateScore score = new TrainingSetScore(new BasicMLDataSet(FAKE_DATA, FAKE_DATA));

    // create a new random population and train it
    NEATPopulation pop = new NEATPopulation(FAKE_DATA[0].length, 1, 50);
    pop.reset();
    EvolutionaryAlgorithm training1 = NEATUtil.constructNEATTrainer(pop, score);
View Full Code Here

    new PersistNEATPopulation().save(serialized3, training3.getPopulation());
  }

  public void testSaveRead() throws Exception
  {
    final CalculateScore score = new TrainingSetScore(new BasicMLDataSet(FAKE_DATA, FAKE_DATA));
    NEATPopulation pop = new NEATPopulation(FAKE_DATA[0].length, 1, 50);
    pop.reset();
    // create a new random population and train it
    EvolutionaryAlgorithm training1 = NEATUtil.constructNEATTrainer(pop, score);
    training1.iteration();
View Full Code Here

TOP

Related Classes of org.encog.ml.data.basic.BasicMLDataSet$BasicMLIterator

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware#gmail.com.