Package org.encog.ml.data

Examples of org.encog.ml.data.MLData


 
  public void test(CPN network,String[][] pattern,double[][] input)
  {
    for(int i=0;i<pattern.length;i++)
    {
      MLData inputData = new BasicMLData(input[i]);
      MLData outputData = network.compute(inputData);
      double angle = determineAngle(outputData);
     
      // display image
      for(int j=0;j<HEIGHT;j++)
      {
View Full Code Here


          * OCR.DOWNSAMPLE_WIDTH;
      final int outputNeuron = this.letterListModel.size();

      final MLDataSet trainingSet = new BasicMLDataSet();
      for (int t = 0; t < this.letterListModel.size(); t++) {
        final MLData item = new BasicMLData(inputNeuron);
        int idx = 0;
        final SampleData ds = (SampleData) this.letterListModel
            .getElementAt(t);
        for (int y = 0; y < ds.getHeight(); y++) {
          for (int x = 0; x < ds.getWidth(); x++) {
            item.setData(idx++, ds.getData(x, y) ? .5 : -.5);
          }
        }

        trainingSet.add(new BasicMLDataPair(item, null));
      }
View Full Code Here

    } while(train.getError() > 0.01);

    // test the neural network
    System.out.println("Neural Network Results:");
    for(MLDataPair pair: trainingSet ) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
          + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }
  }
View Full Code Here

        final double newValue = originalValue + ((i - centerPoint))
            * stepSize[jj];

        this.network.setWeight(layer, weight, neuron, newValue);

        final MLData output = this.network.compute(inputData);
        points[i] = output.getData(0);
      } else {
        points[i] = networkOutput;
      }
    }

View Full Code Here

      // Calculate the new error
      sumOfSquaredErrors = 0.0;
      for (int i = 0; i < this.trainingLength; i++) {
        this.indexableTraining.getRecord(i, this.pair);
        final MLData actual = this.network
            .compute(this.pair.getInput());
        final double e = this.pair.getIdeal().getData(0)
            - actual.getData(0);
        sumOfSquaredErrors += e * e;
      }
      sumOfSquaredErrors /= 2.0;

      // Update the objective function
View Full Code Here

    int ji = 0;

    // foreach training vector
    for (final MLDataPair pair : getTraining()) {
      final MLData networkOutput = this.network.compute(pair.getInput());

      // Calculate network error to build the residuals vector
      e = pair.getIdeal().getData(0) - networkOutput.getData(0);
      this.errors[ji] = e;
      sumOfSquaredErrors += e * e;

      // Computation of one of the Jacobian Matrix rows by numerical differentiation:
      // for each weight wj in the network, we have to compute its partial
      //   derivative to build the jacobian matrix.
      int jj = 0;

      // So, for each layer:
      for (int layer = this.network.getLayerCount() - 1; layer > 0; layer--) {
        // for each neuron:
        for (int neuron = 0; neuron < this.network
            .getLayerNeuronCount(layer); neuron++) {
          // for each weight:
          for (int weight = 0; weight < this.network
              .getLayerTotalNeuronCount(layer - 1); weight++) {
            // Compute its partial derivative
            this.jacobian[ji][jj] = computeDerivative(
                pair.getInput(), layer - 1, neuron, weight,
                this.derivativeStepSize,
                networkOutput.getData(0), jj);
            jj++;
          }
        }
      }

View Full Code Here

      training.getRecord(r, pair);
      this.network.setExclude(this.network.getExclude() - 1);

      err = 0.0;

      final MLData input = pair.getInput();
      final MLData target = pair.getIdeal();

      if (this.network.getOutputMode() == PNNOutputMode.Unsupervised) {
        if (deriv) {
          final MLData output = computeDeriv(input, target);
          for (int z = 0; z < this.network.getOutputCount(); z++) {
            out[z] = output.getData(z);
          }
        } else {
          final MLData output = this.network.compute(input);
          for (int z = 0; z < this.network.getOutputCount(); z++) {
            out[z] = output.getData(z);
          }
        }
        for (int i = 0; i < this.network.getOutputCount(); i++) {
          diff = input.getData(i) - out[i];
          err += diff * diff;
        }
      } else if (this.network.getOutputMode() == PNNOutputMode.Classification) {
        final int tclass = (int) target.getData(0);
        MLData output;

        if (deriv) {
          output = computeDeriv(input, pair.getIdeal());
          output.getData(0);
        } else {
          output = this.network.compute(input);
          output.getData(0);
        }

        out[0] = output.getData(0);

        for (int i = 0; i < out.length; i++) {
          if (i == tclass) {
            diff = 1.0 - out[i];
            err += diff * diff;
          } else {
            err += out[i] * out[i];
          }
        }
      }

      else if (this.network.getOutputMode() == PNNOutputMode.Regression) {
        if (deriv) {
          final MLData output = this.network.compute(input);
          for (int z = 0; z < this.network.getOutputCount(); z++) {
            out[z] = output.getData(z);
          }
        } else {
          final MLData output = this.network.compute(input);
          for (int z = 0; z < this.network.getOutputCount(); z++) {
            out[z] = output.getData(z);
          }
        }
        for (int i = 0; i < this.network.getOutputCount(); i++) {
          diff = target.getData(i) - out[i];
          err += diff * diff;
 
View Full Code Here

            * der1;
      }
    }

    if (this.network.getOutputMode() == PNNOutputMode.Classification) {
      final MLData result = new BasicMLData(1);
      result.setData(0, ibest);
      return result;
    }

    return null;
  }
View Full Code Here

      for (int i = 0; i < getOutputCount(); i++) {
        out[i] /= psum;
      }

      final MLData result = new BasicMLData(1);
      result.setData(0, EncogMath.maxIndex(out));
      return result;
    } else if (getOutputMode() == PNNOutputMode.Unsupervised) {
      for (int i = 0; i < getInputCount(); i++) {
        out[i] /= psum;
      }
View Full Code Here

    bmu.reset();

    // Determine the BMU for each training element.
    for (final MLDataPair pair : data) {
      final MLData input = pair.getInput();
      bmu.calculateBMU(input);
    }

    // update the error
    return bmu.getWorstDistance() / 100.0;
View Full Code Here

TOP

Related Classes of org.encog.ml.data.MLData

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware@gmail.com.