Package org.encog.neural.flat

Examples of org.encog.neural.flat.FlatNetwork
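FlatNetwork is Encog's flattened representation of a feedforward neural network: all neuron outputs, sums, and weights live in plain double arrays indexed per layer, which makes propagation fast and the structure easy to serialize. The snippets below show it being trained directly, inspected through BasicNetwork.getStructure().getFlat(), dumped into reports and generated code, and resized during pruning.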


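A minimal XOR example: a 2-4-1 flat network is trained with resilient propagation (RPROP) until the error falls below 1%, then evaluated on each training pair. The commented-out lines show where OpenCL training could be enabled in older Encog releases.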
  public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 1.0 } };
  public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };

  public static void main(final String args[]) {
   
    // 2 inputs, one hidden layer of 4 neurons, no second hidden layer,
    // 1 output; the final flag selects tanh (true) or sigmoid (false)
    FlatNetwork network = new FlatNetwork(2,4,0,1,false);
    network.randomize();
   
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
   
   
    TrainFlatNetworkResilient train = new TrainFlatNetworkResilient(network,trainingSet);
   
    //Encog.getInstance().initCL();
    //train.setTargetDevice(Encog.getInstance().getCL().getDevices().get(0));
   
    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while(train.getError() > 0.01 );

    double[] output = new double[1];
    // test the neural network
    System.out.println("Neural Network Results:");
    for(MLDataPair pair: trainingSet ) {
      double[] input = pair.getInput().getData();
      network.compute(input, output);
      System.out.println(input[0] + "," + input[1] + ":" + output[0]);
    }
  }


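A benchmarking fragment: it builds a flat network with one 20-neuron hidden layer and a plain backpropagation trainer (learning rate 0.7, momentum 0.7). The snippet is cut off before the timing loop; a sketch of a typical completion follows it.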
  public static final int HIDDEN_COUNT = 20;
  public static final int ITERATIONS = 10;
  public static final int AVG_COUNT = 20;

  public static long benchmarkEncogFlat(double[][] input, double[][] output) {
    FlatNetwork network = new FlatNetwork(input[0].length, HIDDEN_COUNT, 0,
        output[0].length, false);
    network.randomize();
    BasicMLDataSet trainingSet = new BasicMLDataSet(input, output);

    TrainFlatNetworkBackPropagation train = new TrainFlatNetworkBackPropagation(
        network, trainingSet, 0.7, 0.7);
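The remainder of the method is not shown. A minimal sketch of how such a benchmark typically finishes, assuming the trainer is simply iterated ITERATIONS times and wall-clock time is returned:

    // hypothetical completion of the truncated benchmark above
    long start = System.currentTimeMillis();
    for (int i = 0; i < ITERATIONS; i++) {
      train.iteration();  // one backpropagation pass over the training set
    }
    return System.currentTimeMillis() - start;  // elapsed milliseconds
  }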

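A unit-test fragment: two layers are given explicit bias activations before the structure is finalized, and the test then checks that those values show up at the expected slots of the flat network's layerOutput array.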
    layer1.setBiasActivation(0.5);
    layer2.setBiasActivation(-1.0);
    network.getStructure().finalizeStructure();
    network.reset();
   
    FlatNetwork flat = network.getStructure().getFlat();
   
    Assert.assertNotNull(flat);
    double[] layerOutput = flat.getLayerOutput();
    Assert.assertEquals(-1, layerOutput[5], 2);
    Assert.assertEquals(0.5, layerOutput[8], 2);
  }

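The same check, this time setting the bias activations through BasicNetwork.setLayerBiasActivation after the network has been reset.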
    network.reset();
   
    network.setLayerBiasActivation(0,0.5);
    network.setLayerBiasActivation(1,-1.0);
   
    FlatNetwork flat = network.getStructure().getFlat();
   
    Assert.assertNotNull(flat);
    double[] layerOutput = flat.getLayerOutput();
    // use the three-argument overload; assertEquals(double, double)
    // without a delta is deprecated in JUnit
    Assert.assertEquals(-1.0, layerOutput[5], 0.0);
    Assert.assertEquals(0.5, layerOutput[8], 0.0);
  }

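A reporting fragment that tabulates the structure of a flat network, one row per layer: neuron counts, feed counts, activation function, bias activation, and context-layer bookkeeping. Because FlatNetwork stores layers output-first, the first row is the output layer and the last row is the input layer.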
      report.header("Context Target Offset");
      report.header("Context Count");
      report.endRow();

      BasicNetwork network = (BasicNetwork) method;
      FlatNetwork flat = network.getStructure().getFlat();
      int layerCount = network.getLayerCount();

      for (int l = 0; l < layerCount; l++) {
        report.beginRow();
        StringBuilder str = new StringBuilder();
        str.append(Format.formatInteger(l + 1));
        // FlatNetwork stores layers in reverse order: index 0 is the output layer
        if (l == 0) {
          str.append(" (Output)");
        } else if (l == network.getLayerCount() - 1) {
          str.append(" (Input)");
        }
        report.cell(str.toString());
        report.cell(Format.formatInteger(flat.getLayerCounts()[l]));
        report.cell(Format.formatInteger(flat.getLayerFeedCounts()[l]));
        report.cell(flat.getActivationFunctions()[l].getClass()
            .getSimpleName());
        report.cell(Format.formatDouble(flat.getBiasActivation()[l], 4));
        report.cell(Format.formatInteger(flat.getContextTargetSize()[l]));
        report.cell(Format.formatInteger(flat.getContextTargetOffset()[l]));
        report.cell(Format.formatInteger(flat.getLayerContextCount()[l]));
        report.endRow();
      }
      report.endTable();
    }
   

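The same benchmark fragment as above, differing only in the hidden-layer size (200 neurons instead of 20).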
  public static final int HIDDEN_COUNT = 200;
  public static final int ITERATIONS = 10;
  public static final int AVG_COUNT = 20;

  public static long benchmarkEncogFlat(double[][] input, double[][] output) {
    FlatNetwork network = new FlatNetwork(input[0].length, HIDDEN_COUNT, 0,
        output[0].length, false);
    network.randomize();
    BasicMLDataSet trainingSet = new BasicMLDataSet(input, output);

    TrainFlatNetworkBackPropagation train = new TrainFlatNetworkBackPropagation(
        network, trainingSet, 0.7, 0.7);

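A code-generation fragment: if a previously persisted network file exists, it is loaded and every structural array is copied out of the FlatNetwork so the generator can emit them as constants. The final line is truncated in the source snippet.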
    double[] p = null;

    if (methodFile.exists()) {
      method = (MLMethod) EncogDirectoryPersistence
          .loadObject(methodFile);
      final FlatNetwork flat = ((BasicNetwork) method).getFlat();

      contextTargetOffset = flat.getContextTargetOffset();
      contextTargetSize = flat.getContextTargetSize();
      hasContext = flat.getHasContext();
      inputCount = flat.getInputCount();
      layerContextCount = flat.getLayerContextCount();
      layerCounts = flat.getLayerCounts();
      layerFeedCounts = flat.getLayerFeedCounts();
      layerIndex = flat.getLayerIndex();
      layerOutput = flat.getLayerOutput();
      layerSums = flat.getLayerSums();
      outputCount = flat.getOutputCount();
      weightIndex = flat.getWeightIndex();
      weights = flat.getWeights();
      activation = createActivations(flat);
      p = createParams(flat);
      neuronCount = flat.getLayerOutput().length;
      layerCount = flat.getLayerCounts().length;
    }

    setIndentLevel(2);
    indentIn();
    addNameValue("string EXPORT_FILENAME", "\"" + processFile.getName()

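Generating JavaScript for the Encog JS runtime: each FlatNetwork array is written out as a property of an ENCOG.BasicNetwork object, so the trained network can be evaluated in a browser.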
    // guard against methods that cannot supply a FlatNetwork;
    // the check must match the ContainsFlat cast below
    if (!(method instanceof ContainsFlat)) {
      throw new EncogError("Code generation not yet supported for: "
          + method.getClass().getName());
    }

    final FlatNetwork flat = ((ContainsFlat) method).getFlat();

    // header
    final StringBuilder line = new StringBuilder();
    line.append("public static MLMethod ");
    line.append(node.getName());
    line.append("() {");
    indentLine(line.toString());

    // create factory
    line.setLength(0);

    addLine("var network = ENCOG.BasicNetwork.create( null );");
    addLine("network.inputCount = " + flat.getInputCount() + ";");
    addLine("network.outputCount = " + flat.getOutputCount() + ";");
    addLine("network.layerCounts = "
        + toSingleLineArray(flat.getLayerCounts()) + ";");
    addLine("network.layerContextCount = "
        + toSingleLineArray(flat.getLayerContextCount()) + ";");
    addLine("network.weightIndex = "
        + toSingleLineArray(flat.getWeightIndex()) + ";");
    addLine("network.layerIndex = "
        + toSingleLineArray(flat.getLayerIndex()) + ";");
    addLine("network.activationFunctions = "
        + toSingleLineArray(flat.getActivationFunctions()) + ";");
    addLine("network.layerFeedCounts = "
        + toSingleLineArray(flat.getLayerFeedCounts()) + ";");
    addLine("network.contextTargetOffset = "
        + toSingleLineArray(flat.getContextTargetOffset()) + ";");
    addLine("network.contextTargetSize = "
        + toSingleLineArray(flat.getContextTargetSize()) + ";");
    addLine("network.biasActivation = "
        + toSingleLineArray(flat.getBiasActivation()) + ";");
    addLine("network.beginTraining = " + flat.getBeginTraining() + ";");
    addLine("network.endTraining = " + flat.getEndTraining() + ";");
    addLine("network.weights = WEIGHTS;");
    addLine("network.layerOutput = "
        + toSingleLineArray(flat.getLayerOutput()) + ";");
    addLine("network.layerSums = " + toSingleLineArray(flat.getLayerSums())
        + ";");

    // return
    addLine("return network;");

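A nearly identical persistence-loading fragment from another code generator; it omits the neuronCount/layerCount bookkeeping of the earlier version.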
    double[] p = null;

    if (methodFile.exists()) {
      method = (MLMethod) EncogDirectoryPersistence
          .loadObject(methodFile);
      FlatNetwork flat = ((BasicNetwork) method).getFlat();

      contextTargetOffset = flat.getContextTargetOffset();
      contextTargetSize = flat.getContextTargetSize();
      hasContext = flat.getHasContext();
      inputCount = flat.getInputCount();
      layerContextCount = flat.getLayerContextCount();
      layerCounts = flat.getLayerCounts();
      layerFeedCounts = flat.getLayerFeedCounts();
      layerIndex = flat.getLayerIndex();
      layerOutput = flat.getLayerOutput();
      layerSums = flat.getLayerSums();
      outputCount = flat.getOutputCount();
      weightIndex = flat.getWeightIndex();
      weights = flat.getWeights();
      activation = createActivations(flat);
      p = createParams(flat);
    }

    setIndentLevel(2);

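From a selective-pruning routine that grows a layer: it computes how many connections the enlarged network needs, bumps the layer counts inside the flat network, builds a new weight array in which connections touching the new neurons start at zero, and finally reindexes the network. (The opening guard below is reconstructed from the truncated snippet.)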
          "New neuron count is either a decrease or no change: "
              + neuronCount);
    }

    // access the flat network
    final FlatNetwork flat = this.network.getStructure().getFlat();
    final double[] oldWeights = flat.getWeights();

    // first find out how many connections there will be after this prune.
    int connections = oldWeights.length;
    int inBoundConnections = 0;
    int outBoundConnections = 0;

    // are connections added from the previous layer?
    if (targetLayer > 0) {
      inBoundConnections = this.network
          .getLayerTotalNeuronCount(targetLayer - 1);
      connections += inBoundConnections * increaseBy;
    }

    // are there connections added from the next layer?
    if (targetLayer < (this.network.getLayerCount() - 1)) {
      outBoundConnections = this.network
          .getLayerNeuronCount(targetLayer + 1);
      connections += outBoundConnections * increaseBy;
    }

    // increase layer count; FlatNetwork stores its layers in reverse
    // order (output layer first), so map the API layer index accordingly
    final int flatLayer = this.network.getLayerCount() - targetLayer - 1;
    flat.getLayerCounts()[flatLayer] += increaseBy;
    flat.getLayerFeedCounts()[flatLayer] += increaseBy;

    // allocate new weights now that we know how big the new weights will be
    final double[] newWeights = new double[connections];

    // construct the new weights
    int weightsIndex = 0;
    int oldWeightsIndex = 0;

    for (int fromLayer = flat.getLayerCounts().length - 2; fromLayer >= 0; fromLayer--) {
      final int fromNeuronCount = this.network
          .getLayerTotalNeuronCount(fromLayer);
      final int toNeuronCount = this.network
          .getLayerNeuronCount(fromLayer + 1);
      final int toLayer = fromLayer + 1;

      for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++) {
        for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++) {
          if ((toLayer == targetLayer)
              && (toNeuron >= oldNeuronCount)) {
            newWeights[weightsIndex++] = 0;
          } else if ((fromLayer == targetLayer)
              && (fromNeuron > oldNeuronCount)) {
            newWeights[weightsIndex++] = 0;
          } else {
            newWeights[weightsIndex++] = this.network.getFlat().getWeights()[oldWeightsIndex++];
          }
        }
      }
    }

    // swap in the new weights
    flat.setWeights(newWeights);

    // reindex
    reindexNetwork();
  }
