Package: org.encog.engine.network.activation

Examples of org.encog.engine.network.activation.ActivationFunction


        flat.setActivationFunctions(new ActivationFunction[flat
            .getLayerCounts().length]);

        for (final String line : section.getLines()) {
          ActivationFunction af = null;
          final List<String> cols = EncogFileSection
              .splitColumns(line);
          final String name = "org.encog.engine.network.activation."
              + cols.get(0);
          try {
            final Class<?> clazz = Class.forName(name);
            af = (ActivationFunction) clazz.newInstance();
          } catch (final ClassNotFoundException e) {
            throw new PersistError(e);
          } catch (final InstantiationException e) {
            throw new PersistError(e);
          } catch (final IllegalAccessException e) {
            throw new PersistError(e);
          }

          for (int i = 0; i < af.getParamNames().length; i++) {
            af.setParam(i,
                CSVFormat.EG_FORMAT.parse(cols.get(i + 1)));
          }

          flat.getActivationFunctions()[index++] = af;
        }
View Full Code Here


   */
  private Randomizer getRandomizer() {
    boolean useNWR = true;
   
    for(int i=0;i<this.getLayerCount();i++) {
      ActivationFunction af = getActivation(i);
      if( af.getClass()!=ActivationSigmoid.class
          && af.getClass()!=ActivationTANH.class
          && af.getClass()!=ActivationElliott.class
          && af.getClass()!=ActivationElliottSymmetric.class) {
        useNWR = false;
      }
    }
   
    if (getLayerCount() < 3) {
View Full Code Here

        result.append("->");
      }
     
      // handle activation function
      if( currentLayer>0 && this.getActivation(currentLayer)!=null ) {
        ActivationFunction activationFunction = getActivation(currentLayer);
        result.append(activationFunction.getFactoryCode());
        result.append("->");
      }
     
      result.append(this.getLayerNeuronCount(currentLayer));
      if( this.isLayerBiased(currentLayer) ) {
View Full Code Here

  private void randomizeSynapse(BasicNetwork network, int fromLayer) {
    int toLayer = fromLayer+1;
    int toCount = network.getLayerNeuronCount(toLayer);
    int fromCount = network.getLayerNeuronCount(fromLayer);
    int fromCountTotalCount = network.getLayerTotalNeuronCount(fromLayer);
    ActivationFunction af = network.getActivation(toLayer);
    double low = calculateRange(af,Double.MIN_VALUE);
    double high = calculateRange(af,Double.MAX_VALUE);

    double b = 0.7d * Math.pow(toCount, (1d / fromCount)) / (high-low);

View Full Code Here

        .getActivationFunctions().length];

    if (this.shouldFixFlatSpot) {
      for (int i = 0; i < this.currentFlatNetwork
          .getActivationFunctions().length; i++) {
        final ActivationFunction af = this.currentFlatNetwork
            .getActivationFunctions()[i];

        if (af instanceof ActivationSigmoid) {
          this.flatSpot[i] = 0.1;
        } else {
View Full Code Here

    final List<NEATLink> linkList = new ArrayList<NEATLink>();

    final ActivationFunction[] afs = new ActivationFunction[substrate
        .getNodeCount()];

    final ActivationFunction af = new ActivationSteepenedSigmoid();
    // all activation functions are the same
    for (int i = 0; i < afs.length; i++) {
      afs[i] = af;
    }
View Full Code Here

  public ActivationFunction create(String fn) {
   
    for (EncogPluginBase plugin : Encog.getInstance().getPlugins()) {
      if (plugin instanceof EncogPluginService1) {
        ActivationFunction result = ((EncogPluginService1) plugin).createActivationFunction(fn);
        if (result != null) {
          return result;
        }
      }
    }
View Full Code Here

    }
   
   
    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    ActivationFunction af = new ActivationLinear();

    int questionPhase = 0;
    for (final String layerStr : layers) {
      int defaultCount;
      // determine default
      if (questionPhase == 0) {
        defaultCount = input;
      } else {
        defaultCount = output;
      }

      final ArchitectureLayer layer = ArchitectureParse.parseLayer(
          layerStr, defaultCount);
      final boolean bias = layer.isBias();

      String part = layer.getName();
      if (part != null) {
        part = part.trim();
      } else {
        part = "";
      }
     
      ActivationFunction lookup = this.factory.create(part);
     
      if (lookup!=null) {
        af = lookup;
      } else {
        if (layer.isUsedDefault()) {
View Full Code Here

   * Parse an activation function from a value.
   * @param value The value.
   * @return The activation function.
   */
  public static ActivationFunction parseActivationFunction(String value) {
    ActivationFunction af = null;
    final String[] cols = value.split("\\|");

    final String afName = "org.encog.engine.network.activation." + cols[0];
    try {
      final Class<?> clazz = Class.forName(afName);
      af = (ActivationFunction) clazz.newInstance();
    } catch (final ClassNotFoundException e) {
      throw new PersistError(e);
    } catch (final InstantiationException e) {
      throw new PersistError(e);
    } catch (final IllegalAccessException e) {
      throw new PersistError(e);
    }

    for (int i = 0; i < af.getParamNames().length; i++) {
      af.setParam(i, CSVFormat.EG_FORMAT.parse(cols[i + 1]));
    }
    return af;
  }
View Full Code Here

    final int toLayerIndex = this.layerIndex[currentLevel];
    final int fromLayerSize = this.layerCounts[currentLevel + 1];
    final int toLayerSize = this.layerFeedCounts[currentLevel];

    final int index = this.weightIndex[currentLevel];
    final ActivationFunction activation = this.network
        .getActivationFunctions()[currentLevel];
    final double currentFlatSpot = this.flatSpot[currentLevel + 1];

    // handle weights
    // array references are made method local to avoid one indirection
    final double[] layerDelta = this.layerDelta;
    final double[] weights = this.weights;
    final double[] gradients = this.gradients;
    final double[] layerOutput = this.layerOutput;
    final double[] layerSums = this.layerSums;
    int yi = fromLayerIndex;
    for (int y = 0; y < fromLayerSize; y++) {
      final double output = layerOutput[yi];
      double sum = 0;

      int wi = index + y;
      final int loopEnd = toLayerIndex+toLayerSize;
      for (int xi = toLayerIndex; xi < loopEnd; xi++, wi += fromLayerSize) {
        gradients[wi] += output * layerDelta[xi];
        sum += weights[wi] * layerDelta[xi];
      }

      layerDelta[yi] = sum
          * (activation.derivativeFunction(layerSums[yi], layerOutput[yi])+currentFlatSpot);

      yi++;
    }
  }
View Full Code Here

TOP

Related Classes of org.encog.engine.network.activation.ActivationFunction

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.