Package org.encog.engine.network.activation

Examples of org.encog.engine.network.activation.ActivationLinear


    }
   
   
    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    ActivationFunction af = new ActivationLinear();

    int questionPhase = 0;
    for (final String layerStr : layers) {
      int defaultCount;
      // determine default
View Full Code Here


      final int outputCount, final RadialBasisFunction[] rbf) {

    FlatLayer[] layers = new FlatLayer[3];
    this.rbf = rbf;

    layers[0] = new FlatLayer(new ActivationLinear(), inputCount, 0.0);
    layers[1] = new FlatLayer(new ActivationLinear(), hiddenCount, 0.0);
    layers[2] = new FlatLayer(new ActivationLinear(), outputCount, 0.0);

    init(layers);
  }
View Full Code Here

    if (name.equalsIgnoreCase(MLActivationFactory.AF_GAUSSIAN)) {
      return new ActivationGaussian();
    }

    if (name.equalsIgnoreCase(MLActivationFactory.AF_LINEAR)) {
      return new ActivationLinear();
    }

    if (name.equalsIgnoreCase(MLActivationFactory.AF_LOG)) {
      return new ActivationLOG();
    }
View Full Code Here

   *            True if this is a tanh activation, false for sigmoid.
   */
  public FlatNetwork(final int input, final int hidden1, final int hidden2,
      final int output, final boolean tanh) {

    final ActivationFunction linearAct = new ActivationLinear();
    FlatLayer[] layers;
    final ActivationFunction act = tanh ? new ActivationTANH()
        : new ActivationSigmoid();

    if ((hidden1 == 0) && (hidden2 == 0)) {
View Full Code Here

    final FlatLayer[] flatLayers = new FlatLayer[this.layers.size()];

    for (int i = 0; i < this.layers.size(); i++) {
      final BasicLayer layer = (BasicLayer) this.layers.get(i);
      if (layer.getActivation() == null) {
        layer.setActivation(new ActivationLinear());
      }

      flatLayers[i] = layer;
    }
View Full Code Here

public class TestActivationLinear extends TestCase {
  @Test
  public void testLinear() throws Throwable
  {
    ActivationLinear activation = new ActivationLinear();
    Assert.assertTrue(activation.hasDerivative());
   
    ActivationLinear clone = (ActivationLinear)activation.clone();
    Assert.assertNotNull(clone);
   
    double[] input = { 1,2,3 };
   
    activation.activationFunction(input,0,input.length);
View Full Code Here

    }
   
   
    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    ActivationFunction af = new ActivationLinear();

    int questionPhase = 0;
    for (final String layerStr : layers) {
      int defaultCount;
      // determine default
View Full Code Here

   * @return The generated network.
   */
  public MLMethod generate() {
    final BasicNetwork network = new BasicNetwork();

    final Layer inputLayer = new BasicLayer(new ActivationLinear(), true,
        this.inputNeurons);
    final Layer outputLayer = new BasicLayer(new ActivationLinear(), false,
        this.outputNeurons);

    network.addLayer(inputLayer);
    network.addLayer(outputLayer);
    network.getStructure().finalizeStructure();
View Full Code Here

TOP

Related Classes of org.encog.engine.network.activation.ActivationLinear

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of the respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Corporation. Contact coftware#gmail.com.