Examples of Fmincg


Examples of de.jungblut.math.minimize.Fmincg

    // sample a parabola of points and use one hidden layer
    MultilayerPerceptron mlp = MultilayerPerceptron.MultilayerPerceptronBuilder
        .create(
            new int[] { 2, 2, 1 },
            new ActivationFunction[] { LINEAR.get(), SIGMOID.get(),
                LINEAR.get() }, new SquaredMeanErrorFunction(), new Fmincg(),
            10000).verbose(false).build();

    // sample a parabola of points
    Tuple<DoubleVector[], DoubleVector[]> sample = sampleParable();
View Full Code Here

Examples of de.jungblut.math.minimize.Fmincg

  public void testXORSoftMaxFminCG() {
    MultilayerPerceptron mlp = MultilayerPerceptron.MultilayerPerceptronBuilder
        .create(
            new int[] { 2, 4, 2 },
            new ActivationFunction[] { LINEAR.get(), SIGMOID.get(),
                SOFTMAX.get() }, new CrossEntropyErrorFunction(), new Fmincg(),
            100).build();
    Tuple<DoubleVector[], DoubleVector[]> sampleXOR = sampleXORSoftMax();
    double error = mlp.train(sampleXOR.getFirst(), sampleXOR.getSecond(),
        new Fmincg(), 100, 0.0d, false);
    System.out.println(error);
    if (error < 0.01) {
      assertTrue(error < 0.01);
      testPredictionsSoftMax(sampleXOR, mlp);
    } else {
View Full Code Here

Examples of de.jungblut.math.minimize.Fmincg

        .create(
            new int[] { 2, 4, 1 },
            new ActivationFunction[] { LINEAR.get(),
                ActivationFunctionSelector.ELLIOT.get(),
                ActivationFunctionSelector.ELLIOT.get() },
            new LogisticErrorFunction(), new Fmincg(), 100).build();
    Tuple<DoubleVector[], DoubleVector[]> sampleXOR = sampleXOR();
    double error = mlp.train(sampleXOR.getFirst(), sampleXOR.getSecond(),
        new Fmincg(), 100, 0.0d, false);
    System.out.println(error);
    // increase the error here a bit, because it is just an approx. to sigmoid
    if (error < 0.02) {
      assertTrue(error < 0.02);
      testPredictionsSoftMax(sampleXOR, mlp);
View Full Code Here

Examples of de.jungblut.math.minimize.Fmincg

  public void testXORFminCG() {
    MultilayerPerceptron mlp = MultilayerPerceptron.MultilayerPerceptronBuilder
        .create(
            new int[] { 2, 4, 1 },
            new ActivationFunction[] { LINEAR.get(), SIGMOID.get(),
                SIGMOID.get() }, new LogisticErrorFunction(), new Fmincg(), 100)
        .build();
    Tuple<DoubleVector[], DoubleVector[]> sampleXOR = sampleXOR();
    double error = mlp.train(sampleXOR.getFirst(), sampleXOR.getSecond(),
        new Fmincg(), 100, 0.0d, false);
    System.out.println(error);
    if (error < 0.01) {
      assertTrue(error < 0.001);
      validatePredictions(sampleXOR, mlp);
    } else {
View Full Code Here

Examples of de.jungblut.math.minimize.Fmincg

  public void testXORPSO() {
    MultilayerPerceptron mlp = MultilayerPerceptron.MultilayerPerceptronBuilder
        .create(
            new int[] { 2, 4, 1 },
            new ActivationFunction[] { LINEAR.get(), SIGMOID.get(),
                SIGMOID.get() }, new LogisticErrorFunction(), new Fmincg(), 100)
        .build();
    Tuple<DoubleVector[], DoubleVector[]> sampleXOR = sampleXOR();
    double error = mlp.train(sampleXOR.getFirst(), sampleXOR.getSecond(),
        new ParticleSwarmOptimization(1000, 2.8d, 0.2, 0.4, 4), 400, 0.0d,
        false);
View Full Code Here

Examples of de.jungblut.math.minimize.Fmincg

    if (mlp == null) {
      mlp = MultilayerPerceptron.MultilayerPerceptronBuilder
          .create(
              new int[] { 2, 4, 1 },
              new ActivationFunction[] { LINEAR.get(), SIGMOID.get(),
                  SIGMOID.get() }, new LogisticErrorFunction(), new Fmincg(),
              100).build();
    }
    Tuple<DoubleVector[], DoubleVector[]> sampleXOR = sampleXOR();
    double error = mlp.train(sampleXOR.getFirst(), sampleXOR.getSecond(),
        new Fmincg(), 100, 0.0d, false);
    System.out.println(error);
    if (error < 0.01) {
      assertTrue(error < 0.001);
      validatePredictions(sampleXOR, mlp);
    } else {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.