Package de.jungblut.math.dense

Examples of de.jungblut.math.dense.DenseDoubleVector
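
The excerpts below are taken from the library's unit tests and show DenseDoubleVector in use. As a quick orientation, here is a minimal, self-contained sketch of the basic operations that recur throughout the excerpts (construction from a double[], get, getLength, subtract, sum, maxIndex); the import of DoubleVector from de.jungblut.math is an assumption about the package layout.

  import de.jungblut.math.DoubleVector;
  import de.jungblut.math.dense.DenseDoubleVector;

  public class DenseDoubleVectorSketch {

    public static void main(String[] args) {
      // a dense vector is backed by a plain double[]
      DoubleVector v = new DenseDoubleVector(new double[] { 1d, 2d, 3d });
      DoubleVector w = new DenseDoubleVector(new double[] { 1d, 1d, 1d });

      System.out.println(v.getLength());       // 3
      System.out.println(v.get(2));            // 3.0
      System.out.println(v.subtract(w).sum()); // 0 + 1 + 2 = 3.0
      System.out.println(v.maxIndex());        // 2, index of the largest entry
    }
  }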



  @Test
  public void testSigmoidErrorVector() {
    DoubleVector y = new DenseDoubleVector(new double[] { 0d, 1d, 0d, 1d, 0d });
    DoubleVector hypothesis = new DenseDoubleVector(new double[] { 0d, 0d, 0d,
        1d, 0d });
    double error = new LogisticErrorFunction().calculateError(y, hypothesis);
    assertEquals(10d, error, 1e-4);
  }


  @Test
  public void testMaximize() {
    // maximize -x^2-y^2
    // derivative is -2*x and -2*y
    // the max should be at 0
    DoubleVector theta = new DenseDoubleVector(new double[] { -25, -25 });
    DoubleVector minimizeFunction = Fmincg.minimizeFunction(
        new NegatedCostFunction(new CostFunction() {
          @Override
          public CostGradientTuple evaluateCost(DoubleVector input) {
            double cost = -Math.pow(input.get(0), 2)
                - Math.pow(input.get(1), 2);
            DenseDoubleVector gradient = new DenseDoubleVector(new double[] {
                -2 * input.get(0), -2 * input.get(1) });

            return new CostGradientTuple(cost, gradient);
          }
        }), theta, 10, false);
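
This excerpt stops before the assertions; a hedged sketch of how the test could finish, checking that the maximizer of -x^2 - y^2 landed at the origin (the 1e-5 tolerance is an assumption):

    // not part of the excerpt: assert that the maximum was located at (0, 0)
    assertEquals(0d, minimizeFunction.get(0), 1e-5);
    assertEquals(0d, minimizeFunction.get(1), 1e-5);
  }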

    // sample 2000 points: features (i, i) with a linear outcome i, for i = 0..1999
    DoubleVector[] train = new DoubleVector[2000];
    DoubleVector[] outcome = new DoubleVector[2000];

    for (int i = 0; i < train.length; i++) {
      train[i] = new DenseDoubleVector(new double[] { i, i });
      outcome[i] = new DenseDoubleVector(new double[] { i });
    }

    return new Tuple<>(train, outcome);
  }

    // sample 100 points: features (i, i) with a quadratic outcome i * i, for i = 0..99
    DoubleVector[] train = new DoubleVector[100];
    DoubleVector[] outcome = new DoubleVector[100];

    for (int i = 0; i < train.length; i++) {
      train[i] = new DenseDoubleVector(new double[] { i, i });
      outcome[i] = new DenseDoubleVector(new double[] { i * i });
    }

    return new Tuple<>(train, outcome);
  }


  public Tuple<DoubleVector[], DoubleVector[]> sampleXOR() {
    DoubleVector[] train = new DoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 0 }),
        new DenseDoubleVector(new double[] { 1, 1 }) };
    DoubleVector[] prediction = new DoubleVector[] {
        new DenseDoubleVector(new double[] { 0 }),
        new DenseDoubleVector(new double[] { 1 }),
        new DenseDoubleVector(new double[] { 1 }),
        new DenseDoubleVector(new double[] { 0 }) };
    return new Tuple<>(train, prediction);
  }


  public Tuple<DoubleVector[], DoubleVector[]> sampleXORSoftMax() {
    DoubleVector[] train = new DoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 0 }),
        new DenseDoubleVector(new double[] { 1, 1 }) };
    DoubleVector[] prediction = new DoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 0 }),
        new DenseDoubleVector(new double[] { 1, 0 }),
        new DenseDoubleVector(new double[] { 0, 1 }) };
    return new Tuple<>(train, prediction);
  }
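
Note the difference to sampleXOR: here each XOR outcome is one-hot encoded with one dimension per class, so the inputs whose XOR is 1 ({0, 1} and {1, 0}) map to the target {1, 0}, while {0, 0} and {1, 1} map to {0, 1}. The encoded class index of such a target can be read back with maxIndex(), for example:

    // a one-hot target from sampleXORSoftMax(); maxIndex() recovers the
    // encoded class index (0 is the slot used for XOR == 1 above)
    DoubleVector target = new DenseDoubleVector(new double[] { 1, 0 });
    int encodedClass = target.maxIndex(); // 0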

  @Test
  public void testSimpleParable() {
    int startPoint = -5;
    // start at x=-5
    DoubleVector start = new DenseDoubleVector(new double[] { startPoint });

    // our function is f(x) = (4-x)^2+10
    // the derivative is f'(x) = 2x-8
    CostFunction inlineFunction = new CostFunction() {
      @Override
      public CostGradientTuple evaluateCost(DoubleVector input) {

        double cost = Math.pow(4 - input.get(0), 2) + 10;
        DenseDoubleVector gradient = new DenseDoubleVector(
            new double[] { 2 * input.get(0) - 8 });

        return new CostGradientTuple(cost, gradient);
      }
    };
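
The excerpt ends with the cost function definition; the remainder of the test presumably hands it to Fmincg as in testMaximize above. A hedged sketch of that step, locating the minimum of f(x) = (4-x)^2 + 10 at x = 4 (iteration count and tolerance are assumptions):

    // not part of the excerpt: minimize starting from x = -5,
    // expecting the minimum at x = 4 (where f(4) = 10)
    DoubleVector minimum = Fmincg.minimizeFunction(inlineFunction, start, 100, false);
    assertEquals(4d, minimum.get(0), 1e-5);
  }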

public class DenseMatrixFolderTest {

  @Test
  public void testFoldAndUnfold() {
    DenseDoubleVector referenceFold = new DenseDoubleVector(new double[] { 1.0,
        4.0, 2.0, 5.0, 3.0, 6.0, 7.0, 10.0, 8.0, 11.0, 9.0, 12.0 });
    DenseDoubleMatrix mat1 = new DenseDoubleMatrix(new double[][] {
        { 1, 2, 3 }, { 4, 5, 6 } });
    DenseDoubleMatrix mat2 = new DenseDoubleMatrix(new double[][] {
        { 7, 8, 9 }, { 10, 11, 12 } });

    DoubleVector foldMatrices = DenseMatrixFolder.foldMatrices(mat1, mat2);
    assertEquals(12, foldMatrices.getLength());
    assertEquals(0.0d, referenceFold.subtract(foldMatrices).sum(), 1e-5);

    DoubleMatrix[] unfoldMatrices = DenseMatrixFolder.unfoldMatrices(
        foldMatrices, new int[][] { { 2, 3 }, { 2, 3 } });

    assertEquals(0.0d, unfoldMatrices[0].subtract(mat1).sum(), 1e-5);
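
The excerpt cuts off after checking the first unfolded matrix; presumably the symmetric check for the second one follows. A hedged sketch of that completion:

    // not part of the excerpt: the second unfolded matrix should equal mat2
    assertEquals(0.0d, unfoldMatrices[1].subtract(mat2).sum(), 1e-5);
  }
}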

    // in roughly 30% of the samples (r.nextDouble() > 0.7) we observe an umbrella
    Random r = new Random(0L);
    for (int i = 0; i < features.length; i++) {
      if (r.nextDouble() > 0.7) {
        // umbrella
        features[i] = new DenseDoubleVector(new double[] { 0d, 1d });
        // in 10% of the cases we add noisy data
        if (r.nextDouble() > 0.9) {
          outcome[i] = new DenseDoubleVector(new double[] { 0d, 1d });
        } else {
          outcome[i] = new DenseDoubleVector(new double[] { 1d, 0d });
        }
      } else {
        // no umbrella
        features[i] = new DenseDoubleVector(new double[] { 1d, 0d });
        outcome[i] = new DenseDoubleVector(new double[] { 0d, 1d });
      }
    }

  }

    assertLogEquals(hmm.getEmissionProbabilitiyMatrix().get(0, 1), 0.03, 0.01);
    assertLogEquals(hmm.getEmissionProbabilitiyMatrix().get(1, 0), 0.003, 0.01);
    assertLogEquals(hmm.getEmissionProbabilitiyMatrix().get(1, 1), 0.99, 0.01);

    // observe an umbrella
    DoubleVector predict = hmm.predict(new DenseDoubleVector(new double[] { 0d,
        1d }));
    // so it is very likely (~91%) that it rains
    assertEquals(1, predict.maxIndex());
    assertEquals(0.91, predict.get(1), 0.01);
    assertEquals(0.08, predict.get(0), 0.1);
    // observe no umbrella after observing one
    predict = hmm.predict(new DenseDoubleVector(new double[] { 1d, 0d }),
        predict);
    // so it is very likely that it is sunny now
    assertEquals(0, predict.maxIndex());
    assertEquals(0.99, predict.get(0), 0.01);
    assertEquals(0.001, predict.get(1), 0.01);