Package de.jungblut.math

Examples of de.jungblut.math.DoubleVector
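
The snippets below come from the library's unit tests. As a quick orientation, here is a minimal sketch of constructing the two implementations that appear throughout this page; it only uses operations visible in the snippets themselves (constructors, get, set, toArray), and anything beyond that is not assumed.

    // Minimal sketch: dense vs. sparse vectors, using only the API shown below.
    DoubleVector dense = new DenseDoubleVector(new double[] { 1d, 2d, 3d });
    double first = dense.get(0);                     // 1.0

    DoubleVector sparse = new SparseDoubleVector(4); // dimension 4, all zeros
    sparse.set(1, 0.5);                              // only non-zero entries are stored
    double[] asArray = sparse.toArray();             // dense copy: { 0.0, 0.5, 0.0, 0.0 }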



  @Test
  public void testParticleSwarmOptimizationNonConvex() {

    DoubleVector start = new DenseDoubleVector(new double[] { 100, 30 });

    CostFunction inlineFunction = new CostFunction() {
      @Override
      public CostGradientTuple evaluateCost(DoubleVector input) {
        double x = input.get(0);
        double y = input.get(1);
        // this is the Rosenbrock function: f(x, y) = (1 - x)^2 + 100 * (y - x^2)^2
        double cost = Math.pow((1 - x), 2) + 100 * Math.pow((y - x * x), 2);

        return new CostGradientTuple(cost, null);
      }
    };

    DoubleVector minimizeFunction = ParticleSwarmOptimization.minimizeFunction(
        inlineFunction, start, 1000, 2.8, 0.4, 0.8, 65, 8, false);

    assertEquals(1d, minimizeFunction.get(0), 0.2);
    assertEquals(1d, minimizeFunction.get(1), 0.2);
  }
View Full Code Here
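
Particle swarm optimization is a gradient-free method, which is why the cost function above returns a null gradient. A gradient-based minimizer (like the OWLQN example further down this page) would also need the partial derivatives; the following is a hedged sketch of the same Rosenbrock cost with its analytic gradient, reusing only the CostFunction and CostGradientTuple API already shown above.

    // Sketch only: Rosenbrock cost plus its analytic gradient,
    // df/dx = -2 * (1 - x) - 400 * x * (y - x^2), df/dy = 200 * (y - x^2).
    CostFunction rosenbrockWithGradient = new CostFunction() {
      @Override
      public CostGradientTuple evaluateCost(DoubleVector input) {
        double x = input.get(0);
        double y = input.get(1);
        double cost = Math.pow(1 - x, 2) + 100 * Math.pow(y - x * x, 2);
        DoubleVector gradient = new DenseDoubleVector(new double[] {
            -2 * (1 - x) - 400 * x * (y - x * x),
            200 * (y - x * x) });
        return new CostGradientTuple(cost, gradient);
      }
    };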


      arr[i % 5] = 1d;
      outcome.add(new DenseDoubleVector(arr));
    }
    knn.train(features, outcome);

    DoubleVector prediction = knn.predict(new SingleEntryDoubleVector(5));
    assertArrayEquals(new double[] { 1d, 0, 0, 0, 1d }, prediction.toArray());
    prediction = knn.predictProbability(new SingleEntryDoubleVector(5));
    assertArrayEquals(new double[] { 0.5, 0, 0, 0, 0.5 }, prediction.toArray());
  }
View Full Code Here

  @Test
  public void testSimpleParable() {
    int startPoint = -5;
    // start at x=-5
    DoubleVector start = new DenseDoubleVector(new double[] { startPoint });

    // our function is f(x) = (4-x)^2+10
    // the derivative is f'(x) = 2x-8
    CostFunction inlineFunction = new CostFunction() {
      @Override
      public CostGradientTuple evaluateCost(DoubleVector input) {

        double cost = Math.pow(4 - input.get(0), 2) + 10;
        DenseDoubleVector gradient = new DenseDoubleVector(
            new double[] { 2 * input.get(0) - 8 });

        return new CostGradientTuple(cost, gradient);
      }
    };

    DoubleVector minimizeFunction = OWLQN.minimizeFunction(inlineFunction,
        start, 100, true);

    assertEquals(4.0d, minimizeFunction.get(0), 1e-5);
  }
View Full Code Here
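
The hand-written derivative in tests like this one is easy to sanity-check with a central finite difference. The sketch below is not part of the original test; it just verifies the arithmetic of f(x) = (4 - x)^2 + 10 and f'(x) = 2x - 8 at the start point.

    // Sketch only: compare f'(x) = 2x - 8 against (f(x + h) - f(x - h)) / (2h).
    double h = 1e-6;
    double x0 = -5; // the test's start point
    double numeric = ((Math.pow(4 - (x0 + h), 2) + 10)
        - (Math.pow(4 - (x0 - h), 2) + 10)) / (2 * h);
    assertEquals(2 * x0 - 8, numeric, 1e-3);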

  @Test
  public void testVectorInvertedIndex() {

    InvertedIndex<DoubleVector, Integer> invIndex = InvertedIndex
        .createVectorIndex(new CosineDistance());
    DoubleVector v1 = new SparseDoubleVector(4);
    v1.set(1, 0.6931471805599453); // Math.log(2)
    v1.set(0, 1.3862943611198906); // Math.log(4)
    DoubleVector v2 = new SparseDoubleVector(4);
    v2.set(2, 0.6931471805599453); // Math.log(2)
    v2.set(1, 1.3862943611198906); // Math.log(4)

    invIndex.build(Arrays.asList(v1, v2));

    DoubleVector v3 = new SparseDoubleVector(4);
    v3.set(3, 0.2);
    v3.set(1, 1);
    List<DistanceResult<DoubleVector>> res = invIndex.query(v3);
    assertEquals(2, res.size());
    assertEquals(v2, res.get(0).get());
    assertEquals(v1, res.get(1).get());
View Full Code Here

    reg.setRandom(new Random(0));
    reg.train(data);

    double loss = 0d;
    for (Tuple<DoubleVector, DoubleVector> dx : data) {
      DoubleVector prediction = reg.predict(dx.getFirst());
      prediction = new DenseDoubleVector(
          prediction.apply(new DoubleVectorFunction() {

            @Override
            public double calculate(int index, double value) {
              return value > 0.5 ? 1d : 0d;
            }
View Full Code Here

    internalChecks(classifier);
  }

  public void internalChecks(MultinomialNaiveBayes classifier) {
    DoubleVector classProbability = classifier.getClassProbability();
    assertEquals(FastMath.log(2d / 5d), classProbability.get(0), 0.01d);
    assertEquals(FastMath.log(3d / 5d), classProbability.get(1), 0.01d);

    DoubleMatrix mat = classifier.getProbabilityMatrix();
    double[] realFirstRow = new double[] { 0.0, 0.0, -2.1972245773362196,
        -1.5040773967762742, -1.5040773967762742 };
    double[] realSecondRow = new double[] { -0.9808292530117262,
        -2.0794415416798357, 0.0, 0.0, 0.0 };

    double[] firstRow = mat.getRowVector(0).toArray();
    assertEquals(realFirstRow.length, firstRow.length);
    for (int i = 0; i < firstRow.length; i++) {
      assertEquals("" + Arrays.toString(firstRow), realFirstRow[i],
          firstRow[i], 0.05d);
    }

    double[] secondRow = mat.getRowVector(1).toArray();
    assertEquals(realSecondRow.length, secondRow.length);
    for (int i = 0; i < secondRow.length; i++) {
      assertEquals("" + Arrays.toString(secondRow), realSecondRow[i],
          secondRow[i], 0.05d);
    }

    DoubleVector claz = classifier.predict(new DenseDoubleVector(new double[] {
        1, 0, 0, 0, 0 }));
    assertEquals("" + claz, 0, claz.get(0), 0.05d);
    assertEquals("" + claz, 1, claz.get(1), 0.05d);

    claz = classifier.predict(new DenseDoubleVector(new double[] { 0, 0, 0, 1,
        1 }));
    assertEquals("" + claz, 1, claz.get(0), 0.05d);
    assertEquals("" + claz, 0, claz.get(1), 0.05d);
  }
View Full Code Here
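
Both the class probabilities and the probability matrix checked above are stored as natural logarithms (hence FastMath.log(2d / 5d) and FastMath.log(3d / 5d) as expected values). A short sketch of recovering the plain priors, using only the accessor already shown:

    // Sketch only: exponentiating the log priors recovers P(class 0) = 2/5
    // and P(class 1) = 3/5 for this training set.
    DoubleVector logPriors = classifier.getClassProbability();
    assertEquals(2d / 5d, FastMath.exp(logPriors.get(0)), 0.01d);
    assertEquals(3d / 5d, FastMath.exp(logPriors.get(1)), 0.01d);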


    // now check if the properties hold
    for (DoubleVector v : canopies) {
      // remove all vectors that are in t1
      Iterator<DoubleVector> iterator = input.iterator();
      while (iterator.hasNext()) {
        DoubleVector next = iterator.next();
        if (measure.measureDistance(v, next) < t1) {
          iterator.remove();
        }
      }
    }
View Full Code Here

    // now check if the properties hold
    for (DoubleVector v : centers) {
      // remove all vectors that are in t1
      Iterator<DoubleVector> iterator = input.iterator();
      while (iterator.hasNext()) {
        DoubleVector next = iterator.next();
        if (measure.measureDistance(v, next) < t1) {
          iterator.remove();
        }
      }
    }
View Full Code Here

        new DenseDoubleVector(new double[] { 9, 6 }) };

    int index = 0;
    Iterator<DoubleVector> iterator = tree.iterator();
    while (iterator.hasNext()) {
      DoubleVector next = iterator.next();
      assertEquals(result[index++], next);
    }
    assertEquals(result.length, index);
  }
View Full Code Here
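
Assuming the tree type also implements Iterable<DoubleVector> (only iterator() is visible above, so this is an assumption), the same traversal reads a bit more idiomatically with an enhanced for loop:

    // Sketch, assuming the tree implements Iterable<DoubleVector>.
    int idx = 0;
    for (DoubleVector next : tree) {
      assertEquals(result[idx++], next);
    }
    assertEquals(result.length, idx);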
