Package de.jungblut.math.sparse

Examples of de.jungblut.math.sparse.SparseDoubleVector


  public void testMultiClassStratifiedSplits_5_20() {
    DoubleVector[] feats = new DoubleVector[20_000];
    DoubleVector[] outcome = new DoubleVector[20_000];

    for (int i = 0; i < outcome.length; i++) {
      outcome[i] = new SparseDoubleVector(20);
      int index = i % 20;
      outcome[i].set(index, 1d);
    }

    EvaluationSplit split = EvaluationSplit.createStratified(feats, outcome,
View Full Code Here


  public void testMultiClassStratifiedSplits_5_19997() {
    DoubleVector[] feats = new DoubleVector[19997];
    DoubleVector[] outcome = new DoubleVector[19997];

    for (int i = 0; i < outcome.length; i++) {
      outcome[i] = new SparseDoubleVector(20);
      int index = i % 20;
      outcome[i].set(index, 1d);
    }

    EvaluationSplit split = EvaluationSplit.createStratified(feats, outcome,
View Full Code Here

    assertTrue(check instanceof DenseDoubleVector);
  }

  @Test
  public void testSparseSerDe() throws Exception {
    SparseDoubleVector vec = new SparseDoubleVector(new double[] { 1, 2, 3 });
    DoubleVector check = check(vec);
    assertTrue(check instanceof SparseDoubleVector);
  }
View Full Code Here

    List<DoubleVector> tfIdfVectorize = VectorizerUtils.tfIdfVectorize(
        tokenizedDocuments, dict, docCount);

    // {10=0.6931471805599453, 7=0.28768207245178085, 3=0.28768207245178085,
    // 0=1.3862943611198906}
    DoubleVector v1 = new SparseDoubleVector(13);
    v1.set(10, 0.6931471805599453);
    v1.set(7, 0.28768207245178085);
    v1.set(3, 0.28768207245178085);
    v1.set(0, 1.3862943611198906);
    // {10=0.6931471805599453, 3=0.28768207245178085, 1=1.3862943611198906}
    DoubleVector v2 = new SparseDoubleVector(13);
    v2.set(10, 0.6931471805599453);
    v2.set(3, 0.28768207245178085);
    v2.set(1, 1.3862943611198906);
    // {12=1.3862943611198906, 11=1.3862943611198906, 8=0.6931471805599453,
    // 7=0.28768207245178085, 3=0.28768207245178085}
    DoubleVector v3 = new SparseDoubleVector(13);
    v3.set(12, 1.3862943611198906);
    v3.set(11, 1.3862943611198906);
    v3.set(8, 0.6931471805599453);
    v3.set(7, 0.28768207245178085);
    v3.set(3, 0.28768207245178085);

    // {9=1.3862943611198906, 8=0.6931471805599453, 7=0.28768207245178085,
    // 6=1.3862943611198906, 5=1.3862943611198906, 4=1.3862943611198906,
    // 2=1.3862943611198906}
    DoubleVector v4 = new SparseDoubleVector(13);
    v4.set(9, 1.3862943611198906);
    v4.set(8, 0.6931471805599453);
    v4.set(7, 0.28768207245178085);
    v4.set(6, 1.3862943611198906);
    v4.set(5, 1.3862943611198906);
    v4.set(4, 1.3862943611198906);
    v4.set(2, 1.3862943611198906);

    assertEquals(4, tfIdfVectorize.size());

    assertEquals(0d, tfIdfVectorize.get(0).subtract(v1).sum(), 1e-5);
    assertEquals(0d, tfIdfVectorize.get(1).subtract(v2).sum(), 1e-5);
View Full Code Here

  public MultinomialNaiveBayes getTrainedClassifier() {
    MultinomialNaiveBayes classifier = new MultinomialNaiveBayes();

    DoubleVector[] features = new DoubleVector[] {
        new SparseDoubleVector(new double[] { 1, 0, 0, 0, 0 }),
        new SparseDoubleVector(new double[] { 1, 0, 0, 0, 0 }),
        new SparseDoubleVector(new double[] { 1, 1, 0, 0, 0 }),
        new SparseDoubleVector(new double[] { 0, 0, 1, 1, 1 }),
        new SparseDoubleVector(new double[] { 0, 0, 0, 1, 1 }), };
    DenseDoubleVector[] outcome = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 1 }),
        new DenseDoubleVector(new double[] { 1 }),
        new DenseDoubleVector(new double[] { 1 }),
        new DenseDoubleVector(new double[] { 0 }),
View Full Code Here

public class MinHashTest {

  @Test
  public void testMinHashing() throws Exception {

    SparseDoubleVector vec1 = new SparseDoubleVector(5);
    vec1.set(0, 2d);
    vec1.set(3, 311d);
    vec1.set(4, 2d);
    SparseDoubleVector vec2 = new SparseDoubleVector(5);
    vec2.set(0, 2d);
    vec1.set(2, 2d);
    vec2.set(3, 311d);
    vec2.set(4, 2d);

    MinHash minHash = MinHash.create(4);

    int[] minHashVector = minHash.minHashVector(vec1);
    int[] minHashVector2 = minHash.minHashVector(vec2);
View Full Code Here

  }

  @Test
  public void testMinHashingMurmur() throws Exception {

    SparseDoubleVector vec1 = new SparseDoubleVector(5);
    vec1.set(0, 2d);
    vec1.set(3, 311d);
    vec1.set(4, 2d);
    SparseDoubleVector vec2 = new SparseDoubleVector(5);
    vec2.set(0, 2d);
    vec1.set(2, 2d);
    vec2.set(3, 311d);
    vec2.set(4, 2d);

    MinHash minHash = MinHash.create(4, HashType.MURMUR128);

    int[] minHashVector = minHash.minHashVector(vec1);
    int[] minHashVector2 = minHash.minHashVector(vec2);
View Full Code Here

  }

  @Test
  public void testMinHashingMd5() throws Exception {

    SparseDoubleVector vec1 = new SparseDoubleVector(5);
    vec1.set(0, 2d);
    vec1.set(3, 311d);
    vec1.set(4, 2d);
    SparseDoubleVector vec2 = new SparseDoubleVector(5);
    vec2.set(0, 2d);
    vec1.set(2, 2d);
    vec2.set(3, 311d);
    vec2.set(4, 2d);

    MinHash minHash = MinHash.create(4, HashType.MD5);

    int[] minHashVector = minHash.minHashVector(vec1);
    int[] minHashVector2 = minHash.minHashVector(vec2);
View Full Code Here

  @Test
  public void testInsertSparse() throws Exception {
    KDTree<Object> tree = new KDTree<>();
    DoubleVector[] array = new DoubleVector[] {
        new SparseDoubleVector(new double[] { 2, 3 }),
        new SparseDoubleVector(new double[] { 5, 4 }),
        new SparseDoubleVector(new double[] { 9, 6 }),
        new SparseDoubleVector(new double[] { 4, 7 }),
        new SparseDoubleVector(new double[] { 8, 1 }),
        new SparseDoubleVector(new double[] { 7, 2 }), };

    DoubleVector[] result = new DoubleVector[] {
        new SparseDoubleVector(new double[] { 2, 3 }),
        new SparseDoubleVector(new double[] { 8, 1 }),
        new SparseDoubleVector(new double[] { 5, 4 }),
        new SparseDoubleVector(new double[] { 7, 2 }),
        new SparseDoubleVector(new double[] { 4, 7 }),
        new SparseDoubleVector(new double[] { 9, 6 }) };

    for (DoubleVector v : array)
      tree.add(v, null);

    int index = 0;
View Full Code Here

  @Test
  public void testKNearestNeighboursSparse() throws Exception {
    KDTree<Object> tree = new KDTree<>();
    DoubleVector[] array = new DoubleVector[] {
        new SparseDoubleVector(new double[] { 2, 3 }),
        new SparseDoubleVector(new double[] { 5, 4 }),
        new SparseDoubleVector(new double[] { 9, 6 }),
        new SparseDoubleVector(new double[] { 4, 7 }),
        new SparseDoubleVector(new double[] { 8, 1 }),
        new SparseDoubleVector(new double[] { 7, 2 }), };

    for (DoubleVector v : array)
      tree.add(v, null);

    List<VectorDistanceTuple<Object>> nearestNeighbours = tree
        .getNearestNeighbours(new SparseDoubleVector(new double[] { 0, 0 }), 1);
    assertEquals(1, nearestNeighbours.size());
    assertTrue(array[0] == nearestNeighbours.get(0).getVector());
  }
View Full Code Here

TOP

Related Classes of de.jungblut.math.sparse.SparseDoubleVector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.