Package org.apache.mahout.math.function

Examples of org.apache.mahout.math.function.PlusMult
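
PlusMult wraps a single multiplicator: its apply(a, b) returns a + b * multiplicator, and the minusMult(c) factory builds the subtracting variant a - b * c. Every snippet below applies it through assign(other, fun) to add or subtract a scaled vector in place. A minimal sketch of that behavior (the class name PlusMultDemo and the example values are illustrative, not taken from the snippets below):

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.function.PlusMult;

public class PlusMultDemo {
  public static void main(String[] args) {
    Vector x = new DenseVector(new double[] {1, 2, 3});
    Vector y = new DenseVector(new double[] {10, 10, 10});

    // y := y + 2.5 * x
    y.assign(x, new PlusMult(2.5));           // y is now {12.5, 15.0, 17.5}

    // minusMult(c) is a PlusMult with multiplicator -c:  y := y - 4.0 * x
    PlusMult minus = PlusMult.minusMult(4.0);
    y.assign(x, minus);                       // y is now {8.5, 7.0, 5.5}

    // A single instance can be reused inside loops; setMultiplicator
    // replaces the raw, signed multiplicator.
    minus.setMultiplicator(-1.0);             // y := y - 1.0 * x
    y.assign(x, minus);                       // y is now {7.5, 5.0, 2.5}

    System.out.println(y);
  }
}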


    Vector w = new DenseVector(c[COL]);
    for (int i = 0; i < c[ROW]; i++) {
      Vector xi = getRow(i);
      double d = xi.dot(v);
      if (d != 0) {
        w.assign(xi, new PlusMult(d)); // w := w + d * xi
      }
    }
    return w;
  }
View Full Code Here
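
The loop above accumulates w := w + (xi . v) * xi over the rows xi of the matrix, i.e. the A^T * (A * v) pattern. A plain-array sketch of the same computation, with an illustrative method name:

  /** Sketch: w = sum over rows x_i of (x_i . v) * x_i, i.e. A^T * (A * v). */
  static double[] timesSquaredSketch(double[][] a, double[] v) {
    double[] w = new double[a[0].length];
    for (double[] xi : a) {
      double d = 0;
      for (int j = 0; j < xi.length; j++) {
        d += xi[j] * v[j];                // d = xi . v
      }
      if (d != 0) {
        for (int j = 0; j < xi.length; j++) {
          w[j] += d * xi[j];              // the PlusMult step: w := w + d * xi
        }
      }
    }
    return w;
  }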


                    DoubleMatrix1D y) {
    A.zMult(x, y, alpha, beta, transposeA);
  }

  public void dger(double alpha, DoubleMatrix1D x, DoubleMatrix1D y, DoubleMatrix2D A) {
    PlusMult fun = PlusMult.plusMult(0); // reused across rows; the multiplicator is set per row
    for (int i = A.rows(); --i >= 0;) {
      fun.setMultiplicator(alpha * x.getQuick(i));
      A.viewRow(i).assign(y, fun); // row i of A += (alpha * x[i]) * y
    }
  }
View Full Code Here
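
dger above is the BLAS rank-one update A := A + alpha * x * y^T, applied one row at a time: row i of A gets (alpha * x[i]) * y added, which is exactly a PlusMult whose multiplicator is reset per row. A plain-array sketch with illustrative method and parameter names:

  /** Sketch of the rank-one update A := A + alpha * x * y^T. */
  static void gerSketch(double alpha, double[] x, double[] y, double[][] a) {
    for (int i = 0; i < a.length; i++) {
      double m = alpha * x[i];            // per-row multiplicator, as in fun.setMultiplicator(...)
      for (int j = 0; j < a[i].length; j++) {
        a[i][j] += m * y[j];              // row i of A += m * y
      }
    }
  }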

      Brows[k] = B.viewRow(k);
    }

    // transformations
    Mult div = Mult.div(0);
    PlusMult minusMult = PlusMult.minusMult(0);

    IntArrayList nonZeroIndexes = new IntArrayList(); // sparsity
    DoubleMatrix1D Browk = org.apache.mahout.math.matrix.DoubleFactory1D.dense.make(nx); // blocked row k

    // Solve L*Y = B(piv,:)
    int CUT_OFF = 10;
    for (int k = 0; k < n; k++) {
      // blocking (make copy of k-th row to localize references)
      Browk.assign(Brows[k]);

      // sparsity detection
      int maxCardinality = nx / CUT_OFF; // == heuristic depending on speedup
      Browk.getNonZeros(nonZeroIndexes, null, maxCardinality);
      int cardinality = nonZeroIndexes.size();
      boolean sparse = (cardinality < maxCardinality);

      for (int i = k + 1; i < n; i++) {
        //for (int j = 0; j < nx; j++) B[i][j] -= B[k][j]*LU[i][k];
        //for (int j = 0; j < nx; j++) B.set(i,j, B.get(i,j) - B.get(k,j)*LU.get(i,k));

        minusMult.setMultiplicator(-LU.getQuick(i, k));
        if (minusMult.getMultiplicator() != 0) {
          if (sparse) {
            Brows[i].assign(Browk, minusMult, nonZeroIndexes);
          } else {
            Brows[i].assign(Browk, minusMult);
          }
        }
      }
    }

    // Solve U*B = Y;
    for (int k = n - 1; k >= 0; k--) {
      // for (int j = 0; j < nx; j++) B[k][j] /= LU[k][k];
      // for (int j = 0; j < nx; j++) B.set(k,j, B.get(k,j) / LU.get(k,k));
      div.setMultiplicator(1 / LU.getQuick(k, k));
      Brows[k].assign(div);

      // blocking
      if (Browk == null) {
        Browk = org.apache.mahout.math.matrix.DoubleFactory1D.dense.make(B.columns());
      }
      Browk.assign(Brows[k]);

      // sparsity detection
      int maxCardinality = nx / CUT_OFF; // == heuristic depending on speedup
      Browk.getNonZeros(nonZeroIndexes, null, maxCardinality);
      int cardinality = nonZeroIndexes.size();
      boolean sparse = (cardinality < maxCardinality);

      //Browk.getNonZeros(nonZeroIndexes,null);
      //boolean sparse = nonZeroIndexes.size() < nx/10;

      for (int i = 0; i < k; i++) {
        // for (int j = 0; j < nx; j++) B[i][j] -= B[k][j]*LU[i][k];
        // for (int j = 0; j < nx; j++) B.set(i,j, B.get(i,j) - B.get(k,j)*LU.get(i,k));

        minusMult.setMultiplicator(-LU.getQuick(i, k));
        if (minusMult.getMultiplicator() != 0) {
          if (sparse) {
            Brows[i].assign(Browk, minusMult, nonZeroIndexes);
          } else {
            Brows[i].assign(Browk, minusMult);
          }
View Full Code Here
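
The three-argument assign used above (Brows[i].assign(Browk, minusMult, nonZeroIndexes)) is the sparse fast path: only the listed non-zero indexes of Browk are visited instead of the whole row. A small fragment sketch of the same idiom, assuming the same imports as the surrounding class and made-up values:

    DoubleMatrix1D bk = org.apache.mahout.math.matrix.DoubleFactory1D.dense.make(new double[] {0, 3, 0, 0, 5});
    DoubleMatrix1D bi = org.apache.mahout.math.matrix.DoubleFactory1D.dense.make(new double[] {1, 1, 1, 1, 1});
    IntArrayList nonZeros = new IntArrayList();
    bk.getNonZeros(nonZeros, null);                // collects the indexes of non-zero cells: {1, 4}
    PlusMult minusMult = PlusMult.minusMult(2.0);  // apply(a, b) = a - 2.0 * b
    bi.assign(bk, minusMult, nonZeros);            // bi := bi - 2.0 * bk, touching only indexes 1 and 4
    // bi is now {1, -5, 1, 1, -9}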

      startTime(TimingSection.ITERATE);
      Vector nextVector = isSymmetric ? corpus.times(currentVector) : corpus.timesSquared(currentVector);
      log.info("{} passes through the corpus so far...", i);
      calculateScaleFactor(nextVector);
      nextVector.assign(new Scale(1 / scaleFactor));
      nextVector.assign(previousVector, new PlusMult(-beta));
      // now orthogonalize
      alpha = currentVector.dot(nextVector);
      nextVector.assign(currentVector, new PlusMult(-alpha));
      endTime(TimingSection.ITERATE);
      startTime(TimingSection.ORTHOGANLIZE);
      orthoganalizeAgainstAllButLast(nextVector, basis);
      endTime(TimingSection.ORTHOGANLIZE);
      // and normalize
      beta = nextVector.norm(2);
      if (outOfRange(beta) || outOfRange(alpha)) {
        log.warn("Lanczos parameters out of range: alpha = {}, beta = {}.  Bailing out early!", alpha, beta);
        break;
      }
      final double b = beta;
      nextVector.assign(new Scale(1 / b));
      basis.assignRow(i, nextVector);
      previousVector = currentVector;
      currentVector = nextVector;
      // save the projections and norms!
      triDiag.set(i - 1, i - 1, alpha);
      if (i < desiredRank - 1) {
        triDiag.set(i - 1, i, beta);
        triDiag.set(i, i - 1, beta);
      }
    }
    startTime(TimingSection.TRIDIAG_DECOMP);

    log.info("Lanczos iteration complete - now to diagonalize the tri-diagonal auxiliary matrix.");
    // at this point, have tridiag all filled out, and basis is all filled out, and orthonormalized
    EigenvalueDecomposition decomp = new EigenvalueDecomposition(triDiag);

    DoubleMatrix2D eigenVects = decomp.getV();
    DoubleMatrix1D eigenVals = decomp.getRealEigenvalues();
    endTime(TimingSection.TRIDIAG_DECOMP);
    startTime(TimingSection.FINAL_EIGEN_CREATE);

    for (int i = 0; i < basis.numRows() - 1; i++) {
      Vector realEigen = new DenseVector(corpus.numCols());
      // the eigenvectors live as columns of V, in reverse order.  Weird but true.
      DoubleMatrix1D ejCol = eigenVects.viewColumn(basis.numRows() - i - 1);
      for (int j = 0; j < ejCol.size(); j++) {
        double d = ejCol.getQuick(j);
        realEigen.assign(basis.getRow(j), new PlusMult(d));
      }
      realEigen = realEigen.normalize();
      eigenVectors.assignRow(i, realEigen);
      log.info("Eigenvector {} found with eigenvalue {}", i, eigenVals.get(i));
      eigenValues.add(eigenVals.get(i));
View Full Code Here

  private static void orthoganalizeAgainstAllButLast(Vector nextVector, Matrix basis) {
    for (int i = 0; i < basis.numRows() - 1; i++) {
      Vector basisVector = basis.getRow(i);
      if (basisVector == null) continue;
      double alpha = nextVector.dot(basisVector);
      if (alpha == 0) continue;
      nextVector.assign(basisVector, new PlusMult(-alpha)); // subtract the projection onto basisVector
    }
  }
View Full Code Here
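
Each pass of orthoganalizeAgainstAllButLast removes the component of nextVector along an earlier basis row, i.e. one Gram-Schmidt subtraction. As a bare fragment (v and b are hypothetical vectors, with b assumed unit-norm so v.dot(b) is the projection coefficient):

    double alpha = v.dot(b);               // projection coefficient of v onto b
    if (alpha != 0) {
      v.assign(b, new PlusMult(-alpha));   // v := v - alpha * b
    }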

            numPreviousEigens - 1);
      }
    }
    if (currentState.getActivationDenominatorSquared() == 0 || trainingVectorNorm == 0) {
      if (currentState.getActivationDenominatorSquared() == 0) {
        pseudoEigen.assign(trainingVector, new PlusMult(1));
        currentState.setHelperVector(currentState.currentTrainingProjection().clone());
        double helperNorm = currentState.getHelperVector().norm(2);
        currentState.setActivationDenominatorSquared(trainingVectorNorm * trainingVectorNorm - helperNorm * helperNorm);
      }
      return;
    }
    currentState.setActivationNumerator(pseudoEigen.dot(trainingVector));
    currentState.setActivationNumerator(currentState.getActivationNumerator() - currentState.getHelperVector().dot(currentState.currentTrainingProjection()));

    double activation = currentState.getActivationNumerator() / Math.sqrt(currentState.getActivationDenominatorSquared());
    currentState.setActivationDenominatorSquared(currentState.getActivationDenominatorSquared() + 2 * activation * currentState.getActivationNumerator()
        + (activation * activation) * (trainingVector.getLengthSquared() - currentState.currentTrainingProjection().getLengthSquared()));
    if (numPreviousEigens > 0) {
      currentState.getHelperVector().assign(currentState.currentTrainingProjection(), new PlusMult(activation));
    }
    pseudoEigen.assign(trainingVector, new PlusMult(activation));
  }
View Full Code Here

     * Step 1: orthogonalize currentPseudoEigen by subtracting off eigen(i) * helper.get(i)
     * Step 2: zero-out the helper vector because it has already helped.
     */
    for (int i = 0; i < state.getNumEigensProcessed(); i++) {
      Vector previousEigen = previousEigens.getRow(i);
      currentPseudoEigen.assign(previousEigen, new PlusMult(-state.getHelperVector().get(i)));
      state.getHelperVector().set(i, 0);
    }
    if (debug && currentPseudoEigen.norm(2) > 0) {
      for (int i = 0; i < state.getNumEigensProcessed(); i++) {
        Vector previousEigen = previousEigens.getRow(i);
View Full Code Here

    final DoubleMatrix1D[] Crows = new DoubleMatrix1D[m];
    for (int i = m; --i >= 0;) {
      Crows[i] = C.viewRow(i);
    }

    final PlusMult fun = PlusMult.plusMult(0);

    this.elements.forEachPair(
        new IntDoubleProcedure() {
          public boolean apply(int key, double value) {
            int i = key / columns;
            int j = key % columns;
            fun.setMultiplicator(value * alpha);
            if (!transposeA) {
              Crows[i].assign(Brows[j], fun);
            } else {
              Crows[j].assign(Brows[i], fun);
            }
View Full Code Here

    DoubleMatrix1D[] Crows = new DoubleMatrix1D[m];
    for (int i = m; --i >= 0;) {
      Crows[i] = C.viewRow(i);
    }

    PlusMult fun = PlusMult.plusMult(0);

    int[] idx = indexes.elements();
    double[] vals = values.elements();
    for (int i = starts.length - 1; --i >= 0;) {
      int low = starts[i];
      for (int k = starts[i + 1]; --k >= low;) {
        int j = idx[k];
        fun.setMultiplicator(vals[k] * alpha);
        if (!transposeA) {
          Crows[i].assign(Brows[j], fun);
        } else {
          Crows[j].assign(Brows[i], fun);
        }
View Full Code Here

    final DoubleMatrix1D[] Crows = new DoubleMatrix1D[m];
    for (int i = m; --i >= 0;) {
      Crows[i] = C.viewRow(i);
    }

    final PlusMult fun = PlusMult.plusMult(0);

    forEachNonZero(
        new IntIntDoubleFunction() {
          public double apply(int i, int j, double value) {
            fun.setMultiplicator(value * alpha);
            if (!transposeA) {
              Crows[i].assign(Brows[j], fun);
            } else {
              Crows[j].assign(Brows[i], fun);
            }
View Full Code Here
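
The three snippets above all implement the same zMult contribution: for every stored non-zero A(i, j), the scaled row (alpha * A(i, j)) * B(j, :) is added into C(i, :), so that C := alpha * A * B + C (the transposeA branch swaps the roles of i and j). A dense plain-array sketch of the non-transposed case, with illustrative method and parameter names:

  /** Sketch: C := alpha * A * B + C, accumulated one scaled row of B per non-zero of A. */
  static void zMultSketch(double alpha, double[][] a, double[][] b, double[][] c) {
    for (int i = 0; i < a.length; i++) {
      for (int j = 0; j < a[i].length; j++) {
        double m = alpha * a[i][j];        // fun.setMultiplicator(value * alpha)
        if (m == 0) {
          continue;                        // the sparse variants skip zero cells entirely
        }
        for (int k = 0; k < b[j].length; k++) {
          c[i][k] += m * b[j][k];          // Crows[i].assign(Brows[j], fun)
        }
      }
    }
  }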
