Package org.apache.mahout.math.function

Examples of org.apache.mahout.math.function.PlusMult
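
PlusMult is a DoubleDoubleFunction whose apply(a, b) returns a + b * multiplicator. Passed to Vector.assign(other, fn), it performs the saxpy-style update v := v + c * other, which is what every example on this page uses it for. A minimal sketch of the basic usage (the statements are assumed to sit inside some method; the values are purely illustrative):

    import org.apache.mahout.math.DenseVector;
    import org.apache.mahout.math.Vector;
    import org.apache.mahout.math.function.PlusMult;

    Vector y = new DenseVector(new double[] {1.0, 2.0, 3.0});
    Vector x = new DenseVector(new double[] {4.0, 5.0, 6.0});
    y.assign(x, new PlusMult(2.0));     // y := y + 2.0 * x
    y.assign(x, new PlusMult(-2.0));    // y := y - 2.0 * x, undoing the line above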


    Vector w = new DenseVector(columns);
    for (int i = 0; i < rows; i++) {
      Vector xi = viewRow(i);
      double d = xi.dot(v);
      if (d != 0.0) {
        // accumulate the weighted row: w := w + (xi . v) * xi
        w.assign(xi, new PlusMult(d));
      }
    }
    return w;
  }
View Full Code Here


    if (numPreviousEigens > 0 && currentState.isFirstPass()) {
      updateTrainingProjectionsVector(currentState, trainingVector, numPreviousEigens - 1);
    }
    if (currentState.getActivationDenominatorSquared() == 0 || trainingVectorNorm == 0) {
      if (currentState.getActivationDenominatorSquared() == 0) {
        pseudoEigen.assign(trainingVector, new PlusMult(1));
        currentState.setHelperVector(currentState.currentTrainingProjection().clone());
        double helperNorm = currentState.getHelperVector().norm(2);
        currentState.setActivationDenominatorSquared(trainingVectorNorm * trainingVectorNorm - helperNorm * helperNorm);
      }
      return;
    }
    currentState.setActivationNumerator(pseudoEigen.dot(trainingVector));
    currentState.setActivationNumerator(
        currentState.getActivationNumerator()
            - currentState.getHelperVector().dot(currentState.currentTrainingProjection()));

    double activation = currentState.getActivationNumerator()
        / Math.sqrt(currentState.getActivationDenominatorSquared());
    currentState.setActivationDenominatorSquared(
        currentState.getActivationDenominatorSquared()
            + 2 * activation * currentState.getActivationNumerator()
            + activation * activation
                * (trainingVector.getLengthSquared() - currentState.currentTrainingProjection().getLengthSquared()));
    if (numPreviousEigens > 0) {
      currentState.getHelperVector().assign(currentState.currentTrainingProjection(), new PlusMult(activation));
    }
    pseudoEigen.assign(trainingVector, new PlusMult(activation));
  }
View Full Code Here

      if (state.getScaleFactor() <= 0) {
        state.setScaleFactor(calculateScaleFactor(nextVector));
      }
      nextVector.assign(new Scale(1.0 / state.getScaleFactor()));
      if (previousVector != null) {
        nextVector.assign(previousVector, new PlusMult(-beta));
      }
      // now orthogonalize
      double alpha = currentVector.dot(nextVector);
      nextVector.assign(currentVector, new PlusMult(-alpha));
      endTime(TimingSection.ITERATE);
      startTime(TimingSection.ORTHOGANLIZE);
      orthoganalizeAgainstAllButLast(nextVector, state);
      endTime(TimingSection.ORTHOGANLIZE);
      // and normalize
      beta = nextVector.norm(2);
      if (outOfRange(beta) || outOfRange(alpha)) {
        log.warn("Lanczos parameters out of range: alpha = {}, beta = {}.  Bailing out early!",
            alpha, beta);
        break;
      }
      nextVector.assign(new Scale(1 / beta));
      state.setBasisVector(i, nextVector);
      previousVector = currentVector;
      currentVector = nextVector;
      // save the projections and norms!
      triDiag.set(i - 1, i - 1, alpha);
      if (i < desiredRank - 1) {
        triDiag.set(i - 1, i, beta);
        triDiag.set(i, i - 1, beta);
      }
      state.setIterationNumber(++i);
    }
    startTime(TimingSection.TRIDIAG_DECOMP);

    log.info("Lanczos iteration complete - now to diagonalize the tri-diagonal auxiliary matrix.");
    // at this point, have tridiag all filled out, and basis is all filled out, and orthonormalized
    EigenDecomposition decomp = new EigenDecomposition(triDiag);

    Matrix eigenVects = decomp.getV();
    Vector eigenVals = decomp.getRealEigenvalues();
    endTime(TimingSection.TRIDIAG_DECOMP);
    startTime(TimingSection.FINAL_EIGEN_CREATE);
    for (int row = 0; row < i; row++) {
      Vector realEigen = null;
      // the eigenvectors live as columns of V, in reverse order.  Weird but true.
      Vector ejCol = eigenVects.viewColumn(i - row - 1);
      int size = Math.min(ejCol.size(), state.getBasisSize());
      for (int j = 0; j < size; j++) {
        double d = ejCol.get(j);
        Vector rowJ = state.getBasisVector(j);
        if (realEigen == null) {
          realEigen = rowJ.like();
        }
        realEigen.assign(rowJ, new PlusMult(d));
      }

      Preconditions.checkState(realEigen != null);
      assert realEigen != null;
View Full Code Here

      Vector basisVector = state.getBasisVector(i);
      double alpha;
      if (basisVector == null || (alpha = nextVector.dot(basisVector)) == 0.0) {
        continue;
      }
      nextVector.assign(basisVector, new PlusMult(-alpha));
    }
  }
View Full Code Here
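
In both Lanczos snippets above, a negative multiplier turns the same update into a projection subtraction, i.e. a single Gram-Schmidt orthogonalization step. A minimal sketch of that pattern (the vector names are illustrative, not the ones used by LanczosSolver):

    double alpha = next.dot(basis);            // component of next along basis
    next.assign(basis, new PlusMult(-alpha));  // next := next - alpha * basis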

     * Step 1: orthogonalize currentPseudoEigen by subtracting off eigen(i) * helper.get(i)
     * Step 2: zero-out the helper vector because it has already helped.
     */
    for (int i = 0; i < state.getNumEigensProcessed(); i++) {
      Vector previousEigen = previousEigens.viewRow(i);
      currentPseudoEigen.assign(previousEigen, new PlusMult(-state.getHelperVector().get(i)));
      state.getHelperVector().set(i, 0);
    }
    if (DEBUG && currentPseudoEigen.norm(2) > 0) {
      for (int i = 0; i < state.getNumEigensProcessed(); i++) {
        Vector previousEigen = previousEigens.viewRow(i);
View Full Code Here

      if (state.getScaleFactor() <= 0) {
        state.setScaleFactor(calculateScaleFactor(nextVector));
      }
      nextVector.assign(new Scale(1.0 / state.getScaleFactor()));
      if (previousVector != null) {
        nextVector.assign(previousVector, new PlusMult(-beta));
      }
      // now orthogonalize
      double alpha = currentVector.dot(nextVector);
      nextVector.assign(currentVector, new PlusMult(-alpha));
      endTime(TimingSection.ITERATE);
      startTime(TimingSection.ORTHOGANLIZE);
      orthoganalizeAgainstAllButLast(nextVector, state);
      endTime(TimingSection.ORTHOGANLIZE);
      // and normalize
      beta = nextVector.norm(2);
      if (outOfRange(beta) || outOfRange(alpha)) {
        log.warn("Lanczos parameters out of range: alpha = {}, beta = {}.  Bailing out early!",
            alpha, beta);
        break;
      }
      nextVector.assign(new Scale(1 / beta));
      state.setBasisVector(i, nextVector);
      previousVector = currentVector;
      currentVector = nextVector;
      // save the projections and norms!
      triDiag.set(i - 1, i - 1, alpha);
      if (i < desiredRank - 1) {
        triDiag.set(i - 1, i, beta);
        triDiag.set(i, i - 1, beta);
      }
      state.setIterationNumber(++i);
    }
    startTime(TimingSection.TRIDIAG_DECOMP);

    log.info("Lanczos iteration complete - now to diagonalize the tri-diagonal auxiliary matrix.");
    // at this point, have tridiag all filled out, and basis is all filled out, and orthonormalized
    EigenvalueDecomposition decomp = new EigenvalueDecomposition(triDiag);

    DoubleMatrix2D eigenVects = decomp.getV();
    DoubleMatrix1D eigenVals = decomp.getRealEigenvalues();
    endTime(TimingSection.TRIDIAG_DECOMP);
    startTime(TimingSection.FINAL_EIGEN_CREATE);
    for (int row = 0; row < i; row++) {
      Vector realEigen = null;
      // the eigenvectors live as columns of V, in reverse order.  Weird but true.
      DoubleMatrix1D ejCol = eigenVects.viewColumn(i - row - 1);
      int size = ejCol.size();
      for (int j = 0; j < size; j++) {
        double d = ejCol.get(j);
        Vector rowJ = state.getBasisVector(j);
        if (realEigen == null) {
          realEigen = rowJ.like();
        }
        realEigen.assign(rowJ, new PlusMult(d));
      }
      realEigen = realEigen.normalize();
      state.setRightSingularVector(row, realEigen);
      double e = eigenVals.get(row) * state.getScaleFactor();
      if (!isSymmetric) {
View Full Code Here


    final DoubleMatrix1D[] Crows = new DoubleMatrix1D[m];
    for (int i = m; --i >= 0;) {
      Crows[i] = C.viewRow(i);
    }

    final PlusMult fun = PlusMult.plusMult(0);

    this.elements.forEachPair(
        new IntDoubleProcedure() {
          @Override
          public boolean apply(int key, double value) {
            int i = key / columns;
            int j = key % columns;
            fun.setMultiplicator(value * alpha);
            if (transposeA) {
              Crows[j].assign(Brows[i], fun);
            } else {
              Crows[i].assign(Brows[j], fun);
            }
View Full Code Here
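
The sparse matrix-multiply snippet above builds a single PlusMult with the plusMult factory and mutates its multiplier with setMultiplicator before each row update, instead of allocating a new function object per nonzero entry. A minimal sketch of that reuse pattern (cRow and bRow stand in for the row views used above):

    PlusMult fun = PlusMult.plusMult(0);
    fun.setMultiplicator(0.5);
    cRow.assign(bRow, fun);    // cRow := cRow + 0.5 * bRow
    fun.setMultiplicator(-2.0);
    cRow.assign(bRow, fun);    // cRow := cRow - 2.0 * bRow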

            numPreviousEigens - 1);
      }
    }
    if (currentState.getActivationDenominatorSquared() == 0 || trainingVectorNorm == 0) {
      if (currentState.getActivationDenominatorSquared() == 0) {
        pseudoEigen.assign(trainingVector, new PlusMult(1));
        currentState.setHelperVector(currentState.currentTrainingProjection().clone());
        double helperNorm = currentState.getHelperVector().norm(2);
        currentState.setActivationDenominatorSquared(trainingVectorNorm * trainingVectorNorm - helperNorm * helperNorm);
      }
      return;
    }
    currentState.setActivationNumerator(pseudoEigen.dot(trainingVector));
    currentState.setActivationNumerator(
        currentState.getActivationNumerator()
            - currentState.getHelperVector().dot(currentState.currentTrainingProjection()));

    double activation = currentState.getActivationNumerator() / Math.sqrt(currentState.getActivationDenominatorSquared());
    currentState.setActivationDenominatorSquared(
        currentState.getActivationDenominatorSquared()
            + 2 * activation * currentState.getActivationNumerator()
            + activation * activation * (trainingVector.getLengthSquared() - currentState.currentTrainingProjection().getLengthSquared()));
    if (numPreviousEigens > 0) {
      currentState.getHelperVector().assign(currentState.currentTrainingProjection(), new PlusMult(activation));
    }
    pseudoEigen.assign(trainingVector, new PlusMult(activation));
  }
View Full Code Here

     * Step 1: orthogonalize currentPseudoEigen by subtracting off eigen(i) * helper.get(i)
     * Step 2: zero-out the helper vector because it has already helped.
     */
    for (int i = 0; i < state.getNumEigensProcessed(); i++) {
      Vector previousEigen = previousEigens.getRow(i);
      currentPseudoEigen.assign(previousEigen, new PlusMult(-state.getHelperVector().get(i)));
      state.getHelperVector().set(i, 0);
    }
    if (DEBUG && currentPseudoEigen.norm(2) > 0) {
      for (int i = 0; i < state.getNumEigensProcessed(); i++) {
        Vector previousEigen = previousEigens.getRow(i);
View Full Code Here
