Package org.apache.hama.commons.math

Examples of org.apache.hama.commons.math.DoubleMatrix


    // copy the input into slots 1..n; slot 0 is reserved for the bias entry
    for (int i = 0; i < inputInstance.getDimension(); ++i) {
      internalInstance.set(i + 1, inputInstance.get(i));
    }
    DoubleFunction squashingFunction = model
        .getSquashingFunction(inputLayer);
    // layer 0 applies the encode weights; any other layer applies the decode weights
    DoubleMatrix weightMatrix = null;
    if (inputLayer == 0) {
      weightMatrix = this.getEncodeWeightMatrix();
    } else {
      weightMatrix = this.getDecodeWeightMatrix();
    }
    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(internalInstance);
    vec = vec.applyToElements(squashingFunction);
    return vec;
  }
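The multiply-then-squash pattern above is easy to exercise on its own. Below is a minimal sketch, assuming the array-based DenseDoubleMatrix/DenseDoubleVector constructors and a hand-rolled sigmoid standing in for the model's squashing function:

    // 2x3 weight matrix and a 3-dimensional, already bias-augmented input
    DoubleMatrix weights = new DenseDoubleMatrix(new double[][] {
        { 0.1, -0.2, 0.3 },
        { 0.4, 0.5, -0.6 } });
    DoubleVector input = new DenseDoubleVector(new double[] { 1.0, 0.5, -1.0 });

    // hand-rolled sigmoid; the derivative is written in terms of the output
    DoubleFunction sigmoid = new DoubleFunction() {
      @Override
      public double apply(double value) {
        return 1.0 / (1.0 + Math.exp(-value));
      }

      @Override
      public double applyDerivative(double value) {
        return value * (1 - value);
      }
    };

    // yields a 2-dimensional vector of squashed activations
    DoubleVector activations = weights.multiplyVectorUnsafe(input)
        .applyToElements(sigmoid);

The Unsafe suffix suggests the multiplication skips dimension checks, so the caller is responsible for matching the vector's dimension to the matrix's column count.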


      int sizePrevLayer = this.layerSizeList.get(layerIdx - 1);
      // the row count equals the size of the current layer and the column
      // count equals the size of the previous layer; non-final layers reserve
      // one slot for the bias neuron, which has no incoming weights
      int row = isFinalLayer ? size : size - 1;
      int col = sizePrevLayer;
      DoubleMatrix weightMatrix = new DenseDoubleMatrix(row, col);
      // initialize weights
      weightMatrix.applyToElements(new DoubleFunction() {
        @Override
        public double apply(double value) {
          // uniform random weight in [-0.5, 0.5)
          return RandomUtils.nextDouble() - 0.5;
        }

        @Override
        public double applyDerivative(double value) {
          // never used during initialization
          throw new UnsupportedOperationException();
        }
      });
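The same initialization can be written with plain loops and java.util.Random instead of applyToElements and commons-lang's RandomUtils; a small sketch:

    // fill a 3x4 weight matrix with uniform random values in [-0.5, 0.5)
    DoubleMatrix weights = new DenseDoubleMatrix(3, 4);
    java.util.Random rnd = new java.util.Random();
    for (int i = 0; i < weights.getRowCount(); ++i) {
      for (int j = 0; j < weights.getColumnCount(); ++j) {
        weights.set(i, j, rnd.nextDouble() - 0.5);
      }
    }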

   * Update each weight matrix by adding the corresponding update matrix.
   *
   * @param matrices The per-layer updates, one per existing weight matrix.
   */
  public void updateWeightMatrices(DoubleMatrix[] matrices) {
    for (int i = 0; i < matrices.length; ++i) {
      DoubleMatrix matrix = this.weightMatrixList.get(i);
      this.weightMatrixList.set(i, matrix.add(matrices[i]));
    }
  }
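Note that the loop reassigns the result of add back into the list rather than relying on in-place mutation, which implies DoubleMatrix.add returns a fresh matrix. A quick sketch of that reading of the contract:

    DoubleMatrix weights = new DenseDoubleMatrix(2, 2, 1.0); // every entry 1.0
    DoubleMatrix update = new DenseDoubleMatrix(2, 2, 0.5);  // every entry 0.5
    DoubleMatrix updated = weights.add(update);              // every entry 1.5
    // weights itself is left unchanged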

    // read weights and construct matrices of previous updates
    int numOfMatrices = input.readInt();
    this.weightMatrixList = Lists.newArrayList();
    this.prevWeightUpdatesList = Lists.newArrayList();
    for (int i = 0; i < numOfMatrices; ++i) {
      DoubleMatrix matrix = MatrixWritable.read(input);
      this.weightMatrixList.add(matrix);
      // previous updates start as zero matrices of the same shape; they feed
      // the momentum term during weight updates
      this.prevWeightUpdatesList.add(new DenseDoubleMatrix(
          matrix.getRowCount(), matrix.getColumnCount()));
    }

  }
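For completeness, the write side would mirror this loop. A sketch, assuming MatrixWritable also exposes a static write(DoubleMatrix, DataOutput) counterpart to read:

    output.writeInt(this.weightMatrixList.size());
    for (DoubleMatrix matrix : this.weightMatrixList) {
      MatrixWritable.write(matrix, output);
    }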

   * @param fromLayer The index of the previous layer.
   * @param intermediateOutput The intermediate output of the previous layer.
   * @return The output of the current layer, with a bias entry prepended.
   */
  protected DoubleVector forward(int fromLayer, DoubleVector intermediateOutput) {
    DoubleMatrix weightMatrix = this.weightMatrixList.get(fromLayer);

    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(intermediateOutput);
    vec = vec.applyToElements(this.squashingFunctionList.get(fromLayer));

    // prepend the bias entry: slot 0 is fixed to 1 and the activations follow
    DoubleVector vecWithBias = new DenseDoubleVector(vec.getDimension() + 1);
    vecWithBias.set(0, 1);
    for (int i = 0; i < vec.getDimension(); ++i) {
      vecWithBias.set(i + 1, vec.get(i));
    }
    return vecWithBias;
  }
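The bias-prepending step is self-contained enough to extract into a helper; prependBias below is a hypothetical name, not part of Hama:

    // hypothetical helper: bias value 1 at slot 0, activations at slots 1..n
    static DoubleVector prependBias(DoubleVector vec) {
      DoubleVector withBias = new DenseDoubleVector(vec.getDimension() + 1);
      withBias.set(0, 1);
      for (int i = 0; i < vec.getDimension(); ++i) {
        withBias.set(i + 1, vec.get(i));
      }
      return withBias;
    }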

    // the delta vector is as wide as the output layer
    DoubleVector deltaVec = new DenseDoubleVector(
        this.layerSizeList.get(this.layerSizeList.size() - 1));

    DoubleFunction squashingFunction = this.squashingFunctionList
        .get(this.squashingFunctionList.size() - 1);

    DoubleMatrix lastWeightMatrix = this.weightMatrixList
        .get(this.weightMatrixList.size() - 1);
    for (int i = 0; i < deltaVec.getDimension(); ++i) {
      double costFuncDerivative = this.costFunction.applyDerivative(
          labels.get(i), output.get(i + 1));
      // add the regularization penalty, then scale by the derivative of the
      // squashing function to obtain the delta of output neuron i
      costFuncDerivative += this.regularizationWeight
          * lastWeightMatrix.getRowVector(i).sum();
      deltaVec.set(i, costFuncDerivative
          * squashingFunction.applyDerivative(output.get(i + 1)));
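The regularization term above is just the sum of the weights feeding output neuron i. In isolation:

    DoubleMatrix lastWeights = new DenseDoubleMatrix(2, 3, 0.5); // every weight 0.5
    double rowSum = lastWeights.getRowVector(0).sum();           // 3 * 0.5 = 1.5
    double penalty = 0.01 * rowSum; // with regularizationWeight = 0.01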

    // get layer-related information
    DoubleFunction squashingFunction = this.squashingFunctionList
        .get(curLayerIdx);
    DoubleVector curLayerOutput = outputCache.get(curLayerIdx);
    DoubleMatrix weightMatrix = this.weightMatrixList.get(curLayerIdx);
    DoubleMatrix prevWeightMatrix = this.prevWeightUpdatesList.get(curLayerIdx);

    // if the next layer is not the output layer, drop the delta of its bias neuron
    if (curLayerIdx != this.layerSizeList.size() - 2) {
      nextLayerDelta = nextLayerDelta.slice(1,
          nextLayerDelta.getDimension() - 1);
    }

    DoubleVector delta = weightMatrix.transpose()
        .multiplyVector(nextLayerDelta);
    for (int i = 0; i < delta.getDimension(); ++i) {
      delta.set(
          i,
          delta.get(i)
              * squashingFunction.applyDerivative(curLayerOutput.get(i)));
    }

    // compute the weight updates: a gradient-descent step scaled by the
    // learning rate plus a momentum term from the previous update
    for (int i = 0; i < weightUpdateMatrix.getRowCount(); ++i) {
      for (int j = 0; j < weightUpdateMatrix.getColumnCount(); ++j) {
        weightUpdateMatrix.set(i, j,
            -learningRate * nextLayerDelta.get(i) * curLayerOutput.get(j)
                + this.momentumWeight * prevWeightMatrix.get(i, j));
      }
    }

    return delta;
  }
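The heart of the delta propagation is transpose().multiplyVector(). A standalone sketch with concrete dimensions, assuming the double[][] constructor:

    // back-propagate a 2-dimensional delta through a 2x3 weight matrix
    DoubleMatrix weights = new DenseDoubleMatrix(new double[][] {
        { 0.1, 0.2, 0.3 },
        { 0.4, 0.5, 0.6 } });
    DoubleVector nextDelta = new DenseDoubleVector(new double[] { 1.0, -1.0 });
    DoubleVector prevDelta = weights.transpose().multiplyVector(nextDelta);
    // prevDelta has dimension 3: { -0.3, -0.3, -0.3 }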

    int offset = 1;
    while (offset < matrix_size) {
      slices.add(vc.slice(offset, matrix_rank));
      offset += matrix_rank;
    }
    // pass a typed array to toArray: casting the raw Object[] returned by
    // toArray() to DoubleVector[] would throw a ClassCastException at runtime
    DoubleMatrix res = new DenseDoubleMatrix(
        slices.toArray(new DoubleVector[slices.size()]));
    return res;
  }
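The typed toArray overload is what makes the DoubleVector[] constructor usable here; a tiny demonstration of the pattern:

    java.util.List<DoubleVector> rows = new java.util.ArrayList<DoubleVector>();
    rows.add(new DenseDoubleVector(new double[] { 1, 2, 3 }));
    rows.add(new DenseDoubleVector(new double[] { 4, 5, 6 }));
    // toArray() alone returns Object[], which cannot be cast to DoubleVector[]
    DoubleMatrix m = new DenseDoubleMatrix(rows.toArray(new DoubleVector[rows.size()]));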

    // ν_ld and μ_lc (V) are matrices, but 2τ * y_bd (α_al + μ_l: * x_a:)(r − R)
    // (M below) is a vector; in order to add the vector's values into the
    // matrix, we assemble the scaled rows into the matrix Mtransposed and add
    // that matrix back.
    DoubleMatrix tmp = null;
    if (isAvailableItemFeature) {
      DoubleVector[] Mtransposed = new DenseDoubleVector[rank];
      for (int i = 0; i < rank; i++) {
        Mtransposed[i] = e.itemFeatureFactorized.getRowVector(i).multiply(aal_ml_xa.get(i));
      }
      tmp = new DenseDoubleMatrix(Mtransposed);
      tmp = tmp.multiply(2 * TETTA * scoreDifference);
      res.itemFeatureFactorized = e.itemFeatureFactorized.add(tmp);
    }

    if (isAvailableUserFeature) {
      DoubleVector[] Mtransposed = new DenseDoubleVector[rank];
      for (int i = 0; i < rank; i++) {
        Mtransposed[i] = e.userFeatureFactorized.getRowVector(i).multiply(bbl_vl_yb.get(i));
      }
      tmp = new DenseDoubleMatrix(Mtransposed);
      tmp = tmp.multiply(2 * TETTA * scoreDifference);
      res.userFeatureFactorized = e.userFeatureFactorized.add(tmp);
    }
    return res;
  }
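The per-row scaling applied to both factor matrices reduces to a few lines. A sketch with made-up coefficients standing in for aal_ml_xa and 2τ(r − R):

    DoubleMatrix factors = new DenseDoubleMatrix(2, 2, 1.0);
    DoubleVector coefficients = new DenseDoubleVector(new double[] { 0.5, 2.0 });
    DoubleVector[] scaledRows = new DoubleVector[2];
    for (int i = 0; i < 2; i++) {
      scaledRows[i] = factors.getRowVector(i).multiply(coefficients.get(i));
    }
    DoubleMatrix update = new DenseDoubleMatrix(scaledRows).multiply(0.1);
    DoubleMatrix result = factors.add(update);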

    // the peer in the middle of the peer list acts as the master
    String master = peer.getPeerName(peer.getNumPeers() / 2);
    peer.send(master, msg);
    peer.sync();

    // normalize
    DoubleMatrix res = null;
    if (peer.getPeerName().equals(master)) {
      res = new DenseDoubleMatrix(featureMatrix.getRowCount(),
                                  featureMatrix.getColumnCount(), 0);
      int incomingMsgCount = 0;
      while ((msg = peer.getCurrentMessage()) != null) {
        MatrixWritable tmp = (MatrixWritable) msg.get(msgFeatureMatrix);
        // add and divide return new matrices, so the results must be reassigned
        res = res.add(tmp.getMatrix());
        incomingMsgCount++;
      }
      res = res.divide(incomingMsgCount);
    }

    if (broadcast) {
      if (peer.getPeerName().equals(master)) {
        // broadcast to all
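Stripped of the BSP messaging, the master's aggregation is a sum followed by a division. A sketch, again assuming add and divide return new matrices:

    DoubleMatrix sum = new DenseDoubleMatrix(2, 2, 0);
    DoubleMatrix[] contributions = {
        new DenseDoubleMatrix(2, 2, 1.0),
        new DenseDoubleMatrix(2, 2, 3.0) };
    for (DoubleMatrix contribution : contributions) {
      sum = sum.add(contribution); // reassign: add returns a new matrix
    }
    DoubleMatrix average = sum.divide(contributions.length); // every entry 2.0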


