Package org.apache.hama.commons.math

Examples of org.apache.hama.commons.math.DoubleVector$DoubleVectorElement

The snippets below, drawn from Apache Hama's machine-learning code, show DoubleVector in use: feed-forward and back-propagation in a small layered neural network, matrix-to-vector (de)serialization, and an online collaborative-filtering gradient update.


  /**
   * Calculate the output internally, caching the intermediate output of each
   * layer.
   * @param instanceWithBias The instance, with a bias term prepended.
   * @return Cached output of each layer.
   */
  public List<DoubleVector> getOutputInternal(DoubleVector instanceWithBias) {
    List<DoubleVector> outputCache = new ArrayList<DoubleVector>();
    // fill with instance
    DoubleVector intermediateOutput = instanceWithBias;
    outputCache.add(intermediateOutput);

    for (int i = 0; i < this.layerSizeList.size() - 1; ++i) {
      intermediateOutput = forward(i, intermediateOutput);
      outputCache.add(intermediateOutput);
    }
    return outputCache;
  }


  /**
   * Forward the calculation for one layer.
   * @param fromLayer The index of the layer the input comes from.
   * @param intermediateOutput The output of the previous layer.
   * @return The output of the current layer, with a bias term prepended.
   */
  protected DoubleVector forward(int fromLayer, DoubleVector intermediateOutput) {
    DoubleMatrix weightMatrix = this.weightMatrixList.get(fromLayer);

    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(intermediateOutput);
    vec = vec.applyToElements(this.squashingFunctionList.get(fromLayer));

    // add bias
    DoubleVector vecWithBias = new DenseDoubleVector(vec.getDimension() + 1);
    vecWithBias.set(0, 1);
    for (int i = 0; i < vec.getDimension(); ++i) {
      vecWithBias.set(i + 1, vec.get(i));
    }
    return vecWithBias;
  }
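
The forward step combines a matrix-vector product, an element-wise squashing function, and a freshly prepended bias entry. A minimal sketch of the same pattern, assuming only the DenseDoubleVector and DoubleFunction APIs used above (the sigmoid here is an illustrative stand-in, not necessarily one of Hama's built-in functions):

  // Illustrative sigmoid squashing function (assumed to subclass the
  // DoubleFunction type passed to applyToElements above).
  DoubleFunction sigmoid = new DoubleFunction() {
    @Override
    public double apply(double value) {
      return 1.0 / (1.0 + Math.exp(-value));
    }

    @Override
    public double applyDerivative(double value) {
      return value * (1.0 - value); // derivative expressed in terms of the output
    }
  };

  DoubleVector net = new DenseDoubleVector(2);
  net.set(0, 0.5);
  net.set(1, -0.25);
  DoubleVector squashed = net.applyToElements(sigmoid);

  // prepend the bias entry, exactly as forward() does
  DoubleVector withBias = new DenseDoubleVector(squashed.getDimension() + 1);
  withBias.set(0, 1);
  for (int i = 0; i < squashed.getDimension(); ++i) {
    withBias.set(i + 1, squashed.get(i));
  }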

    this.updateWeightMatrices(updateMatrices);
  }

  @Override
  public DoubleMatrix[] trainByInstance(DoubleVector trainingInstance) {
    DoubleVector transformedVector = this.featureTransformer
        .transform(trainingInstance.sliceUnsafe(this.layerSizeList.get(0) - 1));

    int inputDimension = this.layerSizeList.get(0) - 1;
    int outputDimension;
    DoubleVector inputInstance = null;
    DoubleVector labels = null;
    if (this.learningStyle == LearningStyle.SUPERVISED) {
      outputDimension = this.layerSizeList.get(this.layerSizeList.size() - 1);
      // validate training instance
      Preconditions.checkArgument(
          inputDimension + outputDimension == trainingInstance.getDimension(),
          String
              .format(
                  "The dimension of training instance is %d, but requires %d.",
                  trainingInstance.getDimension(), inputDimension
                      + outputDimension));

      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
      inputInstance.set(0, 1); // add bias
      // get the features from the transformed vector
      for (int i = 0; i < inputDimension; ++i) {
        inputInstance.set(i + 1, transformedVector.get(i));
      }
      // get the labels from the original training instance
      labels = trainingInstance.sliceUnsafe(inputInstance.getDimension() - 1,
          trainingInstance.getDimension() - 1);
    } else if (this.learningStyle == LearningStyle.UNSUPERVISED) {
      // labels are identical to input features
      outputDimension = inputDimension;
      // validate training instance
      Preconditions.checkArgument(inputDimension == trainingInstance
          .getDimension(), String.format(
          "The dimension of training instance is %d, but requires %d.",
          trainingInstance.getDimension(), inputDimension));

      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
      inputInstance.set(0, 1); // add bias
      // get the features from the transformed vector
      for (int i = 0; i < inputDimension; ++i) {
        inputInstance.set(i + 1, transformedVector.get(i));
      }
      // get the labels by copying the transformed vector
      labels = transformedVector.deepCopy();
    }

    List<DoubleVector> internalResults = this.getOutputInternal(inputInstance);
    DoubleVector output = internalResults.get(internalResults.size() - 1);

    // get the training error
    calculateTrainingError(labels,
        output.deepCopy().sliceUnsafe(1, output.getDimension() - 1));

    if (this.trainingMethod.equals(TrainingMethod.GRADIENT_DESCENT)) {
      return this.trainByInstanceGradientDescent(labels, internalResults);
    } else {
      throw new IllegalArgumentException(
          "Training method is not supported.");
    }
  }
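
trainByInstance expects a supervised instance to carry the features and the labels concatenated in a single vector; the bias is added internally. A hedged sketch of assembling such an instance for a hypothetical network with two inputs and one output:

  int inputDimension = 2;  // illustrative sizes, not tied to a real model
  int outputDimension = 1;
  DoubleVector instance = new DenseDoubleVector(inputDimension + outputDimension);
  instance.set(0, 0.3); // feature 1
  instance.set(1, 0.7); // feature 2
  instance.set(2, 1.0); // label
  // DoubleMatrix[] updates = model.trainByInstance(instance);
  // ('model' is a hypothetical, already-configured network instance)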

  /**
   * Train a single instance by gradient descent.
   * @return The weight update matrices.
   */
  private DoubleMatrix[] trainByInstanceGradientDescent(DoubleVector labels,
      List<DoubleVector> internalResults) {

    DoubleVector output = internalResults.get(internalResults.size() - 1);
    // initialize weight update matrices
    DenseDoubleMatrix[] weightUpdateMatrices = new DenseDoubleMatrix[this.weightMatrixList
        .size()];
    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
      weightUpdateMatrices[m] = new DenseDoubleMatrix(this.weightMatrixList
          .get(m).getRowCount(), this.weightMatrixList.get(m).getColumnCount());
    }
    DoubleVector deltaVec = new DenseDoubleVector(
        this.layerSizeList.get(this.layerSizeList.size() - 1));

    DoubleFunction squashingFunction = this.squashingFunctionList
        .get(this.squashingFunctionList.size() - 1);

    DoubleMatrix lastWeightMatrix = this.weightMatrixList
        .get(this.weightMatrixList.size() - 1);
    for (int i = 0; i < deltaVec.getDimension(); ++i) {
      double costFuncDerivative = this.costFunction.applyDerivative(
          labels.get(i), output.get(i + 1));
      // add regularization
      costFuncDerivative += this.regularizationWeight
          * lastWeightMatrix.getRowVector(i).sum();
      deltaVec.set(i, costFuncDerivative
          * squashingFunction.applyDerivative(output.get(i + 1)));
    }

    // start from previous layer of output layer
    for (int layer = this.layerSizeList.size() - 2; layer >= 0; --layer) {
      deltaVec = backpropagate(layer, deltaVec, internalResults,
          weightUpdateMatrices[layer]);
    }

    return weightUpdateMatrices;
  }
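
The output-layer delta computed above is the cost derivative, plus a regularization term summed over the neuron's incoming weights, scaled by the squashing function's derivative. A small arithmetic illustration with made-up values:

  double costDerivative = -0.4;        // costFunction.applyDerivative(label, output)
  double regularization = 0.01 * 1.5;  // regularizationWeight * weight-row sum
  double squashDerivative = 0.2;       // squashingFunction.applyDerivative(output)
  double delta = (costDerivative + regularization) * squashDerivative; // -0.077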

    return true;
  }

  private DoubleMatrix convertVectorWritable(VectorWritable value) {
    // format of the backing array: matrix_rank, matrix_converted_to_vector
    DoubleVector vc = value.getVector();
    int matrixRank = (int) vc.get(0);
    int matrixSize = vc.getLength() - 1;
    LinkedList<DoubleVector> slices = new LinkedList<DoubleVector>();
    int offset = 1;
    while (offset <= matrixSize) {
      // slice over an inclusive [start, end] range, one matrix row at a time
      slices.add(vc.slice(offset, offset + matrixRank - 1));
      offset += matrixRank;
    }
    // toArray() without an argument returns Object[], so pass a typed array
    return new DenseDoubleMatrix(slices.toArray(new DoubleVector[slices.size()]));
  }
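
The inverse operation, packing a matrix into the [rank, row-major values] layout that convertVectorWritable reads, could look like the following sketch; it assumes only the DoubleMatrix accessors (getRowCount, getColumnCount, getRowVector) and the VectorWritable constructor that appear elsewhere in these snippets:

  private VectorWritable convertMatrix(DoubleMatrix matrix) {
    int rank = matrix.getColumnCount();
    DoubleVector packed = new DenseDoubleVector(1 + matrix.getRowCount() * rank);
    packed.set(0, rank); // first entry stores the rank
    int offset = 1;
    for (int row = 0; row < matrix.getRowCount(); ++row) {
      DoubleVector rowVector = matrix.getRowVector(row);
      for (int col = 0; col < rank; ++col) {
        packed.set(offset++, rowVector.get(col));
      }
    }
    return new VectorWritable(packed);
  }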

  private DoubleVector backpropagate(int curLayerIdx,
      DoubleVector nextLayerDelta, List<DoubleVector> outputCache,
      DenseDoubleMatrix weightUpdateMatrix) {

    // get layer related information
    DoubleFunction squashingFunction = this.squashingFunctionList
        .get(curLayerIdx);
    DoubleVector curLayerOutput = outputCache.get(curLayerIdx);
    DoubleMatrix weightMatrix = this.weightMatrixList.get(curLayerIdx);
    DoubleMatrix prevWeightMatrix = this.prevWeightUpdatesList.get(curLayerIdx);

    // next layer is not output layer, remove the delta of bias neuron
    if (curLayerIdx != this.layerSizeList.size() - 2) {
      nextLayerDelta = nextLayerDelta.slice(1,
          nextLayerDelta.getDimension() - 1);
    }

    DoubleVector delta = weightMatrix.transpose()
        .multiplyVector(nextLayerDelta);
    for (int i = 0; i < delta.getDimension(); ++i) {
      delta.set(
          i,
          delta.get(i)
              * squashingFunction.applyDerivative(curLayerOutput.get(i)));
    }

    // update weights
    for (int i = 0; i < weightUpdateMatrix.getRowCount(); ++i) {
      // ... (per-weight update using nextLayerDelta, curLayerOutput, the
      // learning rate, and momentum; elided in this snippet)
    }

    return delta;
  }

  @Override
  protected void calculateTrainingError(DoubleVector labels, DoubleVector output) {
    DoubleVector errors = labels.deepCopy().applyToElements(output,
        this.costFunction);
    this.trainingError = errors.sum();
  }
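
The cost function here is applied pairwise over the label and output vectors. Assuming DoubleDoubleFunction is the two-argument counterpart of DoubleFunction (as the costFunction.applyDerivative call above suggests), a squared-error cost might be sketched as:

  DoubleDoubleFunction squaredError = new DoubleDoubleFunction() {
    @Override
    public double apply(double target, double actual) {
      double diff = target - actual;
      return 0.5 * diff * diff;
    }

    @Override
    public double applyDerivative(double target, double actual) {
      return actual - target; // derivative w.r.t. the actual output
    }
  };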

    VectorWritable usr1 = this.modelUserFactorizedValues.get(Long.valueOf(user1));
    VectorWritable usr2 = this.modelUserFactorizedValues.get(Long.valueOf(user2));
    if (usr1 == null || usr2 == null) {
      return Double.MAX_VALUE;
    }
   
    DoubleVector usr1Vector = usr1.getVector();
    DoubleVector usr2Vector = usr2.getVector();
   
    // Euclidean distance
    return Math.pow(usr1Vector
        .subtract(usr2Vector)
        .applyToElements(new SquareVectorFunction())
        .sum(), 0.5);
  }

    VectorWritable itm1 = this.modelItemFactorizedValues.get(Long.valueOf(item1));
    VectorWritable itm2 = this.modelItemFactorizedValues.get(Long.valueOf(item2));
    if (itm1 == null || itm2 == null) {
      return Double.MAX_VALUE;
    }
   
    DoubleVector itm1Vector = itm1.getVector();
    DoubleVector itm2Vector = itm2.getVector();
   
    // Euclidean distance
    return Math.pow(itm1Vector
        .subtract(itm2Vector)
        .applyToElements(new SquareVectorFunction())
        .sum(), 0.5);
  }
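
Both similarity methods square each element of the difference vector before summing. If SquareVectorFunction is a DoubleFunction like the squashing functions earlier (an assumption; its source is not shown here), it could be as simple as:

  public class SquareVectorFunction extends DoubleFunction {
    @Override
    public double apply(double value) {
      return value * value;
    }

    @Override
    public double applyDerivative(double value) {
      return 2 * value;
    }
  }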

    int rank = e.user.getVector().getLength();
    if (zeroVector == null) {
      zeroVector = new DenseDoubleVector(rank, 0);
    }
    // below vectors are all size of MATRIX_RANK
    DoubleVector vl_yb_item = zeroVector;
    DoubleVector ml_xa_user = zeroVector;
    DoubleVector bbl_vl_yb = null;
    DoubleVector aal_ml_xa = null;

    boolean isAvailableUserFeature = (e.userFeatures != null);
    boolean isAvailableItemFeature = (e.itemFeatures != null);

    if (isAvailableItemFeature) {
      DoubleVector yb = e.itemFeatures.getVector();
      vl_yb_item = e.itemFeatureFactorized.multiplyVector(yb);
    }
   
    if (isAvailableUserFeature) {
      DoubleVector xa = e.userFeatures.getVector();
      ml_xa_user = e.userFeatureFactorized.multiplyVector(xa);
    }
   
    bbl_vl_yb = e.item.getVector().add(vl_yb_item);
    aal_ml_xa = e.user.getVector().add(ml_xa_user);
   
    // calculated score
    double calculatedScore = aal_ml_xa.multiply(bbl_vl_yb).sum();
    double expectedScore = e.expectedScore.get();
    double scoreDifference = expectedScore - calculatedScore;
   
    // β_bl ← β_bl + 2τ * (α_al + μ_l: * x_a:)(r − R)
    // items ← item + itemFactorization (will be used later)
    DoubleVector itemFactorization = aal_ml_xa.multiply(2 * TETTA * scoreDifference);
    DoubleVector items = e.item.getVector().add(itemFactorization);
    res.itemFactorized = new VectorWritable(items);
   
    // α_al ← α_al + 2τ * (β_bl + ν_l: * y_b:)(r − R)
    // users ← user + userFactorization (will be used later)
    DoubleVector userFactorization = bbl_vl_yb.multiply(2 * TETTA * scoreDifference);
    DoubleVector users = e.user.getVector().add(userFactorization);
    res.userFactorized = new VectorWritable(users);

    // for d = 1 to D do:
    //   ν_ld ← ν_ld + 2τ * y_bd (α_al + μ_l: * x_a:)(r − R)
    // for c = 1 to C do:
    //   μ_lc ← μ_lc + 2τ * x_ac (β_bl + ν_l: * y_b:)(r − R)
    // ... (feature-factorization matrix updates elided in this snippet)
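
The predicted score at the heart of this update is a plain dot product, computed above as multiply(...).sum(). A tiny numeric check with rank-2 vectors:

  DoubleVector user = new DenseDoubleVector(2);
  user.set(0, 0.5);
  user.set(1, 2.0);
  DoubleVector item = new DenseDoubleVector(2);
  item.set(0, 4.0);
  item.set(1, 0.25);
  double score = user.multiply(item).sum(); // 0.5*4.0 + 2.0*0.25 = 2.5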
