Package org.apache.hama.ml.math

Examples of org.apache.hama.ml.math.DenseDoubleMatrix

The snippets below are taken from the multi-layer perceptron training code in Apache Hama's ML module. They show DenseDoubleMatrix used to hold the per-layer weight and weight-update matrices manipulated during backpropagation.

  DenseDoubleMatrix[] trainByInstance(DoubleVector trainingInstance)
      throws Exception {
    // initialize weight update matrices
    DenseDoubleMatrix[] weightUpdateMatrices = new DenseDoubleMatrix[this.layerSizeArray.length - 1];
    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
      weightUpdateMatrices[m] = new DenseDoubleMatrix(
          this.layerSizeArray[m] + 1, this.layerSizeArray[m + 1]);
    }

    if (trainingInstance == null) {
      return weightUpdateMatrices;
    }

    double[] trainingVec = trainingInstance.toArray();
    double[] trainingFeature = Arrays.copyOfRange(trainingVec, 0,
        this.layerSizeArray[0]);
    double[] trainingLabels = Arrays.copyOfRange(trainingVec,
        this.layerSizeArray[0], trainingVec.length);

    DoubleVector trainingFeatureVec = new DenseDoubleVector(trainingFeature);
    List<double[]> outputCache = this.outputInternal(trainingFeatureVec);

    // calculate the delta of output layer
    double[] delta = new double[this.layerSizeArray[this.layerSizeArray.length - 1]];
    double[] outputLayerOutput = outputCache.get(outputCache.size() - 1);
    double[] lastHiddenLayerOutput = outputCache.get(outputCache.size() - 2);

    // previous update for the output layer; presumably applied as a momentum
    // term in the elided remainder of the method
    DenseDoubleMatrix prevWeightUpdateMatrix = this.prevWeightUpdateMatrices[this.prevWeightUpdateMatrices.length - 1];
    for (int j = 0; j < delta.length; ++j) {
      delta[j] = this.costFunction.applyDerivative(trainingLabels[j],
          outputLayerOutput[j]);
      // add the regularization term's contribution to the error derivative
      if (this.regularization != 0.0) {
        double derivativeRegularization = 0.0;
        DenseDoubleMatrix weightMatrix = this.weightMatrice[this.weightMatrice.length - 1];
        for (int k = 0; k < this.layerSizeArray[this.layerSizeArray.length - 1]; ++k) {
          derivativeRegularization += weightMatrix.get(k, j);
        }
        derivativeRegularization /= this.layerSizeArray[this.layerSizeArray.length - 1];
        delta[j] += this.regularization * derivativeRegularization;
      }

      // ... remainder of trainByInstance elided in the original listing
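
The packing convention above — the first layerSizeArray[0] entries of a training instance are features, the rest are labels — can be exercised on its own. A minimal sketch (the class name and sample values are invented for illustration; toArray and the DenseDoubleVector constructor appear in the snippet above):

import java.util.Arrays;

import org.apache.hama.ml.math.DenseDoubleVector;
import org.apache.hama.ml.math.DoubleVector;

public class InstanceSplitDemo {
  public static void main(String[] args) {
    int inputLayerSize = 3; // plays the role of layerSizeArray[0]
    // a training instance: 3 features followed by 2 labels
    DoubleVector instance = new DenseDoubleVector(
        new double[] { 0.1, 0.7, 0.3, 1.0, 0.0 });
    double[] vec = instance.toArray();
    double[] features = Arrays.copyOfRange(vec, 0, inputLayerSize);
    double[] labels = Arrays.copyOfRange(vec, inputLayerSize, vec.length);
    System.out.println("features = " + Arrays.toString(features));
    System.out.println("labels   = " + Arrays.toString(labels));
  }
}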


A second fragment from the same class computes the delta of a hidden layer during backpropagation; curLayerIdx, nextLayerDelta, and outputCache carry over from the elided surrounding method:

    int prevLayerIdx = curLayerIdx - 1;
    double[] delta = new double[this.layerSizeArray[curLayerIdx]];
    double[] curLayerOutput = outputCache.get(curLayerIdx);
    double[] prevLayerOutput = outputCache.get(prevLayerIdx);

    DenseDoubleMatrix prevWeightUpdateMatrix = this.prevWeightUpdateMatrices[curLayerIdx - 1];
    // for each neuron j in the current layer, calculate the delta
    for (int j = 0; j < delta.length; ++j) {
      // aggregate delta from next layer
      for (int k = 0; k < nextLayerDelta.length; ++k) {
        double weight = this.weightMatrice[curLayerIdx].get(j, k);
        // ... remainder of the hidden-layer delta computation elided in the original listing
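
The loop above implements the standard backpropagation recurrence for hidden layers: each delta is the derivative of the squashing function times the weighted sum of the next layer's deltas. A self-contained sketch of that computation, assuming a sigmoid activation and the set accessor of Hama's DoubleMatrix interface (all values are invented for illustration):

import org.apache.hama.ml.math.DenseDoubleMatrix;

public class HiddenDeltaDemo {
  public static void main(String[] args) {
    // weights from a 3-neuron current layer to a 2-neuron next layer
    DenseDoubleMatrix weights = new DenseDoubleMatrix(3, 2);
    weights.set(0, 0, 0.1);  weights.set(0, 1, -0.2);
    weights.set(1, 0, 0.4);  weights.set(1, 1, 0.3);
    weights.set(2, 0, -0.5); weights.set(2, 1, 0.2);

    double[] curLayerOutput = { 0.6, 0.4, 0.7 }; // cached sigmoid outputs
    double[] nextLayerDelta = { 0.05, -0.02 };

    double[] delta = new double[curLayerOutput.length];
    for (int j = 0; j < delta.length; ++j) {
      double sum = 0.0;
      // aggregate delta from the next layer, as in the fragment above
      for (int k = 0; k < nextLayerDelta.length; ++k) {
        sum += weights.get(j, k) * nextLayerDelta[k];
      }
      // sigmoid derivative written in terms of the cached output: o * (1 - o)
      delta[j] = curLayerOutput[j] * (1 - curLayerOutput[j]) * sum;
      System.out.printf("delta[%d] = %f%n", j, delta[j]);
    }
  }
}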

  /**
   * Create an array of zero-filled matrices, one per pair of adjacent layers,
   * matching the dimensions of the network's weight matrices.
   */
  private DenseDoubleMatrix[] getZeroWeightMatrices() {
    DenseDoubleMatrix[] weightUpdateCache = new DenseDoubleMatrix[this.layerSizeArray.length - 1];
    // one zero matrix per pair of adjacent layers; the +1 row holds the bias weights
    for (int i = 0; i < weightUpdateCache.length; ++i) {
      weightUpdateCache[i] = new DenseDoubleMatrix(this.layerSizeArray[i] + 1,
          this.layerSizeArray[i + 1]);
    }
    return weightUpdateCache;
  }
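
A fresh DenseDoubleMatrix is zero-filled, which is what makes this method work: each call returns matrices ready to accumulate a new round of weight updates. A minimal sketch of that accumulate-then-reset cycle (the gradient values and update scheme are invented for illustration; only the constructor and get/set shown above are used):

import org.apache.hama.ml.math.DenseDoubleMatrix;

public class AccumulateDemo {
  public static void main(String[] args) {
    int[] layerSizeArray = { 2, 3 };
    // same shape as the first weight matrix: (2 + 1) rows x 3 columns
    DenseDoubleMatrix updates = new DenseDoubleMatrix(
        layerSizeArray[0] + 1, layerSizeArray[1]);

    // accumulate two hypothetical per-instance gradients into row 0
    double[][] gradients = { { 0.01, -0.02, 0.03 }, { 0.02, 0.01, -0.01 } };
    for (double[] gradient : gradients) {
      for (int col = 0; col < gradient.length; ++col) {
        updates.set(0, col, updates.get(0, col) + gradient[col]);
      }
    }
    System.out.println("accumulated: " + updates.get(0, 0));

    // after the batch update is applied, a new zero matrix starts the next batch
    updates = new DenseDoubleMatrix(layerSizeArray[0] + 1, layerSizeArray[1]);
    System.out.println("reset: " + updates.get(0, 0));
  }
}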
