Package org.apache.mahout.math

Examples of org.apache.mahout.math.Matrix.divide()

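Matrix.divide(double) is element-wise: it returns a new Matrix with every entry divided by the scalar, leaving the receiver unchanged. A minimal sketch with made-up values, before the real excerpts below:

    import org.apache.mahout.math.DenseMatrix;
    import org.apache.mahout.math.Matrix;

    // divide(2.0) returns a fresh matrix; m itself is not modified
    Matrix m = new DenseMatrix(new double[][] { { 2.0, 4.0 }, { 6.0, 8.0 } });
    Matrix half = m.divide(2.0);   // { { 1.0, 2.0 }, { 3.0, 4.0 } }

The excerpts that follow come from neural-network training code that uses divide() to average accumulated gradients over the number of input rows (the batch size).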

            for (int l = 0; l < this.numberLayers; l++) {

                // average the weight gradient over the input batch and scale by the learning rate
                Matrix add = deltas.get(l).getFirst().divide(this.inputTrainingData.numRows()).times(lr);
                // note: as written, the gradient is divided by numRows() a second
                // time when normalizeByInputRows is set
                if (normalizeByInputRows)
                    add = add.divide(this.inputTrainingData.numRows());

                if (useRegularization) {
                    // L2 weight decay, applied element-wise against the connection weights
                    //add = add.times( this.preTrainingLayers[ l ].getConnectionWeights().times( l2 ) );
                    add = MatrixUtils.elementWiseMultiplication(add,
                            this.preTrainingLayers[l].getConnectionWeights().times(l2));
View Full Code Here


                this.hiddenLayers[l].connectionWeights = this.preTrainingLayers[l].getConnectionWeights();
                // column sums of the next layer's deltas give the hidden-bias gradient
                Matrix deltaColumnSums = MatrixUtils.columnSums(deltas.get(l + 1).getSecond());

                // TODO: check this, needs to happen in place?
                deltaColumnSums = deltaColumnSums.divide(this.inputTrainingData.numRows());

                // TODO: check this, needs to happen in place?
                //this.preTrainingLayers[ l ].getHiddenBias().subi( deltaColumnSums.times( lr ) );
                Matrix hbiasMinus = this.preTrainingLayers[l].getHiddenBias().minus(deltaColumnSums.times(lr));
                this.preTrainingLayers[l].sethBias(hbiasMinus);
View Full Code Here
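The TODO above asks whether the bias update can happen in place. Mahout's Matrix.assign(Matrix, DoubleDoubleFunction) mutates the receiver, so one possible in-place variant (a sketch reusing the excerpt's names, and assuming getHiddenBias() returns the live matrix rather than a copy) is:

    import org.apache.mahout.math.function.Functions;

    // subtract the scaled bias gradient in place; no intermediate copy is allocated
    this.preTrainingLayers[l].getHiddenBias()
            .assign(deltaColumnSums.times(lr), Functions.MINUS);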

        for (int l = 0; l < this.numberLayers; l++) {

            Matrix add = deltas.get(l).getFirst().transpose();
            if (normalizeByInputRows)
                add = add.divide(this.inputTrainingData.numRows());
            // get the gradient, scaled by AdaGrad's per-parameter learning rates when enabled
            if (this.useAdaGrad) {
                add = MatrixUtils.elementWiseMultiplication(add,
                        this.preTrainingLayers[l].getAdaGrad().getLearningRates(add));
            } else {
View Full Code Here

        // (reconstructed opening guard; the excerpt begins mid-branch, and the
        // dangling else plus the identical guard below imply if (useAdaGrad))
        if (useAdaGrad)
            biasGradient = MatrixUtils.elementWiseMultiplication(biasGradient,
                    logisticRegressionLayer.getBiasAdaGrad().getLearningRates(biasGradient));
        else
            biasGradient = biasGradient.times(lr);

        if (normalizeByInputRows)
            biasGradient = biasGradient.divide(inputTrainingData.numRows());

        if (useAdaGrad)
            logLayerGradient = MatrixUtils.elementWiseMultiplication(logLayerGradient,
                    logisticRegressionLayer.getBiasAdaGrad().getLearningRates(logLayerGradient));
View Full Code Here

    //    dy.divi(input.rows);

    if (normalizeByInputRows) {
        // average the output delta over the input rows (batch size)
        dy = dy.divide(this.input.numRows());
    }

    //Matrix wGradient = input.transpose().mmul(dy).mul(lr);
View Full Code Here
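The commented-out line uses jblas-style calls (mmul, mul). A hedged Mahout equivalent, assuming input and dy are org.apache.mahout.math.Matrix instances, uses times(Matrix) for the matrix product and times(double) for the scalar:

    // input^T * dy, scaled by the learning rate
    Matrix wGradient = input.transpose().times(dy).times(lr);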
