package org.apache.mahout.math

Examples of org.apache.mahout.math.Matrix.times()


            //  deltaColumnSums.divi(input.rows);
            if(normalizeByInputRows)
                MatrixUtils.divi( deltaColumnSums, this.inputTrainingData.numRows() );

            //  getLayers()[l].gethBias().addi(deltaColumnSums.mul(lr));
            MatrixUtils.subi( this.preTrainingLayers[ l ].getHiddenBias(), deltaColumnSums.times( lr ) );
            //  getSigmoidLayers()[l].setB(getLayers()[l].gethBias());
            this.hiddenLayers[ l ].biasTerms = this.preTrainingLayers[ l ].getHiddenBias();

        }
View Full Code Here


        Matrix logLayerGradient = deltas.get(numberLayers).getFirst().transpose();
        Matrix biasGradient = MatrixUtils.columnSums(deltas.get(numberLayers).getSecond());


        if(momentum != 0)
            biasGradient = biasGradient.times(momentum);

        if(useAdaGrad)
            biasGradient = MatrixUtils.elementWiseMultiplication(biasGradient,logisticRegressionLayer.getBiasAdaGrad().getLearningRates(biasGradient));
        else
            biasGradient = biasGradient.times(lr);
View Full Code Here

            biasGradient = biasGradient.times(momentum);

        if(useAdaGrad)
            biasGradient = MatrixUtils.elementWiseMultiplication(biasGradient,logisticRegressionLayer.getBiasAdaGrad().getLearningRates(biasGradient));
        else
            biasGradient = biasGradient.times(lr);

        if(normalizeByInputRows)
            biasGradient = biasGradient.divide(inputTrainingData.numRows());

View Full Code Here

    //Matrix wGradient = input.transpose().mmul(dy).mul(lr);
    Matrix wGradient = input.transpose().times( dy ); //.times( lr );
    if ( this.useAdaGrad ) {
     
      // wGradient.muli(adaGrad.getLearningRates(wGradient));
      wGradient = wGradient.times( this.adaLearningRates.getLearningRates(wGradient));
     
    } else {
     
      // wGradient.muli(lr);
      wGradient = wGradient.times(lr);
View Full Code Here

      wGradient = wGradient.times( this.adaLearningRates.getLearningRates(wGradient));
     
    } else {
     
      // wGradient.muli(lr);
      wGradient = wGradient.times(lr);
     
    }

    if (this.useAdaGrad) {
     
View Full Code Here

        Vector actualWeights = new DenseVector(new double[]{
                1, 0.25, -0.25, 0, 0,
                0, 0, 0, 0, -1});

        Vector probs = recipes.times(actualWeights);

        ContextualBayesBandit banditry = new ContextualBayesBandit(recipes);

        for (int i = 0; i < 1000; i++) {
            int k = banditry.sample();
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.