Package org.apache.commons.math3.distribution

Examples of org.apache.commons.math3.distribution.NormalDistribution


            x3 += step3;
            assertEquals(x3, r.get("v3").get("value").asDouble(), 0);
        }

        // now compare against reference distributions to test accuracy of the observed step distributions
        NormalDistribution normalDistribution = new NormalDistribution();
        GammaDistribution gd1 = new GammaDistribution(0.2, 5);
        GammaDistribution gd2 = new GammaDistribution(1, 1);
        TDistribution tDistribution = new TDistribution(2);
        for (double q : new double[]{0.001, 0.01, 0.1, 0.2, 0.5, 0.8, 0.9, 0.99, 0.999}) {
            double uG1 = gd1.cumulativeProbability(tdG1.quantile(q));
            assertEquals(q, uG1, (1 - q) * q * 10e-2);

            double uG2 = gd2.cumulativeProbability(tdG2.quantile(q));
            assertEquals(q, uG2, (1 - q) * q * 10e-2);

            double u1 = normalDistribution.cumulativeProbability(td1.quantile(q));
            assertEquals(q, u1, (1 - q) * q * 10e-2);

            double u2 = normalDistribution.cumulativeProbability(td2.quantile(q) / 2);
            assertEquals(q, u2, (1 - q) * q * 10e-2);

            double u3 = tDistribution.cumulativeProbability(td3.quantile(q));
            assertEquals(q, u3, (1 - q) * q * 10e-2);
        }
View Full Code Here
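
The td1, td2, td3, tdG1 and tdG2 digests above come from the surrounding test class. As a minimal self-contained sketch of the property being checked: for the exact distribution the round trip F(F^-1(q)) = q holds, so an estimated quantile pushed back through the reference CDF should land close to q (quantile values below are the same ones the test uses).

import org.apache.commons.math3.distribution.NormalDistribution;

public class QuantileRoundTrip {
    public static void main(String[] args) {
        NormalDistribution normal = new NormalDistribution();
        for (double q : new double[]{0.001, 0.01, 0.1, 0.5, 0.9, 0.99, 0.999}) {
            // for the exact distribution the round trip is an identity
            double roundTrip = normal.cumulativeProbability(normal.inverseCumulativeProbability(q));
            System.out.printf("q=%.3f  F(F^-1(q))=%.6f%n", q, roundTrip);
        }
    }
}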


        final Multiset<String> k2 = HashMultiset.create();
        for (int i = 0; i < 50000; i++) {
            k2.add(x2.sample());
        }

        final NormalDistribution normal = new NormalDistribution();
        List<Double> scores = Ordering.natural().sortedCopy(Iterables.transform(k1.elementSet(),
                new Function<String, Double>() {
                    public Double apply(String s) {
                        return normal.cumulativeProbability(LogLikelihood.rootLogLikelihoodRatio(k1.count(s), 50000 - k1.count(s), k2.count(s), 50000 - k2.count(s)));
                    }
                }));
        int n = scores.size();
//        System.out.printf("%.5f, %.5f, %.5f, %.5f, %.5f, %.5f, %.5f", scores.get(0), scores.get((int) (0.05*n)), scores.get(n / 4), scores.get(n / 2), scores.get(3 * n / 4), scores.get((int) (0.95 * n)), scores.get(n - 1));
        int i = 0;
View Full Code Here
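
Stripped of the Guava plumbing, the transform above pushes a signed root-log-likelihood-ratio score through the standard normal CDF to get a value in [0, 1]. A minimal sketch with made-up z-like scores (the snippet gets them from LogLikelihood.rootLogLikelihoodRatio on two word counts):

import org.apache.commons.math3.distribution.NormalDistribution;

public class ZScoreToUniform {
    public static void main(String[] args) {
        NormalDistribution normal = new NormalDistribution();
        // placeholder scores standing in for root-LLR values
        double[] zScores = {-2.3, -0.5, 0.0, 0.8, 1.9};
        for (double z : zScores) {
            // under the null hypothesis these transformed values should look roughly uniform
            System.out.printf("z=%5.2f -> %.4f%n", z, normal.cumulativeProbability(z));
        }
    }
}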

  List<String> words = Lists.newArrayList();
  List<String> urls = Lists.newArrayList();
  Configuration conf = getConf();

  int averageOutlinksPerUrl = conf.getInt("average.outlinks", AVERAGE_OUTLINKS);
  norm = new NormalDistribution(averageOutlinksPerUrl, averageOutlinksPerUrl / 2);

  int wordCount = 0;
  File vocabFile = new File(conf.get("vocab.path", INTPUT_VOCAB_FILENAME));
  Scanner wordScanner = null;
  try {
View Full Code Here
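
A self-contained sketch of the pattern above: the mean is the configured average outlink count and the standard deviation is half of it. The concrete numbers here are assumptions, not values from the original job, which reads them from the Hadoop Configuration.

import org.apache.commons.math3.distribution.NormalDistribution;

public class OutlinkCountSampler {
    public static void main(String[] args) {
        int averageOutlinksPerUrl = 10;   // assumed value
        NormalDistribution norm = new NormalDistribution(averageOutlinksPerUrl, averageOutlinksPerUrl / 2.0);
        for (int i = 0; i < 5; i++) {
            // round and clamp at zero, since a normal sample can be negative
            long outlinks = Math.max(0, Math.round(norm.sample()));
            System.out.println("url " + i + " -> " + outlinks + " outlinks");
        }
    }
}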

    size = envSize;
    std = relativeStd;
    bin = binSize;

    // generate discrete approximation of Normal distribution
    final NormalDistribution nd = new NormalDistribution(size / 2d, size * std);
    final int numBins = DoubleMath
        .roundToInt(envSize / binSize, RoundingMode.UP);
    final ImmutableSortedSet.Builder<Double> b = ImmutableSortedSet
        .naturalOrder();
    b.add(0d);
    for (int i = 1; i < numBins; i++) {
      b.add(nd.cumulativeProbability(i * binSize));
    }
    b.add(1d);
    probabilities = b.build();
    probabilitiesList = probabilities.asList();
  }
View Full Code Here
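
The builder above stores cumulative boundaries. A sketch, with assumed constants, of how those boundaries translate into per-bin probabilities of the discretised Normal distribution:

import org.apache.commons.math3.distribution.NormalDistribution;

public class DiscreteNormalBins {
    public static void main(String[] args) {
        double envSize = 100d, relativeStd = 0.1, binSize = 10d;   // assumed values
        NormalDistribution nd = new NormalDistribution(envSize / 2d, envSize * relativeStd);
        int numBins = (int) Math.ceil(envSize / binSize);
        double previous = 0d;
        for (int i = 1; i <= numBins; i++) {
            // boundary of bin i, clamped to 1 for the last bin as the builder does with b.add(1d)
            double boundary = (i == numBins) ? 1d : nd.cumulativeProbability(i * binSize);
            System.out.printf("bin %d: p = %.4f%n", i, boundary - previous);
            previous = boundary;
        }
    }
}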

       * a standard deviation of about 0.01. Using larger random values can speed the initial learning, but
       * it may lead to a slightly worse final model. Care should be taken to ensure that the initial weight
       * values do not allow typical visible vectors to drive the hidden unit probabilities very close to 1 or 0
       * as this significantly slows the learning.
       */
            NormalDistribution u = new NormalDistribution(rng, 0, .01,
                    NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);

            //this.connectionWeights = Matrix.zeros(nVisible,nHidden);
            this.connectionWeights = new DenseMatrix(nVisible, nHidden);
            this.connectionWeights.assign(0);

            for (int i = 0; i < this.connectionWeights.numRows(); i++) {
                for (int j = 0; j < this.connectionWeights.numCols(); j++) {
                    this.connectionWeights.set(i, j, u.sample());
                }
            }


        } else {
View Full Code Here
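
The DenseMatrix above is Mahout's. A dependency-free sketch of the same initialisation, drawing zero-mean Gaussian weights with a standard deviation of 0.01 from an explicit RandomGenerator (sizes and seed are illustrative):

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.random.RandomGenerator;

public class GaussianWeightInit {
    public static void main(String[] args) {
        int nVisible = 4, nHidden = 3;                 // illustrative sizes
        RandomGenerator rng = new MersenneTwister(42); // seeded for reproducibility
        NormalDistribution u = new NormalDistribution(
                rng, 0, 0.01, NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);

        double[][] weights = new double[nVisible][nHidden];
        for (int i = 0; i < nVisible; i++) {
            for (int j = 0; j < nHidden; j++) {
                weights[i][j] = u.sample();
            }
        }
        System.out.println("weights[0][0] = " + weights[0][0]);
    }
}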

     * as this significantly slows the learning.
     */

        if (this.connectionWeights == null) {

            NormalDistribution u = new NormalDistribution(this.randNumGenerator, 0, .01,
                    NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);

            this.connectionWeights = new DenseMatrix(this.numberVisibleNeurons, this.numberHiddenNeurons); // Matrix.zeros(nVisible,nHidden)
            this.connectionWeights.assign(0.0);

            //  for(int i = 0; i < this.W.rows; i++)
            //    this.W.putRow(i,new Matrix(u.sample(this.W.columns)));

            for (int i = 0; i < this.connectionWeights.numRows(); i++) {

                // u.sample(numCols) draws one whole row of weights at a time
                double[] rowSamples = u.sample(this.connectionWeights.numCols());

                this.connectionWeights.viewRow(i).assign(rowSamples);
            }
View Full Code Here
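
The only new API call in this variant is sample(int), which draws a whole row of samples in one shot. A minimal sketch without the Mahout matrix (sizes and seed are illustrative):

import java.util.Arrays;

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.random.MersenneTwister;

public class RowWiseGaussianInit {
    public static void main(String[] args) {
        int rows = 3, cols = 5;   // illustrative sizes
        NormalDistribution u = new NormalDistribution(new MersenneTwister(42), 0, 0.01,
                NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);

        double[][] weights = new double[rows][];
        for (int i = 0; i < rows; i++) {
            weights[i] = u.sample(cols);   // one call per row, like viewRow(i).assign(rowSamples) above
        }
        System.out.println(Arrays.toString(weights[0]));
    }
}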

     * a standard deviation of about 0.01. Using larger random values can speed the initial learning, but
     * it may lead to a slightly worse final model. Care should be taken to ensure that the initial weight
     * values do not allow typical visible vectors to drive the hidden unit probabilities very close to 1 or 0
     * as this significantly slows the learning.
     */
        NormalDistribution u = new NormalDistribution( this.randNumGenerator, 0, .01, fanIn() );

        Matrix weights = new DenseMatrix( this.numberVisibleNeurons, this.numberHiddenNeurons ); //Matrix.zeros(nVisible,nHidden);
        weights.assign(0.0);

        for (int i = 0; i < this.connectionWeights.numRows(); i++) {
View Full Code Here

   
    double a = 1.0 / (double) this.numberVisibleNeurons;

    NormalDistribution u = new NormalDistribution(this.randNumGenerator, 0, .01,
        NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);

    this.connectionWeights = new DenseMatrix(this.numberVisibleNeurons, this.numberHiddenNeurons);
    this.connectionWeights.assign(0.0);

    for (int r = 0; r < this.connectionWeights.numRows(); r++) {

      for (int c = 0; c < this.connectionWeights.numCols(); c++) {

        double init_weight = u.sample();

        // System.out.println("w: " + init_weight);

        this.connectionWeights.setQuick(r, c, init_weight);
     
View Full Code Here

    } else {
      this.rndNumGenerator = rndGen;
    }
   

    NormalDistribution u = new NormalDistribution(this.rndNumGenerator, 0, .01,
        NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);

    // init the connection weights
    this.connectionWeights = new DenseMatrix(this.neuronCountPreviousLayer, this.neuronCount);
    this.connectionWeights.assign(0.0);

    for (int r = 0; r < this.connectionWeights.numRows(); r++) {

      for (int c = 0; c < this.connectionWeights.numCols(); c++) {

        this.connectionWeights.setQuick(r, c, u.sample());

      }

    }
   
View Full Code Here

    private static String getConfidenceInterval(SummaryStatistics statistics, double confidence)
    {
        StringBuilder sb = new StringBuilder("Confidence interval (" + confidence * 100 + "%): ");

        // it is possible to switch to a T-distribution and use statistics.getN() - 1 degrees of freedom
        double sampleError = new NormalDistribution(0, 1).inverseCumulativeProbability(confidence)
                * (statistics.getStandardDeviation() / Math.sqrt(statistics.getN()));
        sb.append("(" + (statistics.getMean() - sampleError) + " <= " + statistics.getMean()
                + " <= " + (statistics.getMean() + sampleError) + ")");

        return sb.toString();
    }
View Full Code Here
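
As written, the method uses the one-sided quantile inverseCumulativeProbability(confidence); for the two-sided interval its output string describes, the usual choice is the (1 + confidence) / 2 quantile (about 1.96 for 95%). A self-contained sketch with placeholder data:

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class ConfidenceIntervalSketch {
    public static void main(String[] args) {
        SummaryStatistics statistics = new SummaryStatistics();
        for (double v : new double[]{4.9, 5.1, 5.0, 4.8, 5.2, 5.0}) {   // placeholder sample
            statistics.addValue(v);
        }
        double confidence = 0.95;
        // two-sided critical value from the standard normal quantile function
        double z = new NormalDistribution(0, 1).inverseCumulativeProbability((1 + confidence) / 2);
        double sampleError = z * statistics.getStandardDeviation() / Math.sqrt(statistics.getN());
        System.out.printf("Confidence interval (%.0f%%): (%.4f <= %.4f <= %.4f)%n",
                confidence * 100,
                statistics.getMean() - sampleError,
                statistics.getMean(),
                statistics.getMean() + sampleError);
    }
}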


