Package gov.sandia.cognition.statistics.distribution

Examples of gov.sandia.cognition.statistics.distribution.NormalInverseWishartDistribution


          new double[particle.getDistributionCount() + 1];

      /*
       * Evaluate the log likelihood for a new component.
       */
      final NormalInverseWishartDistribution centeringDist = particle.getCenteringDistribution();
      final double newComponentPriorPredDof =
          2d * centeringDist.getInverseWishart().getDegreesOfFreedom()
              - centeringDist.getInputDimensionality() + 1d;
      final double kappa = centeringDist.getCovarianceDivisor();
      final Matrix newComponentPriorPredPrecision =
          centeringDist.getInverseWishart().getInverseScale()
              .scale(2d * (kappa + 1d) / (kappa * newComponentPriorPredDof));
      final MultivariateStudentTDistribution newComponentPriorPred =
          new MultivariateStudentTDistribution(newComponentPriorPredDof, centeringDist
              .getGaussian().getMean(), newComponentPriorPredPrecision.inverse());

      final double newComponentLogLikelihood =
          Math.log(particle.getAlpha()) - Math.log(particle.getAlpha() + particle.getIndex())
              + newComponentPriorPred.getProbabilityFunction().logEvaluate(observation);
      componentPriorPredLogLikelihoods[0] = newComponentLogLikelihood;

      double totalLogLikelihood = newComponentLogLikelihood;

      /*
       * Now, evaluate log likelihood for the current mixture components
       */
      int n = 0;
      for (final MultivariateGaussian component : particle.getDistributions()) {

        final double componentN = particle.getCounts().getElement(n);
        final double componentPriorPredDof =
            2d * centeringDist.getInverseWishart().getDegreesOfFreedom() + componentN
                - centeringDist.getInputDimensionality() + 1d;
        final Vector componentPriorPredMean =
            centeringDist.getGaussian().getMean().scale(kappa)
                .plus(component.getMean().scale(componentN)).scale(1d / (kappa + componentN));


        final Vector componentCenteringMeanDiff =
            centeringDist.getGaussian().getMean().minus(component.getMean());
        final Matrix componentD =
            component.getCovariance().plus(
                componentCenteringMeanDiff.outerProduct(componentCenteringMeanDiff).scale(
                    kappa * componentN / (kappa + componentN)));

        final Matrix componentPriorPredCovariance =
            centeringDist
                .getInverseWishart()
                .getInverseScale()
                .plus(componentD.scale(1d / 2d))
                .scale(
                    2d * (kappa + componentN + 1d) / ((kappa + componentN) * componentPriorPredDof));
 
View Full Code Here


            - centeringCovPriorMean.getNumColumns() - 1d), centeringCovDof);
    final MultivariateGaussian centeringMeanPrior =
        new MultivariateGaussian(VectorFactory.getDenseDefault().copyArray(new double[] {0d, 0d}),
            centeringCovariancePrior.getMean());
    final double centeringCovDivisor = 0.25d;
    final NormalInverseWishartDistribution centeringPrior =
        new NormalInverseWishartDistribution(centeringMeanPrior, centeringCovariancePrior,
            centeringCovDivisor);
    final double dpAlphaPrior = 2d;

    /*
     * Initialize an empty mixture. The components will be created from the Dirichlet process
View Full Code Here

TOP

Related Classes of gov.sandia.cognition.statistics.distribution.NormalInverseWishartDistribution

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.