Package gov.sandia.cognition.statistics.distribution

Examples of gov.sandia.cognition.statistics.distribution.MultivariateGaussian
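The snippets below all build on the same small core of the MultivariateGaussian API: construct the distribution from a mean Vector and a covariance Matrix, draw samples, and evaluate log densities through its probability function. The following minimal, self-contained sketch illustrates that core; the class and variable names are illustrative, not taken from the extracted code.

import java.util.Random;

import gov.sandia.cognition.math.matrix.Matrix;
import gov.sandia.cognition.math.matrix.MatrixFactory;
import gov.sandia.cognition.math.matrix.Vector;
import gov.sandia.cognition.math.matrix.VectorFactory;
import gov.sandia.cognition.statistics.distribution.MultivariateGaussian;

public class MultivariateGaussianBasics {
  public static void main(String[] args) {
    // A 2-d Gaussian with mean (0, 0) and diagonal covariance diag(4, 9).
    final Vector mean = VectorFactory.getDenseDefault().copyArray(new double[] {0d, 0d});
    final Matrix covariance = MatrixFactory.getDenseDefault().copyArray(
        new double[][] { {4d, 0d}, {0d, 9d}});
    final MultivariateGaussian gaussian = new MultivariateGaussian(mean, covariance);

    // Draw a sample and score it under the distribution's log density.
    final Random rng = new Random(123L);
    final Vector sample = gaussian.sample(rng);
    final double logDensity = gaussian.getProbabilityFunction().logEvaluate(sample);
    System.out.println("sample = " + sample + ", log density = " + logDensity);
  }
}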


      Matrix priorPredObsCov = X.times(
          particle.getPriorBeta().getCovariance())
          .times(X.transpose()).plus(particle.augLassoSample);
      // times(...) returns a new matrix rather than mutating in place, so the
      // product must be assigned back (the original line discarded the result).
      priorPredObsCov = priorPredObsCov.times(particle.priorObsCovSample);

      final MultivariateGaussian priorPredictiveObsDist =
          new MultivariateGaussian(priorPredObsMean, priorPredObsCov);

      return priorPredictiveObsDist.getProbabilityFunction().logEvaluate(y);
    }
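The snippet above builds a prior predictive observation density for a linear model: with coefficients beta ~ N(betaMean, betaCov), design matrix X, and observation noise covariance obsCov, the prior predictive for y = X * beta + noise is N(X * betaMean, X * betaCov * X^T + obsCov). A sketch of that pattern in isolation (the method and parameter names here are illustrative, not from the particle class above):

  static double logPriorPredictive(Matrix X, Vector betaMean, Matrix betaCov,
      Matrix obsCov, Vector y) {
    // Predictive mean and covariance of y under the coefficient prior.
    final Vector predMean = X.times(betaMean);
    final Matrix predCov = X.times(betaCov).times(X.transpose()).plus(obsCov);
    final MultivariateGaussian predictive = new MultivariateGaussian(predMean, predCov);
    return predictive.getProbabilityFunction().logEvaluate(y);
  }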


  protected GaussianArHpTransitionState propagate(
      GaussianArHpTransitionState prevState, int predClass, ObservedValue<Vector,Void> data) {
    /*
     * Perform the filtering step
     */
    MultivariateGaussian priorPredictedState = prevState.getState().clone();
    KalmanFilter kf = Iterables.get(prevState.getHmm().getStateFilters(), predClass);
    kf.predict(priorPredictedState);
   
    final DlmHiddenMarkovModel newHmm = prevState.getHmm().clone();
    final InverseGammaDistribution invScaleSS = prevState.getInvScaleSS().clone();
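The kf.predict(...) call above advances the cloned MultivariateGaussian belief in place; the next snippet performs the same prediction step by hand with setMean and setCovariance. A sketch of that manual form, using only the accessors that appear in these examples (G is the state-transition matrix and Q the model covariance; the names are illustrative):

  // Kalman prediction step on a MultivariateGaussian belief:
  //   mean'       <- G * mean
  //   covariance' <- G * covariance * G^T + Q
  static void predictInPlace(MultivariateGaussian belief, Matrix G, Matrix Q) {
    belief.setMean(G.times(belief.getMean()));
    belief.setCovariance(
        G.times(belief.getCovariance()).times(G.transpose()).plus(Q));
  }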

          }
        });
    for (Entry<LogitMixParticle, ? extends Number> particleEntry : target.asMap().entrySet()) {
      final LogitMixParticle particle = particleEntry.getKey();

      final MultivariateGaussian predictivePrior = particle.getLinearState().clone();
      KalmanFilter kf = particle.getRegressionFilter();
      final Matrix G = kf.getModel().getA();
      final Matrix F = data.getObservedData();
      predictivePrior.setMean(G.times(predictivePrior.getMean()));
      predictivePrior.setCovariance(
          G.times(predictivePrior.getCovariance()).times(G.transpose())
            .plus(kf.getModelCovariance()));
      final Vector betaMean = predictivePrior.getMean();

      final int particleCount;
      if (particleEntry.getValue() instanceof MutableDoubleCount) {
        particleCount = ((MutableDoubleCount)particleEntry.getValue()).count;
      } else {
        particleCount = 1;
      }
      for (int p = 0; p < particleCount; p++) {

        for (int j = 0; j < 10; j++) {
          final LogitMixParticle predictiveParticle = particle.clone();
          predictiveParticle.setPreviousParticle(particle);
          predictiveParticle.setBetaSample(betaMean);
          predictiveParticle.setLinearState(predictivePrior);

          final UnivariateGaussian componentDist =
              this.evDistribution.getDistributions().get(j);

          predictiveParticle.setEVcomponent(componentDist);
         
          /*
           * Update the observed data for the regression component.
           */
          predictiveParticle.getRegressionFilter().getModel().setC(F);

          // TODO would be great to have a 1x1 matrix class here...
          final Matrix compVar = MatrixFactory.getDefault().copyArray(
              new double[][] {{componentDist.getVariance()}});
          predictiveParticle.getRegressionFilter().setMeasurementCovariance(compVar);
         
          final double compPredPriorObsMean =
               F.times(betaMean).getElement(0)
               + componentDist.getMean();
          final double compPredPriorObsCov =
               F.times(predictivePrior.getCovariance()).times(F.transpose()).getElement(0, 0)
               + componentDist.getVariance();
          predictiveParticle.setPriorPredMean(compPredPriorObsMean);
          predictiveParticle.setPriorPredCov(compPredPriorObsCov);

          final double logLikelihood =
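The snippet is cut off right at the log-likelihood assignment. A plausible continuation, given that the component's prior predictive mean and covariance computed above are scalars, would score the observed value under a one-dimensional Gaussian; the sketch below assumes a hypothetical scalar observedValue and is an illustration, not the missing code:

  // Sketch: score a scalar observation under the component's 1-d
  // prior predictive N(compPredPriorObsMean, compPredPriorObsCov).
  final UnivariateGaussian compPredictive =
      new UnivariateGaussian(compPredPriorObsMean, compPredPriorObsCov);
  final double logLikelihood =
      compPredictive.getProbabilityFunction().logEvaluate(observedValue);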

    /*
     * Create a mixture distribution to fit.
     */
    final double[] trueComponentWeights = new double[] {0.1d, 0.5d, 0.1d, 0.2d, 0.1d};
    final List<MultivariateGaussian> trueComponentModels = Lists.newArrayList();
    trueComponentModels.add(new MultivariateGaussian(VectorFactory.getDenseDefault().copyArray(
        new double[] {0d, 0d}), MatrixFactory.getDenseDefault().copyArray(
        new double[][] { {100d, 0d}, {0d, 100d}})));
    trueComponentModels.add(new MultivariateGaussian(VectorFactory.getDenseDefault().copyArray(
        new double[] {100d, 1d}), MatrixFactory.getDenseDefault().copyArray(
        new double[][] { {100d, 0d}, {0d, 100d}})));
    trueComponentModels.add(new MultivariateGaussian(VectorFactory.getDenseDefault().copyArray(
        new double[] {10d, -10d}), MatrixFactory.getDenseDefault().copyArray(
        new double[][] { {100d, 0d}, {0d, 100d}})));
    trueComponentModels.add(new MultivariateGaussian(VectorFactory.getDenseDefault().copyArray(
        new double[] {1d, -200d}), MatrixFactory.getDenseDefault().copyArray(
        new double[][] { {10d, 0d}, {0d, 10d}})));
    trueComponentModels.add(new MultivariateGaussian(VectorFactory.getDenseDefault().copyArray(
        new double[] {50d, -50d}), MatrixFactory.getDenseDefault().copyArray(
        new double[][] { {20d, 0d}, {0d, 20d}})));

    final MultivariateMixtureDensityModel<MultivariateGaussian> trueMixture =
        new MultivariateMixtureDensityModel<MultivariateGaussian>(trueComponentModels,
            trueComponentWeights);

    final Random rng = new Random(829351983L);
    /*
     * Sample a lot of test data to fit against. TODO For a proper study, we would randomize subsets
     * of this data and fit against those.
     */
    final List<Vector> observations = trueMixture.sample(rng, 10000);

    /*
     * Instantiate the PL filter by first providing prior parameters/distributions. We start by creating
     * a prior conjugate centering distribution (which is a Normal Inverse Wishart), then we provide
     * the Dirichlet Process prior parameters (group counts and concentration parameter).
     */
    final int centeringCovDof = 2 + 2;
    final Matrix centeringCovPriorMean =
        MatrixFactory.getDenseDefault().copyArray(new double[][] { {1000d, 0d}, {0d, 1000d}});
    final InverseWishartDistribution centeringCovariancePrior =
        new InverseWishartDistribution(centeringCovPriorMean.scale(centeringCovDof
            - centeringCovPriorMean.getNumColumns() - 1d), centeringCovDof);
    final MultivariateGaussian centeringMeanPrior =
        new MultivariateGaussian(VectorFactory.getDenseDefault().copyArray(new double[] {0d, 0d}),
            centeringCovariancePrior.getMean());
    final double centeringCovDivisor = 0.25d;
    final NormalInverseWishartDistribution centeringPrior =
        new NormalInverseWishartDistribution(centeringMeanPrior, centeringCovariancePrior,
            centeringCovDivisor);
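One detail worth noting in the prior setup above: the inverse-Wishart scale matrix is centeringCovPriorMean.scale(centeringCovDof - p - 1) because the mean of an InverseWishart(Psi, dof) over p-by-p matrices is Psi / (dof - p - 1), so this scaling makes the prior's expected covariance come out to centeringCovPriorMean itself. A quick check, reusing the variables defined above:

    // With Psi = centeringCovPriorMean * (dof - p - 1), the inverse-Wishart mean
    // Psi / (dof - p - 1) recovers centeringCovPriorMean, i.e. diag(1000, 1000).
    final Matrix recoveredCovMean = centeringCovariancePrior.getMean();
    System.out.println(recoveredCovMean);  // expect diag(1000, 1000)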


