// Experiment configuration: K replications, T time steps per run, N particles.
final int K = 3;
final int T = 200;
final int N = 1000;
// Filter under test, built from the true Kalman filter and the sigma/phi priors.
// NOTE(review): K is passed both here (constructor arg) and as the replication
// count below — confirm the constructor really wants the replication count.
final GaussianArHpWfPlFilter wfFilter =
new GaussianArHpWfPlFilter(trueKf, sigmaPrior, phiPrior, random, K, true);
/*
 * Note: replications are over the same set of simulated observations.
 */
// One simulated DLM trajectory of length T, shared by all K replications.
List<SimObservedValue<Vector, Matrix, Vector>> simulations = DlmUtils.sampleDlm(
random, T, trueKf.createInitialLearnedObject(), trueKf);
wfFilter.setNumParticles(N);
// log.info("rep\tt\tfilter.type\tmeasurement.type\tresample.type\tmeasurement");
// Accumulators are declared OUTSIDE the replication loop, so latency and RMSE
// statistics aggregate across all K replications, not per replication.
RingAccumulator<MutableDouble> wfLatency =
new RingAccumulator<MutableDouble>();
// NOTE(review): the no-arg Stopwatch constructor is deprecated/removed in newer
// Guava — prefer Stopwatch.createUnstarted() if the Guava version allows it.
Stopwatch wfWatch = new Stopwatch();
RingAccumulator<MutableDouble> wfStateRMSEs =
new RingAccumulator<MutableDouble>();
RingAccumulator<MutableDouble> wfPsiRMSEs =
new RingAccumulator<MutableDouble>();
RingAccumulator<MutableDouble> wfSigma2RMSEs =
new RingAccumulator<MutableDouble>();
// Output CSV path is derived from the first CLI argument (a directory).
String outputFilename = args[0] + "/nar-" + N + "-" + K + "-wf.csv";
// NOTE(review): writer is never closed in this view — presumably closed after
// the replication loop; confirm, or wrap in try-with-resources.
CSVWriter writer = new CSVWriter(new FileWriter(outputFilename), ',');
// Header matches the commented-out log line above.
String[] header = "rep,t,filter.type,measurement.type,resample.type,measurement".split(",");
writer.writeNext(header);
// Run K independent replications of the filter over the shared simulation.
for (int k = 0; k < K; k++) {
log.info("Processing replication " + k);
// Fresh evaluator per replication; writes rows tagged "wf-pl" to the shared CSV.
GaussianArHpEvaluator wfEvaluator = new GaussianArHpEvaluator("wf-pl",
truePsi, trueSigma2, writer);
// Fresh particle population per replication.
CountedDataDistribution<GaussianArHpWfParticle> wfDistribution =
(CountedDataDistribution<GaussianArHpWfParticle>) wfFilter.getUpdater().createInitialParticles(N);
// Hard-coded to -1, so the "i > numPreRuns" guard below is always true; the
// commented-out expression once derived a warm-up cutoff from the particles.
// NOTE(review): prefer an uppercase suffix (-1L) — lowercase 'l' reads as '1'.
final long numPreRuns = -1l;//wfDistribution.getMaxValueKey().getTime();
/*
 * Recurse through the particle filter
 */
for (int i = 0; i < T; i++) {
if (i > numPreRuns) {
// The i == 0 observation is never fed to update(); presumably the initial
// particles already represent t = 0 — TODO confirm that is intentional.
if (i > 0) {
// Time each update call in isolation and accumulate the millisecond latency.
wfWatch.reset();
wfWatch.start();
wfFilter.update(wfDistribution, simulations.get(i));
wfWatch.stop();
final long latency = wfWatch.elapsed(TimeUnit.MILLISECONDS);
wfLatency.accumulate(new MutableDouble(latency));
}