package complex1;
import java.io.IOException;
import org.apache.commons.math.random.RandomDataImpl;
import org.apache.commons.math.random.RandomGenerator;
import org.apache.commons.math.random.Well44497b;
import common.FragmentPartitionPrior;
import common.FragmentPenalty;
import common.LinearSizePenalty;
import common.PartitionPrior;
import common.PoissonTuningDistribution;
import common.PowerFragmentPenalty;
import common.SizePenalty;
import common.TuningDistribution;
public class RunMCMC {
    /**
     * Entry point: loads locations, data and an initial partition from binary
     * files, runs an MCMC simulation over partitions, and prints the center
     * and mode of the sampled partitions together with timing information.
     *
     * @param args optional; {@code args[0]} may override the default data
     *             directory containing {@code locs2.bin}, {@code data3.bin}
     *             and {@code tap.bin}
     * @throws IOException if any of the binary input files cannot be read
     */
    public static void main(String[] args) throws IOException {
        int n = 20;
        // Data directory is overridable from the command line so the program
        // is not tied to one machine's home directory; default preserves the
        // original behavior.
        String dir = args.length > 0 ? args[0]
                : "/home/adu/Dropbox/research/vardeman/mc-on-partition/data/jdata";
        String locsFile = dir + "/locs2.bin";
        String dataFile = dir + "/data3.bin";
        String iapFile = dir + "/tap.bin";
        double[][] locs = dclong.io.BinaryReader.readDouble(locsFile, 0, 0, n, 2);
        double[] data = dclong.io.BinaryReader.readDouble(dataFile, 0, 0, n);
        int[] initArrayPartition = dclong.io.BinaryReader.readInt(iapFile, 0, 0, n);
        // Fragment penalty with parameters (1, 90) — exact semantics are
        // defined by PowerFragmentPenalty, not visible here.
        FragmentPenalty fragmentPenalty = new PowerFragmentPenalty(1, 90);
        // Linear size penalty with coefficient 0 — presumably a no-op size
        // penalty; confirm against LinearSizePenalty.
        SizePenalty sizePenalty = new LinearSizePenalty(0);
        // Partition prior based on fragment penalty only. A distance-based
        // prior (DistPartitionPrior, using locs and sizePenalty) was the
        // alternative here; with this prior, locs and sizePenalty are unused.
        PartitionPrior fragPrior = new FragmentPartitionPrior(fragmentPenalty);
        // WELL44497b generator wrapped in commons-math's RandomDataImpl.
        RandomGenerator rg = new Well44497b();
        RandomDataImpl rng = new RandomDataImpl(rg);
        TuningDistribution tuningDistribution = new PoissonTuningDistribution(rng, 1);
        // Initial jumping weights: every one of the n sites starts at 10.
        double[] initJumpWeight = new double[n];
        java.util.Arrays.fill(initJumpWeight, 10);
        double variance = 0.1;
        double varianceRatio = 0.01;
        // NOTE(review): `data` is passed twice below while `locs` is loaded
        // but never used — the second `data` argument may have been intended
        // as `locs`. Confirm against the MCMC constructor signature.
        MCMC mcmc = new MCMC(rng, data, initArrayPartition, data,
                initJumpWeight, fragPrior, tuningDistribution, variance, varianceRatio);
        dclong.util.Timer timer = new dclong.util.Timer();
        // Warmup phase with auto-adjusting of jump weights.
        timer.start();
        mcmc.warmup(10000, false);
        timer.stop();
        timer.printSeconds("warmup with auto adjusting");
        System.out.println("Jump ratio: " + mcmc.getJumpRatio());
        // Main simulation run.
        timer.start();
        mcmc.run(10000);
        timer.stop();
        System.out.println("Jump Ratio: " + mcmc.getJumpRatio());
        timer.printSeconds("simulation");
        // Draws could be persisted via mcmc.writeFP(path) if needed.
        System.out.println("Current size of fp is " + mcmc.getFps().size());
        // Find the center of the sampled frequency partitions.
        timer.start();
        common.FreqPartition center = common.FreqPartition.center(mcmc.getFps(), true);
        // Fixed: was timer.end(), inconsistent with the start()/stop() pairs
        // used everywhere else in this method.
        timer.stop();
        timer.printSeconds("finding the center");
        System.out.println("Current size of fp is " + mcmc.getFps().size());
        System.out.println("The center is:\n" + center);
        System.out.println("The center in list representation is:\n"
                + dclong.util.Arrays.splitArray(center.getPartition()));
        common.FreqPartition mode = common.FreqPartition.mode(mcmc.getFps());
        System.out.println("The mode is: \n" + mode);
        System.out.println("The mode in list representation is:\n"
                + dclong.util.Arrays.splitArray(mode.getPartition()));
    }
}