private double scoreReadAgainstHaplotype(final PileupElement p, final int contextSize, final Haplotype haplotype, final int locus) {
    double expected = 0.0;
    double mismatches = 0.0;

    final GATKSAMRecord read = p.getRead();
    // A read without a cigar cannot be projected onto the haplotype, so score it as zero
    if ( read.getCigar() == null )
        return 0.0;
    // What's the expected mismatch rate under the model that this read is actually sampled from
    // this haplotype? Let's assume the consensus base c is chosen at random from one of A, C, G, or T, and that
    // the observed base b is generated from c with an error rate e. Since e is the rate at which we'd
    // see a miscalled c, the expected mismatch rate is really e. So the expected number of mismatches
    // is just sum_i e_i for i from 1..n over n sites.
    //
    // Now, what's the probabilistic sum of mismatches? Suppose that the base b is equal to c. Well, it could
    // actually be a miscall in a matching direction, which would happen at an e / 3 rate. If b != c, then
    // the chance that it is actually a mismatch is 1 - e, since any of the other 3 options would be a mismatch.
    // So the probability-weighted mismatch rate is sum_i ( matched ? e_i / 3 : 1 - e_i ) for i = 1 ... n.
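    //
    // For example, with a base quality of Q20 the error rate is e = 0.01: a base that matches the
    // haplotype contributes 0.01 / 3 (roughly 0.0033) to the probability-weighted mismatch sum, while a
    // non-matching base contributes 1 - 0.01 = 0.99; the expected count accumulates 0.01 either way.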
    final byte[] haplotypeBases = haplotype.getBases();

    byte[] readBases = read.getReadBases();
    readBases = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readBases); // Adjust the read bases based on the Cigar string
    byte[] readQuals = read.getBaseQualities();
    readQuals = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readQuals); // Shift the location of the qual scores based on the Cigar string
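
    // Project the pileup position at this locus into the same cigar-adjusted (alignment-space)
    // coordinates as the arrays above, then center a window of contextSize bases around it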
    int readOffsetFromPileup = AlignmentUtils.calcAlignmentByteArrayOffset(read.getCigar(), p, read.getAlignmentStart(), locus);
    final int baseOffsetStart = readOffsetFromPileup - (contextSize - 1) / 2;
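
    // Accumulate the expected and probability-weighted mismatch counts across the context window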
    for (int i = 0; i < contextSize; i++) {
        final int baseOffset = i + baseOffsetStart;
        if (baseOffset < 0) {