Package htsjdk.variant.variantcontext

Examples of htsjdk.variant.variantcontext.Allele


     *   allele1   #       #
     *   allele2   #       #
     * @return a 2x2 contingency table
     */
    private static int[][] getContingencyTable( final Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap, final VariantContext vc) {
        final Allele ref = vc.getReference();
        final Allele alt = vc.getAltAlleleWithHighestAlleleCount();
        int[][] table = new int[2][2];

        for (PerReadAlleleLikelihoodMap maps : stratifiedPerReadAlleleLikelihoodMap.values() ) {
            for (Map.Entry<GATKSAMRecord,Map<Allele,Double>> el : maps.getLikelihoodReadMap().entrySet()) {
                final boolean matchesRef = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()).equals(ref);
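The method above allocates a 2x2 table and, for each read, checks whether its most likely allele matches the reference. Judging from the isFW flag in the next snippet, the two columns are presumably the forward and reverse read strands. A minimal, self-contained sketch of the same counting using only the htsjdk Allele API (the per-read observations below are made-up placeholders, not real GATKSAMRecords; assumes htsjdk on the classpath):

import htsjdk.variant.variantcontext.Allele;

public class ContingencyTableSketch {
    public static void main(String[] args) {
        final Allele ref = Allele.create("A", true);   // reference allele
        final Allele alt = Allele.create("T", false);  // alternate allele

        // rows: 0 = supports ref, 1 = supports alt; columns: 0 = forward, 1 = reverse
        final int[][] table = new int[2][2];

        // stand-ins for per-read observations: observed base allele plus strand flag
        final Allele[] observed = { Allele.create("A", false), Allele.create("T", false), Allele.create("A", false) };
        final boolean[] isForward = { true, false, false };

        for (int i = 0; i < observed.length; i++) {
            // equals(other, true) compares bases while ignoring the ref/non-ref flag
            final boolean matchesRef = ref.equals(observed[i], true);
            final boolean matchesAlt = alt.equals(observed[i], true);
            if (matchesRef || matchesAlt) {
                table[matchesRef ? 0 : 1][isForward[i] ? 0 : 1]++;
            }
        }

        System.out.println("ref fwd/rev: " + table[0][0] + "/" + table[0][1]);
        System.out.println("alt fwd/rev: " + table[1][0] + "/" + table[1][1]);
    }
}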


                    continue;

                if ( p.getQual() < minQScoreToConsider || p.getMappingQual() < minQScoreToConsider )
                    continue;

                final Allele base = Allele.create(p.getBase(), false);
                final boolean isFW = !p.getRead().getReadNegativeStrandFlag();

                final boolean matchesRef = ref.equals(base, true);
                final boolean matchesAlt = alt.equals(base, true);
                if ( matchesRef || matchesAlt ) {
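The central Allele idiom here is wrapping a single observed base in a non-reference Allele via Allele.create(p.getBase(), false) and comparing it with equals(other, true), which ignores the ref/non-ref status and compares only the bases. A short sketch of that behaviour (the byte value stands in for the pileup element's base):

import htsjdk.variant.variantcontext.Allele;

public class BaseMatchSketch {
    public static void main(String[] args) {
        final Allele ref = Allele.create("G", true);
        final Allele alt = Allele.create("C", false);

        final byte observedBase = (byte) 'G';                   // stand-in for p.getBase()
        final Allele base = Allele.create(observedBase, false);  // observed bases are wrapped as non-reference

        System.out.println(ref.equals(base));        // false: same bases, but ref status differs
        System.out.println(ref.equals(base, true));  // true:  ref status ignored, bases match
        System.out.println(alt.equals(base, true));  // false: bases differ
    }
}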

                //    continue; // todo -- fixme, should take filtered context!
                //if ( pBoth[50] != 0 )
                //    continue; // troubleshoot


                Allele base = Allele.create(p.getBase(), false);
                boolean matchesRef = ref.equals(base, true);
                boolean matchesAlt = alt.equals(base, true);
                double eps = Math.pow(10.0, -1.0 * (p.getQual() / 10.0)); // convert base quality score to error rate
                int vAB = 0;
                double[] p1 = new double[steps + 1]; // log probability for this base (or log(p(b|eps,AB))), similar to ContEst with f=0
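The eps line is the standard Phred conversion: a base quality Q corresponds to an error probability of 10^(-Q/10), so Q20 means a 1-in-100 error rate. A tiny worked example with arbitrary quality values:

public class PhredToErrorRate {
    public static void main(String[] args) {
        for (final int qual : new int[]{10, 20, 30}) {
            final double eps = Math.pow(10.0, -qual / 10.0);  // Phred-scaled quality -> error rate
            System.out.println("Q" + qual + " -> " + eps);    // 0.1, 0.01, 0.001
        }
    }
}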


        final double[] log10Likelihoods = MathUtils.normalizeFromLog10(new double[]{getLog10LikelihoodOfAFzero(), getLog10LikelihoodOfAFNotZero()}, true);
        final double[] log10Priors = MathUtils.normalizeFromLog10(new double[]{log10PriorsByAC[0], MathUtils.log10sumLog10(log10PriorsByAC, 1)}, true);

        final Map<Allele, Double> log10pRefByAllele = new HashMap<Allele, Double>(allelesUsedInGenotyping.size());
        for ( int i = 0; i < subACOfMLE.length; i++ ) {
            final Allele allele = allelesUsedInGenotyping.get(i+1);
            final double log10PRef = alleleCountsOfMAP[i] > 0 ? -10000 : 0; // TODO -- a total hack but in effect what the old behavior was
            log10pRefByAllele.put(allele, log10PRef);
        }

        return new AFCalculationResult(subACOfMLE, nEvaluations, allelesUsedInGenotyping, log10Likelihoods, log10Priors, log10pRefByAllele);
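normalizeFromLog10(values, true) appears to renormalize an array of log10 probabilities so that the corresponding linear values sum to 1, with the boolean keeping the output in log10 space. A plain-Java sketch of that normalization, without GATK (a naive version; a production implementation would subtract the maximum first to avoid underflow):

public class Log10Normalize {
    // Normalize log10 probabilities so the linear values sum to 1, returning log10 values.
    static double[] normalizeFromLog10(final double[] log10Values) {
        double sum = 0.0;
        for (final double v : log10Values) sum += Math.pow(10.0, v);
        final double log10Sum = Math.log10(sum);
        final double[] result = new double[log10Values.length];
        for (int i = 0; i < log10Values.length; i++) result[i] = log10Values[i] - log10Sum;
        return result;
    }

    public static void main(String[] args) {
        final double[] normalized = normalizeFromLog10(new double[]{-1.0, -2.0});
        for (final double v : normalized) System.out.println(v);  // ~ -0.0414 and -1.0414
    }
}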

                    worstRelativeLikelihood += UNREPORTED_HAPLOTYPE_LIKELIHOOD_PENALTY;
                }
                final double bestLikelihood = 0.0; // the best becomes zero.
                maxAlternative.put(read, bestLikelihood);
                for (final Map.Entry<Haplotype, Allele> entry : alleleVersions.entrySet()) {
                    final Allele a = entry.getValue();
                    final Double relativeLikelihoodO = existingLikelihoods.get(a);
                    final double relativeLikelihood = relativeLikelihoodO == null || Double.isInfinite(relativeLikelihoodO) ? worstRelativeLikelihood : relativeLikelihoodO;
                    final double likelihood = relativeLikelihood - bestRelativeLikelihood + bestLikelihood;
                    if (likelihood > 0)
                        throw new IllegalStateException("Likelihood larger than 1 with read " + read.getReadName());
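This block re-bases each read's per-allele log likelihoods so that the best allele scores exactly 0 (the log of 1) and every other allele is shifted by the same amount, which is why any positive value afterwards indicates a bug. A minimal sketch of that re-basing with made-up numbers:

import java.util.LinkedHashMap;
import java.util.Map;

public class RebaseLikelihoods {
    public static void main(String[] args) {
        // hypothetical per-allele log10 likelihoods for a single read
        final Map<String, Double> likelihoods = new LinkedHashMap<>();
        likelihoods.put("REF", -1.2);
        likelihoods.put("ALT", -0.4);

        final double best = likelihoods.values().stream()
                .mapToDouble(Double::doubleValue).max().orElse(0.0);

        // shift everything so the best allele sits at 0.0 and the rest are <= 0.0
        likelihoods.replaceAll((allele, v) -> v - best);

        System.out.println(likelihoods);  // REF -> -0.8, ALT -> 0.0 (up to floating-point rounding)
    }
}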

                    homCount += 1;
                    continue;
                }

                //get alternate allele for each sample
                final Allele a1 = g.getAllele(0);
                final Allele a2 = g.getAllele(1);
                if (a2.isNonReference()) {
                    final int[] idxVector = vc.getGLIndecesOfAlternateAllele(a2);
                    idxAA = idxVector[0];
                    idxAB = idxVector[1];
                    idxBB = idxVector[2];
                }
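getGLIndecesOfAlternateAllele appears to return the positions of the A/A, A/B and B/B genotypes (A the reference, B the alternate) inside the flattened diploid likelihood array. The VCF ordering rule places the diploid genotype with allele indices j <= k at position k*(k+1)/2 + j, so against the reference (index 0) the three indices for an alternate allele with index b work out as in this sketch:

public class DiploidGLIndices {
    // VCF-spec ordering for diploid genotype likelihoods: genotype (j,k), j <= k,
    // lives at index k*(k+1)/2 + j.
    static int glIndex(final int j, final int k) {
        return k * (k + 1) / 2 + j;
    }

    public static void main(String[] args) {
        final int b = 2;  // hypothetical index of the alternate allele in the variant's allele list
        final int idxAA = glIndex(0, 0);  // hom-ref
        final int idxAB = glIndex(0, b);  // het (ref/alt)
        final int idxBB = glIndex(b, b);  // hom-alt
        System.out.println(idxAA + " " + idxAB + " " + idxBB);  // 0 3 5 for b = 2; for b = 1 it is the familiar 0 1 2
    }
}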

    public Allele getBottomAllele() {
        return bottom;
    }

    public void swapAlleles() {
        Allele tmp = top;
        top = bottom;
        bottom = tmp;
    }

        // If the allele list passed is empty there is no effect.
        result.addMissingAlleles(Collections.EMPTY_LIST,Double.NEGATIVE_INFINITY);
        testLikelihoodMatrixQueries(samples,result,originalLikelihoods);

        final Allele newOne;
        final Allele newTwo;
        final Allele newThree;

        // We add a single missing.
        result.addMissingAlleles(Arrays.asList(newOne = Allele.create("ACCCCCAAAATTTAAAGGG".getBytes(),false)),-12345.6);
        Assert.assertEquals(result.alleleCount(), original.alleleCount() + 1);
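The test builds brand-new non-reference alleles from raw byte arrays before adding them to the likelihoods object. Constructing and inspecting alleles with the htsjdk factory methods looks like this (sequences chosen arbitrarily):

import htsjdk.variant.variantcontext.Allele;

public class AlleleCreateSketch {
    public static void main(String[] args) {
        final Allele fromBytes = Allele.create("ACCCCCAAAATTTAAAGGG".getBytes(), false);
        final Allele fromString = Allele.create("ACG", true);

        System.out.println(fromBytes.getBaseString() + " len=" + fromBytes.length()
                + " isRef=" + fromBytes.isReference());   // non-reference insertion-style allele
        System.out.println(fromString.getBaseString() + " isRef=" + fromString.isReference());
    }
}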

            Assert.assertTrue(sIndex >= 0);
            Assert.assertTrue(sIndex < sampleCount);
            final int sampleReadCount = sampleToReads.get(sample).size();
            final ReadLikelihoods.Matrix<Allele> sampleLikelihoods = subject.sampleMatrix(sIndex);
            for (int a = 0; a < alleleCount; a++) {
                final Allele allele = alleleList.alleleAt(a);
                final int aIndex = subject.alleleIndex(allele);
                Assert.assertEquals(aIndex >= 0,alleleWithLikelihoodsSet.contains(allele));
                Assert.assertTrue(aIndex < alleleCount);
                if (aIndex == -1) continue;
                for (int r = 0; r < sampleReadCount; r++) {
