Package: org.broadinstitute.gatk.utils.pileup

Examples of org.broadinstitute.gatk.utils.pileup.ReadBackedPileup


        // calculate the GLs
        ArrayList<SampleGenotypeData> GLs = new ArrayList<SampleGenotypeData>(contexts.size());
        for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) {
        // Down-sample with bias according to the contamination level (global or per file)
            ReadBackedPileup pileup = AlignmentContextUtils.stratify(sample.getValue(), contextType).getBasePileup();
            final Double contamination =  UAC.getSampleContamination().get(sample.getKey());
            if( contamination > 0.0 ) //no need to enter if no contamination reduction
                pileup = perReadAlleleLikelihoodMap.createPerAlleleDownsampledBasePileup(pileup, contamination);
            if ( useBAQedPileup )
                pileup = createBAQedPileup(pileup);
View Full Code Here


        final byte baseQual = (byte)30;

        final byte[] baseQuals = Utils.dupBytes(baseQual, readBases.length());
        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(readBases.getBytes(), baseQuals, readBases.length() + "M");
        final GenomeLoc loc = new UnvalidatingGenomeLoc("20",0,refOffset,refOffset);
        final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc,Collections.singletonList(read),readOffset);
        final VariantContextBuilder vcb = new VariantContextBuilder();
        final GenotypeBuilder gb = new GenotypeBuilder();
        final List<String> alleleStrings = new ArrayList<>( 1 + alternatives.length);
        alleleStrings.add(referenceAllele);
        alleleStrings.addAll(Arrays.asList(alternatives));
View Full Code Here

            read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
            reads.add(read);
        }

        // create a pileup with all reads having offset 0
        final ReadBackedPileup pileup = new ReadBackedPileupImpl(myLocation, reads, 0);
        Allele base_A = Allele.create(BaseUtils.Base.A.base);
        Allele base_C = Allele.create(BaseUtils.Base.C.base);
        Allele base_T = Allele.create(BaseUtils.Base.T.base);

        PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap();
        for ( final PileupElement e : pileup ) {
            for ( final Allele allele : Arrays.asList(base_A,base_C,base_T) ) {
                Double likelihood = allele == base_A ? -0.04 : -3.0;
                perReadAlleleLikelihoodMap.add(e,allele,likelihood);
            }
        }

        Assert.assertEquals(perReadAlleleLikelihoodMap.size(),pileup.depthOfCoverage());
        Assert.assertEquals(perReadAlleleLikelihoodMap.getAlleleStratifiedReadMap().keySet().size(),3);
        Map<Allele,List<GATKSAMRecord>> shouldBeAllA = perReadAlleleLikelihoodMap.getAlleleStratifiedReadMap();
        Assert.assertEquals(shouldBeAllA.get(base_A).size(),pileup.depthOfCoverage());
        Assert.assertEquals(shouldBeAllA.get(base_C).size(),0);
        Assert.assertEquals(shouldBeAllA.get(base_T).size(),0);
    }
View Full Code Here

        VariantContext vcCall = builder.make();

        if ( annotationEngine != null && !limitedContext ) { // limitedContext callers need to handle annotations on their own by calling their own annotationEngine
            // Note: we want to use the *unfiltered* and *unBAQed* context for the annotations
            final ReadBackedPileup pileup = rawContext.getBasePileup();
            stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(pileup);

            vcCall = annotationEngine.annotateContext(tracker, refContext, stratifiedContexts, vcCall, perReadAlleleLikelihoodMap);
        }

View Full Code Here

            read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
            reads.add(read);
        }

        // create a pileup with all reads having offset 0
        final ReadBackedPileup pileup = new ReadBackedPileupImpl(myLocation, reads, 0);
        Allele base_A = Allele.create(BaseUtils.Base.A.base);
        Allele base_C = Allele.create(BaseUtils.Base.C.base);
        Allele base_T = Allele.create(BaseUtils.Base.T.base);

        PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap();
        int idx = 0;
        for ( final PileupElement e : pileup ) {
            for ( final Allele allele : Arrays.asList(base_A,base_C,base_T) ) {
                Double likelihood;
                if ( idx % 2 == 0 )
                    likelihood = allele == base_A ? -0.04 : -3.0;
                else
                    likelihood = allele == base_C ? -0.04 : -3.0;
                perReadAlleleLikelihoodMap.add(e,allele,likelihood);
            }
            idx++;
        }

        Assert.assertEquals(perReadAlleleLikelihoodMap.size(),pileup.depthOfCoverage());
        Assert.assertEquals(perReadAlleleLikelihoodMap.getAlleleStratifiedReadMap().keySet().size(),3);
        Map<Allele,List<GATKSAMRecord>> halfAhalfC = perReadAlleleLikelihoodMap.getAlleleStratifiedReadMap();
        Assert.assertEquals(halfAhalfC.get(base_A).size(),pileup.depthOfCoverage()/2);
        Assert.assertEquals(halfAhalfC.get(base_C).size(),pileup.depthOfCoverage()/2);
        Assert.assertEquals(halfAhalfC.get(base_T).size(),0);

        // make sure the likelihoods are retrievable

        idx = 0;
        for ( final PileupElement e : pileup ) {
            Assert.assertTrue(perReadAlleleLikelihoodMap.containsPileupElement(e));
            Map<Allele,Double> likelihoods = perReadAlleleLikelihoodMap.getLikelihoodsAssociatedWithPileupElement(e);
            for ( final Allele allele : Arrays.asList(base_A,base_C,base_T) ) {
                Double expLik;
                if ( idx % 2 == 0 )
                    expLik = allele == base_A ? -0.04 : -3.0;
                else
                    expLik = allele == base_C ? -0.04 : -3.0;
                Assert.assertEquals(likelihoods.get(allele),expLik);
            }
            idx++;
        }

        // and test downsampling for good measure

        final List<GATKSAMRecord> excessReads = new LinkedList<GATKSAMRecord>();
        int prevSize = perReadAlleleLikelihoodMap.size();
        for ( int i = 0; i < 10 ; i++ ) {
            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myExcessRead" + i, 0, 1, readLength);
            final byte[] bases = Utils.dupBytes((byte)'A', readLength);
            bases[0] = (byte)(i % 2 == 0 ? 'A' : 'C'); // every other base is a C

            // set the read's bases and quals
            read.setReadBases(bases);
            read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
            for ( final Allele allele : Arrays.asList(base_A,base_C,base_T) ) {
                perReadAlleleLikelihoodMap.add(read,allele,allele==base_A ? -0.04 : -3.0);
            }
            Assert.assertEquals(perReadAlleleLikelihoodMap.size(),1+prevSize);
            prevSize = perReadAlleleLikelihoodMap.size();
        }

        Assert.assertEquals(perReadAlleleLikelihoodMap.size(),pileup.depthOfCoverage()+10);
        Assert.assertEquals(perReadAlleleLikelihoodMap.getAlleleStratifiedReadMap().get(base_A).size(),60);
        perReadAlleleLikelihoodMap.performPerAlleleDownsampling(0.1);
        Assert.assertEquals(perReadAlleleLikelihoodMap.size(),(int) (0.9*(pileup.depthOfCoverage()+10)));

        Map<Allele,List<GATKSAMRecord>> downsampledStrat = perReadAlleleLikelihoodMap.getAlleleStratifiedReadMap();
        Assert.assertEquals(downsampledStrat.get(base_A).size(),(int) (pileup.depthOfCoverage()/2) - 1);
        Assert.assertEquals(downsampledStrat.get(base_C).size(),(int) (pileup.depthOfCoverage()/2));
        Assert.assertEquals(downsampledStrat.get(base_T).size(),0);
    }
View Full Code Here

                   ref.getLocus().getStart(), ref.getLocus().getStart(), alleles).make();
        }

        if ( vc != null && annotationEngine != null ) {
            // Note: we want to use the *unfiltered* and *unBAQed* context for the annotations
            final ReadBackedPileup pileup = rawContext.getBasePileup();
            vc = annotationEngine.annotateContext(tracker, ref,  AlignmentContextUtils.splitContextBySampleName(pileup), vc);
        }

        return new VariantCallContext(vc, false);
    }
View Full Code Here

        switch (model) {
            case INDEL:
            case GENERALPLOIDYINDEL:

                final ReadBackedPileup pileup = rawContext.getBasePileup().getMappingFilteredPileup(configuration.MIN_BASE_QUALTY_SCORE);
                // don't call when there is no coverage
                if ( pileup.getNumberOfElements() == 0 && configuration.outputMode != OutputMode.EMIT_ALL_SITES  )
                    return null;

                // stratify the AlignmentContext and cut by sample
                return AlignmentContextUtils.splitContextBySampleName(pileup);
            case SNP:
View Full Code Here

        System.out.printf("%n");
    }

    private static Pair<Byte, Byte> combineBaseProbs(GenomeLocParser genomeLocParser,List<GATKSAMRecord> duplicates, int readOffset, int maxQScore) {
        GenomeLoc loc = genomeLocParser.createGenomeLoc(duplicates.get(0));
        ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, duplicates, readOffset);

        final boolean debug = false;

        // calculate base probs
        double[] qualSums = {0.0, 0.0, 0.0, 0.0};
        if ( debug ) print4BaseQuals("start", qualSums);

        for (PileupElement e : pileup ) {
            int baseIndex = e.getBaseIndex();
            byte qual = e.getQual();
            double pqual = QualityUtils.qualToProb(qual);
            for ( int j = 0; j < 4; j++) {
                qualSums[j] += Math.log10(j == baseIndex ?  pqual : (1 - pqual)/3);
            }

            if ( debug ) print4BaseQuals(String.format("%c Q%2d", e.getBase(), qual), qualSums);
        }
        if ( debug ) print4BaseQuals("final", qualSums);

        Pair<Byte, Byte> combined = baseProbs2BaseAndQual(qualSums, maxQScore);
        if ( debug ) System.out.printf("%s => %c Q%s%n", pileup.getPileupString('N'), (char)(byte)combined.getFirst(), combined.getSecond());

        return combined;
    }
View Full Code Here

        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 50);
        final int readLen = 100;

        for ( int pileupN = 0; pileupN < nPileupsToGenerate; pileupN++ ) {
            ReadBackedPileup rbp = ArtificialSAMUtils.createReadBackedPileup(header, loc, readLen, insertSize, pileupSize);
            pileups.add(rbp);
        }
    }
View Full Code Here

    }

    @Test(enabled = !DEBUG, dataProvider = "fragmentUtilsTest")
    public void testAsPileup(FragmentUtilsTest test) {
        for ( TestState testState : test.statesForPileup ) {
            ReadBackedPileup rbp = testState.pileup;
            FragmentCollection<PileupElement> fp = FragmentUtils.create(rbp);
            Assert.assertEquals(fp.getOverlappingPairs().size(), testState.expectedPairs);
            Assert.assertEquals(fp.getSingletonReads().size(), testState.expectedSingletons);
        }
    }
View Full Code Here

TOP

Related Classes of org.broadinstitute.gatk.utils.pileup.ReadBackedPileup

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.