Package org.broadinstitute.gatk.engine.contexts

Examples of org.broadinstitute.gatk.engine.contexts.AlignmentContext
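
A minimal, self-contained sketch of the class's basic surface, before the excerpts below. It assumes the GATK 3.x engine API as it appears in those excerpts (the two-argument AlignmentContext constructor, ReadBackedPileupImpl, getLocation() and getBasePileup()); the GenomeLocParser construction and the contig name, length, and coordinates are illustrative assumptions.

    import htsjdk.samtools.SAMSequenceDictionary;
    import htsjdk.samtools.SAMSequenceRecord;
    import java.util.Collections;
    import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
    import org.broadinstitute.gatk.utils.GenomeLoc;
    import org.broadinstitute.gatk.utils.GenomeLocParser;
    import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;

    public class AlignmentContextSketch {
        public static void main(String[] args) {
            // A one-contig dictionary lets us build loci without touching a reference file
            final SAMSequenceDictionary dict = new SAMSequenceDictionary(
                    Collections.singletonList(new SAMSequenceRecord("20", 63025520)));
            final GenomeLocParser parser = new GenomeLocParser(dict);
            final GenomeLoc site = parser.createGenomeLoc("20", 10000000, 10000000);

            // An AlignmentContext pairs a locus with the pileup of reads covering it;
            // here the pileup is empty, exactly as in the traversal excerpts below
            final AlignmentContext context = new AlignmentContext(site, new ReadBackedPileupImpl(site));
            System.out.println(context.getLocation() + " depth=" + context.getBasePileup().depthOfCoverage());
        }
    }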


                                            Map<String,AlignmentContext> contexts) {
        // Get the reference base from the VCF ROD or, failing that, from the reference itself
        if (UAC.referenceSampleName == null)
            return null;

        AlignmentContext context = contexts.get(UAC.referenceSampleName);
        ArrayList<Allele> trueReferenceAlleles = new ArrayList<Allele>();

        VariantContext referenceSampleVC;

        // Without both a tracker and a pileup for the reference sample there is nothing to look up
        if (tracker != null && context != null)
            referenceSampleVC = tracker.getFirstValue(UAC.referenceSampleRod, context.getLocation());
        else
            return null;

        // No ROD record at this site: fall back to the reference base as the only true allele
        if (referenceSampleVC == null) {
            trueReferenceAlleles.add(Allele.create(ref.getBase(), true));
            // ... (excerpt truncated)
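
The fallback branch above turns the reference base into an htsjdk reference Allele. A tiny standalone sketch of just that call; the base value stands in for ref.getBase() in the excerpt:

    import htsjdk.variant.variantcontext.Allele;
    import java.util.ArrayList;
    import java.util.List;

    public class RefAlleleFallback {
        public static void main(String[] args) {
            final byte refBase = (byte) 'A'; // stand-in for ref.getBase() in the excerpt
            final List<Allele> trueReferenceAlleles = new ArrayList<Allele>();
            trueReferenceAlleles.add(Allele.create(refBase, true)); // true marks it as the reference allele
            System.out.println(trueReferenceAlleles); // reference alleles print with a trailing '*'
        }
    }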


        HashMap<String, ErrorModel> perLaneErrorModels = getPerLaneErrorModels(tracker, ref, contexts);
        if (perLaneErrorModels == null && UAC.referenceSampleName != null)
            return null;

        // Single-pool mode: merge every sample's context into one pileup under a dummy sample name
        if (UAC.TREAT_ALL_READS_AS_SINGLE_POOL) {
            AlignmentContext mergedContext = AlignmentContextUtils.joinContexts(contexts.values());
            Map<String,AlignmentContext> newContext = new HashMap<String,AlignmentContext>();
            newContext.put(DUMMY_SAMPLE_NAME, mergedContext);
            contexts = newContext;
        }
        if (contextType == AlignmentContextUtils.ReadOrientation.COMPLETE) {
            // ... (excerpt truncated)
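
A hedged sketch of the single-pool merge above, continuing inside the first sketch's main(); joinContexts is used exactly as in the excerpt, but the two empty per-sample contexts are illustrative:

            // Continuing inside the first sketch's main(): two per-sample contexts at the same site
            // (java.util.Arrays and AlignmentContextUtils imports assumed)
            final AlignmentContext sampleA = new AlignmentContext(site, new ReadBackedPileupImpl(site));
            final AlignmentContext sampleB = new AlignmentContext(site, new ReadBackedPileupImpl(site));

            // joinContexts concatenates the pileups into a single context, which the pooled
            // caller above then files under DUMMY_SAMPLE_NAME as the only entry in the map
            final AlignmentContext merged = AlignmentContextUtils.joinContexts(java.util.Arrays.asList(sampleA, sampleB));
            System.out.println(merged.getBasePileup().depthOfCoverage()); // 0 here: both pileups are empty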

                                                                Map<String, AlignmentContext> contexts) {
        VariantContext refVC = getTrueAlleles(tracker, ref, contexts);

        // Build the error model for this site from the reference sample, keeping it stratified per lane
        AlignmentContext refContext = null;
        if (UAC.referenceSampleName != null)
            refContext = contexts.get(UAC.referenceSampleName);

        ReadBackedPileup refPileup = null;
        if (refContext != null) {
            HashMap<String, ErrorModel> perLaneErrorModels = new HashMap<String, ErrorModel>();
            refPileup = refContext.getBasePileup();

            // Collect lane IDs, or collapse to a single dummy lane when lane info is pooled or ignored
            Set<String> laneIDs = new TreeSet<String>();
            if (UAC.TREAT_ALL_READS_AS_SINGLE_POOL || UAC.IGNORE_LANE_INFO)
                laneIDs.add(DUMMY_LANE);
            else
                // ... (excerpt truncated)
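
The branch above either collapses everything into one synthetic lane or keeps one error model per real lane. A plain-Java sketch of that keying decision; DUMMY_LANE, the two flags, and the lane IDs are stand-ins for the excerpt's fields:

    import java.util.Set;
    import java.util.TreeSet;

    public class LaneKeying {
        private static final String DUMMY_LANE = "Lane1"; // stand-in for the excerpt's constant

        public static Set<String> laneIDs(boolean singlePool, boolean ignoreLaneInfo, Set<String> realLaneIDs) {
            final Set<String> laneIDs = new TreeSet<String>();
            if (singlePool || ignoreLaneInfo)
                laneIDs.add(DUMMY_LANE);       // one error model for the whole pool
            else
                laneIDs.addAll(realLaneIDs);   // one error model per lane seen in the pileup
            return laneIDs;
        }
    }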

        HashMap<String, Integer> consensusIndelStrings = new HashMap<String, Integer>();

        int insCount = 0, delCount = 0;
        // Quick check of the total number of indels in the pileup
        for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) {
            final AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);

            final ReadBackedPileup indelPileup = context.getBasePileup();
            insCount += indelPileup.getNumberOfInsertionsAfterThisElement();
            delCount += indelPileup.getNumberOfDeletionsAfterThisElement();
        }

        // Too little indel evidence across all samples: bail out early
        if ( insCount < minIndelCountForGenotyping && delCount < minIndelCountForGenotyping )
            return Collections.emptyMap();

        for (Map.Entry<String, AlignmentContext> sample : contexts.entrySet()) {
            // TODO -- warning: this can duplicate an expensive partition
            AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);

            final ReadBackedPileup indelPileup = context.getBasePileup();

            final int nIndelReads = indelPileup.getNumberOfInsertionsAfterThisElement()
                    + indelPileup.getNumberOfDeletionsAfterThisElement();
            final int nReadsOverall = indelPileup.getNumberOfElements();

            // Skip samples with no indel reads, or with too small a fraction of indel reads
            if ( nIndelReads == 0 || (nIndelReads / (1.0 * nReadsOverall)) < minFractionInOneSample ) {
                // ... (excerpt truncated)
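
The per-sample guard above combines an absolute count with a fractional threshold. A small worked sketch of that arithmetic; the thresholds here are illustrative, not GATK defaults:

    public class IndelGate {
        public static boolean enoughIndelEvidence(int nIndelReads, int nReadsOverall,
                                                  double minFractionInOneSample) {
            // Mirrors the excerpt: no indel reads, or too small a fraction of the pileup, fails the gate
            return nIndelReads != 0 && (nIndelReads / (1.0 * nReadsOverall)) >= minFractionInOneSample;
        }

        public static void main(String[] args) {
            // 3 indel reads out of 40 = 0.075, which passes a 5% threshold but not a 10% one
            System.out.println(enoughIndelEvidence(3, 40, 0.05)); // true
            System.out.println(enoughIndelEvidence(3, 40, 0.10)); // false
        }
    }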

        // For each sample, get genotype likelihoods based on its pileup, compute prior
        // likelihoods on the haplotypes, and initialize the haplotype likelihood matrix with them
        for (Map.Entry<String, AlignmentContext> sample : contexts.entrySet()) {
            AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);

            if (!perReadAlleleLikelihoodMap.containsKey(sample.getKey())) {
                // No likelihoods have been computed for this sample at this site yet
                perReadAlleleLikelihoodMap.put(sample.getKey(), new PerReadAlleleLikelihoodMap());
            }
            final ReadBackedPileup pileup = context.getBasePileup();
            if (pileup != null) {
                final GenotypeBuilder b = new GenotypeBuilder(sample.getKey());
                final double[] genotypeLikelihoods = pairModel.computeDiploidReadHaplotypeLikelihoods(
                        pileup, haplotypeMap, ref, eventLength,
                        perReadAlleleLikelihoodMap.get(sample.getKey()),
                        UAC.getSampleContamination().get(sample.getKey()));
                b.PL(genotypeLikelihoods);
                b.alleles(noCall);
                // ... (excerpt truncated)
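
The excerpt fills a GenotypeBuilder with log10 likelihoods and no-call alleles. A self-contained htsjdk sketch of that pattern; the sample name and likelihood values are illustrative:

    import htsjdk.variant.variantcontext.Allele;
    import htsjdk.variant.variantcontext.Genotype;
    import htsjdk.variant.variantcontext.GenotypeBuilder;
    import java.util.Arrays;
    import java.util.List;

    public class NoCallGenotype {
        public static void main(String[] args) {
            final List<Allele> noCall = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
            final double[] log10Likelihoods = { -0.1, -1.5, -3.2 }; // illustrative diploid GLs

            // PL(double[]) takes log10 genotype likelihoods and stores them as phred-scaled PLs
            final Genotype g = new GenotypeBuilder("NA12878")
                    .PL(log10Likelihoods)
                    .alleles(noCall)
                    .make();
            System.out.println(g); // a ./. genotype carrying PL values
        }
    }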

        // For each sample that we haven't examined yet, fold its depth into the reference confidence
        final int sampleCount = samples.sampleCount();
        for (int i = 0; i < sampleCount; i++) {
            final String sample = samples.sampleAt(i);
            final AlignmentContext context = contexts.get(sample);
            if ( ignoreCoveredSamples && context != null )
                continue;
            final int depth = context == null ? 0 : context.getBasePileup().depthOfCoverage();
            log10POfRef += estimateLog10ReferenceConfidenceForOneSample(depth, theta);
        }

        return new VariantCallContext(vc,
                QualityUtils.phredScaleLog10CorrectRate(log10POfRef) >= configuration.genotypeArgs.STANDARD_CONFIDENCE_FOR_CALLING,
                false);
    }
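
The return statement compares a phred-scaled confidence against the calling threshold. As a hedged reading of phredScaleLog10CorrectRate (consistent with its name, but an assumption, not the verified GATK implementation), it converts a log10 probability of being correct into the phred scale of the complementary error probability:

    public class PhredFromLog10Correct {
        // Assumed semantics: QUAL = -10 * log10(1 - 10^log10POfCorrect)
        public static double phredScaleLog10CorrectRate(double log10POfCorrect) {
            final double pCorrect = Math.pow(10.0, log10POfCorrect);
            return -10.0 * Math.log10(1.0 - pCorrect);
        }

        public static void main(String[] args) {
            // P(correct) = 0.999  ->  P(error) = 0.001  ->  QUAL = 30
            System.out.println(phredScaleLog10CorrectRate(Math.log10(0.999))); // ~30.0
        }
    }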

            Set<List<GATKSAMRecord>> readSets = uniqueReadSets(readsAtLoc((GATKSAMRecord) read, iter));
            if ( DEBUG ) logger.debug(String.format("*** TraverseDuplicates.traverse at %s with %d read sets", site, readSets.size()));

            // Jump forward in the reference to this locus, pairing it with an empty pileup
            AlignmentContext locus = new AlignmentContext(site, new ReadBackedPileupImpl(site));

            // Update the number of duplicate sets we've seen
            dataProvider.getShard().getReadMetrics().incrementNumIterations();

            // Actually call filter and map, accumulating the sum
            // ... (excerpt truncated)

            if ( done )
                return false;
            else {

                while( locusView.hasNext() ) {
                    final AlignmentContext locus = locusView.next();
                    final GenomeLoc location = locus.getLocation();

                    rememberLastLocusLocation(location);

                    // get all of the new reads that appear in the current pileup and add them
                    // to our list of reads, provided we haven't seen them before
                    final Collection<GATKSAMRecord> reads = locusView.getLIBS().transferReadsFromAllPreviousPileups();
                    for( final GATKSAMRecord read : reads ) {
                        // note that ActiveRegionShards span entire contigs, so this check is in some
                        // sense no longer necessary, as any read that appeared in the last shard would now
                        // by definition be on a different contig.  However, the logic here doesn't hurt anything
                        // and makes us robust should we decide to provide shards that don't fully span
                        // contigs at some point in the future
                        if ( ! appearedInLastShard(locOfLastReadAtTraversalStart, read) ) {
                            rememberLastReadLocation(read);
                            myReads.add(read);
                        }
                    }

                    // skip this location -- it's not part of our engine intervals
                    if ( outsideEngineIntervals(location) )
                        continue;

                    // we've moved across an interval boundary, so restart the profile
                    final boolean flushProfile = ! activityProfile.isEmpty()
                            && ( activityProfile.getContigIndex() != location.getContigIndex()
                            || location.getStart() != activityProfile.getStop() + 1);
                    final List<MapData> newActiveRegions = prepActiveRegionsForProcessing(walker, flushProfile, false, referenceOrderedDataView);

                    dataProvider.getShard().getReadMetrics().incrementNumIterations();

                    // create reference context. Note that if we have a pileup of "extended events", the context will
                    // hold the (longest) stretch of deleted reference bases (if deletions are present in the pileup).
                    final ReferenceContext refContext = referenceView.getReferenceContext(location);

                    // Iterate forward to get all reference ordered data covering this location
                    final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(locus.getLocation());

                    // Call the walker's isActive function for this locus and add the result to the list to be integrated later
                    addIsActiveResult(walker, tracker, refContext, locus);

                    maxReadsInMemory = Math.max(myReads.size(), maxReadsInMemory);
                    // ... (excerpt truncated)
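
The flushProfile condition above restarts the activity profile whenever the new locus is not the immediate successor of the profile's end. A small sketch of just that contiguity test; plain ints stand in for the GenomeLoc and profile state:

    public class ProfileContiguity {
        // Mirrors the excerpt: flush unless the profile is empty, the contig is unchanged,
        // and the new locus starts exactly one base past the profile's current stop
        public static boolean flushProfile(boolean profileEmpty, int profileContig, int profileStop,
                                           int locusContig, int locusStart) {
            return !profileEmpty
                    && (profileContig != locusContig || locusStart != profileStop + 1);
        }

        public static void main(String[] args) {
            System.out.println(flushProfile(false, 0, 100, 0, 101)); // false: contiguous, keep going
            System.out.println(flushProfile(false, 0, 100, 0, 150)); // true: gap, restart the profile
        }
    }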

            // only do this if the walker isn't done!
            final RodLocusView rodLocusView = (RodLocusView)locusView;
            final long nSkipped = rodLocusView.getLastSkippedBases();
            if ( nSkipped > 0 ) {
                final GenomeLoc site = rodLocusView.getLocOneBeyondShard();
                final AlignmentContext ac = new AlignmentContext(site, new ReadBackedPileupImpl(site), nSkipped);
                final M x = walker.map(null, null, ac);
                sum = walker.reduce(x, sum);
            }
        }
        // ... (excerpt truncated)
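
The excerpt above uses the three-argument constructor to attach a skipped-bases count to an otherwise empty context. Continuing inside the first sketch's main(), a one-line illustration (the count is arbitrary):

            // Record that 42 reference bases were skipped before this site (count is illustrative)
            final AlignmentContext ac = new AlignmentContext(site, new ReadBackedPileupImpl(site), 42);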

            return locusView.hasNext() && ! engine.exceedsRuntimeLimit();
        }

        @Override
        public MapData next() {
            final AlignmentContext locus = locusView.next();
            final GenomeLoc location = locus.getLocation();

            //logger.info("Pulling data from MapDataIterator at " + location);

            // create reference context. Note that if we have a pileup of "extended events", the context will
            // hold the (longest) stretch of deleted reference bases (if deletions are present in the pileup).
            // ... (excerpt truncated)
