Package org.broadinstitute.gatk.utils.exceptions

Examples of org.broadinstitute.gatk.utils.exceptions.UserException$IncompatibleReadFiltersException


            // only operate at the start of events
            if ( mergedVC == null )
                continue;

            if ( mergedVC.hasAllele(GATKVariantContextUtils.NON_REF_SYMBOLIC_ALLELE) )
                throw new UserException("CombineVariants should not be used to merge gVCFs produced by the HaplotypeCaller; use CombineGVCFs instead");

            final VariantContextBuilder builder = new VariantContextBuilder(mergedVC);
            // re-compute chromosome counts
            VariantContextUtils.calculateChromosomeCounts(builder, false);
View Full Code Here


            }
            for ( String sample : header.getValue().getGenotypeSamples() ) {
                if ( ! metaDataFile.getAbsolutePath().endsWith(".fam") ) {
                    Map<String,String> mVals = sampleMetaValues.get(sample);
                    if ( mVals == null ) {
                        throw new UserException("No metadata provided for sample "+sample);
                    }
                    if ( ! mVals.containsKey("phenotype") ) {
                        throw new UserException("No phenotype data provided for sample "+sample);
                    }
                    String fid = mVals.containsKey("fid") ? mVals.get("fid") : String.format("dummy_%d",++dummyID);
                    String pid = mVals.containsKey("dad") ? mVals.get("dad") : String.format("dummy_%d",++dummyID);
                    String mid = mVals.containsKey("mom") ? mVals.get("mom") : String.format("dummy_%d",++dummyID);
                    String sex = mVals.containsKey("sex") ? mVals.get("sex") : "3";
                    String pheno = mVals.get("phenotype");
                    outFam.printf("%s\t%s\t%s\t%s\t%s\t%s%n",fid,sample,pid,mid,sex,pheno);
                } else {
                    // Even if a .fam file is provided, the .bed file must not diverge from it, since that
                    // could produce a malformed PLINK trio. Fail fast if the VCF contains any extra sample.
                    if ( ! sampleMetaValues.containsKey(sample) ) {
                        throw new UserException("No metadata provided for sample "+sample);
                    }
                    Map<String,String> mVals = sampleMetaValues.get(sample);
                    String fid = mVals.containsKey("fid") ? mVals.get("fid") : String.format("dummy_%d",++dummyID);
                    String pid = mVals.containsKey("dad") ? mVals.get("dad") : String.format("dummy_%d",++dummyID);
                    String mid = mVals.containsKey("mom") ? mVals.get("mom") : String.format("dummy_%d",++dummyID);
View Full Code Here
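For orientation, the fragment above writes one six-column PLINK pedigree line per sample (FID, IID, PID, MID, SEX, PHENOTYPE), falling back to unique "dummy_N" placeholders for missing family/parent fields and to "3" when sex is absent. Below is a minimal, self-contained sketch of that line-building step; the class name, sample IDs, and metadata values are illustrative only and not taken from GATK.

import java.io.PrintStream;
import java.util.Map;

public class FamLineSketch {
    private static int dummyID = 0;

    static void writeFamLine(PrintStream out, String sample, Map<String, String> mVals) {
        // Mirror the fragment: substitute unique dummy IDs for missing family/parent entries.
        String fid   = mVals.containsKey("fid") ? mVals.get("fid") : String.format("dummy_%d", ++dummyID);
        String pid   = mVals.containsKey("dad") ? mVals.get("dad") : String.format("dummy_%d", ++dummyID);
        String mid   = mVals.containsKey("mom") ? mVals.get("mom") : String.format("dummy_%d", ++dummyID);
        String sex   = mVals.getOrDefault("sex", "3");
        String pheno = mVals.get("phenotype");   // presence is validated by the caller in the fragment
        out.printf("%s\t%s\t%s\t%s\t%s\t%s%n", fid, sample, pid, mid, sex, pheno);
    }

    public static void main(String[] args) {
        // Illustrative sample names and metadata values.
        writeFamLine(System.out, "NA12878",
                     Map.of("fid", "FAM01", "dad", "NA12891", "mom", "NA12892",
                            "sex", "2", "phenotype", "1"));
    }
}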

            return 0;
        }
        try {
            validateVariantSite(vc,ref,context);
        } catch (TribbleException e) {
            throw new UserException("Input VCF file is invalid; we cannot guarantee the resulting ped file. "+
            "Please run ValidateVariants for more detailed information. This error is: "+e.getMessage());
        }

        String refOut;
        String altOut;
View Full Code Here

        if ( (o instanceof String) ) {
            return Double.parseDouble((String) o);
        } else if ( (o instanceof Double) ) {
            return (Double) o;
        } else {
            throw new UserException("Allele frequency appears to be neither String nor Double. Please check the header of your VCF.");
        }
    }
View Full Code Here
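The fragment above coerces an allele-frequency value that may arrive either as a raw String or as an already-typed Double. The following is a minimal, self-contained sketch of the same coercion, plus an illustrative List case for multi-allelic records that is not in the original; IllegalArgumentException stands in for GATK's UserException so the sketch compiles on its own.

import java.util.List;

public class AlleleFrequencySketch {
    static double asDouble(Object o) {
        if (o instanceof String) {
            return Double.parseDouble((String) o);          // raw text, e.g. straight from the VCF line
        } else if (o instanceof Double) {
            return (Double) o;                              // already decoded by the VCF codec
        } else if (o instanceof List && !((List<?>) o).isEmpty()) {
            return asDouble(((List<?>) o).get(0));          // illustrative: first ALT allele only
        }
        throw new IllegalArgumentException("Allele frequency is neither String nor Double: " + o);
    }

    public static void main(String[] args) {
        System.out.println(asDouble("0.37"));   // String form
        System.out.println(asDouble(0.37));     // Double form
    }
}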

        try {
            if ( metaDataFile.getAbsolutePath().endsWith(".fam") ) {
                for ( String line : new XReadLines(metaDataFile) ) {
                    String[] famSplit = line.split("\\s+");
                    if ( famSplit.length != 6 ) {
                        throw new UserException("Line of the fam file is malformatted. Expected 6 entries. Line is "+line);
                    }
                    String sid = famSplit[1];
                    String fid = famSplit[0];
                    String dad = famSplit[2];   // PLINK .fam column 3 is the paternal ID
                    String mom = famSplit[3];   // PLINK .fam column 4 is the maternal ID
                    String sex = famSplit[4];
                    String pheno = famSplit[5];
                    HashMap<String,String> values = new HashMap<String, String>();
                    values.put("mom",mom);
                    values.put("dad",dad);
                    values.put("fid",fid);
                    values.put("sex",sex);
                    values.put("phenotype",pheno);
                    metaValues.put(sid,values);
                }
            } else {
                for ( String line : new XReadLines(metaDataFile) ) {
                    logger.debug(line);
                    String[] split = line.split("\\s+");
                    String sampleID = split[0];
                    String keyVals = split[1];
                    HashMap<String,String> values = new HashMap<String, String>();
                    for ( String kvp : keyVals.split(";") ) {
                        String[] kvp_split = kvp.split("=");
                        values.put(kvp_split[0],kvp_split[1]);
                    }
                    metaValues.put(sampleID,values);
                }
            }
        } catch (FileNotFoundException e) {
            throw new UserException("Meta data file not found: "+metaDataFile.getAbsolutePath(),e);
        }

        return metaValues;
    }
View Full Code Here
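The fragment above accepts sample metadata in two layouts: a standard six-column PLINK .fam file (FID IID PID MID SEX PHENOTYPE), or a simpler two-column format of sample ID followed by a semicolon-separated key=value list. The sketch below parses one line of the second layout; the class name and sample values are illustrative only.

import java.util.HashMap;
import java.util.Map;

public class MetaLineSketch {
    // Parses a line of the form: "<sampleID> <key>=<value>;<key>=<value>;..."
    static Map<String, String> parseMetaLine(String line) {
        String[] split = line.split("\\s+");
        Map<String, String> values = new HashMap<>();
        for (String kvp : split[1].split(";")) {
            String[] kv = kvp.split("=", 2);                // tolerate '=' inside the value
            values.put(kv[0], kv.length > 1 ? kv[1] : "");
        }
        return values;
    }

    public static void main(String[] args) {
        System.out.println(parseMetaLine("NA12878 fid=FAM01;dad=NA12891;mom=NA12892;sex=2;phenotype=1"));
    }
}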

        samples.removeAll(XLsamplesFromFile);
        samples.removeAll(XLsampleNames);
        NO_SAMPLES_SPECIFIED = NO_SAMPLES_SPECIFIED && XLsampleNames.isEmpty() && XLsamplesFromFile.isEmpty();

        if ( samples.size() == 0 && !NO_SAMPLES_SPECIFIED )
            throw new UserException("All samples requested to be included were also requested to be excluded.");

        if ( ! NO_SAMPLES_SPECIFIED )
            for ( String sample : samples )
            logger.info("Including sample '" + sample + "'");
View Full Code Here
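The fragment above resolves the final sample set by removing every excluded sample from the included set and treating an empty result as a user error when samples were explicitly requested. Below is a minimal, simplified sketch of that set arithmetic; the names are hypothetical and IllegalArgumentException stands in for UserException.

import java.util.Set;
import java.util.TreeSet;

public class SampleSelectionSketch {
    static Set<String> resolve(Set<String> included, Set<String> excluded) {
        Set<String> samples = new TreeSet<>(included);
        samples.removeAll(excluded);
        boolean noSamplesSpecified = included.isEmpty();    // simplification of NO_SAMPLES_SPECIFIED
        if (samples.isEmpty() && !noSamplesSpecified) {
            throw new IllegalArgumentException(
                    "All samples requested to be included were also requested to be excluded.");
        }
        return samples;
    }

    public static void main(String[] args) {
        System.out.println(resolve(Set.of("NA12878", "NA12891"), Set.of("NA12891")));   // prints [NA12878]
    }
}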

                        }
                    }
                } catch (IllegalArgumentException e) {
                    /*The IAE thrown by htsjdk already includes an informative error message ("Invalid JEXL
                      expression detected...")*/
                    throw new UserException(e.getMessage());
                }
                if ( !failedJexlMatch &&
                        !justRead &&
                        ( !SELECT_RANDOM_FRACTION || GenomeAnalysisEngine.getRandomGenerator().nextDouble() < fractionRandom ) ) {
                    vcfWriter.add(sub);
View Full Code Here
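The fragment above gates each record that survives the JEXL filters behind an optional random-fraction test: the record is written only if a uniform draw falls below fractionRandom. A minimal sketch of that gate, with a fixed seed purely so the example is reproducible:

import java.util.Random;

public class RandomFractionSketch {
    public static void main(String[] args) {
        Random rng = new Random(42);        // fixed seed for reproducibility in this sketch only
        double fractionRandom = 0.1;
        int emitted = 0;
        for (int record = 0; record < 10_000; record++) {
            if (rng.nextDouble() < fractionRandom) {
                emitted++;                  // corresponds to vcfWriter.add(sub) in the fragment
            }
        }
        System.out.println("emitted " + emitted + " of 10000 records");
    }
}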

        return mostSignificantEffect.getAnnotations();
    }

    private void validateRodBinding ( RodBinding<VariantContext> snpEffRodBinding ) {
        if ( snpEffRodBinding == null || ! snpEffRodBinding.isBound() ) {
            throw new UserException("The SnpEff annotator requires that a SnpEff VCF output file be provided " +
                                    "as a rodbinding on the command line via the --snpEffFile option, but " +
                                    "no SnpEff rodbinding was found.");
        }
    }
View Full Code Here

        }
    }

    private void checkSnpEffVersionAndCommandLine( final VCFHeaderLine snpEffVersionLine, final VCFHeaderLine snpEffCommandLine ) {
        if ( snpEffVersionLine == null || snpEffVersionLine.getValue() == null || snpEffVersionLine.getValue().trim().length() == 0 ) {
            throw new UserException(String.format("Could not find a %s entry in the VCF header for the SnpEff input file, " +
                                                  "and so could not verify that the file was generated by a supported version of SnpEff (%s)",
                                                  SNPEFF_VCF_HEADER_VERSION_LINE_KEY, supportedSnpEffVersionsString()));
        }

        if ( snpEffCommandLine == null || snpEffCommandLine.getValue() == null || snpEffCommandLine.getValue().trim().length() == 0 ) {
            throw new UserException(String.format("Could not find a %s entry in the VCF header for the SnpEff input file, " +
                                                  "which should be added by all supported versions of SnpEff (%s)",
                                                  SNPEFF_VCF_HEADER_COMMAND_LINE_KEY, supportedSnpEffVersionsString()));
        }

        String snpEffVersionString = snpEffVersionLine.getValue().replaceAll("\"", "").split(" ")[0];

        if ( ! isSupportedSnpEffVersion(snpEffVersionString, snpEffCommandLine.getValue()) ) {
            throw new UserException(String.format("The version of SnpEff used to generate the SnpEff input file (%s) " +
                                                  "is not currently supported by the GATK, and was not run in GATK " +
                                                  "compatibility mode. Supported versions are: %s",
                                                  snpEffVersionString, supportedSnpEffVersionsString()));
        }
    }
View Full Code Here
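checkSnpEffVersionAndCommandLine requires both a SnpEff version line and a SnpEff command line in the VCF header, then keeps only the first whitespace-delimited token of the unquoted version value for the compatibility check. The sketch below reproduces just that string manipulation; the sample header value is illustrative of the quoted form SnpEff typically writes and is not taken from this fragment.

public class SnpEffVersionSketch {
    // Strip the quotes around the header value and keep the leading version token,
    // exactly as the fragment does with snpEffVersionLine.getValue().
    static String versionOf(String headerValue) {
        return headerValue.replaceAll("\"", "").split(" ")[0];
    }

    public static void main(String[] args) {
        // Illustrative value, e.g. from a header line of the form ##SnpEffVersion="3.6c (build ...)"
        System.out.println(versionOf("\"3.6c (build 2014-05-20), by Pablo Cingolani\""));   // prints 3.6c
    }
}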

        return rhs;
    }

    private void fail() {
        if ( exceptionToThrow.equals("UserException") ) {
            throw new UserException("UserException");
        } else if ( exceptionToThrow.equals("NullPointerException") ) {
            throw new NullPointerException();
        } else if ( exceptionToThrow.equals("ReviewedGATKException") ) {
            throw new ReviewedGATKException("ReviewedGATKException");
        } else if ( exceptionToThrow.equals("SamError1") ) {
View Full Code Here
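fail() is a test hook that throws whichever exception type the exceptionToThrow argument names, so that GATK's error handling can be exercised deliberately. A minimal sketch of the same dispatch written as a switch, with standard JDK exceptions standing in for the GATK-specific types:

public class FailSketch {
    static void fail(String exceptionToThrow) {
        switch (exceptionToThrow) {
            case "UserException":         throw new RuntimeException("UserException");              // stand-in
            case "NullPointerException":  throw new NullPointerException();
            case "ReviewedGATKException": throw new IllegalStateException("ReviewedGATKException"); // stand-in
            default:                      throw new IllegalArgumentException("Unknown exception name: " + exceptionToThrow);
        }
    }

    public static void main(String[] args) {
        try {
            fail("NullPointerException");
        } catch (NullPointerException expected) {
            System.out.println("caught expected NullPointerException");
        }
    }
}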
