reduce_reads.xml @ 28:6ef8eb568700 (draft)

Move GATK tool_dependency to a repository dependency, use 2 env vars: GATK2_NUM_THREADS and GATK2_NUM_CPU_THREADS for site threading values

author    Jim Johnson <jj@umn.edu>
date      Mon, 18 Feb 2013 15:48:46 -0600
parents   7533db8dfb5b
children  76f8ca47b810
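This changeset replaces the GATK tool_dependency with a repository dependency and pulls site threading values from the environment; the tool's command block below also references $GATK2_PATH and $GATK2_SITE_OPTIONS. A minimal sketch of the kind of env.sh a site (or the repository dependency) might provide is shown here; only the variable names come from this changeset and the wrapper, while the file location, paths and flag strings are assumptions::

    # Hypothetical site env.sh -- paths and values are placeholders, not part of this repository
    export GATK2_PATH=/opt/GenomeAnalysisTK-2.3                         # directory containing GenomeAnalysisTK.jar (assumed)
    export GATK2_SITE_OPTIONS=""                                        # extra site-wide GATK arguments, e.g. phone-home settings (assumed)
    export GATK2_NUM_THREADS="--num_threads 4"                          # data threads for walkers that accept -nt (assumed format and value)
    export GATK2_NUM_CPU_THREADS="--num_cpu_threads_per_data_thread 4"  # CPU threads for walkers that accept -nct (assumed format and value)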
<tool id="gatk2_reduce_reads" name="Reduce Reads" version="0.0.5"> <description>in BAM files</description> <requirements> <requirement type="package" version="2.3">gatk</requirement> <requirement type="package" version="0.1.18">samtools</requirement> </requirements> <command interpreter="python">gatk2_wrapper.py --max_jvm_heap_fraction "1" --stdout "${output_log}" -d "-I" "${reference_source.input_bam}" "${reference_source.input_bam.ext}" "gatk_input" #if str( $reference_source.input_bam.metadata.bam_index ) != "None": -d "" "${reference_source.input_bam.metadata.bam_index}" "bam_index" "gatk_input" ##hardcode galaxy ext type as bam_index #end if -p 'java -jar "\$GATK2_PATH/GenomeAnalysisTK.jar" -T "ReduceReads" -o "${output_bam}" ## \$GATK2_SITE_OPTIONS ## \$GATK2_NUM_THREADS ##-et "NO_ET" -K "/data/galaxy/appList/GenomeAnalysisTK-2.0-36-gf5c1c1a/gatk2_key_file" ##ET no phone home ##--num_threads 4 ##not supported yet ##-log "${output_log}" ##don't use this to log to file, instead directly capture stdout #if $reference_source.reference_source_selector != "history": -R "${reference_source.ref_file.fields.path}" #end if #if str($input_recal) != 'None': --BQSR "${input_recal}" #end if --disable_bam_indexing ' ##start standard gatk options #if $gatk_param_type.gatk_param_type_selector == "advanced": #for $pedigree in $gatk_param_type.pedigree: -p '--pedigree "${pedigree.pedigree_file}"' #end for #for $pedigree_string in $gatk_param_type.pedigree_string_repeat: -p '--pedigreeString "${pedigree_string.pedigree_string}"' #end for -p '--pedigreeValidationType "${gatk_param_type.pedigree_validation_type}"' #for $read_filter in $gatk_param_type.read_filter: -p '--read_filter "${read_filter.read_filter_type.read_filter_type_selector}" ###raise Exception( str( dir( $read_filter ) ) ) #for $name, $param in $read_filter.read_filter_type.iteritems(): #if $name not in [ "__current_case__", "read_filter_type_selector" ]: #if hasattr( $param.input, 'truevalue' ): ${param} #else: --${name} "${param}" #end if #end if #end for ' #end for #for $interval_count, $input_intervals in enumerate( $gatk_param_type.input_interval_repeat ): -d "--intervals" "${input_intervals.input_intervals}" "${input_intervals.input_intervals.ext}" "input_intervals_${interval_count}" #end for #for $interval_count, $input_intervals in enumerate( $gatk_param_type.input_exclude_interval_repeat ): -d "--excludeIntervals" "${input_intervals.input_exclude_intervals}" "${input_intervals.input_exclude_intervals.ext}" "input_exlude_intervals_${interval_count}" #end for -p '--interval_set_rule "${gatk_param_type.interval_set_rule}"' -p '--downsampling_type "${gatk_param_type.downsampling_type.downsampling_type_selector}"' #if str( $gatk_param_type.downsampling_type.downsampling_type_selector ) != "NONE": -p '--${gatk_param_type.downsampling_type.downsample_to_type.downsample_to_type_selector} "${gatk_param_type.downsampling_type.downsample_to_type.downsample_to_value}"' #end if -p ' --baq "${gatk_param_type.baq}" --baqGapOpenPenalty "${gatk_param_type.baq_gap_open_penalty}" ${gatk_param_type.use_original_qualities} --defaultBaseQualities "${gatk_param_type.default_base_qualities}" --validation_strictness "${gatk_param_type.validation_strictness}" --interval_merging "${gatk_param_type.interval_merging}" ${gatk_param_type.disable_experimental_low_memory_sharding} ${gatk_param_type.fix_misencoded_quality_scores} ${gatk_param_type.non_deterministic_random_seed} ' #for $rg_black_list_count, $rg_black_list in enumerate( 
$gatk_param_type.read_group_black_list_repeat ): #if $rg_black_list.read_group_black_list_type.read_group_black_list_type_selector == "file": -d "--read_group_black_list" "${rg_black_list.read_group_black_list_type.read_group_black_list}" "txt" "input_read_group_black_list_${rg_black_list_count}" #else -p '--read_group_black_list "${rg_black_list.read_group_black_list_type.read_group_black_list}"' #end if #end for #end if #if str( $reference_source.reference_source_selector ) == "history": -d "-R" "${reference_source.ref_file}" "${reference_source.ref_file.ext}" "gatk_input" #end if ##end standard gatk options ##start analysis specific options #if $analysis_param_type.analysis_param_type_selector == "advanced": -p ' #if $analysis_param_type.context_size.__str__.strip() != '': --context_size $analysis_param_type.context_size #end if #if $analysis_param_type.downsample_coverage.__str__.strip() != '': --downsample_coverage $analysis_param_type.downsample_coverage #end if #if $analysis_param_type.minimum_del_proportion_to_trigger_variant.__str__.strip() != '': --minimum_del_proportion_to_trigger_variant $analysis_param_type.minimum_del_proportion_to_trigger_variant #end if #if $analysis_param_type.minimum_mapping_quality.__str__.strip() != '': --minimum_mapping_quality $analysis_param_type.minimum_mapping_quality #end if #if $analysis_param_type.minimum_tail_qualities.__str__.strip() != '': --minimum_tail_qualities $analysis_param_type.minimum_tail_qualities #end if #if $analysis_param_type.minimum_base_quality_to_consider.__str__.strip() != '': --minimum_base_quality_to_consider $analysis_param_type.minimum_base_quality_to_consider #end if #if $analysis_param_type.minimum_alt_proportion_to_trigger_variant.__str__.strip() != '': --minimum_alt_proportion_to_trigger_variant $analysis_param_type.minimum_alt_proportion_to_trigger_variant #end if $analysis_param_type.allow_polyploid_reduction $analysis_param_type.dont_compress_read_names $analysis_param_type.dont_hardclip_low_qual_tails $analysis_param_type.dont_simplify_reads $analysis_param_type.dont_use_softclipped_bases $analysis_param_type.hard_clip_to_interval $analysis_param_type.dont_hardclip_adaptor_sequences ' #end if </command> <inputs> <param name="input_recal" type="data" format="csv" optional="true" label="Covariates table recalibration file" help="-BQSR,--BQSR &lt;recal_file&gt;" > <help>The input covariates table file which enables on-the-fly base quality score recalibration. Enables on-the-fly recalibrate of base qualities. The covariates tables are produced by the BaseQualityScoreRecalibrator tool. Please be aware that one should only run recalibration with the covariates file created on the same input bam(s). </help> </param> <conditional name="reference_source"> <param name="reference_source_selector" type="select" label="Choose the source for the reference list"> <option value="cached">Locally cached</option> <option value="history">History</option> </param> <when value="cached"> <param name="input_bam" type="data" format="bam" label="BAM file" help="-I,--input_file &lt;input_file&gt;"> <validator type="unspecified_build" /> <validator type="dataset_metadata_in_data_table" table_name="gatk2_picard_indexes" metadata_name="dbkey" metadata_column="dbkey" message="Sequences are not currently available for the specified build." /> <!-- fixme!!! 
this needs to be a select --> </param> <param name="ref_file" type="select" label="Using reference genome" help="-R,--reference_sequence &lt;reference_sequence&gt;" > <options from_data_table="gatk2_picard_indexes"> <filter type="data_meta" key="dbkey" ref="input_bam" column="dbkey"/> </options> <validator type="no_options" message="A built-in reference genome is not available for the build associated with the selected input file"/> </param> </when> <when value="history"> <param name="input_bam" type="data" format="bam" label="BAM file" help="-I,--input_file &lt;input_file&gt;" /> <param name="ref_file" type="data" format="fasta" label="Using reference file" help="-R,--reference_sequence &lt;reference_sequence&gt;"> <options> <filter type="data_meta" key="dbkey" ref="input_bam" /> </options> </param> </when> </conditional> <conditional name="gatk_param_type"> <param name="gatk_param_type_selector" type="select" label="Basic or Advanced GATK options"> <option value="basic" selected="True">Basic</option> <option value="advanced">Advanced</option> </param> <when value="basic"> <!-- Do nothing here --> </when> <when value="advanced"> <repeat name="pedigree" title="Pedigree file" help="-ped,--pedigree &lt;pedigree&gt;"> <param name="pedigree_file" type="data" format="txt" label="Pedigree files for samples"/> </repeat> <repeat name="pedigree_string_repeat" title="Pedigree string" help="-pedString,--pedigreeString &lt;pedigreeString&gt;"> <param name="pedigree_string" type="text" value="" label="Pedigree string for samples"/> </repeat> <param name="pedigree_validation_type" type="select" label="How strict should we be in validating the pedigree information" help="-pedValidationType,--pedigreeValidationType &lt;pedigreeValidationType&gt;"> <option value="STRICT" selected="True">STRICT</option> <option value="SILENT">SILENT</option> </param> <repeat name="read_filter" title="Read Filter" help="-rf,--read_filter &lt;read_filter&gt;"> <conditional name="read_filter_type"> <param name="read_filter_type_selector" type="select" label="Read Filter Type"> <option value="BadCigar">BadCigar</option> <option value="BadMate">BadMate</option> <option value="DuplicateRead">DuplicateRead</option> <option value="FailsVendorQualityCheck">FailsVendorQualityCheck</option> <option value="MalformedRead">MalformedRead</option> <option value="MappingQuality">MappingQuality</option> <option value="MappingQualityUnavailable">MappingQualityUnavailable</option> <option value="MappingQualityZero">MappingQualityZero</option> <option value="MateSameStrand">MateSameStrand</option> <option value="MaxInsertSize">MaxInsertSize</option> <option value="MaxReadLength" selected="True">MaxReadLength</option> <option value="MissingReadGroup">MissingReadGroup</option> <option value="NoOriginalQualityScores">NoOriginalQualityScores</option> <option value="NotPrimaryAlignment">NotPrimaryAlignment</option> <option value="Platform454">Platform454</option> <option value="Platform">Platform</option> <option value="PlatformUnit">PlatformUnit</option> <option value="ReadGroupBlackList">ReadGroupBlackList</option> <option value="ReadName">ReadName</option> <option value="ReadStrand">ReadStrand</option> <option value="ReassignMappingQuality">ReassignMappingQuality</option> <option value="Sample">Sample</option> <option value="SingleReadGroup">SingleReadGroup</option> <option value="UnmappedRead">UnmappedRead</option> </param> <when value="BadCigar"> <!-- no extra options --> </when> <when value="BadMate"> <!-- no extra options --> </when> <when 
value="DuplicateRead"> <!-- no extra options --> </when> <when value="FailsVendorQualityCheck"> <!-- no extra options --> </when> <when value="MalformedRead"> <!-- no extra options --> </when> <when value="MappingQuality"> <param name="min_mapping_quality_score" type="integer" value="10" label="Minimum read mapping quality required to consider a read for calling"/> </when> <when value="MappingQualityUnavailable"> <!-- no extra options --> </when> <when value="MappingQualityZero"> <!-- no extra options --> </when> <when value="MateSameStrand"> <!-- no extra options --> </when> <when value="MaxInsertSize"> <param name="maxInsertSize" type="integer" value="1000000" label="Discard reads with insert size greater than the specified value"/> </when> <when value="MaxReadLength"> <param name="maxReadLength" type="integer" value="76" label="Max Read Length"/> </when> <when value="MissingReadGroup"> <!-- no extra options --> </when> <when value="NoOriginalQualityScores"> <!-- no extra options --> </when> <when value="NotPrimaryAlignment"> <!-- no extra options --> </when> <when value="Platform454"> <!-- no extra options --> </when> <when value="Platform"> <param name="PLFilterName" type="text" value="" label="Discard reads with RG:PL attribute containing this string"/> </when> <when value="PlatformUnit"> <!-- no extra options --> </when> <when value="ReadGroupBlackList"> <!-- no extra options --> </when> <when value="ReadName"> <param name="readName" type="text" value="" label="Filter out all reads except those with this read name"/> </when> <when value="ReadStrand"> <param name="filterPositive" type="boolean" truevalue="--filterPositive" falsevalue="" label="Discard reads on the forward strand"/> </when> <when value="ReassignMappingQuality"> <param name="default_mapping_quality" type="integer" value="60" label="Default read mapping quality to assign to all reads"/> </when> <when value="Sample"> <param name="sample_to_keep" type="text" value="" label="The name of the sample(s) to keep, filtering out all others"/> </when> <when value="SingleReadGroup"> <param name="read_group_to_keep" type="integer" value="76" label="The name of the read group to keep, filtering out all others"/> </when> <when value="UnmappedRead"> <!-- no extra options --> </when> </conditional> </repeat> <repeat name="input_interval_repeat" title="Operate on Genomic intervals" help="-L,--intervals &lt;intervals&gt;"> <param name="input_intervals" type="data" format="bed,gatk_interval,picard_interval_list,vcf" label="Genomic intervals" /> </repeat> <repeat name="input_exclude_interval_repeat" title="Exclude Genomic intervals" help="-XL,--excludeIntervals &lt;excludeIntervals&gt;"> <param name="input_exclude_intervals" type="data" format="bed,gatk_interval,picard_interval_list,vcf" label="Genomic intervals" /> </repeat> <param name="interval_set_rule" type="select" label="Interval set rule" help="-isr,--interval_set_rule &lt;interval_set_rule&gt;"> <option value="UNION" selected="True">UNION</option> <option value="INTERSECTION">INTERSECTION</option> </param> <conditional name="downsampling_type"> <param name="downsampling_type_selector" type="select" label="Type of reads downsampling to employ at a given locus" help="-dt,--downsampling_type &lt;downsampling_type&gt;"> <option value="NONE" selected="True">NONE</option> <option value="ALL_READS">ALL_READS</option> <option value="BY_SAMPLE">BY_SAMPLE</option> </param> <when value="NONE"> <!-- no more options here --> </when> <when value="ALL_READS"> <conditional 
name="downsample_to_type"> <param name="downsample_to_type_selector" type="select" label="Downsample method"> <option value="downsample_to_fraction" selected="True">Downsample by Fraction</option> <option value="downsample_to_coverage">Downsample by Coverage</option> </param> <when value="downsample_to_fraction"> <param name="downsample_to_value" type="float" label="Fraction [0.0-1.0] of reads to downsample to" value="1" min="0" max="1" help="-dfrac,--downsample_to_fraction &lt;downsample_to_fraction&gt;"/> </when> <when value="downsample_to_coverage"> <param name="downsample_to_value" type="integer" label="Coverage to downsample to at any given locus" value="0" help="-dcov,--downsample_to_coverage &lt;downsample_to_coverage&gt;"/> </when> </conditional> </when> <when value="BY_SAMPLE"> <conditional name="downsample_to_type"> <param name="downsample_to_type_selector" type="select" label="Downsample method"> <option value="downsample_to_fraction" selected="True">Downsample by Fraction</option> <option value="downsample_to_coverage">Downsample by Coverage</option> </param> <when value="downsample_to_fraction"> <param name="downsample_to_value" type="float" label="Fraction [0.0-1.0] of reads to downsample to" value="1" min="0" max="1" help="-dfrac,--downsample_to_fraction &lt;downsample_to_fraction&gt;"/> </when> <when value="downsample_to_coverage"> <param name="downsample_to_value" type="integer" label="Coverage to downsample to at any given locus" value="0" help="-dcov,--downsample_to_coverage &lt;downsample_to_coverage&gt;"/> </when> </conditional> </when> </conditional> <param name="baq" type="select" label="Type of BAQ calculation to apply in the engine" help="-baq,--baq &lt;baq&gt;"> <option value="OFF" selected="True">OFF</option> <option value="CALCULATE_AS_NECESSARY">CALCULATE_AS_NECESSARY</option> <option value="RECALCULATE">RECALCULATE</option> </param> <param name="baq_gap_open_penalty" type="float" label="BAQ gap open penalty (Phred Scaled)" value="40" help="Default value is 40. 30 is perhaps better for whole genome call sets. 
-baqGOP,--baqGapOpenPenalty &lt;baqGapOpenPenalty&gt;" /> <param name="use_original_qualities" type="boolean" truevalue="--useOriginalQualities" falsevalue="" label="Use the original base quality scores from the OQ tag" help="-OQ,--useOriginalQualities" /> <param name="default_base_qualities" type="integer" label="Value to be used for all base quality scores, when some are missing" value="-1" help="-DBQ,--defaultBaseQualities &lt;defaultBaseQualities&gt;"/> <param name="validation_strictness" type="select" label="How strict should we be with validation" help="-S,--validation_strictness &lt;validation_strictness&gt;"> <option value="STRICT" selected="True">STRICT</option> <option value="LENIENT">LENIENT</option> <option value="SILENT">SILENT</option> <!-- <option value="DEFAULT_STRINGENCY">DEFAULT_STRINGENCY</option> listed in docs, but not valid value...--> </param> <param name="interval_merging" type="select" label="Interval merging rule" help="-im,--interval_merging &lt;interval_merging&gt;"> <option value="ALL" selected="True">ALL</option> <option value="OVERLAPPING_ONLY">OVERLAPPING_ONLY</option> </param> <repeat name="read_group_black_list_repeat" title="Read group black list" help="-rgbl,--read_group_black_list &lt;read_group_black_list&gt;"> <conditional name="read_group_black_list_type"> <param name="read_group_black_list_type_selector" type="select" label="Type of reads read group black list"> <option value="file" selected="True">Filters in file</option> <option value="text">Specify filters as a string</option> </param> <when value="file"> <param name="read_group_black_list" type="data" format="txt" label="Read group black list file" /> </when> <when value="text"> <param name="read_group_black_list" type="text" value="tag:string" label="Read group black list tag:string" /> </when> </conditional> </repeat> <param name="disable_experimental_low_memory_sharding" type="boolean" truevalue="--disable_experimental_low_memory_sharding" falsevalue="" label="Disable experimental low-memory sharding functionality." checked="False" help="--disable_experimental_low_memory_sharding"/> <param name="non_deterministic_random_seed" type="boolean" truevalue="--nonDeterministicRandomSeed" falsevalue="" label="Makes the GATK behave non deterministically, that is, the random numbers generated will be different in every run" checked="False" help="-ndrs,--nonDeterministicRandomSeed"/> <param name="fix_misencoded_quality_scores" type="boolean" truevalue="--fix_misencoded_quality_scores" falsevalue="" label="Fix mis-encoded base quality scores. Q0 == ASCII 33 according to the SAM specification, whereas Illumina encoding starts at Q64. The idea here is simple: we just iterate over all reads and subtract 31 from every quality score." 
checked="False" help="-fixMisencodedQuals / --fix_misencoded_quality_scores"/> </when> </conditional> <conditional name="analysis_param_type"> <param name="analysis_param_type_selector" type="select" label="Basic or Advanced Analysis options"> <option value="basic" selected="True">Basic</option> <option value="advanced">Advanced</option> </param> <when value="basic"> <!-- Do nothing here --> </when> <when value="advanced"> <param name="allow_polyploid_reduction" type="boolean" checked="False" truevalue="-polyploid" falsevalue="" label="Allow polyploid-based reduction" help="--allow_polyploid_reduction / -polyploid Allow the experimental polyploid-based reduction capabilities"/> <param name="context_size" type="integer" value="10" optional="true" label="context_size" help="The number of bases to keep around mismatches (potential variation)"> </param> <param name="dont_compress_read_names" type="boolean" checked="False" truevalue="-nocmp_names" falsevalue="" label="Do not compress read names." help="--dont_compress_read_names / -nocmp_names By default, ReduceReads will compress read names to numbers and guarantee uniqueness and reads with similar name will still have similar compressed names. Note: If you scatter/gather there is no guarantee that read name uniqueness will be maintained -- in this case we recommend not compressing."/> <param name="dont_hardclip_low_qual_tails" type="boolean" checked="False" truevalue="-noclip_tail" falsevalue="" label="Do not hard clip the low quality tails of the reads" help="--dont_hardclip_low_qual_tails / -noclip_tail This option overrides the argument of minimum tail quality"/> <param name="dont_simplify_reads" type="boolean" checked="False" truevalue="-nosimplify" falsevalue="" label="Do not simplify read" help="--dont_simplify_reads / -nosimplify Do not simplify read (strip away all extra information of the read -- anything other than bases, quals and read group)."/> <param name="dont_use_softclipped_bases" type="boolean" checked="False" truevalue="-no_soft" falsevalue="" label="Do not use high quality soft-clipped bases" help="--dont_use_softclipped_bases / -no_soft Do not use high quality soft-clipped bases. By default, ReduceReads will hard clip away any low quality soft clipped base left by the aligner and use the high quality soft clipped bases in its traversal algorithm to identify variant regions. The minimum quality for soft clipped bases is the same as the minimum base quality to consider (minqual)"/> <param name="downsample_coverage" type="integer" value="250" optional="true" label="Downsample the coverage of a variable region" help="Downsamples the coverage of a variable region approximately (guarantees the minimum to be equal to this). A value of 0 turns downsampling off."> </param> <param name="hard_clip_to_interval" type="boolean" checked="False" truevalue="-clip_int" falsevalue="" label="Hard clip all incoming reads" help="--hard_clip_to_interval / -clip_int Optionally hard clip all incoming reads to the desired intervals. The hard clips will happen exactly at the interval border."/> <param name="minimum_del_proportion_to_trigger_variant" type="float" value="0.05" optional="true" label="Minimum proportion of indels in a site to trigger a variant region" help="--minimum_del_proportion_to_trigger_variant / -mindel Minimum proportion of indels in a site to trigger a variant region. Anything below this will be considered consensus.
"> </param> <param name="minimum_mapping_quality" type="integer" value="20" optional="true" label="Minimum mapping quality for consensus read" help="--minimum_mapping_quality / -minmap The minimum mapping quality to be considered for the consensus synthetic read. Reads that have mapping quality below this threshold will not be counted towards consensus, but are still counted towards variable regions."> </param> <param name="minimum_tail_qualities" type="integer" value="2" optional="true" label="Minimum tail quality" help="--minimum_tail_qualities / -mintail Reads have notoriously low quality bases on the tails (left and right). Consecutive bases with quality lower than this threshold will be hard clipped off before entering the reduce reads algorithm."> <validator type="in_range" message="value between 0 and 127" min="0" max="127"/> </param> <param name="minimum_base_quality_to_consider" type="integer" value="20" optional="true" label="Minimum base quality to consider" help="--minimum_base_quality_to_consider / -minqual The minimum base quality to be considered for the consensus synthetic read. Reads that have base quality below this threshold will not be counted towards consensus, but are still counted towards variable regions."> <validator type="in_range" message="value between 0 and 127" min="0" max="127"/> </param> <param name="minimum_alt_proportion_to_trigger_variant" type="float" value="0.05" optional="true" label="Minimum proportion of mismatches in a site to trigger a variant region" help="--minimum_alt_proportion_to_trigger_variant / -minvar Minimum proportion of mismatches in a site to trigger a variant region. Anything below this will be considered consensus."> <validator type="in_range" message="value between 0.00 and 1.00" min="0.0" max="1.0"/> </param> <param name="dont_hardclip_adaptor_sequences" type="boolean" checked="False" truevalue="-noclip_ad" falsevalue="" label="Do not hard clip adaptor sequences" help="--dont_hardclip_adaptor_sequences / -noclip_ad Do not hard clip adaptor sequences. Note: You don't have to turn this on for reads that are not mate paired. The program will behave correctly in those cases."/> </when> </conditional> </inputs> <outputs> <data format="bam" name="output_bam" label="${tool.name} on ${on_string} (BAM)" /> <data format="txt" name="output_log" label="${tool.name} on ${on_string} (log)" /> </outputs> <tests> <test> <param name="input_recal" value="gatk/gatk_count_covariates/gatk_count_covariates_out_1.csv" ftype="csv" /> <param name="reference_source_selector" value="history" /> <param name="ref_file" value="phiX.fasta" ftype="fasta" /> <param name="input_bam" value="gatk/gatk_indel_realigner/gatk_indel_realigner_out_1.bam" ftype="bam" /> <param name="gatk_param_type_selector" value="basic" /> <param name="analysis_param_type_selector" value="basic" /> <output name="output_bam" file="gatk/gatk_table_recalibration/gatk_table_recalibration_out_1.bam" ftype="bam" lines_diff="4" /> <output name="output_log" file="gatk/gatk_table_recalibration/gatk_table_recalibration_out_1.log.contains" compare="contains" /> </test> </tests>
<help>
**What it does**

ReduceReads reduces the BAM file using read-based compression that keeps only essential information for variant calling.

This walker will generate reduced versions of the BAM files that still follow the BAM spec and contain all the information necessary for the GSA variant calling pipeline. Some options allow you to tune how much compression you want to achieve.
The default values have been shown to reduce a typical whole exome BAM file 100x. The higher the coverage, the bigger the savings in file size and performance of the downstream tools.

For more information on using read-based compression in the GATK, see this `tool specific page <http://www.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_sting_gatk_walkers_compression_reducereads_ReduceReads.html>`_.

To learn about best practices for variant detection using GATK, see this `overview <http://www.broadinstitute.org/gatk/guide/topic?name=best-practices>`_.

If you encounter errors, please view the `GATK FAQ <http://www.broadinstitute.org/gatk/guide/topic?name=faqs>`_.

------

**Inputs**

GenomeAnalysisTK: ReduceReads accepts an aligned BAM file and an optional recalibration CSV file as input.

**Outputs**

The output is in BAM format. Go `here <http://www.broadinstitute.org/gatk/guide/topic?name=intro>`_ for details on GATK file formats.

-------

**Settings**::

 --allow_polyploid_reduction / -polyploid ( boolean with default value false )
 Allow the experimental polyploid-based reduction capabilities of this tool

 --context_size / -cs ( int with default value 10 )
 The number of bases to keep around mismatches (potential variation)

 --dont_compress_read_names / -nocmp_names ( boolean with default value false )
 Do not compress read names. By default, ReduceReads will compress read names to numbers and guarantee
 uniqueness and reads with similar name will still have similar compressed names. Note: If you scatter/gather
 there is no guarantee that read name uniqueness will be maintained -- in this case we recommend not compressing.

 --dont_hardclip_low_qual_tails / -noclip_tail ( boolean with default value false )
 Do not hard clip the low quality tails of the reads. This option overrides the argument of minimum tail quality.

 --dont_simplify_reads / -nosimplify ( boolean with default value false )
 Do not simplify read (strip away all extra information of the read -- anything other than bases, quals and read group).

 --dont_use_softclipped_bases / -no_soft ( boolean with default value false )
 Do not use high quality soft-clipped bases. By default, ReduceReads will hard clip away any low quality soft clipped
 base left by the aligner and use the high quality soft clipped bases in its traversal algorithm to identify variant
 regions. The minimum quality for soft clipped bases is the same as the minimum base quality to consider (minqual)

 --downsample_coverage / -ds ( int with default value 250 )
 Downsamples the coverage of a variable region approximately (guarantees the minimum to be equal to this).
 A value of 0 turns downsampling off.

 --hard_clip_to_interval / -clip_int ( boolean with default value false )
 Optionally hard clip all incoming reads to the desired intervals. The hard clips will happen exactly at the interval border.

 -mindel / --minimum_del_proportion_to_trigger_variant ( double with default value 0.05 )
 Minimum proportion of indels in a site to trigger a variant region. Anything below this will be considered consensus.

 --minimum_mapping_quality / -minmap ( int with default value 20 )
 The minimum mapping quality to be considered for the consensus synthetic read. Reads that have mapping quality
 below this threshold will not be counted towards consensus, but are still counted towards variable regions.

 --minimum_tail_qualities / -mintail ( byte with default value 2 )
 Reads have notoriously low quality bases on the tails (left and right).
 Consecutive bases with quality lower than this threshold will be hard clipped off before entering the reduce reads algorithm.

 -minqual / --minimum_base_quality_to_consider ( byte with default value 20 )
 The minimum base quality to be considered for the consensus synthetic read. Reads that have base quality below
 this threshold will not be counted towards consensus, but are still counted towards variable regions.

 -minvar / --minimum_alt_proportion_to_trigger_variant ( double with default value 0.05 )
 Minimum proportion of mismatches in a site to trigger a variant region. Anything below this will be considered consensus.

 -noclip_ad / --dont_hardclip_adaptor_sequences ( boolean with default value false )
 Do not hard clip adaptor sequences. Note: You don't have to turn this on for reads that are not mate paired.
 The program will behave correctly in those cases.

------

**Citation**

For the underlying tool, please cite `DePristo MA, Banks E, Poplin R, Garimella KV, Maguire JR, Hartl C, Philippakis AA, del Angel G, Rivas MA, Hanna M, McKenna A, Fennell TJ, Kernytsky AM, Sivachenko AY, Cibulskis K, Gabriel SB, Altshuler D, Daly MJ. A framework for variation discovery and genotyping using next-generation DNA sequencing data. Nat Genet. 2011 May;43(5):491-8. <http://www.ncbi.nlm.nih.gov/pubmed/21478889>`_

Please also cite `McKenna A, Hanna M, Banks E, Sivachenko A, Cibulskis K, Kernytsky A, Garimella K, Altshuler D, Gabriel S, Daly M, DePristo MA (2010). The Genome Analysis Toolkit: a MapReduce framework for analyzing next-generation DNA sequencing data. Genome Res. 20:1297-303. Epub 2010 Jul 19. <http://www.ncbi.nlm.nih.gov/pubmed/20644199>`_

If you use this tool in Galaxy, please cite `Blankenberg D, Von Kuster G, Coraor N, Ananda G, Lazarus R, Mangan M, Nekrutenko A, Taylor J. Galaxy: a web-based genome analysis tool for experimentalists. Curr Protoc Mol Biol. 2010 Jan;Chapter 19:Unit 19.10.1-21. <http://www.ncbi.nlm.nih.gov/pubmed/20069535>`_

</help>
</tool>
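Note: for a basic run (no advanced GATK or analysis options), the command block above boils down to a GATK2 invocation roughly like the sketch below. File names are placeholders, the wrapper actually stages inputs through gatk2_wrapper.py, and $GATK2_PATH is expected to come from the environment set up by the gatk2 repository dependency::

    # Illustrative only -- gatk2_wrapper.py assembles and runs something along these lines
    java -jar "$GATK2_PATH/GenomeAnalysisTK.jar" \
        -T ReduceReads \
        -I input.bam \
        -R /path/to/reference.fa \
        -o reduced.bam \
        --disable_bam_indexing
        # add --BQSR recal.csv when a covariates table is supplied (optional input above)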