changeset 6:4d584cf5ced5 (draft, branch: default, tag: tip)
planemo upload for repository https://github.com/galaxyproject/tools-devteam/tree/master/tool_collections/gops/basecoverage commit 200bd4645dd768eb6ee1aab7d181b76d34d13d4c
author | devteam
---|---
date | Mon, 13 Jun 2022 16:26:51 +0000
parents | 37652c34b3bf
children |
files | gops_basecoverage.py operation_filter.py
diffstat | 2 files changed, 26 insertions(+), 52 deletions(-)
--- a/gops_basecoverage.py	Thu Jun 22 18:37:49 2017 -0400
+++ b/gops_basecoverage.py	Mon Jun 13 16:26:51 2022 +0000
@@ -8,7 +8,6 @@
 from __future__ import print_function
 
 import fileinput
-import sys
 
 from bx.cookbook import doc_optparse
 from bx.intervals.io import NiceReaderWrapper
@@ -16,33 +15,33 @@
 from bx.tabular.io import ParseError
 from galaxy.tools.util.galaxyops import fail, parse_cols_arg, skipped
 
-assert sys.version_info[:2] >= ( 2, 4 )
-
 
 def main():
-    options, args = doc_optparse.parse( __doc__ )
+    options, args = doc_optparse.parse(__doc__)
     try:
-        chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg( options.cols1 )
+        chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg(options.cols1)
         in_fname, out_fname = args
-    except:
+    except Exception:
         doc_optparse.exception()
 
-    g1 = NiceReaderWrapper( fileinput.FileInput( in_fname ),
-                            chrom_col=chr_col_1,
-                            start_col=start_col_1,
-                            end_col=end_col_1,
-                            strand_col=strand_col_1,
-                            fix_strand=True )
+    g1 = NiceReaderWrapper(
+        fileinput.FileInput(in_fname),
+        chrom_col=chr_col_1,
+        start_col=start_col_1,
+        end_col=end_col_1,
+        strand_col=strand_col_1,
+        fix_strand=True
+    )
 
     try:
         bases = base_coverage(g1)
     except ParseError as exc:
-        fail( "Invalid file format: %s" % str( exc ) )
-    out_file = open( out_fname, "w" )
-    out_file.write( "%s\n" % str( bases ) )
+        fail("Invalid file format: %s" % str(exc))
+    out_file = open(out_fname, "w")
+    out_file.write("%s\n" % str(bases))
     out_file.close()
 
     if g1.skipped > 0:
-        print(skipped( g1, filedesc="" ))
+        print(skipped(g1, filedesc=""))
 
 
 if __name__ == "__main__":
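For context on what this script computes: the work happens in bx-python's `base_coverage` (imported in the unchanged surrounding code), which counts the number of distinct bases covered by the input intervals. The sketch below is not bx-python's implementation; it is a toy reimplementation assuming the usual merge-and-sum semantics (each base counted at most once even when intervals overlap), and it takes plain `(chrom, start, end)` tuples instead of the `NiceReaderWrapper` stream used above.

```python
# Toy sketch of base_coverage semantics (assumption: count each base once).
from itertools import groupby


def toy_base_coverage(intervals):
    """Total bases covered by (chrom, start, end) intervals, overlaps merged."""
    total = 0
    # Sorting orders by chrom, then start, so each chromosome can be
    # swept left to right with a single running interval.
    for _, ivs in groupby(sorted(intervals), key=lambda iv: iv[0]):
        cur_start = cur_end = None
        for _, start, end in ivs:
            if cur_end is None or start > cur_end:
                # Disjoint interval: flush the previous merged run.
                if cur_end is not None:
                    total += cur_end - cur_start
                cur_start, cur_end = start, end
            else:
                # Overlapping or adjacent: extend the current run.
                cur_end = max(cur_end, end)
        if cur_end is not None:
            total += cur_end - cur_start
    return total


# chr1 intervals merge to [0, 20) = 20 bases; chr2 adds 3 more.
print(toy_base_coverage([("chr1", 0, 10), ("chr1", 5, 20), ("chr2", 0, 3)]))  # 23
```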
--- a/operation_filter.py	Thu Jun 22 18:37:49 2017 -0400
+++ b/operation_filter.py	Mon Jun 13 16:26:51 2022 +0000
@@ -1,42 +1,35 @@
 # runs after the job (and after the default post-filter)
-from galaxy.jobs.handler import JOB_ERROR
 from galaxy.tools.parameters import DataToolParameter
 
-# Older py compatibility
-try:
-    set()
-except:
-    from sets import Set as set
-
 
-def validate_input( trans, error_map, param_values, page_param_map ):
+def validate_input(trans, error_map, param_values, page_param_map):
     dbkeys = set()
     data_param_names = set()
     data_params = 0
     for name, param in page_param_map.items():
-        if isinstance( param, DataToolParameter ):
+        if isinstance(param, DataToolParameter):
             # for each dataset parameter
             if param_values.get(name, None) is not None:
-                dbkeys.add( param_values[name].dbkey )
+                dbkeys.add(param_values[name].dbkey)
                 data_params += 1
                 # check meta data
                 try:
                     param = param_values[name]
-                    if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
+                    if isinstance(param.datatype, trans.app.datatypes_registry.get_datatype_by_extension('gff').__class__):
                         # TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
                         pass
                     else:
                         # Validate interval datatype.
-                        int( param.metadata.startCol )
-                        int( param.metadata.endCol )
-                        int( param.metadata.chromCol )
+                        int(param.metadata.startCol)
+                        int(param.metadata.endCol)
+                        int(param.metadata.chromCol)
                         if param.metadata.strandCol is not None:
-                            int( param.metadata.strandCol )
-                except:
+                            int(param.metadata.strandCol)
+                except Exception:
                     error_msg = "The attributes of this dataset are not properly set. " + \
                         "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
                     error_map[name] = error_msg
-            data_param_names.add( name )
-    if len( dbkeys ) > 1:
+            data_param_names.add(name)
+    if len(dbkeys) > 1:
         for name in data_param_names:
             error_map[name] = "All datasets must belong to same genomic build, " \
                 "this dataset is linked to build '%s'" % param_values[name].dbkey
@@ -45,22 +38,7 @@
             error_map[name] = "A dataset of the appropriate type is required"
 
 
-# Commented out by INS, 5/30/2007. What is the PURPOSE of this?
-def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    """Verify the output data after each run"""
-    for data in out_data.values():
-        try:
-            if stderr and len( stderr ) > 0:
-                raise Exception( stderr )
-        except Exception:
-            data.blurb = JOB_ERROR
-            data.state = JOB_ERROR
-
-
 def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     for data in out_data.values():
         if param_dict['returntype'] is True:
@@ -72,9 +50,6 @@
 
 
 def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     if param_dict["returntype"] == '1':
         for data in out_data.values():
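The `validate_input` hook that survives this cleanup does two things: it checks that every interval input has usable integer chrom/start/end (and, when set, strand) column metadata, and it rejects inputs that mix genome builds. Below is a minimal sketch of just the metadata check, with a hypothetical `Metadata` class standing in for Galaxy's real metadata object; only the attribute names are taken from the code above.

```python
# Hedged sketch of the per-dataset metadata check in validate_input.
# Metadata is a hypothetical stand-in, not Galaxy's actual class.
class Metadata:
    def __init__(self, chromCol=None, startCol=None, endCol=None, strandCol=None):
        self.chromCol, self.startCol = chromCol, startCol
        self.endCol, self.strandCol = endCol, strandCol


def interval_metadata_ok(metadata):
    """True when chrom/start/end (and strand, if present) parse as ints."""
    try:
        int(metadata.startCol)
        int(metadata.endCol)
        int(metadata.chromCol)
        if metadata.strandCol is not None:
            int(metadata.strandCol)
    except Exception:
        # Mirrors the tool's behavior: any unset/non-numeric column
        # metadata triggers the "attributes not properly set" error.
        return False
    return True


print(interval_metadata_ok(Metadata(chromCol=1, startCol=2, endCol=3)))     # True
print(interval_metadata_ok(Metadata(chromCol=1, startCol=None, endCol=3)))  # False
```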