comparison: operation_filter.py @ 6:4d584cf5ced5 (draft, default, tip)

planemo upload for repository https://github.com/galaxyproject/tools-devteam/tree/master/tool_collections/gops/basecoverage commit 200bd4645dd768eb6ee1aab7d181b76d34d13d4c
author devteam
date Mon, 13 Jun 2022 16:26:51 +0000
parents 37652c34b3bf
children (none)
--- operation_filter.py	(5:37652c34b3bf)
+++ operation_filter.py	(6:4d584cf5ced5)
@@ -1,81 +1,56 @@
 # runs after the job (and after the default post-filter)
-from galaxy.jobs.handler import JOB_ERROR
 from galaxy.tools.parameters import DataToolParameter
 
-# Older py compatibility
-try:
-    set()
-except:
-    from sets import Set as set
 
-
-def validate_input( trans, error_map, param_values, page_param_map ):
+def validate_input(trans, error_map, param_values, page_param_map):
     dbkeys = set()
     data_param_names = set()
     data_params = 0
     for name, param in page_param_map.items():
-        if isinstance( param, DataToolParameter ):
+        if isinstance(param, DataToolParameter):
             # for each dataset parameter
             if param_values.get(name, None) is not None:
-                dbkeys.add( param_values[name].dbkey )
+                dbkeys.add(param_values[name].dbkey)
                 data_params += 1
                 # check meta data
                 try:
                     param = param_values[name]
-                    if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
+                    if isinstance(param.datatype, trans.app.datatypes_registry.get_datatype_by_extension('gff').__class__):
                         # TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
                         pass
-                    else: # Validate interval datatype.
-                        int( param.metadata.startCol )
-                        int( param.metadata.endCol )
-                        int( param.metadata.chromCol )
+                    else:  # Validate interval datatype.
+                        int(param.metadata.startCol)
+                        int(param.metadata.endCol)
+                        int(param.metadata.chromCol)
                         if param.metadata.strandCol is not None:
-                            int( param.metadata.strandCol )
-                except:
+                            int(param.metadata.strandCol)
+                except Exception:
                     error_msg = "The attributes of this dataset are not properly set. " + \
                         "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
                     error_map[name] = error_msg
-            data_param_names.add( name )
-    if len( dbkeys ) > 1:
+            data_param_names.add(name)
+    if len(dbkeys) > 1:
         for name in data_param_names:
             error_map[name] = "All datasets must belong to same genomic build, " \
                 "this dataset is linked to build '%s'" % param_values[name].dbkey
     if data_params != len(data_param_names):
         for name in data_param_names:
             error_map[name] = "A dataset of the appropriate type is required"
 
 
-# Commented out by INS, 5/30/2007. What is the PURPOSE of this?
-def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    """Verify the output data after each run"""
-    for data in out_data.values():
-        try:
-            if stderr and len( stderr ) > 0:
-                raise Exception( stderr )
-        except Exception:
-            data.blurb = JOB_ERROR
-            data.state = JOB_ERROR
-
-
 def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     for data in out_data.values():
         if param_dict['returntype'] is True:
             data.metadata.chromCol = 1
             data.metadata.startCol = 2
             data.metadata.endCol = 3
         # merge always clobbers strand
         data.metadata.strandCol = None
 
 
 def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     if param_dict["returntype"] == '1':
         for data in out_data.values():
             data.metadata.strandCol = None
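
For context on how these hooks are used: the gops tool XML files load this module through a <code file="operation_filter.py"/> element, and Galaxy's legacy code-file hook mechanism then calls validate_input at job submission and the exec_after_* functions after the job completes. The sketch below mirrors only the genome-build consistency rule that validate_input enforces; it runs without a Galaxy install, and FakeDataset and check_same_build are hypothetical stand-ins, not Galaxy API.

# Minimal, self-contained sketch of the dbkey consistency check above.
# FakeDataset and check_same_build are illustrative stubs, not Galaxy code.
class FakeDataset:
    def __init__(self, dbkey):
        self.dbkey = dbkey


def check_same_build(param_values):
    # Mirror of the hook's dbkey check: every dataset parameter must share
    # one genomic build, otherwise each one is flagged in error_map.
    error_map = {}
    dbkeys = {value.dbkey for value in param_values.values()}
    if len(dbkeys) > 1:
        for name, value in param_values.items():
            error_map[name] = ("All datasets must belong to same genomic build, "
                               "this dataset is linked to build '%s'" % value.dbkey)
    return error_map


# Mixing hg19 and mm10 inputs flags both dataset parameters:
errors = check_same_build({"input1": FakeDataset("hg19"),
                           "input2": FakeDataset("mm10")})
assert set(errors) == {"input1", "input2"}
# Datasets on the same build pass cleanly:
assert check_same_build({"input1": FakeDataset("hg19"),
                         "input2": FakeDataset("hg19")}) == {}

The real hook additionally type-checks the interval metadata columns (chromCol, startCol, endCol, strandCol) and skips GFF inputs, as the diff shows. In the same spirit, here is a stand-in for the post-run metadata fix-up performed by exec_after_merge in the new revision, using illustrative stubs rather than Galaxy's dataset classes:

# Hypothetical stubs demonstrating the post-merge metadata reset.
class FakeMetadata:
    def __init__(self):
        # arbitrary starting columns, as if set by an upstream tool
        self.chromCol, self.startCol, self.endCol, self.strandCol = 1, 4, 5, 6


class FakeOutput:
    def __init__(self):
        self.metadata = FakeMetadata()


def strip_strand_after_merge(out_data, param_dict):
    # strip strand column if clusters were merged
    for data in out_data.values():
        if param_dict['returntype'] is True:
            data.metadata.chromCol = 1
            data.metadata.startCol = 2
            data.metadata.endCol = 3
        # merge always clobbers strand
        data.metadata.strandCol = None


out_data = {'output': FakeOutput()}
strip_strand_after_merge(out_data, {'returntype': True})
assert out_data['output'].metadata.strandCol is None
assert out_data['output'].metadata.startCol == 2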