devteam / concat: operation_filter.py, changeset 2:23abefbed3dd (draft)

planemo upload commit 33927a87ba2eee9bf0ecdd376a66241b17b3d734

| field | value |
|---|---|
| author | devteam |
| date | Tue, 13 Oct 2015 12:49:47 -0400 |
| parents | f256537913a1 |
| children | 9a8f22b1a3ed |
```python
# runs after the job (and after the default post-filter)
from galaxy.tools.parameters import DataToolParameter

from galaxy.jobs.handler import JOB_ERROR

# Older py compatibility
try:
    set()
except:
    from sets import Set as set


def validate_input( trans, error_map, param_values, page_param_map ):
    dbkeys = set()
    data_param_names = set()
    data_params = 0
    for name, param in page_param_map.iteritems():
        if isinstance( param, DataToolParameter ):
            # for each dataset parameter
            if param_values.get(name, None) is not None:
                dbkeys.add( param_values[name].dbkey )
                data_params += 1
                # check meta data
                try:
                    param = param_values[name]
                    if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
                        # TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
                        pass
                    else:  # Validate interval datatype.
                        int( param.metadata.startCol )
                        int( param.metadata.endCol )
                        int( param.metadata.chromCol )
                        if param.metadata.strandCol is not None:
                            int( param.metadata.strandCol )
                except:
                    error_msg = "The attributes of this dataset are not properly set. " + \
                        "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
                    error_map[name] = error_msg
            data_param_names.add( name )
    if len( dbkeys ) > 1:
        for name in data_param_names:
            error_map[name] = "All datasets must belong to same genomic build, " \
                "this dataset is linked to build '%s'" % param_values[name].dbkey
    if data_params != len(data_param_names):
        for name in data_param_names:
            error_map[name] = "A dataset of the appropriate type is required"
```
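The core rule in `validate_input` is the dbkey consistency check: if the selected datasets map to more than one genome build, every dataset parameter receives the same error message. The sketch below re-implements just that rule outside Galaxy; `StubDataset` and `check_same_build` are hypothetical names used for illustration, not the tool's code or Galaxy APIs.

```python
# Hypothetical, self-contained sketch of the dbkey-consistency rule above.
# StubDataset and check_same_build are illustrative names, not Galaxy APIs.
class StubDataset:
    def __init__(self, dbkey):
        self.dbkey = dbkey


def check_same_build(param_values):
    """Flag every dataset parameter when more than one genome build is selected."""
    error_map = {}
    dbkeys = {d.dbkey for d in param_values.values() if d is not None}
    if len(dbkeys) > 1:
        for name, d in param_values.items():
            if d is not None:
                error_map[name] = (
                    "All datasets must belong to same genomic build, "
                    "this dataset is linked to build '%s'" % d.dbkey
                )
    return error_map


print(check_same_build({"input1": StubDataset("hg19"),
                        "input2": StubDataset("mm9")}))
# -> both inputs flagged, because hg19 != mm9
```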
```python
# Commented out by INS, 5/30/2007. What is the PURPOSE of this?
def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    """Verify the output data after each run"""
    for data in out_data.values():
        try:
            if stderr and len( stderr ) > 0:
                raise Exception( stderr )

        except Exception:
            data.blurb = JOB_ERROR
            data.state = JOB_ERROR
```
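`exec_after_process` treats any non-empty stderr as a failure and stamps every output dataset with `JOB_ERROR`. Below is a minimal stand-alone sketch of that behaviour, assuming a hypothetical `StubData` class and a plain `"error"` string in place of Galaxy's dataset objects and `JOB_ERROR` constant.

```python
# Hypothetical sketch of the post-run check: non-empty stderr marks all outputs.
# StubData and the "error" string are placeholders, not Galaxy classes or constants.
class StubData:
    def __init__(self):
        self.blurb = "done"
        self.state = "ok"


def flag_outputs_on_stderr(out_data, stderr, job_error="error"):
    for data in out_data.values():
        if stderr and len(stderr) > 0:
            data.blurb = job_error
            data.state = job_error


out = {"output1": StubData()}
flag_outputs_on_stderr(out, stderr="tool wrote something to stderr")
print(out["output1"].blurb, out["output1"].state)  # -> error error
```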
```python
def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    exec_after_process(
        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)

    # strip strand column if clusters were merged
    for data in out_data.values():
        if param_dict['returntype'] is True:
            data.metadata.chromCol = 1
            data.metadata.startCol = 2
            data.metadata.endCol = 3
        # merge always clobbers strand
        data.metadata.strandCol = None
```
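`exec_after_merge` first runs the same stderr check, then rewrites the interval metadata on each output: when the tool's `returntype` flag is true, chrom/start/end are pinned to columns 1-3, and the strand column is cleared in every case because a merge discards strand. A hypothetical sketch of that metadata fix-up, using `types.SimpleNamespace` in place of Galaxy's dataset and metadata objects:

```python
# Hypothetical sketch of the metadata fix-up in exec_after_merge.
# SimpleNamespace stands in for Galaxy's dataset/metadata objects.
from types import SimpleNamespace


def fixup_merged_metadata(data, returntype):
    if returntype is True:
        data.metadata.chromCol = 1
        data.metadata.startCol = 2
        data.metadata.endCol = 3
    # merge always clobbers strand
    data.metadata.strandCol = None


data = SimpleNamespace(metadata=SimpleNamespace(chromCol=1, startCol=4,
                                                endCol=5, strandCol=6))
fixup_merged_metadata(data, returntype=True)
print(data.metadata.startCol, data.metadata.strandCol)  # -> 2 None
```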
```python
def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    exec_after_process(
        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)

    # strip strand column if clusters were merged
    if param_dict["returntype"] == '1':
        for data in out_data.values():
            data.metadata.strandCol = None
```
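`exec_after_cluster` only strips the strand column, and only when `returntype` arrives as the string `'1'` (note the contrast with the merge hook, which tests a boolean). A hypothetical sketch of that conditional strip, again with `SimpleNamespace` stubs rather than Galaxy objects:

```python
# Hypothetical sketch of the conditional strand strip in exec_after_cluster.
# Stubs only; not Galaxy code.
from types import SimpleNamespace


def strip_strand_if_merged(out_data, returntype):
    if returntype == '1':
        for data in out_data.values():
            data.metadata.strandCol = None


out = {"output1": SimpleNamespace(metadata=SimpleNamespace(strandCol=6))}
strip_strand_if_merged(out, returntype='1')
print(out["output1"].metadata.strandCol)  # -> None
```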