comparison: operation_filter.py @ 6:9c5ff4695c97 (draft, default, tip)
planemo upload for repository https://github.com/galaxyproject/tools-devteam/tree/master/tool_collections/gops/basecoverage commit 200bd4645dd768eb6ee1aab7d181b76d34d13d4c
| field | value |
|---|---|
| author | devteam |
| date | Mon, 13 Jun 2022 16:27:23 +0000 |
| parents | 21898544368b |
| children | |
| 5:21898544368b (parent) | 6:9c5ff4695c97 (this revision) |
|---|---|
| 1 # runs after the job (and after the default post-filter) | 1 # runs after the job (and after the default post-filter) |
| 2 from galaxy.jobs.handler import JOB_ERROR | |
| 3 from galaxy.tools.parameters import DataToolParameter | 2 from galaxy.tools.parameters import DataToolParameter |
| 4 | 3 |
| 5 # Older py compatibility | |
| 6 try: | |
| 7 set() | |
| 8 except: | |
| 9 from sets import Set as set | |
| 10 | 4 |
| 11 | 5 def validate_input(trans, error_map, param_values, page_param_map): |
| 12 def validate_input( trans, error_map, param_values, page_param_map ): | |
| 13 dbkeys = set() | 6 dbkeys = set() |
| 14 data_param_names = set() | 7 data_param_names = set() |
| 15 data_params = 0 | 8 data_params = 0 |
| 16 for name, param in page_param_map.items(): | 9 for name, param in page_param_map.items(): |
| 17 if isinstance( param, DataToolParameter ): | 10 if isinstance(param, DataToolParameter): |
| 18 # for each dataset parameter | 11 # for each dataset parameter |
| 19 if param_values.get(name, None) is not None: | 12 if param_values.get(name, None) is not None: |
| 20 dbkeys.add( param_values[name].dbkey ) | 13 dbkeys.add(param_values[name].dbkey) |
| 21 data_params += 1 | 14 data_params += 1 |
| 22 # check meta data | 15 # check meta data |
| 23 try: | 16 try: |
| 24 param = param_values[name] | 17 param = param_values[name] |
| 25 if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ): | 18 if isinstance(param.datatype, trans.app.datatypes_registry.get_datatype_by_extension('gff').__class__): |
| 26 # TODO: currently cannot validate GFF inputs b/c they are not derived from interval. | 19 # TODO: currently cannot validate GFF inputs b/c they are not derived from interval. |
| 27 pass | 20 pass |
| 28 else: # Validate interval datatype. | 21 else: # Validate interval datatype. |
| 29 int( param.metadata.startCol ) | 22 int(param.metadata.startCol) |
| 30 int( param.metadata.endCol ) | 23 int(param.metadata.endCol) |
| 31 int( param.metadata.chromCol ) | 24 int(param.metadata.chromCol) |
| 32 if param.metadata.strandCol is not None: | 25 if param.metadata.strandCol is not None: |
| 33 int( param.metadata.strandCol ) | 26 int(param.metadata.strandCol) |
| 34 except: | 27 except Exception: |
| 35 error_msg = "The attributes of this dataset are not properly set. " + \ | 28 error_msg = "The attributes of this dataset are not properly set. " + \ |
| 36 "Click the pencil icon in the history item to set the chrom, start, end and strand columns." | 29 "Click the pencil icon in the history item to set the chrom, start, end and strand columns." |
| 37 error_map[name] = error_msg | 30 error_map[name] = error_msg |
| 38 data_param_names.add( name ) | 31 data_param_names.add(name) |
| 39 if len( dbkeys ) > 1: | 32 if len(dbkeys) > 1: |
| 40 for name in data_param_names: | 33 for name in data_param_names: |
| 41 error_map[name] = "All datasets must belong to same genomic build, " \ | 34 error_map[name] = "All datasets must belong to same genomic build, " \ |
| 42 "this dataset is linked to build '%s'" % param_values[name].dbkey | 35 "this dataset is linked to build '%s'" % param_values[name].dbkey |
| 43 if data_params != len(data_param_names): | 36 if data_params != len(data_param_names): |
| 44 for name in data_param_names: | 37 for name in data_param_names: |
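
Beyond dropping the unused `JOB_ERROR` import and the old `sets` compatibility shim, the one behavioural tweak in this revision is the switch from a bare `except:` to `except Exception:` around the metadata check, so `KeyboardInterrupt` and `SystemExit` are no longer swallowed. Below is a minimal, Galaxy-independent sketch of that metadata-check pattern; the `SimpleNamespace` stand-ins are hypothetical and only mimic the `chromCol`/`startCol`/`endCol`/`strandCol` attributes the hook reads:

```python
# Sketch of the interval-metadata check used by validate_input above.
# Stand-in objects only; real Galaxy datasets carry a richer metadata API.
from types import SimpleNamespace


def check_interval_metadata(metadata):
    """Return None if the column metadata parses as integers, else an error string."""
    try:
        int(metadata.startCol)
        int(metadata.endCol)
        int(metadata.chromCol)
        if metadata.strandCol is not None:
            int(metadata.strandCol)
    except Exception:
        # 'except Exception' (not a bare 'except'), matching this revision's change.
        return ("The attributes of this dataset are not properly set. "
                "Click the pencil icon in the history item to set the "
                "chrom, start, end and strand columns.")
    return None


if __name__ == "__main__":
    ok = SimpleNamespace(chromCol=1, startCol=2, endCol=3, strandCol=None)
    bad = SimpleNamespace(chromCol=None, startCol=2, endCol=3, strandCol=None)
    print(check_interval_metadata(ok))   # None
    print(check_interval_metadata(bad))  # error message
```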
