comparison operation_filter.py @ 0:d958d5a0d1e8

Imported from capsule None
author devteam
date Tue, 01 Apr 2014 10:51:48 -0400
parents
children e0a23ab32d7f
# runs after the job (and after the default post-filter)
import os
from galaxy.tools.parameters import DataToolParameter

from galaxy.jobs.handler import JOB_ERROR

# Older py compatibility
try:
    set()
except:
    from sets import Set as set

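# Note: the functions below are Galaxy tool "code file" hooks: a tool XML that
# references this file (e.g. via a <code file="operation_filter.py"/> tag) gets
# validate_input() called when the tool form is submitted and an exec_after_*()
# hook called once the job has finished.  A hypothetical tool stanza, shown
# only for illustration, might remap the post-job hook like this:
#
#   <code file="operation_filter.py">
#       <hook exec_after_process="exec_after_merge" />
#   </code>
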
#def exec_before_process(app, inp_data, out_data, param_dict, tool=None):
#    """Sets the name of the data"""
#    dbkeys = sets.Set( [data.dbkey for data in inp_data.values() ] )
#    if len(dbkeys) != 1:
#        raise Exception, '<p><font color="yellow">Both Queries must be from the same genome build</font></p>'

def validate_input( trans, error_map, param_values, page_param_map ):
    """Check that all selected datasets share one genome build and have usable interval metadata."""
    dbkeys = set()
    data_param_names = set()
    data_params = 0
    for name, param in page_param_map.iteritems():
        if isinstance( param, DataToolParameter ):
            # for each dataset parameter
            if param_values.get(name, None) != None:
                dbkeys.add( param_values[name].dbkey )
                data_params += 1
                # check meta data
                try:
                    param = param_values[name]
                    if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
                        # TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
                        pass
                    else:  # Validate interval datatype.
                        startCol = int( param.metadata.startCol )
                        endCol = int( param.metadata.endCol )
                        chromCol = int( param.metadata.chromCol )
                        if param.metadata.strandCol is not None:
                            strandCol = int( param.metadata.strandCol )
                        else:
                            strandCol = 0
                except:
                    error_msg = "The attributes of this dataset are not properly set. " + \
                        "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
                    error_map[name] = error_msg
            data_param_names.add( name )
    if len( dbkeys ) > 1:
        for name in data_param_names:
            error_map[name] = "All datasets must belong to same genomic build, " \
                "this dataset is linked to build '%s'" % param_values[name].dbkey
    if data_params != len(data_param_names):
        for name in data_param_names:
            error_map[name] = "A dataset of the appropriate type is required"

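# Illustrative sketch (hypothetical dataset names): for a form with two data
# parameters drawn from different builds, validate_input() fills error_map and
# Galaxy re-displays the form instead of creating a job, e.g.
#
#   error_map = {}
#   validate_input( trans, error_map,
#                   { 'input1': hg18_dataset, 'input2': mm9_dataset },
#                   page_param_map )
#   # error_map['input1'] == "All datasets must belong to same genomic build, ..."
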
# Commented out by INS, 5/30/2007. What is the PURPOSE of this?
def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    """Verify the output data after each run"""
    items = out_data.items()

    for name, data in items:
        try:
            # any output on stderr is treated as a failed job
            if stderr and len( stderr ) > 0:
                raise Exception( stderr )

        except Exception, exc:
            data.blurb = JOB_ERROR
            data.state = JOB_ERROR

## def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
##     pass

def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    exec_after_process(
        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)

    # fix up metadata columns on the merged output
    items = out_data.items()
    for name, data in items:
        if param_dict['returntype'] == True:
            data.metadata.chromCol = 1
            data.metadata.startCol = 2
            data.metadata.endCol = 3
        # merge always clobbers strand
        data.metadata.strandCol = None

def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    exec_after_process(
        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)

    # strip strand column if clusters were merged
    if param_dict["returntype"] == '1':
        items = out_data.items()
        for name, data in items:
            data.metadata.strandCol = None
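
# Summary of the post-job metadata fix-ups above:
#   exec_after_merge   - always clears strandCol on every output; when
#                        param_dict['returntype'] is true it also points
#                        chromCol/startCol/endCol at columns 1/2/3.
#   exec_after_cluster - clears strandCol only when param_dict['returntype']
#                        is the string '1'.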