Repository 'basecoverage'
hg clone https://toolshed.g2.bx.psu.edu/repos/devteam/basecoverage

Changeset 6:9c5ff4695c97 (2022-06-13)
Previous changeset 5:21898544368b (2022-06-13)
Commit message:
planemo upload for repository https://github.com/galaxyproject/tools-devteam/tree/master/tool_collections/gops/basecoverage commit 200bd4645dd768eb6ee1aab7d181b76d34d13d4c
modified:
gops_basecoverage.py
operation_filter.py
diff -r 21898544368b -r 9c5ff4695c97 gops_basecoverage.py
--- a/gops_basecoverage.py Mon Jun 13 16:20:48 2022 +0000
+++ b/gops_basecoverage.py Mon Jun 13 16:27:23 2022 +0000
@@ -8,7 +8,6 @@
 from __future__ import print_function
 
 import fileinput
-import sys
 
 from bx.cookbook import doc_optparse
 from bx.intervals.io import NiceReaderWrapper
@@ -16,33 +15,33 @@
 from bx.tabular.io import ParseError
 from galaxy.tools.util.galaxyops import fail, parse_cols_arg, skipped
 
-assert sys.version_info[:2] >= ( 2, 4 )
-
 
 def main():
-    options, args = doc_optparse.parse( __doc__ )
+    options, args = doc_optparse.parse(__doc__)
     try:
-        chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg( options.cols1 )
+        chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg(options.cols1)
         in_fname, out_fname = args
-    except:
+    except Exception:
         doc_optparse.exception()
 
-    g1 = NiceReaderWrapper( fileinput.FileInput( in_fname ),
-                            chrom_col=chr_col_1,
-                            start_col=start_col_1,
-                            end_col=end_col_1,
-                            strand_col=strand_col_1,
-                            fix_strand=True )
+    g1 = NiceReaderWrapper(
+        fileinput.FileInput(in_fname),
+        chrom_col=chr_col_1,
+        start_col=start_col_1,
+        end_col=end_col_1,
+        strand_col=strand_col_1,
+        fix_strand=True
+    )
 
     try:
         bases = base_coverage(g1)
     except ParseError as exc:
-        fail( "Invalid file format: %s" % str( exc ) )
-    out_file = open( out_fname, "w" )
-    out_file.write( "%s\n" % str( bases ) )
+        fail("Invalid file format: %s" % str(exc))
+    out_file = open(out_fname, "w")
+    out_file.write("%s\n" % str(bases))
     out_file.close()
     if g1.skipped > 0:
-        print(skipped( g1, filedesc="" ))
+        print(skipped(g1, filedesc=""))
 
 
 if __name__ == "__main__":
diff -r 21898544368b -r 9c5ff4695c97 operation_filter.py
--- a/operation_filter.py Mon Jun 13 16:20:48 2022 +0000
+++ b/operation_filter.py Mon Jun 13 16:27:23 2022 +0000
@@ -1,42 +1,35 @@
 # runs after the job (and after the default post-filter)
-from galaxy.jobs.handler import JOB_ERROR
 from galaxy.tools.parameters import DataToolParameter
 
-# Older py compatibility
-try:
-    set()
-except:
-    from sets import Set as set
 
-
-def validate_input( trans, error_map, param_values, page_param_map ):
+def validate_input(trans, error_map, param_values, page_param_map):
     dbkeys = set()
     data_param_names = set()
     data_params = 0
     for name, param in page_param_map.items():
-        if isinstance( param, DataToolParameter ):
+        if isinstance(param, DataToolParameter):
             # for each dataset parameter
             if param_values.get(name, None) is not None:
-                dbkeys.add( param_values[name].dbkey )
+                dbkeys.add(param_values[name].dbkey)
                 data_params += 1
                 # check meta data
                 try:
                     param = param_values[name]
-                    if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
+                    if isinstance(param.datatype, trans.app.datatypes_registry.get_datatype_by_extension('gff').__class__):
                         # TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
                         pass
                     else:  # Validate interval datatype.
-                        int( param.metadata.startCol )
-                        int( param.metadata.endCol )
-                        int( param.metadata.chromCol )
+                        int(param.metadata.startCol)
+                        int(param.metadata.endCol)
+                        int(param.metadata.chromCol)
                         if param.metadata.strandCol is not None:
-                            int( param.metadata.strandCol )
-                except:
+                            int(param.metadata.strandCol)
+                except Exception:
                     error_msg = "The attributes of this dataset are not properly set. " + \
                         "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
                     error_map[name] = error_msg
-            data_param_names.add( name )
-    if len( dbkeys ) > 1:
+            data_param_names.add(name)
+    if len(dbkeys) > 1:
         for name in data_param_names:
             error_map[name] = "All datasets must belong to same genomic build, " \
                 "this dataset is linked to build '%s'" % param_values[name].dbkey