Repository 'coverage'
hg clone https://toolshed.g2.bx.psu.edu/repos/devteam/coverage

Changeset 4:e3363e087468 (2022-06-13)
Previous changeset 3:4ef9819dc7fb (2017-06-22)
Commit message:
planemo upload for repository https://github.com/galaxyproject/tools-devteam/tree/master/tool_collections/gops/coverage commit d7b1a60c0aecc46b7f625c3e32f882562b512fd9
modified:
coverage.xml
operation_filter.py
diff -r 4ef9819dc7fb -r e3363e087468 coverage.xml
--- a/coverage.xml Thu Jun 22 18:41:02 2017 -0400
+++ b/coverage.xml Mon Jun 13 16:21:57 2022 +0000
@@ -18,7 +18,7 @@
         <param name="input2" type="data" format="interval" label="Is covered by" help="Second dataset" />
     </inputs>
     <outputs>
-        <data name="output" format_source="input1" metadata_source="input1" />
+        <data name="output" format="interval" metadata_source="input1" />
     </outputs>
     <tests>
         <test>
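
Note on the coverage.xml hunk above: the output dataset is now declared with a fixed datatype, format="interval", instead of inheriting its format from the first input via format_source="input1" (metadata is still copied from input1). A minimal way to sanity-check this locally, assuming a checkout of this repository in the working directory (the check itself is not part of the changeset):

import xml.etree.ElementTree as ET

tree = ET.parse("coverage.xml")                # local copy of the updated tool wrapper
output = tree.find("./outputs/data")           # the single <data> element changed in the hunk
assert output is not None, "no <data> output element found"
assert output.get("format") == "interval"      # new: explicit datatype
assert output.get("format_source") is None     # old attribute should be gone
print("output is declared as 'interval'")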
diff -r 4ef9819dc7fb -r e3363e087468 operation_filter.py
--- a/operation_filter.py Thu Jun 22 18:41:02 2017 -0400
+++ b/operation_filter.py Mon Jun 13 16:21:57 2022 +0000
@@ -45,22 +45,7 @@
             error_map[name] = "A dataset of the appropriate type is required"
 
 
-# Commented out by INS, 5/30/2007.  What is the PURPOSE of this?
-def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    """Verify the output data after each run"""
-    for data in out_data.values():
-        try:
-            if stderr and len( stderr ) > 0:
-                raise Exception( stderr )
-        except Exception:
-            data.blurb = JOB_ERROR
-            data.state = JOB_ERROR
-
-
 def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     for data in out_data.values():
         if param_dict['returntype'] is True:
@@ -72,9 +57,6 @@
 
 
 def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     if param_dict["returntype"] == '1':
         for data in out_data.values():
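
Note on the operation_filter.py hunks above: the unused exec_after_process hook, which marked output datasets as JOB_ERROR whenever the tool wrote to stderr, is removed, and exec_after_merge and exec_after_cluster no longer call it; only their strand-stripping logic remains. Read together with the context lines, the simplified hooks begin like this (a reconstruction from the diff; anything beyond the hunk boundaries is elided with ...):

def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    # strip strand column if clusters were merged
    for data in out_data.values():
        if param_dict['returntype'] is True:
            ...  # remainder lies outside the hunk shown above


def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
    # strip strand column if clusters were merged
    if param_dict["returntype"] == '1':
        for data in out_data.values():
            ...  # remainder lies outside the hunk shown above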