Previous changeset: 98:67628c7dc9f3 (2020-11-22) | Next changeset: 100:c749364c2283 (2020-11-23)

changeset 99:d4d88d393285 (Mon Nov 23 02:22:01 2020 +0000)

commit message:
    Uploaded

modified:
    toolfactory/rgToolFactory2.py
    toolfactory/rgToolFactory2.xml

added:
    toolfactory/galaxy-tool-test
    toolfactory/testtf.sh
    toolfactory/whoosh.sh

removed:
    toolfactory/galaxyxml/__init__.py
    toolfactory/galaxyxml/tool/__init__.py
    toolfactory/galaxyxml/tool/__pycache__/__init__.cpython-36.pyc
    toolfactory/galaxyxml/tool/__pycache__/import_xml.cpython-36.pyc
    toolfactory/galaxyxml/tool/import_xml.py
    toolfactory/galaxyxml/tool/parameters/__init__.py
    toolfactory/galaxyxml/tool/parameters/__pycache__/__init__.cpython-36.pyc
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxy-tool-test
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/galaxy-tool-test	Mon Nov 23 02:22:01 2020 +0000
@@ -0,0 +1,457 @@
+#!/usr/bin/env python
+
+import argparse
+import datetime as dt
+import json
+import logging
+import os
+import sys
+import tempfile
+from collections import namedtuple
+from concurrent.futures import thread, ThreadPoolExecutor
+
+import yaml
+
+from galaxy.tool_util.verify.interactor import (
+    DictClientTestConfig,
+    GalaxyInteractorApi,
+    verify_tool,
+)
+
+DESCRIPTION = """Script to quickly run a tool test against a running Galaxy instance."""
+DEFAULT_SUITE_NAME = "Galaxy Tool Tests"
+ALL_TESTS = -1
+ALL_TOOLS = "*"
+ALL_VERSION = "*"
+LATEST_VERSION = None
+
+
+TestReference = namedtuple("TestReference", ["tool_id", "tool_version", "test_index"])
+TestException = namedtuple("TestException", ["tool_id", "exception", "was_recorded"])
+
+
+class Results:
+
+    def __init__(self, default_suitename, test_json, append=False):
+        self.test_json = test_json or "-"
+        test_results = []
+        test_exceptions = []
+        suitename = default_suitename
+        if append:
+            assert test_json != "-"
+            with open(test_json) as f:
+                previous_results = json.load(f)
+                test_results = previous_results["tests"]
+                if "suitename" in previous_results:
+                    suitename = previous_results["suitename"]
+        self.test_results = test_results
+        self.test_exceptions = test_exceptions
+        self.suitename = suitename
+
+    def register_result(self, result):
+        self.test_results.append(result)
+
+    def register_exception(self, test_exception):
+        self.test_exceptions.append(test_exception)
+
+    def already_successful(self, test_reference):
+        test_id = _test_id_for_reference(test_reference)
+        for test_result in self.test_results:
+            if test_result.get('id') != test_id:
+                continue
+
+            has_data = test_result.get('has_data', False)
+            if has_data:
+                test_data = test_result.get("data", {})
+                if 'status' in test_data and test_data['status'] == 'success':
+                    return True
+
+        return False
+
+    def write(self):
+        tests = sorted(self.test_results, key=lambda el: el['id'])
+        n_passed, n_failures, n_skips = 0, 0, 0
+        n_errors = len([e for e in self.test_exceptions if not e.was_recorded])
+        for test in tests:
+            has_data = test.get('has_data', False)
+            if has_data:
+                test_data = test.get("data", {})
+                if 'status' not in test_data:
+                    raise Exception(f"Test result data {test_data} doesn't contain a status key.")
+                status = test_data['status']
+                if status == "success":
+                    n_passed += 1
+                elif status == "error":
+                    n_errors += 1
+                elif status == "skip":
+                    n_skips += 1
+                elif status == "failure":
+                    n_failures += 1
+        report_obj = {
+            'version': '0.1',
+            'suitename': self.suitename,
+            'results': {
+                'total': n_passed + n_failures + n_skips + n_errors,
+                'errors': n_errors,
+                'failures': n_failures,
+                'skips': n_skips,
+            },
+            'tests': tests,
+        }
+        if self.test_json == "-":
+            print(json.dumps(report_obj))
+        else:
+            with open(self.test_json, "w") as f:
+                json.dump(report_obj, f)
+
+    def info_message(self):
+        messages = []
+        passed_tests = self._tests_with_status('success')
+        messages.append("Passed tool tests ({}): {}".format(
+            len(passed_tests),
+            [t["id"] for t in passed_tests]
+        ))
+        failed_tests = self._tests_with_status('failure')
+        messages.append("Failed tool tests ({}): {}".format(
+            len(failed_tests),
[... truncated ...]
+    logger.setLevel(logging.DEBUG if verbose else logging.INFO)
+    logger.addHandler(console)
+
+    if not log_file:
+        # delete = false is chosen here because it is always nice to have a log file
+        # ready if you need to debug. Not having the "if only I had set a log file"
+        # moment after the fact.
+        temp = tempfile.NamedTemporaryFile(prefix="ephemeris_", delete=False)
+        log_file = temp.name
+    file_handler = logging.FileHandler(log_file)
+    logger.addHandler(file_handler)
+    logger.info(f"Storing log file in: {log_file}")
+    return logger
+
+
+def _arg_parser():
+    parser = argparse.ArgumentParser(description=DESCRIPTION)
+    parser.add_argument('-u', '--galaxy-url', default="http://localhost:8080", help='Galaxy URL')
+    parser.add_argument('-k', '--key', default=None, help='Galaxy User API Key')
+    parser.add_argument('-a', '--admin-key', default=None, help='Galaxy Admin API Key')
+    parser.add_argument('--force_path_paste', default=False, action="store_true", help='This requires Galaxy-side config option "allow_path_paste" enabled. Allows for fetching test data locally. Only for admins.')
+    parser.add_argument('-t', '--tool-id', default=ALL_TOOLS, help='Tool ID')
+    parser.add_argument('--tool-version', default=None, help='Tool Version (if tool id supplied). Defaults to just latest version, use * to test all versions')
+    parser.add_argument('-i', '--test-index', default=ALL_TESTS, type=int, help='Tool Test Index (starting at 0) - by default all tests will run.')
+    parser.add_argument('-o', '--output', default=None, help='directory to dump outputs to')
+    parser.add_argument('--append', default=False, action="store_true", help="Extend a test record json (created with --output-json) with additional tests.")
+    parser.add_argument('--skip-successful', default=False, action="store_true", help="When used with --append, skip previously run successful tests.")
+    parser.add_argument('-j', '--output-json', default=None, help='output metadata json')
+    parser.add_argument('--verbose', default=False, action="store_true", help="Verbose logging.")
+    parser.add_argument('-c', '--client-test-config', default=None, help="Test config YAML to help with client testing")
+    parser.add_argument('--suite-name', default=DEFAULT_SUITE_NAME, help="Suite name for tool test output")
+    parser.add_argument('--with-reference-data', dest="with_reference_data", default=False, action="store_true")
+    parser.add_argument('--skip-with-reference-data', dest="with_reference_data", action="store_false", help="Skip tests the Galaxy server believes use data tables or loc files.")
+    parser.add_argument('--history-per-suite', dest="history_per_test_case", default=False, action="store_false", help="Create new history per test suite (all tests in same history).")
+    parser.add_argument('--history-per-test-case', dest="history_per_test_case", action="store_true", help="Create new history per test case.")
+    parser.add_argument('--no-history-cleanup', default=False, action="store_true", help="Perserve histories created for testing.")
+    parser.add_argument('--parallel-tests', default=1, type=int, help="Parallel tests.")
+    parser.add_argument('--retries', default=0, type=int, help="Retry failed tests.")
+    parser.add_argument('--page-size', default=0, type=int, help="If positive, use pagination and just run one 'page' to tool tests.")
+    parser.add_argument('--page-number', default=0, type=int, help="If page size is used, run this 'page' of tests - starts with 0.")
+    parser.add_argument('--download-attempts', default=1, type=int, help="Galaxy may return a transient 500 status code for download if test results are written but not yet accessible.")
+    parser.add_argument('--download-sleep', default=1, type=int, help="If download attempts is greater than 1, the amount to sleep between download attempts.")
+    return parser
+
+
+if __name__ == "__main__":
+    main()
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxyxml/__init__.py
--- a/toolfactory/galaxyxml/__init__.py	Sun Nov 22 06:29:33 2020 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,64 +0,0 @@
-from builtins import object
-from builtins import str
-
-from lxml import etree
-
-
-class GalaxyXML(object):
-    def __init__(self):
-        self.root = etree.Element("root")
-
-    def export(self):
-        return etree.tostring(self.root, pretty_print=True, encoding="unicode")
-
-
-class Util(object):
-    @classmethod
-    def coerce(cls, data, kill_lists=False):
-        """Recursive data sanitisation
-        """
-        if isinstance(data, dict):
-            return {k: cls.coerce(v, kill_lists=kill_lists) for k, v in list(data.items()) if v is not None}
-        elif isinstance(data, list):
-            if kill_lists:
-                return cls.coerce(data[0])
-            else:
-                return [cls.coerce(v, kill_lists=kill_lists) for v in data]
-        else:
-            return cls.coerce_value(data)
-
-    @classmethod
-    def coerce_value(cls, obj):
-        """Make everything a string!
-        """
-        if isinstance(obj, bool):
-            if obj:
-                return "true"
-            else:
-                return "false"
-        elif isinstance(obj, str):
-            return obj
-        else:
-            return str(obj)
-
-    @classmethod
-    def clean_kwargs(cls, params, final=False):
-        if "kwargs" in params:
-            kwargs = params["kwargs"]
-            for k in kwargs:
-                params[k] = kwargs[k]
-            del params["kwargs"]
-        if "self" in params:
-            del params["self"]
-
-        if "__class__" in params:
-            del params["__class__"]
-
-        # There will be more params, it would be NICE to use a whitelist
-        # instead of a blacklist, but until we have more data let's just
-        # blacklist stuff we see commonly.
-        if final:
-            for blacklist in ("positional",):
-                if blacklist in params:
-                    del params[blacklist]
-        return params
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxyxml/tool/__init__.py
--- a/toolfactory/galaxyxml/tool/__init__.py	Sun Nov 22 06:29:33 2020 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,184 +0,0 @@
-import copy
-import logging
-
-from galaxyxml import GalaxyXML, Util
-from galaxyxml.tool.parameters import XMLParam
-
-from lxml import etree
-
-VALID_TOOL_TYPES = ("data_source", "data_source_async")
-VALID_URL_METHODS = ("get", "post")
-
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
-
-
-class Tool(GalaxyXML):
-
-    def __init__(
-        self,
-        name,
-        id,
-        version,
-        description,
-        executable,
-        hidden=False,
-        tool_type=None,
-        URL_method=None,
-        workflow_compatible=True,
-        interpreter=None,
-        version_command="interpreter filename.exe --version",
-        command_override=None,
-    ):
-
-        self.executable = executable
-        self.interpreter = interpreter
-        self.command_override = command_override
-        kwargs = {
-            "name": name,
-            "id": id,
-            "version": version,
-            "hidden": hidden,
-            "workflow_compatible": workflow_compatible,
-        }
-        self.version_command = version_command
-
-        # Remove some of the default values to make tools look a bit nicer
-        if not hidden:
-            del kwargs["hidden"]
-        if workflow_compatible:
-            del kwargs["workflow_compatible"]
-
-        kwargs = Util.coerce(kwargs)
-        self.root = etree.Element("tool", **kwargs)
-
-        if tool_type is not None:
-            if tool_type not in VALID_TOOL_TYPES:
-                raise Exception("Tool type must be one of %s" % ",".join(VALID_TOOL_TYPES))
-            else:
-                kwargs["tool_type"] = tool_type
-
-        if URL_method is not None:
-            if URL_method in VALID_URL_METHODS:
-                kwargs["URL_method"] = URL_method
-            else:
-                raise Exception("URL_method must be one of %s" % ",".join(VALID_URL_METHODS))
-
-        description_node = etree.SubElement(self.root, "description")
-        description_node.text = description
-
-    def add_comment(self, comment_txt):
-        comment = etree.Comment(comment_txt)
-        self.root.insert(0, comment)
-
-    def append_version_command(self):
-        version_command = etree.SubElement(self.root, "version_command")
-        try:
-            version_command.text = etree.CDATA(self.version_command)
-        except Exception:
-            pass
-
-    def append(self, sub_node):
-        if issubclass(type(sub_node), XMLParam):
-            self.root.append(sub_node.node)
-        else:
-            self.root.append(sub_node)
-
-    def clean_command_string(self, command_line):
-        clean = []
-        for x in command_line:
-            if x is not [] and x is not [""]:
-                clean.append(x)
-
-        return "\n".join(clean)
-
-    def export(self, keep_old_command=False):  # noqa
-
-        export_xml = copy.deepcopy(self)
-
-        try:
-            export_xml.append(export_xml.edam_operations)
-        except Exception:
-            pass
-
-        try:
-            export_xml.append(export_xml.edam_topics)
-        except Exception:
-            pass
-
-        try:
-            export_xml.append(export_xml.requirements)
-        except Exception:
-            pass
-
-        try:
-            export_xml.append(export_xml.configfiles)
-        except Exception:
-            pass
-
-        if self.command_override:
-            command_line = self.command_override
-        else:
-            command_line = []
-            try:
-                command_line.append(export_xml.inputs.cli())
-            except Exception as e:
-                logger.warning(str(e))
-
-            try:
-                command_line.append(export_xml.outputs.cli())
-            except Exception:
-                pass
-
-        # Add stdio section
-        stdio = etree.SubElement(export_xml.root, "stdio")
-        etree.SubElement(stdio, "exit_code", range="1:", level="fatal")
-
-        # Append version command
-        export_xml.append_version_command()
-
-        # Steal interpreter from kwargs
-        command_kwargs = {}
-        if export_xml.interpreter is not None:
-            command_kwargs["interpreter"] = export_xml.interpreter
-
-        # Add command section
-        command_node = etree.SubElement(export_xml.root, "command", **command_kwargs)
-
-        if keep_old_command:
-            if getattr(self, "command", None):
-                command_node.text = etree.CDATA(export_xml.command)
-            else:
-                logger.warning("The tool does not have any old command stored. " + "Only the command line is written.")
-                command_node.text = export_xml.executable
-        else:
-            if self.command_override:
-                actual_cli = export_xml.clean_command_string(command_line)
-            else:
-                actual_cli = "%s %s" % (export_xml.executable, export_xml.clean_command_string(command_line))
-            command_node.text = etree.CDATA(actual_cli.strip())
-
-        try:
-            export_xml.append(export_xml.inputs)
-        except Exception:
-            pass
-
-        try:
-            export_xml.append(export_xml.outputs)
-        except Exception:
-            pass
-
-        try:
-            export_xml.append(export_xml.tests)
-        except Exception:
-            pass
-
-        help_element = etree.SubElement(export_xml.root, "help")
-        help_element.text = etree.CDATA(export_xml.help)
-
-        try:
-            export_xml.append(export_xml.citations)
-        except Exception:
-            pass
-
-        return super(Tool, export_xml).export()
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxyxml/tool/__pycache__/__init__.cpython-36.pyc
Binary file toolfactory/galaxyxml/tool/__pycache__/__init__.cpython-36.pyc has changed

diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxyxml/tool/__pycache__/import_xml.cpython-36.pyc
Binary file toolfactory/galaxyxml/tool/__pycache__/import_xml.cpython-36.pyc has changed
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxyxml/tool/import_xml.py
--- a/toolfactory/galaxyxml/tool/import_xml.py	Sun Nov 22 06:29:33 2020 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,713 +0,0 @@
-import logging
-import xml.etree.ElementTree as ET
-
-import galaxyxml.tool as gxt
-import galaxyxml.tool.parameters as gxtp
-
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
-
-
-class GalaxyXmlParser(object):
-    """
-    Class to import content from an existing Galaxy XML wrapper.
-    """
-
-    def _init_tool(self, xml_root):
-        """
-        Init tool from existing xml tool.
-
-        :param xml_root: root of the galaxy xml file.
-        :type xml_root: :class:`xml.etree._Element`
-        """
-        version_cmd = None
-        description = None
-        for child in xml_root:
-            if child.tag == "description":
-                description = child.text
-            elif child.tag == "command":
-                executable = child.text.split()[0]
-                command = child.text
-            elif child.tag == "version_command":
-                version_cmd = child.text
-
-        tool = gxt.Tool(
-            xml_root.attrib["name"],
-            xml_root.attrib["id"],
-            xml_root.attrib.get("version", None),
-            description,
-            executable,
-            hidden=xml_root.attrib.get("hidden", False),
-            tool_type=xml_root.attrib.get("tool_type", None),
-            URL_method=xml_root.attrib.get("URL_method", None),
-            workflow_compatible=xml_root.attrib.get("workflow_compatible", True),
-            version_command=version_cmd,
-        )
-        tool.command = command
-        return tool
-
-    def _load_description(self, tool, desc_root):
-        """
-        <description> is already loaded during initiation.
-
-        :param tool: Tool object from galaxyxml.
-        :type tool: :class:`galaxyxml.tool.Tool`
-        :param desc_root: root of <description> tag.
-        :type desc_root: :class:`xml.etree._Element`
-        """
-        logger.info("<description> is loaded during initiation of the object.")
-
-    def _load_version_command(self, tool, vers_root):
-        """
-        <version_command> is already loaded during initiation.
-
-        :param tool: Tool object from galaxyxml.
-        :type tool: :class:`galaxyxml.tool.Tool`
-        :param vers_root: root of <version_command> tag.
-        :type vers_root: :class:`xml.etree._Element`
-        """
-        logger.info("<version_command> is loaded during initiation of the object.")
-
-    def _load_stdio(self, tool, stdio_root):
-        """
-        So far, <stdio> is automatically generated by galaxyxml.
-
-        :param tool: Tool object from galaxyxml.
-        :type tool: :class:`galaxyxml.tool.Tool`
-        :param desc_root: root of <stdio> tag.
-        :type desc_root: :class:`xml.etree._Element`
-        """
-        logger.info("<stdio> is not loaded but automatically generated by galaxyxml.")
-
-    def _load_command(self, tool, desc_root):
-        """
-        <command> is already loaded during initiation.
-
-        :param tool: Tool object from galaxyxml.
-        :type tool: :class:`galaxyxml.tool.Tool`
-        :param desc_root: root of <command> tag.
-        :type desc_root: :class:`xml.etree._Element`
-        """
-        logger.info("<command> is loaded during initiation of the object.")
-
-    def _load_help(self, tool, help_root):
-        """
-        Load the content of the <help> into the tool.
-
-        :param tool: Tool object from galaxyxml.
-        :type tool: :class:`galaxyxml.tool.Tool`
-        :param requirements_root: root of <help> tag.
-        :type requirements_root: :class:`xml.etree._Element`
-        """
-        tool.help = help_root.text
-
-    def _load_requirements(self, tool, requirements_root):
-        """
-        Add <requirements> to the tool.
-
-        :param tool: Tool object from galaxyxml.
-        :type tool: :class:`galaxyxml.tool.Tool`
-        :param requirements_root: root of <requirements> tag.
-        :type requirements_root: :class:`xml.etree._Element`
-        """
[... truncated ...]
-            )
-        outputs_root.append(collection)
-
-    def _load_discover_datasets(self, root, disc_root):
-        """
-        Add <discover_datasets> to root (<collection>).
-
-        :param root: root to append <collection> to.
-        :param disc_root: root of <discover_datasets> tag.
-        :param disc_root: :class:`xml.etree._Element`
-        """
-        root.append(
-            gxtp.DiscoverDatasets(
-                disc_root.attrib["pattern"],
-                directory=disc_root.attrib.get("directory", None),
-                format=disc_root.attrib.get("format", None),
-                ext=disc_root.attrib.get("ext", None),
-                visible=disc_root.attrib.get("visible", None),
-            )
-        )
-
-    def _load_filter(self, root, filter_root):
-        """
-        Add <filter> to root (<collection> or <data>).
-
-        :param root: root to append <collection> to.
-        :param coll_root: root of <filter> tag.
-        :param coll_root: :class:`xml.etree._Element`
-        """
-        root.append(gxtp.OutputFilter(filter_root.text))
-
-    def load_outputs(self, root, outputs_root):
-        """
-        Add <outputs> to the root.
-
-        :param root: root to attach <outputs> to (<tool>).
-        :param tests_root: root of <outputs> tag.
-        :type tests_root: :class:`xml.etree._Element`
-        """
-        for out_child in outputs_root:
-            try:
-                getattr(self, "_load_{}".format(out_child.tag))(root, out_child)
-            except AttributeError:
-                logger.warning(out_child.tag + " tag is not processed for <outputs>.")
-
-
-class TestsParser(object):
-    """
-    Class to parse content of the <tests> tag from a Galaxy XML wrapper.
-    """
-
-    def _load_param(self, test_root, param_root):
-        """
-        Add <param> to the <test>.
-
-        :param root: <test> root to append <param> to.
-        :param repeat_root: root of <param> tag.
-        :param repeat_root: :class:`xml.etree._Element`
-        """
-        test_root.append(
-            gxtp.TestParam(
-                param_root.attrib["name"],
-                value=param_root.attrib.get("value", None),
-                ftype=param_root.attrib.get("ftype", None),
-                dbkey=param_root.attrib.get("dbkey", None),
-            )
-        )
-
-    def _load_output(self, test_root, output_root):
-        """
-        Add <output> to the <test>.
-
-        :param root: <test> root to append <output> to.
-        :param repeat_root: root of <output> tag.
-        :param repeat_root: :class:`xml.etree._Element`
-        """
-        test_root.append(
-            gxtp.TestOutput(
-                name=output_root.attrib.get("name", None),
-                file=output_root.attrib.get("file", None),
-                ftype=output_root.attrib.get("ftype", None),
-                sort=output_root.attrib.get("sort", None),
-                value=output_root.attrib.get("value", None),
-                md5=output_root.attrib.get("md5", None),
-                checksum=output_root.attrib.get("checksum", None),
-                compare=output_root.attrib.get("compare", None),
-                lines_diff=output_root.attrib.get("lines_diff", None),
-                delta=output_root.attrib.get("delta", None),
-            )
-        )
-
-    def load_tests(self, root, tests_root):
-        """
-        Add <tests> to the root.
-
-        :param root: root to attach <tests> to (<tool>).
-        :param tests_root: root of <tests> tag.
-        :type tests_root: :class:`xml.etree._Element`
-        """
-        for test_root in tests_root:
-            test = gxtp.Test()
-            for test_child in test_root:
-                try:
-                    getattr(self, "_load_{}".format(test_child.tag))(test, test_child)
-                except AttributeError:
-                    logger.warning(test_child.tag + " tag is not processed within <test>.")
-            root.append(test)
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxyxml/tool/parameters/__init__.py
--- a/toolfactory/galaxyxml/tool/parameters/__init__.py	Sun Nov 22 06:29:33 2020 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,739 +0,0 @@
-from builtins import object
-from builtins import str
-
-from galaxyxml import Util
-
-from lxml import etree
-
-
-
-class XMLParam(object):
-    name = "node"
-
-    def __init__(self, *args, **kwargs):
-        # http://stackoverflow.com/a/12118700
-        self.children = []
-        kwargs = {k: v for k, v in list(kwargs.items()) if v is not None}
-        kwargs = Util.coerce(kwargs, kill_lists=True)
-        kwargs = Util.clean_kwargs(kwargs, final=True)
-        self.node = etree.Element(self.name, **kwargs)
-
-    def append(self, sub_node):
-        if self.acceptable_child(sub_node):
-            # If one of ours, they aren't etree nodes, they're custom objects
-            if issubclass(type(sub_node), XMLParam):
-                self.node.append(sub_node.node)
-                self.children.append(sub_node)
-            else:
-                raise Exception(
-                    "Child was unacceptable to parent (%s is not appropriate for %s)" % (type(self), type(sub_node))
-                )
-        else:
-            raise Exception(
-                "Child was unacceptable to parent (%s is not appropriate for %s)" % (type(self), type(sub_node))
-            )
-
-    def validate(self):
-        # Very few need validation, but some nodes we may want to have
-        # validation routines on. Should only be called when DONE.
-        for child in self.children:
-            # If any child fails to validate return false.
-            if not child.validate():
-                return False
-        return True
-
-    def cli(self):
-        lines = []
-        for child in self.children:
-            lines.append(child.command_line())
-            # lines += child.command_line()
-        return "\n".join(lines)
-
-    def command_line(self):
-        return None
-
-
-class RequestParamTranslation(XMLParam):
-    name = "request_param_translation"
-
-    def __init__(self, **kwargs):
-        self.node = etree.Element(self.name)
-
-    def acceptable_child(self, child):
-        return isinstance(child, RequestParamTranslation)
-
-
-class RequestParam(XMLParam):
-    name = "request_param"
-
-    def __init__(self, galaxy_name, remote_name, missing, **kwargs):
-        # TODO: bulk copy locals into self.attr?
-        self.galaxy_name = galaxy_name
-        # http://stackoverflow.com/a/1408860
-        params = Util.clean_kwargs(locals().copy())
-        super(RequestParam, self).__init__(**params)
-
-    def acceptable_child(self, child):
-        return isinstance(child, AppendParam) and self.galaxy_name == "URL"
-
-
-class AppendParam(XMLParam):
-    name = "append_param"
-
-    def __init__(self, separator="&", first_separator="?", join="=", **kwargs):
-        params = Util.clean_kwargs(locals().copy())
-        super(AppendParam, self).__init__(**params)
-
-    def acceptable_child(self, child):
-        return isinstance(child, AppendParamValue)
-
-
-class AppendParamValue(XMLParam):
-    name = "value"
-
-    def __init__(self, name="_export", missing="1", **kwargs):
-        params = Util.clean_kwargs(locals().copy())
-        super(AppendParamValue, self).__init__(**params)
-
-    def acceptable_child(self, child):
-        return False
-
-
-class EdamOperations(XMLParam):
-    name = "edam_operations"
-
-    def acceptable_child(self, child):
-        return issubclass(type(child), EdamOperation)
-
-    def has_operation(self, edam_operation):
-        """
-        Check the presence of a given edam_operation.
-
-        :type edam_operation: STRING
-        """
-        for operation in self.children:
-            if operation.node.text == edam_operation:
-                return True
-        return False
-
-
-class EdamOperation(XMLParam):
-    name = "edam_operation"
-
-    def __init__(self, value):
-        super(EdamOperation, self).__init__()
-        self.node.text = str(value)
-
-
-class EdamTopics(XMLParam):
-    name = "edam_topics"
-
-    def acceptable_child(self, chil
[... truncated ...]
-            return self.command_line_override
-        else:
-            return "%s%s%s" % (self.flag(), self.space_between_arg, self.mako_name())
-
-    def mako_name(self):
-        return "$" + self.mako_identifier
-
-    def flag(self):
-        flag = "-" * self.num_dashes
-        return flag + self.mako_identifier
-
-    def acceptable_child(self, child):
-        return isinstance(child, OutputFilter) or isinstance(child, ChangeFormat) or isinstance(child, DiscoverDatasets)
-
-
-class OutputFilter(XMLParam):
-    name = "filter"
-
-    def __init__(self, text, **kwargs):
-        params = Util.clean_kwargs(locals().copy())
-        del params["text"]
-        super(OutputFilter, self).__init__(**params)
-        self.node.text = text
-
-    def acceptable_child(self, child):
-        return False
-
-
-class ChangeFormat(XMLParam):
-    name = "change_format"
-
-    def __init__(self, **kwargs):
-        params = Util.clean_kwargs(locals().copy())
-        super(ChangeFormat, self).__init__(**params)
-
-    def acceptable_child(self, child):
-        return isinstance(child, ChangeFormatWhen)
-
-
-class ChangeFormatWhen(XMLParam):
-    name = "when"
-
-    def __init__(self, input, format, value, **kwargs):
-        params = Util.clean_kwargs(locals().copy())
-        super(ChangeFormatWhen, self).__init__(**params)
-
-    def acceptable_child(self, child):
-        return False
-
-
-class OutputCollection(XMLParam):
-    name = "collection"
-
-    def __init__(
-        self,
-        name,
-        type=None,
-        label=None,
-        format_source=None,
-        type_source=None,
-        structured_like=None,
-        inherit_format=None,
-        **kwargs
-    ):
-        params = Util.clean_kwargs(locals().copy())
-        super(OutputCollection, self).__init__(**params)
-
-    def acceptable_child(self, child):
-        return isinstance(child, OutputData) or isinstance(child, OutputFilter) or isinstance(child, DiscoverDatasets)
-
-
-class DiscoverDatasets(XMLParam):
-    name = "discover_datasets"
-
-    def __init__(self, pattern, directory=None, format=None, ext=None, visible=None, **kwargs):
-        params = Util.clean_kwargs(locals().copy())
-        super(DiscoverDatasets, self).__init__(**params)
-
-
-class Tests(XMLParam):
-    name = "tests"
-
-    def acceptable_child(self, child):
-        return issubclass(type(child), Test)
-
-
-class Test(XMLParam):
-    name = "test"
-
-    def acceptable_child(self, child):
-        return isinstance(child, TestParam) or isinstance(child, TestOutput)
-
-
-class TestParam(XMLParam):
-    name = "param"
-
-    def __init__(self, name, value=None, ftype=None, dbkey=None, **kwargs):
-        params = Util.clean_kwargs(locals().copy())
-        super(TestParam, self).__init__(**params)
-
-
-class TestOutput(XMLParam):
-    name = "output"
-
-    def __init__(
-        self,
-        name=None,
-        file=None,
-        ftype=None,
-        sort=None,
-        value=None,
-        md5=None,
-        checksum=None,
-        compare=None,
-        lines_diff=None,
-        delta=None,
-        **kwargs
-    ):
-        params = Util.clean_kwargs(locals().copy())
-        super(TestOutput, self).__init__(**params)
-
-
-class Citations(XMLParam):
-    name = "citations"
-
-    def acceptable_child(self, child):
-        return issubclass(type(child), Citation)
-
-    def has_citation(self, type, value):
-        """
-        Check the presence of a given citation.
-
-        :type type: STRING
-        :type value: STRING
-        """
-        for citation in self.children:
-            if citation.node.attrib["type"] == type and citation.node.text == value:
-                return True
-        return False
-
-
-class Citation(XMLParam):
-    name = "citation"
-
-    def __init__(self, type, value):
-        passed_kwargs = {}
-        passed_kwargs["type"] = type
-        super(Citation, self).__init__(**passed_kwargs)
-        self.node.text = str(value)
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/galaxyxml/tool/parameters/__pycache__/__init__.cpython-36.pyc
Binary file toolfactory/galaxyxml/tool/parameters/__pycache__/__init__.cpython-36.pyc has changed
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/rgToolFactory2.py
--- a/toolfactory/rgToolFactory2.py	Sun Nov 22 06:29:33 2020 +0000
+++ b/toolfactory/rgToolFactory2.py	Mon Nov 23 02:22:01 2020 +0000
@@ -23,6 +23,7 @@
 # well well. sh run_tests.sh --id rgtf2 --report_file tool_tests_tool_conf.html functional.test_toolbox
 # does the needful. Use GALAXY_TEST_SAVE /foo to save outputs - only the tar.gz - not the rest sadly
 # GALAXY_TEST_NO_CLEANUP GALAXY_TEST_TMP_DIR=wherever
+# planemo test --engine docker_galaxy --test_data ./test-data/ --docker_extra_volume ./test-data rgToolFactory2.xml
 
 import argparse
 import datetime
@@ -670,6 +671,93 @@
         logging.debug("run done")
         return retval
 
+
+    def gal_tool_test(self):
+        """
+        This handy script writes test outputs even if they don't exist
+        galaxy-tool-test [-h] [-u GALAXY_URL] [-k KEY] [-a ADMIN_KEY] [--force_path_paste] [-t TOOL_ID] [--tool-version TOOL_VERSION]
+            [-i TEST_INDEX] [-o OUTPUT] [--append] [-j OUTPUT_JSON] [--verbose] [-c CLIENT_TEST_CONFIG]
+        galaxy-tool-test -u http://localhost:8080 -a 3c9afe09f1b7892449d266109639c104 -o /tmp/foo -t hello -j /tmp/foo/hello.json --verbose
+        handy - just leaves outputs in -o
+        """
+        if os.path.exists(self.tlog):
+            tout = open(self.tlog, "a")
+        else:
+            tout = open(self.tlog, "w")
+        testouts = tempfile.mkdtemp(suffix=None, prefix="tftemp")
+        dummy, tfile = tempfile.mkstemp()
+        cll = [
+            os.path.join(self.args.tool_dir,"galaxy-tool-test"),
+            "-u",
+            self.args.galaxy_url,
+            "-k",
+            self.args.galaxy_api_key,
+            "-t",
+            self.args.tool_name,
+            "-o",
+            testouts,
+        ]
+        subp = subprocess.run(
+            cll, shell=False, stderr=dummy, stdout=dummy
+        )
+        outfiles = []
+        for p in self.outfiles:
+            oname = p[ONAMEPOS]
+            outfiles.append(oname)
+        with os.scandir(testouts) as outs:
+            for entry in outs:
+                if not entry.is_file():
+                    continue
+                dest = os.path.join(self.tooloutdir, entry.name)
+                src = os.path.join(testouts, entry.name)
+                shutil.copyfile(src, dest)
+                dest = os.path.join(self.testdir, entry.name)
+                src = os.path.join(testouts, entry.name)
+                shutil.copyfile(src, dest)
+                dest = os.path.join(self.repdir,f"{entry.name}_sample")
+                tout.write(f"## found and moved output {entry.name} to {dest}\n")
+        tout.close()
+        shutil.rmtree(testouts)
+        return subp.returncode
+
+    def gal_test(self):
+        """
+        Uses the built in galaxy tool tester run_test.sh
+
+        export GALAXY_TEST_SAVE="./foo" && export GALAXY_TEST_NO_CLEANUP="1" \
+        && export GALAXY_TEST_TMP_DIR=./foo && sh run_tests.sh --id rgtf2 --report_file tool_tests_tool_conf.html functional.test_toolbox
+
+        """
+        testdir = tempfile.mkdtemp(suffix=None, prefix="tftemp")
+        tool_test_rep = f"{self.tool_name}_galaxy_test_report_html.html"
+        if os.path.exists(self.tlog):
+            tout = open(self.tlog, "a")
+        else:
+            tout = open(self.tlog, "w")
+
+        ourenv = os.environ
+        ourenv["GALAXY_TEST_SAVE"] = testdir
+        ourenv["GALAXY_TEST_NO_CLEANUP"] = "1"
+        ourenv["GALAXY_TEST_TMP_DIR"] = testdir
+
+        cll = [
+            "sh", f"{self.args.galaxy_root}/run_tests.sh", "--id", self.args.tool_name,
+            "--report_file", os.path.join(testdir,tool_test_rep), "functional.test_toolbox",
+        ]
+        subp = subprocess.run(
+            cll, env = ourenv,
+            shell=False, cwd=self.args.galaxy_root, stderr=tout, stdout=tout
+        )
+        src = os.path.join(testdir, tool_test_rep)
+        if os.path.isfile(src):
+            dest = os.path.join(self.repdir, tool_test_rep)
+            shutil.copyfile(src, dest)
+        else:
+            tout.write(f"### {src} not found\n")
+        tout.close()
+        return subp.return
[... truncated ...]
-            "--report_file", tool_test_rep_path, "functional.test_toolbox",
-        ]
-        subp = subprocess.run(
-            cll, env = ourenv,
-            shell=False, cwd=self.testdir, stderr=tout, stdout=tout
-        )
-        tout.close()
-        return subp.returncode
-
-
-
     def writeShedyml(self):
         """for planemo"""
         yuser = self.args.user_email.split("@")[0]
@@ -1112,57 +1132,23 @@
 
         for p in self.outfiles:
             oname = p[ONAMEPOS]
-            src = os.path.join(self.testdir,oname)
-            if os.path.isfile(src):
-                dest = os.path.join(self.testdir, "%s_sample" % oname)
-                shutil.copyfile(src, dest)
-                dest = os.path.join(self.repdir, "%s.sample" % (oname))
-                shutil.copyfile(src, dest)
-            else:
-                print(
-                    "### problem - output file %s not found in tooloutdir %s"
-                    % (src, self.tooloutdir)
-                )
+            tdest = os.path.join(self.testdir, "%s_sample" % oname)
+            if not os.path.isfile(tdest):
+                src = os.path.join(self.testdir,oname)
+                if os.path.isfile(src):
+                    shutil.copyfile(src, tdest)
+                    dest = os.path.join(self.repdir, "%s.sample" % (oname))
+                    shutil.copyfile(src, dest)
+                else:
+                    print(
+                        "### problem - output file %s not found in testdir %s"
+                        % (tdest, self.testdir)
+                    )
         tf = tarfile.open(self.newtarpath, "w:gz")
         tf.add(name=self.tooloutdir, arcname=self.tool_name, filter=exclude_function)
         tf.close()
         shutil.copyfile(self.newtarpath, self.args.new_tool)
 
-    def fakeToolTar(self):
-        """move fake outputs into test-data and prepare the tarball"""
-        excludeme = "tool_test_output"
-
-        def exclude_function(tarinfo):
-            filename = tarinfo.name
-            return (
-                None
-                if filename.startswith(excludeme)
-                else tarinfo
-            )
-
-        for p in self.outfiles:
-            oname = p[ONAMEPOS]
-            src = os.path.join(self.testdir,oname)
-            if os.path.isfile(src):
-                dest = os.path.join(self.testdir, "%s_sample" % oname)
-                shutil.copyfile(src, dest)
-                dest = os.path.join(self.repdir, "%s.sample" % (oname))
-                shutil.copyfile(src, dest)
-            else:
-                with open(src,'w') as fayk:
-                    fayk.write('fake!\n')
-                dest = os.path.join(self.testdir, "%s_sample" % oname)
-                shutil.copyfile(src, dest)
-                print(
-                    "### problem - output file %s not found in tooloutdir %s so faked"
-                    % (src, self.tooloutdir)
-                )
-        tf = tarfile.open(self.newtarpath, "w:gz")
-        tf.add(name=self.tooloutdir, arcname=self.tool_name, filter=exclude_function)
-        tf.close()
-        shutil.copyfile(self.newtarpath, self.args.new_tool)
-
-
     def moveRunOutputs(self):
         """need to move planemo or run outputs into toolfactory collection"""
         with os.scandir(self.tooloutdir) as outs:
@@ -1254,12 +1240,16 @@
         r.moveRunOutputs()
         r.makeToolTar()
     else:
-        r.fakeToolTar()
+        r.makeToolTar()
+        #r.planemo_shedLoad()
         r.shedLoad()
         r.eph_galaxy_load()
-        retcode = r.gal_test(genoutputs=True) # this fails
+        retcode = r.gal_tool_test() # writes outputs
         r.makeToolTar()
-        retcode = r.gal_test(genoutputs=False)
+        #r.planemo_shedLoad()
+        r.shedLoad()
+        r.eph_galaxy_load()
+        retcode = r.gal_test()
         r.moveRunOutputs()
         r.makeToolTar()
         print(f"second galaxy_test returned {retcode}")
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/rgToolFactory2.xml
--- a/toolfactory/rgToolFactory2.xml	Sun Nov 22 06:29:33 2020 +0000
+++ b/toolfactory/rgToolFactory2.xml	Mon Nov 23 02:22:01 2020 +0000
@@ -135,8 +135,6 @@
     <requirement type="package" version="0.4.11">galaxyxml</requirement>
     <requirement type="package" version="0.14.0">bioblend</requirement>
     <requirement type="package" version="0.10.6">ephemeris</requirement>
-    <requirement type="package" version="0.72.0">planemo</requirement>
-    <requirement type="package" version="4.3.1">docker-py</requirement>
   </requirements>
 <command ><![CDATA[
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/testtf.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/testtf.sh	Mon Nov 23 02:22:01 2020 +0000
@@ -0,0 +1,2 @@
+planemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/galaxy/tools/tool_makers/toolfactory &>foo
+
diff -r 67628c7dc9f3 -r d4d88d393285 toolfactory/whoosh.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/whoosh.sh	Mon Nov 23 02:22:01 2020 +0000
@@ -0,0 +1,16 @@
+# using the galaxy venv this seems to work
+. /galaxy_venv/bin/activate
+python scripts/tool_shed/build_ts_whoosh_index.py -c config/tool_shed.yml --config-section tool_shed
+# eeesh. /etc/galaxy is where the actual galaxy.yml lives - despite other configs being where they might be expected
+# fix tool_shed.yml to 0.0.0.0 and admin email
+# add tgz to datatypes :(
+# need to source a venv in /export/tool_deps/toolfactorydeps/0.01/fubar/toolfactorydeps/9e9428fe9134/env.sh
+# as nothing was done by the setup_virtualenv step apparently.
+# gcc and friends for planemo pip installation
+# File "/galaxy-central/lib/galaxy/tool_util/verify/interactor.py", line 595, in <listcomp>
+# test_user = [user for user in all_users if user["email"] == email][0]
+# add local to tool_sheds_conf.xml
+# <tool_sheds>
+# <tool_shed name="local" url="http://localhost:9009"/>
+# mercurial > 5.5!!
+