fubar / toolfactory_gtn
changeset 6:efefe43f23c8 draft default tip
Uploaded

| field | value |
|---|---|
| author | fubar |
| date | Fri, 30 Apr 2021 02:10:32 +0000 |
| parents | e2c8c2fa192d |
| children | |
| files | toolfactory/ToolFactory.py toolfactory/ToolFactory.xml toolfactory/ToolFactory_tester.xml toolfactory/install-history.py toolfactory/install_tf_demos.py toolfactory/testclass.py toolfactory/toolwatcher.py |
| diffstat | 7 files changed, 373 insertions(+), 211 deletions(-) |
--- a/toolfactory/ToolFactory.py Tue Apr 27 23:33:49 2021 +0000 +++ b/toolfactory/ToolFactory.py Fri Apr 30 02:10:32 2021 +0000 @@ -28,6 +28,7 @@ import tarfile import tempfile import time +import urllib from bioblend import ConnectionError from bioblend import galaxy @@ -70,13 +71,14 @@ citation_tuples.append(("bibtex", citation[len("bibtex") :].strip())) return citation_tuples -class ToolConfUpdater(): + +class Tool_Conf_Updater(): # update config/tool_conf.xml with a new tool unpacked in /tools # requires highly insecure docker settings - like write to tool_conf.xml and to tools ! # if in a container possibly not so courageous. # Fine on your own laptop but security red flag for most production instances - def __init__(self, args, tool_conf_path, new_tool_archive_path, new_tool_name, tool_dir): + def __init__(self, args, tool_conf_path, new_tool_archive_path, new_tool_name, tool_dir): self.args = args self.tool_conf_path = os.path.join(args.galaxy_root,tool_conf_path) self.tool_dir = os.path.join(args.galaxy_root, tool_dir) @@ -138,7 +140,7 @@ if False and self.args.packages and self.args.packages > '': self.install_deps() -class ScriptRunner: +class Tool_Factory: """Wrapper for an arbitrary script uses galaxyxml @@ -194,9 +196,7 @@ ) self.args = args self.cleanuppar() - self.lastclredirect = None self.lastxclredirect = None - self.cl = [] self.xmlcl = [] self.is_positional = self.args.parampass == "positional" if self.args.sysexe: @@ -209,7 +209,6 @@ self.executeme = [self.args.packages.split(",")[0].split(":")[0].strip(), ] else: self.executeme = None - aCL = self.cl.append aXCL = self.xmlcl.append assert args.parampass in [ "0", @@ -252,13 +251,10 @@ self.test_override = None if self.args.script_path: for ex in self.executeme: - aCL(ex) aXCL(ex) - aCL(self.sfile) aXCL("$runme") else: for ex in self.executeme: - aCL(ex) aXCL(ex) if self.args.parampass == "0": @@ -273,35 +269,23 @@ def clsimple(self): """no parameters or repeats - uses < and > for i/o""" - aCL = self.cl.append aXCL = self.xmlcl.append if len(self.infiles) > 0: - aCL("<") - aCL(self.infiles[0]["infilename"]) aXCL("<") aXCL("$%s" % self.infiles[0]["infilename"]) if len(self.outfiles) > 0: - aCL(">") - aCL(self.outfiles[0]["name"]) aXCL(">") aXCL("$%s" % self.outfiles[0]["name"]) if self.args.cl_user_suffix: # DIY CL end clp = shlex.split(self.args.cl_user_suffix) for c in clp: - aCL(c) aXCL(c) def prepargp(self): - clsuffix = [] xclsuffix = [] for i, p in enumerate(self.infiles): nam = p["infilename"] if p["origCL"].strip().upper() == "STDIN": - appendme = [ - nam, - nam, - "< %s" % nam, - ] xappendme = [ nam, nam, @@ -312,16 +296,12 @@ over = "" if rep: over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' - appendme = [p["CL"], p["CL"], ""] xappendme = [p["CL"], "$%s" % p["CL"], over] - clsuffix.append(appendme) xclsuffix.append(xappendme) for i, p in enumerate(self.outfiles): if p["origCL"].strip().upper() == "STDOUT": - self.lastclredirect = [">", p["name"]] self.lastxclredirect = [">", "$%s" % p["name"]] else: - clsuffix.append([p["name"], p["name"], ""]) xclsuffix.append([p["name"], "$%s" % p["name"], ""]) for p in self.addpar: nam = p["name"] @@ -330,40 +310,27 @@ over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' else: over = p["override"] - clsuffix.append([p["CL"], nam, over]) xclsuffix.append([p["CL"], '"$%s"' % nam, over]) for p in self.selpar: - clsuffix.append([p["CL"], p["name"], p["override"]]) xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) self.xclsuffix = 
xclsuffix - self.clsuffix = clsuffix def prepclpos(self): - clsuffix = [] xclsuffix = [] for i, p in enumerate(self.infiles): if p["origCL"].strip().upper() == "STDIN": - appendme = [ - "999", - p["infilename"], - "< $%s" % p["infilename"], - ] xappendme = [ "999", p["infilename"], "< $%s" % p["infilename"], ] else: - appendme = [p["CL"], p["infilename"], ""] xappendme = [p["CL"], "$%s" % p["infilename"], ""] - clsuffix.append(appendme) xclsuffix.append(xappendme) for i, p in enumerate(self.outfiles): if p["origCL"].strip().upper() == "STDOUT": - self.lastclredirect = [">", p["name"]] self.lastxclredirect = [">", "$%s" % p["name"]] else: - clsuffix.append([p["CL"], p["name"], ""]) xclsuffix.append([p["CL"], "$%s" % p["name"], ""]) for p in self.addpar: nam = p["name"] @@ -371,15 +338,11 @@ if rep: print(f'### warning. Repeats for {nam} ignored - not permitted in positional parameter command lines!') over = p["override"] - clsuffix.append([p["CL"], nam, over]) xclsuffix.append([p["CL"], '"$%s"' % nam, over]) for p in self.selpar: - clsuffix.append([p["CL"], p["name"], p["override"]]) xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) - clsuffix.sort() xclsuffix.sort() self.xclsuffix = xclsuffix - self.clsuffix = clsuffix def prepScript(self): rx = open(self.args.script_path, "r").readlines() @@ -443,12 +406,6 @@ def clpositional(self): # inputs in order then params - aCL = self.cl.append - for (k, v, koverride) in self.clsuffix: - if " " in v: - aCL("%s" % v) - else: - aCL(v) aXCL = self.xmlcl.append for (k, v, koverride) in self.xclsuffix: aXCL(v) @@ -458,13 +415,11 @@ if self.args.cl_user_suffix: # DIY CL end clp = shlex.split(self.args.cl_user_suffix) for c in clp: - aCL(c) aXCL(c) def clargparse(self): """argparse style""" - aCL = self.cl.append aXCL = self.xmlcl.append # inputs then params in argparse named form @@ -479,22 +434,12 @@ k = "--%s" % k aXCL(k) aXCL(v) - for (k, v, koverride) in self.clsuffix: - if koverride > "": - k = koverride - elif len(k.strip()) == 1: - k = "-%s" % k - else: - k = "--%s" % k - aCL(k) - aCL(v) if self.lastxclredirect: aXCL(self.lastxclredirect[0]) aXCL(self.lastxclredirect[1]) if self.args.cl_user_suffix: # DIY CL end clp = shlex.split(self.args.cl_user_suffix) for c in clp: - aCL(c) aXCL(c) def getNdash(self, newname): @@ -857,80 +802,6 @@ xf.close() # ready for the tarball - def run(self): #noqa - """ - generate test outputs by running a command line - won't work if command or test override in play - planemo is the - easiest way to generate test outputs for that case so is - automagically selected - """ - scl = " ".join(self.cl) - err = None - logname = f"{self.tool_name}_runner_log" - if self.args.parampass != "0": - if self.lastclredirect: - logf = open(self.lastclredirect[1], "wb") # is name of an output file - else: - logf = open(logname,'w') - logf.write("No dependencies so sending CL = '%s' to the fast direct runner instead of planemo to generate tests" % scl) - subp = subprocess.run( - self.cl, shell=False, stdout=logf, stderr=logf - ) - logf.close() - retval = subp.returncode - else: # work around special case - stdin and write to stdout - if len(self.infiles) > 0: - sti = open(self.infiles[0]["name"], "rb") - else: - sti = sys.stdin - if len(self.outfiles) > 0: - sto = open(self.outfiles[0]["name"], "wb") - else: - sto = sys.stdout - subp = subprocess.run( - self.cl, shell=False, stdout=sto, stdin=sti - ) - retval = subp.returncode - sto.close() - sti.close() - if retval != 0 and err: # problem - sys.stderr.write(err) - for p in 
self.outfiles: - oname = p["name"] - tdest = os.path.join(self.testdir, "%s_sample" % oname) - if not os.path.isfile(tdest): - if os.path.isfile(oname): - shutil.copyfile(oname, tdest) - dest = os.path.join(self.repdir, "%s.sample.%s" % (oname,p['format'])) - shutil.copyfile(oname, dest) - else: - if report_fail: - tout.write( - "###Tool may have failed - output file %s not found in testdir after planemo run %s." - % (oname, self.testdir) - ) - for p in self.infiles: - pth = p["name"] - dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) - shutil.copyfile(pth, dest) - dest = os.path.join(self.repdir, "%s_sample.%s" % (p["infilename"],p["format"])) - shutil.copyfile(pth, dest) - with os.scandir('.') as outs: - for entry in outs: - newname = entry.name - if not entry.is_file() or entry.name.endswith('_sample'): - continue - if not (entry.name.endswith('.html') or entry.name.endswith('.gz') or entry.name.endswith(".tgz")): - fname, ext = os.path.splitext(entry.name) - if len(ext) > 1: - newname = f"{fname}_{ext[1:]}.txt" - else: - newname = f"{fname}.txt" - dest = os.path.join(self.repdir, newname) - src = entry.name - shutil.copyfile(src, dest) - return retval - def writeShedyml(self): """for planemo""" yuser = self.args.user_email.split("@")[0] @@ -1068,6 +939,7 @@ a("--galaxy_venv", default="/galaxy_venv") a("--collection", action="append", default=[]) a("--include_tests", default=False, action="store_true") + a("--admin_only", default=False, action="store_true") a("--install", default=False, action="store_true") a("--run_test", default=False, action="store_true") a("--local_tools", default='tools') # relative to $__root_dir__ @@ -1079,30 +951,20 @@ a("--toolshed_api_key", default="fakekey") a("--galaxy_api_key", default="8993d65865e6d6d1773c2c34a1cc207d") args = parser.parse_args() - assert not args.bad_user, ( - 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy \ + if args.admin_only: + assert not args.bad_user, ( + 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy \ admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file' - % (args.bad_user, args.bad_user) + % (args.bad_user, args.bad_user) ) assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq" - assert ( - args.sysexe or args.packages - ), "## Tool Factory wrapper expects an interpreter \ -or an executable package in --sysexe or --packages" - print('Hello from',os.getcwd()) - r = ScriptRunner(args) + r = Tool_Factory(args) r.writeShedyml() r.makeTool() r.makeToolTar() - if args.run_test: - if not args.packages or args.packages.strip() == "bash": - r.run() - r.makeToolTar() - else: - tt = ToolTester(report_dir=r.repdir, in_tool_archive=r.newtarpath, new_tool_archive=r.args.new_tool, galaxy_root=args.galaxy_root, include_tests=False) if args.install: #try: - tcu = ToolConfUpdater(args=args, tool_dir=args.local_tools, + tcu = Tool_Conf_Updater(args=args, tool_dir=args.local_tools, new_tool_archive_path=r.newtarpath, tool_conf_path=args.tool_conf_path, new_tool_name=r.tool_name) #except Exception:
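The net effect of the ScriptRunner to Tool_Factory rename, the removal of the direct run() test path and the new --admin_only gate is easiest to see in the driver code at the bottom of the script. A condensed sketch of that post-change flow, not a verbatim copy of the file; argument parsing and the two classes are assumed to be defined as in the hunks above:

    # Condensed sketch only - parser, Tool_Factory and Tool_Conf_Updater are
    # assumed to be defined exactly as in the hunks above.
    args = parser.parse_args()
    if args.admin_only:
        # the unauthorised-user check is now applied only when --admin_only is set
        assert not args.bad_user, (
            "UNAUTHORISED: %s is NOT authorized to use this tool" % args.bad_user
        )
    assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq"
    r = Tool_Factory(args)      # formerly ScriptRunner
    r.writeShedyml()            # .shed.yml for planemo
    r.makeTool()                # generate the wrapper XML via galaxyxml
    r.makeToolTar()             # untested toolshed archive returned to the history
    if args.install:
        # per the class comments: needs write access to tool_conf.xml and the tools dir
        Tool_Conf_Updater(
            args=args,
            tool_dir=args.local_tools,
            new_tool_archive_path=r.newtarpath,
            tool_conf_path=args.tool_conf_path,
            new_tool_name=r.tool_name,
        )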
--- a/toolfactory/ToolFactory.xml Tue Apr 27 23:33:49 2021 +0000 +++ b/toolfactory/ToolFactory.xml Fri Apr 30 02:10:32 2021 +0000 @@ -28,24 +28,16 @@ </repeat> </xml> <xml name="tool_metadata"> - <param name="tool_version" label="Tool Version - bump this to warn users trying to redo old analyses" type="text" value="0.01" + <param name="tool_version" label="Tool Version - bump this to warn users trying to redo old analyses" type="text" value="0.01" help="If you change your script and regenerate the 'same' tool, you should inform Galaxy (and users) by changing (bumping is traditional) this number"/> - <param name="tool_desc" label="Tool Synopsis" type="text" value="" - help="Supply a brief tool description for the Galaxy tool menu entry" /> - <param name="install" label="Attempt to install in the host Galaxy" - help="This will fail unless running in a very unusual configuration such as a specialised Docker container" - type="boolean" checked="True" truevalue="1" falsevalue="0" /> - <conditional name="do_test"> - <param name="run_test" label="Run planemo test on the new archive to add test outputs making a proper tool - takes time" - help="Archives must be run and updated using planemo to populate all the test outputs. Will be very quick if no dependencies (eg a bash script). May take a very long time depending on dependencies" - type="boolean" checked="False" truevalue="1" falsevalue="0" /> - <when value="0"> - <param name="tail" type="hidden" value="not_tested"/> - </when> - <when value="1"> - <param name="tail" type="hidden" value="tested"/> - </when> - </conditional> + <param name="tool_desc" label="Tool Synopsis" type="text" value="" + help="Supply a brief tool description for the Galaxy tool menu entry" /> + <param name="install" label="Attempt to install in the host Galaxy" + help="This will fail unless running in a very unusual configuration such as a specialised Docker container" + type="boolean" checked="False" truevalue="1" falsevalue="0" /> + <param name="run_test" label="Finalise new archive with test outputs. Runs externally. Outputs will appear in history when ready" + help="Archives must be run and updated using planemo to populate all the test outputs. May take a long time depending on dependencies" + type="boolean" checked="True" truevalue="1" falsevalue="0" /> <param name="help_text" label="Tool form documentation and help text for users" type="text" area="true" value="**What it Does**" help="Supply user documentation to appear on the new tool form as reStructured text - http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html" > @@ -232,7 +224,7 @@ #if $install: --install #end if - #if $do_test.run_test: + #if $run_test: --run_test #end if #if $io_param.ppass.parampass != '0': @@ -405,11 +397,7 @@ <expand macro="tool_metadata" /> </inputs> <outputs> - <data format="toolshed.gz" name="new_tool" label="${tool_name}_${do_test.tail}_toolshed.gz" /> - <collection name="TF_run_report" type="list" label="${tool_name} test run outputs"> - <filter>do_test['run_test']</filter> - <discover_datasets pattern="__name_and_ext__" directory="TF_run_report" /> - </collection> + <data format="toolshed.gz" name="new_tool" label="${tool_name}_not_tested.toolshed.gz" /> </outputs> <tests> <test> @@ -432,7 +420,6 @@ <param name="choosescript" value="yes" /> <param name="script_path" value="$runme"/> <param name="install" value="0"/> - <param name="run_test" value="1"/> <output name="new_tool" file="toolfactory_pyrevpos_tgz_sample" compare="sim_size" delta="6000" /> </test> </tests>
--- a/toolfactory/ToolFactory_tester.xml Tue Apr 27 23:33:49 2021 +0000 +++ b/toolfactory/ToolFactory_tester.xml Fri Apr 30 02:10:32 2021 +0000 @@ -10,6 +10,7 @@ </stdio> <version_command><![CDATA[echo "1"]]></version_command> <command><![CDATA[ +mkdir 'TF_run_report'; python $runme --in_tool_archive @@ -17,9 +18,9 @@ --new_tested_tool_archive $new_tested_tool_archive --galaxy_root -"$galaxyroot" +"$__root_dir__" > -$tf_archive_tester_log; +"TF_run_report/${in_tool_archive.name}_test_log.txt" ]]></command> <configfiles> <configfile name="runme"><![CDATA[#raw @@ -61,9 +62,6 @@ return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) class ToolTester(): - # requires highly insecure docker settings - like write to tool_conf.xml and to tools ! - # if in a container possibly not so courageous. - # Fine on your own laptop but security red flag for most production instances # uncompress passed tar, run planemo and rebuild a new tarball with tests def __init__(self, args=None, in_tool_archive='/galaxy-central/tools/newtool/newtool_toolshed.gz', new_tool_archive=None): @@ -97,8 +95,10 @@ def call_planemo(self,xmlpath,ourdir): penv = os.environ - #penv['HOME'] = os.path.join(self.args.galaxy_root,'planemo') - #penv["GALAXY_VIRTUAL_ENV"] = os.path.join(penv['HOME'],'.planemo','gx_venv_3.9') + penv['HOME'] = os.path.join(self.args.galaxy_root,'planemo') + newpath = f"{penv['HOME']}:{penv['PATH']}" + penv['PATH'] = newpath + penv["GALAXY_VIRTUAL_ENV"] = os.path.join(self.args.galaxy_root,'.venv') penv["PIP_CACHE_DIR"] = os.path.join(self.args.galaxy_root,'pipcache') toolfile = os.path.split(xmlpath)[1] tool_name = self.tool_name @@ -106,9 +106,11 @@ cll = [ "planemo", "test", - "--biocontainers", + "--no_cleanup", + "--test_data", + os.path.abspath(ourdir), "--test_output", - os.path.abspath(tool_test_output), + os.path.abspath(self.tool_test_output), "--galaxy_root", self.args.galaxy_root, "--update_test_data", @@ -119,6 +121,7 @@ cll, #capture_output=True, encoding='utf8', + cwd = os.path.abspath(self.tool_name), env = penv, shell=False, ) @@ -130,12 +133,6 @@ x = os.path.split(xreal)[1] xout = os.path.join(self.tooloutdir,x) shutil.copyfile(xreal, xout) - # for p in self.infiles: - # pth = p["name"] - # dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) - # shutil.copyfile(pth, dest) - # dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) - # shutil.copyfile(pth, dest) def makeToolTar(self): """move outputs into test-data and prepare the tarball""" @@ -174,7 +171,7 @@ shutil.copyfile(src, dest) with os.scandir('.') as outs: for entry in outs: - if not entry.is_file(): + if not entry.is_file() or entry.name == "conda_activate.log": continue if "." 
in entry.name: _, ext = os.path.splitext(entry.name) @@ -212,8 +209,6 @@ def update_tests(self,ourdir): for xmlf in self.ourxmls: capture = self.call_planemo(xmlf,ourdir) - #sys.stderr.write('%s, stdout=%s, stderr=%s' % (xmlf, capture.stdout, capture.stdout)) - #print('%s, stdout=%s, stderr=%s' % (capture.stdout, capture.stdout,xmlf)) def main(): """ @@ -239,11 +234,9 @@ <inputs> <param name="new_tool_name" value="" type="hidden"/> <param name="in_tool_archive" type="data" optional="false" label="Select a no_test tarfile to test and update for a toolshed" help="" format="toolshed.gz" multiple="false"/> - <param name="galaxyroot" type="text" value="/home/ross/gal21" label="Galaxy root for planemo to use - MUST be made available in the Galaxy job runner configuration" help=""/> </inputs> <outputs> <data name="new_tested_tool_archive" format="toolshed.gz" label="${in_tool_archive.name.split('_')[0]}_tested_toolshed.gz" hidden="false"/> - <data name="tf_archive_tester_log" format="txt" label="${in_tool_archive.name}_test_log" hidden="false"/> <collection name="TF_run_report" type="list" label="${in_tool_archive.name} test Run reports"> <discover_datasets pattern="__name_and_ext__" directory="TF_run_report" visible="false"/> </collection> @@ -251,9 +244,7 @@ <tests> <test> <output name="new_tested_tool_archive" value="new_tested_tool_archive_sample" compare="sim_size" delta_frac="0.5"/> - <output name="tf_archive_tester_log" value="tf_archive_tester_log_sample" compare="sim_size" delta_frac="0.1"/> <param name="in_tool_archive" value="in_tool_archive_sample"/> - <param name="galaxyroot" value="/home/ross/gal21"/> <output_collection name="TF_run_report"/> </test> </tests> @@ -265,6 +256,15 @@ Script:: + # see https://github.com/fubar2/toolfactory + # + # copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012 + # + # all rights reserved + # Licensed under the LGPL + # suggestions for improvement and bug fixes welcome at + # https://github.com/fubar2/toolfactory + import argparse import copy import os @@ -275,19 +275,20 @@ import tempfile import time import xml.etree.ElementTree as ET + + myversion = "V2.2 April 2021" verbose = True debug = True toolFactoryURL = "https://github.com/fubar2/toolfactory" + def timenow(): """return current time as a string""" return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) class ToolTester(): - # requires highly insecure docker settings - like write to tool_conf.xml and to tools ! - # if in a container possibly not so courageous. 
- # Fine on your own laptop but security red flag for most production instances # uncompress passed tar, run planemo and rebuild a new tarball with tests + def __init__(self, args=None, in_tool_archive='/galaxy-central/tools/newtool/newtool_toolshed.gz', new_tool_archive=None): self.args = args self.new_tool_archive = new_tool_archive @@ -298,7 +299,6 @@ ourdir = os.path.commonpath(flist) # eg pyrevpos self.tool_name = ourdir ourxmls = [x for x in flist if x.lower().endswith('.xml') and os.path.split(x)[0] == ourdir] - # planemo_test/planemo_test.xml assert len(ourxmls) > 0 self.ourxmls = ourxmls # [os.path.join(tool_path,x) for x in ourxmls] res = tff.extractall() @@ -319,13 +319,14 @@ def call_planemo(self,xmlpath,ourdir): penv = os.environ - penv['HOME'] = '/home/ross/galaxy-release_21.01' + penv["PIP_CACHE_DIR"] = os.path.join(self.args.galaxy_root,'pipcache') toolfile = os.path.split(xmlpath)[1] tool_name = self.tool_name tool_test_output = f"{tool_name}_planemo_test_report.html" cll = [ "planemo", "test", + "--biocontainers", "--test_output", os.path.abspath(tool_test_output), "--galaxy_root", @@ -336,7 +337,7 @@ print(cll) p = subprocess.run( cll, - capture_output=True, + #capture_output=True, encoding='utf8', env = penv, shell=False, @@ -349,22 +350,18 @@ x = os.path.split(xreal)[1] xout = os.path.join(self.tooloutdir,x) shutil.copyfile(xreal, xout) - # for p in self.infiles: - # pth = p["name"] - # dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) - # shutil.copyfile(pth, dest) - # dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) - # shutil.copyfile(pth, dest) def makeToolTar(self): """move outputs into test-data and prepare the tarball""" excludeme = "_planemo_test_report.html" + def exclude_function(tarinfo): filename = tarinfo.name return None if filename.endswith(excludeme) else tarinfo + newtar = 'new_%s_toolshed.gz' % self.tool_name ttf = tarfile.open(newtar, "w:gz") - ttf.add(name=self.tooloutdir, + ttf.add(name=self.tool_name, arcname=self.tool_name, filter=exclude_function) ttf.close() @@ -425,16 +422,16 @@ src = os.path.join(self.testdir, entry.name) shutil.copyfile(src, dest) + def update_tests(self,ourdir): for xmlf in self.ourxmls: capture = self.call_planemo(xmlf,ourdir) - #sys.stderr.write('%s, stdout=%s, stderr=%s' % (xmlf, capture.stdout, capture.stdout)) - print('%s, stdout=%s, stderr=%s' % (capture.stdout, capture.stdout,xmlf)) def main(): """ This is a Galaxy wrapper. It expects to be called by a special purpose tool.xml + """ parser = argparse.ArgumentParser() a = parser.add_argument @@ -444,9 +441,11 @@ args = parser.parse_args() print('Hello from',os.getcwd()) tt = ToolTester(args=args, in_tool_archive=args.in_tool_archive, new_tool_archive=args.new_tested_tool_archive) + if __name__ == "__main__": main() + ]]></help> <citations> <citation type="doi">10.1093/bioinformatics/bts573</citation>
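The functional core of the tester is the call_planemo environment setup changed in the hunks above: planemo now inherits a HOME under the Galaxy root, a prefixed PATH, the Galaxy virtualenv and a local pip cache, and runs with --no_cleanup, --test_data and --update_test_data. A minimal self-contained sketch of that pattern, with galaxy_root, xmlpath and tool_name supplied by the caller; the paths are illustrative, not values from this changeset:

    import os
    import subprocess

    def run_planemo_test(xmlpath, galaxy_root, tool_name):
        """Sketch of the call_planemo pattern above; paths are illustrative."""
        penv = dict(os.environ)
        penv["HOME"] = os.path.join(galaxy_root, "planemo")
        penv["PATH"] = f"{penv['HOME']}:{penv['PATH']}"
        penv["GALAXY_VIRTUAL_ENV"] = os.path.join(galaxy_root, ".venv")
        penv["PIP_CACHE_DIR"] = os.path.join(galaxy_root, "pipcache")
        report = os.path.abspath(f"{tool_name}_planemo_test_report.html")
        cll = [
            "planemo", "test",
            "--no_cleanup",
            "--test_data", os.path.abspath(os.path.dirname(xmlpath)),
            "--test_output", report,
            "--galaxy_root", galaxy_root,
            "--update_test_data",
            xmlpath,
        ]
        # run inside the unpacked tool directory so planemo finds test-data/
        return subprocess.run(cll, env=penv, cwd=os.path.abspath(tool_name),
                              shell=False, encoding="utf8")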
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/install-history.py	Fri Apr 30 02:10:32 2021 +0000
@@ -0,0 +1,41 @@
+import argparse
+import os
+
+
+from bioblend import galaxy
+
+
+def _parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-g", "--galaxy", help='URL of target galaxy')
+    parser.add_argument("-p", "--password", help='Galaxy admin password')
+    parser.add_argument("-e", "--email", help='Galaxy admin email')
+    parser.add_argument("-a", "--key", help='Galaxy admin key', default=None)
+    parser.add_argument("-i", "--history_path", help='Path to history gz files to be loaded')
+    return parser
+
+def main():
+    """
+    load a folder of histories or a single gz
+    """
+    args = _parser().parse_args()
+    if args.key:
+        gi = galaxy.GalaxyInstance(url=args.galaxy, key=args.key)
+    else:
+        gi = galaxy.GalaxyInstance(url=args.galaxy, email=args.email, password=args.password)
+    hdir = args.history_path
+    # h = gi.histories.get_most_recently_used_history()
+    if os.path.isdir(hdir):
+        for fp in os.listdir(hdir):
+            hp = os.path.join(hdir,fp)
+            if os.path.isfile(hp):
+                x = gi.histories.import_history(file_path=hp, url=None)
+                print('installed ',hp,'res=',x)
+    else:
+        x = gi.histories.import_history(file_path=hdir, url=None)
+        print('installed',hdir,'res=',x)
+
+
+if __name__ == "__main__":
+    main()
+
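install-history.py is a thin wrapper around two bioblend calls. A hedged usage sketch of the underlying call for a single exported history archive; the URL, key and file name below are placeholders, not values from this changeset:

    from bioblend import galaxy

    # placeholders - substitute a real Galaxy URL and an admin API key
    gi = galaxy.GalaxyInstance(url="http://localhost:8080", key="ADMIN_API_KEY")
    # import one exported history archive; install-history.py loops over a directory of these
    result = gi.histories.import_history(file_path="some_history.tar.gz", url=None)
    print("installed", result)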
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/install_tf_demos.py	Fri Apr 30 02:10:32 2021 +0000
@@ -0,0 +1,42 @@
+import argparse
+import urllib.request
+
+from bioblend import galaxy
+
+WF = "https://drive.google.com/uc?export=download&id=13xE8o7tucHGNA0qYkEP98FfUGl2wdOU5"
+HIST = (
+    "https://zenodo.org/record/4686436/files/TFdemo_wf_april13_planemo.ga?download=1"
+)
+WF_FILE = "tf_workflow.ga"
+HIST_FILE = "tf_history.tgz"
+
+
+def _parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-g", "--galaxy", help="URL of target galaxy", default="http://localhost:8080"
+    )
+    parser.add_argument("-a", "--key", help="Galaxy admin key", default="8993d65865e6d6d1773c2c34a1cc207d")
+    return parser
+
+
+def main():
+    """
+    load the planemo tool_factory demonstration history and tool generating workflow
+    fails in planemo served galaxies because there seems to be no user in trans?
+    """
+    args = _parser().parse_args()
+    urllib.request.urlretrieve(WF, WF_FILE)
+    urllib.request.urlretrieve(HIST, HIST_FILE)
+    assert args.key, "Need an administrative key for the target Galaxy supplied please"
+    gi = galaxy.GalaxyInstance(
+        url=args.galaxy, key=args.key, email="planemo@galaxyproject.org"
+    )
+    x = gi.workflows.import_workflow_from_local_path(WF_FILE, publish=True)
+    print(f"installed {WF_FILE} Returned = {x}\n")
+    x = gi.histories.import_history(file_path=HIST_FILE)
+    print(f"installed {HIST_FILE} Returned = {x}\n")
+
+
+if __name__ == "__main__":
+    main()
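A possible invocation of the demo installer, sketched with subprocess for consistency with the other examples; the URL is an assumption and the key must be a real admin API key for the target Galaxy:

    import subprocess

    # illustrative only - install_tf_demos.py fetches the demo workflow and history
    # and pushes them into the target Galaxy via bioblend
    subprocess.run(
        [
            "python", "install_tf_demos.py",
            "-g", "http://localhost:8080",   # assumed local Galaxy URL
            "-a", "YOUR_ADMIN_API_KEY",
        ],
        check=True,
    )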
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/testclass.py Fri Apr 30 02:10:32 2021 +0000 @@ -0,0 +1,108 @@ +import argparse +import json +import os +import requests + + +from bioblend import galaxy + +class ToolTester(): + # test a newly installed tool using bioblend + """ + + https://github.com/nsoranzo/bioblend-tutorial/blob/master/historical_exercises/api-scripts.exercises/run_tool.py +import sys +import json +import requests +import output + +BASE_URL = 'http://localhost:8080' + +# ----------------------------------------------------------------------------- +def run_tool( tool_id, history_id, **kwargs ): + full_url = BASE_URL + '/api/tools' + + #EXERCISE: POST... + +# ----------------------------------------------------------------------------- +if __name__ == '__main__': + # e.g. ./run_tool.py Filter1 ebfb8f50c6abde6d '{ "input" : { "src": "hda", "id": "77f74776fd03cbc5" }, "cond" : "c6>=100.0" }' + # e.g. ./run_tool.py sort1 f597429621d6eb2b '{ "input": { "src": "hda", "id": "b472e2eb553fa0d1" }, "column": "c6", "style": "alpha", "column_set_0|other_column" : "c2", "column_set_0|other_style": "num" }' + tool_id, history_id = sys.argv[1:3] + params = json.loads( sys.argv[3] ) if len( sys.argv ) >= 4 else {} + response = run_tool( tool_id, history_id, **params ) + output.output_response( response ) + + + def get_testdata(self,urlin,fout): + ''' + grab a test file + GET /api/tools/{tool_id}/test_data_download?tool_version={tool_version}&filename={filename} + http://localhost:8080/api/tools/tacrev/test_data_download?tool_version=2.00&filename=in + ''' + """ + def __init__(self, args): + self.galaxy = args.galaxy + self.key = args.key + self.tool_id = args.tool_id + + def run_test(self): + """ + GET /api/tools/{tool_id}/test_data_download?tool_version={tool_version}&filename={filename} + http://localhost:8080/api/tools/tacrev/test_data_download?tool_version=2.00&filename=input1 + """ + inputs = {} + gi = galaxy.GalaxyInstance(url=self.galaxy, key=self.key, verify=False) + chistory = gi.histories.get_most_recently_used_history() + chistory_id = chistory['id'] + #contents = gi.histories.show_history(chistory_id, contents=True) + #print('####chistory',chistory,'\n#### contents=',contents) + #history = gi.histories.create_history(name=f"{self.tool_id}_test_history") + #new_hist_id = history['id'] + fapi = ''.join([self.galaxy, '/api/tools/', self.tool_id, '/build']) + build = gi.make_get_request(url=fapi,params={"history_id":chistory_id}).json() + fapi = ''.join([self.galaxy, '/api/tools/', self.tool_id, '/test_data']) + test_data = requests.get(fapi, params={'key':self.key, 'history_id':chistory_id})# gi.make_get_request(url=fapi,params={"history_id":chistory_id,'key':self.key}).json() + print(test_data) + testinputs = test_data.json()[0].get('inputs',None) + print('testinputs',testinputs) + stateinputs = build.get('state_inputs',None) # 'input1': {'values': [{'id': '7b326180327c3fcc', 'src': 'hda'}]}} + if testinputs: + for k in testinputs.keys(): + v = testinputs[k] + if '|' in k: + nk = k.split('|')[-1] + inputs[nk] = v + else: + inputs[k] = v + if stateinputs: + print('stateinputs',stateinputs) + for k in stateinputs.keys(): + inp = stateinputs[k] + if isinstance(inp,dict): + if inp.get('values',None): + for anin in inp['values']: + if anin.get('id', None) and anin.get('src', None): + gi.histories.copy_dataset(chistory_id, anin['id'], source=anin['src']) + print('******copied id', anin['id']) + up = {k:anin} + print(up) + inputs.update(up) # replace the input def + 
print('after state inputs', inputs) + fapi = ''.join([self.galaxy, '/api/tools']) + r = gi.tools.run_tool(chistory_id, self.tool_id, inputs, input_format='legacy') + print(f"Called test on {self.tool_id} - got {r}") + +def _parser(): + parser = argparse.ArgumentParser() + parser.add_argument("-g", "--galaxy", help='URL of target galaxy',default="http://localhost:8080") + parser.add_argument("-a", "--key", help='Galaxy admin key', default="13073fde17d06591ce36e596e3c29904") + parser.add_argument("-t", "--tool_id", help='Tool id to test', default="plotter") + return parser + + +if __name__ == "__main__": + args = _parser().parse_args() + tt = ToolTester(args) + tt.run_test() +
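testclass.py assembles tool inputs from the /api/tools/{tool_id}/build and /api/tools/{tool_id}/test_data endpoints before submitting the job. The happy-path version of that final submission, sketched with bioblend and placeholder identifiers:

    from bioblend import galaxy

    # placeholders - a real Galaxy URL, admin key and installed tool id are required
    gi = galaxy.GalaxyInstance(url="http://localhost:8080", key="ADMIN_API_KEY")
    history_id = gi.histories.get_most_recently_used_history()["id"]
    # inputs must reference datasets already present in that history, e.g.
    # {"input1": {"src": "hda", "id": "<dataset id>"}}
    inputs = {}
    result = gi.tools.run_tool(history_id, "plotter", inputs, input_format="legacy")
    print(result)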
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/toolwatcher.py	Fri Apr 30 02:10:32 2021 +0000
@@ -0,0 +1,123 @@
+#!/usr/bin/python
+from datetime import datetime, timedelta
+from io import BytesIO as BIO
+import logging
+import os
+import subprocess
+import tarfile
+import time
+from watchdog.observers import Observer
+from watchdog.events import FileSystemEventHandler
+from watchdog.events import PatternMatchingEventHandler
+
+class ToolHandler(PatternMatchingEventHandler):
+
+    def __init__(self, watchme):
+        PatternMatchingEventHandler.__init__(self, patterns=['*.xml'],
+            ignore_directories=False, case_sensitive=False)
+        self.last_modified = datetime.now()
+        self.tool_dir = watchme
+        self.work_dir = os.getcwd()
+        self.galaxy_root = os.path.split(watchme)[0]
+        logging.info(self.galaxy_root)
+        self.tar_dir = os.path.join(self.galaxy_root, 'tooltardir')
+        if not os.path.exists(self.tar_dir):
+            os.mkdir(self.tar_dir)
+
+    def on_created(self, event):
+        self.on_modified(event)
+
+    def on_modified(self, event):
+        if datetime.now() - self.last_modified < timedelta(seconds=1):
+            return
+        else:
+            if os.path.exists(event.src_path):
+                self.last_modified = datetime.now()
+                logging.info(f"{event.src_path} was {event.event_type}")
+                p = self.planemo_test(event.src_path)
+                if p:
+                    if p.returncode == 0:
+                        newtarpath = self.makeToolTar(event.src_path)
+                        logging.info('### Tested toolshed tarball %s written' % newtarpath)
+                    else:
+                        logging.debug('### planemo stdout:')
+                        logging.debug(p.stdout)
+                        logging.debug('### planemo stderr:')
+                        logging.debug(p.stderr)
+                        logging.info('### Planemo call return code =' % p.returncode)
+            else:
+                logging.info('Directory %s deleted' % event.src_path)
+
+    def planemo_test(self, xml_path):
+        toolpath, toolfile = os.path.split(xml_path)
+        dirlist = os.listdir(toolpath)
+        toolname = os.path.basename(toolpath)
+        logging.info('### test dirlist %s, path %s toolname %s' % (dirlist, xml_path, toolname))
+        xmls = [x for x in dirlist if os.path.splitext(x)[1] == '.xml']
+        if not len(xmls) > 0:
+            logging.warning('Found no xml files after change to %s' % xml_path)
+            return None
+        tool_test_output = os.path.join(toolpath, f"{toolname}_planemo_test_report.html")
+        cll = [
+            "planemo",
+            "test",
+            "--test_output",
+            tool_test_output,
+            "--galaxy_root",
+            self.galaxy_root,
+            "--update_test_data",
+            xml_path,
+        ]
+        logging.info('### calling %s' % ' '.join(cll))
+        p = subprocess.run(
+            cll,
+            cwd = toolpath,
+            shell=False,
+            capture_output=True,
+            encoding='utf8',
+        )
+        return p
+
+    def makeToolTar(self, xml_path):
+        """move outputs into test-data and prepare the tarball"""
+        excludeme = "_planemo_test_report.html"
+
+        def exclude_function(tarinfo):
+            filename = tarinfo.name
+            return None if filename.endswith(excludeme) else tarinfo
+
+        tooldir, xml_file = os.path.split(xml_path)
+        os.chdir(self.tool_dir)
+        toolname = os.path.splitext(xml_file)[0]
+        newtarpath = os.path.join(self.tar_dir, '%s_toolshed.gz' % toolname)
+        tf = tarfile.open(newtarpath, "w:gz")
+        tf.add(
+            name=toolname,
+            arcname=toolname,
+            filter=exclude_function,
+        )
+        tf.close()
+        os.chdir(self.work_dir)
+        return newtarpath
+
+
+if __name__ == "__main__":
+    watchme = '/home/ross/gal21/tools'
+    logging.basicConfig(level=logging.INFO,
+        #filename = os.path.join(watchme,"toolwatcher.log")
+        #filemode = "w",
+        format='%(asctime)s - %(message)s',
+        datefmt='%Y-%m-%d %H:%M:%S')
+    event_handler = ToolHandler(watchme=watchme)
+    observer = Observer()
+    observer.schedule(event_handler, path=watchme, recursive=True)
+    observer.start()
+    try:
+        while True:
+            time.sleep(1)
+    except KeyboardInterrupt:
+        observer.stop()
+        observer.join()
+
+
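toolwatcher.py hard-codes watchme = '/home/ross/gal21/tools'. The same watchdog wiring, pointed at any other Galaxy tools directory, looks like this; a minimal sketch that assumes the ToolHandler class above is importable as a module named toolwatcher:

    import time
    from watchdog.observers import Observer
    from toolwatcher import ToolHandler  # assumes the module above is on PYTHONPATH

    watchme = "/path/to/galaxy/tools"    # placeholder - your Galaxy's tools directory
    observer = Observer()
    observer.schedule(ToolHandler(watchme=watchme), path=watchme, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()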