2
|
1 # replace with shebang for biocontainer
|
|
2 # see https://github.com/fubar2/toolfactory
|
|
3 #
|
|
4 # copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012
|
|
5 #
|
|
6 # all rights reserved
|
|
7 # Licensed under the LGPL
|
|
8 # suggestions for improvement and bug fixes welcome at
|
|
9 # https://github.com/fubar2/toolfactory
|
|
10 #
|
|
11 # July 2020: BCC was fun and I feel like rip van winkle after 5 years.
|
|
12 # Decided to
|
|
13 # 1. Fix the toolfactory so it works - done for simplest case
|
|
14 # 2. Fix planemo so the toolfactory function works
|
|
15 # 3. Rewrite bits using galaxyxml functions where that makes sense - done
|
|
16 #
|
|
17 # uses planemo in a biodocker sort of image as a requirement
|
|
18 # otherwise planemo seems to leak dependencies back into the
|
|
19 # calling venv. Hilarity ensues.
|
|
20
|
|
21
|
|
22 import argparse
|
|
23 import copy
|
6
|
24 import json
|
2
|
25 import logging
|
|
26 import os
|
|
27 import re
|
|
28 import shutil
|
|
29 import subprocess
|
|
30 import sys
|
|
31 import tarfile
|
|
32 import tempfile
|
|
33 import time
|
|
34
|
|
35
|
|
36 from bioblend import ConnectionError
|
|
37 from bioblend import toolshed
|
|
38
|
|
39 import docker
|
|
40
|
|
41 import galaxyxml.tool as gxt
|
|
42 import galaxyxml.tool.parameters as gxtp
|
|
43
|
|
44 import lxml
|
|
45
|
|
46 import yaml
|
|
47
|
|
48
|
|
# Version tag reported for this ToolFactory build.
myversion = "V2.1 July 2020"
verbose = True
debug = True
# Upstream project URL - embedded in generated tool comments.
toolFactoryURL = "https://github.com/fubar2/toolfactory"
# Touch lxml so flake8 does not flag the module-level import as unused.
foo = len(lxml.__version__)
# Placeholder executable handed to galaxyxml, stripped from the exported XML
# later; needed until a PR/version bump fixes galaxyxml prepending the exe
# even with override.
FAKEEXE = "~~~REMOVE~~~ME~~~"
|
|
58
|
|
59
|
|
def timenow():
    """Return the current local time as a dd/mm/YYYY HH:MM:SS string."""
    fmt = "%d/%m/%Y %H:%M:%S"
    return time.strftime(fmt, time.localtime())
|
|
63
|
|
64
|
|
# Characters that must be backslash-escaped so Cheetah does not interpret
# them as template syntax ($ introduces a placeholder, # a directive/comment).
cheetah_escape_table = {"$": "\\$", "#": "\\#"}
|
|
66
|
|
67
|
|
def cheetah_escape(text):
    """Backslash-escape Cheetah-sensitive characters ($ and #) in *text*."""
    table = {"$": "\\$", "#": "\\#"}
    return "".join(table.get(ch, ch) for ch in text)
|
|
71
|
|
72
|
|
def parse_citations(citations_text):
    """Split **ENTRY**-delimited citation text into (type, value) tuples.

    Each non-empty entry is expected to start with either "doi" or "bibtex";
    anything not starting with "doi" is treated as bibtex. The type prefix is
    stripped and the remainder whitespace-trimmed.
    """
    parsed = []
    for entry in citations_text.split("**ENTRY**"):
        if not entry.strip():
            continue
        if entry.startswith("doi"):
            parsed.append(("doi", entry[len("doi"):].strip()))
        else:
            parsed.append(("bibtex", entry[len("bibtex"):].strip()))
    return parsed
|
|
83
|
|
84
|
|
85 class ScriptRunner:
|
|
86 """Wrapper for an arbitrary script
|
|
87 uses galaxyxml
|
|
88
|
|
89 """
|
|
90
|
|
    def __init__(self, args=None):
        """
        prepare command line cl for running the tool here
        and prepare elements needed for galaxyxml tool generation

        args is the argparse namespace from the CLI; the JSON-encoded
        repeated parameters (--collection/--input_files/--output_files/
        --additional_parameters/--selecttext_parameters) are decoded into
        lists of dicts here.
        """
        self.ourcwd = os.getcwd()
        # snapshot the environment so subprocesses run with a stable copy
        self.ourenv = copy.deepcopy(os.environ)
        self.collections = []
        if len(args.collection) > 0:
            try:
                self.collections = [
                    json.loads(x) for x in args.collection if len(x.strip()) > 1
                ]
            except Exception:
                # malformed JSON is reported but not fatal here
                print(
                    f"--collections parameter {str(args.collection)} is malformed - should be a dictionary"
                )
        try:
            self.infiles = [
                json.loads(x) for x in args.input_files if len(x.strip()) > 1
            ]
        except Exception:
            print(
                f"--input_files parameter {str(args.input_files)} is malformed - should be a dictionary"
            )
        try:
            self.outfiles = [
                json.loads(x) for x in args.output_files if len(x.strip()) > 1
            ]
        except Exception:
            print(
                f"--output_files parameter {args.output_files} is malformed - should be a dictionary"
            )
        try:
            self.addpar = [
                json.loads(x) for x in args.additional_parameters if len(x.strip()) > 1
            ]
        except Exception:
            print(
                f"--additional_parameters {args.additional_parameters} is malformed - should be a dictionary"
            )
        try:
            self.selpar = [
                json.loads(x) for x in args.selecttext_parameters if len(x.strip()) > 1
            ]
        except Exception:
            print(
                f"--selecttext_parameters {args.selecttext_parameters} is malformed - should be a dictionary"
            )
        self.args = args
        # normalise CL ordinals / derive infilenames before building commands
        self.cleanuppar()
        self.lastclredirect = None
        self.lastxclredirect = None
        self.cl = []      # command line actually executed for test outputs
        self.xmlcl = []   # command line template written into the tool XML
        self.is_positional = self.args.parampass == "positional"
        if self.args.sysexe:
            self.executeme = self.args.sysexe
        else:
            if self.args.packages:
                # first package name doubles as the interpreter/executable
                self.executeme = self.args.packages.split(",")[0].split(":")[0].strip()
            else:
                self.executeme = None
        aCL = self.cl.append
        aXCL = self.xmlcl.append
        assert args.parampass in [
            "0",
            "argparse",
            "positional",
        ], 'args.parampass must be "0","positional" or "argparse"'
        # sanitise the tool name into a valid Galaxy tool id
        self.tool_name = re.sub("[^a-zA-Z0-9_]+", "", args.tool_name)
        self.tool_id = self.tool_name
        self.newtool = gxt.Tool(
            self.tool_name,
            self.tool_id,
            self.args.tool_version,
            self.args.tool_desc,
            FAKEEXE,
        )
        self.newtarpath = "toolfactory_%s.tgz" % self.tool_name
        self.tooloutdir = "./tfout"
        self.repdir = "./TF_run_report_tempdir"
        self.testdir = os.path.join(self.tooloutdir, "test-data")
        if not os.path.exists(self.tooloutdir):
            os.mkdir(self.tooloutdir)
        if not os.path.exists(self.testdir):
            os.mkdir(self.testdir)
        if not os.path.exists(self.repdir):
            os.mkdir(self.repdir)
        self.tinputs = gxtp.Inputs()
        self.toutputs = gxtp.Outputs()
        self.testparam = []
        if self.args.script_path:
            # writes the script to a temp file and sets self.sfile
            self.prepScript()
        if self.args.command_override:
            scos = open(self.args.command_override, "r").readlines()
            self.command_override = [x.rstrip() for x in scos]
        else:
            self.command_override = None
        if self.args.test_override:
            stos = open(self.args.test_override, "r").readlines()
            self.test_override = [x.rstrip() for x in stos]
        else:
            self.test_override = None
        if self.args.cl_prefix:  # DIY CL start
            clp = self.args.cl_prefix.split(" ")
            for c in clp:
                aCL(c)
                aXCL(c)
        else:
            if self.args.script_path:
                # interpreter + script file; XML uses the $runme configfile
                aCL(self.executeme)
                aCL(self.sfile)
                aXCL(self.executeme)
                aXCL("$runme")
            else:
                aCL(self.executeme)
                aXCL(self.executeme)
        self.elog = os.path.join(self.repdir, "%s_error_log.txt" % self.tool_name)
        self.tlog = os.path.join(self.repdir, "%s_runner_log.txt" % self.tool_name)

        # finish the command lines according to the parameter passing style
        if self.args.parampass == "0":
            self.clsimple()
        else:
            if self.args.parampass == "positional":
                self.prepclpos()
                self.clpositional()
            else:
                self.prepargp()
                self.clargparse()
|
|
221
|
6
|
222 def clsimple(self):
|
|
223 """no parameters - uses < and > for i/o"""
|
|
224 aCL = self.cl.append
|
|
225 aXCL = self.xmlcl.append
|
|
226 if len(self.infiles) > 0:
|
|
227 aCL("<")
|
|
228 aCL(self.infiles[0]["infilename"])
|
|
229 aXCL("<")
|
|
230 aXCL("$%s" % self.infiles[0]["infilename"])
|
|
231 if len(self.outfiles) > 0:
|
|
232 aCL(">")
|
|
233 aCL(self.outfiles[0]["name"])
|
|
234 aXCL(">")
|
|
235 aXCL("$%s" % self.outfiles[0]["name"])
|
|
236
|
|
237 def prepargp(self):
|
|
238 clsuffix = []
|
|
239 xclsuffix = []
|
|
240 for i, p in enumerate(self.infiles):
|
|
241 if p["origCL"].strip().upper() == "STDIN":
|
|
242 appendme = [
|
|
243 p["infilename"],
|
|
244 p["infilename"],
|
|
245 "< %s" % p["infilename"],
|
|
246 ]
|
|
247 xappendme = [
|
|
248 p["infilename"],
|
|
249 p["infilename"],
|
|
250 "< $%s" % p["infilename"],
|
|
251 ]
|
|
252 else:
|
|
253 appendme = [p["CL"], p["CL"], ""]
|
|
254 xappendme = [p["CL"], "$%s" % p["CL"], ""]
|
|
255 clsuffix.append(appendme)
|
|
256 xclsuffix.append(xappendme)
|
|
257 for i, p in enumerate(self.outfiles):
|
|
258 if p["origCL"].strip().upper() == "STDOUT":
|
|
259 self.lastclredirect = [">", p["name"]]
|
|
260 self.lastxclredirect = [">", "$%s" % p["name"]]
|
|
261 else:
|
|
262 clsuffix.append([p["name"], p["name"], ""])
|
|
263 xclsuffix.append([p["name"], "$%s" % p["name"], ""])
|
|
264 for p in self.addpar:
|
|
265 clsuffix.append([p["CL"], p["name"], p["override"]])
|
|
266 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]])
|
|
267 for p in self.selpar:
|
|
268 clsuffix.append([p["CL"], p["name"], p["override"]])
|
|
269 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]])
|
|
270 clsuffix.sort()
|
|
271 xclsuffix.sort()
|
|
272 self.xclsuffix = xclsuffix
|
|
273 self.clsuffix = clsuffix
|
|
274
|
|
275 def prepclpos(self):
|
|
276 clsuffix = []
|
|
277 xclsuffix = []
|
|
278 for i, p in enumerate(self.infiles):
|
|
279 if p["origCL"].strip().upper() == "STDIN":
|
|
280 appendme = [
|
|
281 "999",
|
|
282 p["infilename"],
|
|
283 "< $%s" % p["infilename"],
|
|
284 ]
|
|
285 xappendme = [
|
|
286 "999",
|
|
287 p["infilename"],
|
|
288 "< $%s" % p["infilename"],
|
|
289 ]
|
|
290 else:
|
|
291 appendme = [p["CL"], p["infilename"], ""]
|
|
292 xappendme = [p["CL"], "$%s" % p["infilename"], ""]
|
|
293 clsuffix.append(appendme)
|
|
294 xclsuffix.append(xappendme)
|
|
295 for i, p in enumerate(self.outfiles):
|
|
296 if p["origCL"].strip().upper() == "STDOUT":
|
|
297 self.lastclredirect = [">", p["name"]]
|
|
298 self.lastxclredirect = [">", "$%s" % p["name"]]
|
|
299 else:
|
|
300 clsuffix.append([p["CL"], p["name"], ""])
|
|
301 xclsuffix.append([p["CL"], "$%s" % p["name"], ""])
|
|
302 for p in self.addpar:
|
|
303 clsuffix.append([p["CL"], p["name"], p["override"]])
|
|
304 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]])
|
|
305 for p in self.selpar:
|
|
306 clsuffix.append([p["CL"], p["name"], p["override"]])
|
|
307 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]])
|
|
308 clsuffix.sort()
|
|
309 xclsuffix.sort()
|
|
310 self.xclsuffix = xclsuffix
|
|
311 self.clsuffix = clsuffix
|
|
312
|
2
|
313 def prepScript(self):
|
|
314 rx = open(self.args.script_path, "r").readlines()
|
|
315 rx = [x.rstrip() for x in rx]
|
|
316 rxcheck = [x.strip() for x in rx if x.strip() > ""]
|
|
317 assert len(rxcheck) > 0, "Supplied script is empty. Cannot run"
|
|
318 self.script = "\n".join(rx)
|
|
319 fhandle, self.sfile = tempfile.mkstemp(
|
|
320 prefix=self.tool_name, suffix="_%s" % (self.executeme)
|
|
321 )
|
|
322 tscript = open(self.sfile, "w")
|
|
323 tscript.write(self.script)
|
|
324 tscript.close()
|
|
325 self.escapedScript = [cheetah_escape(x) for x in rx]
|
|
326 self.spacedScript = [f" {x}" for x in rx if x.strip() > ""]
|
|
327 art = "%s.%s" % (self.tool_name, self.executeme)
|
|
328 artifact = open(art, "wb")
|
|
329 artifact.write(bytes("\n".join(self.escapedScript), "utf8"))
|
|
330 artifact.close()
|
|
331
|
|
    def cleanuppar(self):
        """ positional parameters are complicated by their numeric ordinal"""
        # When passing positionally, every CL value must be an ordinal integer
        # (or the special STDIN/STDOUT markers) so the suffix lists can be
        # sorted into command-line order.
        if self.args.parampass == "positional":
            for i, p in enumerate(self.infiles):
                assert (
                    p["CL"].isdigit() or p["CL"].strip().upper() == "STDIN"
                ), "Positional parameters must be ordinal integers - got %s for %s" % (
                    p["CL"],
                    p["label"],
                )
            for i, p in enumerate(self.outfiles):
                assert (
                    p["CL"].isdigit() or p["CL"].strip().upper() == "STDOUT"
                ), "Positional parameters must be ordinal integers - got %s for %s" % (
                    p["CL"],
                    p["name"],
                )
            for i, p in enumerate(self.addpar):
                assert p[
                    "CL"
                ].isdigit(), "Positional parameters must be ordinal integers - got %s for %s" % (
                    p["CL"],
                    p["name"],
                )
        for i, p in enumerate(self.infiles):
            infp = copy.copy(p)
            # preserve the original CL value before it is repurposed below
            infp["origCL"] = infp["CL"]
            if self.args.parampass in ["positional", "0"]:
                # no argparse flag name available - derive a safe name from
                # the label (spaces would break the generated XML/CL)
                infp["infilename"] = infp["label"].replace(" ", "_")
            else:
                infp["infilename"] = infp["CL"]
            self.infiles[i] = infp
        for i, p in enumerate(self.outfiles):
            p["origCL"] = p["CL"]  # keep copy
            self.outfiles[i] = p
        for i, p in enumerate(self.addpar):
            p["origCL"] = p["CL"]
            self.addpar[i] = p
|
|
370
|
|
371 def clpositional(self):
|
|
372 # inputs in order then params
|
|
373 aCL = self.cl.append
|
6
|
374 for (k, v, koverride) in self.clsuffix:
|
2
|
375 if " " in v:
|
|
376 aCL("%s" % v)
|
|
377 else:
|
|
378 aCL(v)
|
|
379 aXCL = self.xmlcl.append
|
6
|
380 for (k, v, koverride) in self.xclsuffix:
|
2
|
381 aXCL(v)
|
|
382 if self.lastxclredirect:
|
|
383 aXCL(self.lastxclredirect[0])
|
|
384 aXCL(self.lastxclredirect[1])
|
|
385
|
|
386 def clargparse(self):
|
|
387 """argparse style"""
|
|
388 aCL = self.cl.append
|
|
389 aXCL = self.xmlcl.append
|
|
390 # inputs then params in argparse named form
|
|
391
|
6
|
392 for (k, v, koverride) in self.xclsuffix:
|
2
|
393 if koverride > "":
|
|
394 k = koverride
|
|
395 elif len(k.strip()) == 1:
|
|
396 k = "-%s" % k
|
|
397 else:
|
|
398 k = "--%s" % k
|
|
399 aXCL(k)
|
|
400 aXCL(v)
|
6
|
401 for (k, v, koverride) in self.clsuffix:
|
2
|
402 if koverride > "":
|
|
403 k = koverride
|
|
404 elif len(k.strip()) == 1:
|
|
405 k = "-%s" % k
|
|
406 else:
|
|
407 k = "--%s" % k
|
|
408 aCL(k)
|
|
409 aCL(v)
|
|
410
|
|
411 def getNdash(self, newname):
|
|
412 if self.is_positional:
|
|
413 ndash = 0
|
|
414 else:
|
|
415 ndash = 2
|
|
416 if len(newname) < 2:
|
|
417 ndash = 1
|
|
418 return ndash
|
|
419
|
|
420 def doXMLparam(self):
|
|
421 """flake8 made me do this..."""
|
6
|
422 for p in self.outfiles:
|
|
423 newname = p["name"]
|
|
424 newfmt = p["format"]
|
|
425 newcl = p["CL"]
|
|
426 test = p["test"]
|
|
427 oldcl = p["origCL"]
|
2
|
428 test = test.strip()
|
|
429 ndash = self.getNdash(newcl)
|
|
430 aparm = gxtp.OutputData(
|
5
|
431 name=newname, format=newfmt, num_dashes=ndash, label=newname
|
2
|
432 )
|
|
433 aparm.positional = self.is_positional
|
|
434 if self.is_positional:
|
|
435 if oldcl.upper() == "STDOUT":
|
|
436 aparm.positional = 9999999
|
|
437 aparm.command_line_override = "> $%s" % newname
|
|
438 else:
|
|
439 aparm.positional = int(oldcl)
|
|
440 aparm.command_line_override = "$%s" % newname
|
|
441 self.toutputs.append(aparm)
|
|
442 ld = None
|
|
443 if test.strip() > "":
|
|
444 if test.startswith("diff"):
|
|
445 c = "diff"
|
|
446 ld = 0
|
|
447 if test.split(":")[1].isdigit:
|
|
448 ld = int(test.split(":")[1])
|
|
449 tp = gxtp.TestOutput(
|
|
450 name=newname,
|
|
451 value="%s_sample" % newname,
|
|
452 compare=c,
|
|
453 lines_diff=ld,
|
|
454 )
|
|
455 elif test.startswith("sim_size"):
|
|
456 c = "sim_size"
|
|
457 tn = test.split(":")[1].strip()
|
|
458 if tn > "":
|
|
459 if "." in tn:
|
|
460 delta = None
|
|
461 delta_frac = min(1.0, float(tn))
|
|
462 else:
|
|
463 delta = int(tn)
|
|
464 delta_frac = None
|
|
465 tp = gxtp.TestOutput(
|
|
466 name=newname,
|
|
467 value="%s_sample" % newname,
|
|
468 compare=c,
|
|
469 delta=delta,
|
|
470 delta_frac=delta_frac,
|
|
471 )
|
6
|
472 else:
|
|
473 c = test
|
|
474 tp = gxtp.TestOutput(
|
|
475 name=newname,
|
|
476 value="%s_sample" % newname,
|
|
477 compare=c,
|
|
478 )
|
2
|
479 self.testparam.append(tp)
|
|
480 for p in self.infiles:
|
6
|
481 newname = p["infilename"]
|
|
482 newfmt = p["format"]
|
2
|
483 ndash = self.getNdash(newname)
|
6
|
484 if not len(p["label"]) > 0:
|
|
485 alab = p["CL"]
|
2
|
486 else:
|
6
|
487 alab = p["label"]
|
2
|
488 aninput = gxtp.DataParam(
|
|
489 newname,
|
|
490 optional=False,
|
|
491 label=alab,
|
6
|
492 help=p["help"],
|
2
|
493 format=newfmt,
|
|
494 multiple=False,
|
|
495 num_dashes=ndash,
|
|
496 )
|
|
497 aninput.positional = self.is_positional
|
6
|
498 if self.is_positional:
|
|
499 if p["origCL"].upper() == "STDIN":
|
|
500 aparm.positional = 9999998
|
|
501 aparm.command_line_override = "> $%s" % newname
|
|
502 else:
|
|
503 aparm.positional = int(p["origCL"])
|
|
504 aparm.command_line_override = "$%s" % newname
|
2
|
505 self.tinputs.append(aninput)
|
|
506 tparm = gxtp.TestParam(name=newname, value="%s_sample" % newname)
|
|
507 self.testparam.append(tparm)
|
|
508 for p in self.addpar:
|
6
|
509 newname = p["name"]
|
|
510 newval = p["value"]
|
|
511 newlabel = p["label"]
|
|
512 newhelp = p["help"]
|
|
513 newtype = p["type"]
|
|
514 newcl = p["CL"]
|
|
515 oldcl = p["origCL"]
|
2
|
516 if not len(newlabel) > 0:
|
|
517 newlabel = newname
|
|
518 ndash = self.getNdash(newname)
|
|
519 if newtype == "text":
|
|
520 aparm = gxtp.TextParam(
|
|
521 newname,
|
|
522 label=newlabel,
|
|
523 help=newhelp,
|
|
524 value=newval,
|
|
525 num_dashes=ndash,
|
|
526 )
|
|
527 elif newtype == "integer":
|
|
528 aparm = gxtp.IntegerParam(
|
|
529 newname,
|
|
530 label=newname,
|
|
531 help=newhelp,
|
|
532 value=newval,
|
|
533 num_dashes=ndash,
|
|
534 )
|
|
535 elif newtype == "float":
|
|
536 aparm = gxtp.FloatParam(
|
|
537 newname,
|
|
538 label=newname,
|
|
539 help=newhelp,
|
|
540 value=newval,
|
|
541 num_dashes=ndash,
|
|
542 )
|
6
|
543 elif newtype == "boolean":
|
|
544 aparm = gxtp.BooleanParam(
|
|
545 newname,
|
|
546 label=newname,
|
|
547 help=newhelp,
|
|
548 value=newval,
|
|
549 num_dashes=ndash,
|
|
550 )
|
2
|
551 else:
|
|
552 raise ValueError(
|
|
553 'Unrecognised parameter type "%s" for\
|
|
554 additional parameter %s in makeXML'
|
|
555 % (newtype, newname)
|
|
556 )
|
|
557 aparm.positional = self.is_positional
|
|
558 if self.is_positional:
|
|
559 aparm.positional = int(oldcl)
|
|
560 self.tinputs.append(aparm)
|
|
561 tparm = gxtp.TestParam(newname, value=newval)
|
|
562 self.testparam.append(tparm)
|
6
|
563 for p in self.selpar:
|
|
564 newname = p["name"]
|
|
565 newval = p["value"]
|
|
566 newlabel = p["label"]
|
|
567 newhelp = p["help"]
|
|
568 newtype = p["type"]
|
|
569 newcl = p["CL"]
|
|
570 if not len(newlabel) > 0:
|
|
571 newlabel = newname
|
|
572 ndash = self.getNdash(newname)
|
|
573 if newtype == "selecttext":
|
|
574 newtext = p["texts"]
|
|
575 aparm = gxtp.SelectParam(
|
|
576 newname,
|
|
577 label=newlabel,
|
|
578 help=newhelp,
|
|
579 num_dashes=ndash,
|
|
580 )
|
|
581 for i in range(len(newval)):
|
|
582 anopt = gxtp.SelectOption(
|
|
583 value=newval[i],
|
|
584 text=newtext[i],
|
|
585 )
|
|
586 aparm.append(anopt)
|
|
587 aparm.positional = self.is_positional
|
|
588 if self.is_positional:
|
|
589 aparm.positional = int(newcl)
|
|
590 self.tinputs.append(aparm)
|
|
591 tparm = gxtp.TestParam(newname, value=newval)
|
|
592 self.testparam.append(tparm)
|
|
593 else:
|
|
594 raise ValueError(
|
|
595 'Unrecognised parameter type "%s" for\
|
|
596 selecttext parameter %s in makeXML'
|
|
597 % (newtype, newname)
|
|
598 )
|
|
599 for p in self.collections:
|
|
600 newkind = p["kind"]
|
|
601 newname = p["name"]
|
|
602 newlabel = p["label"]
|
|
603 newdisc = p["discover"]
|
|
604 collect = gxtp.OutputCollection(newname, label=newlabel, type=newkind)
|
|
605 disc = gxtp.DiscoverDatasets(
|
|
606 pattern=newdisc, directory=f"{newname}", visible="false"
|
|
607 )
|
|
608 collect.append(disc)
|
|
609 self.toutputs.append(collect)
|
|
610 tparm = gxtp.TestOutput(newname, ftype="pdf")
|
|
611 self.testparam.append(tparm)
|
2
|
612
|
|
    def doNoXMLparam(self):
        """filter style package - stdin to stdout"""
        # parampass == "0": the tool reads its single input on stdin and
        # writes its single output on stdout, so at most one (redirected)
        # input and output parameter is generated.
        if len(self.infiles) > 0:
            alab = self.infiles[0]["label"]
            if len(alab) == 0:
                # fall back to the derived file name when no label given
                alab = self.infiles[0]["infilename"]
            max1s = (
                "Maximum one input if parampass is 0 but multiple input files supplied - %s"
                % str(self.infiles)
            )
            assert len(self.infiles) == 1, max1s
            newname = self.infiles[0]["infilename"]
            aninput = gxtp.DataParam(
                newname,
                optional=False,
                label=alab,
                help=self.infiles[0]["help"],
                format=self.infiles[0]["format"],
                multiple=False,
                num_dashes=0,
            )
            # input arrives on stdin
            aninput.command_line_override = "< $%s" % newname
            aninput.positional = True
            self.tinputs.append(aninput)
            tp = gxtp.TestParam(name=newname, value="%s_sample" % newname)
            self.testparam.append(tp)
        if len(self.outfiles) > 0:
            newname = self.outfiles[0]["name"]
            newfmt = self.outfiles[0]["format"]
            anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0)
            # output is captured from stdout
            anout.command_line_override = "> $%s" % newname
            anout.positional = self.is_positional
            self.toutputs.append(anout)
            tp = gxtp.TestOutput(name=newname, value="%s_sample" % newname)
            self.testparam.append(tp)
|
|
648
|
|
    def makeXML(self):
        """
        Create a Galaxy xml tool wrapper for the new script
        Uses galaxyhtml
        Hmmm. How to get the command line into correct order...

        Writes <tool_name>.xml into the current directory. Builds citations,
        help (optionally embedding the script), requirements, parameters
        (delegated to doNoXMLparam/doXMLparam) and tests, then exports and
        post-processes the XML text.
        """
        if self.command_override:
            self.newtool.command_override = self.command_override  # config file
        else:
            self.newtool.command_override = self.xmlcl
        cite = gxtp.Citations()
        acite = gxtp.Citation(type="doi", value="10.1093/bioinformatics/bts573")
        cite.append(acite)
        self.newtool.citations = cite
        safertext = ""
        if self.args.help_text:
            # NOTE(review): handle is never closed - a `with` block would be
            # safer, though this is a short-lived script
            helptext = open(self.args.help_text, "r").readlines()
            safertext = "\n".join([cheetah_escape(x) for x in helptext])
        if len(safertext.strip()) == 0:
            # placeholder help so the generated tool is never help-less
            safertext = (
                "Ask the tool author (%s) to rebuild with help text please\n"
                % (self.args.user_email)
            )
        if self.args.script_path:
            if len(safertext) > 0:
                safertext = safertext + "\n\n------\n"  # transition allowed!
            scr = [x for x in self.spacedScript if x.strip() > ""]
            scr.insert(0, "\n\nScript::\n")
            if len(scr) > 300:
                # truncate very long scripts in the rendered help
                scr = (
                    scr[:100]
                    + [" >300 lines - stuff deleted", " ......"]
                    + scr[-100:]
                )
            scr.append("\n")
            safertext = safertext + "\n".join(scr)
        self.newtool.help = safertext
        self.newtool.version_command = f'echo "{self.args.tool_version}"'
        requirements = gxtp.Requirements()
        if self.args.packages:
            for d in self.args.packages.split(","):
                ver = ""
                # normalise conda-style pins (pkg==1.0 / pkg=1.0) to pkg:1.0
                d = d.replace("==", ":")
                d = d.replace("=", ":")
                if ":" in d:
                    packg, ver = d.split(":")
                else:
                    packg = d
                requirements.append(
                    gxtp.Requirement("package", packg.strip(), ver.strip())
                )
        self.newtool.requirements = requirements
        if self.args.parampass == "0":
            self.doNoXMLparam()
        else:
            self.doXMLparam()
        self.newtool.outputs = self.toutputs
        self.newtool.inputs = self.tinputs
        if self.args.script_path:
            # embed the escaped script as the $runme configfile
            configfiles = gxtp.Configfiles()
            configfiles.append(
                gxtp.Configfile(name="runme", text="\n".join(self.escapedScript))
            )
            self.newtool.configfiles = configfiles
        tests = gxtp.Tests()
        test_a = gxtp.Test()
        for tp in self.testparam:
            test_a.append(tp)
        tests.append(test_a)
        self.newtool.tests = tests
        self.newtool.add_comment(
            "Created by %s at %s using the Galaxy Tool Factory."
            % (self.args.user_email, timenow())
        )
        self.newtool.add_comment("Source in git at: %s" % (toolFactoryURL))
        exml0 = self.newtool.export()
        exml = exml0.replace(FAKEEXE, "")  # temporary work around until PR accepted
        if (
            self.test_override
        ):  # cannot do this inside galaxyxml as it expects lxml objects for tests
            # splice the user-supplied <tests> section in as raw text
            part1 = exml.split("<tests>")[0]
            part2 = exml.split("</tests>")[1]
            fixed = "%s\n%s\n%s" % (part1, "\n".join(self.test_override), part2)
            exml = fixed
        # exml = exml.replace('range="1:"', 'range="1000:"')
        xf = open("%s.xml" % self.tool_name, "w")
        xf.write(exml)
        xf.write("\n")
        xf.close()
        # ready for the tarball
|
|
739
|
|
    def run(self):
        """
        generate test outputs by running a command line
        won't work if command or test override in play - planemo is the
        easiest way to generate test outputs for that case so is
        automagically selected

        Returns the subprocess return code; empty log files are removed.
        """
        scl = " ".join(self.cl)
        # NOTE(review): err is never reassigned, so the final error branch
        # below is dead code
        err = None
        if self.args.parampass != "0":
            # stderr goes to the error log; stdout either becomes the
            # redirected output file or is appended to the runner log
            if os.path.exists(self.elog):
                ste = open(self.elog, "a")
            else:
                ste = open(self.elog, "w")
            if self.lastclredirect:
                sto = open(self.lastclredirect[1], "wb")  # is name of an output file
            else:
                if os.path.exists(self.tlog):
                    sto = open(self.tlog, "a")
                else:
                    sto = open(self.tlog, "w")
                sto.write(
                    "## Executing Toolfactory generated command line = %s\n" % scl
                )
            sto.flush()
            subp = subprocess.run(
                self.cl, env=self.ourenv, shell=False, stdout=sto, stderr=ste
            )
            sto.close()
            ste.close()
            retval = subp.returncode
        else:  # work around special case - stdin and write to stdout
            if len(self.infiles) > 0:
                sti = open(self.infiles[0]["name"], "rb")
            else:
                sti = sys.stdin
            if len(self.outfiles) > 0:
                sto = open(self.outfiles[0]["name"], "wb")
            else:
                sto = sys.stdout
            subp = subprocess.run(
                self.cl, env=self.ourenv, shell=False, stdout=sto, stdin=sti
            )
            # NOTE(review): when outfiles exist, sto is binary-mode, so
            # writing a str here raises TypeError - this log line looks like
            # it belongs in the runner log, not the output file; confirm
            sto.write("## Executing Toolfactory generated command line = %s\n" % scl)
            retval = subp.returncode
            sto.close()
            # NOTE(review): closes sys.stdin/sys.stdout when no files given
            sti.close()
        # drop empty logs so they do not clutter the report collection
        if os.path.isfile(self.tlog) and os.stat(self.tlog).st_size == 0:
            os.unlink(self.tlog)
        if os.path.isfile(self.elog) and os.stat(self.elog).st_size == 0:
            os.unlink(self.elog)
        if retval != 0 and err:  # problem
            sys.stderr.write(err)
        logging.debug("run done")
        return retval
|
|
795
|
6
|
    def shedLoad(self):
        """
        use bioblend to create new repository
        or update existing

        Creates the repository (in the ToolFactory category if present) when
        the tool name is not already in the shed, otherwise uploads the new
        tarball as an update. Progress is appended to the runner log.
        """
        if os.path.exists(self.tlog):
            sto = open(self.tlog, "a")
        else:
            sto = open(self.tlog, "w")

        ts = toolshed.ToolShedInstance(
            url=self.args.toolshed_url,
            key=self.args.toolshed_api_key,
            verify=False,
        )
        repos = ts.repositories.get_repositories()
        rnames = [x.get("name", "?") for x in repos]
        rids = [x.get("id", "?") for x in repos]
        tfcat = "ToolFactory generated tools"
        if self.tool_name not in rnames:
            # new tool: create a repository, using the TF category if defined
            tscat = ts.categories.get_categories()
            cnames = [x.get("name", "?").strip() for x in tscat]
            cids = [x.get("id", "?") for x in tscat]
            catID = None
            if tfcat.strip() in cnames:
                ci = cnames.index(tfcat)
                catID = cids[ci]
            res = ts.repositories.create_repository(
                name=self.args.tool_name,
                synopsis="Synopsis:%s" % self.args.tool_desc,
                description=self.args.tool_desc,
                type="unrestricted",
                remote_repository_url=self.args.toolshed_url,
                homepage_url=None,
                category_ids=catID,
            )
            tid = res.get("id", None)
            sto.write(f"#create_repository {self.args.tool_name} tid={tid} res={res}\n")
        else:
            # existing tool: push the new tarball as an update
            i = rnames.index(self.tool_name)
            tid = rids[i]
            try:
                res = ts.repositories.update_repository(
                    id=tid, tar_ball_path=self.newtarpath, commit_message=None
                )
                # NOTE(review): logs the builtin `id`, not `tid` - probably
                # meant tid here; confirm before changing the log format
                sto.write(f"#update res id {id} ={res}\n")
            except ConnectionError:
                sto.write(
                    "####### Is the toolshed running and the API key correct? Bioblend shed upload failed\n"
                )
        sto.close()
|
|
848
|
|
    def eph_galaxy_load(self):
        """
        use ephemeris to load the new tool from the local toolshed after planemo uploads it

        Runs the ephemeris `shed-tools install` CLI and returns its return
        code; output is appended to the runner log.
        """
        if os.path.exists(self.tlog):
            tout = open(self.tlog, "a")
        else:
            tout = open(self.tlog, "w")
        # ephemeris CLI handles the toolshed -> galaxy installation
        cll = [
            "shed-tools",
            "install",
            "-g",
            self.args.galaxy_url,
            "--latest",
            "-a",
            self.args.galaxy_api_key,
            "--name",
            self.tool_name,
            "--owner",
            "fubar",
            "--toolshed",
            self.args.toolshed_url,
            "--section_label",
            "ToolFactory",
        ]
        tout.write("running\n%s\n" % " ".join(cll))
        subp = subprocess.run(
            cll,
            env=self.ourenv,
            cwd=self.ourcwd,
            shell=False,
            stderr=tout,
            stdout=tout,
        )
        tout.write(
            "installed %s - got retcode %d\n" % (self.tool_name, subp.returncode)
        )
        tout.close()
        return subp.returncode
|
|
888
|
|
889 def writeShedyml(self):
|
|
890 """for planemo"""
|
|
891 yuser = self.args.user_email.split("@")[0]
|
|
892 yfname = os.path.join(self.tooloutdir, ".shed.yml")
|
|
893 yamlf = open(yfname, "w")
|
|
894 odict = {
|
|
895 "name": self.tool_name,
|
|
896 "owner": yuser,
|
|
897 "type": "unrestricted",
|
|
898 "description": self.args.tool_desc,
|
|
899 "synopsis": self.args.tool_desc,
|
|
900 "category": "TF Generated Tools",
|
|
901 }
|
|
902 yaml.dump(odict, yamlf, allow_unicode=True)
|
|
903 yamlf.close()
|
|
904
|
|
    def makeTool(self):
        """write xmls and input samples into place"""
        # NOTE(review): parampass is a string ("0") everywhere else, so this
        # int comparison is always False and makeXML() is always taken.
        # That is probably the desired behaviour - makeXML() handles the "0"
        # case itself and actually writes the XML file - so confirm intent
        # before "fixing" the comparison.
        if self.args.parampass == 0:
            self.doNoXMLparam()
        else:
            self.makeXML()
        if self.args.script_path:
            # keep a copy of the script alongside the generated XML
            stname = os.path.join(self.tooloutdir, self.sfile)
            if not os.path.exists(stname):
                shutil.copyfile(self.sfile, stname)
        xreal = "%s.xml" % self.tool_name
        xout = os.path.join(self.tooloutdir, xreal)
        shutil.copyfile(xreal, xout)
        for p in self.infiles:
            pth = p["name"]
            # input samples go into test-data and into the run report
            dest = os.path.join(self.testdir, "%s_sample" % p["infilename"])
            shutil.copyfile(pth, dest)
            dest = os.path.join(self.repdir, "%s_sample" % p["infilename"])
            shutil.copyfile(pth, dest)
|
|
924
|
|
    def makeToolTar(self, report_fail=False):
        """move outputs into test-data and prepare the tarball

        report_fail: when True, log outputs missing from test-data (likely a
        failed planemo run). The tarball excludes planemo's html report and
        is copied to the --new_tool destination.
        """
        excludeme = "_planemo_test_report.html"

        def exclude_function(tarinfo):
            # drop planemo's html report from the archive
            filename = tarinfo.name
            return None if filename.endswith(excludeme) else tarinfo

        # NOTE(review): tout is never closed in this method
        if os.path.exists(self.tlog):
            tout = open(self.tlog, "a")
        else:
            tout = open(self.tlog, "w")
        for p in self.outfiles:
            oname = p["name"]
            tdest = os.path.join(self.testdir, "%s_sample" % oname)
            src = os.path.join(self.testdir, oname)
            if not os.path.isfile(tdest):
                if os.path.isfile(src):
                    # promote the freshly generated output to the sample used
                    # by the tool test, and copy it into the report dir
                    shutil.copyfile(src, tdest)
                    dest = os.path.join(self.repdir, "%s.sample" % (oname))
                    shutil.copyfile(src, dest)
                else:
                    if report_fail:
                        tout.write(
                            "###Tool may have failed - output file %s not found in testdir after planemo run %s."
                            % (tdest, self.testdir)
                        )
        tf = tarfile.open(self.newtarpath, "w:gz")
        tf.add(
            name=self.tooloutdir,
            arcname=self.tool_name,
            filter=exclude_function,
        )
        tf.close()
        shutil.copyfile(self.newtarpath, self.args.new_tool)
|
|
960
|
|
961 def moveRunOutputs(self):
|
|
962 """need to move planemo or run outputs into toolfactory collection"""
|
|
963 with os.scandir(self.tooloutdir) as outs:
|
|
964 for entry in outs:
|
|
965 if not entry.is_file():
|
|
966 continue
|
|
967 if "." in entry.name:
|
|
968 _, ext = os.path.splitext(entry.name)
|
|
969 if ext in [".tgz", ".json"]:
|
|
970 continue
|
|
971 if ext in [".yml", ".xml", ".yaml"]:
|
|
972 newname = f"{entry.name.replace('.','_')}.txt"
|
|
973 else:
|
|
974 newname = entry.name
|
|
975 else:
|
|
976 newname = f"{entry.name}.txt"
|
|
977 dest = os.path.join(self.repdir, newname)
|
|
978 src = os.path.join(self.tooloutdir, entry.name)
|
|
979 shutil.copyfile(src, dest)
|
|
980 if self.args.include_tests:
|
|
981 with os.scandir(self.testdir) as outs:
|
|
982 for entry in outs:
|
|
983 if (not entry.is_file()) or entry.name.endswith(
|
|
984 "_planemo_test_report.html"
|
|
985 ):
|
|
986 continue
|
|
987 if "." in entry.name:
|
|
988 _, ext = os.path.splitext(entry.name)
|
|
989 if ext in [".tgz", ".json"]:
|
|
990 continue
|
|
991 if ext in [".yml", ".xml", ".yaml"]:
|
|
992 newname = f"{entry.name.replace('.','_')}.txt"
|
|
993 else:
|
|
994 newname = entry.name
|
|
995 else:
|
|
996 newname = f"{entry.name}.txt"
|
|
997 dest = os.path.join(self.repdir, newname)
|
|
998 src = os.path.join(self.testdir, entry.name)
|
|
999 shutil.copyfile(src, dest)
|
|
1000
|
2
|
1001 def copy_to_container(self, src, dest, container):
|
|
1002 """Recreate the src directory tree at dest - full path included"""
|
|
1003 idir = os.getcwd()
|
|
1004 workdir = os.path.dirname(src)
|
|
1005 os.chdir(workdir)
|
|
1006 _, tfname = tempfile.mkstemp(suffix=".tar")
|
|
1007 tar = tarfile.open(tfname, mode="w")
|
|
1008 srcb = os.path.basename(src)
|
|
1009 tar.add(srcb)
|
|
1010 tar.close()
|
|
1011 data = open(tfname, "rb").read()
|
|
1012 container.put_archive(dest, data)
|
|
1013 os.unlink(tfname)
|
|
1014 os.chdir(idir)
|
|
1015
|
|
1016 def copy_from_container(self, src, dest, container):
|
|
1017 """recreate the src directory tree at dest using docker sdk"""
|
|
1018 os.makedirs(dest, exist_ok=True)
|
|
1019 _, tfname = tempfile.mkstemp(suffix=".tar")
|
|
1020 tf = open(tfname, "wb")
|
|
1021 bits, stat = container.get_archive(src)
|
|
1022 for chunk in bits:
|
|
1023 tf.write(chunk)
|
|
1024 tf.close()
|
|
1025 tar = tarfile.open(tfname, "r")
|
|
1026 tar.extractall(dest)
|
|
1027 tar.close()
|
|
1028 os.unlink(tfname)
|
|
1029
|
|
    def planemo_biodocker_test(self):
        """Run ``planemo test`` for the generated tool inside a fresh Docker container.

        planemo currently leaks dependencies if used in the same container and gets unhappy after a
        first successful run. https://github.com/galaxyproject/planemo/issues/1078#issuecomment-731476930

        Docker biocontainer has planemo with caches filled to save repeated downloads

        Side effects: starts a long-sleeping container with a fresh volume,
        copies the tool tree in, runs planemo twice (first pass updates
        test-data, second pass produces the real report), copies results
        back, appends to self.tlog, writes the HTML report into self.repdir,
        then stops/removes the container and volume.
        """

        def prun(container, tout, cl, user="biodocker"):
            # Run command line `cl` inside the container and log its output.
            rlog = container.exec_run(cl, user=user)
            # NOTE(review): this splits on the two-character sequence '\n' in
            # the repr() of the exec result, not on real newlines — presumably
            # intentional because str(rlog) shows escaped newlines; confirm.
            slogl = str(rlog).split("\\n")
            slog = "\n".join(slogl)
            tout.write(f"## got rlog {slog} from {cl}\n")

        # Append to the existing run log, or create it on first use.
        if os.path.exists(self.tlog):
            tout = open(self.tlog, "a")
        else:
            tout = open(self.tlog, "w")
        planemoimage = "quay.io/fubar2/planemo-biocontainer"
        xreal = "%s.xml" % self.tool_name
        repname = f"{self.tool_name}_planemo_test_report.html"
        ptestrep_path = os.path.join(self.repdir, repname)
        client = docker.from_env()
        # Fresh named volume mounted at /toolfactory for this test run.
        tvol = client.volumes.create()
        tvolname = tvol.name
        destdir = "/toolfactory/ptest"
        imrep = os.path.join(destdir, repname)
        # need to keep the container running so keep it open with sleep
        # will stop and destroy it when we are done
        container = client.containers.run(
            planemoimage,
            "sleep 120m",
            detach=True,
            user="biodocker",
            volumes={f"{tvolname}": {"bind": "/toolfactory", "mode": "rw"}},
        )
        cl = f"mkdir -p {destdir}"
        prun(container, tout, cl, user="root")
        # that's how hard it is to get root on a biodocker container :(
        cl = f"rm -rf {destdir}/*"
        prun(container, tout, cl, user="root")
        ptestpath = os.path.join(destdir, "tfout", xreal)
        # Ship the generated tool tree into the container's volume.
        self.copy_to_container(self.tooloutdir, destdir, container)
        cl = "chown -R biodocker /toolfactory"
        prun(container, tout, cl, user="root")
        _ = container.exec_run(f"ls -la {destdir}")
        # First pass: --update_test_data populates test-data with actual outputs.
        ptestcl = f"planemo test --test_output {imrep} --update_test_data --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}"
        try:
            _ = container.exec_run(ptestcl)
            # fails because test outputs missing but updates the test-data directory
        except Exception:
            e = sys.exc_info()[0]
            tout.write(f"#### error: {e} from {ptestcl}\n")
        # Second pass: run the real test against the now-populated test-data.
        cl = f"planemo test --test_output {imrep} --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}"
        try:
            prun(container, tout, cl)
        except Exception:
            e = sys.exc_info()[0]
            tout.write(f"#### error: {e} from {ptestcl}\n")
        # Pull the whole ptest tree back out and merge it into the cwd.
        testouts = tempfile.mkdtemp(suffix=None, prefix="tftemp", dir=".")
        self.copy_from_container(destdir, testouts, container)
        src = os.path.join(testouts, "ptest")
        if os.path.isdir(src):
            shutil.copytree(src, ".", dirs_exist_ok=True)
            src = repname
            if os.path.isfile(repname):
                # Publish the planemo HTML report into the report collection.
                shutil.copyfile(src, ptestrep_path)
        else:
            tout.write(f"No output from run to shutil.copytree in {src}\n")
        tout.close()
        # Tear down the container and its volume now that results are local.
        container.stop()
        container.remove()
        tvol.remove()
        shutil.rmtree(testouts)  # leave for debugging
|
|
1105
|
|
1106
|
|
def main():
    """
    This is a Galaxy wrapper.
    It expects to be called by a special purpose tool.xml

    Parses the command line, validates the minimal required arguments and
    drives a ScriptRunner through tool generation, optional planemo testing
    and optional toolshed install of the newly generated tool.
    """
    parser = argparse.ArgumentParser()
    a = parser.add_argument
    a("--script_path", default=None)
    a("--history_test", default=None)
    a("--cl_prefix", default=None)
    a("--sysexe", default=None)
    a("--packages", default=None)
    a("--tool_name", default="newtool")
    a("--tool_dir", default=None)
    a("--input_files", default=[], action="append")
    a("--output_files", default=[], action="append")
    a("--user_email", default="Unknown")
    a("--bad_user", default=None)
    a("--make_Tool", default="runonly")
    a("--help_text", default=None)
    a("--tool_desc", default=None)
    a("--tool_version", default=None)
    a("--citations", default=None)
    a("--command_override", default=None)
    a("--test_override", default=None)
    a("--additional_parameters", action="append", default=[])
    a("--selecttext_parameters", action="append", default=[])
    a("--edit_additional_parameters", action="store_true", default=False)
    a("--parampass", default="positional")
    a("--tfout", default="./tfout")
    a("--new_tool", default="new_tool")
    a("--galaxy_url", default="http://localhost:8080")
    a("--toolshed_url", default="http://localhost:9009")
    # make sure this is identical to tool_sheds_conf.xml
    # localhost != 127.0.0.1 so validation fails
    a("--toolshed_api_key", default="fakekey")
    a("--galaxy_api_key", default="fakekey")
    a("--galaxy_root", default="/galaxy-central")
    a("--galaxy_venv", default="/galaxy_venv")
    a("--collection", action="append", default=[])
    a("--include_tests", default=False, action="store_true")
    args = parser.parse_args()
    # Explicit checks instead of `assert` so validation still runs under
    # `python -O` (asserts are stripped by the optimizer); sys.exit prints
    # the message to stderr and returns a non-zero status to Galaxy.
    if args.bad_user:
        sys.exit(
            'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file'
            % (args.bad_user, args.bad_user)
        )
    if not args.tool_name:
        sys.exit("## Tool Factory expects a tool name - eg --tool_name=DESeq")
    if not (args.sysexe or args.packages):
        sys.exit("## Tool Factory wrapper expects an interpreter or an executable package")
    r = ScriptRunner(args)
    r.writeShedyml()
    r.makeTool()
    if args.make_Tool == "generate":
        _ = r.run()  # for testing toolfactory itself
        r.moveRunOutputs()
        r.makeToolTar()
    else:
        # Test via planemo in a biocontainer to produce real outputs first.
        r.planemo_biodocker_test()  # test to make outputs and then test
        r.moveRunOutputs()
        r.makeToolTar()
        if args.make_Tool == "gentestinstall":
            r.shedLoad()
            r.eph_galaxy_load()
|
|
1172
|
|
1173
|
|
# Standard script entry point: only run when executed directly, not on import.
if __name__ == "__main__":
    main()
|