comparison: toolfactory_docker/rgToolFactory2.py @ 6:482386d6cc43 (draft, default, tip)
Uploaded
| author | fubar |
|---|---|
| date | Sun, 24 Jan 2021 03:54:01 +0000 |
| parents | f17a2b1972f1 |
| children | |
| 5:f17a2b1972f1 | 6:482386d6cc43 |
|---|---|
| 19 # calling venv. Hilarity ensues. | 19 # calling venv. Hilarity ensues. |
| 20 | 20 |
| 21 | 21 |
| 22 import argparse | 22 import argparse |
| 23 import copy | 23 import copy |
| 24 import json | |
| 24 import logging | 25 import logging |
| 25 import os | 26 import os |
| 26 import re | 27 import re |
| 27 import shutil | 28 import shutil |
| 28 import subprocess | 29 import subprocess |
| 47 | 48 |
| 48 myversion = "V2.1 July 2020" | 49 myversion = "V2.1 July 2020" |
| 49 verbose = True | 50 verbose = True |
| 50 debug = True | 51 debug = True |
| 51 toolFactoryURL = "https://github.com/fubar2/toolfactory" | 52 toolFactoryURL = "https://github.com/fubar2/toolfactory" |
| 52 ourdelim = "~~~" | |
| 53 | |
| 54 # --input_files="$intab.input_files~~~$intab.input_CL~~~ | |
| 55 # $intab.input_formats# ~~~$intab.input_label | |
| 56 # ~~~$intab.input_help" | |
| 57 IPATHPOS = 0 | |
| 58 ICLPOS = 1 | |
| 59 IFMTPOS = 2 | |
| 60 ILABPOS = 3 | |
| 61 IHELPOS = 4 | |
| 62 IOCLPOS = 5 | |
| 63 | |
| 64 # --output_files "$otab.history_name~~~$otab.history_format~~~ | |
| 65 # $otab.history_CL~~~$otab.history_test" | |
| 66 ONAMEPOS = 0 | |
| 67 OFMTPOS = 1 | |
| 68 OCLPOS = 2 | |
| 69 OTESTPOS = 3 | |
| 70 OOCLPOS = 4 | |
| 71 | |
| 72 | |
| 73 # --additional_parameters="$i.param_name~~~$i.param_value~~~ | |
| 74 # $i.param_label~~~$i.param_help~~~$i.param_type | |
| 75 # ~~~$i.CL~~~i$.param_CLoverride" | |
| 76 ANAMEPOS = 0 | |
| 77 AVALPOS = 1 | |
| 78 ALABPOS = 2 | |
| 79 AHELPPOS = 3 | |
| 80 ATYPEPOS = 4 | |
| 81 ACLPOS = 5 | |
| 82 AOVERPOS = 6 | |
| 83 AOCLPOS = 7 | |
| 84 | |
| 85 | |
| 86 foo = len(lxml.__version__) | 53 foo = len(lxml.__version__) |
| 87 # fug you, flake8. Say my name! | 54 # fug you, flake8. Say my name! |
| 88 FAKEEXE = "~~~REMOVE~~~ME~~~" | 55 FAKEEXE = "~~~REMOVE~~~ME~~~" |
| 89 # need this until a PR/version bump to fix galaxyxml prepending the exe even | 56 # need this until a PR/version bump to fix galaxyxml prepending the exe even |
| 90 # with override. | 57 # with override. |
| 93 def timenow(): | 60 def timenow(): |
| 94 """return current time as a string""" | 61 """return current time as a string""" |
| 95 return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) | 62 return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) |
| 96 | 63 |
| 97 | 64 |
| 98 def quote_non_numeric(s): | |
| 99 """return a prequoted string for non-numerics | |
| 100 useful for perl and Rscript parameter passing? | |
| 101 """ | |
| 102 try: | |
| 103 _ = float(s) | |
| 104 return s | |
| 105 except ValueError: | |
| 106 return '"%s"' % s | |
| 107 | |
| 108 | |
| 109 html_escape_table = { | |
| 110 "&": "&", | |
| 111 ">": ">", | |
| 112 "<": "<", | |
| 113 "#": "#", | |
| 114 "$": "$", | |
| 115 } | |
| 116 cheetah_escape_table = {"$": "\\$", "#": "\\#"} | 65 cheetah_escape_table = {"$": "\\$", "#": "\\#"} |
| 117 | |
| 118 | |
| 119 def html_escape(text): | |
| 120 """Produce entities within text.""" | |
| 121 return "".join([html_escape_table.get(c, c) for c in text]) | |
| 122 | 66 |
| 123 | 67 |
| 124 def cheetah_escape(text): | 68 def cheetah_escape(text): |
| 125 """Produce entities within text.""" | 69 """Produce entities within text.""" |
| 126 return "".join([cheetah_escape_table.get(c, c) for c in text]) | 70 return "".join([cheetah_escape_table.get(c, c) for c in text]) |
| 136 else: | 80 else: |
| 137 citation_tuples.append(("bibtex", citation[len("bibtex") :].strip())) | 81 citation_tuples.append(("bibtex", citation[len("bibtex") :].strip())) |
| 138 return citation_tuples | 82 return citation_tuples |
| 139 | 83 |
| 140 | 84 |
| 141 class Error(Exception): | |
| 142 """Base class for exceptions in this module.""" | |
| 143 | |
| 144 pass | |
| 145 | |
| 146 | |
| 147 class ScriptRunner: | 85 class ScriptRunner: |
| 148 """Wrapper for an arbitrary script | 86 """Wrapper for an arbitrary script |
| 149 uses galaxyxml | 87 uses galaxyxml |
| 150 | 88 |
| 151 """ | 89 """ |
| 155 prepare command line cl for running the tool here | 93 prepare command line cl for running the tool here |
| 156 and prepare elements needed for galaxyxml tool generation | 94 and prepare elements needed for galaxyxml tool generation |
| 157 """ | 95 """ |
| 158 self.ourcwd = os.getcwd() | 96 self.ourcwd = os.getcwd() |
| 159 self.ourenv = copy.deepcopy(os.environ) | 97 self.ourenv = copy.deepcopy(os.environ) |
| 160 self.infiles = [x.split(ourdelim) for x in args.input_files] | 98 self.collections = [] |
| 161 self.outfiles = [x.split(ourdelim) for x in args.output_files] | 99 if len(args.collection) > 0: |
| 162 self.addpar = [x.split(ourdelim) for x in args.additional_parameters] | 100 try: |
| 101 self.collections = [ | |
| 102 json.loads(x) for x in args.collection if len(x.strip()) > 1 | |
| 103 ] | |
| 104 except Exception: | |
| 105 print( | |
| 106 f"--collections parameter {str(args.collection)} is malformed - should be a dictionary" | |
| 107 ) | |
| 108 try: | |
| 109 self.infiles = [ | |
| 110 json.loads(x) for x in args.input_files if len(x.strip()) > 1 | |
| 111 ] | |
| 112 except Exception: | |
| 113 print( | |
| 114 f"--input_files parameter {str(args.input_files)} is malformed - should be a dictionary" | |
| 115 ) | |
| 116 try: | |
| 117 self.outfiles = [ | |
| 118 json.loads(x) for x in args.output_files if len(x.strip()) > 1 | |
| 119 ] | |
| 120 except Exception: | |
| 121 print( | |
| 122 f"--output_files parameter {args.output_files} is malformed - should be a dictionary" | |
| 123 ) | |
| 124 try: | |
| 125 self.addpar = [ | |
| 126 json.loads(x) for x in args.additional_parameters if len(x.strip()) > 1 | |
| 127 ] | |
| 128 except Exception: | |
| 129 print( | |
| 130 f"--additional_parameters {args.additional_parameters} is malformed - should be a dictionary" | |
| 131 ) | |
| 132 try: | |
| 133 self.selpar = [ | |
| 134 json.loads(x) for x in args.selecttext_parameters if len(x.strip()) > 1 | |
| 135 ] | |
| 136 except Exception: | |
| 137 print( | |
| 138 f"--selecttext_parameters {args.selecttext_parameters} is malformed - should be a dictionary" | |
| 139 ) | |
| 163 self.args = args | 140 self.args = args |
| 164 self.cleanuppar() | 141 self.cleanuppar() |
| 165 self.lastclredirect = None | 142 self.lastclredirect = None |
| 166 self.lastxclredirect = None | 143 self.lastxclredirect = None |
| 167 self.cl = [] | 144 self.cl = [] |
| 233 self.tlog = os.path.join(self.repdir, "%s_runner_log.txt" % self.tool_name) | 210 self.tlog = os.path.join(self.repdir, "%s_runner_log.txt" % self.tool_name) |
| 234 | 211 |
| 235 if self.args.parampass == "0": | 212 if self.args.parampass == "0": |
| 236 self.clsimple() | 213 self.clsimple() |
| 237 else: | 214 else: |
| 238 clsuffix = [] | |
| 239 xclsuffix = [] | |
| 240 for i, p in enumerate(self.infiles): | |
| 241 if p[IOCLPOS].upper() == "STDIN": | |
| 242 appendme = [ | |
| 243 p[ICLPOS], | |
| 244 p[ICLPOS], | |
| 245 p[IPATHPOS], | |
| 246 "< %s" % p[IPATHPOS], | |
| 247 ] | |
| 248 xappendme = [ | |
| 249 p[ICLPOS], | |
| 250 p[ICLPOS], | |
| 251 p[IPATHPOS], | |
| 252 "< $%s" % p[ICLPOS], | |
| 253 ] | |
| 254 else: | |
| 255 appendme = [p[IOCLPOS], p[ICLPOS], p[IPATHPOS], ""] | |
| 256 xappendme = [p[IOCLPOS], p[ICLPOS], "$%s" % p[ICLPOS], ""] | |
| 257 clsuffix.append(appendme) | |
| 258 xclsuffix.append(xappendme) | |
| 259 for i, p in enumerate(self.outfiles): | |
| 260 if p[OOCLPOS] == "STDOUT": | |
| 261 self.lastclredirect = [">", p[ONAMEPOS]] | |
| 262 self.lastxclredirect = [">", "$%s" % p[OCLPOS]] | |
| 263 else: | |
| 264 clsuffix.append([p[OCLPOS], p[ONAMEPOS], p[ONAMEPOS], ""]) | |
| 265 xclsuffix.append([p[OCLPOS], p[ONAMEPOS], "$%s" % p[ONAMEPOS], ""]) | |
| 266 for p in self.addpar: | |
| 267 clsuffix.append([p[AOCLPOS], p[ACLPOS], p[AVALPOS], p[AOVERPOS]]) | |
| 268 xclsuffix.append( | |
| 269 [p[AOCLPOS], p[ACLPOS], '"$%s"' % p[ANAMEPOS], p[AOVERPOS]] | |
| 270 ) | |
| 271 clsuffix.sort() | |
| 272 xclsuffix.sort() | |
| 273 self.xclsuffix = xclsuffix | |
| 274 self.clsuffix = clsuffix | |
| 275 if self.args.parampass == "positional": | 215 if self.args.parampass == "positional": |
| 216 self.prepclpos() | |
| 276 self.clpositional() | 217 self.clpositional() |
| 277 else: | 218 else: |
| 219 self.prepargp() | |
| 278 self.clargparse() | 220 self.clargparse() |
| 221 | |
| 222 def clsimple(self): | |
| 223 """no parameters - uses < and > for i/o""" | |
| 224 aCL = self.cl.append | |
| 225 aXCL = self.xmlcl.append | |
| 226 if len(self.infiles) > 0: | |
| 227 aCL("<") | |
| 228 aCL(self.infiles[0]["infilename"]) | |
| 229 aXCL("<") | |
| 230 aXCL("$%s" % self.infiles[0]["infilename"]) | |
| 231 if len(self.outfiles) > 0: | |
| 232 aCL(">") | |
| 233 aCL(self.outfiles[0]["name"]) | |
| 234 aXCL(">") | |
| 235 aXCL("$%s" % self.outfiles[0]["name"]) | |
| 236 | |
| 237 def prepargp(self): | |
| 238 clsuffix = [] | |
| 239 xclsuffix = [] | |
| 240 for i, p in enumerate(self.infiles): | |
| 241 if p["origCL"].strip().upper() == "STDIN": | |
| 242 appendme = [ | |
| 243 p["infilename"], | |
| 244 p["infilename"], | |
| 245 "< %s" % p["infilename"], | |
| 246 ] | |
| 247 xappendme = [ | |
| 248 p["infilename"], | |
| 249 p["infilename"], | |
| 250 "< $%s" % p["infilename"], | |
| 251 ] | |
| 252 else: | |
| 253 appendme = [p["CL"], p["CL"], ""] | |
| 254 xappendme = [p["CL"], "$%s" % p["CL"], ""] | |
| 255 clsuffix.append(appendme) | |
| 256 xclsuffix.append(xappendme) | |
| 257 for i, p in enumerate(self.outfiles): | |
| 258 if p["origCL"].strip().upper() == "STDOUT": | |
| 259 self.lastclredirect = [">", p["name"]] | |
| 260 self.lastxclredirect = [">", "$%s" % p["name"]] | |
| 261 else: | |
| 262 clsuffix.append([p["name"], p["name"], ""]) | |
| 263 xclsuffix.append([p["name"], "$%s" % p["name"], ""]) | |
| 264 for p in self.addpar: | |
| 265 clsuffix.append([p["CL"], p["name"], p["override"]]) | |
| 266 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) | |
| 267 for p in self.selpar: | |
| 268 clsuffix.append([p["CL"], p["name"], p["override"]]) | |
| 269 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) | |
| 270 clsuffix.sort() | |
| 271 xclsuffix.sort() | |
| 272 self.xclsuffix = xclsuffix | |
| 273 self.clsuffix = clsuffix | |
| 274 | |
| 275 def prepclpos(self): | |
| 276 clsuffix = [] | |
| 277 xclsuffix = [] | |
| 278 for i, p in enumerate(self.infiles): | |
| 279 if p["origCL"].strip().upper() == "STDIN": | |
| 280 appendme = [ | |
| 281 "999", | |
| 282 p["infilename"], | |
| 283 "< $%s" % p["infilename"], | |
| 284 ] | |
| 285 xappendme = [ | |
| 286 "999", | |
| 287 p["infilename"], | |
| 288 "< $%s" % p["infilename"], | |
| 289 ] | |
| 290 else: | |
| 291 appendme = [p["CL"], p["infilename"], ""] | |
| 292 xappendme = [p["CL"], "$%s" % p["infilename"], ""] | |
| 293 clsuffix.append(appendme) | |
| 294 xclsuffix.append(xappendme) | |
| 295 for i, p in enumerate(self.outfiles): | |
| 296 if p["origCL"].strip().upper() == "STDOUT": | |
| 297 self.lastclredirect = [">", p["name"]] | |
| 298 self.lastxclredirect = [">", "$%s" % p["name"]] | |
| 299 else: | |
| 300 clsuffix.append([p["CL"], p["name"], ""]) | |
| 301 xclsuffix.append([p["CL"], "$%s" % p["name"], ""]) | |
| 302 for p in self.addpar: | |
| 303 clsuffix.append([p["CL"], p["name"], p["override"]]) | |
| 304 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) | |
| 305 for p in self.selpar: | |
| 306 clsuffix.append([p["CL"], p["name"], p["override"]]) | |
| 307 xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) | |
| 308 clsuffix.sort() | |
| 309 xclsuffix.sort() | |
| 310 self.xclsuffix = xclsuffix | |
| 311 self.clsuffix = clsuffix | |
| 279 | 312 |
| 280 def prepScript(self): | 313 def prepScript(self): |
| 281 rx = open(self.args.script_path, "r").readlines() | 314 rx = open(self.args.script_path, "r").readlines() |
| 282 rx = [x.rstrip() for x in rx] | 315 rx = [x.rstrip() for x in rx] |
| 283 rxcheck = [x.strip() for x in rx if x.strip() > ""] | 316 rxcheck = [x.strip() for x in rx if x.strip() > ""] |
| 299 def cleanuppar(self): | 332 def cleanuppar(self): |
| 300 """ positional parameters are complicated by their numeric ordinal""" | 333 """ positional parameters are complicated by their numeric ordinal""" |
| 301 if self.args.parampass == "positional": | 334 if self.args.parampass == "positional": |
| 302 for i, p in enumerate(self.infiles): | 335 for i, p in enumerate(self.infiles): |
| 303 assert ( | 336 assert ( |
| 304 p[ICLPOS].isdigit() or p[ICLPOS].strip().upper() == "STDIN" | 337 p["CL"].isdigit() or p["CL"].strip().upper() == "STDIN" |
| 305 ), "Positional parameters must be ordinal integers - got %s for %s" % ( | 338 ), "Positional parameters must be ordinal integers - got %s for %s" % ( |
| 306 p[ICLPOS], | 339 p["CL"], |
| 307 p[ILABPOS], | 340 p["label"], |
| 308 ) | 341 ) |
| 309 for i, p in enumerate(self.outfiles): | 342 for i, p in enumerate(self.outfiles): |
| 310 assert ( | 343 assert ( |
| 311 p[OCLPOS].isdigit() or p[OCLPOS].strip().upper() == "STDOUT" | 344 p["CL"].isdigit() or p["CL"].strip().upper() == "STDOUT" |
| 312 ), "Positional parameters must be ordinal integers - got %s for %s" % ( | 345 ), "Positional parameters must be ordinal integers - got %s for %s" % ( |
| 313 p[OCLPOS], | 346 p["CL"], |
| 314 p[ONAMEPOS], | 347 p["name"], |
| 315 ) | 348 ) |
| 316 for i, p in enumerate(self.addpar): | 349 for i, p in enumerate(self.addpar): |
| 317 assert p[ | 350 assert p[ |
| 318 ACLPOS | 351 "CL" |
| 319 ].isdigit(), "Positional parameters must be ordinal integers - got %s for %s" % ( | 352 ].isdigit(), "Positional parameters must be ordinal integers - got %s for %s" % ( |
| 320 p[ACLPOS], | 353 p["CL"], |
| 321 p[ANAMEPOS], | 354 p["name"], |
| 322 ) | 355 ) |
| 323 for i, p in enumerate(self.infiles): | 356 for i, p in enumerate(self.infiles): |
| 324 infp = copy.copy(p) | 357 infp = copy.copy(p) |
| 325 icl = infp[ICLPOS] | 358 infp["origCL"] = infp["CL"] |
| 326 infp.append(icl) | 359 if self.args.parampass in ["positional", "0"]: |
| 327 if ( | 360 infp["infilename"] = infp["label"].replace(" ", "_") |
| 328 infp[ICLPOS].isdigit() | 361 else: |
| 329 or self.args.parampass == "0" | 362 infp["infilename"] = infp["CL"] |
| 330 or infp[ICLPOS].strip().upper() == "STDOUT" | |
| 331 ): | |
| 332 scl = "input%d" % (i + 1) | |
| 333 infp[ICLPOS] = scl | |
| 334 self.infiles[i] = infp | 363 self.infiles[i] = infp |
| 335 for i, p in enumerate(self.outfiles): | 364 for i, p in enumerate(self.outfiles): |
| 336 p.append(p[OCLPOS]) # keep copy | 365 p["origCL"] = p["CL"] # keep copy |
| 337 if (p[OOCLPOS].isdigit() and self.args.parampass != "positional") or p[ | |
| 338 OOCLPOS | |
| 339 ].strip().upper() == "STDOUT": | |
| 340 scl = p[ONAMEPOS] | |
| 341 p[OCLPOS] = scl | |
| 342 self.outfiles[i] = p | 366 self.outfiles[i] = p |
| 343 for i, p in enumerate(self.addpar): | 367 for i, p in enumerate(self.addpar): |
| 344 p.append(p[ACLPOS]) | 368 p["origCL"] = p["CL"] |
| 345 if p[ACLPOS].isdigit(): | |
| 346 scl = "param%s" % p[ACLPOS] | |
| 347 p[ACLPOS] = scl | |
| 348 self.addpar[i] = p | 369 self.addpar[i] = p |
| 349 | |
| 350 def clsimple(self): | |
| 351 """no parameters - uses < and > for i/o""" | |
| 352 aCL = self.cl.append | |
| 353 aXCL = self.xmlcl.append | |
| 354 | |
| 355 if len(self.infiles) > 0: | |
| 356 aCL("<") | |
| 357 aCL(self.infiles[0][IPATHPOS]) | |
| 358 aXCL("<") | |
| 359 aXCL("$%s" % self.infiles[0][ICLPOS]) | |
| 360 if len(self.outfiles) > 0: | |
| 361 aCL(">") | |
| 362 aCL(self.outfiles[0][OCLPOS]) | |
| 363 aXCL(">") | |
| 364 aXCL("$%s" % self.outfiles[0][ONAMEPOS]) | |
| 365 | 370 |
| 366 def clpositional(self): | 371 def clpositional(self): |
| 367 # inputs in order then params | 372 # inputs in order then params |
| 368 aCL = self.cl.append | 373 aCL = self.cl.append |
| 369 for (o_v, k, v, koverride) in self.clsuffix: | 374 for (k, v, koverride) in self.clsuffix: |
| 370 if " " in v: | 375 if " " in v: |
| 371 aCL("%s" % v) | 376 aCL("%s" % v) |
| 372 else: | 377 else: |
| 373 aCL(v) | 378 aCL(v) |
| 374 aXCL = self.xmlcl.append | 379 aXCL = self.xmlcl.append |
| 375 for (o_v, k, v, koverride) in self.xclsuffix: | 380 for (k, v, koverride) in self.xclsuffix: |
| 376 aXCL(v) | 381 aXCL(v) |
| 377 if self.lastxclredirect: | 382 if self.lastxclredirect: |
| 378 aXCL(self.lastxclredirect[0]) | 383 aXCL(self.lastxclredirect[0]) |
| 379 aXCL(self.lastxclredirect[1]) | 384 aXCL(self.lastxclredirect[1]) |
| 380 | 385 |
| 382 """argparse style""" | 387 """argparse style""" |
| 383 aCL = self.cl.append | 388 aCL = self.cl.append |
| 384 aXCL = self.xmlcl.append | 389 aXCL = self.xmlcl.append |
| 385 # inputs then params in argparse named form | 390 # inputs then params in argparse named form |
| 386 | 391 |
| 387 for (o_v, k, v, koverride) in self.xclsuffix: | 392 for (k, v, koverride) in self.xclsuffix: |
| 388 if koverride > "": | 393 if koverride > "": |
| 389 k = koverride | 394 k = koverride |
| 390 elif len(k.strip()) == 1: | 395 elif len(k.strip()) == 1: |
| 391 k = "-%s" % k | 396 k = "-%s" % k |
| 392 else: | 397 else: |
| 393 k = "--%s" % k | 398 k = "--%s" % k |
| 394 aXCL(k) | 399 aXCL(k) |
| 395 aXCL(v) | 400 aXCL(v) |
| 396 for (o_v, k, v, koverride) in self.clsuffix: | 401 for (k, v, koverride) in self.clsuffix: |
| 397 if koverride > "": | 402 if koverride > "": |
| 398 k = koverride | 403 k = koverride |
| 399 elif len(k.strip()) == 1: | 404 elif len(k.strip()) == 1: |
| 400 k = "-%s" % k | 405 k = "-%s" % k |
| 401 else: | 406 else: |
| 412 ndash = 1 | 417 ndash = 1 |
| 413 return ndash | 418 return ndash |
| 414 | 419 |
| 415 def doXMLparam(self): | 420 def doXMLparam(self): |
| 416 """flake8 made me do this...""" | 421 """flake8 made me do this...""" |
| 417 for ( | 422 for p in self.outfiles: |
| 418 p | 423 newname = p["name"] |
| 419 ) in ( | 424 newfmt = p["format"] |
| 420 self.outfiles | 425 newcl = p["CL"] |
| 421 ): # --output_files "$otab.history_name~~~$otab.history_format~~~$otab.history_CL~~~$otab.history_test" | 426 test = p["test"] |
| 422 newname, newfmt, newcl, test, oldcl = p | 427 oldcl = p["origCL"] |
| 423 test = test.strip() | 428 test = test.strip() |
| 424 ndash = self.getNdash(newcl) | 429 ndash = self.getNdash(newcl) |
| 425 aparm = gxtp.OutputData( | 430 aparm = gxtp.OutputData( |
| 426 name=newname, format=newfmt, num_dashes=ndash, label=newname | 431 name=newname, format=newfmt, num_dashes=ndash, label=newname |
| 427 ) | 432 ) |
| 462 value="%s_sample" % newname, | 467 value="%s_sample" % newname, |
| 463 compare=c, | 468 compare=c, |
| 464 delta=delta, | 469 delta=delta, |
| 465 delta_frac=delta_frac, | 470 delta_frac=delta_frac, |
| 466 ) | 471 ) |
| 472 else: | |
| 473 c = test | |
| 474 tp = gxtp.TestOutput( | |
| 475 name=newname, | |
| 476 value="%s_sample" % newname, | |
| 477 compare=c, | |
| 478 ) | |
| 467 self.testparam.append(tp) | 479 self.testparam.append(tp) |
| 468 for p in self.infiles: | 480 for p in self.infiles: |
| 469 newname = p[ICLPOS] | 481 newname = p["infilename"] |
| 470 newfmt = p[IFMTPOS] | 482 newfmt = p["format"] |
| 471 ndash = self.getNdash(newname) | 483 ndash = self.getNdash(newname) |
| 472 if not len(p[ILABPOS]) > 0: | 484 if not len(p["label"]) > 0: |
| 473 alab = p[ICLPOS] | 485 alab = p["CL"] |
| 474 else: | 486 else: |
| 475 alab = p[ILABPOS] | 487 alab = p["label"] |
| 476 aninput = gxtp.DataParam( | 488 aninput = gxtp.DataParam( |
| 477 newname, | 489 newname, |
| 478 optional=False, | 490 optional=False, |
| 479 label=alab, | 491 label=alab, |
| 480 help=p[IHELPOS], | 492 help=p["help"], |
| 481 format=newfmt, | 493 format=newfmt, |
| 482 multiple=False, | 494 multiple=False, |
| 483 num_dashes=ndash, | 495 num_dashes=ndash, |
| 484 ) | 496 ) |
| 485 aninput.positional = self.is_positional | 497 aninput.positional = self.is_positional |
| 498 if self.is_positional: | |
| 499 if p["origCL"].upper() == "STDIN": | |
| 500 aparm.positional = 9999998 | |
| 501 aparm.command_line_override = "> $%s" % newname | |
| 502 else: | |
| 503 aparm.positional = int(p["origCL"]) | |
| 504 aparm.command_line_override = "$%s" % newname | |
| 486 self.tinputs.append(aninput) | 505 self.tinputs.append(aninput) |
| 487 tparm = gxtp.TestParam(name=newname, value="%s_sample" % newname) | 506 tparm = gxtp.TestParam(name=newname, value="%s_sample" % newname) |
| 488 self.testparam.append(tparm) | 507 self.testparam.append(tparm) |
| 489 for p in self.addpar: | 508 for p in self.addpar: |
| 490 ( | 509 newname = p["name"] |
| 491 newname, | 510 newval = p["value"] |
| 492 newval, | 511 newlabel = p["label"] |
| 493 newlabel, | 512 newhelp = p["help"] |
| 494 newhelp, | 513 newtype = p["type"] |
| 495 newtype, | 514 newcl = p["CL"] |
| 496 newcl, | 515 oldcl = p["origCL"] |
| 497 override, | |
| 498 oldcl, | |
| 499 ) = p | |
| 500 if not len(newlabel) > 0: | 516 if not len(newlabel) > 0: |
| 501 newlabel = newname | 517 newlabel = newname |
| 502 ndash = self.getNdash(newname) | 518 ndash = self.getNdash(newname) |
| 503 if newtype == "text": | 519 if newtype == "text": |
| 504 aparm = gxtp.TextParam( | 520 aparm = gxtp.TextParam( |
| 522 label=newname, | 538 label=newname, |
| 523 help=newhelp, | 539 help=newhelp, |
| 524 value=newval, | 540 value=newval, |
| 525 num_dashes=ndash, | 541 num_dashes=ndash, |
| 526 ) | 542 ) |
| 543 elif newtype == "boolean": | |
| 544 aparm = gxtp.BooleanParam( | |
| 545 newname, | |
| 546 label=newname, | |
| 547 help=newhelp, | |
| 548 value=newval, | |
| 549 num_dashes=ndash, | |
| 550 ) | |
| 527 else: | 551 else: |
| 528 raise ValueError( | 552 raise ValueError( |
| 529 'Unrecognised parameter type "%s" for\ | 553 'Unrecognised parameter type "%s" for\ |
| 530 additional parameter %s in makeXML' | 554 additional parameter %s in makeXML' |
| 531 % (newtype, newname) | 555 % (newtype, newname) |
| 534 if self.is_positional: | 558 if self.is_positional: |
| 535 aparm.positional = int(oldcl) | 559 aparm.positional = int(oldcl) |
| 536 self.tinputs.append(aparm) | 560 self.tinputs.append(aparm) |
| 537 tparm = gxtp.TestParam(newname, value=newval) | 561 tparm = gxtp.TestParam(newname, value=newval) |
| 538 self.testparam.append(tparm) | 562 self.testparam.append(tparm) |
| 563 for p in self.selpar: | |
| 564 newname = p["name"] | |
| 565 newval = p["value"] | |
| 566 newlabel = p["label"] | |
| 567 newhelp = p["help"] | |
| 568 newtype = p["type"] | |
| 569 newcl = p["CL"] | |
| 570 if not len(newlabel) > 0: | |
| 571 newlabel = newname | |
| 572 ndash = self.getNdash(newname) | |
| 573 if newtype == "selecttext": | |
| 574 newtext = p["texts"] | |
| 575 aparm = gxtp.SelectParam( | |
| 576 newname, | |
| 577 label=newlabel, | |
| 578 help=newhelp, | |
| 579 num_dashes=ndash, | |
| 580 ) | |
| 581 for i in range(len(newval)): | |
| 582 anopt = gxtp.SelectOption( | |
| 583 value=newval[i], | |
| 584 text=newtext[i], | |
| 585 ) | |
| 586 aparm.append(anopt) | |
| 587 aparm.positional = self.is_positional | |
| 588 if self.is_positional: | |
| 589 aparm.positional = int(newcl) | |
| 590 self.tinputs.append(aparm) | |
| 591 tparm = gxtp.TestParam(newname, value=newval) | |
| 592 self.testparam.append(tparm) | |
| 593 else: | |
| 594 raise ValueError( | |
| 595 'Unrecognised parameter type "%s" for\ | |
| 596 selecttext parameter %s in makeXML' | |
| 597 % (newtype, newname) | |
| 598 ) | |
| 599 for p in self.collections: | |
| 600 newkind = p["kind"] | |
| 601 newname = p["name"] | |
| 602 newlabel = p["label"] | |
| 603 newdisc = p["discover"] | |
| 604 collect = gxtp.OutputCollection(newname, label=newlabel, type=newkind) | |
| 605 disc = gxtp.DiscoverDatasets( | |
| 606 pattern=newdisc, directory=f"{newname}", visible="false" | |
| 607 ) | |
| 608 collect.append(disc) | |
| 609 self.toutputs.append(collect) | |
| 610 tparm = gxtp.TestOutput(newname, ftype="pdf") | |
| 611 self.testparam.append(tparm) | |
| 539 | 612 |
| 540 def doNoXMLparam(self): | 613 def doNoXMLparam(self): |
| 541 """filter style package - stdin to stdout""" | 614 """filter style package - stdin to stdout""" |
| 542 if len(self.infiles) > 0: | 615 if len(self.infiles) > 0: |
| 543 alab = self.infiles[0][ILABPOS] | 616 alab = self.infiles[0]["label"] |
| 544 if len(alab) == 0: | 617 if len(alab) == 0: |
| 545 alab = self.infiles[0][ICLPOS] | 618 alab = self.infiles[0]["infilename"] |
| 546 max1s = ( | 619 max1s = ( |
| 547 "Maximum one input if parampass is 0 but multiple input files supplied - %s" | 620 "Maximum one input if parampass is 0 but multiple input files supplied - %s" |
| 548 % str(self.infiles) | 621 % str(self.infiles) |
| 549 ) | 622 ) |
| 550 assert len(self.infiles) == 1, max1s | 623 assert len(self.infiles) == 1, max1s |
| 551 newname = self.infiles[0][ICLPOS] | 624 newname = self.infiles[0]["infilename"] |
| 552 aninput = gxtp.DataParam( | 625 aninput = gxtp.DataParam( |
| 553 newname, | 626 newname, |
| 554 optional=False, | 627 optional=False, |
| 555 label=alab, | 628 label=alab, |
| 556 help=self.infiles[0][IHELPOS], | 629 help=self.infiles[0]["help"], |
| 557 format=self.infiles[0][IFMTPOS], | 630 format=self.infiles[0]["format"], |
| 558 multiple=False, | 631 multiple=False, |
| 559 num_dashes=0, | 632 num_dashes=0, |
| 560 ) | 633 ) |
| 561 aninput.command_line_override = "< $%s" % newname | 634 aninput.command_line_override = "< $%s" % newname |
| 562 aninput.positional = self.is_positional | 635 aninput.positional = True |
| 563 self.tinputs.append(aninput) | 636 self.tinputs.append(aninput) |
| 564 tp = gxtp.TestParam(name=newname, value="%s_sample" % newname) | 637 tp = gxtp.TestParam(name=newname, value="%s_sample" % newname) |
| 565 self.testparam.append(tp) | 638 self.testparam.append(tp) |
| 566 if len(self.outfiles) > 0: | 639 if len(self.outfiles) > 0: |
| 567 newname = self.outfiles[0][OCLPOS] | 640 newname = self.outfiles[0]["name"] |
| 568 newfmt = self.outfiles[0][OFMTPOS] | 641 newfmt = self.outfiles[0]["format"] |
| 569 anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0) | 642 anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0) |
| 570 anout.command_line_override = "> $%s" % newname | 643 anout.command_line_override = "> $%s" % newname |
| 571 anout.positional = self.is_positional | 644 anout.positional = self.is_positional |
| 572 self.toutputs.append(anout) | 645 self.toutputs.append(anout) |
| 573 tp = gxtp.TestOutput( | 646 tp = gxtp.TestOutput(name=newname, value="%s_sample" % newname) |
| 574 name=newname, value="%s_sample" % newname, | |
| 575 ) | |
| 576 self.testparam.append(tp) | 647 self.testparam.append(tp) |
| 577 | 648 |
| 578 def makeXML(self): | 649 def makeXML(self): |
| 579 """ | 650 """ |
| 580 Create a Galaxy xml tool wrapper for the new script | 651 Create a Galaxy xml tool wrapper for the new script |
| 655 if ( | 726 if ( |
| 656 self.test_override | 727 self.test_override |
| 657 ): # cannot do this inside galaxyxml as it expects lxml objects for tests | 728 ): # cannot do this inside galaxyxml as it expects lxml objects for tests |
| 658 part1 = exml.split("<tests>")[0] | 729 part1 = exml.split("<tests>")[0] |
| 659 part2 = exml.split("</tests>")[1] | 730 part2 = exml.split("</tests>")[1] |
| 660 fixed = "%s\n%s\n%s" % (part1, self.test_override, part2) | 731 fixed = "%s\n%s\n%s" % (part1, "\n".join(self.test_override), part2) |
| 661 exml = fixed | 732 exml = fixed |
| 662 # exml = exml.replace('range="1:"', 'range="1000:"') | 733 # exml = exml.replace('range="1:"', 'range="1000:"') |
| 663 xf = open("%s.xml" % self.tool_name, "w") | 734 xf = open("%s.xml" % self.tool_name, "w") |
| 664 xf.write(exml) | 735 xf.write(exml) |
| 665 xf.write("\n") | 736 xf.write("\n") |
| 666 xf.close() | 737 xf.close() |
| 667 # ready for the tarball | 738 # ready for the tarball |
| 668 | |
| 669 | 739 |
| 670 def run(self): | 740 def run(self): |
| 671 """ | 741 """ |
| 672 generate test outputs by running a command line | 742 generate test outputs by running a command line |
| 673 won't work if command or test override in play - planemo is the | 743 won't work if command or test override in play - planemo is the |
| 698 sto.close() | 768 sto.close() |
| 699 ste.close() | 769 ste.close() |
| 700 retval = subp.returncode | 770 retval = subp.returncode |
| 701 else: # work around special case - stdin and write to stdout | 771 else: # work around special case - stdin and write to stdout |
| 702 if len(self.infiles) > 0: | 772 if len(self.infiles) > 0: |
| 703 sti = open(self.infiles[0][IPATHPOS], "rb") | 773 sti = open(self.infiles[0]["name"], "rb") |
| 704 else: | 774 else: |
| 705 sti = sys.stdin | 775 sti = sys.stdin |
| 706 if len(self.outfiles) > 0: | 776 if len(self.outfiles) > 0: |
| 707 sto = open(self.outfiles[0][ONAMEPOS], "wb") | 777 sto = open(self.outfiles[0]["name"], "wb") |
| 708 else: | 778 else: |
| 709 sto = sys.stdout | 779 sto = sys.stdout |
| 710 subp = subprocess.run( | 780 subp = subprocess.run( |
| 711 self.cl, env=self.ourenv, shell=False, stdout=sto, stdin=sti | 781 self.cl, env=self.ourenv, shell=False, stdout=sto, stdin=sti |
| 712 ) | 782 ) |
| 720 os.unlink(self.elog) | 790 os.unlink(self.elog) |
| 721 if retval != 0 and err: # problem | 791 if retval != 0 and err: # problem |
| 722 sys.stderr.write(err) | 792 sys.stderr.write(err) |
| 723 logging.debug("run done") | 793 logging.debug("run done") |
| 724 return retval | 794 return retval |
| 795 | |
| 796 def shedLoad(self): | |
| 797 """ | |
| 798 use bioblend to create new repository | |
| 799 or update existing | |
| 800 | |
| 801 """ | |
| 802 if os.path.exists(self.tlog): | |
| 803 sto = open(self.tlog, "a") | |
| 804 else: | |
| 805 sto = open(self.tlog, "w") | |
| 806 | |
| 807 ts = toolshed.ToolShedInstance( | |
| 808 url=self.args.toolshed_url, | |
| 809 key=self.args.toolshed_api_key, | |
| 810 verify=False, | |
| 811 ) | |
| 812 repos = ts.repositories.get_repositories() | |
| 813 rnames = [x.get("name", "?") for x in repos] | |
| 814 rids = [x.get("id", "?") for x in repos] | |
| 815 tfcat = "ToolFactory generated tools" | |
| 816 if self.tool_name not in rnames: | |
| 817 tscat = ts.categories.get_categories() | |
| 818 cnames = [x.get("name", "?").strip() for x in tscat] | |
| 819 cids = [x.get("id", "?") for x in tscat] | |
| 820 catID = None | |
| 821 if tfcat.strip() in cnames: | |
| 822 ci = cnames.index(tfcat) | |
| 823 catID = cids[ci] | |
| 824 res = ts.repositories.create_repository( | |
| 825 name=self.args.tool_name, | |
| 826 synopsis="Synopsis:%s" % self.args.tool_desc, | |
| 827 description=self.args.tool_desc, | |
| 828 type="unrestricted", | |
| 829 remote_repository_url=self.args.toolshed_url, | |
| 830 homepage_url=None, | |
| 831 category_ids=catID, | |
| 832 ) | |
| 833 tid = res.get("id", None) | |
| 834 sto.write(f"#create_repository {self.args.tool_name} tid={tid} res={res}\n") | |
| 835 else: | |
| 836 i = rnames.index(self.tool_name) | |
| 837 tid = rids[i] | |
| 838 try: | |
| 839 res = ts.repositories.update_repository( | |
| 840 id=tid, tar_ball_path=self.newtarpath, commit_message=None | |
| 841 ) | |
| 842 sto.write(f"#update res id {id} ={res}\n") | |
| 843 except ConnectionError: | |
| 844 sto.write( | |
| 845 "####### Is the toolshed running and the API key correct? Bioblend shed upload failed\n" | |
| 846 ) | |
| 847 sto.close() | |
| 848 | |
| 849 def eph_galaxy_load(self): | |
| 850 """ | |
| 851 use ephemeris to load the new tool from the local toolshed after planemo uploads it | |
| 852 """ | |
| 853 if os.path.exists(self.tlog): | |
| 854 tout = open(self.tlog, "a") | |
| 855 else: | |
| 856 tout = open(self.tlog, "w") | |
| 857 cll = [ | |
| 858 "shed-tools", | |
| 859 "install", | |
| 860 "-g", | |
| 861 self.args.galaxy_url, | |
| 862 "--latest", | |
| 863 "-a", | |
| 864 self.args.galaxy_api_key, | |
| 865 "--name", | |
| 866 self.tool_name, | |
| 867 "--owner", | |
| 868 "fubar", | |
| 869 "--toolshed", | |
| 870 self.args.toolshed_url, | |
| 871 "--section_label", | |
| 872 "ToolFactory", | |
| 873 ] | |
| 874 tout.write("running\n%s\n" % " ".join(cll)) | |
| 875 subp = subprocess.run( | |
| 876 cll, | |
| 877 env=self.ourenv, | |
| 878 cwd=self.ourcwd, | |
| 879 shell=False, | |
| 880 stderr=tout, | |
| 881 stdout=tout, | |
| 882 ) | |
| 883 tout.write( | |
| 884 "installed %s - got retcode %d\n" % (self.tool_name, subp.returncode) | |
| 885 ) | |
| 886 tout.close() | |
| 887 return subp.returncode | |
| 888 | |
| 889 def writeShedyml(self): | |
| 890 """for planemo""" | |
| 891 yuser = self.args.user_email.split("@")[0] | |
| 892 yfname = os.path.join(self.tooloutdir, ".shed.yml") | |
| 893 yamlf = open(yfname, "w") | |
| 894 odict = { | |
| 895 "name": self.tool_name, | |
| 896 "owner": yuser, | |
| 897 "type": "unrestricted", | |
| 898 "description": self.args.tool_desc, | |
| 899 "synopsis": self.args.tool_desc, | |
| 900 "category": "TF Generated Tools", | |
| 901 } | |
| 902 yaml.dump(odict, yamlf, allow_unicode=True) | |
| 903 yamlf.close() | |
| 904 | |
| 905 def makeTool(self): | |
| 906 """write xmls and input samples into place""" | |
| 907 if self.args.parampass == 0: | |
| 908 self.doNoXMLparam() | |
| 909 else: | |
| 910 self.makeXML() | |
| 911 if self.args.script_path: | |
| 912 stname = os.path.join(self.tooloutdir, self.sfile) | |
| 913 if not os.path.exists(stname): | |
| 914 shutil.copyfile(self.sfile, stname) | |
| 915 xreal = "%s.xml" % self.tool_name | |
| 916 xout = os.path.join(self.tooloutdir, xreal) | |
| 917 shutil.copyfile(xreal, xout) | |
| 918 for p in self.infiles: | |
| 919 pth = p["name"] | |
| 920 dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) | |
| 921 shutil.copyfile(pth, dest) | |
| 922 dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) | |
| 923 shutil.copyfile(pth, dest) | |
| 924 | |
| 925 def makeToolTar(self, report_fail=False): | |
| 926 """move outputs into test-data and prepare the tarball""" | |
| 927 excludeme = "_planemo_test_report.html" | |
| 928 | |
| 929 def exclude_function(tarinfo): | |
| 930 filename = tarinfo.name | |
| 931 return None if filename.endswith(excludeme) else tarinfo | |
| 932 | |
| 933 if os.path.exists(self.tlog): | |
| 934 tout = open(self.tlog, "a") | |
| 935 else: | |
| 936 tout = open(self.tlog, "w") | |
| 937 for p in self.outfiles: | |
| 938 oname = p["name"] | |
| 939 tdest = os.path.join(self.testdir, "%s_sample" % oname) | |
| 940 src = os.path.join(self.testdir, oname) | |
| 941 if not os.path.isfile(tdest): | |
| 942 if os.path.isfile(src): | |
| 943 shutil.copyfile(src, tdest) | |
| 944 dest = os.path.join(self.repdir, "%s.sample" % (oname)) | |
| 945 shutil.copyfile(src, dest) | |
| 946 else: | |
| 947 if report_fail: | |
| 948 tout.write( | |
| 949 "###Tool may have failed - output file %s not found in testdir after planemo run %s." | |
| 950 % (tdest, self.testdir) | |
| 951 ) | |
| 952 tf = tarfile.open(self.newtarpath, "w:gz") | |
| 953 tf.add( | |
| 954 name=self.tooloutdir, | |
| 955 arcname=self.tool_name, | |
| 956 filter=exclude_function, | |
| 957 ) | |
| 958 tf.close() | |
| 959 shutil.copyfile(self.newtarpath, self.args.new_tool) | |
| 960 | |
| 961 def moveRunOutputs(self): | |
| 962 """need to move planemo or run outputs into toolfactory collection""" | |
| 963 with os.scandir(self.tooloutdir) as outs: | |
| 964 for entry in outs: | |
| 965 if not entry.is_file(): | |
| 966 continue | |
| 967 if "." in entry.name: | |
| 968 _, ext = os.path.splitext(entry.name) | |
| 969 if ext in [".tgz", ".json"]: | |
| 970 continue | |
| 971 if ext in [".yml", ".xml", ".yaml"]: | |
| 972 newname = f"{entry.name.replace('.','_')}.txt" | |
| 973 else: | |
| 974 newname = entry.name | |
| 975 else: | |
| 976 newname = f"{entry.name}.txt" | |
| 977 dest = os.path.join(self.repdir, newname) | |
| 978 src = os.path.join(self.tooloutdir, entry.name) | |
| 979 shutil.copyfile(src, dest) | |
| 980 if self.args.include_tests: | |
| 981 with os.scandir(self.testdir) as outs: | |
| 982 for entry in outs: | |
| 983 if (not entry.is_file()) or entry.name.endswith( | |
| 984 "_planemo_test_report.html" | |
| 985 ): | |
| 986 continue | |
| 987 if "." in entry.name: | |
| 988 _, ext = os.path.splitext(entry.name) | |
| 989 if ext in [".tgz", ".json"]: | |
| 990 continue | |
| 991 if ext in [".yml", ".xml", ".yaml"]: | |
| 992 newname = f"{entry.name.replace('.','_')}.txt" | |
| 993 else: | |
| 994 newname = entry.name | |
| 995 else: | |
| 996 newname = f"{entry.name}.txt" | |
| 997 dest = os.path.join(self.repdir, newname) | |
| 998 src = os.path.join(self.testdir, entry.name) | |
| 999 shutil.copyfile(src, dest) | |
| 725 | 1000 |
| 726 def copy_to_container(self, src, dest, container): | 1001 def copy_to_container(self, src, dest, container): |
| 727 """Recreate the src directory tree at dest - full path included""" | 1002 """Recreate the src directory tree at dest - full path included""" |
| 728 idir = os.getcwd() | 1003 idir = os.getcwd() |
| 729 workdir = os.path.dirname(src) | 1004 workdir = os.path.dirname(src) |
| 797 ptestpath = os.path.join(destdir, "tfout", xreal) | 1072 ptestpath = os.path.join(destdir, "tfout", xreal) |
| 798 self.copy_to_container(self.tooloutdir, destdir, container) | 1073 self.copy_to_container(self.tooloutdir, destdir, container) |
| 799 cl = "chown -R biodocker /toolfactory" | 1074 cl = "chown -R biodocker /toolfactory" |
| 800 prun(container, tout, cl, user="root") | 1075 prun(container, tout, cl, user="root") |
| 801 _ = container.exec_run(f"ls -la {destdir}") | 1076 _ = container.exec_run(f"ls -la {destdir}") |
| 802 ptestcl = f"planemo test --update_test_data --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" | 1077 ptestcl = f"planemo test --test_output {imrep} --update_test_data --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" |
| 803 try: | 1078 try: |
| 804 _ = container.exec_run(ptestcl) | 1079 _ = container.exec_run(ptestcl) |
| 805 # fails because test outputs missing but updates the test-data directory | 1080 # fails because test outputs missing but updates the test-data directory |
| 806 except Error: | 1081 except Exception: |
| 807 e = sys.exc_info()[0] | 1082 e = sys.exc_info()[0] |
| 808 tout.write(f"#### error: {e} from {ptestcl}\n") | 1083 tout.write(f"#### error: {e} from {ptestcl}\n") |
| 809 cl = f"planemo test --test_output {imrep} --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" | 1084 cl = f"planemo test --test_output {imrep} --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" |
| 810 try: | 1085 try: |
| 811 prun(container, tout, cl) | 1086 prun(container, tout, cl) |
| 812 except Error: | 1087 except Exception: |
| 813 e = sys.exc_info()[0] | 1088 e = sys.exc_info()[0] |
| 814 tout.write(f"#### error: {e} from {ptestcl}\n") | 1089 tout.write(f"#### error: {e} from {ptestcl}\n") |
| 815 testouts = tempfile.mkdtemp(suffix=None, prefix="tftemp", dir=".") | 1090 testouts = tempfile.mkdtemp(suffix=None, prefix="tftemp", dir=".") |
| 816 self.copy_from_container(destdir, testouts, container) | 1091 self.copy_from_container(destdir, testouts, container) |
| 817 src = os.path.join(testouts, "ptest") | 1092 src = os.path.join(testouts, "ptest") |
| 826 container.stop() | 1101 container.stop() |
| 827 container.remove() | 1102 container.remove() |
| 828 tvol.remove() | 1103 tvol.remove() |
| 829 shutil.rmtree(testouts) # leave for debugging | 1104 shutil.rmtree(testouts) # leave for debugging |
| 830 | 1105 |
| 831 def shedLoad(self): | |
| 832 """ | |
| 833 use bioblend to create new repository | |
| 834 or update existing | |
| 835 | |
| 836 """ | |
| 837 if os.path.exists(self.tlog): | |
| 838 sto = open(self.tlog, "a") | |
| 839 else: | |
| 840 sto = open(self.tlog, "w") | |
| 841 | |
| 842 ts = toolshed.ToolShedInstance( | |
| 843 url=self.args.toolshed_url, key=self.args.toolshed_api_key, verify=False | |
| 844 ) | |
| 845 repos = ts.repositories.get_repositories() | |
| 846 rnames = [x.get("name", "?") for x in repos] | |
| 847 rids = [x.get("id", "?") for x in repos] | |
| 848 tfcat = "ToolFactory generated tools" | |
| 849 if self.tool_name not in rnames: | |
| 850 tscat = ts.categories.get_categories() | |
| 851 cnames = [x.get("name", "?").strip() for x in tscat] | |
| 852 cids = [x.get("id", "?") for x in tscat] | |
| 853 catID = None | |
| 854 if tfcat.strip() in cnames: | |
| 855 ci = cnames.index(tfcat) | |
| 856 catID = cids[ci] | |
| 857 res = ts.repositories.create_repository( | |
| 858 name=self.args.tool_name, | |
| 859 synopsis="Synopsis:%s" % self.args.tool_desc, | |
| 860 description=self.args.tool_desc, | |
| 861 type="unrestricted", | |
| 862 remote_repository_url=self.args.toolshed_url, | |
| 863 homepage_url=None, | |
| 864 category_ids=catID, | |
| 865 ) | |
| 866 tid = res.get("id", None) | |
| 867 sto.write(f"#create_repository {self.args.tool_name} tid={tid} res={res}\n") | |
| 868 else: | |
| 869 i = rnames.index(self.tool_name) | |
| 870 tid = rids[i] | |
| 871 try: | |
| 872 res = ts.repositories.update_repository( | |
| 873 id=tid, tar_ball_path=self.newtarpath, commit_message=None | |
| 874 ) | |
| 875 sto.write(f"#update res id {id} ={res}\n") | |
| 876 except ConnectionError: | |
| 877 sto.write( | |
| 878 "####### Is the toolshed running and the API key correct? Bioblend shed upload failed\n" | |
| 879 ) | |
| 880 sto.close() | |
| 881 | |
| 882 def eph_galaxy_load(self): | |
| 883 """ | |
| 884 use ephemeris to load the new tool from the local toolshed after planemo uploads it | |
| 885 """ | |
| 886 if os.path.exists(self.tlog): | |
| 887 tout = open(self.tlog, "a") | |
| 888 else: | |
| 889 tout = open(self.tlog, "w") | |
| 890 cll = [ | |
| 891 "shed-tools", | |
| 892 "install", | |
| 893 "-g", | |
| 894 self.args.galaxy_url, | |
| 895 "--latest", | |
| 896 "-a", | |
| 897 self.args.galaxy_api_key, | |
| 898 "--name", | |
| 899 self.tool_name, | |
| 900 "--owner", | |
| 901 "fubar", | |
| 902 "--toolshed", | |
| 903 self.args.toolshed_url, | |
| 904 "--section_label", | |
| 905 "ToolFactory", | |
| 906 ] | |
| 907 tout.write("running\n%s\n" % " ".join(cll)) | |
| 908 subp = subprocess.run( | |
| 909 cll, env=self.ourenv, cwd=self.ourcwd, shell=False, stderr=tout, stdout=tout | |
| 910 ) | |
| 911 tout.write( | |
| 912 "installed %s - got retcode %d\n" % (self.tool_name, subp.returncode) | |
| 913 ) | |
| 914 tout.close() | |
| 915 return subp.returncode | |
| 916 | |
| 917 def writeShedyml(self): | |
| 918 """for planemo""" | |
| 919 yuser = self.args.user_email.split("@")[0] | |
| 920 yfname = os.path.join(self.tooloutdir, ".shed.yml") | |
| 921 yamlf = open(yfname, "w") | |
| 922 odict = { | |
| 923 "name": self.tool_name, | |
| 924 "owner": yuser, | |
| 925 "type": "unrestricted", | |
| 926 "description": self.args.tool_desc, | |
| 927 "synopsis": self.args.tool_desc, | |
| 928 "category": "TF Generated Tools", | |
| 929 } | |
| 930 yaml.dump(odict, yamlf, allow_unicode=True) | |
| 931 yamlf.close() | |
| 932 | |
| 933 def makeTool(self): | |
| 934 """write xmls and input samples into place""" | |
| 935 self.makeXML() | |
| 936 if self.args.script_path: | |
| 937 stname = os.path.join(self.tooloutdir, "%s" % (self.sfile)) | |
| 938 if not os.path.exists(stname): | |
| 939 shutil.copyfile(self.sfile, stname) | |
| 940 xreal = "%s.xml" % self.tool_name | |
| 941 xout = os.path.join(self.tooloutdir, xreal) | |
| 942 shutil.copyfile(xreal, xout) | |
| 943 for p in self.infiles: | |
| 944 pth = p[IPATHPOS] | |
| 945 dest = os.path.join(self.testdir, "%s_sample" % p[ICLPOS]) | |
| 946 shutil.copyfile(pth, dest) | |
| 947 | |
| 948 def makeToolTar(self): | |
| 949 """move outputs into test-data and prepare the tarball""" | |
| 950 excludeme = "_planemo_test_report.html" | |
| 951 | |
| 952 def exclude_function(tarinfo): | |
| 953 filename = tarinfo.name | |
| 954 return None if filename.endswith(excludeme) else tarinfo | |
| 955 | |
| 956 if os.path.exists(self.tlog): | |
| 957 tout = open(self.tlog, "a") | |
| 958 else: | |
| 959 tout = open(self.tlog, "w") | |
| 960 for p in self.outfiles: | |
| 961 oname = p[ONAMEPOS] | |
| 962 tdest = os.path.join(self.testdir, "%s_sample" % oname) | |
| 963 if not os.path.isfile(tdest): | |
| 964 src = os.path.join(self.testdir, oname) | |
| 965 if os.path.isfile(src): | |
| 966 shutil.copyfile(src, tdest) | |
| 967 dest = os.path.join(self.repdir, "%s.sample" % (oname)) | |
| 968 shutil.copyfile(src, dest) | |
| 969 else: | |
| 970 tout.write( | |
| 971 "###Output file %s not found in testdir %s. This is normal during the first Planemo run that generates test outputs" | |
| 972 % (tdest, self.testdir) | |
| 973 ) | |
| 974 tf = tarfile.open(self.newtarpath, "w:gz") | |
| 975 tf.add(name=self.tooloutdir, arcname=self.tool_name, filter=exclude_function) | |
| 976 tf.close() | |
| 977 shutil.copyfile(self.newtarpath, self.args.new_tool) | |
| 978 | |
| 979 def moveRunOutputs(self): | |
| 980 """need to move planemo or run outputs into toolfactory collection""" | |
| 981 with os.scandir(self.tooloutdir) as outs: | |
| 982 for entry in outs: | |
| 983 if not entry.is_file(): | |
| 984 continue | |
| 985 if "." in entry.name: | |
| 986 nayme, ext = os.path.splitext(entry.name) | |
| 987 if ext in [".yml", ".xml", ".json", ".yaml"]: | |
| 988 ext = f"{ext}.txt" | |
| 989 else: | |
| 990 ext = ".txt" | |
| 991 ofn = "%s%s" % (entry.name.replace(".", "_"), ext) | |
| 992 dest = os.path.join(self.repdir, ofn) | |
| 993 src = os.path.join(self.tooloutdir, entry.name) | |
| 994 shutil.copyfile(src, dest) | |
| 995 with os.scandir(self.testdir) as outs: | |
| 996 for entry in outs: | |
| 997 if ( | |
| 998 (not entry.is_file()) | |
| 999 or entry.name.endswith("_sample") | |
| 1000 or entry.name.endswith("_planemo_test_report.html") | |
| 1001 ): | |
| 1002 continue | |
| 1003 if "." in entry.name: | |
| 1004 nayme, ext = os.path.splitext(entry.name) | |
| 1005 else: | |
| 1006 ext = ".txt" | |
| 1007 newname = f"{entry.name}{ext}" | |
| 1008 dest = os.path.join(self.repdir, newname) | |
| 1009 src = os.path.join(self.testdir, entry.name) | |
| 1010 shutil.copyfile(src, dest) | |
| 1011 | |
| 1012 | 1106 |
| 1013 def main(): | 1107 def main(): |
| 1014 """ | 1108 """ |
| 1015 This is a Galaxy wrapper. It expects to be called by a special purpose tool.xml | 1109 This is a Galaxy wrapper. |
| 1110 It expects to be called by a special purpose tool.xml | |
| 1016 | 1111 |
| 1017 """ | 1112 """ |
| 1018 parser = argparse.ArgumentParser() | 1113 parser = argparse.ArgumentParser() |
| 1019 a = parser.add_argument | 1114 a = parser.add_argument |
| 1020 a("--script_path", default=None) | 1115 a("--script_path", default=None) |
| 1034 a("--tool_version", default=None) | 1129 a("--tool_version", default=None) |
| 1035 a("--citations", default=None) | 1130 a("--citations", default=None) |
| 1036 a("--command_override", default=None) | 1131 a("--command_override", default=None) |
| 1037 a("--test_override", default=None) | 1132 a("--test_override", default=None) |
| 1038 a("--additional_parameters", action="append", default=[]) | 1133 a("--additional_parameters", action="append", default=[]) |
| 1134 a("--selecttext_parameters", action="append", default=[]) | |
| 1039 a("--edit_additional_parameters", action="store_true", default=False) | 1135 a("--edit_additional_parameters", action="store_true", default=False) |
| 1040 a("--parampass", default="positional") | 1136 a("--parampass", default="positional") |
| 1041 a("--tfout", default="./tfout") | 1137 a("--tfout", default="./tfout") |
| 1042 a("--new_tool", default="new_tool") | 1138 a("--new_tool", default="new_tool") |
| 1043 a("--galaxy_url", default="http://localhost:8080") | 1139 a("--galaxy_url", default="http://localhost:8080") |
| 1044 a("--toolshed_url", default="http://localhost:9009") | 1140 a("--toolshed_url", default="http://localhost:9009") |
| 1045 # make sure this is identical to tool_sheds_conf.xml localhost != 127.0.0.1 so validation fails | 1141 # make sure this is identical to tool_sheds_conf.xml |
| 1142 # localhost != 127.0.0.1 so validation fails | |
| 1046 a("--toolshed_api_key", default="fakekey") | 1143 a("--toolshed_api_key", default="fakekey") |
| 1047 a("--galaxy_api_key", default="fakekey") | 1144 a("--galaxy_api_key", default="fakekey") |
| 1048 a("--galaxy_root", default="/galaxy-central") | 1145 a("--galaxy_root", default="/galaxy-central") |
| 1049 a("--galaxy_venv", default="/galaxy_venv") | 1146 a("--galaxy_venv", default="/galaxy_venv") |
| 1147 a("--collection", action="append", default=[]) | |
| 1148 a("--include_tests", default=False, action="store_true") | |
| 1050 args = parser.parse_args() | 1149 args = parser.parse_args() |
| 1051 assert not args.bad_user, ( | 1150 assert not args.bad_user, ( |
| 1052 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file' | 1151 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file' |
| 1053 % (args.bad_user, args.bad_user) | 1152 % (args.bad_user, args.bad_user) |
| 1054 ) | 1153 ) |
| 1055 assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq" | 1154 assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq" |
| 1056 assert ( | 1155 assert ( |
| 1057 args.sysexe or args.packages | 1156 args.sysexe or args.packages |
| 1058 ), "## Tool Factory wrapper expects an interpreter or an executable package" | 1157 ), "## Tool Factory wrapper expects an interpreter or an executable package" |
| 1059 args.input_files = [x.replace('"', "").replace("'", "") for x in args.input_files] | |
| 1060 # remove quotes we need to deal with spaces in CL params | |
| 1061 for i, x in enumerate(args.additional_parameters): | |
| 1062 args.additional_parameters[i] = args.additional_parameters[i].replace('"', "") | |
| 1063 r = ScriptRunner(args) | 1158 r = ScriptRunner(args) |
| 1064 r.writeShedyml() | 1159 r.writeShedyml() |
| 1065 r.makeTool() | 1160 r.makeTool() |
| 1066 if args.make_Tool == "generate": | 1161 if args.make_Tool == "generate": |
| 1067 _ = r.run() # for testing toolfactory itself | 1162 _ = r.run() # for testing toolfactory itself |
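
The central change visible in this comparison is that the new revision stops splitting `~~~`-delimited strings into positional fields (IPATHPOS, ICLPOS and friends) and instead expects each repeated `--input_files`, `--output_files`, `--additional_parameters`, `--selecttext_parameters` and `--collection` argument to be a JSON dictionary parsed with `json.loads`. The sketch below is illustrative only: the key names are the ones read by the new code in this diff, but the file names and values are hypothetical, and the authoritative schema lives in the generating ToolFactory tool XML, which is not part of this changeset.

```python
import json

# Illustrative sketch only: building the JSON-style arguments that
# rgToolFactory2.py @ 6:482386d6cc43 parses with json.loads().
# Key names appear in the new revision; values are hypothetical.
an_input = {
    "name": "input1.tsv",   # history file path (read in run()/makeTool())
    "CL": "1",              # ordinal for positional parampass, or an argparse name
    "format": "tabular",
    "label": "Input table",
    "help": "Tab separated input",
}
an_output = {
    "name": "outfile",
    "format": "tabular",
    "CL": "2",
    "test": "diff",         # compare type for the generated <test>; exact syntax assumed
}
a_collection = {
    "kind": "list",                  # becomes the OutputCollection type
    "name": "plots",
    "label": "Plots",
    "discover": "__name_and_ext__",  # DiscoverDatasets pattern; value here is a guess
}

cl = [
    "python", "rgToolFactory2.py",
    "--parampass", "positional",
    "--tool_name", "example_tool",
    "--input_files", json.dumps(an_input),    # one JSON dict per repeated flag
    "--output_files", json.dumps(an_output),
    "--collection", json.dumps(a_collection),
]
# Other required flags (e.g. --packages or --sysexe) are omitted here.
print(" ".join(cl))
```

Passing each table row as a JSON dictionary also removes the need for the quote-stripping workaround the previous `main()` applied to `--input_files` and `--additional_parameters` to cope with spaces in command-line values.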
