Mercurial > repos > shellac > sam_consensus_v3
comparison env/lib/python3.9/site-packages/gxformat2/schema/v19_09.py @ 0:4f3585e2f14b draft default tip
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author | shellac |
---|---|
date | Mon, 22 Mar 2021 18:12:50 +0000 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:4f3585e2f14b |
---|---|
1 # | |
2 # This file was autogenerated using schema-salad-tool --codegen=python | |
3 # The code itself is released under the Apache 2.0 license and the help text is | |
4 # subject to the license of the original schema. | |
5 # | |
6 # type: ignore | |
7 import copy | |
8 import os | |
9 import re | |
10 import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 | |
11 from io import StringIO | |
12 from typing import ( | |
13 Any, | |
14 Dict, | |
15 List, | |
16 MutableMapping, | |
17 MutableSequence, | |
18 Optional, | |
19 Sequence, | |
20 Tuple, | |
21 Type, | |
22 Union, | |
23 ) | |
24 from urllib.parse import quote, urlsplit, urlunsplit | |
25 from urllib.request import pathname2url | |
26 | |
27 from ruamel import yaml | |
28 from ruamel.yaml.comments import CommentedMap | |
29 from schema_salad.exceptions import SchemaSaladException, ValidationException | |
30 from schema_salad.fetcher import DefaultFetcher, Fetcher | |
31 from schema_salad.sourceline import SourceLine, add_lc_filename | |
32 | |
# Vocabulary mappings shared by all generated loaders: short term -> absolute
# IRI and the reverse.  Per-document $namespaces entries are layered on top of
# these copies inside LoadingOptions.__init__.
_vocab = {}  # type: Dict[str, str]
_rvocab = {}  # type: Dict[str, str]
35 | |
36 | |
class Savable(object):
    """Base interface for all generated schema types that can round-trip
    between parsed documents and Python objects."""

    @classmethod
    def fromDoc(cls, _doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Savable
        """Construct an instance from a parsed document fragment; overridden
        by every concrete subclass."""
        pass

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, str]
        """Convert this object back into a plain serializable mapping;
        overridden by every concrete subclass."""
        pass
47 | |
class LoadingOptions(object):
    """Context carried through a load: the fetcher, the index of already
    parsed documents, per-document namespaces/schemas, and the active
    vocabulary maps."""

    def __init__(
        self,
        fetcher=None,  # type: Optional[Fetcher]
        namespaces=None,  # type: Optional[Dict[str, str]]
        schemas=None,  # type: Optional[Dict[str, str]]
        fileuri=None,  # type: Optional[str]
        copyfrom=None,  # type: Optional[LoadingOptions]
        original_doc=None,  # type: Optional[Any]
    ):  # type: (...) -> None
        self.idx = {}  # type: Dict[str, Dict[str, Any]]
        self.fileuri = fileuri  # type: Optional[str]
        self.namespaces = namespaces
        self.schemas = schemas
        self.original_doc = original_doc
        if copyfrom is not None:
            # Inherit anything not explicitly overridden from the parent
            # options; the idx cache is intentionally shared, not copied.
            self.idx = copyfrom.idx
            if fetcher is None:
                fetcher = copyfrom.fetcher
            if fileuri is None:
                self.fileuri = copyfrom.fileuri
            if namespaces is None:
                self.namespaces = copyfrom.namespaces
            if schemas is None:
                self.schemas = copyfrom.schemas

        if fetcher is None:
            import requests
            from cachecontrol.wrapper import CacheControl
            from cachecontrol.caches import FileCache

            # Pick a per-user HTTP cache directory, falling back to /tmp.
            if "HOME" in os.environ:
                cache_dir = os.path.join(os.environ["HOME"], ".cache", "salad")
            elif "TMPDIR" in os.environ:
                cache_dir = os.path.join(os.environ["TMPDIR"], ".cache", "salad")
            else:
                # Bug fix: previously FileCache("/tmp", ".cache", "salad")
                # passed ".cache" and "salad" as FileCache's ``forever`` and
                # ``filemode`` arguments instead of joining them into a path.
                cache_dir = os.path.join("/tmp", ".cache", "salad")
            session = CacheControl(
                requests.Session(), cache=FileCache(cache_dir)
            )
            self.fetcher = DefaultFetcher({}, session)  # type: Fetcher
        else:
            self.fetcher = fetcher

        self.vocab = _vocab
        self.rvocab = _rvocab

        if namespaces is not None:
            # Extend (copies of) the module-level vocabulary with
            # document-local prefixes so the globals stay untouched.
            self.vocab = self.vocab.copy()
            self.rvocab = self.rvocab.copy()
            for k, v in namespaces.items():
                self.vocab[k] = v
                self.rvocab[v] = k
110 | |
111 | |
def load_field(val, fieldtype, baseuri, loadingOptions):
    # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any
    """Resolve any $import/$include directive in *val*, then load the
    (possibly replaced) value with *fieldtype*.

    $import loads and parses another document; $include substitutes raw text.
    """
    if isinstance(val, MutableMapping):
        if "$import" in val:
            if loadingOptions.fileuri is None:
                raise SchemaSaladException("Cannot load $import without fileuri")
            return _document_load_by_url(
                fieldtype,
                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]),
                loadingOptions,
            )
        elif "$include" in val:
            if loadingOptions.fileuri is None:
                # Fixed copy-paste defect: this branch handles $include, but
                # the message previously said "$import".
                raise SchemaSaladException("Cannot load $include without fileuri")
            val = loadingOptions.fetcher.fetch_text(
                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"])
            )
    return fieldtype.load(val, baseuri, loadingOptions)
130 | |
131 | |
# Result type produced by save(): plain dicts/lists/None ready for YAML/JSON
# serialization.
save_type = Union[Dict[str, str], List[Union[Dict[str, str], List[Any], None]], None]
133 | |
134 | |
def save(
    val,  # type: Optional[Union[Savable, MutableSequence[Savable]]]
    top=True,  # type: bool
    base_url="",  # type: str
    relative_uris=True,  # type: bool
):  # type: (...) -> save_type
    """Recursively convert *val* — Savable objects, sequences, mappings or
    scalars — into a plain serializable structure.  Only the outermost call
    keeps ``top`` set; nested values are always saved with ``top=False``."""
    if isinstance(val, Savable):
        return val.save(top=top, base_url=base_url, relative_uris=relative_uris)
    if isinstance(val, MutableSequence):
        return [
            save(item, top=False, base_url=base_url, relative_uris=relative_uris)
            for item in val
        ]
    if isinstance(val, MutableMapping):
        return {
            key: save(item, top=False, base_url=base_url, relative_uris=relative_uris)
            for key, item in val.items()
        }
    # Scalars (str, int, None, ...) pass through unchanged.
    return val
157 | |
158 | |
def expand_url(
    url,  # type: str
    base_url,  # type: str
    loadingOptions,  # type: LoadingOptions
    scoped_id=False,  # type: bool
    vocab_term=False,  # type: bool
    scoped_ref=None,  # type: Optional[int]
):
    # type: (...) -> str
    """Expand *url* against *base_url*, applying vocabulary prefixes and the
    scoped-identifier / scoped-reference rules of schema-salad.

    JSON-LD keywords and known vocabulary terms are returned untouched;
    otherwise the result is an absolute URI (mapped back to a vocabulary
    term when ``vocab_term`` is set).
    """
    # JSON-LD keywords pass through unchanged.
    if url in ("@id", "@type"):
        return url

    if vocab_term and url in loadingOptions.vocab:
        return url

    # Replace a known "prefix:" with its namespace IRI.
    if loadingOptions.vocab and ":" in url:
        prefix, _, remainder = url.partition(":")
        if prefix in loadingOptions.vocab:
            url = loadingOptions.vocab[prefix] + remainder

    parts = urlsplit(url)

    already_absolute = bool(parts.scheme) and parts.scheme in ["http", "https", "file"]
    if already_absolute or url.startswith("$(") or url.startswith("${"):
        # Absolute URIs and parameter references are left as-is.
        pass
    elif scoped_id and not parts.fragment:
        # Scoped identifier: nest under the base document's fragment.
        base = urlsplit(base_url)
        if base.fragment:
            frg = base.fragment + "/" + parts.path
        else:
            frg = parts.path
        pt = base.path if base.path != "" else "/"
        url = urlunsplit((base.scheme, base.netloc, pt, base.query, frg))
    elif scoped_ref is not None and not parts.fragment:
        # Scoped reference: walk up ``scoped_ref`` fragment levels first.
        base = urlsplit(base_url)
        segments = base.fragment.split("/")
        remaining = scoped_ref
        while remaining > 0 and segments:
            segments.pop()
            remaining -= 1
        segments.append(url)
        url = urlunsplit(
            (base.scheme, base.netloc, base.path, base.query, "/".join(segments))
        )
    else:
        url = loadingOptions.fetcher.urljoin(base_url, url)

    if vocab_term:
        parts = urlsplit(url)
        if parts.scheme:
            if url in loadingOptions.rvocab:
                return loadingOptions.rvocab[url]
            else:
                raise ValidationException("Term '{}' not in vocabulary".format(url))

    return url
225 | |
226 | |
class _Loader(object):
    """Abstract interface shared by every generated field loader."""

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        """Validate *doc* and return the loaded value; subclasses override."""
        pass
231 | |
232 | |
class _AnyLoader(_Loader):
    """Accept any non-null value unchanged."""

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if doc is None:
            raise ValidationException("Expected non-null")
        return doc
239 | |
240 | |
class _PrimitiveLoader(_Loader):
    """Load a value that must already be an instance of a primitive type
    (or one of a tuple of primitive types)."""

    def __init__(self, tp):
        # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None
        self.tp = tp

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if not isinstance(doc, self.tp):
            # Bug fix: ``self.tp.__class__.__name__`` always reported "type"
            # (or "tuple"), never the expected type's actual name.
            if isinstance(self.tp, tuple):
                expected = " or ".join(t.__name__ for t in self.tp)
            else:
                expected = self.tp.__name__
            raise ValidationException(
                "Expected a {} but got {}".format(
                    expected, doc.__class__.__name__
                )
            )
        return doc

    def __repr__(self):  # type: () -> str
        return str(self.tp)
258 | |
259 | |
class _ArrayLoader(_Loader):
    """Load a list whose entries match the item loader, flattening any
    nested lists; all per-entry errors are collected before raising."""

    def __init__(self, items):
        # type: (_Loader) -> None
        self.items = items

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if not isinstance(doc, MutableSequence):
            raise ValidationException("Expected a list")
        result = []  # type: List[Any]
        problems = []  # type: List[SchemaSaladException]
        for idx, entry in enumerate(doc):
            try:
                loaded = load_field(
                    entry, _UnionLoader((self, self.items)), baseuri, loadingOptions
                )
                # A nested list (from a recursive self-match) is spliced in.
                if isinstance(loaded, MutableSequence):
                    result.extend(loaded)
                else:
                    result.append(loaded)
            except ValidationException as e:
                problems.append(e.with_sourceline(SourceLine(doc, idx, str)))
        if problems:
            raise ValidationException("", None, problems)
        return result

    def __repr__(self):  # type: () -> str
        return "array<{}>".format(self.items)
288 | |
289 | |
class _EnumLoader(_Loader):
    """Accept only values drawn from a fixed set of symbols."""

    def __init__(self, symbols):
        # type: (Sequence[str]) -> None
        self.symbols = symbols

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if doc not in self.symbols:
            raise ValidationException("Expected one of {}".format(self.symbols))
        return doc
301 | |
302 | |
class _RecordLoader(_Loader):
    """Delegate loading of a mapping to a generated Savable class."""

    def __init__(self, classtype):
        # type: (Type[Savable]) -> None
        self.classtype = classtype

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableMapping):
            return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot)
        raise ValidationException("Expected a dict")

    def __repr__(self):  # type: () -> str
        return str(self.classtype)
316 | |
317 | |
class _UnionLoader(_Loader):
    """Try each alternate loader in order, returning the first success;
    raise an aggregate error only if every alternate rejects the value."""

    def __init__(self, alternates):
        # type: (Sequence[_Loader]) -> None
        self.alternates = alternates

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        attempts = []
        for alternate in self.alternates:
            try:
                return alternate.load(doc, baseuri, loadingOptions, docRoot=docRoot)
            except ValidationException as e:
                attempts.append(
                    ValidationException(
                        "tried {} but".format(alternate.__class__.__name__), None, [e]
                    )
                )
        raise ValidationException("", None, attempts, "-")

    def __repr__(self):  # type: () -> str
        return " | ".join(str(alternate) for alternate in self.alternates)
339 | |
340 | |
class _URILoader(_Loader):
    """Expand string (or list-of-string) URIs with this loader's scoping
    options before delegating to an inner loader."""

    def __init__(self, inner, scoped_id, vocab_term, scoped_ref):
        # type: (_Loader, bool, bool, Union[int, None]) -> None
        self.inner = inner
        self.scoped_id = scoped_id
        self.vocab_term = vocab_term
        self.scoped_ref = scoped_ref

    def _expand(self, item, baseuri, loadingOptions):
        # Apply this loader's scoping/vocabulary options to one URI string.
        return expand_url(
            item,
            baseuri,
            loadingOptions,
            self.scoped_id,
            self.vocab_term,
            self.scoped_ref,
        )

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableSequence):
            doc = [self._expand(item, baseuri, loadingOptions) for item in doc]
        if isinstance(doc, str):
            doc = self._expand(doc, baseuri, loadingOptions)
        return self.inner.load(doc, baseuri, loadingOptions)
373 | |
374 | |
class _TypeDSLLoader(_Loader):
    """Expand the type-DSL shorthand before delegating to an inner loader:
    ``T[]`` becomes an array schema over T, ``T?`` a union with "null"."""

    typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$")

    def __init__(self, inner, refScope):
        # type: (_Loader, Union[int, None]) -> None
        self.inner = inner
        self.refScope = refScope

    def resolve(
        self,
        doc,  # type: str
        baseuri,  # type: str
        loadingOptions,  # type: LoadingOptions
    ):
        # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str]
        match = self.typeDSLregex.match(doc)
        if not match:
            return doc
        base_name = match.group(1)
        assert base_name is not None
        expanded = expand_url(
            base_name, baseuri, loadingOptions, False, True, self.refScope
        )
        array_form = optional_form = None
        if match.group(2):
            # "T[]": wrap the expanded type in an array schema.
            array_form = {"type": "array", "items": expanded}
        if match.group(3):
            # "T?": union of "null" with whatever we have so far.
            optional_form = ["null", array_form or expanded]
        return optional_form or array_form or expanded

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableSequence):
            merged = []  # type: List[Any]
            for entry in doc:
                if not isinstance(entry, str):
                    # Non-string entries are kept verbatim (no deduplication).
                    merged.append(entry)
                    continue
                resolved = self.resolve(entry, baseuri, loadingOptions)
                candidates = (
                    resolved
                    if isinstance(resolved, MutableSequence)
                    else [resolved]
                )
                for candidate in candidates:
                    if candidate not in merged:
                        merged.append(candidate)
            doc = merged
        elif isinstance(doc, str):
            doc = self.resolve(doc, baseuri, loadingOptions)

        return self.inner.load(doc, baseuri, loadingOptions)
435 | |
436 | |
class _IdMapLoader(_Loader):
    """Normalize a map-of-records into a list, folding each map key into the
    ``mapSubject`` field (and scalar values into ``mapPredicate``)."""

    def __init__(self, inner, mapSubject, mapPredicate):
        # type: (_Loader, str, Union[str, None]) -> None
        self.inner = inner
        self.mapSubject = mapSubject
        self.mapPredicate = mapPredicate

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableMapping):
            flattened = []  # type: List[Any]
            for key in sorted(doc.keys()):
                entry = doc[key]
                if isinstance(entry, CommentedMap):
                    # Keep ruamel line/column metadata for error reporting.
                    item = copy.copy(entry)
                    item.lc.data = entry.lc.data
                    item.lc.filename = entry.lc.filename
                    item[self.mapSubject] = key
                    flattened.append(item)
                elif isinstance(entry, MutableMapping):
                    plain = copy.copy(entry)
                    plain[self.mapSubject] = key
                    flattened.append(plain)
                elif self.mapPredicate:
                    wrapped = {self.mapPredicate: entry}
                    wrapped[self.mapSubject] = key
                    flattened.append(wrapped)
                else:
                    raise ValidationException("No mapPredicate")
            doc = flattened
        return self.inner.load(doc, baseuri, loadingOptions)
469 | |
470 | |
def _document_load(loader, doc, baseuri, loadingOptions):
    # type: (_Loader, Any, str, LoadingOptions) -> Any
    """Load a document fragment, honouring the $namespaces/$schemas/$base/
    $graph directives; string documents are fetched by URL."""
    if isinstance(doc, str):
        return _document_load_by_url(
            loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions
        )

    if isinstance(doc, MutableMapping):
        if "$namespaces" in doc or "$schemas" in doc:
            # These directives alter the loading context and are stripped
            # from the document before loading.
            loadingOptions = LoadingOptions(
                copyfrom=loadingOptions,
                namespaces=doc.get("$namespaces", None),
                schemas=doc.get("$schemas", None),
            )
            doc = {
                k: v
                for k, v in doc.items()
                if k not in ("$namespaces", "$schemas")
            }

        if "$base" in doc:
            baseuri = doc["$base"]

        if "$graph" in doc:
            return loader.load(doc["$graph"], baseuri, loadingOptions)
        return loader.load(doc, baseuri, loadingOptions, docRoot=baseuri)

    if isinstance(doc, MutableSequence):
        return loader.load(doc, baseuri, loadingOptions)

    raise ValidationException("Oops, we shouldn't be here!")
499 | |
500 | |
def _document_load_by_url(loader, url, loadingOptions):
    # type: (_Loader, str, LoadingOptions) -> Any
    """Fetch, parse and load the document at *url*.

    Parsed documents are memoized in ``loadingOptions.idx`` so each URL is
    fetched and parsed at most once per load.
    """
    if url in loadingOptions.idx:
        return _document_load(loader, loadingOptions.idx[url], url, loadingOptions)

    text = loadingOptions.fetcher.fetch_text(url)
    if isinstance(text, bytes):
        text = text.decode("utf-8")
    textIO = StringIO(text)
    textIO.name = str(url)
    result = yaml.main.round_trip_load(textIO, preserve_quotes=True)
    add_lc_filename(result, url)

    loadingOptions.idx[url] = result

    # Child documents resolve relative references against this URL.
    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url)

    return _document_load(loader, result, url, loadingOptions)
520 | |
521 | |
def file_uri(path, split_frag=False):  # type: (str, bool) -> str
    """Convert a local filesystem *path* into a ``file://`` URI, optionally
    preserving a single ``#fragment`` suffix unescaped as a fragment."""
    if path.startswith("file://"):
        return path
    frag = ""
    if split_frag:
        pathsp = path.split("#", 2)
        if len(pathsp) == 2:
            frag = "#" + quote(str(pathsp[1]))
        urlpath = pathname2url(str(pathsp[0]))
    else:
        urlpath = pathname2url(path)
    # A UNC-style "//host/..." result already carries its own slashes.
    scheme = "file:" if urlpath.startswith("//") else "file://"
    return "{}{}{}".format(scheme, urlpath, frag)
536 | |
537 | |
def prefix_url(url, namespaces):  # type: (str, Dict[str, str]) -> str
    """Replace a namespace IRI prefix of *url* with its short ``prefix:``
    form; return *url* unchanged when no namespace matches."""
    for prefix, iri in namespaces.items():
        if url.startswith(iri):
            return "{}:{}".format(prefix, url[len(iri):])
    return url
543 | |
544 | |
def save_relative_uri(uri, base_url, scoped_id, ref_scope, relative_uris):
    # type: (str, str, bool, Optional[int], bool) -> Union[str, List[str]]
    """Render *uri* relative to *base_url* when relative output is requested;
    lists are processed element-wise and non-string values fall back to
    save()."""
    if not relative_uris or uri == base_url:
        return uri
    if isinstance(uri, MutableSequence):
        return [
            save_relative_uri(entry, base_url, scoped_id, ref_scope, relative_uris)
            for entry in uri
        ]
    if isinstance(uri, str):
        urisplit = urlsplit(uri)
        basesplit = urlsplit(base_url)
        if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc:
            if urisplit.path != basesplit.path:
                # Different documents: emit a relative filesystem-style path.
                p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path))
                if urisplit.fragment:
                    p = p + "#" + urisplit.fragment
                return p

            # Same document: strip the shared fragment scope.
            basefrag = basesplit.fragment + "/"
            if ref_scope:
                segments = basefrag.split("/")
                for _ in range(ref_scope):
                    segments.pop()
                basefrag = "/".join(segments)

            if urisplit.fragment.startswith(basefrag):
                return urisplit.fragment[len(basefrag):]
            return urisplit.fragment
        return uri
    return save(uri, top=False, base_url=base_url)
580 | |
581 | |
class Documented(Savable):
    """Marker base class for schema types that carry a ``doc`` field."""
    pass
584 | |
585 | |
class RecordField(Documented):
    """
    A field of a record.
    """
    def __init__(
        self,
        name,  # type: Any
        type,  # type: Any
        doc=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        # Default to empty extension fields / fresh loading options so the
        # object is always fully initialised.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.name = name
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> RecordField
        """Parse *doc* into a RecordField, collecting every field error
        before raising a single aggregate ValidationException."""
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            # Preserve ruamel line/column metadata for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'name' in _doc:
            try:
                name = load_field(_doc.get(
                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, 'name', str),
                        [e]
                    )
                )
        else:
            name = None

        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                raise ValidationException("Missing name")
        # The field's own name becomes the base URI for nested resolution.
        baseuri = name
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        try:
            type = load_field(_doc.get(
                'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        # Unknown keys with a ":" are treated as namespaced extension fields;
        # anything else is an error.
        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `doc`, `name`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'RecordField'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(doc=doc, name=name, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialise this RecordField back into a plain mapping."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.name is not None:
            u = save_relative_uri(
                self.name,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['name'] = u

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Known field names; anything else must be a namespaced extension field.
    attrs = frozenset(['doc', 'name', 'type'])
731 | |
732 | |
class RecordSchema(Savable):
    """A record type: an optional list of RecordFields plus the literal
    ``record`` type marker."""

    def __init__(
        self,
        type,  # type: Any
        fields=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        # Default to empty extension fields / fresh loading options so the
        # object is always fully initialised.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.fields = fields
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> RecordSchema
        """Parse *doc* into a RecordSchema, collecting every field error
        before raising a single aggregate ValidationException."""
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            # Preserve ruamel line/column metadata for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'fields' in _doc:
            try:
                fields = load_field(_doc.get(
                    'fields'), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `fields` field is not valid because:",
                        SourceLine(_doc, 'fields', str),
                        [e]
                    )
                )
        else:
            fields = None
        try:
            type = load_field(_doc.get(
                'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        # Unknown keys with a ":" are treated as namespaced extension fields;
        # anything else is an error.
        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `fields`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'RecordSchema'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(fields=fields, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialise this RecordSchema back into a plain mapping."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.fields is not None:
            r['fields'] = save(
                self.fields,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Known field names; anything else must be a namespaced extension field.
    attrs = frozenset(['fields', 'type'])
842 | |
843 | |
class EnumSchema(Savable):
    """
    Define an enumerated type.

    """
    def __init__(
        self,
        symbols,  # type: Any
        type,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        # Default to empty extension fields / fresh loading options so the
        # object is always fully initialised.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.symbols = symbols
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> EnumSchema
        """Parse *doc* into an EnumSchema, collecting every field error
        before raising a single aggregate ValidationException."""
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            # Preserve ruamel line/column metadata for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            symbols = load_field(_doc.get(
                'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `symbols` field is not valid because:",
                    SourceLine(_doc, 'symbols', str),
                    [e]
                )
            )
        try:
            type = load_field(_doc.get(
                'type'), typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        # Unknown keys with a ":" are treated as namespaced extension fields;
        # anything else is an error.
        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `symbols`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'EnumSchema'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(symbols=symbols, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialise this EnumSchema back into a plain mapping."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.symbols is not None:
            u = save_relative_uri(
                self.symbols,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['symbols'] = u

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Known field names; anything else must be a namespaced extension field.
    attrs = frozenset(['symbols', 'type'])
957 | |
958 | |
class ArraySchema(Savable):
    """Schema node describing an array type (autogenerated from the
    schema-salad `ArraySchema` record)."""

    def __init__(
        self,
        items,  # type: Any
        type,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.items = items
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> ArraySchema
        """Parse and validate a raw document node into an ArraySchema.

        Field-level errors are accumulated and raised together as a single
        ValidationException so every problem is reported at once.
        """
        # Shallow copy; the lc (line/column) info is preserved so error
        # messages can point at the original source location.
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            items = load_field(_doc.get(
                'items'), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `items` field is not valid because:",
                    SourceLine(_doc, 'items', str),
                    [e]
                )
            )
        try:
            type = load_field(_doc.get(
                'type'), typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        # Keys containing ':' are namespaced extension fields; expand and
        # keep them.  Any other unrecognized key is an error.
        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `items`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'ArraySchema'", None, _errors__)
        # Snapshot the loading options with the original document attached,
        # so save() can round-trip $namespaces/$schemas later.
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(items=items, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this ArraySchema back into a YAML-ready CommentedMap."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.items is not None:
            # `items` is a URI-typed field (scoped_id=False, ref scope 2).
            u = save_relative_uri(
                self.items,
                base_url,
                False,
                2,
                relative_uris)
            if u:
                r['items'] = u

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized field names; any other key must be a namespaced extension.
    attrs = frozenset(['items', 'type'])
1068 | |
1069 | |
class Labeled(Savable):
    """Abstract marker for schema types that carry a `label` field."""
    pass
1072 | |
1073 | |
class Identified(Savable):
    """Abstract marker for schema types that carry an `id` field."""
    pass
1076 | |
1077 | |
class Parameter(Documented, Identified):
    """
    Define an input or output parameter to a process.

    """
    # Abstract base; concrete parameter classes supply the fields.
    pass
1084 | |
1085 | |
class InputParameter(Parameter):
    """Abstract base for input parameters of a process."""
    pass
1088 | |
1089 | |
class OutputParameter(Parameter):
    """Abstract base for output parameters of a process."""
    pass
1092 | |
1093 | |
class Process(Identified, Labeled, Documented):
    """

    The base executable type in CWL is the `Process` object defined by the
    document. Note that the `Process` object is abstract and cannot be
    directly executed.

    """
    pass
1103 | |
1104 | |
class HasUUID(Savable):
    """Abstract marker for schema types that carry a `uuid` field."""
    pass
1107 | |
1108 | |
class HasStepErrors(Savable):
    """Abstract marker for schema types that carry an `errors` field."""
    pass
1111 | |
1112 | |
class HasStepPosition(Savable):
    """Abstract marker for schema types that carry a `position` field."""
    pass
1115 | |
1116 | |
class StepPosition(Savable):
    """
    This field specifies the location of the step's node when rendered in the workflow editor.
    """

    # Recognized field names; any other key must be a namespaced extension.
    attrs = frozenset(['top', 'left'])

    def __init__(
        self,
        top,  # type: Any
        left,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        """Store the editor coordinates plus any extension fields."""
        self.extension_fields = (
            extension_fields if extension_fields else yaml.comments.CommentedMap()
        )
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.top = top
        self.left = left

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> StepPosition
        """Parse a raw document node into a StepPosition, collecting all
        field errors before raising a single ValidationException."""
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            # Keep line/column info so errors point at the source document.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        # Both coordinates share the same loader, so load them uniformly.
        parsed = {}
        for field_name in ('top', 'left'):
            try:
                parsed[field_name] = load_field(
                    _doc.get(field_name),
                    union_of_floattype_or_inttype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as err:
                _errors__.append(
                    ValidationException(
                        "the `%s` field is not valid because:" % field_name,
                        SourceLine(_doc, field_name, str),
                        [err],
                    )
                )

        # Namespaced keys become extension fields; anything else is invalid.
        extension_fields = yaml.comments.CommentedMap()
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if ":" in key:
                expanded = expand_url(
                    key, "", loadingOptions, scoped_id=False, vocab_term=False)
                extension_fields[expanded] = _doc[key]
            else:
                _errors__.append(
                    ValidationException(
                        "invalid field `%s`, expected one of: `top`, `left`" % (key),
                        SourceLine(_doc, key, str),
                    )
                )
                break

        if _errors__:
            raise ValidationException("Trying 'StepPosition'", None, _errors__)

        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(
            top=parsed['top'],
            left=parsed['left'],
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this StepPosition back into a YAML-ready CommentedMap."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef, value in self.extension_fields.items():
            r[prefix_url(ef, self.loadingOptions.vocab)] = value

        for field_name in ('top', 'left'):
            value = getattr(self, field_name)
            if value is not None:
                r[field_name] = save(
                    value,
                    top=False,
                    base_url=base_url,
                    relative_uris=relative_uris,
                )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r
1226 | |
1227 | |
class ReferencesTool(Savable):
    """Abstract marker for schema types that reference a Galaxy tool."""
    pass
1230 | |
1231 | |
class ToolShedRepository(Savable):
    """Identifies a Galaxy Tool Shed repository (name, owner, shed URL,
    and changeset revision) that provides a workflow step's tool."""

    def __init__(
        self,
        changeset_revision,  # type: Any
        name,  # type: Any
        owner,  # type: Any
        tool_shed,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.changeset_revision = changeset_revision
        self.name = name
        self.owner = owner
        self.tool_shed = tool_shed

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> ToolShedRepository
        """Parse and validate a raw document node into a ToolShedRepository.

        `name` is the record's identifier field: it is required (falling
        back to docRoot) and becomes the base URI for the other fields.
        """
        # Shallow copy; keep lc (line/column) info for error reporting.
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'name' in _doc:
            try:
                name = load_field(_doc.get(
                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, 'name', str),
                        [e]
                    )
                )
        else:
            name = None

        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                # Unlike id-bearing records there is no generated fallback:
                # a ToolShedRepository without a name is invalid.
                raise ValidationException("Missing name")
        # Remaining fields are resolved relative to the repository name.
        baseuri = name
        try:
            changeset_revision = load_field(_doc.get(
                'changeset_revision'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `changeset_revision` field is not valid because:",
                    SourceLine(_doc, 'changeset_revision', str),
                    [e]
                )
            )
        try:
            owner = load_field(_doc.get(
                'owner'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `owner` field is not valid because:",
                    SourceLine(_doc, 'owner', str),
                    [e]
                )
            )
        try:
            tool_shed = load_field(_doc.get(
                'tool_shed'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `tool_shed` field is not valid because:",
                    SourceLine(_doc, 'tool_shed', str),
                    [e]
                )
            )

        # Keys containing ':' are namespaced extension fields; expand and
        # keep them.  Any other unrecognized key is an error.
        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `changeset_revision`, `name`, `owner`, `tool_shed`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'ToolShedRepository'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(changeset_revision=changeset_revision, name=name, owner=owner, tool_shed=tool_shed, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this ToolShedRepository back into a YAML-ready map.

        `name` is written as a (possibly relative) URI; the other fields
        are saved relative to it, mirroring fromDoc's baseuri handling.
        """
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.name is not None:
            u = save_relative_uri(
                self.name,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['name'] = u

        if self.changeset_revision is not None:
            r['changeset_revision'] = save(
                self.changeset_revision,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        if self.owner is not None:
            r['owner'] = save(
                self.owner,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        if self.tool_shed is not None:
            r['tool_shed'] = save(
                self.tool_shed,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized field names; any other key must be a namespaced extension.
    attrs = frozenset(['changeset_revision', 'name', 'owner', 'tool_shed'])
1391 | |
1392 | |
class WorkflowInputParameter(InputParameter, HasStepPosition):
    """An input parameter of a Galaxy workflow, with optional default
    value, documentation, type, and editor position."""

    def __init__(
        self,
        type,  # type: Any
        doc=None,  # type: Any
        id=None,  # type: Any
        default=None,  # type: Any
        position=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.id = id
        self.default = default
        self.position = position
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowInputParameter
        """Parse and validate a raw document node into a
        WorkflowInputParameter, accumulating all field errors before
        raising a single ValidationException."""
        # Shallow copy; keep lc (line/column) info for error reporting.
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                # No explicit id: synthesize a blank-node identifier.
                id = "_:" + str(_uuid__.uuid4())
        # The id becomes the base URI for the remaining fields.
        baseuri = id
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        if 'default' in _doc:
            try:
                default = load_field(_doc.get(
                    'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `default` field is not valid because:",
                        SourceLine(_doc, 'default', str),
                        [e]
                    )
                )
        else:
            default = None
        if 'position' in _doc:
            try:
                position = load_field(_doc.get(
                    'position'), union_of_None_type_or_StepPositionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `position` field is not valid because:",
                        SourceLine(_doc, 'position', str),
                        [e]
                    )
                )
        else:
            position = None
        if 'type' in _doc:
            try:
                type = load_field(_doc.get(
                    'type'), typedsl_union_of_GalaxyTypeLoader_or_strtype_or_None_type_2, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `type` field is not valid because:",
                        SourceLine(_doc, 'type', str),
                        [e]
                    )
                )
        else:
            type = None

        # Keys containing ':' are namespaced extension fields; expand and
        # keep them.  Any other unrecognized key is an error.
        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `doc`, `id`, `default`, `position`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowInputParameter'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(doc=doc, id=id, default=default, position=position, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this parameter back into a YAML-ready CommentedMap.

        `id` is written as a (possibly relative) URI; all other fields
        are saved relative to it, mirroring fromDoc's baseuri handling.
        """
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.default is not None:
            r['default'] = save(
                self.default,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.position is not None:
            r['position'] = save(
                self.position,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized field names; any other key must be a namespaced extension.
    attrs = frozenset(['doc', 'id', 'default', 'position', 'type'])
1584 | |
1585 | |
class WorkflowOutputParameter(OutputParameter):
    """
    Describe an output parameter of a workflow. The parameter must be
    connected to one parameter defined in the workflow that
    will provide the value of the output parameter. It is legal to
    connect a WorkflowInputParameter to a WorkflowOutputParameter.

    """
    def __init__(
        self,
        doc=None,  # type: Any
        id=None,  # type: Any
        outputSource=None,  # type: Any
        type=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.id = id
        self.outputSource = outputSource
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowOutputParameter
        """Parse and validate a raw document node into a
        WorkflowOutputParameter, accumulating all field errors before
        raising a single ValidationException."""
        # Shallow copy; keep lc (line/column) info for error reporting.
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                # No explicit id: synthesize a blank-node identifier.
                id = "_:" + str(_uuid__.uuid4())
        # The id becomes the base URI for the remaining fields.
        baseuri = id
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        if 'outputSource' in _doc:
            try:
                outputSource = load_field(_doc.get(
                    'outputSource'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `outputSource` field is not valid because:",
                        SourceLine(_doc, 'outputSource', str),
                        [e]
                    )
                )
        else:
            outputSource = None
        if 'type' in _doc:
            try:
                type = load_field(_doc.get(
                    'type'), typedsl_union_of_None_type_or_GalaxyTypeLoader_2, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `type` field is not valid because:",
                        SourceLine(_doc, 'type', str),
                        [e]
                    )
                )
        else:
            type = None

        # Keys containing ':' are namespaced extension fields; expand and
        # keep them.  Any other unrecognized key is an error.
        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `doc`, `id`, `outputSource`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(doc=doc, id=id, outputSource=outputSource, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this parameter back into a YAML-ready CommentedMap.

        `id` is written as a (possibly relative) URI; all other fields
        are saved relative to it, mirroring fromDoc's baseuri handling.
        """
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.outputSource is not None:
            r['outputSource'] = save(
                self.outputSource,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized field names; any other key must be a namespaced extension.
    attrs = frozenset(['doc', 'id', 'outputSource', 'type'])
1761 | |
1762 | |
1763 class WorkflowStep(Identified, Labeled, Documented, HasStepPosition, ReferencesTool, HasStepErrors, HasUUID): | |
1764 """ | |
1765 This represents a non-input step a Galaxy Workflow. | |
1766 | |
1767 # A note about `state` and `tool_state` fields. | |
1768 | |
1769 Only one or the other should be specified. These are two ways to represent the "state" | |
1770 of a tool at this workflow step. Both are essentially maps from parameter names to | |
1771 parameter values. | |
1772 | |
1773 `tool_state` is much more low-level and expects a flat dictionary with each value a JSON | |
1774 dump. Nested tool structures such as conditionals and repeats should have all their values | |
1775 in the JSON dumped string. In general `tool_state` may be present in workflows exported from | |
1776 Galaxy but shouldn't be written by humans. | |
1777 | |
1778 `state` can contained a typed map. Repeat values can be represented as YAML arrays. An alternative | |
1779 to representing `state` this way is defining inputs with default values. | |
1780 | |
1781 """ | |
    def __init__(
        self,
        out,  # type: Any
        id=None,  # type: Any
        label=None,  # type: Any
        doc=None,  # type: Any
        position=None,  # type: Any
        tool_id=None,  # type: Any
        tool_shed_repository=None,  # type: Any
        tool_version=None,  # type: Any
        errors=None,  # type: Any
        uuid=None,  # type: Any
        in_=None,  # type: Any
        state=None,  # type: Any
        tool_state=None,  # type: Any
        type=None,  # type: Any
        run=None,  # type: Any
        runtime_inputs=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        """Store the step's fields verbatim.

        `in_` maps to the schema field `in` (renamed to avoid the Python
        keyword).  Only `out` is required; every other field is optional.
        Per the class docstring, `state` and `tool_state` are alternative
        representations and only one of them should be supplied.
        """
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.label = label
        self.doc = doc
        self.position = position
        self.tool_id = tool_id
        self.tool_shed_repository = tool_shed_repository
        self.tool_version = tool_version
        self.errors = errors
        self.uuid = uuid
        self.in_ = in_
        self.out = out
        self.state = state
        self.tool_state = tool_state
        self.type = type
        self.run = run
        self.runtime_inputs = runtime_inputs
1828 | |
1829 @classmethod | |
1830 def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): | |
1831 # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowStep | |
1832 | |
1833 _doc = copy.copy(doc) | |
1834 if hasattr(doc, 'lc'): | |
1835 _doc.lc.data = doc.lc.data | |
1836 _doc.lc.filename = doc.lc.filename | |
1837 _errors__ = [] | |
1838 if 'id' in _doc: | |
1839 try: | |
1840 id = load_field(_doc.get( | |
1841 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) | |
1842 except ValidationException as e: | |
1843 _errors__.append( | |
1844 ValidationException( | |
1845 "the `id` field is not valid because:", | |
1846 SourceLine(_doc, 'id', str), | |
1847 [e] | |
1848 ) | |
1849 ) | |
1850 else: | |
1851 id = None | |
1852 | |
1853 if id is None: | |
1854 if docRoot is not None: | |
1855 id = docRoot | |
1856 else: | |
1857 id = "_:" + str(_uuid__.uuid4()) | |
1858 baseuri = id | |
1859 if 'label' in _doc: | |
1860 try: | |
1861 label = load_field(_doc.get( | |
1862 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) | |
1863 except ValidationException as e: | |
1864 _errors__.append( | |
1865 ValidationException( | |
1866 "the `label` field is not valid because:", | |
1867 SourceLine(_doc, 'label', str), | |
1868 [e] | |
1869 ) | |
1870 ) | |
1871 else: | |
1872 label = None | |
1873 if 'doc' in _doc: | |
1874 try: | |
1875 doc = load_field(_doc.get( | |
1876 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) | |
1877 except ValidationException as e: | |
1878 _errors__.append( | |
1879 ValidationException( | |
1880 "the `doc` field is not valid because:", | |
1881 SourceLine(_doc, 'doc', str), | |
1882 [e] | |
1883 ) | |
1884 ) | |
1885 else: | |
1886 doc = None | |
1887 if 'position' in _doc: | |
1888 try: | |
1889 position = load_field(_doc.get( | |
1890 'position'), union_of_None_type_or_StepPositionLoader, baseuri, loadingOptions) | |
1891 except ValidationException as e: | |
1892 _errors__.append( | |
1893 ValidationException( | |
1894 "the `position` field is not valid because:", | |
1895 SourceLine(_doc, 'position', str), | |
1896 [e] | |
1897 ) | |
1898 ) | |
1899 else: | |
1900 position = None | |
1901 if 'tool_id' in _doc: | |
1902 try: | |
1903 tool_id = load_field(_doc.get( | |
1904 'tool_id'), union_of_None_type_or_strtype, baseuri, loadingOptions) | |
1905 except ValidationException as e: | |
1906 _errors__.append( | |
1907 ValidationException( | |
1908 "the `tool_id` field is not valid because:", | |
1909 SourceLine(_doc, 'tool_id', str), | |
1910 [e] | |
1911 ) | |
1912 ) | |
1913 else: | |
1914 tool_id = None | |
1915 if 'tool_shed_repository' in _doc: | |
1916 try: | |
1917 tool_shed_repository = load_field(_doc.get( | |
1918 'tool_shed_repository'), union_of_None_type_or_ToolShedRepositoryLoader, baseuri, loadingOptions) | |
1919 except ValidationException as e: | |
1920 _errors__.append( | |
1921 ValidationException( | |
1922 "the `tool_shed_repository` field is not valid because:", | |
1923 SourceLine(_doc, 'tool_shed_repository', str), | |
1924 [e] | |
1925 ) | |
1926 ) | |
1927 else: | |
1928 tool_shed_repository = None | |
1929 if 'tool_version' in _doc: | |
1930 try: | |
1931 tool_version = load_field(_doc.get( | |
1932 'tool_version'), union_of_None_type_or_strtype, baseuri, loadingOptions) | |
1933 except ValidationException as e: | |
1934 _errors__.append( | |
1935 ValidationException( | |
1936 "the `tool_version` field is not valid because:", | |
1937 SourceLine(_doc, 'tool_version', str), | |
1938 [e] | |
1939 ) | |
1940 ) | |
1941 else: | |
1942 tool_version = None | |
1943 if 'errors' in _doc: | |
1944 try: | |
1945 errors = load_field(_doc.get( | |
1946 'errors'), union_of_None_type_or_strtype, baseuri, loadingOptions) | |
1947 except ValidationException as e: | |
1948 _errors__.append( | |
1949 ValidationException( | |
1950 "the `errors` field is not valid because:", | |
1951 SourceLine(_doc, 'errors', str), | |
1952 [e] | |
1953 ) | |
1954 ) | |
1955 else: | |
1956 errors = None | |
1957 if 'uuid' in _doc: | |
1958 try: | |
1959 uuid = load_field(_doc.get( | |
1960 'uuid'), union_of_None_type_or_strtype, baseuri, loadingOptions) | |
1961 except ValidationException as e: | |
1962 _errors__.append( | |
1963 ValidationException( | |
1964 "the `uuid` field is not valid because:", | |
1965 SourceLine(_doc, 'uuid', str), | |
1966 [e] | |
1967 ) | |
1968 ) | |
1969 else: | |
1970 uuid = None | |
1971 if 'in' in _doc: | |
1972 try: | |
1973 in_ = load_field(_doc.get( | |
1974 'in'), idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader, baseuri, loadingOptions) | |
1975 except ValidationException as e: | |
1976 _errors__.append( | |
1977 ValidationException( | |
1978 "the `in` field is not valid because:", | |
1979 SourceLine(_doc, 'in', str), | |
1980 [e] | |
1981 ) | |
1982 ) | |
1983 else: | |
1984 in_ = None | |
1985 if 'out' in _doc: | |
1986 try: | |
1987 out = load_field(_doc.get( | |
1988 'out'), idmap_out_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type, baseuri, loadingOptions) | |
1989 except ValidationException as e: | |
1990 _errors__.append( | |
1991 ValidationException( | |
1992 "the `out` field is not valid because:", | |
1993 SourceLine(_doc, 'out', str), | |
1994 [e] | |
1995 ) | |
1996 ) | |
1997 else: | |
1998 out = None | |
1999 if 'state' in _doc: | |
2000 try: | |
2001 state = load_field(_doc.get( | |
2002 'state'), union_of_None_type_or_Any_type, baseuri, loadingOptions) | |
2003 except ValidationException as e: | |
2004 _errors__.append( | |
2005 ValidationException( | |
2006 "the `state` field is not valid because:", | |
2007 SourceLine(_doc, 'state', str), | |
2008 [e] | |
2009 ) | |
2010 ) | |
2011 else: | |
2012 state = None | |
2013 if 'tool_state' in _doc: | |
2014 try: | |
2015 tool_state = load_field(_doc.get( | |
2016 'tool_state'), union_of_None_type_or_Any_type, baseuri, loadingOptions) | |
2017 except ValidationException as e: | |
2018 _errors__.append( | |
2019 ValidationException( | |
2020 "the `tool_state` field is not valid because:", | |
2021 SourceLine(_doc, 'tool_state', str), | |
2022 [e] | |
2023 ) | |
2024 ) | |
2025 else: | |
2026 tool_state = None | |
2027 if 'type' in _doc: | |
2028 try: | |
2029 type = load_field(_doc.get( | |
2030 'type'), typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2, baseuri, loadingOptions) | |
2031 except ValidationException as e: | |
2032 _errors__.append( | |
2033 ValidationException( | |
2034 "the `type` field is not valid because:", | |
2035 SourceLine(_doc, 'type', str), | |
2036 [e] | |
2037 ) | |
2038 ) | |
2039 else: | |
2040 type = None | |
2041 if 'run' in _doc: | |
2042 try: | |
2043 run = load_field(_doc.get( | |
2044 'run'), uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None, baseuri, loadingOptions) | |
2045 except ValidationException as e: | |
2046 _errors__.append( | |
2047 ValidationException( | |
2048 "the `run` field is not valid because:", | |
2049 SourceLine(_doc, 'run', str), | |
2050 [e] | |
2051 ) | |
2052 ) | |
2053 else: | |
2054 run = None | |
2055 if 'runtime_inputs' in _doc: | |
2056 try: | |
2057 runtime_inputs = load_field(_doc.get( | |
2058 'runtime_inputs'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions) | |
2059 except ValidationException as e: | |
2060 _errors__.append( | |
2061 ValidationException( | |
2062 "the `runtime_inputs` field is not valid because:", | |
2063 SourceLine(_doc, 'runtime_inputs', str), | |
2064 [e] | |
2065 ) | |
2066 ) | |
2067 else: | |
2068 runtime_inputs = None | |
2069 | |
2070 extension_fields = yaml.comments.CommentedMap() | |
2071 for k in _doc.keys(): | |
2072 if k not in cls.attrs: | |
2073 if ":" in k: | |
2074 ex = expand_url(k, | |
2075 "", | |
2076 loadingOptions, | |
2077 scoped_id=False, | |
2078 vocab_term=False) | |
2079 extension_fields[ex] = _doc[k] | |
2080 else: | |
2081 _errors__.append( | |
2082 ValidationException( | |
2083 "invalid field `%s`, expected one of: `id`, `label`, `doc`, `position`, `tool_id`, `tool_shed_repository`, `tool_version`, `errors`, `uuid`, `in`, `out`, `state`, `tool_state`, `type`, `run`, `runtime_inputs`" % (k), | |
2084 SourceLine(_doc, k, str) | |
2085 ) | |
2086 ) | |
2087 break | |
2088 | |
2089 if _errors__: | |
2090 raise ValidationException("Trying 'WorkflowStep'", None, _errors__) | |
2091 loadingOptions = copy.deepcopy(loadingOptions) | |
2092 loadingOptions.original_doc = _doc | |
2093 return cls(id=id, label=label, doc=doc, position=position, tool_id=tool_id, tool_shed_repository=tool_shed_repository, tool_version=tool_version, errors=errors, uuid=uuid, in_=in_, out=out, state=state, tool_state=tool_state, type=type, run=run, runtime_inputs=runtime_inputs, extension_fields=extension_fields, loadingOptions=loadingOptions) | |
2094 | |
2095 def save(self, top=False, base_url="", relative_uris=True): | |
2096 # type: (bool, str, bool) -> Dict[str, Any] | |
2097 r = yaml.comments.CommentedMap() # type: Dict[str, Any] | |
2098 for ef in self.extension_fields: | |
2099 r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] | |
2100 | |
2101 if self.id is not None: | |
2102 u = save_relative_uri( | |
2103 self.id, | |
2104 base_url, | |
2105 True, | |
2106 None, | |
2107 relative_uris) | |
2108 if u: | |
2109 r['id'] = u | |
2110 | |
2111 if self.label is not None: | |
2112 r['label'] = save( | |
2113 self.label, | |
2114 top=False, | |
2115 base_url=self.id, | |
2116 relative_uris=relative_uris) | |
2117 | |
2118 if self.doc is not None: | |
2119 r['doc'] = save( | |
2120 self.doc, | |
2121 top=False, | |
2122 base_url=self.id, | |
2123 relative_uris=relative_uris) | |
2124 | |
2125 if self.position is not None: | |
2126 r['position'] = save( | |
2127 self.position, | |
2128 top=False, | |
2129 base_url=self.id, | |
2130 relative_uris=relative_uris) | |
2131 | |
2132 if self.tool_id is not None: | |
2133 r['tool_id'] = save( | |
2134 self.tool_id, | |
2135 top=False, | |
2136 base_url=self.id, | |
2137 relative_uris=relative_uris) | |
2138 | |
2139 if self.tool_shed_repository is not None: | |
2140 r['tool_shed_repository'] = save( | |
2141 self.tool_shed_repository, | |
2142 top=False, | |
2143 base_url=self.id, | |
2144 relative_uris=relative_uris) | |
2145 | |
2146 if self.tool_version is not None: | |
2147 r['tool_version'] = save( | |
2148 self.tool_version, | |
2149 top=False, | |
2150 base_url=self.id, | |
2151 relative_uris=relative_uris) | |
2152 | |
2153 if self.errors is not None: | |
2154 r['errors'] = save( | |
2155 self.errors, | |
2156 top=False, | |
2157 base_url=self.id, | |
2158 relative_uris=relative_uris) | |
2159 | |
2160 if self.uuid is not None: | |
2161 r['uuid'] = save( | |
2162 self.uuid, | |
2163 top=False, | |
2164 base_url=self.id, | |
2165 relative_uris=relative_uris) | |
2166 | |
2167 if self.in_ is not None: | |
2168 r['in'] = save( | |
2169 self.in_, | |
2170 top=False, | |
2171 base_url=self.id, | |
2172 relative_uris=relative_uris) | |
2173 | |
2174 if self.out is not None: | |
2175 r['out'] = save( | |
2176 self.out, | |
2177 top=False, | |
2178 base_url=self.id, | |
2179 relative_uris=relative_uris) | |
2180 | |
2181 if self.state is not None: | |
2182 r['state'] = save( | |
2183 self.state, | |
2184 top=False, | |
2185 base_url=self.id, | |
2186 relative_uris=relative_uris) | |
2187 | |
2188 if self.tool_state is not None: | |
2189 r['tool_state'] = save( | |
2190 self.tool_state, | |
2191 top=False, | |
2192 base_url=self.id, | |
2193 relative_uris=relative_uris) | |
2194 | |
2195 if self.type is not None: | |
2196 r['type'] = save( | |
2197 self.type, | |
2198 top=False, | |
2199 base_url=self.id, | |
2200 relative_uris=relative_uris) | |
2201 | |
2202 if self.run is not None: | |
2203 u = save_relative_uri( | |
2204 self.run, | |
2205 self.id, | |
2206 False, | |
2207 None, | |
2208 relative_uris) | |
2209 if u: | |
2210 r['run'] = u | |
2211 | |
2212 if self.runtime_inputs is not None: | |
2213 r['runtime_inputs'] = save( | |
2214 self.runtime_inputs, | |
2215 top=False, | |
2216 base_url=self.id, | |
2217 relative_uris=relative_uris) | |
2218 | |
2219 # top refers to the directory level | |
2220 if top: | |
2221 if self.loadingOptions.namespaces: | |
2222 r["$namespaces"] = self.loadingOptions.namespaces | |
2223 if self.loadingOptions.schemas: | |
2224 r["$schemas"] = self.loadingOptions.schemas | |
2225 return r | |
2226 | |
    # Recognized field names; fromDoc rejects any other key that is not namespaced.
    attrs = frozenset(['id', 'label', 'doc', 'position', 'tool_id', 'tool_shed_repository', 'tool_version', 'errors', 'uuid', 'in', 'out', 'state', 'tool_state', 'type', 'run', 'runtime_inputs'])
2228 | |
2229 | |
class Sink(Savable):
    """Marker base class (used by WorkflowStepInput); defines no fields of its own."""
    pass
2232 | |
2233 | |
class WorkflowStepInput(Identified, Sink, Labeled):
    """
    TODO:

    """
    def __init__(
        self,
        id=None,  # type: Any
        source=None,  # type: Any
        label=None,  # type: Any
        default=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        """Construct a WorkflowStepInput; all fields are optional."""
        self.extension_fields = extension_fields if extension_fields else yaml.comments.CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.id = id
        self.source = source
        self.label = label
        self.default = default

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowStepInput
        """Build a WorkflowStepInput from a parsed mapping.

        Field errors are accumulated and raised together as one
        ValidationException ("Trying 'WorkflowStepInput'").
        """
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        # `id` is handled first because it becomes the base URI for the
        # remaining fields.
        # BUGFIX: the generated code left `id` unbound when load_field raised,
        # so a bad `id` crashed with NameError at `if id is None:` instead of
        # reporting the aggregated ValidationException.
        id = None
        if 'id' in _doc:
            try:
                id = load_field(_doc.get('id'),
                                uri_union_of_None_type_or_strtype_True_False_None,
                                baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]))
        if id is None:
            id = docRoot if docRoot is not None else "_:" + str(_uuid__.uuid4())
        baseuri = id

        # Remaining fields share one load-and-record pattern.
        values = {}
        for fname, loader in (
                ('source', uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2),
                ('label', union_of_None_type_or_strtype),
                ('default', union_of_None_type_or_Any_type)):
            values[fname] = None
            if fname in _doc:
                try:
                    values[fname] = load_field(_doc.get(fname), loader,
                                               baseuri, loadingOptions)
                except ValidationException as e:
                    _errors__.append(
                        ValidationException(
                            "the `%s` field is not valid because:" % fname,
                            SourceLine(_doc, fname, str),
                            [e]))

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k in cls.attrs:
                continue
            if ":" in k:
                # Namespaced keys are preserved as extension fields.
                ex = expand_url(k, "", loadingOptions,
                                scoped_id=False, vocab_term=False)
                extension_fields[ex] = _doc[k]
            else:
                _errors__.append(
                    ValidationException(
                        "invalid field `%s`, expected one of: `id`, `source`, `label`, `default`" % (k),
                        SourceLine(_doc, k, str)))
                break

        if _errors__:
            raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(id=id, source=values['source'], label=values['label'],
                   default=values['default'], extension_fields=extension_fields,
                   loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this WorkflowStepInput back into a CommentedMap."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            if u:
                r['id'] = u

        if self.source is not None:
            # `source` is a URI field (ref scope 2 per the generated loader),
            # saved relative to this input's id.
            u = save_relative_uri(self.source, self.id, False, 2, relative_uris)
            if u:
                r['source'] = u

        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=self.id,
                              relative_uris=relative_uris)

        if self.default is not None:
            r['default'] = save(self.default, top=False, base_url=self.id,
                                relative_uris=relative_uris)

        # Only the document root carries the $namespaces / $schemas preamble.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['id', 'source', 'label', 'default'])
2409 | |
2410 | |
class Report(Savable):
    """
    Definition of an invocation report for this workflow. Currently the only
    field is 'markdown'.

    """
    def __init__(
        self,
        markdown,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        """Construct a Report; `markdown` is the only schema field."""
        # Fall back to fresh defaults when optional collaborators are absent.
        self.extension_fields = extension_fields if extension_fields else yaml.comments.CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.markdown = markdown

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Report
        """Build a Report from a parsed mapping; `markdown` is required."""
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename

        problems = []
        try:
            markdown = load_field(_doc.get('markdown'), strtype,
                                  baseuri, loadingOptions)
        except ValidationException as e:
            problems.append(
                ValidationException(
                    "the `markdown` field is not valid because:",
                    SourceLine(_doc, 'markdown', str),
                    [e]))

        extension_fields = yaml.comments.CommentedMap()
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if ":" in key:
                # Namespaced keys survive as extension fields.
                expanded = expand_url(key, "", loadingOptions,
                                      scoped_id=False, vocab_term=False)
                extension_fields[expanded] = _doc[key]
            else:
                problems.append(
                    ValidationException(
                        "invalid field `%s`, expected one of: `markdown`" % (key),
                        SourceLine(_doc, key, str)))
                break

        if problems:
            raise ValidationException("Trying 'Report'", None, problems)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(markdown=markdown, extension_fields=extension_fields,
                   loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this Report back into a CommentedMap."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.markdown is not None:
            r['markdown'] = save(self.markdown, top=False, base_url=base_url,
                                 relative_uris=relative_uris)

        # Only the document root carries the $namespaces / $schemas preamble.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['markdown'])
2502 | |
2503 | |
class WorkflowStepOutput(Identified):
    """
    Associate an output parameter of the underlying process with a workflow
    parameter. The workflow parameter (given in the `id` field) be may be used
    as a `source` to connect with input parameters of other workflow steps, or
    with an output parameter of the process.

    A unique identifier for this workflow output parameter. This is
    the identifier to use in the `source` field of `WorkflowStepInput`
    to connect the output value to downstream parameters.

    """
    def __init__(
        self,
        id=None,  # type: Any
        add_tags=None,  # type: Any
        change_datatype=None,  # type: Any
        delete_intermediate_datasets=None,  # type: Any
        hide=None,  # type: Any
        remove_tags=None,  # type: Any
        rename=None,  # type: Any
        set_columns=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        """Construct a WorkflowStepOutput; all fields are optional."""
        self.extension_fields = extension_fields if extension_fields else yaml.comments.CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.id = id
        self.add_tags = add_tags
        self.change_datatype = change_datatype
        self.delete_intermediate_datasets = delete_intermediate_datasets
        self.hide = hide
        self.remove_tags = remove_tags
        self.rename = rename
        self.set_columns = set_columns

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowStepOutput
        """Build a WorkflowStepOutput from a parsed mapping.

        Field errors are accumulated and raised together as one
        ValidationException ("Trying 'WorkflowStepOutput'").
        """
        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        # `id` first: it becomes the base URI for the remaining fields.
        # BUGFIX: the generated code left `id` unbound when load_field raised,
        # so a bad `id` crashed with NameError at `if id is None:` instead of
        # reporting the aggregated ValidationException.
        id = None
        if 'id' in _doc:
            try:
                id = load_field(_doc.get('id'),
                                uri_union_of_None_type_or_strtype_True_False_None,
                                baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]))
        if id is None:
            id = docRoot if docRoot is not None else "_:" + str(_uuid__.uuid4())
        baseuri = id

        # The optional fields all share one load-and-record pattern; only the
        # loader differs per field.
        values = {}
        for fname, loader in (
                ('add_tags', union_of_None_type_or_array_of_strtype),
                ('change_datatype', union_of_None_type_or_strtype),
                ('delete_intermediate_datasets', union_of_None_type_or_booltype),
                ('hide', union_of_None_type_or_booltype),
                ('remove_tags', union_of_None_type_or_array_of_strtype),
                ('rename', union_of_None_type_or_strtype),
                ('set_columns', union_of_None_type_or_array_of_strtype)):
            values[fname] = None
            if fname in _doc:
                try:
                    values[fname] = load_field(_doc.get(fname), loader,
                                               baseuri, loadingOptions)
                except ValidationException as e:
                    _errors__.append(
                        ValidationException(
                            "the `%s` field is not valid because:" % fname,
                            SourceLine(_doc, fname, str),
                            [e]))

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k in cls.attrs:
                continue
            if ":" in k:
                # Namespaced keys are preserved as extension fields.
                ex = expand_url(k, "", loadingOptions,
                                scoped_id=False, vocab_term=False)
                extension_fields[ex] = _doc[k]
            else:
                _errors__.append(
                    ValidationException(
                        "invalid field `%s`, expected one of: `id`, `add_tags`, `change_datatype`, `delete_intermediate_datasets`, `hide`, `remove_tags`, `rename`, `set_columns`" % (k),
                        SourceLine(_doc, k, str)))
                break

        if _errors__:
            raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(id=id, extension_fields=extension_fields,
                   loadingOptions=loadingOptions, **values)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        """Serialize this WorkflowStepOutput back into a CommentedMap."""
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            if u:
                r['id'] = u

        # Optional fields serialize uniformly, relative to this output's id.
        for fname in ('add_tags', 'change_datatype',
                      'delete_intermediate_datasets', 'hide', 'remove_tags',
                      'rename', 'set_columns'):
            value = getattr(self, fname)
            if value is not None:
                r[fname] = save(value, top=False, base_url=self.id,
                                relative_uris=relative_uris)

        # Only the document root carries the $namespaces / $schemas preamble.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['id', 'add_tags', 'change_datatype', 'delete_intermediate_datasets', 'hide', 'remove_tags', 'rename', 'set_columns'])
2775 | |
2776 | |
2777 class GalaxyWorkflow(Process, HasUUID): | |
2778 """ | |
2779 A Galaxy workflow description. This record corresponds to the description of a workflow that should be executable | |
2780 on a Galaxy server that includes the contained tool definitions. | |
2781 | |
2782 The workflows API or the user interface of Galaxy instances that are of version 19.09 or newer should be able to | |
2783 import a document defining this record. | |
2784 | |
2785 ## A note about `label` field. | |
2786 | |
2787 This is the name of the workflow in the Galaxy user interface. This is the mechanism that | |
2788 users will primarily identify the workflow using. Legacy support - this may also be called 'name' and Galaxy will | |
2789 consume the workflow document fine and treat this attribute correctly - however in order to validate against this | |
2790 workflow definition schema the attribute should be called `label`. | |
2791 | |
2792 """ | |
    def __init__(
        self,
        inputs,  # type: Any
        outputs,  # type: Any
        steps,  # type: Any
        id=None,  # type: Any
        label=None,  # type: Any
        doc=None,  # type: Any
        uuid=None,  # type: Any
        report=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None
        """Construct a GalaxyWorkflow; `inputs`, `outputs` and `steps` are required."""
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            # No extension fields supplied: start with an empty comment-preserving map.
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.label = label
        self.doc = doc
        self.inputs = inputs
        self.outputs = outputs
        self.uuid = uuid
        # Fixed record discriminator; fromDoc checks _doc['class'] against this value.
        self.class_ = "GalaxyWorkflow"
        self.steps = steps
        self.report = report
2824 | |
2825 @classmethod | |
2826 def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): | |
2827 # type: (Any, str, LoadingOptions, Optional[str]) -> GalaxyWorkflow | |
2828 | |
2829 _doc = copy.copy(doc) | |
2830 if hasattr(doc, 'lc'): | |
2831 _doc.lc.data = doc.lc.data | |
2832 _doc.lc.filename = doc.lc.filename | |
2833 _errors__ = [] | |
2834 | |
2835 if _doc.get('class') != 'GalaxyWorkflow': | |
2836 raise ValidationException("Not a GalaxyWorkflow") | |
2837 | |
2838 if 'id' in _doc: | |
2839 try: | |
2840 id = load_field(_doc.get( | |
2841 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) | |
2842 except ValidationException as e: | |
2843 _errors__.append( | |
2844 ValidationException( | |
2845 "the `id` field is not valid because:", | |
2846 SourceLine(_doc, 'id', str), | |
2847 [e] | |
2848 ) | |
2849 ) | |
2850 else: | |
2851 id = None | |
2852 | |
2853 if id is None: | |
2854 if docRoot is not None: | |
2855 id = docRoot | |
2856 else: | |
2857 id = "_:" + str(_uuid__.uuid4()) | |
2858 baseuri = id | |
2859 if 'label' in _doc: | |
2860 try: | |
2861 label = load_field(_doc.get( | |
2862 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) | |
2863 except ValidationException as e: | |
2864 _errors__.append( | |
2865 ValidationException( | |
2866 "the `label` field is not valid because:", | |
2867 SourceLine(_doc, 'label', str), | |
2868 [e] | |
2869 ) | |
2870 ) | |
2871 else: | |
2872 label = None | |
2873 if 'doc' in _doc: | |
2874 try: | |
2875 doc = load_field(_doc.get( | |
2876 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) | |
2877 except ValidationException as e: | |
2878 _errors__.append( | |
2879 ValidationException( | |
2880 "the `doc` field is not valid because:", | |
2881 SourceLine(_doc, 'doc', str), | |
2882 [e] | |
2883 ) | |
2884 ) | |
2885 else: | |
2886 doc = None | |
2887 try: | |
2888 inputs = load_field(_doc.get( | |
2889 'inputs'), idmap_inputs_array_of_WorkflowInputParameterLoader, baseuri, loadingOptions) | |
2890 except ValidationException as e: | |
2891 _errors__.append( | |
2892 ValidationException( | |
2893 "the `inputs` field is not valid because:", | |
2894 SourceLine(_doc, 'inputs', str), | |
2895 [e] | |
2896 ) | |
2897 ) | |
2898 try: | |
2899 outputs = load_field(_doc.get( | |
2900 'outputs'), idmap_outputs_array_of_WorkflowOutputParameterLoader, baseuri, loadingOptions) | |
2901 except ValidationException as e: | |
2902 _errors__.append( | |
2903 ValidationException( | |
2904 "the `outputs` field is not valid because:", | |
2905 SourceLine(_doc, 'outputs', str), | |
2906 [e] | |
2907 ) | |
2908 ) | |
2909 if 'uuid' in _doc: | |
2910 try: | |
2911 uuid = load_field(_doc.get( | |
2912 'uuid'), union_of_None_type_or_strtype, baseuri, loadingOptions) | |
2913 except ValidationException as e: | |
2914 _errors__.append( | |
2915 ValidationException( | |
2916 "the `uuid` field is not valid because:", | |
2917 SourceLine(_doc, 'uuid', str), | |
2918 [e] | |
2919 ) | |
2920 ) | |
2921 else: | |
2922 uuid = None | |
2923 try: | |
2924 steps = load_field(_doc.get( | |
2925 'steps'), idmap_steps_union_of_array_of_WorkflowStepLoader, baseuri, loadingOptions) | |
2926 except ValidationException as e: | |
2927 _errors__.append( | |
2928 ValidationException( | |
2929 "the `steps` field is not valid because:", | |
2930 SourceLine(_doc, 'steps', str), | |
2931 [e] | |
2932 ) | |
2933 ) | |
2934 if 'report' in _doc: | |
2935 try: | |
2936 report = load_field(_doc.get( | |
2937 'report'), union_of_None_type_or_ReportLoader, baseuri, loadingOptions) | |
2938 except ValidationException as e: | |
2939 _errors__.append( | |
2940 ValidationException( | |
2941 "the `report` field is not valid because:", | |
2942 SourceLine(_doc, 'report', str), | |
2943 [e] | |
2944 ) | |
2945 ) | |
2946 else: | |
2947 report = None | |
2948 | |
2949 extension_fields = yaml.comments.CommentedMap() | |
2950 for k in _doc.keys(): | |
2951 if k not in cls.attrs: | |
2952 if ":" in k: | |
2953 ex = expand_url(k, | |
2954 "", | |
2955 loadingOptions, | |
2956 scoped_id=False, | |
2957 vocab_term=False) | |
2958 extension_fields[ex] = _doc[k] | |
2959 else: | |
2960 _errors__.append( | |
2961 ValidationException( | |
2962 "invalid field `%s`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `uuid`, `class`, `steps`, `report`" % (k), | |
2963 SourceLine(_doc, k, str) | |
2964 ) | |
2965 ) | |
2966 break | |
2967 | |
2968 if _errors__: | |
2969 raise ValidationException("Trying 'GalaxyWorkflow'", None, _errors__) | |
2970 loadingOptions = copy.deepcopy(loadingOptions) | |
2971 loadingOptions.original_doc = _doc | |
2972 return cls(id=id, label=label, doc=doc, inputs=inputs, outputs=outputs, uuid=uuid, steps=steps, report=report, extension_fields=extension_fields, loadingOptions=loadingOptions) | |
2973 | |
2974 def save(self, top=False, base_url="", relative_uris=True): | |
2975 # type: (bool, str, bool) -> Dict[str, Any] | |
2976 r = yaml.comments.CommentedMap() # type: Dict[str, Any] | |
2977 for ef in self.extension_fields: | |
2978 r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] | |
2979 | |
2980 r['class'] = 'GalaxyWorkflow' | |
2981 | |
2982 if self.id is not None: | |
2983 u = save_relative_uri( | |
2984 self.id, | |
2985 base_url, | |
2986 True, | |
2987 None, | |
2988 relative_uris) | |
2989 if u: | |
2990 r['id'] = u | |
2991 | |
2992 if self.label is not None: | |
2993 r['label'] = save( | |
2994 self.label, | |
2995 top=False, | |
2996 base_url=self.id, | |
2997 relative_uris=relative_uris) | |
2998 | |
2999 if self.doc is not None: | |
3000 r['doc'] = save( | |
3001 self.doc, | |
3002 top=False, | |
3003 base_url=self.id, | |
3004 relative_uris=relative_uris) | |
3005 | |
3006 if self.inputs is not None: | |
3007 r['inputs'] = save( | |
3008 self.inputs, | |
3009 top=False, | |
3010 base_url=self.id, | |
3011 relative_uris=relative_uris) | |
3012 | |
3013 if self.outputs is not None: | |
3014 r['outputs'] = save( | |
3015 self.outputs, | |
3016 top=False, | |
3017 base_url=self.id, | |
3018 relative_uris=relative_uris) | |
3019 | |
3020 if self.uuid is not None: | |
3021 r['uuid'] = save( | |
3022 self.uuid, | |
3023 top=False, | |
3024 base_url=self.id, | |
3025 relative_uris=relative_uris) | |
3026 | |
3027 if self.steps is not None: | |
3028 r['steps'] = save( | |
3029 self.steps, | |
3030 top=False, | |
3031 base_url=self.id, | |
3032 relative_uris=relative_uris) | |
3033 | |
3034 if self.report is not None: | |
3035 r['report'] = save( | |
3036 self.report, | |
3037 top=False, | |
3038 base_url=self.id, | |
3039 relative_uris=relative_uris) | |
3040 | |
3041 # top refers to the directory level | |
3042 if top: | |
3043 if self.loadingOptions.namespaces: | |
3044 r["$namespaces"] = self.loadingOptions.namespaces | |
3045 if self.loadingOptions.schemas: | |
3046 r["$schemas"] = self.loadingOptions.schemas | |
3047 return r | |
3048 | |
3049 attrs = frozenset(['id', 'label', 'doc', 'inputs', 'outputs', 'uuid', 'class', 'steps', 'report']) | |
3050 | |
3051 | |
3052 _vocab = { | |
3053 "Any": "https://w3id.org/cwl/salad#Any", | |
3054 "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", | |
3055 "Documented": "https://w3id.org/cwl/salad#Documented", | |
3056 "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", | |
3057 "File": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File", | |
3058 "GalaxyType": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType", | |
3059 "GalaxyWorkflow": "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow", | |
3060 "HasStepErrors": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepErrors", | |
3061 "HasStepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition", | |
3062 "HasUUID": "https://galaxyproject.org/gxformat2/gxformat2common#HasUUID", | |
3063 "Identified": "https://w3id.org/cwl/cwl#Identified", | |
3064 "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", | |
3065 "Labeled": "https://w3id.org/cwl/cwl#Labeled", | |
3066 "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", | |
3067 "Parameter": "https://w3id.org/cwl/cwl#Parameter", | |
3068 "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", | |
3069 "Process": "https://w3id.org/cwl/cwl#Process", | |
3070 "RecordField": "https://w3id.org/cwl/salad#RecordField", | |
3071 "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", | |
3072 "ReferencesTool": "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool", | |
3073 "Report": "https://galaxyproject.org/gxformat2/v19_09#Report", | |
3074 "Sink": "https://galaxyproject.org/gxformat2/v19_09#Sink", | |
3075 "StepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition", | |
3076 "ToolShedRepository": "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository", | |
3077 "WorkflowInputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter", | |
3078 "WorkflowOutputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter", | |
3079 "WorkflowStep": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep", | |
3080 "WorkflowStepInput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput", | |
3081 "WorkflowStepOutput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput", | |
3082 "WorkflowStepType": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType", | |
3083 "array": "https://w3id.org/cwl/salad#array", | |
3084 "boolean": "http://www.w3.org/2001/XMLSchema#boolean", | |
3085 "collection": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection", | |
3086 "data": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data", | |
3087 "double": "http://www.w3.org/2001/XMLSchema#double", | |
3088 "enum": "https://w3id.org/cwl/salad#enum", | |
3089 "float": "http://www.w3.org/2001/XMLSchema#float", | |
3090 "int": "http://www.w3.org/2001/XMLSchema#int", | |
3091 "integer": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/integer", | |
3092 "long": "http://www.w3.org/2001/XMLSchema#long", | |
3093 "null": "https://w3id.org/cwl/salad#null", | |
3094 "pause": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause", | |
3095 "record": "https://w3id.org/cwl/salad#record", | |
3096 "string": "http://www.w3.org/2001/XMLSchema#string", | |
3097 "subworkflow": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow", | |
3098 "text": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/text", | |
3099 "tool": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool", | |
3100 } | |
3101 _rvocab = { | |
3102 "https://w3id.org/cwl/salad#Any": "Any", | |
3103 "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", | |
3104 "https://w3id.org/cwl/salad#Documented": "Documented", | |
3105 "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", | |
3106 "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File": "File", | |
3107 "https://galaxyproject.org/gxformat2/v19_09#GalaxyType": "GalaxyType", | |
3108 "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow": "GalaxyWorkflow", | |
3109 "https://galaxyproject.org/gxformat2/gxformat2common#HasStepErrors": "HasStepErrors", | |
3110 "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition": "HasStepPosition", | |
3111 "https://galaxyproject.org/gxformat2/gxformat2common#HasUUID": "HasUUID", | |
3112 "https://w3id.org/cwl/cwl#Identified": "Identified", | |
3113 "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", | |
3114 "https://w3id.org/cwl/cwl#Labeled": "Labeled", | |
3115 "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", | |
3116 "https://w3id.org/cwl/cwl#Parameter": "Parameter", | |
3117 "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", | |
3118 "https://w3id.org/cwl/cwl#Process": "Process", | |
3119 "https://w3id.org/cwl/salad#RecordField": "RecordField", | |
3120 "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", | |
3121 "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool": "ReferencesTool", | |
3122 "https://galaxyproject.org/gxformat2/v19_09#Report": "Report", | |
3123 "https://galaxyproject.org/gxformat2/v19_09#Sink": "Sink", | |
3124 "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition": "StepPosition", | |
3125 "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository": "ToolShedRepository", | |
3126 "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter": "WorkflowInputParameter", | |
3127 "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter": "WorkflowOutputParameter", | |
3128 "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep": "WorkflowStep", | |
3129 "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput": "WorkflowStepInput", | |
3130 "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput": "WorkflowStepOutput", | |
3131 "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType": "WorkflowStepType", | |
3132 "https://w3id.org/cwl/salad#array": "array", | |
3133 "http://www.w3.org/2001/XMLSchema#boolean": "boolean", | |
3134 "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection": "collection", | |
3135 "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data": "data", | |
3136 "http://www.w3.org/2001/XMLSchema#double": "double", | |
3137 "https://w3id.org/cwl/salad#enum": "enum", | |
3138 "http://www.w3.org/2001/XMLSchema#float": "float", | |
3139 "http://www.w3.org/2001/XMLSchema#int": "int", | |
3140 "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/integer": "integer", | |
3141 "http://www.w3.org/2001/XMLSchema#long": "long", | |
3142 "https://w3id.org/cwl/salad#null": "null", | |
3143 "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause": "pause", | |
3144 "https://w3id.org/cwl/salad#record": "record", | |
3145 "http://www.w3.org/2001/XMLSchema#string": "string", | |
3146 "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow": "subworkflow", | |
3147 "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/text": "text", | |
3148 "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool": "tool", | |
3149 } | |
3150 | |
# ---------------------------------------------------------------------------
# Loader instances wired together for the fromDoc()/load_document machinery.
# The autogenerated names encode the schema type expression each one loads:
#   union_of_X_or_Y   -> _UnionLoader((X, Y))
#   array_of_X        -> _ArrayLoader(X)
#   idmap_<field>_X   -> _IdMapLoader(X, <identifier key>, <predicate key>)
#   uri_X_a_b_c       -> _URILoader(X, ...) with the trailing flags as args
#   typedsl_X_n       -> _TypeDSLLoader(X, n)
# Definition order matters: each loader must exist before it is referenced.
# ---------------------------------------------------------------------------
strtype = _PrimitiveLoader((str, str))  # NOTE(review): the (str, str) tuple looks like a py2/py3 codegen artifact; isinstance() accepts a duplicate tuple — confirm against _PrimitiveLoader
inttype = _PrimitiveLoader(int)
floattype = _PrimitiveLoader(float)
booltype = _PrimitiveLoader(bool)
None_type = _PrimitiveLoader(type(None))
Any_type = _AnyLoader()
DocumentedLoader = _RecordLoader(Documented)
PrimitiveTypeLoader = _EnumLoader(("null", "boolean", "int", "long", "float", "double", "string",))
AnyLoader = _EnumLoader(("Any",))
RecordFieldLoader = _RecordLoader(RecordField)
RecordSchemaLoader = _RecordLoader(RecordSchema)
EnumSchemaLoader = _RecordLoader(EnumSchema)
ArraySchemaLoader = _RecordLoader(ArraySchema)
LabeledLoader = _RecordLoader(Labeled)
IdentifiedLoader = _RecordLoader(Identified)
ParameterLoader = _RecordLoader(Parameter)
InputParameterLoader = _RecordLoader(InputParameter)
OutputParameterLoader = _RecordLoader(OutputParameter)
ProcessLoader = _RecordLoader(Process)
HasUUIDLoader = _RecordLoader(HasUUID)
HasStepErrorsLoader = _RecordLoader(HasStepErrors)
HasStepPositionLoader = _RecordLoader(HasStepPosition)
StepPositionLoader = _RecordLoader(StepPosition)
ReferencesToolLoader = _RecordLoader(ReferencesTool)
ToolShedRepositoryLoader = _RecordLoader(ToolShedRepository)
GalaxyTypeLoader = _EnumLoader(("integer", "text", "File", "data", "collection",))
WorkflowStepTypeLoader = _EnumLoader(("tool", "subworkflow", "pause",))
WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter)
WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter)
WorkflowStepLoader = _RecordLoader(WorkflowStep)
SinkLoader = _RecordLoader(Sink)
WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput)
ReportLoader = _RecordLoader(Report)
WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput)
GalaxyWorkflowLoader = _RecordLoader(GalaxyWorkflow)
# Composite loaders built from the primitives/records above.
array_of_strtype = _ArrayLoader(strtype)
union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader((None_type, strtype, array_of_strtype,))
uri_strtype_True_False_None = _URILoader(strtype, True, False, None)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype,))
array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,))
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, 2)
array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader)
union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader((None_type, array_of_RecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_RecordFieldLoader, 'name', 'type')
# The enum_<hash>Loader names below are content-hashed by the code generator.
enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader(("record",))
typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2)
uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None)
enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader(("enum",))
typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader(enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2)
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, False, True, 2)
enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader(("array",))
typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2)
union_of_None_type_or_strtype = _UnionLoader((None_type, strtype,))
uri_union_of_None_type_or_strtype_True_False_None = _URILoader(union_of_None_type_or_strtype, True, False, None)
union_of_None_type_or_Any_type = _UnionLoader((None_type, Any_type,))
union_of_WorkflowInputParameterLoader = _UnionLoader((WorkflowInputParameterLoader,))
array_of_union_of_WorkflowInputParameterLoader = _ArrayLoader(union_of_WorkflowInputParameterLoader)
idmap_inputs_array_of_union_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowInputParameterLoader, 'id', 'type')
union_of_WorkflowOutputParameterLoader = _UnionLoader((WorkflowOutputParameterLoader,))
array_of_union_of_WorkflowOutputParameterLoader = _ArrayLoader(union_of_WorkflowOutputParameterLoader)
idmap_outputs_array_of_union_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowOutputParameterLoader, 'id', 'type')
union_of_None_type_or_StepPositionLoader = _UnionLoader((None_type, StepPositionLoader,))
union_of_floattype_or_inttype = _UnionLoader((floattype, inttype,))
union_of_None_type_or_ToolShedRepositoryLoader = _UnionLoader((None_type, ToolShedRepositoryLoader,))
union_of_GalaxyTypeLoader_or_strtype_or_None_type = _UnionLoader((GalaxyTypeLoader, strtype, None_type,))
typedsl_union_of_GalaxyTypeLoader_or_strtype_or_None_type_2 = _TypeDSLLoader(union_of_GalaxyTypeLoader_or_strtype_or_None_type, 2)
union_of_None_type_or_GalaxyTypeLoader = _UnionLoader((None_type, GalaxyTypeLoader,))
typedsl_union_of_None_type_or_GalaxyTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_GalaxyTypeLoader, 2)
array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader)
union_of_None_type_or_array_of_WorkflowStepInputLoader = _UnionLoader((None_type, array_of_WorkflowStepInputLoader,))
idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader = _IdMapLoader(union_of_None_type_or_array_of_WorkflowStepInputLoader, 'id', 'source')
union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader((strtype, WorkflowStepOutputLoader,))
array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader(union_of_strtype_or_WorkflowStepOutputLoader)
union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = _UnionLoader((array_of_union_of_strtype_or_WorkflowStepOutputLoader, None_type,))
idmap_out_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = _IdMapLoader(union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type, 'id', 'source')
union_of_None_type_or_WorkflowStepTypeLoader = _UnionLoader((None_type, WorkflowStepTypeLoader,))
typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_WorkflowStepTypeLoader, 2)
union_of_None_type_or_GalaxyWorkflowLoader = _UnionLoader((None_type, GalaxyWorkflowLoader,))
uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None = _URILoader(union_of_None_type_or_GalaxyWorkflowLoader, False, False, None)
union_of_None_type_or_array_of_strtype = _UnionLoader((None_type, array_of_strtype,))
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2)
union_of_None_type_or_booltype = _UnionLoader((None_type, booltype,))
array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader)
idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_WorkflowInputParameterLoader, 'id', 'type')
array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader)
idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_WorkflowOutputParameterLoader, 'id', 'type')
uri_strtype_False_True_None = _URILoader(strtype, False, True, None)
array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader)
union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,))
idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader(union_of_array_of_WorkflowStepLoader, 'id', 'None')  # NOTE(review): the literal string 'None' (not the None object) is what the generator emitted — confirm _IdMapLoader treats it as "no map predicate"
union_of_None_type_or_ReportLoader = _UnionLoader((None_type, ReportLoader,))
union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader,))
array_of_union_of_GalaxyWorkflowLoader = _ArrayLoader(union_of_GalaxyWorkflowLoader)
union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader, array_of_union_of_GalaxyWorkflowLoader,))
3246 | |
3247 | |
def load_document(doc, baseuri=None, loadingOptions=None):
    # type: (Any, Optional[str], Optional[LoadingOptions]) -> Any
    """Load a GalaxyWorkflow (or list of them) from an already-parsed document.

    When no base URI is given, the current working directory is used; when no
    loading options are given, defaults are constructed.
    """
    base = file_uri(os.getcwd()) + "/" if baseuri is None else baseuri
    options = LoadingOptions() if loadingOptions is None else loadingOptions
    return _document_load(
        union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader,
        doc, base, options)
3255 | |
3256 | |
def load_document_by_string(string, uri, loadingOptions=None):
    # type: (Any, str, Optional[LoadingOptions]) -> Any
    """Parse *string* as YAML and load it as a GalaxyWorkflow rooted at *uri*."""
    parsed = yaml.main.round_trip_load(string, preserve_quotes=True)
    # Attach the source URI so validation errors can report file/line info.
    add_lc_filename(parsed, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)
    # Register the parsed document in the index so references to it resolve.
    loadingOptions.idx[uri] = parsed

    return _document_load(
        union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader,
        parsed, uri, loadingOptions)