#
# This file was autogenerated using schema-salad-tool --codegen=python
# The code itself is released under the Apache 2.0 license and the help text is
# subject to the license of the original schema.
#
# type: ignore
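#
# Maintenance note (an assumption, not part of the original header): since this
# module is produced by `schema-salad-tool --codegen=python`, hand edits are
# liable to be overwritten on regeneration; changes normally belong in the
# source schema instead.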
import copy
import os
import re
import uuid as _uuid__  # pylint: disable=unused-import # noqa: F401
from io import StringIO
from typing import (
    Any,
    Dict,
    List,
    MutableMapping,
    MutableSequence,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
)
from urllib.parse import quote, urlsplit, urlunsplit
from urllib.request import pathname2url

from ruamel import yaml
from ruamel.yaml.comments import CommentedMap
from schema_salad.exceptions import SchemaSaladException, ValidationException
from schema_salad.fetcher import DefaultFetcher, Fetcher
from schema_salad.sourceline import SourceLine, add_lc_filename

_vocab = {}  # type: Dict[str, str]
_rvocab = {}  # type: Dict[str, str]


class Savable(object):
    @classmethod
    def fromDoc(cls, _doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Savable
        pass

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, str]
        pass


class LoadingOptions(object):
    def __init__(
        self,
        fetcher=None,  # type: Optional[Fetcher]
        namespaces=None,  # type: Optional[Dict[str, str]]
        schemas=None,  # type: Optional[Dict[str, str]]
        fileuri=None,  # type: Optional[str]
        copyfrom=None,  # type: Optional[LoadingOptions]
        original_doc=None,  # type: Optional[Any]
    ):  # type: (...) -> None
        self.idx = {}  # type: Dict[str, Dict[str, Any]]
        self.fileuri = fileuri  # type: Optional[str]
        self.namespaces = namespaces
        self.schemas = schemas
        self.original_doc = original_doc
        if copyfrom is not None:
            self.idx = copyfrom.idx
            if fetcher is None:
                fetcher = copyfrom.fetcher
            if fileuri is None:
                self.fileuri = copyfrom.fileuri
            if namespaces is None:
                self.namespaces = copyfrom.namespaces
            if schemas is None:
                self.schemas = copyfrom.schemas

        if fetcher is None:
            import requests
            from cachecontrol.wrapper import CacheControl
            from cachecontrol.caches import FileCache

            if "HOME" in os.environ:
                session = CacheControl(
                    requests.Session(),
                    cache=FileCache(
                        os.path.join(os.environ["HOME"], ".cache", "salad")
                    ),
                )
            elif "TMPDIR" in os.environ:
                session = CacheControl(
                    requests.Session(),
                    cache=FileCache(
                        os.path.join(os.environ["TMPDIR"], ".cache", "salad")
                    ),
                )
            else:
                session = CacheControl(
                    requests.Session(),
                    cache=FileCache(os.path.join("/tmp", ".cache", "salad")),
                )
            self.fetcher = DefaultFetcher({}, session)  # type: Fetcher
        else:
            self.fetcher = fetcher

        self.vocab = _vocab
        self.rvocab = _rvocab

        if namespaces is not None:
            self.vocab = self.vocab.copy()
            self.rvocab = self.rvocab.copy()
            for k, v in namespaces.items():
                self.vocab[k] = v
                self.rvocab[v] = k
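
# Illustrative, commented-out sketch (not part of the generated module): passing
# `namespaces` to LoadingOptions copies the module vocabulary and extends it in
# both directions, so prefixed terms can be expanded and full IRIs shortened.
# The prefix and IRI below are made-up examples.
#
#     opts = LoadingOptions(namespaces={"ex": "http://example.org/vocab#"})
#     opts.vocab["ex"]                          # -> "http://example.org/vocab#"
#     opts.rvocab["http://example.org/vocab#"]  # -> "ex"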


def load_field(val, fieldtype, baseuri, loadingOptions):
    # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any
    if isinstance(val, MutableMapping):
        if "$import" in val:
            if loadingOptions.fileuri is None:
                raise SchemaSaladException("Cannot load $import without fileuri")
            return _document_load_by_url(
                fieldtype,
                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]),
                loadingOptions,
            )
        elif "$include" in val:
            if loadingOptions.fileuri is None:
                raise SchemaSaladException("Cannot load $import without fileuri")
            val = loadingOptions.fetcher.fetch_text(
                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"])
            )
    return fieldtype.load(val, baseuri, loadingOptions)


save_type = Union[Dict[str, str], List[Union[Dict[str, str], List[Any], None]], None]


def save(
    val,  # type: Optional[Union[Savable, MutableSequence[Savable]]]
    top=True,  # type: bool
    base_url="",  # type: str
    relative_uris=True,  # type: bool
):  # type: (...) -> save_type

    if isinstance(val, Savable):
        return val.save(top=top, base_url=base_url, relative_uris=relative_uris)
    if isinstance(val, MutableSequence):
        return [
            save(v, top=False, base_url=base_url, relative_uris=relative_uris)
            for v in val
        ]
    if isinstance(val, MutableMapping):
        newdict = {}
        for key in val:
            newdict[key] = save(
                val[key], top=False, base_url=base_url, relative_uris=relative_uris
            )
        return newdict
    return val


def expand_url(
    url,  # type: str
    base_url,  # type: str
    loadingOptions,  # type: LoadingOptions
    scoped_id=False,  # type: bool
    vocab_term=False,  # type: bool
    scoped_ref=None,  # type: Optional[int]
):
    # type: (...) -> str
    if url in ("@id", "@type"):
        return url

    if vocab_term and url in loadingOptions.vocab:
        return url

    if bool(loadingOptions.vocab) and ":" in url:
        prefix = url.split(":")[0]
        if prefix in loadingOptions.vocab:
            url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :]

    split = urlsplit(url)

    if (
        (bool(split.scheme) and split.scheme in ["http", "https", "file"])
        or url.startswith("$(")
        or url.startswith("${")
    ):
        pass
    elif scoped_id and not bool(split.fragment):
        splitbase = urlsplit(base_url)
        frg = ""
        if bool(splitbase.fragment):
            frg = splitbase.fragment + "/" + split.path
        else:
            frg = split.path
        pt = splitbase.path if splitbase.path != "" else "/"
        url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg))
    elif scoped_ref is not None and not bool(split.fragment):
        splitbase = urlsplit(base_url)
        sp = splitbase.fragment.split("/")
        n = scoped_ref
        while n > 0 and len(sp) > 0:
            sp.pop()
            n -= 1
        sp.append(url)
        url = urlunsplit(
            (
                splitbase.scheme,
                splitbase.netloc,
                splitbase.path,
                splitbase.query,
                "/".join(sp),
            )
        )
    else:
        url = loadingOptions.fetcher.urljoin(base_url, url)

    if vocab_term:
        split = urlsplit(url)
        if bool(split.scheme):
            if url in loadingOptions.rvocab:
                return loadingOptions.rvocab[url]
        else:
            raise ValidationException("Term '{}' not in vocabulary".format(url))

    return url
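
# Illustrative, commented-out sketch: expand_url() leaves absolute http/https/file
# URLs and parameter references ("$(...)", "${...}") untouched, and rewrites
# prefixed terms against the vocabulary. The "ex" prefix and IRI below are
# made-up examples.
#
#     opts = LoadingOptions(namespaces={"ex": "http://example.org/vocab#"})
#     expand_url("ex:thing", "", opts)                  # -> "http://example.org/vocab#thing"
#     expand_url("http://example.org/other", "", opts)  # passes through unchanged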


class _Loader(object):
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        pass


class _AnyLoader(_Loader):
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if doc is not None:
            return doc
        raise ValidationException("Expected non-null")


class _PrimitiveLoader(_Loader):
    def __init__(self, tp):
        # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None
        self.tp = tp

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if not isinstance(doc, self.tp):
            raise ValidationException(
                "Expected a {} but got {}".format(
                    self.tp, doc.__class__.__name__
                )
            )
        return doc

    def __repr__(self):  # type: () -> str
        return str(self.tp)


class _ArrayLoader(_Loader):
    def __init__(self, items):
        # type: (_Loader) -> None
        self.items = items

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if not isinstance(doc, MutableSequence):
            raise ValidationException("Expected a list")
        r = []  # type: List[Any]
        errors = []  # type: List[SchemaSaladException]
        for i in range(0, len(doc)):
            try:
                lf = load_field(
                    doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions
                )
                if isinstance(lf, MutableSequence):
                    r.extend(lf)
                else:
                    r.append(lf)
            except ValidationException as e:
                errors.append(e.with_sourceline(SourceLine(doc, i, str)))
        if errors:
            raise ValidationException("", None, errors)
        return r

    def __repr__(self):  # type: () -> str
        return "array<{}>".format(self.items)


class _EnumLoader(_Loader):
    def __init__(self, symbols):
        # type: (Sequence[str]) -> None
        self.symbols = symbols

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if doc in self.symbols:
            return doc
        else:
            raise ValidationException("Expected one of {}".format(self.symbols))


class _RecordLoader(_Loader):
    def __init__(self, classtype):
        # type: (Type[Savable]) -> None
        self.classtype = classtype

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if not isinstance(doc, MutableMapping):
            raise ValidationException("Expected a dict")
        return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot)

    def __repr__(self):  # type: () -> str
        return str(self.classtype)


class _UnionLoader(_Loader):
    def __init__(self, alternates):
        # type: (Sequence[_Loader]) -> None
        self.alternates = alternates

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        errors = []
        for t in self.alternates:
            try:
                return t.load(doc, baseuri, loadingOptions, docRoot=docRoot)
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "tried {} but".format(t.__class__.__name__), None, [e]
                    )
                )
        raise ValidationException("", None, errors, "-")

    def __repr__(self):  # type: () -> str
        return " | ".join(str(a) for a in self.alternates)


class _URILoader(_Loader):
    def __init__(self, inner, scoped_id, vocab_term, scoped_ref):
        # type: (_Loader, bool, bool, Union[int, None]) -> None
        self.inner = inner
        self.scoped_id = scoped_id
        self.vocab_term = vocab_term
        self.scoped_ref = scoped_ref

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableSequence):
            doc = [
                expand_url(
                    i,
                    baseuri,
                    loadingOptions,
                    self.scoped_id,
                    self.vocab_term,
                    self.scoped_ref,
                )
                for i in doc
            ]
        if isinstance(doc, str):
            doc = expand_url(
                doc,
                baseuri,
                loadingOptions,
                self.scoped_id,
                self.vocab_term,
                self.scoped_ref,
            )
        return self.inner.load(doc, baseuri, loadingOptions)


class _TypeDSLLoader(_Loader):
    typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$")

    def __init__(self, inner, refScope):
        # type: (_Loader, Union[int, None]) -> None
        self.inner = inner
        self.refScope = refScope

    def resolve(
        self,
        doc,  # type: str
        baseuri,  # type: str
        loadingOptions,  # type: LoadingOptions
    ):
        # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str]
        m = self.typeDSLregex.match(doc)
        if m:
            group1 = m.group(1)
            assert group1 is not None
            first = expand_url(
                group1, baseuri, loadingOptions, False, True, self.refScope
            )
            second = third = None
            if bool(m.group(2)):
                second = {"type": "array", "items": first}
                # second = CommentedMap((("type", "array"),
                #                       ("items", first)))
                # second.lc.add_kv_line_col("type", lc)
                # second.lc.add_kv_line_col("items", lc)
                # second.lc.filename = filename
            if bool(m.group(3)):
                third = ["null", second or first]
                # third = CommentedSeq(["null", second or first])
                # third.lc.add_kv_line_col(0, lc)
                # third.lc.add_kv_line_col(1, lc)
                # third.lc.filename = filename
            return third or second or first
        return doc

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableSequence):
            r = []  # type: List[Any]
            for d in doc:
                if isinstance(d, str):
                    resolved = self.resolve(d, baseuri, loadingOptions)
                    if isinstance(resolved, MutableSequence):
                        for i in resolved:
                            if i not in r:
                                r.append(i)
                    else:
                        if resolved not in r:
                            r.append(resolved)
                else:
                    r.append(d)
            doc = r
        elif isinstance(doc, str):
            doc = self.resolve(doc, baseuri, loadingOptions)

        return self.inner.load(doc, baseuri, loadingOptions)
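
# Illustrative note on the type DSL handled above: resolve() expands the
# shorthand suffixes before delegating to the inner loader,
#   "T[]"  -> {"type": "array", "items": <T>}
#   "T?"   -> ["null", <T>]
#   "T[]?" -> ["null", {"type": "array", "items": <T>}]
# where <T> is the base type name after expand_url() against the vocabulary.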


class _IdMapLoader(_Loader):
    def __init__(self, inner, mapSubject, mapPredicate):
        # type: (_Loader, str, Union[str, None]) -> None
        self.inner = inner
        self.mapSubject = mapSubject
        self.mapPredicate = mapPredicate

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableMapping):
            r = []  # type: List[Any]
            for k in sorted(doc.keys()):
                val = doc[k]
                if isinstance(val, CommentedMap):
                    v = copy.copy(val)
                    v.lc.data = val.lc.data
                    v.lc.filename = val.lc.filename
                    v[self.mapSubject] = k
                    r.append(v)
                elif isinstance(val, MutableMapping):
                    v2 = copy.copy(val)
                    v2[self.mapSubject] = k
                    r.append(v2)
                else:
                    if self.mapPredicate:
                        v3 = {self.mapPredicate: val}
                        v3[self.mapSubject] = k
                        r.append(v3)
                    else:
                        raise ValidationException("No mapPredicate")
            doc = r
        return self.inner.load(doc, baseuri, loadingOptions)
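
# Illustrative, commented-out sketch: the id-map loader above turns the mapping
# form of a field into the equivalent list form (keys sorted) before delegating
# to the inner loader. With a hypothetical mapSubject="id" and mapPredicate="type":
#
#     {"in1": {"doc": "x"}, "in2": "File"}
#     # becomes
#     [{"doc": "x", "id": "in1"}, {"type": "File", "id": "in2"}]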


def _document_load(loader, doc, baseuri, loadingOptions):
    # type: (_Loader, Any, str, LoadingOptions) -> Any
    if isinstance(doc, str):
        return _document_load_by_url(
            loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions
        )

    if isinstance(doc, MutableMapping):
        if "$namespaces" in doc or "$schemas" in doc:
            loadingOptions = LoadingOptions(
                copyfrom=loadingOptions,
                namespaces=doc.get("$namespaces", None),
                schemas=doc.get("$schemas", None),
            )
            doc = {k: v for k, v in doc.items() if k not in ["$namespaces", "$schemas"]}

        if "$base" in doc:
            baseuri = doc["$base"]

        if "$graph" in doc:
            return loader.load(doc["$graph"], baseuri, loadingOptions)
        else:
            return loader.load(doc, baseuri, loadingOptions, docRoot=baseuri)

    if isinstance(doc, MutableSequence):
        return loader.load(doc, baseuri, loadingOptions)

    raise ValidationException("Oops, we shouldn't be here!")


def _document_load_by_url(loader, url, loadingOptions):
    # type: (_Loader, str, LoadingOptions) -> Any
    if url in loadingOptions.idx:
        return _document_load(loader, loadingOptions.idx[url], url, loadingOptions)

    text = loadingOptions.fetcher.fetch_text(url)
    if isinstance(text, bytes):
        textIO = StringIO(text.decode("utf-8"))
    else:
        textIO = StringIO(text)
    textIO.name = str(url)
    result = yaml.main.round_trip_load(textIO, preserve_quotes=True)
    add_lc_filename(result, url)

    loadingOptions.idx[url] = result

    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url)

    return _document_load(loader, result, url, loadingOptions)
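
# Note on the two loaders above: _document_load() applies the schema-salad
# preprocessing keys ("$namespaces"/"$schemas" fold into a fresh LoadingOptions,
# "$base" rebases the document URI, and "$graph" makes the contained list of
# root objects the thing that gets loaded), while _document_load_by_url()
# fetches, parses, and caches each URL in loadingOptions.idx so repeated
# references are only fetched once.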


def file_uri(path, split_frag=False):  # type: (str, bool) -> str
    if path.startswith("file://"):
        return path
    if split_frag:
        pathsp = path.split("#", 2)
        frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else ""
        urlpath = pathname2url(str(pathsp[0]))
    else:
        urlpath = pathname2url(path)
        frag = ""
    if urlpath.startswith("//"):
        return "file:{}{}".format(urlpath, frag)
    else:
        return "file://{}{}".format(urlpath, frag)


def prefix_url(url, namespaces):  # type: (str, Dict[str, str]) -> str
    for k, v in namespaces.items():
        if url.startswith(v):
            return k + ":" + url[len(v) :]
    return url
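
# Illustrative, commented-out sketch: prefix_url() goes the other way, shortening
# a full IRI to a prefixed term when a namespace matches (the namespace below is
# a made-up example).
#
#     prefix_url("http://example.org/vocab#thing", {"ex": "http://example.org/vocab#"})
#     # -> "ex:thing"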


def save_relative_uri(uri, base_url, scoped_id, ref_scope, relative_uris):
    # type: (str, str, bool, Optional[int], bool) -> Union[str, List[str]]
    if not relative_uris or uri == base_url:
        return uri
    if isinstance(uri, MutableSequence):
        return [
            save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris)
            for u in uri
        ]
    elif isinstance(uri, str):
        urisplit = urlsplit(uri)
        basesplit = urlsplit(base_url)
        if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc:
            if urisplit.path != basesplit.path:
                p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path))
                if urisplit.fragment:
                    p = p + "#" + urisplit.fragment
                return p

            basefrag = basesplit.fragment + "/"
            if ref_scope:
                sp = basefrag.split("/")
                i = 0
                while i < ref_scope:
                    sp.pop()
                    i += 1
                basefrag = "/".join(sp)

            if urisplit.fragment.startswith(basefrag):
                return urisplit.fragment[len(basefrag) :]
            else:
                return urisplit.fragment
        return uri
    else:
        return save(uri, top=False, base_url=base_url)
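
# Illustrative, commented-out sketch: with relative_uris=True, an identifier that
# shares the base document's scheme and netloc is shortened, here to the part of
# its fragment below the base fragment (the file names are made up).
#
#     save_relative_uri("file:///data/wf.yml#step1/out",
#                       "file:///data/wf.yml#step1", True, None, True)
#     # -> "out"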


class Documented(Savable):
    pass


class RecordField(Documented):
    """
A field of a record.
    """
    def __init__(
        self,
        name,  # type: Any
        type,  # type: Any
        doc=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.name = name
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> RecordField

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'name' in _doc:
            try:
                name = load_field(_doc.get(
                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, 'name', str),
                        [e]
                    )
                )
        else:
            name = None

        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                raise ValidationException("Missing name")
        baseuri = name
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        try:
            type = load_field(_doc.get(
                'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `doc`, `name`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'RecordField'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(doc=doc, name=name, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.name is not None:
            u = save_relative_uri(
                self.name,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['name'] = u

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        # "top" is set when saving the document root; $namespaces and $schemas are only emitted there
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['doc', 'name', 'type'])


class RecordSchema(Savable):
    def __init__(
        self,
        type,  # type: Any
        fields=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.fields = fields
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> RecordSchema

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'fields' in _doc:
            try:
                fields = load_field(_doc.get(
                    'fields'), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `fields` field is not valid because:",
                        SourceLine(_doc, 'fields', str),
                        [e]
                    )
                )
        else:
            fields = None
        try:
            type = load_field(_doc.get(
                'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `fields`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'RecordSchema'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(fields=fields, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.fields is not None:
            r['fields'] = save(
                self.fields,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # "top" is set when saving the document root; $namespaces and $schemas are only emitted there
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['fields', 'type'])


class EnumSchema(Savable):
    """
Define an enumerated type.

    """
    def __init__(
        self,
        symbols,  # type: Any
        type,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.symbols = symbols
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> EnumSchema

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            symbols = load_field(_doc.get(
                'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `symbols` field is not valid because:",
                    SourceLine(_doc, 'symbols', str),
                    [e]
                )
            )
        try:
            type = load_field(_doc.get(
                'type'), typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `symbols`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'EnumSchema'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(symbols=symbols, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.symbols is not None:
            u = save_relative_uri(
                self.symbols,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['symbols'] = u

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # "top" is set when saving the document root; $namespaces and $schemas are only emitted there
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['symbols', 'type'])


class ArraySchema(Savable):
    def __init__(
        self,
        items,  # type: Any
        type,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.items = items
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> ArraySchema

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            items = load_field(_doc.get(
                'items'), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `items` field is not valid because:",
                    SourceLine(_doc, 'items', str),
                    [e]
                )
            )
        try:
            type = load_field(_doc.get(
                'type'), typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, 'type', str),
                    [e]
                )
            )

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `items`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'ArraySchema'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(items=items, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.items is not None:
            u = save_relative_uri(
                self.items,
                base_url,
                False,
                2,
                relative_uris)
            if u:
                r['items'] = u

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # "top" is set when saving the document root; $namespaces and $schemas are only emitted there
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['items', 'type'])


class Labeled(Savable):
    pass


class Identified(Savable):
    pass


class Parameter(Documented, Identified):
    """
Define an input or output parameter to a process.

    """
    pass


class InputParameter(Parameter):
    pass


class OutputParameter(Parameter):
    pass


class Process(Identified, Labeled, Documented):
    """

The base executable type in CWL is the `Process` object defined by the
document.  Note that the `Process` object is abstract and cannot be
directly executed.

    """
    pass


class HasUUID(Savable):
    pass


class HasStepErrors(Savable):
    pass


class HasStepPosition(Savable):
    pass


class StepPosition(Savable):
    """
This field specifies the location of the step's node when rendered in the workflow editor.
    """
    def __init__(
        self,
        top,  # type: Any
        left,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.top = top
        self.left = left

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> StepPosition

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            top = load_field(_doc.get(
                'top'), union_of_floattype_or_inttype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `top` field is not valid because:",
                    SourceLine(_doc, 'top', str),
                    [e]
                )
            )
        try:
            left = load_field(_doc.get(
                'left'), union_of_floattype_or_inttype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `left` field is not valid because:",
                    SourceLine(_doc, 'left', str),
                    [e]
                )
            )

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `top`, `left`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'StepPosition'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(top=top, left=left, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.top is not None:
            r['top'] = save(
                self.top,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        if self.left is not None:
            r['left'] = save(
                self.left,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # "top" is set when saving the document root; $namespaces and $schemas are only emitted there
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['top', 'left'])


class ReferencesTool(Savable):
    pass


class ToolShedRepository(Savable):
    def __init__(
        self,
        changeset_revision,  # type: Any
        name,  # type: Any
        owner,  # type: Any
        tool_shed,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.changeset_revision = changeset_revision
        self.name = name
        self.owner = owner
        self.tool_shed = tool_shed

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> ToolShedRepository

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'name' in _doc:
            try:
                name = load_field(_doc.get(
                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, 'name', str),
                        [e]
                    )
                )
        else:
            name = None

        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                raise ValidationException("Missing name")
        baseuri = name
        try:
            changeset_revision = load_field(_doc.get(
                'changeset_revision'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `changeset_revision` field is not valid because:",
                    SourceLine(_doc, 'changeset_revision', str),
                    [e]
                )
            )
        try:
            owner = load_field(_doc.get(
                'owner'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `owner` field is not valid because:",
                    SourceLine(_doc, 'owner', str),
                    [e]
                )
            )
        try:
            tool_shed = load_field(_doc.get(
                'tool_shed'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `tool_shed` field is not valid because:",
                    SourceLine(_doc, 'tool_shed', str),
                    [e]
                )
            )

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `changeset_revision`, `name`, `owner`, `tool_shed`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'ToolShedRepository'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(changeset_revision=changeset_revision, name=name, owner=owner, tool_shed=tool_shed, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.name is not None:
            u = save_relative_uri(
                self.name,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['name'] = u

        if self.changeset_revision is not None:
            r['changeset_revision'] = save(
                self.changeset_revision,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        if self.owner is not None:
            r['owner'] = save(
                self.owner,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        if self.tool_shed is not None:
            r['tool_shed'] = save(
                self.tool_shed,
                top=False,
                base_url=self.name,
                relative_uris=relative_uris)

        # "top" is set when saving the document root; $namespaces and $schemas are only emitted there
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['changeset_revision', 'name', 'owner', 'tool_shed'])


class WorkflowInputParameter(InputParameter, HasStepPosition):
    def __init__(
        self,
        type,  # type: Any
        doc=None,  # type: Any
        id=None,  # type: Any
        default=None,  # type: Any
        position=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.id = id
        self.default = default
        self.position = position
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowInputParameter

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        baseuri = id
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        if 'default' in _doc:
            try:
                default = load_field(_doc.get(
                    'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `default` field is not valid because:",
                        SourceLine(_doc, 'default', str),
                        [e]
                    )
                )
        else:
            default = None
        if 'position' in _doc:
            try:
                position = load_field(_doc.get(
                    'position'), union_of_None_type_or_StepPositionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `position` field is not valid because:",
                        SourceLine(_doc, 'position', str),
                        [e]
                    )
                )
        else:
            position = None
        if 'type' in _doc:
            try:
                type = load_field(_doc.get(
                    'type'), typedsl_union_of_GalaxyTypeLoader_or_strtype_or_None_type_2, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `type` field is not valid because:",
                        SourceLine(_doc, 'type', str),
                        [e]
                    )
                )
        else:
            type = None

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `doc`, `id`, `default`, `position`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowInputParameter'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(doc=doc, id=id, default=default, position=position, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.default is not None:
            r['default'] = save(
                self.default,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.position is not None:
            r['position'] = save(
                self.position,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # "top" is set when saving the document root; $namespaces and $schemas are only emitted there
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['doc', 'id', 'default', 'position', 'type'])


class WorkflowOutputParameter(OutputParameter):
    """
Describe an output parameter of a workflow.  The parameter must be
connected to one parameter defined in the workflow that
will provide the value of the output parameter. It is legal to
connect a WorkflowInputParameter to a WorkflowOutputParameter.

    """
    def __init__(
        self,
        doc=None,  # type: Any
        id=None,  # type: Any
        outputSource=None,  # type: Any
        type=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.id = id
        self.outputSource = outputSource
        self.type = type

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowOutputParameter

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        baseuri = id
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        if 'outputSource' in _doc:
            try:
                outputSource = load_field(_doc.get(
                    'outputSource'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `outputSource` field is not valid because:",
                        SourceLine(_doc, 'outputSource', str),
                        [e]
                    )
                )
        else:
            outputSource = None
        if 'type' in _doc:
            try:
                type = load_field(_doc.get(
                    'type'), typedsl_union_of_None_type_or_GalaxyTypeLoader_2, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `type` field is not valid because:",
                        SourceLine(_doc, 'type', str),
                        [e]
                    )
                )
        else:
            type = None

        extension_fields = yaml.comments.CommentedMap()
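        # Keys not listed in `attrs` that contain ":" are kept as namespaced
        # extension fields (with the key expanded to a full URI); any other
        # unknown key is reported as a validation error.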
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `doc`, `id`, `outputSource`, `type`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(doc=doc, id=id, outputSource=outputSource, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.outputSource is not None:
            r['outputSource'] = save(
                self.outputSource,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # `top` marks the document root; only the root emits $namespaces and $schemas
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['doc', 'id', 'outputSource', 'type'])
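

# Illustrative sketch (not part of the generated schema): building a
# WorkflowOutputParameter by hand and serializing it back to a plain mapping
# with save(). The identifiers used here ("wf_output_1", "first_cat/out_file1")
# are hypothetical; outputSource could equally name a workflow input, and the
# exact serialized URI forms may vary with the base URL passed to save().
def _example_workflow_output_parameter():
    param = WorkflowOutputParameter(
        id="wf_output_1",
        outputSource="first_cat/out_file1",
        type="data",
    )
    # Returns a CommentedMap along the lines of
    # {'id': 'wf_output_1', 'outputSource': 'first_cat/out_file1', 'type': 'data'}
    return param.save()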


class WorkflowStep(Identified, Labeled, Documented, HasStepPosition, ReferencesTool, HasStepErrors, HasUUID):
    """
This represents a non-input step of a Galaxy Workflow.

# A note about the `state` and `tool_state` fields.

Only one or the other should be specified. These are two ways to represent the "state"
of a tool at this workflow step. Both are essentially maps from parameter names to
parameter values.

`tool_state` is lower-level and expects a flat dictionary whose values are JSON
dumps. Nested tool structures such as conditionals and repeats should have all of
their values inside the JSON-dumped string. In general, `tool_state` may be present
in workflows exported from Galaxy but shouldn't be written by humans.

`state` can contain a typed map. Repeat values can be represented as YAML arrays. An
alternative to representing `state` this way is defining inputs with default values.

    """
    def __init__(
        self,
        out,  # type: Any
        id=None,  # type: Any
        label=None,  # type: Any
        doc=None,  # type: Any
        position=None,  # type: Any
        tool_id=None,  # type: Any
        tool_shed_repository=None,  # type: Any
        tool_version=None,  # type: Any
        errors=None,  # type: Any
        uuid=None,  # type: Any
        in_=None,  # type: Any
        state=None,  # type: Any
        tool_state=None,  # type: Any
        type=None,  # type: Any
        run=None,  # type: Any
        runtime_inputs=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.label = label
        self.doc = doc
        self.position = position
        self.tool_id = tool_id
        self.tool_shed_repository = tool_shed_repository
        self.tool_version = tool_version
        self.errors = errors
        self.uuid = uuid
        self.in_ = in_
        self.out = out
        self.state = state
        self.tool_state = tool_state
        self.type = type
        self.run = run
        self.runtime_inputs = runtime_inputs

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowStep

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        baseuri = id
        if 'label' in _doc:
            try:
                label = load_field(_doc.get(
                    'label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, 'label', str),
                        [e]
                    )
                )
        else:
            label = None
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        if 'position' in _doc:
            try:
                position = load_field(_doc.get(
                    'position'), union_of_None_type_or_StepPositionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `position` field is not valid because:",
                        SourceLine(_doc, 'position', str),
                        [e]
                    )
                )
        else:
            position = None
        if 'tool_id' in _doc:
            try:
                tool_id = load_field(_doc.get(
                    'tool_id'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_id` field is not valid because:",
                        SourceLine(_doc, 'tool_id', str),
                        [e]
                    )
                )
        else:
            tool_id = None
        if 'tool_shed_repository' in _doc:
            try:
                tool_shed_repository = load_field(_doc.get(
                    'tool_shed_repository'), union_of_None_type_or_ToolShedRepositoryLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_shed_repository` field is not valid because:",
                        SourceLine(_doc, 'tool_shed_repository', str),
                        [e]
                    )
                )
        else:
            tool_shed_repository = None
        if 'tool_version' in _doc:
            try:
                tool_version = load_field(_doc.get(
                    'tool_version'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_version` field is not valid because:",
                        SourceLine(_doc, 'tool_version', str),
                        [e]
                    )
                )
        else:
            tool_version = None
        if 'errors' in _doc:
            try:
                errors = load_field(_doc.get(
                    'errors'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `errors` field is not valid because:",
                        SourceLine(_doc, 'errors', str),
                        [e]
                    )
                )
        else:
            errors = None
        if 'uuid' in _doc:
            try:
                uuid = load_field(_doc.get(
                    'uuid'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `uuid` field is not valid because:",
                        SourceLine(_doc, 'uuid', str),
                        [e]
                    )
                )
        else:
            uuid = None
        if 'in' in _doc:
            try:
                in_ = load_field(_doc.get(
                    'in'), idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `in` field is not valid because:",
                        SourceLine(_doc, 'in', str),
                        [e]
                    )
                )
        else:
            in_ = None
        if 'out' in _doc:
            try:
                out = load_field(_doc.get(
                    'out'), idmap_out_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `out` field is not valid because:",
                        SourceLine(_doc, 'out', str),
                        [e]
                    )
                )
        else:
            out = None
        if 'state' in _doc:
            try:
                state = load_field(_doc.get(
                    'state'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `state` field is not valid because:",
                        SourceLine(_doc, 'state', str),
                        [e]
                    )
                )
        else:
            state = None
        if 'tool_state' in _doc:
            try:
                tool_state = load_field(_doc.get(
                    'tool_state'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_state` field is not valid because:",
                        SourceLine(_doc, 'tool_state', str),
                        [e]
                    )
                )
        else:
            tool_state = None
        if 'type' in _doc:
            try:
                type = load_field(_doc.get(
                    'type'), typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `type` field is not valid because:",
                        SourceLine(_doc, 'type', str),
                        [e]
                    )
                )
        else:
            type = None
        if 'run' in _doc:
            try:
                run = load_field(_doc.get(
                    'run'), uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `run` field is not valid because:",
                        SourceLine(_doc, 'run', str),
                        [e]
                    )
                )
        else:
            run = None
        if 'runtime_inputs' in _doc:
            try:
                runtime_inputs = load_field(_doc.get(
                    'runtime_inputs'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `runtime_inputs` field is not valid because:",
                        SourceLine(_doc, 'runtime_inputs', str),
                        [e]
                    )
                )
        else:
            runtime_inputs = None

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `id`, `label`, `doc`, `position`, `tool_id`, `tool_shed_repository`, `tool_version`, `errors`, `uuid`, `in`, `out`, `state`, `tool_state`, `type`, `run`, `runtime_inputs`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowStep'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(id=id, label=label, doc=doc, position=position, tool_id=tool_id, tool_shed_repository=tool_shed_repository, tool_version=tool_version, errors=errors, uuid=uuid, in_=in_, out=out, state=state, tool_state=tool_state, type=type, run=run, runtime_inputs=runtime_inputs, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.label is not None:
            r['label'] = save(
                self.label,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.position is not None:
            r['position'] = save(
                self.position,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.tool_id is not None:
            r['tool_id'] = save(
                self.tool_id,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.tool_shed_repository is not None:
            r['tool_shed_repository'] = save(
                self.tool_shed_repository,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.tool_version is not None:
            r['tool_version'] = save(
                self.tool_version,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.errors is not None:
            r['errors'] = save(
                self.errors,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.uuid is not None:
            r['uuid'] = save(
                self.uuid,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.in_ is not None:
            r['in'] = save(
                self.in_,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.out is not None:
            r['out'] = save(
                self.out,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.state is not None:
            r['state'] = save(
                self.state,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.tool_state is not None:
            r['tool_state'] = save(
                self.tool_state,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.type is not None:
            r['type'] = save(
                self.type,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.run is not None:
            u = save_relative_uri(
                self.run,
                self.id,
                False,
                None,
                relative_uris)
            if u:
                r['run'] = u

        if self.runtime_inputs is not None:
            r['runtime_inputs'] = save(
                self.runtime_inputs,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # `top` marks the document root; only the root emits $namespaces and $schemas
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['id', 'label', 'doc', 'position', 'tool_id', 'tool_shed_repository', 'tool_version', 'errors', 'uuid', 'in', 'out', 'state', 'tool_state', 'type', 'run', 'runtime_inputs'])
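

# Illustrative sketch (not part of the generated schema): a hand-built
# WorkflowStep using the `state` style described in the class docstring, i.e.
# a typed, possibly nested map of parameter values, instead of the flat,
# JSON-dumped `tool_state` form that Galaxy exports. The tool id and parameter
# names are hypothetical.
def _example_workflow_step_with_state():
    step = WorkflowStep(
        out=[],
        id="cat_step",
        tool_id="cat1",
        state={
            # repeats can be plain arrays, nested sections plain maps
            "queries": [
                {"input2": "a_dataset"},
            ],
        },
    )
    # The equivalent `tool_state` would carry the same values as a flat dict of
    # JSON-dumped strings; it is normally only seen in workflows exported from Galaxy.
    return step.save()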


class Sink(Savable):
    pass


class WorkflowStepInput(Identified, Sink, Labeled):
    """
TODO:

    """
    def __init__(
        self,
        id=None,  # type: Any
        source=None,  # type: Any
        label=None,  # type: Any
        default=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.source = source
        self.label = label
        self.default = default

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowStepInput

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        baseuri = id
        if 'source' in _doc:
            try:
                source = load_field(_doc.get(
                    'source'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `source` field is not valid because:",
                        SourceLine(_doc, 'source', str),
                        [e]
                    )
                )
        else:
            source = None
        if 'label' in _doc:
            try:
                label = load_field(_doc.get(
                    'label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, 'label', str),
                        [e]
                    )
                )
        else:
            label = None
        if 'default' in _doc:
            try:
                default = load_field(_doc.get(
                    'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `default` field is not valid because:",
                        SourceLine(_doc, 'default', str),
                        [e]
                    )
                )
        else:
            default = None

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `id`, `source`, `label`, `default`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(id=id, source=source, label=label, default=default, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.source is not None:
            u = save_relative_uri(
                self.source,
                self.id,
                False,
                2,
                relative_uris)
            if u:
                r['source'] = u

        if self.label is not None:
            r['label'] = save(
                self.label,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.default is not None:
            r['default'] = save(
                self.default,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # `top` marks the document root; only the root emits $namespaces and $schemas
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['id', 'source', 'label', 'default'])
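

# Illustrative sketch (not part of the generated schema): a WorkflowStepInput
# that connects a step's "input1" to the output "out_file1" of a hypothetical
# upstream step and supplies a default value. The exact serialized form of
# `source` depends on the base URL in effect when save() is called.
def _example_workflow_step_input():
    step_input = WorkflowStepInput(
        id="input1",
        source="first_cat/out_file1",
        default=10,
    )
    return step_input.save()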


class Report(Savable):
    """
Definition of an invocation report for this workflow. Currently the only
field is 'markdown'.

    """
    def __init__(
        self,
        markdown,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.markdown = markdown

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Report

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            markdown = load_field(_doc.get(
                'markdown'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `markdown` field is not valid because:",
                    SourceLine(_doc, 'markdown', str),
                    [e]
                )
            )

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `markdown`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'Report'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(markdown=markdown, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.markdown is not None:
            r['markdown'] = save(
                self.markdown,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris)

        # `top` marks the document root; only the root emits $namespaces and $schemas
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['markdown'])
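

# Illustrative sketch (not part of the generated schema): a Report holds a
# single Markdown string describing the workflow invocation report. The
# Markdown content below is a placeholder.
def _example_report():
    report = Report(markdown="# Workflow Report\n\nSummary text goes here.\n")
    return report.save()  # -> {'markdown': '# Workflow Report\n\n...'}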


class WorkflowStepOutput(Identified):
    """
Associate an output parameter of the underlying process with a workflow
parameter.  The workflow parameter (given in the `id` field) may be used
as a `source` to connect with input parameters of other workflow steps, or
with an output parameter of the process.

A unique identifier for this workflow output parameter.  This is
the identifier to use in the `source` field of `WorkflowStepInput`
to connect the output value to downstream parameters.

    """
    def __init__(
        self,
        id=None,  # type: Any
        add_tags=None,  # type: Any
        change_datatype=None,  # type: Any
        delete_intermediate_datasets=None,  # type: Any
        hide=None,  # type: Any
        remove_tags=None,  # type: Any
        rename=None,  # type: Any
        set_columns=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.add_tags = add_tags
        self.change_datatype = change_datatype
        self.delete_intermediate_datasets = delete_intermediate_datasets
        self.hide = hide
        self.remove_tags = remove_tags
        self.rename = rename
        self.set_columns = set_columns

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> WorkflowStepOutput

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        baseuri = id
        if 'add_tags' in _doc:
            try:
                add_tags = load_field(_doc.get(
                    'add_tags'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `add_tags` field is not valid because:",
                        SourceLine(_doc, 'add_tags', str),
                        [e]
                    )
                )
        else:
            add_tags = None
        if 'change_datatype' in _doc:
            try:
                change_datatype = load_field(_doc.get(
                    'change_datatype'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `change_datatype` field is not valid because:",
                        SourceLine(_doc, 'change_datatype', str),
                        [e]
                    )
                )
        else:
            change_datatype = None
        if 'delete_intermediate_datasets' in _doc:
            try:
                delete_intermediate_datasets = load_field(_doc.get(
                    'delete_intermediate_datasets'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `delete_intermediate_datasets` field is not valid because:",
                        SourceLine(_doc, 'delete_intermediate_datasets', str),
                        [e]
                    )
                )
        else:
            delete_intermediate_datasets = None
        if 'hide' in _doc:
            try:
                hide = load_field(_doc.get(
                    'hide'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `hide` field is not valid because:",
                        SourceLine(_doc, 'hide', str),
                        [e]
                    )
                )
        else:
            hide = None
        if 'remove_tags' in _doc:
            try:
                remove_tags = load_field(_doc.get(
                    'remove_tags'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `remove_tags` field is not valid because:",
                        SourceLine(_doc, 'remove_tags', str),
                        [e]
                    )
                )
        else:
            remove_tags = None
        if 'rename' in _doc:
            try:
                rename = load_field(_doc.get(
                    'rename'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `rename` field is not valid because:",
                        SourceLine(_doc, 'rename', str),
                        [e]
                    )
                )
        else:
            rename = None
        if 'set_columns' in _doc:
            try:
                set_columns = load_field(_doc.get(
                    'set_columns'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `set_columns` field is not valid because:",
                        SourceLine(_doc, 'set_columns', str),
                        [e]
                    )
                )
        else:
            set_columns = None

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `id`, `add_tags`, `change_datatype`, `delete_intermediate_datasets`, `hide`, `remove_tags`, `rename`, `set_columns`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(id=id, add_tags=add_tags, change_datatype=change_datatype, delete_intermediate_datasets=delete_intermediate_datasets, hide=hide, remove_tags=remove_tags, rename=rename, set_columns=set_columns, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.add_tags is not None:
            r['add_tags'] = save(
                self.add_tags,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.change_datatype is not None:
            r['change_datatype'] = save(
                self.change_datatype,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.delete_intermediate_datasets is not None:
            r['delete_intermediate_datasets'] = save(
                self.delete_intermediate_datasets,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.hide is not None:
            r['hide'] = save(
                self.hide,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.remove_tags is not None:
            r['remove_tags'] = save(
                self.remove_tags,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.rename is not None:
            r['rename'] = save(
                self.rename,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.set_columns is not None:
            r['set_columns'] = save(
                self.set_columns,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # `top` marks the document root; only the root emits $namespaces and $schemas
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['id', 'add_tags', 'change_datatype', 'delete_intermediate_datasets', 'hide', 'remove_tags', 'rename', 'set_columns'])
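

# Illustrative sketch (not part of the generated schema): a WorkflowStepOutput
# that hides the underlying dataset, renames it, and adds a name tag. The
# output identifier, new name, and tag are hypothetical.
def _example_workflow_step_output():
    out = WorkflowStepOutput(
        id="out_file1",
        hide=True,
        rename="filtered data",
        add_tags=["#analysis"],
    )
    return out.save()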


class GalaxyWorkflow(Process, HasUUID):
    """
A Galaxy workflow description. This record corresponds to the description of a workflow that should be executable
on a Galaxy server that includes the contained tool definitions.

The workflows API or the user interface of Galaxy instances that are of version 19.09 or newer should be able to
import a document defining this record.

## A note about the `label` field.

This is the name of the workflow in the Galaxy user interface, and the primary way
users will identify the workflow. For legacy support the attribute may also be called
'name'; Galaxy will consume such a workflow document and treat the attribute correctly.
However, to validate against this workflow definition schema the attribute should be
called `label`.

    """
    def __init__(
        self,
        inputs,  # type: Any
        outputs,  # type: Any
        steps,  # type: Any
        id=None,  # type: Any
        label=None,  # type: Any
        doc=None,  # type: Any
        uuid=None,  # type: Any
        report=None,  # type: Any
        extension_fields=None,  # type: Optional[Dict[str, Any]]
        loadingOptions=None  # type: Optional[LoadingOptions]
    ):  # type: (...) -> None

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = yaml.comments.CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.label = label
        self.doc = doc
        self.inputs = inputs
        self.outputs = outputs
        self.uuid = uuid
        self.class_ = "GalaxyWorkflow"
        self.steps = steps
        self.report = report

    @classmethod
    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> GalaxyWorkflow

        _doc = copy.copy(doc)
        if hasattr(doc, 'lc'):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get('class') != 'GalaxyWorkflow':
            raise ValidationException("Not a GalaxyWorkflow")

        if 'id' in _doc:
            try:
                id = load_field(_doc.get(
                    'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, 'id', str),
                        [e]
                    )
                )
        else:
            id = None

        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        baseuri = id
        if 'label' in _doc:
            try:
                label = load_field(_doc.get(
                    'label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, 'label', str),
                        [e]
                    )
                )
        else:
            label = None
        if 'doc' in _doc:
            try:
                doc = load_field(_doc.get(
                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, 'doc', str),
                        [e]
                    )
                )
        else:
            doc = None
        try:
            inputs = load_field(_doc.get(
                'inputs'), idmap_inputs_array_of_WorkflowInputParameterLoader, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `inputs` field is not valid because:",
                    SourceLine(_doc, 'inputs', str),
                    [e]
                )
            )
        try:
            outputs = load_field(_doc.get(
                'outputs'), idmap_outputs_array_of_WorkflowOutputParameterLoader, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `outputs` field is not valid because:",
                    SourceLine(_doc, 'outputs', str),
                    [e]
                )
            )
        if 'uuid' in _doc:
            try:
                uuid = load_field(_doc.get(
                    'uuid'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `uuid` field is not valid because:",
                        SourceLine(_doc, 'uuid', str),
                        [e]
                    )
                )
        else:
            uuid = None
        try:
            steps = load_field(_doc.get(
                'steps'), idmap_steps_union_of_array_of_WorkflowStepLoader, baseuri, loadingOptions)
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `steps` field is not valid because:",
                    SourceLine(_doc, 'steps', str),
                    [e]
                )
            )
        if 'report' in _doc:
            try:
                report = load_field(_doc.get(
                    'report'), union_of_None_type_or_ReportLoader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `report` field is not valid because:",
                        SourceLine(_doc, 'report', str),
                        [e]
                    )
                )
        else:
            report = None

        extension_fields = yaml.comments.CommentedMap()
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(k,
                                    "",
                                    loadingOptions,
                                    scoped_id=False,
                                    vocab_term=False)
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `%s`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `uuid`, `class`, `steps`, `report`" % (k),
                            SourceLine(_doc, k, str)
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'GalaxyWorkflow'", None, _errors__)
        loadingOptions = copy.deepcopy(loadingOptions)
        loadingOptions.original_doc = _doc
        return cls(id=id, label=label, doc=doc, inputs=inputs, outputs=outputs, uuid=uuid, steps=steps, report=report, extension_fields=extension_fields, loadingOptions=loadingOptions)

    def save(self, top=False, base_url="", relative_uris=True):
        # type: (bool, str, bool) -> Dict[str, Any]
        r = yaml.comments.CommentedMap()  # type: Dict[str, Any]
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]

        r['class'] = 'GalaxyWorkflow'

        if self.id is not None:
            u = save_relative_uri(
                self.id,
                base_url,
                True,
                None,
                relative_uris)
            if u:
                r['id'] = u

        if self.label is not None:
            r['label'] = save(
                self.label,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.doc is not None:
            r['doc'] = save(
                self.doc,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.inputs is not None:
            r['inputs'] = save(
                self.inputs,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.outputs is not None:
            r['outputs'] = save(
                self.outputs,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.uuid is not None:
            r['uuid'] = save(
                self.uuid,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.steps is not None:
            r['steps'] = save(
                self.steps,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        if self.report is not None:
            r['report'] = save(
                self.report,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris)

        # `top` marks the document root; only the root emits $namespaces and $schemas
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(['id', 'label', 'doc', 'inputs', 'outputs', 'uuid', 'class', 'steps', 'report'])
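

# Illustrative sketch (not part of the generated schema): assembling a minimal
# GalaxyWorkflow from the record classes above and serializing it with
# top=True, which also emits $namespaces/$schemas when they are present on the
# loading options. The ids, label, and tool id are hypothetical; a real
# workflow would normally be loaded from a document through the generated
# loaders rather than built by hand.
def _example_galaxy_workflow():
    step = WorkflowStep(out=[], id="first_step", tool_id="cat1")
    workflow = GalaxyWorkflow(
        id="#main",
        label="My workflow",
        inputs=[],
        outputs=[],
        steps=[step],
    )
    return workflow.save(top=True)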


_vocab = {
    "Any": "https://w3id.org/cwl/salad#Any",
    "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema",
    "Documented": "https://w3id.org/cwl/salad#Documented",
    "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema",
    "File": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File",
    "GalaxyType": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType",
    "GalaxyWorkflow": "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow",
    "HasStepErrors": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepErrors",
    "HasStepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition",
    "HasUUID": "https://galaxyproject.org/gxformat2/gxformat2common#HasUUID",
    "Identified": "https://w3id.org/cwl/cwl#Identified",
    "InputParameter": "https://w3id.org/cwl/cwl#InputParameter",
    "Labeled": "https://w3id.org/cwl/cwl#Labeled",
    "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter",
    "Parameter": "https://w3id.org/cwl/cwl#Parameter",
    "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType",
    "Process": "https://w3id.org/cwl/cwl#Process",
    "RecordField": "https://w3id.org/cwl/salad#RecordField",
    "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema",
    "ReferencesTool": "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool",
    "Report": "https://galaxyproject.org/gxformat2/v19_09#Report",
    "Sink": "https://galaxyproject.org/gxformat2/v19_09#Sink",
    "StepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition",
    "ToolShedRepository": "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository",
    "WorkflowInputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter",
    "WorkflowOutputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter",
    "WorkflowStep": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep",
    "WorkflowStepInput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput",
    "WorkflowStepOutput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput",
    "WorkflowStepType": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType",
    "array": "https://w3id.org/cwl/salad#array",
    "boolean": "http://www.w3.org/2001/XMLSchema#boolean",
    "collection": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection",
    "data": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data",
    "double": "http://www.w3.org/2001/XMLSchema#double",
    "enum": "https://w3id.org/cwl/salad#enum",
    "float": "http://www.w3.org/2001/XMLSchema#float",
    "int": "http://www.w3.org/2001/XMLSchema#int",
    "integer": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/integer",
    "long": "http://www.w3.org/2001/XMLSchema#long",
    "null": "https://w3id.org/cwl/salad#null",
    "pause": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause",
    "record": "https://w3id.org/cwl/salad#record",
    "string": "http://www.w3.org/2001/XMLSchema#string",
    "subworkflow": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow",
    "text": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/text",
    "tool": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool",
}
_rvocab = {
    "https://w3id.org/cwl/salad#Any": "Any",
    "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema",
    "https://w3id.org/cwl/salad#Documented": "Documented",
    "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File": "File",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType": "GalaxyType",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow": "GalaxyWorkflow",
    "https://galaxyproject.org/gxformat2/gxformat2common#HasStepErrors": "HasStepErrors",
    "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition": "HasStepPosition",
    "https://galaxyproject.org/gxformat2/gxformat2common#HasUUID": "HasUUID",
    "https://w3id.org/cwl/cwl#Identified": "Identified",
    "https://w3id.org/cwl/cwl#InputParameter": "InputParameter",
    "https://w3id.org/cwl/cwl#Labeled": "Labeled",
    "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter",
    "https://w3id.org/cwl/cwl#Parameter": "Parameter",
    "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType",
    "https://w3id.org/cwl/cwl#Process": "Process",
    "https://w3id.org/cwl/salad#RecordField": "RecordField",
    "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema",
    "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool": "ReferencesTool",
    "https://galaxyproject.org/gxformat2/v19_09#Report": "Report",
    "https://galaxyproject.org/gxformat2/v19_09#Sink": "Sink",
    "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition": "StepPosition",
    "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository": "ToolShedRepository",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter": "WorkflowInputParameter",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter": "WorkflowOutputParameter",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep": "WorkflowStep",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput": "WorkflowStepInput",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput": "WorkflowStepOutput",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType": "WorkflowStepType",
    "https://w3id.org/cwl/salad#array": "array",
    "http://www.w3.org/2001/XMLSchema#boolean": "boolean",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection": "collection",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data": "data",
    "http://www.w3.org/2001/XMLSchema#double": "double",
    "https://w3id.org/cwl/salad#enum": "enum",
    "http://www.w3.org/2001/XMLSchema#float": "float",
    "http://www.w3.org/2001/XMLSchema#int": "int",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/integer": "integer",
    "http://www.w3.org/2001/XMLSchema#long": "long",
    "https://w3id.org/cwl/salad#null": "null",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause": "pause",
    "https://w3id.org/cwl/salad#record": "record",
    "http://www.w3.org/2001/XMLSchema#string": "string",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow": "subworkflow",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/text": "text",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool": "tool",
}
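
# The loader objects below are composed from the helper classes defined earlier
# in this module (_PrimitiveLoader, _EnumLoader, _RecordLoader, _ArrayLoader,
# _UnionLoader, _IdMapLoader, _TypeDSLLoader, _URILoader); each assignment
# builds the validator/normalizer for one field type of the gxformat2 v19_09
# schema.  As an illustrative sketch (assuming the usual schema-salad idmap
# semantics, not something stated in this file), an _IdMapLoader constructed
# with ('id', 'source') is expected to rewrite the mapping form
#
#     in:
#       input1: some_step/output
#
# into the equivalent list form
#
#     in:
#       - id: input1
#         source: some_step/output
#
# before handing it to the wrapped loader.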

strtype = _PrimitiveLoader(str)
inttype = _PrimitiveLoader(int)
floattype = _PrimitiveLoader(float)
booltype = _PrimitiveLoader(bool)
None_type = _PrimitiveLoader(type(None))
Any_type = _AnyLoader()
DocumentedLoader = _RecordLoader(Documented)
PrimitiveTypeLoader = _EnumLoader(("null", "boolean", "int", "long", "float", "double", "string",))
AnyLoader = _EnumLoader(("Any",))
RecordFieldLoader = _RecordLoader(RecordField)
RecordSchemaLoader = _RecordLoader(RecordSchema)
EnumSchemaLoader = _RecordLoader(EnumSchema)
ArraySchemaLoader = _RecordLoader(ArraySchema)
LabeledLoader = _RecordLoader(Labeled)
IdentifiedLoader = _RecordLoader(Identified)
ParameterLoader = _RecordLoader(Parameter)
InputParameterLoader = _RecordLoader(InputParameter)
OutputParameterLoader = _RecordLoader(OutputParameter)
ProcessLoader = _RecordLoader(Process)
HasUUIDLoader = _RecordLoader(HasUUID)
HasStepErrorsLoader = _RecordLoader(HasStepErrors)
HasStepPositionLoader = _RecordLoader(HasStepPosition)
StepPositionLoader = _RecordLoader(StepPosition)
ReferencesToolLoader = _RecordLoader(ReferencesTool)
ToolShedRepositoryLoader = _RecordLoader(ToolShedRepository)
GalaxyTypeLoader = _EnumLoader(("integer", "text", "File", "data", "collection",))
WorkflowStepTypeLoader = _EnumLoader(("tool", "subworkflow", "pause",))
WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter)
WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter)
WorkflowStepLoader = _RecordLoader(WorkflowStep)
SinkLoader = _RecordLoader(Sink)
WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput)
ReportLoader = _RecordLoader(Report)
WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput)
GalaxyWorkflowLoader = _RecordLoader(GalaxyWorkflow)
array_of_strtype = _ArrayLoader(strtype)
union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader((None_type, strtype, array_of_strtype,))
uri_strtype_True_False_None = _URILoader(strtype, True, False, None)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype,))
array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,))
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, 2)
array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader)
union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader((None_type, array_of_RecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_RecordFieldLoader, 'name', 'type')
enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader(("record",))
typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2)
uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None)
enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader(("enum",))
typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader(enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2)
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, False, True, 2)
enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader(("array",))
typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2)
union_of_None_type_or_strtype = _UnionLoader((None_type, strtype,))
uri_union_of_None_type_or_strtype_True_False_None = _URILoader(union_of_None_type_or_strtype, True, False, None)
union_of_None_type_or_Any_type = _UnionLoader((None_type, Any_type,))
union_of_WorkflowInputParameterLoader = _UnionLoader((WorkflowInputParameterLoader,))
array_of_union_of_WorkflowInputParameterLoader = _ArrayLoader(union_of_WorkflowInputParameterLoader)
idmap_inputs_array_of_union_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowInputParameterLoader, 'id', 'type')
union_of_WorkflowOutputParameterLoader = _UnionLoader((WorkflowOutputParameterLoader,))
array_of_union_of_WorkflowOutputParameterLoader = _ArrayLoader(union_of_WorkflowOutputParameterLoader)
idmap_outputs_array_of_union_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowOutputParameterLoader, 'id', 'type')
union_of_None_type_or_StepPositionLoader = _UnionLoader((None_type, StepPositionLoader,))
union_of_floattype_or_inttype = _UnionLoader((floattype, inttype,))
union_of_None_type_or_ToolShedRepositoryLoader = _UnionLoader((None_type, ToolShedRepositoryLoader,))
union_of_GalaxyTypeLoader_or_strtype_or_None_type = _UnionLoader((GalaxyTypeLoader, strtype, None_type,))
typedsl_union_of_GalaxyTypeLoader_or_strtype_or_None_type_2 = _TypeDSLLoader(union_of_GalaxyTypeLoader_or_strtype_or_None_type, 2)
union_of_None_type_or_GalaxyTypeLoader = _UnionLoader((None_type, GalaxyTypeLoader,))
typedsl_union_of_None_type_or_GalaxyTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_GalaxyTypeLoader, 2)
array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader)
union_of_None_type_or_array_of_WorkflowStepInputLoader = _UnionLoader((None_type, array_of_WorkflowStepInputLoader,))
idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader = _IdMapLoader(union_of_None_type_or_array_of_WorkflowStepInputLoader, 'id', 'source')
union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader((strtype, WorkflowStepOutputLoader,))
array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader(union_of_strtype_or_WorkflowStepOutputLoader)
union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = _UnionLoader((array_of_union_of_strtype_or_WorkflowStepOutputLoader, None_type,))
idmap_out_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = _IdMapLoader(union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type, 'id', 'source')
union_of_None_type_or_WorkflowStepTypeLoader = _UnionLoader((None_type, WorkflowStepTypeLoader,))
typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_WorkflowStepTypeLoader, 2)
union_of_None_type_or_GalaxyWorkflowLoader = _UnionLoader((None_type, GalaxyWorkflowLoader,))
uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None = _URILoader(union_of_None_type_or_GalaxyWorkflowLoader, False, False, None)
union_of_None_type_or_array_of_strtype = _UnionLoader((None_type, array_of_strtype,))
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2)
union_of_None_type_or_booltype = _UnionLoader((None_type, booltype,))
array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader)
idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_WorkflowInputParameterLoader, 'id', 'type')
array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader)
idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_WorkflowOutputParameterLoader, 'id', 'type')
uri_strtype_False_True_None = _URILoader(strtype, False, True, None)
array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader)
union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,))
idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader(union_of_array_of_WorkflowStepLoader, 'id', 'None')
union_of_None_type_or_ReportLoader = _UnionLoader((None_type, ReportLoader,))
union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader,))
array_of_union_of_GalaxyWorkflowLoader = _ArrayLoader(union_of_GalaxyWorkflowLoader)
union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader, array_of_union_of_GalaxyWorkflowLoader,))


def load_document(doc, baseuri=None, loadingOptions=None):
    # type: (Any, Optional[str], Optional[LoadingOptions]) -> Any
    """Load a GalaxyWorkflow (or a list of them) from an already-parsed document."""
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions()
    return _document_load(union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader, doc, baseuri, loadingOptions)
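

# Illustrative usage of load_document (a sketch; "workflow.gxwf.yml" below is a
# hypothetical path, not something shipped with this module):
#
#     with open("workflow.gxwf.yml") as handle:
#         parsed = yaml.main.round_trip_load(handle.read(), preserve_quotes=True)
#     wf = load_document(parsed, baseuri=file_uri(os.path.abspath("workflow.gxwf.yml")))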


def load_document_by_string(string, uri, loadingOptions=None):
    # type: (Any, str, Optional[LoadingOptions]) -> Any
    """Parse a YAML/JSON string and load it as a GalaxyWorkflow rooted at ``uri``."""
    result = yaml.main.round_trip_load(string, preserve_quotes=True)
    add_lc_filename(result, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)
    loadingOptions.idx[uri] = result

    return _document_load(union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader, result, uri, loadingOptions)
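

# Illustrative usage of load_document_by_string (a sketch; the path is
# hypothetical):
#
#     path = "workflow.gxwf.yml"
#     with open(path) as handle:
#         wf = load_document_by_string(handle.read(), file_uri(os.path.abspath(path)))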