Last active
August 6, 2018 17:52
-
-
Save mr-c/c37df8e7c71970656a5c2ebc18f05b95 to your computer and use it in GitHub Desktop.
cwl-citation-extractor-proof-of-concept
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
import sys | |
import CommonWorkflowLanguage as cwl | |
def main():
    """Load the example EBI Metagenomics workflow and walk it for software citations."""
    workflow_url = ("https://raw.githubusercontent.com/EBI-Metagenomics/"
                    "ebi-metagenomics-cwl/master/workflows/emg-pipeline-v3-paired.cwl")
    traverse(cwl.load_document(workflow_url))
def extract_software_packages(process: cwl.Process):
    """Print the process id and details of every SoftwareRequirement it declares."""
    for requirement in extract_software_reqs(process):
        print(process.id)
        process_software_requirement(requirement)
def extract_software_reqs(process: cwl.Process):
    """Yield SoftwareRequirement objects from a process, requirements first, then hints.

    Hints arrive as plain mappings and are parsed into typed objects on the fly.
    """
    for candidate in (process.requirements or []):
        if isinstance(candidate, cwl.SoftwareRequirement):
            yield candidate
    for hint in (process.hints or []):
        if hint['class'] == "SoftwareRequirement":
            yield cwl.load_field(hint, cwl.SoftwareRequirementLoader,
                                 process.id, process.loadingOptions)
def process_software_requirement(req: cwl.SoftwareRequirement):
    """Print name, version, and specs of each package in a SoftwareRequirement.

    BUGFIX: the parameter was annotated as ``cwl.SoftwarePackage``, but the body
    iterates ``req.packages`` — an attribute of ``SoftwareRequirement`` — and the
    caller passes the requirement object, so the annotation was wrong.
    """
    for package in req.packages:
        print("Package: {}, version: {}, specs: {}".format(
            package.package, package.version, package.specs))
def traverse(process: cwl.Process):
    """Extract packages from *process*; recurse into steps when it is a Workflow."""
    extract_software_packages(process)
    if not isinstance(process, cwl.Workflow):
        return
    traverse_workflow(process)
def get_process_from_step(step: cwl.WorkflowStep):
    """Return the step's embedded process, loading it first when `run` is a URI string."""
    run = step.run
    return cwl.load_document(run) if isinstance(run, str) else run
def traverse_workflow(workflow: cwl.Workflow):
    """Visit every step of *workflow*: harvest its packages, then recurse into its process."""
    for workflow_step in workflow.steps:
        extract_software_packages(workflow_step)
        traverse(get_process_from_step(workflow_step))
# Script entry point: exit status is main()'s return value (None maps to 0).
if __name__ == "__main__":
    sys.exit(main())
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# | |
# This file was autogenerated using schema-salad-tool --codegen=python | |
# | |
from __future__ import absolute_import | |
import ruamel.yaml | |
from ruamel.yaml.comments import CommentedBase, CommentedMap, CommentedSeq | |
import re | |
import os | |
import traceback | |
from typing import (Any, AnyStr, Callable, cast, Dict, List, Iterable, Tuple, | |
TypeVar, Union, Text) | |
import six | |
lineno_re = re.compile(u"^(.*?:[0-9]+:[0-9]+: )(( *)(.*))") | |
def _add_lc_filename(r, source):  # type: (ruamel.yaml.comments.CommentedBase, AnyStr) -> None
    """Recursively stamp *source* as the ``lc.filename`` of every commented node in *r*."""
    if isinstance(r, ruamel.yaml.comments.CommentedBase):
        r.lc.filename = source
    if isinstance(r, list):
        for child in r:
            _add_lc_filename(child, source)
    elif isinstance(r, dict):
        for child in six.itervalues(r):
            _add_lc_filename(child, source)
def relname(source):  # type: (Text) -> Text
    """Strip a leading file:// scheme and make the path relative to the cwd."""
    prefix = "file://"
    if source.startswith(prefix):
        source = os.path.relpath(source[len(prefix):])
    return source
def add_lc_filename(r, source):  # type: (ruamel.yaml.comments.CommentedBase, Text) -> None
    """Public wrapper: stamp nodes in *r* with the cwd-relative form of *source*."""
    relative = relname(source)
    _add_lc_filename(r, relative)
def reflow(text, maxline, shift=""):  # type: (Text, int, Text) -> Text
    """Word-wrap *text* to *maxline* columns, prefixing continuation lines with *shift*."""
    maxline = max(maxline, 20)  # enforce a sane minimum width
    if len(text) <= maxline:
        return text
    # Prefer the last space before the limit; otherwise the first space after it.
    sp = text.rfind(' ', 0, maxline)
    if sp < 1:
        sp = text.find(' ', sp + 1)
        if sp == -1:
            sp = len(text)
    if sp < len(text):
        return "%s\n%s%s" % (text[:sp], shift, reflow(text[sp + 1:], maxline, shift))
    return text
def indent(v, nolead=False, shift=u" ", bullet=u" "):  # type: (Text, bool, Text, Text) -> Text
    """Indent the lines of *v*, bulleting the first line and shifting the rest.

    Lines carrying a "file:line:col: " prefix (see lineno_re) keep their prefix
    and get the bullet/shift inserted after it.
    """
    if nolead:
        # NOTE(review): the first line and the joined remainder are concatenated
        # without a separating "\n" — looks like a dropped newline; confirm
        # against callers before changing.
        return v.splitlines()[0] + u"\n".join([shift + l for l in v.splitlines()[1:]])
    else:
        def lineno(i, l):  # type: (int, Text) -> Text
            # Re-attach any location prefix; bullet only the first line (i == 0).
            r = lineno_re.match(l)
            if r is not None:
                return r.group(1) + (bullet if i == 0 else shift) + r.group(2)
            else:
                return (bullet if i == 0 else shift) + l
        return u"\n".join([lineno(i, l) for i, l in enumerate(v.splitlines())])
def bullets(textlist, bul):  # type: (List[Text], Text) -> Text
    """Join several texts as a bulleted list; a single text passes through unchanged."""
    if len(textlist) == 1:
        return textlist[0]
    return "\n".join(indent(entry, bullet=bul) for entry in textlist)
def strip_dup_lineno(text, maxline=None):  # type: (Text, int) -> Text
    """Blank out repeated "file:line:col:" prefixes in *text* and reflow long lines."""
    if maxline is None:
        maxline = int(os.environ.get("COLUMNS", "100"))
    pre = None
    msg = []
    maxno = 0
    # First pass: find the widest location prefix so columns can be aligned.
    for l in text.splitlines():
        g = lineno_re.match(l)
        if not g:
            continue
        maxno = max(maxno, len(g.group(1)))
    # Second pass: emit each line; a prefix identical to the previous line's is
    # replaced with padding so the location is printed only once.
    for l in text.splitlines():
        g = lineno_re.match(l)
        if not g:
            msg.append(l)
            continue
        if g.group(1) != pre:
            # New location: keep the prefix, pad to the common column.
            shift = maxno + len(g.group(3))
            g2 = reflow(g.group(2), maxline-shift, " " * shift)
            pre = g.group(1)
            msg.append(pre + " " * (maxno-len(g.group(1))) + g2)
        else:
            # Same location as previous line: substitute whitespace.
            g2 = reflow(g.group(2), maxline-maxno, " " * (maxno+len(g.group(3))))
            msg.append(" " * maxno + g2)
    return "\n".join(msg)
def cmap(d, lc=None, fn=None):  # type: (Union[int, float, str, Text, Dict, List], List[int], Text) -> Union[int, float, str, Text, CommentedMap, CommentedSeq]
    """Recursively convert plain dicts/lists into CommentedMap/CommentedSeq,
    propagating line/column (*lc*) and filename (*fn*) metadata to children."""
    if lc is None:
        lc = [0, 0, 0, 0]
    if fn is None:
        fn = "test"
    if isinstance(d, CommentedMap):
        # Already commented: recurse in place, preferring each key's own position.
        fn = d.lc.filename if hasattr(d.lc, "filename") else fn
        for k,v in six.iteritems(d):
            if k in d.lc.data:
                d[k] = cmap(v, lc=d.lc.data[k], fn=fn)
            else:
                d[k] = cmap(v, lc, fn=fn)
        return d
    if isinstance(d, CommentedSeq):
        # Same as above for sequences; lc.data is keyed by index here.
        fn = d.lc.filename if hasattr(d.lc, "filename") else fn
        for k,v in enumerate(d):
            if k in d.lc.data:
                d[k] = cmap(v, lc=d.lc.data[k], fn=fn)
            else:
                d[k] = cmap(v, lc, fn=fn)
        return d
    if isinstance(d, dict):
        # Plain dict: rebuild as a CommentedMap, synthesizing positions from
        # already-commented values where available.
        cm = CommentedMap()
        for k in sorted(d.keys()):
            v = d[k]
            if isinstance(v, CommentedBase):
                uselc = [v.lc.line, v.lc.col, v.lc.line, v.lc.col]
                vfn = v.lc.filename if hasattr(v.lc, "filename") else fn
            else:
                uselc = lc
                vfn = fn
            cm[k] = cmap(v, lc=uselc, fn=vfn)
            cm.lc.add_kv_line_col(k, uselc)
            cm.lc.filename = fn
        return cm
    if isinstance(d, list):
        # Plain list: rebuild as a CommentedSeq the same way.
        cs = CommentedSeq()
        for k,v in enumerate(d):
            if isinstance(v, CommentedBase):
                uselc = [v.lc.line, v.lc.col, v.lc.line, v.lc.col]
                vfn = v.lc.filename if hasattr(v.lc, "filename") else fn
            else:
                uselc = lc
                vfn = fn
            cs.append(cmap(v, lc=uselc, fn=vfn))
            cs.lc.add_kv_line_col(k, uselc)
            cs.lc.filename = fn
        return cs
    else:
        # Scalars pass through untouched.
        return d
class SourceLine(object):
    """Context manager that rewrites exceptions raised in its body so the
    message lines carry a "file:line:col:" prefix pointing at *item* (a ruamel
    commented node), optionally at *key* within it."""
    def __init__(self, item, key=None, raise_type=six.text_type, include_traceback=False):  # type: (Any, Any, Callable, bool) -> None
        self.item = item                # node (or container) the error refers to
        self.key = key                  # optional key/index within item
        self.raise_type = raise_type    # callable that constructs the re-raised error
        self.include_traceback = include_traceback
    def __enter__(self):  # type: () -> SourceLine
        return self
    def __exit__(self,
                 exc_type,   # type: Any
                 exc_value,  # type: Any
                 tb          # type: Any
                 ):  # -> Any
        if not exc_value:
            return
        # Re-raise the in-flight exception with location information prepended.
        if self.include_traceback:
            raise self.makeError("\n".join(traceback.format_exception(exc_type, exc_value, tb)))
        else:
            raise self.makeError(six.text_type(exc_value))
    def makeLead(self):  # type: () -> Text
        """Return the "file:line:col:" prefix for item (or item[key]); 1-based."""
        if self.key is None or self.item.lc.data is None or self.key not in self.item.lc.data:
            return "%s:%i:%i:" % (self.item.lc.filename if hasattr(self.item.lc, "filename") else "",
                                  (self.item.lc.line or 0)+1,
                                  (self.item.lc.col or 0)+1)
        else:
            return "%s:%i:%i:" % (self.item.lc.filename if hasattr(self.item.lc, "filename") else "",
                                  (self.item.lc.data[self.key][0] or 0)+1,
                                  (self.item.lc.data[self.key][1] or 0)+1)
    def makeError(self, msg):  # type: (Text) -> Any
        """Build an error via raise_type, prefixing each message line that does
        not already carry a location with this item's location."""
        if not isinstance(self.item, ruamel.yaml.comments.CommentedBase):
            return self.raise_type(msg)
        errs = []
        lead = self.makeLead()
        for m in msg.splitlines():
            if bool(lineno_re.match(m)):
                errs.append(m)
            else:
                errs.append("%s %s" % (lead, m))
        return self.raise_type("\n".join(errs))
import six | |
from six.moves import urllib, StringIO | |
import ruamel.yaml as yaml | |
import copy | |
import re | |
from typing import List, Text, Dict, Union, Any, Sequence | |
import uuid | |
class ValidationException(Exception):
    """Raised when a document fails schema validation."""
class Savable(object):
    """Marker base class for generated types that support ``save()``."""
class LoadingOptions(object):
    """Bundle of settings (fetcher, namespaces, schemas, base file URI) used
    while loading documents.

    When *copyfrom* is given, any setting not supplied explicitly is inherited
    from it, and the URL -> document index (``idx``) is shared.
    """
    def __init__(self, fetcher=None, namespaces=None, fileuri=None, copyfrom=None, schemas=None):
        if copyfrom is not None:
            self.idx = copyfrom.idx  # share the URL -> parsed-document cache
            if fetcher is None:
                fetcher = copyfrom.fetcher
            if fileuri is None:
                fileuri = copyfrom.fileuri
            if namespaces is None:
                namespaces = copyfrom.namespaces
            # BUGFIX: this guard previously re-tested `namespaces is None`
            # (copy-paste error), which could never be true at this point, so
            # inherited schemas were silently dropped.
            if schemas is None:
                schemas = copyfrom.schemas
        else:
            self.idx = {}
        if fetcher is None:
            # Build a caching HTTP fetcher; imports are local so the module
            # works without these packages when a fetcher is supplied.
            import os
            import requests
            from cachecontrol.wrapper import CacheControl
            from cachecontrol.caches import FileCache
            from schema_salad.ref_resolver import DefaultFetcher
            if "HOME" in os.environ:
                session = CacheControl(
                    requests.Session(),
                    cache=FileCache(os.path.join(os.environ["HOME"], ".cache", "salad")))
            elif "TMP" in os.environ:
                session = CacheControl(
                    requests.Session(),
                    cache=FileCache(os.path.join(os.environ["TMP"], ".cache", "salad")))
            else:
                # BUGFIX: FileCache takes a single directory path; the parts
                # were previously passed as separate positional arguments.
                session = CacheControl(
                    requests.Session(),
                    cache=FileCache(os.path.join("/tmp", ".cache", "salad")))
            self.fetcher = DefaultFetcher({}, session)
        else:
            self.fetcher = fetcher
        self.fileuri = fileuri
        self.vocab = _vocab      # module-level vocabulary tables from codegen
        self.rvocab = _rvocab
        self.namespaces = namespaces
        self.schemas = schemas
        if namespaces is not None:
            # Extend copies of the vocab tables with the custom namespaces so
            # the shared module-level tables stay untouched.
            self.vocab = self.vocab.copy()
            self.rvocab = self.rvocab.copy()
            for prefix, uri in six.iteritems(namespaces):
                self.vocab[prefix] = uri
                self.rvocab[uri] = prefix
def load_field(val, fieldtype, baseuri, loadingOptions):
    """Resolve any $import/$include directive on *val*, then load it with *fieldtype*."""
    if isinstance(val, dict):
        if "$import" in val:
            url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"])
            return _document_load_by_url(fieldtype, url, loadingOptions)
        if "$include" in val:
            # $include substitutes the raw text of the referenced resource.
            url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"])
            val = loadingOptions.fetcher.fetch_text(url)
    return fieldtype.load(val, baseuri, loadingOptions)
def save(val, top=True, base_url=""):
    """Recursively convert Savable objects (and lists of them) to plain data."""
    if isinstance(val, Savable):
        return val.save(top=top, base_url=base_url)
    if isinstance(val, list):
        return [save(item, top=False, base_url=base_url) for item in val]
    # Scalars and plain mappings pass through unchanged.
    return val
def expand_url(url,                # type: Union[str, Text]
               base_url,           # type: Union[str, Text]
               loadingOptions,     # type: LoadingOptions
               scoped_id=False,    # type: bool
               vocab_term=False,   # type: bool
               scoped_ref=None     # type: int
               ):
    # type: (...) -> Text
    """Resolve *url* against *base_url*, applying vocabulary terms, namespace
    prefixes, and fragment-scoping rules; returns the expanded URI (or the
    short vocab term when *vocab_term* is set and the result is known)."""
    if not isinstance(url, six.string_types):
        return url
    url = Text(url)
    # JSON-LD keywords pass through untouched.
    if url in (u"@id", u"@type"):
        return url
    if vocab_term and url in loadingOptions.vocab:
        return url
    # Expand a "prefix:rest" form using the active vocabulary.
    if bool(loadingOptions.vocab) and u":" in url:
        prefix = url.split(u":")[0]
        if prefix in loadingOptions.vocab:
            url = loadingOptions.vocab[prefix] + url[len(prefix) + 1:]
    split = urllib.parse.urlsplit(url)
    if ((bool(split.scheme) and split.scheme in [u'http', u'https', u'file']) or url.startswith(u"$(")
            or url.startswith(u"${")):
        # Already absolute, or a CWL parameter/expression reference: leave as-is.
        pass
    elif scoped_id and not bool(split.fragment):
        # Identifier declaration: nest it under the base URI's fragment path.
        splitbase = urllib.parse.urlsplit(base_url)
        frg = u""
        if bool(splitbase.fragment):
            frg = splitbase.fragment + u"/" + split.path
        else:
            frg = split.path
        pt = splitbase.path if splitbase.path != '' else "/"
        url = urllib.parse.urlunsplit(
            (splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg))
    elif scoped_ref is not None and not bool(split.fragment):
        # Identifier reference: pop scoped_ref levels off the base fragment,
        # then append the reference.
        splitbase = urllib.parse.urlsplit(base_url)
        sp = splitbase.fragment.split(u"/")
        n = scoped_ref
        while n > 0 and len(sp) > 0:
            sp.pop()
            n -= 1
        sp.append(url)
        url = urllib.parse.urlunsplit((
            splitbase.scheme, splitbase.netloc, splitbase.path, splitbase.query,
            u"/".join(sp)))
    else:
        # Plain relative reference: standard URI join.
        url = loadingOptions.fetcher.urljoin(base_url, url)
    if vocab_term:
        # Map a fully-qualified URI back to its short vocabulary term.
        split = urllib.parse.urlsplit(url)
        if bool(split.scheme):
            if url in loadingOptions.rvocab:
                return loadingOptions.rvocab[url]
            else:
                raise ValidationException("Term '%s' not in vocabulary" % url)
    return url
class _Loader(object):
    """Abstract base class for field loaders; subclasses implement load()."""
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, Text, LoadingOptions, Union[Text, None]) -> Any
        pass
class _AnyLoader(_Loader):
    """Accepts any non-null value unchanged."""
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        if doc is None:
            raise ValidationException("Expected non-null")
        return doc
class _PrimitiveLoader(_Loader):
    """Checks that a value is an instance of the configured primitive type(s)."""
    def __init__(self, tp):
        # type: (Union[type, Sequence[type]]) -> None
        self.tp = tp
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        if isinstance(doc, self.tp):
            return doc
        raise ValidationException("Expected a %s but got %s" % (self.tp, type(doc)))
    def __repr__(self):
        return str(self.tp)
class _ArrayLoader(_Loader):
    # Loads a list, validating each entry with `items`; nested lists that
    # validate are flattened into the result.
    def __init__(self, items):
        # type: (_Loader) -> None
        self.items = items
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        if not isinstance(doc, list):
            raise ValidationException("Expected a list")
        r = []
        errors = []
        for i in range(0, len(doc)):
            try:
                # Union with self allows an entry to itself be a list of items,
                # which is then spliced in via extend().
                lf = load_field(doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions)
                if isinstance(lf, list):
                    r.extend(lf)
                else:
                    r.append(lf)
            except ValidationException as e:
                # Collect per-entry errors (with source positions) and report
                # them all at once below.
                errors.append(SourceLine(doc, i, str).makeError(six.text_type(e)))
        if errors:
            raise ValidationException("\n".join(errors))
        return r
    def __repr__(self):
        return "array<%s>" % self.items
class _EnumLoader(_Loader):
    """Accepts only values drawn from a fixed set of symbols."""
    def __init__(self, symbols):
        # type: (Sequence[Text]) -> None
        self.symbols = symbols
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        if doc not in self.symbols:
            raise ValidationException("Expected one of %s" % (self.symbols,))
        return doc
class _RecordLoader(_Loader):
    """Delegates loading of a mapping to the configured record class constructor."""
    def __init__(self, classtype):
        # type: (type) -> None
        self.classtype = classtype
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        if not isinstance(doc, dict):
            raise ValidationException("Expected a dict")
        return self.classtype(doc, baseuri, loadingOptions, docRoot=docRoot)
    def __repr__(self):
        return str(self.classtype)
class _UnionLoader(_Loader):
    """Tries each alternative loader in turn; raises a combined error if none accept."""
    def __init__(self, alternates):
        # type: (Sequence[_Loader]) -> None
        self.alternates = alternates
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        failures = []
        for alternative in self.alternates:
            try:
                return alternative.load(doc, baseuri, loadingOptions, docRoot=docRoot)
            except ValidationException as e:
                failures.append("tried %s but\n%s" % (alternative, indent(str(e))))
        raise ValidationException(bullets(failures, "- "))
    def __repr__(self):
        return " | ".join(str(a) for a in self.alternates)
class _URILoader(_Loader):
    """Expands string (or list-of-string) URIs before delegating to *inner*."""
    def __init__(self, inner, scoped_id, vocab_term, scoped_ref):
        # type: (_Loader, bool, bool, Union[int, None]) -> None
        self.inner = inner
        self.scoped_id = scoped_id
        self.vocab_term = vocab_term
        self.scoped_ref = scoped_ref
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        def expand(u):
            return expand_url(u, baseuri, loadingOptions,
                              self.scoped_id, self.vocab_term, self.scoped_ref)
        if isinstance(doc, list):
            doc = [expand(entry) for entry in doc]
        if isinstance(doc, six.string_types):
            doc = expand(doc)
        return self.inner.load(doc, baseuri, loadingOptions)
class _TypeDSLLoader(_Loader):
    """Expands the type DSL: "T[]" becomes an array of T, "T?" an optional T."""
    # BUGFIX: raw string avoids the invalid "\[" escape sequence, which is a
    # DeprecationWarning in Python 3.6+ (and a SyntaxError in later versions);
    # the matched pattern is identical.
    typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$")
    def __init__(self, inner, refScope):
        # type: (_Loader, Union[int, None]) -> None
        self.inner = inner
        self.refScope = refScope
    def resolve(self, doc, baseuri, loadingOptions):
        """Rewrite a DSL string into the equivalent explicit type structure."""
        m = self.typeDSLregex.match(doc)
        if m:
            first = expand_url(m.group(1), baseuri, loadingOptions, False, True, self.refScope)
            second = third = None
            if bool(m.group(2)):  # trailing "[]" -> array of the base type
                second = {"type": "array", "items": first}
            if bool(m.group(3)):  # trailing "?" -> union with "null"
                third = [u"null", second or first]
            doc = third or second or first
        return doc
    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        if isinstance(doc, list):
            r = []
            for d in doc:
                if isinstance(d, six.string_types):
                    resolved = self.resolve(d, baseuri, loadingOptions)
                    # Flatten resolved unions into the list, dropping duplicates.
                    if isinstance(resolved, list):
                        for i in resolved:
                            if i not in r:
                                r.append(i)
                    else:
                        if resolved not in r:
                            r.append(resolved)
                else:
                    r.append(d)
            doc = r
        elif isinstance(doc, six.string_types):
            doc = self.resolve(doc, baseuri, loadingOptions)
        return self.inner.load(doc, baseuri, loadingOptions)
class _IdMapLoader(_Loader): | |
def __init__(self, inner, mapSubject, mapPredicate): | |
# type: (_Loader, Text, Union[Text, None]) -> None | |
self.inner = inner | |
self.mapSubject = mapSubject | |
self.mapPredicate = mapPredicate | |
def load(self, doc, baseuri, loadingOptions, docRoot=None): | |
if isinstance(doc, dict): | |
r = [] | |
for k in sorted(doc.keys()): | |
val = doc[k] | |
if isinstance(val, dict): | |
v = copy.copy(val) | |
if hasattr(val, 'lc'): | |
v.lc.data = val.lc.data | |
v.lc.filename = val.lc.filename | |
else: | |
if self.mapPredicate: | |
v = {self.mapPredicate: val} | |
else: | |
raise ValidationException("No mapPredicate") | |
v[self.mapSubject] = k | |
r.append(v) | |
doc = r | |
return self.inner.load(doc, baseuri, loadingOptions) | |
def _document_load(loader, doc, baseuri, loadingOptions):
    """Load a document (string URI, mapping, or list) with *loader*, honouring
    the $namespaces/$schemas/$base/$graph directives."""
    if isinstance(doc, six.string_types):
        # A bare string is a reference to another document.
        return _document_load_by_url(loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions)
    if isinstance(doc, dict):
        if "$namespaces" in doc:
            # Fold custom namespaces into fresh loading options, then drop the key.
            loadingOptions = LoadingOptions(copyfrom=loadingOptions, namespaces=doc["$namespaces"])
            doc = {k: v for k,v in doc.items() if k != "$namespaces"}
        if "$schemas" in doc:
            loadingOptions = LoadingOptions(copyfrom=loadingOptions, schemas=doc["$schemas"])
            doc = {k: v for k,v in doc.items() if k != "$schemas"}
        if "$base" in doc:
            baseuri = doc["$base"]
        if "$graph" in doc:
            # $graph holds a list of documents; no single docRoot applies.
            return loader.load(doc["$graph"], baseuri, loadingOptions)
        else:
            return loader.load(doc, baseuri, loadingOptions, docRoot=baseuri)
    if isinstance(doc, list):
        return loader.load(doc, baseuri, loadingOptions)
    raise ValidationException()
def _document_load_by_url(loader, url, loadingOptions):
    """Fetch *url*, parse it as YAML, and load the result with *loader*.

    Previously fetched documents are served from loadingOptions.idx.
    """
    if url in loadingOptions.idx:
        return _document_load(loader, loadingOptions.idx[url], url, loadingOptions)
    text = loadingOptions.fetcher.fetch_text(url)
    if isinstance(text, bytes):
        textIO = StringIO(text.decode('utf-8'))
    else:
        textIO = StringIO(text)
    textIO.name = url    # type: ignore
    result = yaml.round_trip_load(textIO)
    # Stamp source positions with the document URL for error messages.
    add_lc_filename(result, url)
    loadingOptions.idx[url] = result
    # Re-anchor subsequent relative references to this document's URI.
    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url)
    return _document_load(loader, result, url, loadingOptions)
def file_uri(path, split_frag=False):  # type: (str, bool) -> str
    """Convert a local *path* into a file:// URI, optionally preserving a #fragment."""
    if path.startswith("file://"):
        return path
    frag = ""
    if split_frag:
        pathsp = path.split("#", 2)
        if len(pathsp) == 2:
            frag = "#" + urllib.parse.quote(str(pathsp[1]))
        urlpath = urllib.request.pathname2url(str(pathsp[0]))
    else:
        urlpath = urllib.request.pathname2url(path)
    # pathname2url may already yield a network-path form ("//host/...").
    scheme = "file:" if urlpath.startswith("//") else "file://"
    return "%s%s%s" % (scheme, urlpath, frag)
def prefix_url(url, namespaces):
    """Replace a namespace-URI prefix of *url* with its short "ns:" form."""
    for short, long_uri in namespaces.items():
        if url.startswith(long_uri):
            return short + ":" + url[len(long_uri):]
    return url
def relative_uri(uri, base_url, scoped_id):
    """Express *uri* (or each uri in a list) relative to *base_url* when possible."""
    if isinstance(uri, list):
        return [relative_uri(u, base_url, scoped_id) for u in uri]
    usplit = urllib.parse.urlsplit(uri)
    bsplit = urllib.parse.urlsplit(base_url)
    if usplit.scheme != bsplit.scheme or usplit.netloc != bsplit.netloc:
        return uri  # different origin: keep absolute
    if usplit.path == bsplit.path:
        rel = ""
    else:
        rel = os.path.relpath(usplit.path, os.path.dirname(bsplit.path))
    if usplit.fragment:
        rel = rel + "#" + usplit.fragment
    return rel
class RecordField(Savable):
    """
    A field of a record.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy the document, preserving ruamel line/column metadata so
        # validation errors can point at source locations.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # `name`: identifier URI; falls back to docRoot when absent.
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                raise ValidationException("Missing name")
        # Subsequent fields resolve URIs relative to this field's name.
        baseuri = self.name
        # `doc`: optional human-readable description.
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        # `type`: required; parsed through the generated type-DSL loader.
        try:
            self.type = load_field(doc.get('type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        # Unknown ":"-qualified keys become extension fields (expanded to
        # absolute URIs); any other unknown key is an error.
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `name`, `doc`, `type`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'RecordField'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict, restoring namespace prefixes."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names.
    attrs = frozenset(['name', 'doc', 'type'])
class RecordSchema(Savable):
    """Schema for a record type: an optional list of fields plus type: record."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow copy preserving ruamel line/column info for error reporting.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # `fields`: optional, idmap-style list of RecordField.
        if 'fields' in doc:
            try:
                self.fields = load_field(doc.get('fields'), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'fields', str).makeError("the `fields` field is not valid because:\n"+str(e)))
        else:
            self.fields = None
        # `type`: required, must resolve to the "record" symbol.
        try:
            self.type = load_field(doc.get('type'), typedsl_Record_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        # Unknown ":"-qualified keys become extension fields; others are errors.
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `fields`, `type`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'RecordSchema'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict, restoring namespace prefixes."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.fields is not None:
            r['fields'] = save(self.fields, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names.
    attrs = frozenset(['fields', 'type'])
class EnumSchema(Savable):
    """
    Define an enumerated type.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow copy preserving ruamel line/column info for error reporting.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # `symbols`: required list of identifier URIs naming the enum values.
        try:
            self.symbols = load_field(doc.get('symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'symbols', str).makeError("the `symbols` field is not valid because:\n"+str(e)))
        # `type`: required, must resolve to the "enum" symbol.
        try:
            self.type = load_field(doc.get('type'), typedsl_Enum_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        # Unknown ":"-qualified keys become extension fields; others are errors.
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `symbols`, `type`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'EnumSchema'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; symbols are made relative to base_url."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.symbols is not None:
            r['symbols'] = relative_uri(self.symbols, base_url, True)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names.
    attrs = frozenset(['symbols', 'type'])
class ArraySchema(Savable):
    """Schema for an array type: an item type plus type: array."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow copy preserving ruamel line/column info for error reporting.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # `items`: required; the type (or union of types) of array entries.
        try:
            self.items = load_field(doc.get('items'), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'items', str).makeError("the `items` field is not valid because:\n"+str(e)))
        # `type`: required, must resolve to the "array" symbol.
        try:
            self.type = load_field(doc.get('type'), typedsl_Array_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        # Unknown ":"-qualified keys become extension fields; others are errors.
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `items`, `type`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'ArraySchema'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; items are made relative to base_url."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.items is not None:
            r['items'] = relative_uri(self.items, base_url, False)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names.
    attrs = frozenset(['items', 'type'])
class File(Savable):
    """
    Represents a file (or group of files when `secondaryFiles` is provided) that
    will be accessible by tools using standard POSIX file system call API such as
    open(2) and read(2).

    Files are represented as objects with `class` of `File`. File objects have
    a number of properties that provide metadata about the file.

    The `location` property of a File is a URI that uniquely identifies the
    file. Implementations must support the file:// URI scheme and may support
    other schemes such as http://. The value of `location` may also be a
    relative reference, in which case it must be resolved relative to the URI
    of the document it appears in. Alternately to `location`, implementations
    must also accept the `path` property on File, which must be a filesystem
    path available on the same host as the CWL runner (for inputs) or the
    runtime environment of a command line tool execution (for command line tool
    outputs).

    If no `location` or `path` is specified, a file object must specify
    `contents` with the UTF-8 text content of the file. This is a "file
    literal". File literals do not correspond to external resources, but are
    created on disk with `contents` when needed for executing a tool.
    Where appropriate, expressions can return file literals to define new files
    on a runtime. The maximum size of `contents` is 64 kilobytes.

    The `basename` property defines the filename on disk where the file is
    staged. This may differ from the resource name. If not provided,
    `basename` must be computed from the last path part of `location` and made
    available to expressions.

    The `secondaryFiles` property is a list of File or Directory objects that
    must be staged in the same directory as the primary file. It is an error
    for file names to be duplicated in `secondaryFiles`.

    The `size` property is the size in bytes of the File. It must be computed
    from the resource and made available to expressions. The `checksum` field
    contains a cryptographic hash of the file content for use in verifying file
    contents. Implementations may, at user option, enable or disable
    computation of the `checksum` field for performance or other reasons.
    However, the ability to compute output checksums is required to pass the
    CWL conformance test suite.

    When executing a CommandLineTool, the files and secondary files may be
    staged to an arbitrary directory, but must use the value of `basename` for
    the filename. The `path` property must be file path in the context of the
    tool execution runtime (local to the compute node, or within the executing
    container). All computed properties should be available to expressions.
    File literals also must be staged and `path` must be set.

    When collecting CommandLineTool outputs, `glob` matching returns file paths
    (with the `path` property) and the derived properties. This can all be
    modified by `outputEval`. Alternately, if the file `cwl.output.json` is
    present in the output, `outputBinding` is ignored.

    File objects in the output must provide either a `location` URI or a `path`
    property in the context of the tool execution runtime (local to the compute
    node, or within the executing container).

    When evaluating an ExpressionTool, file objects must be referenced via
    `location` (the expression tool does not have access to files on disk so
    `path` is meaningless) or as file literals. It is legal to return a file
    object with an existing `location` but a different `basename`. The
    `loadContents` field of ExpressionTool inputs behaves the same as on
    CommandLineTool inputs, however it is not meaningful on the outputs.

    An ExpressionTool may forward file references from input to output by using
    the same value for `location`.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize a File node from the mapping `_doc`, resolving URI
        fields against `baseuri`. Raises ValidationException with all
        accumulated field errors if the document is not a valid File."""
        # Shallow-copy so the caller's mapping is not mutated; carry over the
        # ruamel.yaml line/column metadata ('lc') when present so SourceLine
        # can emit positioned error messages.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # per-field error messages, collected then raised together
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'File':
            raise ValidationException("Not a File")
        # Each optional field: parse with its generated loader, recording a
        # positioned error on failure; fields absent from `doc` become None.
        if 'location' in doc:
            try:
                self.location = load_field(doc.get('location'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'location', str).makeError("the `location` field is not valid because:\n"+str(e)))
        else:
            self.location = None
        if 'path' in doc:
            try:
                self.path = load_field(doc.get('path'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'path', str).makeError("the `path` field is not valid because:\n"+str(e)))
        else:
            self.path = None
        if 'basename' in doc:
            try:
                self.basename = load_field(doc.get('basename'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'basename', str).makeError("the `basename` field is not valid because:\n"+str(e)))
        else:
            self.basename = None
        if 'dirname' in doc:
            try:
                self.dirname = load_field(doc.get('dirname'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'dirname', str).makeError("the `dirname` field is not valid because:\n"+str(e)))
        else:
            self.dirname = None
        if 'nameroot' in doc:
            try:
                self.nameroot = load_field(doc.get('nameroot'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'nameroot', str).makeError("the `nameroot` field is not valid because:\n"+str(e)))
        else:
            self.nameroot = None
        if 'nameext' in doc:
            try:
                self.nameext = load_field(doc.get('nameext'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'nameext', str).makeError("the `nameext` field is not valid because:\n"+str(e)))
        else:
            self.nameext = None
        if 'checksum' in doc:
            try:
                self.checksum = load_field(doc.get('checksum'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'checksum', str).makeError("the `checksum` field is not valid because:\n"+str(e)))
        else:
            self.checksum = None
        if 'size' in doc:
            try:
                self.size = load_field(doc.get('size'), union_of_None_type_or_inttype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'size', str).makeError("the `size` field is not valid because:\n"+str(e)))
        else:
            self.size = None
        if 'secondaryFiles' in doc:
            try:
                self.secondaryFiles = load_field(doc.get('secondaryFiles'), union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'secondaryFiles', str).makeError("the `secondaryFiles` field is not valid because:\n"+str(e)))
        else:
            self.secondaryFiles = None
        if 'format' in doc:
            try:
                self.format = load_field(doc.get('format'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'format', str).makeError("the `format` field is not valid because:\n"+str(e)))
        else:
            self.format = None
        if 'contents' in doc:
            try:
                self.contents = load_field(doc.get('contents'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'contents', str).makeError("the `contents` field is not valid because:\n"+str(e)))
        else:
            self.contents = None
        # Unknown keys: namespaced keys (containing ':') are kept as extension
        # fields with their URI expanded; any other unknown key is an error.
        # Note: only the first invalid field is reported (the loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'File'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize this File back to a plain dict. URI-valued fields are
        rewritten relative to `base_url`; when `top` is true the document's
        `$namespaces` mapping is included."""
        r = {}
        # Extension fields are emitted with their URIs compacted back to
        # prefixed form using the vocabulary from loadingOptions.
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        r['class'] = 'File'
        if self.location is not None:
            r['location'] = relative_uri(self.location, base_url, False)
        if self.path is not None:
            r['path'] = relative_uri(self.path, base_url, False)
        if self.basename is not None:
            r['basename'] = save(self.basename, top=False, base_url=base_url)
        if self.dirname is not None:
            r['dirname'] = save(self.dirname, top=False, base_url=base_url)
        if self.nameroot is not None:
            r['nameroot'] = save(self.nameroot, top=False, base_url=base_url)
        if self.nameext is not None:
            r['nameext'] = save(self.nameext, top=False, base_url=base_url)
        if self.checksum is not None:
            r['checksum'] = save(self.checksum, top=False, base_url=base_url)
        if self.size is not None:
            r['size'] = save(self.size, top=False, base_url=base_url)
        if self.secondaryFiles is not None:
            r['secondaryFiles'] = save(self.secondaryFiles, top=False, base_url=base_url)
        if self.format is not None:
            r['format'] = relative_uri(self.format, base_url, True)
        if self.contents is not None:
            r['contents'] = save(self.contents, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['class', 'location', 'path', 'basename', 'dirname', 'nameroot', 'nameext', 'checksum', 'size', 'secondaryFiles', 'format', 'contents'])
class Directory(Savable):
    """
    Represents a directory to present to a command line tool.

    Directories are represented as objects with `class` of `Directory`.
    Directory objects have a number of properties that provide metadata about
    the directory.

    The `location` property of a Directory is a URI that uniquely identifies
    the directory. Implementations must support the file:// URI scheme and may
    support other schemes such as http://. Alternately to `location`,
    implementations must also accept the `path` property on Directory, which
    must be a filesystem path available on the same host as the CWL runner (for
    inputs) or the runtime environment of a command line tool execution (for
    command line tool outputs).

    A Directory object may have a `listing` field. This is a list of File and
    Directory objects that are contained in the Directory. For each entry in
    `listing`, the `basename` property defines the name of the File or
    Subdirectory when staged to disk. If `listing` is not provided, the
    implementation must have some way of fetching the Directory listing at
    runtime based on the `location` field.

    If a Directory does not have `location`, it is a Directory literal. A
    Directory literal must provide `listing`. Directory literals must be
    created on disk at runtime as needed.

    The resources in a Directory literal do not need to have any implied
    relationship in their `location`. For example, a Directory listing may
    contain two files located on different hosts. It is the responsibility of
    the runtime to ensure that those files are staged to disk appropriately.
    Secondary files associated with files in `listing` must also be staged to
    the same Directory.

    When executing a CommandLineTool, Directories must be recursively staged
    first and have local values of `path` assigned.

    Directory objects in CommandLineTool output must provide either a
    `location` URI or a `path` property in the context of the tool execution
    runtime (local to the compute node, or within the executing container).

    An ExpressionTool may forward file references from input to output by using
    the same value for `location`.

    Name conflicts (the same `basename` appearing multiple times in `listing`
    or in any entry in `secondaryFiles` in the listing) is a fatal error.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize a Directory node from `_doc`, resolving URI fields
        against `baseuri`. Raises ValidationException on invalid input."""
        # Shallow copy so the caller's mapping is untouched; preserve
        # ruamel.yaml line/column info ('lc') for positioned error messages.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # collected per-field errors, raised together at the end
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'Directory':
            raise ValidationException("Not a Directory")
        # Optional fields: parse with the generated loader, or default to None.
        if 'location' in doc:
            try:
                self.location = load_field(doc.get('location'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'location', str).makeError("the `location` field is not valid because:\n"+str(e)))
        else:
            self.location = None
        if 'path' in doc:
            try:
                self.path = load_field(doc.get('path'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'path', str).makeError("the `path` field is not valid because:\n"+str(e)))
        else:
            self.path = None
        if 'basename' in doc:
            try:
                self.basename = load_field(doc.get('basename'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'basename', str).makeError("the `basename` field is not valid because:\n"+str(e)))
        else:
            self.basename = None
        if 'listing' in doc:
            try:
                self.listing = load_field(doc.get('listing'), union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'listing', str).makeError("the `listing` field is not valid because:\n"+str(e)))
        else:
            self.listing = None
        # Namespaced unknown keys become extension fields; any other unknown
        # key is an error (only the first one is reported — loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `class`, `location`, `path`, `basename`, `listing`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'Directory'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; URI fields are made relative to
        `base_url`, and `$namespaces` is emitted when `top` is true."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        r['class'] = 'Directory'
        if self.location is not None:
            r['location'] = relative_uri(self.location, base_url, False)
        if self.path is not None:
            r['path'] = relative_uri(self.path, base_url, False)
        if self.basename is not None:
            r['basename'] = save(self.basename, top=False, base_url=base_url)
        if self.listing is not None:
            r['listing'] = save(self.listing, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['class', 'location', 'path', 'basename', 'listing'])
class SchemaBase(Savable):
    """Abstract base class for all schema (type-definition) nodes."""
    pass
class Parameter(SchemaBase):
    """
    Define an input or output parameter to a process.
    """
    # Abstract base; concrete parameter classes are generated elsewhere.
    pass
class InputBinding(Savable):
    """Abstract base class for input binding nodes."""
    pass
class OutputBinding(Savable):
    """Abstract base class for output binding nodes."""
    pass
class InputSchema(SchemaBase):
    """Abstract marker base for input type schemas (record/enum/array)."""
    pass
class OutputSchema(SchemaBase):
    """Abstract marker base for output type schemas (record/enum/array)."""
    pass
class InputRecordField(RecordField):
    """A single named field of an input record schema."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize an InputRecordField from `_doc`. The field's `name`
        (or `docRoot` as a fallback) is mandatory and becomes the new
        `baseuri` for resolving the remaining fields."""
        # Shallow copy; preserve ruamel.yaml 'lc' metadata for error positions.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        # A record field must be identifiable: fall back to docRoot, else fail.
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                raise ValidationException("Missing name")
        # Subsequent identifiers are scoped under this field's name.
        baseuri = self.name
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        # `type` is required (no `in doc` guard): a missing value is a
        # loader error recorded like any other field error.
        try:
            self.type = load_field(doc.get('type'), typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'inputBinding' in doc:
            try:
                self.inputBinding = load_field(doc.get('inputBinding'), union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'inputBinding', str).makeError("the `inputBinding` field is not valid because:\n"+str(e)))
        else:
            self.inputBinding = None
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        # Namespaced unknown keys become extension fields; other unknown keys
        # are an error (only the first is reported — loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `name`, `doc`, `type`, `inputBinding`, `label`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'InputRecordField'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; `name` is made relative to
        `base_url`, and `$namespaces` is emitted when `top` is true."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.inputBinding is not None:
            r['inputBinding'] = save(self.inputBinding, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['name', 'doc', 'type', 'inputBinding', 'label'])
class InputRecordSchema(RecordSchema, InputSchema):
    """An input record type: a set of named, typed fields."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize an InputRecordSchema from `_doc`. Unlike record
        fields, an anonymous schema is allowed: a missing `name` is filled
        with a generated blank-node identifier."""
        # Shallow copy; preserve ruamel.yaml 'lc' metadata for error positions.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        # Fall back to docRoot, else synthesize an anonymous blank-node id.
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                self.name = "_:" + str(uuid.uuid4())
        # Subsequent identifiers (e.g. field names) are scoped under this name.
        baseuri = self.name
        if 'fields' in doc:
            try:
                self.fields = load_field(doc.get('fields'), idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'fields', str).makeError("the `fields` field is not valid because:\n"+str(e)))
        else:
            self.fields = None
        # `type` is required; a missing value surfaces as a loader error.
        try:
            self.type = load_field(doc.get('type'), typedsl_Record_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        # Namespaced unknown keys become extension fields; other unknown keys
        # are an error (only the first is reported — loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `fields`, `type`, `label`, `name`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'InputRecordSchema'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; `name` is made relative to
        `base_url`, and `$namespaces` is emitted when `top` is true."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.fields is not None:
            r['fields'] = save(self.fields, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['fields', 'type', 'label', 'name'])
class InputEnumSchema(EnumSchema, InputSchema):
    """An input enum type: a closed set of symbol values."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize an InputEnumSchema from `_doc`. A missing `name` is
        filled from `docRoot` or a generated blank-node identifier."""
        # Shallow copy; preserve ruamel.yaml 'lc' metadata for error positions.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        # Fall back to docRoot, else synthesize an anonymous blank-node id.
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                self.name = "_:" + str(uuid.uuid4())
        # Symbols and other identifiers are scoped under this name.
        baseuri = self.name
        # `symbols` and `type` are required; missing values surface as
        # loader errors like any other invalid field.
        try:
            self.symbols = load_field(doc.get('symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'symbols', str).makeError("the `symbols` field is not valid because:\n"+str(e)))
        try:
            self.type = load_field(doc.get('type'), typedsl_Enum_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'inputBinding' in doc:
            try:
                self.inputBinding = load_field(doc.get('inputBinding'), union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'inputBinding', str).makeError("the `inputBinding` field is not valid because:\n"+str(e)))
        else:
            self.inputBinding = None
        # Namespaced unknown keys become extension fields; other unknown keys
        # are an error (only the first is reported — loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `symbols`, `type`, `label`, `name`, `inputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'InputEnumSchema'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; `name` and `symbols` are made
        relative to `base_url`, `$namespaces` is emitted when `top` is true."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.symbols is not None:
            r['symbols'] = relative_uri(self.symbols, base_url, True)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.inputBinding is not None:
            r['inputBinding'] = save(self.inputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['symbols', 'type', 'label', 'name', 'inputBinding'])
class InputArraySchema(ArraySchema, InputSchema):
    """An input array type with a declared item type."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize an InputArraySchema from `_doc`, resolving against
        `baseuri`. Raises ValidationException on invalid input."""
        # Shallow copy; preserve ruamel.yaml 'lc' metadata for error positions.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # `items` and `type` are required; missing values surface as
        # loader errors like any other invalid field.
        try:
            self.items = load_field(doc.get('items'), uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'items', str).makeError("the `items` field is not valid because:\n"+str(e)))
        try:
            self.type = load_field(doc.get('type'), typedsl_Array_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'inputBinding' in doc:
            try:
                self.inputBinding = load_field(doc.get('inputBinding'), union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'inputBinding', str).makeError("the `inputBinding` field is not valid because:\n"+str(e)))
        else:
            self.inputBinding = None
        # Namespaced unknown keys become extension fields; other unknown keys
        # are an error (only the first is reported — loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `items`, `type`, `label`, `inputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'InputArraySchema'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; `items` is made relative to
        `base_url`, and `$namespaces` is emitted when `top` is true."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.items is not None:
            r['items'] = relative_uri(self.items, base_url, False)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.inputBinding is not None:
            r['inputBinding'] = save(self.inputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['items', 'type', 'label', 'inputBinding'])
class OutputRecordField(RecordField):
    """A single named field of an output record schema."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize an OutputRecordField from `_doc`. The field's `name`
        (or `docRoot` as a fallback) is mandatory and becomes the new
        `baseuri` for resolving the remaining fields."""
        # Shallow copy; preserve ruamel.yaml 'lc' metadata for error positions.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        # A record field must be identifiable: fall back to docRoot, else fail.
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                raise ValidationException("Missing name")
        # Subsequent identifiers are scoped under this field's name.
        baseuri = self.name
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        # `type` is required; a missing value surfaces as a loader error.
        try:
            self.type = load_field(doc.get('type'), typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'outputBinding' in doc:
            try:
                self.outputBinding = load_field(doc.get('outputBinding'), union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputBinding', str).makeError("the `outputBinding` field is not valid because:\n"+str(e)))
        else:
            self.outputBinding = None
        # Namespaced unknown keys become extension fields; other unknown keys
        # are an error (only the first is reported — loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `name`, `doc`, `type`, `outputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'OutputRecordField'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; `name` is made relative to
        `base_url`, and `$namespaces` is emitted when `top` is true."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.outputBinding is not None:
            r['outputBinding'] = save(self.outputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['name', 'doc', 'type', 'outputBinding'])
class OutputRecordSchema(RecordSchema, OutputSchema):
    """An output record type: a set of named, typed fields."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Deserialize an OutputRecordSchema from `_doc`, resolving against
        `baseuri`. Raises ValidationException on invalid input."""
        # Shallow copy; preserve ruamel.yaml 'lc' metadata for error positions.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'fields' in doc:
            try:
                self.fields = load_field(doc.get('fields'), idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'fields', str).makeError("the `fields` field is not valid because:\n"+str(e)))
        else:
            self.fields = None
        # `type` is required; a missing value surfaces as a loader error.
        try:
            self.type = load_field(doc.get('type'), typedsl_Record_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        # Namespaced unknown keys become extension fields; other unknown keys
        # are an error (only the first is reported — loop breaks).
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `fields`, `type`, `label`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'OutputRecordSchema'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; `$namespaces` is emitted when
        `top` is true."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.fields is not None:
            r['fields'] = save(self.fields, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Recognized field names; anything else is an extension field or an error.
    attrs = frozenset(['fields', 'type', 'label'])
class OutputEnumSchema(EnumSchema, OutputSchema):
    """Auto-generated loader/saver for the CWL ``OutputEnumSchema`` record."""

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping) into typed attributes.

        Per-field validation errors are accumulated and raised together
        as a single :class:`ValidationException`.
        """
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        try:
            self.symbols = load_field(doc.get('symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'symbols', str).makeError("the `symbols` field is not valid because:\n"+str(e)))
        try:
            self.type = load_field(doc.get('type'), typedsl_Enum_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'outputBinding' in doc:
            try:
                self.outputBinding = load_field(doc.get('outputBinding'), union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputBinding', str).makeError("the `outputBinding` field is not valid because:\n"+str(e)))
        else:
            self.outputBinding = None
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `symbols`, `type`, `label`, `outputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'OutputEnumSchema'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; URI-typed fields are relativized."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.symbols is not None:
            r['symbols'] = relative_uri(self.symbols, base_url, True)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.outputBinding is not None:
            r['outputBinding'] = save(self.outputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['symbols', 'type', 'label', 'outputBinding'])
class OutputArraySchema(ArraySchema, OutputSchema):
    """Auto-generated loader/saver for the CWL ``OutputArraySchema`` record."""

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping) into typed attributes.

        Per-field validation errors are accumulated and raised together
        as a single :class:`ValidationException`.
        """
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        try:
            self.items = load_field(doc.get('items'), uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'items', str).makeError("the `items` field is not valid because:\n"+str(e)))
        try:
            self.type = load_field(doc.get('type'), typedsl_Array_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'outputBinding' in doc:
            try:
                self.outputBinding = load_field(doc.get('outputBinding'), union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputBinding', str).makeError("the `outputBinding` field is not valid because:\n"+str(e)))
        else:
            self.outputBinding = None
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `items`, `type`, `label`, `outputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'OutputArraySchema'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; URI-typed fields are relativized."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.items is not None:
            r['items'] = relative_uri(self.items, base_url, False)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.outputBinding is not None:
            r['outputBinding'] = save(self.outputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['items', 'type', 'label', 'outputBinding'])
class InputParameter(Parameter):
    """Auto-generated loader/saver for the CWL ``InputParameter`` record."""

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping) into typed attributes.

        ``id`` is mandatory (falling back to ``docRoot`` when absent);
        all other per-field validation errors are accumulated and raised
        together as a single :class:`ValidationException`.
        """
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        # All remaining fields are resolved relative to this parameter's id.
        baseuri = self.id
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'secondaryFiles' in doc:
            try:
                self.secondaryFiles = load_field(doc.get('secondaryFiles'), union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'secondaryFiles', str).makeError("the `secondaryFiles` field is not valid because:\n"+str(e)))
        else:
            self.secondaryFiles = None
        if 'streamable' in doc:
            try:
                self.streamable = load_field(doc.get('streamable'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'streamable', str).makeError("the `streamable` field is not valid because:\n"+str(e)))
        else:
            self.streamable = None
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        if 'format' in doc:
            try:
                self.format = load_field(doc.get('format'), uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'format', str).makeError("the `format` field is not valid because:\n"+str(e)))
        else:
            self.format = None
        if 'inputBinding' in doc:
            try:
                self.inputBinding = load_field(doc.get('inputBinding'), union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'inputBinding', str).makeError("the `inputBinding` field is not valid because:\n"+str(e)))
        else:
            self.inputBinding = None
        if 'default' in doc:
            try:
                self.default = load_field(doc.get('default'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'default', str).makeError("the `default` field is not valid because:\n"+str(e)))
        else:
            self.default = None
        if 'type' in doc:
            try:
                self.type = load_field(doc.get('type'), typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        else:
            self.type = None
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'InputParameter'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; URI-typed fields are relativized."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.id is not None:
            r['id'] = relative_uri(self.id, base_url, True)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.secondaryFiles is not None:
            r['secondaryFiles'] = save(self.secondaryFiles, top=False, base_url=base_url)
        if self.streamable is not None:
            r['streamable'] = save(self.streamable, top=False, base_url=base_url)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.format is not None:
            r['format'] = relative_uri(self.format, base_url, True)
        if self.inputBinding is not None:
            r['inputBinding'] = save(self.inputBinding, top=False, base_url=base_url)
        if self.default is not None:
            r['default'] = save(self.default, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['label', 'secondaryFiles', 'streamable', 'doc', 'id', 'format', 'inputBinding', 'default', 'type'])
class OutputParameter(Parameter):
    """Auto-generated loader/saver for the CWL ``OutputParameter`` record."""

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping) into typed attributes.

        ``id`` is mandatory (falling back to ``docRoot`` when absent);
        all other per-field validation errors are accumulated and raised
        together as a single :class:`ValidationException`.
        """
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        # All remaining fields are resolved relative to this parameter's id.
        baseuri = self.id
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'secondaryFiles' in doc:
            try:
                self.secondaryFiles = load_field(doc.get('secondaryFiles'), union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'secondaryFiles', str).makeError("the `secondaryFiles` field is not valid because:\n"+str(e)))
        else:
            self.secondaryFiles = None
        if 'streamable' in doc:
            try:
                self.streamable = load_field(doc.get('streamable'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'streamable', str).makeError("the `streamable` field is not valid because:\n"+str(e)))
        else:
            self.streamable = None
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        if 'outputBinding' in doc:
            try:
                self.outputBinding = load_field(doc.get('outputBinding'), union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputBinding', str).makeError("the `outputBinding` field is not valid because:\n"+str(e)))
        else:
            self.outputBinding = None
        if 'format' in doc:
            try:
                self.format = load_field(doc.get('format'), uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'format', str).makeError("the `format` field is not valid because:\n"+str(e)))
        else:
            self.format = None
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'OutputParameter'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; URI-typed fields are relativized."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.id is not None:
            r['id'] = relative_uri(self.id, base_url, True)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.secondaryFiles is not None:
            r['secondaryFiles'] = save(self.secondaryFiles, top=False, base_url=base_url)
        if self.streamable is not None:
            r['streamable'] = save(self.streamable, top=False, base_url=base_url)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.outputBinding is not None:
            r['outputBinding'] = save(self.outputBinding, top=False, base_url=base_url)
        if self.format is not None:
            r['format'] = relative_uri(self.format, base_url, True)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['label', 'secondaryFiles', 'streamable', 'doc', 'id', 'outputBinding', 'format'])
class ProcessRequirement(Savable):
    """Abstract marker base for process requirements.

    A process requirement declares a prerequisite that may or must be
    fulfilled before executing a process; see ``Process.hints`` and
    ``Process.requirements``.  Requirements are the primary mechanism
    for specifying extensions to the CWL core specification.
    """
class Process(Savable):
    """Abstract base of the executable CWL types.

    ``Process`` is the base executable type defined by a CWL document.
    It is abstract and cannot itself be executed directly.
    """
class InlineJavascriptRequirement(ProcessRequirement):
    """
    Indicates that the workflow platform must support inline Javascript expressions.
    If this requirement is not present, the workflow platform must not perform expression
    interpolation.
    """

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping with ``class: InlineJavascriptRequirement``)."""
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'InlineJavascriptRequirement':
            raise ValidationException("Not a InlineJavascriptRequirement")
        if 'expressionLib' in doc:
            try:
                self.expressionLib = load_field(doc.get('expressionLib'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'expressionLib', str).makeError("the `expressionLib` field is not valid because:\n"+str(e)))
        else:
            self.expressionLib = None
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `class`, `expressionLib`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'InlineJavascriptRequirement'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict, always re-emitting the `class` key."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        r['class'] = 'InlineJavascriptRequirement'
        if self.expressionLib is not None:
            r['expressionLib'] = save(self.expressionLib, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['class', 'expressionLib'])
class SchemaDefRequirement(ProcessRequirement):
    """
    This field consists of an array of type definitions which must be used when
    interpreting the `inputs` and `outputs` fields. When a `type` field
    contains an IRI, the implementation must check if the type is defined in
    `schemaDefs` and use that definition. If the type is not found in
    `schemaDefs`, it is an error. The entries in `schemaDefs` must be
    processed in the order listed such that later schema definitions may refer
    to earlier schema definitions.
    """

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping with ``class: SchemaDefRequirement``)."""
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'SchemaDefRequirement':
            raise ValidationException("Not a SchemaDefRequirement")
        try:
            self.types = load_field(doc.get('types'), array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'types', str).makeError("the `types` field is not valid because:\n"+str(e)))
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `class`, `types`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'SchemaDefRequirement'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict, always re-emitting the `class` key."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        r['class'] = 'SchemaDefRequirement'
        if self.types is not None:
            r['types'] = save(self.types, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['class', 'types'])
class EnvironmentDef(Savable):
    """
    Define an environment variable that will be set in the runtime environment
    by the workflow platform when executing the command line tool. May be the
    result of executing an expression, such as getting a parameter from input.
    """

    # (field name, loader) pairs driving both loading and saving below.
    _FIELD_LOADERS = None  # populated lazily in __init__ to keep definition order local

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping) into ``envName``/``envValue`` attributes."""
        doc = copy.copy(_doc)
        # Keep ruamel.yaml line/column bookkeeping on the copy for error messages.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # Load each declared field, collecting (not raising) per-field errors.
        for field_name, loader in (('envName', strtype),
                                   ('envValue', union_of_strtype_or_ExpressionLoader)):
            try:
                setattr(self, field_name,
                        load_field(doc.get(field_name), loader, baseuri, loadingOptions))
            except ValidationException as e:
                errors.append(SourceLine(doc, field_name, str).makeError(
                    "the `%s` field is not valid because:\n" % field_name + str(e)))
        self.extension_fields = {}
        # Unknown keys: namespaced ("prefix:name") keys are kept as extension
        # fields; the first non-namespaced unknown key is an error and stops the scan.
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                errors.append(SourceLine(doc, key, str).makeError(
                    "invalid field `%s`, expected one of: `envName`, `envValue`" % (key)))
                break
            expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]
        if errors:
            raise ValidationException("Trying 'EnvironmentDef'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict (extension fields first, then schema fields)."""
        r = {}
        for expanded, value in self.extension_fields.items():
            r[prefix_url(expanded, self.loadingOptions.vocab)] = value
        for field_name in ('envName', 'envValue'):
            value = getattr(self, field_name)
            if value is not None:
                r[field_name] = save(value, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['envName', 'envValue'])
class CommandLineBinding(InputBinding):
    """
    When listed under `inputBinding` in the input schema, the term
    "value" refers to the corresponding value in the input object. For
    binding objects listed in `CommandLineTool.arguments`, the term "value"
    refers to the effective value after evaluating `valueFrom`.

    The binding behavior when building the command line depends on the data
    type of the value. If there is a mismatch between the type described by
    the input schema and the effective value, such as resulting from an
    expression evaluation, an implementation must use the data type of the
    effective value.

    - **string**: Add `prefix` and the string to the command line.
    - **number**: Add `prefix` and decimal representation to command line.
    - **boolean**: If true, add `prefix` to the command line. If false, add
      nothing.
    - **File**: Add `prefix` and the value of
      [`File.path`](#File) to the command line.
    - **array**: If `itemSeparator` is specified, add `prefix` and the join
      the array into a single string with `itemSeparator` separating the
      items. Otherwise first add `prefix`, then recursively process
      individual elements.
    - **object**: Add `prefix` only, and recursively add object fields for
      which `inputBinding` is specified.
    - **null**: Add nothing.
    """

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping) into typed attributes; all fields optional."""
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'loadContents' in doc:
            try:
                self.loadContents = load_field(doc.get('loadContents'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'loadContents', str).makeError("the `loadContents` field is not valid because:\n"+str(e)))
        else:
            self.loadContents = None
        if 'position' in doc:
            try:
                self.position = load_field(doc.get('position'), union_of_None_type_or_inttype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'position', str).makeError("the `position` field is not valid because:\n"+str(e)))
        else:
            self.position = None
        if 'prefix' in doc:
            try:
                self.prefix = load_field(doc.get('prefix'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'prefix', str).makeError("the `prefix` field is not valid because:\n"+str(e)))
        else:
            self.prefix = None
        if 'separate' in doc:
            try:
                self.separate = load_field(doc.get('separate'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'separate', str).makeError("the `separate` field is not valid because:\n"+str(e)))
        else:
            self.separate = None
        if 'itemSeparator' in doc:
            try:
                self.itemSeparator = load_field(doc.get('itemSeparator'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'itemSeparator', str).makeError("the `itemSeparator` field is not valid because:\n"+str(e)))
        else:
            self.itemSeparator = None
        if 'valueFrom' in doc:
            try:
                self.valueFrom = load_field(doc.get('valueFrom'), union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'valueFrom', str).makeError("the `valueFrom` field is not valid because:\n"+str(e)))
        else:
            self.valueFrom = None
        if 'shellQuote' in doc:
            try:
                self.shellQuote = load_field(doc.get('shellQuote'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'shellQuote', str).makeError("the `shellQuote` field is not valid because:\n"+str(e)))
        else:
            self.shellQuote = None
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandLineBinding'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict, omitting fields that are None."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.loadContents is not None:
            r['loadContents'] = save(self.loadContents, top=False, base_url=base_url)
        if self.position is not None:
            r['position'] = save(self.position, top=False, base_url=base_url)
        if self.prefix is not None:
            r['prefix'] = save(self.prefix, top=False, base_url=base_url)
        if self.separate is not None:
            r['separate'] = save(self.separate, top=False, base_url=base_url)
        if self.itemSeparator is not None:
            r['itemSeparator'] = save(self.itemSeparator, top=False, base_url=base_url)
        if self.valueFrom is not None:
            r['valueFrom'] = save(self.valueFrom, top=False, base_url=base_url)
        if self.shellQuote is not None:
            r['shellQuote'] = save(self.shellQuote, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['loadContents', 'position', 'prefix', 'separate', 'itemSeparator', 'valueFrom', 'shellQuote'])
class CommandOutputBinding(OutputBinding):
    """
    Describes how to generate an output parameter based on the files produced
    by a CommandLineTool.

    The output parameter value is generated by applying these operations in the
    following order:

    - glob
    - loadContents
    - outputEval
    - secondaryFiles
    """

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse ``_doc`` (a mapping) into typed attributes; all fields optional."""
        doc = copy.copy(_doc)
        # Preserve ruamel.yaml line/column info on the copy for error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'glob' in doc:
            try:
                self.glob = load_field(doc.get('glob'), union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'glob', str).makeError("the `glob` field is not valid because:\n"+str(e)))
        else:
            self.glob = None
        if 'loadContents' in doc:
            try:
                self.loadContents = load_field(doc.get('loadContents'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'loadContents', str).makeError("the `loadContents` field is not valid because:\n"+str(e)))
        else:
            self.loadContents = None
        if 'outputEval' in doc:
            try:
                self.outputEval = load_field(doc.get('outputEval'), union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputEval', str).makeError("the `outputEval` field is not valid because:\n"+str(e)))
        else:
            self.outputEval = None
        self.extension_fields = {}
        # Unknown keys: "prefix:name" keys become extension fields; any other
        # unknown key is an error, and scanning stops at the first one.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `glob`, `loadContents`, `outputEval`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandOutputBinding'\n"+"\n".join(errors))

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict, omitting fields that are None."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.glob is not None:
            r['glob'] = save(self.glob, top=False, base_url=base_url)
        if self.loadContents is not None:
            r['loadContents'] = save(self.loadContents, top=False, base_url=base_url)
        if self.outputEval is not None:
            r['outputEval'] = save(self.outputEval, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r

    # Known schema keys; anything else must be a namespaced extension field.
    attrs = frozenset(['glob', 'loadContents', 'outputEval'])
class CommandInputRecordField(InputRecordField):
    """Loader/saver for the CWL ``CommandInputRecordField`` record.

    ``__init__`` parses a mapping into typed attributes (``name``, ``doc``,
    ``type``, ``inputBinding``, ``label``), accumulating all field-level
    validation problems before raising a single ``ValidationException``.
    ``save`` serializes the instance back to a plain dict.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy so the caller's mapping is left untouched.
        doc = copy.copy(_doc)
        # Carry over line/column metadata (presumably ruamel.yaml's `lc`)
        # so SourceLine can point error messages at the original text.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # accumulated per-field validation messages
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        # A record field must be identified: fall back to docRoot, else fail.
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                raise ValidationException("Missing name")
        # Subsequent relative references resolve against this field's name.
        baseuri = self.name
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        # `type` is mandatory: loaded unconditionally (no 'in doc' guard).
        try:
            self.type = load_field(doc.get('type'), typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'inputBinding' in doc:
            try:
                self.inputBinding = load_field(doc.get('inputBinding'), union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'inputBinding', str).makeError("the `inputBinding` field is not valid because:\n"+str(e)))
        else:
            self.inputBinding = None
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        self.extension_fields = {}
        # Keys containing ':' are namespaced extension fields; any other
        # unknown key is an error.  NOTE(review): the `break` aborts the scan
        # at the first invalid key, so only one invalid-field error is
        # reported and later extension fields are dropped — confirm this
        # matches the generator's intent.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `name`, `doc`, `type`, `inputBinding`, `label`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandInputRecordField'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # `name` is a URI-typed field: written relative to base_url.
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.inputBinding is not None:
            r['inputBinding'] = save(self.inputBinding, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['name', 'doc', 'type', 'inputBinding', 'label'])
class CommandInputRecordSchema(InputRecordSchema):
    """Loader/saver for the CWL ``CommandInputRecordSchema`` record.

    Parses ``fields``, ``type``, ``label`` and an optional ``name``;
    anonymous schemas get a generated blank-node identifier.  All
    field-level validation failures are collected and raised together.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy so the caller's mapping is left untouched.
        doc = copy.copy(_doc)
        # Carry over line/column metadata for SourceLine error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # accumulated per-field validation messages
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                # Anonymous schema: mint a blank-node identifier.
                self.name = "_:" + str(uuid.uuid4())
        # Subsequent relative references resolve against this schema's name.
        baseuri = self.name
        if 'fields' in doc:
            try:
                self.fields = load_field(doc.get('fields'), idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'fields', str).makeError("the `fields` field is not valid because:\n"+str(e)))
        else:
            self.fields = None
        # `type` is mandatory: loaded unconditionally (no 'in doc' guard).
        try:
            self.type = load_field(doc.get('type'), typedsl_Record_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        self.extension_fields = {}
        # Keys containing ':' are namespaced extension fields; any other
        # unknown key is an error.  NOTE(review): the `break` stops at the
        # first invalid key — later keys are not examined.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `fields`, `type`, `label`, `name`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandInputRecordSchema'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # `name` is a URI-typed field: written relative to base_url.
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.fields is not None:
            r['fields'] = save(self.fields, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['fields', 'type', 'label', 'name'])
class CommandInputEnumSchema(InputEnumSchema):
    """Loader/saver for the CWL ``CommandInputEnumSchema`` record.

    Parses ``symbols``, ``type``, ``label``, ``inputBinding`` and an
    optional ``name`` (blank-node identifier generated when absent).
    All field-level validation failures are collected and raised together.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy so the caller's mapping is left untouched.
        doc = copy.copy(_doc)
        # Carry over line/column metadata for SourceLine error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # accumulated per-field validation messages
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                # Anonymous schema: mint a blank-node identifier.
                self.name = "_:" + str(uuid.uuid4())
        # Subsequent relative references resolve against this schema's name.
        baseuri = self.name
        # `symbols` and `type` are mandatory: loaded unconditionally.
        try:
            self.symbols = load_field(doc.get('symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'symbols', str).makeError("the `symbols` field is not valid because:\n"+str(e)))
        try:
            self.type = load_field(doc.get('type'), typedsl_Enum_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'inputBinding' in doc:
            try:
                self.inputBinding = load_field(doc.get('inputBinding'), union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'inputBinding', str).makeError("the `inputBinding` field is not valid because:\n"+str(e)))
        else:
            self.inputBinding = None
        self.extension_fields = {}
        # Keys containing ':' are namespaced extension fields; any other
        # unknown key is an error.  NOTE(review): the `break` stops at the
        # first invalid key — later keys are not examined.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `symbols`, `type`, `label`, `name`, `inputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandInputEnumSchema'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # `name` and `symbols` are URI-typed: written relative to base_url.
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.symbols is not None:
            r['symbols'] = relative_uri(self.symbols, base_url, True)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.inputBinding is not None:
            r['inputBinding'] = save(self.inputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['symbols', 'type', 'label', 'name', 'inputBinding'])
class CommandInputArraySchema(InputArraySchema):
    """Loader/saver for the CWL ``CommandInputArraySchema`` record.

    Parses ``items``, ``type``, ``label`` and ``inputBinding`` from a
    mapping, collecting every field-level validation failure before
    raising a single ``ValidationException``.  ``save`` serializes the
    instance back to a plain dict, omitting unset fields.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's mapping is not mutated.
        doc = copy.copy(_doc)
        # Keep line/column metadata for SourceLine error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions

        def _load(field_name, loader):
            # Load one field; record (rather than raise) validation failures.
            try:
                return load_field(doc.get(field_name), loader, baseuri, loadingOptions)
            except ValidationException as err:
                errors.append(SourceLine(doc, field_name, str).makeError(
                    "the `%s` field is not valid because:\n" % field_name + str(err)))
                return None

        # `items` and `type` are mandatory; `label` and `inputBinding`
        # default to None when absent.
        self.items = _load('items', uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2)
        self.type = _load('type', typedsl_Array_symbolLoader_2)
        self.label = _load('label', union_of_None_type_or_strtype) if 'label' in doc else None
        self.inputBinding = _load('inputBinding', union_of_None_type_or_CommandLineBindingLoader) if 'inputBinding' in doc else None

        self.extension_fields = {}
        # Namespaced ('prefix:name') keys become extension fields; any other
        # unrecognized key is an error and stops the scan.
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError(
                    "invalid field `%s`, expected one of: `items`, `type`, `label`, `inputBinding`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'CommandInputArraySchema'\n" + "\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ext_key in self.extension_fields:
            r[prefix_url(ext_key, self.loadingOptions.vocab)] = self.extension_fields[ext_key]
        # `items` is URI-typed: written relative to base_url.
        if self.items is not None:
            r['items'] = relative_uri(self.items, base_url, False)
        for field_name in ('type', 'label', 'inputBinding'):
            value = getattr(self, field_name)
            if value is not None:
                r[field_name] = save(value, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['items', 'type', 'label', 'inputBinding'])
class CommandOutputRecordField(OutputRecordField):
    """Loader/saver for the CWL ``CommandOutputRecordField`` record.

    Parses ``name``, ``doc``, ``type`` and ``outputBinding``, collecting
    all field-level validation failures before raising a single
    ``ValidationException``.  ``save`` serializes back to a plain dict.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy so the caller's mapping is left untouched.
        doc = copy.copy(_doc)
        # Carry over line/column metadata for SourceLine error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # accumulated per-field validation messages
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        # A record field must be identified: fall back to docRoot, else fail.
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                raise ValidationException("Missing name")
        # Subsequent relative references resolve against this field's name.
        baseuri = self.name
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        # `type` is mandatory: loaded unconditionally (no 'in doc' guard).
        try:
            self.type = load_field(doc.get('type'), typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'outputBinding' in doc:
            try:
                self.outputBinding = load_field(doc.get('outputBinding'), union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputBinding', str).makeError("the `outputBinding` field is not valid because:\n"+str(e)))
        else:
            self.outputBinding = None
        self.extension_fields = {}
        # Keys containing ':' are namespaced extension fields; any other
        # unknown key is an error.  NOTE(review): the `break` stops at the
        # first invalid key — later keys are not examined.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `name`, `doc`, `type`, `outputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandOutputRecordField'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # `name` is a URI-typed field: written relative to base_url.
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.outputBinding is not None:
            r['outputBinding'] = save(self.outputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['name', 'doc', 'type', 'outputBinding'])
class CommandOutputRecordSchema(OutputRecordSchema):
    """Loader/saver for the CWL ``CommandOutputRecordSchema`` record.

    Parses ``fields``, ``type``, ``label`` and an optional ``name``;
    anonymous schemas get a generated blank-node identifier.  All
    field-level validation failures are collected and raised together.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy so the caller's mapping is left untouched.
        doc = copy.copy(_doc)
        # Carry over line/column metadata for SourceLine error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # accumulated per-field validation messages
        self.loadingOptions = loadingOptions
        if 'name' in doc:
            try:
                self.name = load_field(doc.get('name'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'name', str).makeError("the `name` field is not valid because:\n"+str(e)))
        else:
            self.name = None
        if self.name is None:
            if docRoot is not None:
                self.name = docRoot
            else:
                # Anonymous schema: mint a blank-node identifier.
                self.name = "_:" + str(uuid.uuid4())
        # Subsequent relative references resolve against this schema's name.
        baseuri = self.name
        if 'fields' in doc:
            try:
                self.fields = load_field(doc.get('fields'), idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'fields', str).makeError("the `fields` field is not valid because:\n"+str(e)))
        else:
            self.fields = None
        # `type` is mandatory: loaded unconditionally (no 'in doc' guard).
        try:
            self.type = load_field(doc.get('type'), typedsl_Record_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        self.extension_fields = {}
        # Keys containing ':' are namespaced extension fields; any other
        # unknown key is an error.  NOTE(review): the `break` stops at the
        # first invalid key — later keys are not examined.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `fields`, `type`, `label`, `name`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandOutputRecordSchema'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # `name` is a URI-typed field: written relative to base_url.
        if self.name is not None:
            r['name'] = relative_uri(self.name, base_url, True)
        if self.fields is not None:
            r['fields'] = save(self.fields, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['fields', 'type', 'label', 'name'])
class CommandOutputEnumSchema(OutputEnumSchema):
    """Loader/saver for the CWL ``CommandOutputEnumSchema`` record.

    Parses ``symbols``, ``type``, ``label`` and ``outputBinding`` (note:
    unlike the input enum schema, no ``name`` field is handled here).
    All field-level validation failures are collected and raised together.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy so the caller's mapping is left untouched.
        doc = copy.copy(_doc)
        # Carry over line/column metadata for SourceLine error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # accumulated per-field validation messages
        self.loadingOptions = loadingOptions
        # `symbols` and `type` are mandatory: loaded unconditionally.
        try:
            self.symbols = load_field(doc.get('symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'symbols', str).makeError("the `symbols` field is not valid because:\n"+str(e)))
        try:
            self.type = load_field(doc.get('type'), typedsl_Enum_symbolLoader_2, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'outputBinding' in doc:
            try:
                self.outputBinding = load_field(doc.get('outputBinding'), union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputBinding', str).makeError("the `outputBinding` field is not valid because:\n"+str(e)))
        else:
            self.outputBinding = None
        self.extension_fields = {}
        # Keys containing ':' are namespaced extension fields; any other
        # unknown key is an error.  NOTE(review): the `break` stops at the
        # first invalid key — later keys are not examined.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `symbols`, `type`, `label`, `outputBinding`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandOutputEnumSchema'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # `symbols` is URI-typed: written relative to base_url.
        if self.symbols is not None:
            r['symbols'] = relative_uri(self.symbols, base_url, True)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.outputBinding is not None:
            r['outputBinding'] = save(self.outputBinding, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['symbols', 'type', 'label', 'outputBinding'])
class CommandOutputArraySchema(OutputArraySchema):
    """Loader/saver for the CWL ``CommandOutputArraySchema`` record.

    Parses ``items``, ``type``, ``label`` and ``outputBinding`` from a
    mapping, collecting every field-level validation failure before
    raising a single ``ValidationException``.  ``save`` serializes the
    instance back to a plain dict, omitting unset fields.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's mapping is not mutated.
        doc = copy.copy(_doc)
        # Keep line/column metadata for SourceLine error reporting.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions

        def _load(field_name, loader):
            # Load one field; record (rather than raise) validation failures.
            try:
                return load_field(doc.get(field_name), loader, baseuri, loadingOptions)
            except ValidationException as err:
                errors.append(SourceLine(doc, field_name, str).makeError(
                    "the `%s` field is not valid because:\n" % field_name + str(err)))
                return None

        # `items` and `type` are mandatory; `label` and `outputBinding`
        # default to None when absent.
        self.items = _load('items', uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2)
        self.type = _load('type', typedsl_Array_symbolLoader_2)
        self.label = _load('label', union_of_None_type_or_strtype) if 'label' in doc else None
        self.outputBinding = _load('outputBinding', union_of_None_type_or_CommandOutputBindingLoader) if 'outputBinding' in doc else None

        self.extension_fields = {}
        # Namespaced ('prefix:name') keys become extension fields; any other
        # unrecognized key is an error and stops the scan.
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError(
                    "invalid field `%s`, expected one of: `items`, `type`, `label`, `outputBinding`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'CommandOutputArraySchema'\n" + "\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ext_key in self.extension_fields:
            r[prefix_url(ext_key, self.loadingOptions.vocab)] = self.extension_fields[ext_key]
        # `items` is URI-typed: written relative to base_url.
        if self.items is not None:
            r['items'] = relative_uri(self.items, base_url, False)
        for field_name in ('type', 'label', 'outputBinding'):
            value = getattr(self, field_name)
            if value is not None:
                r[field_name] = save(value, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['items', 'type', 'label', 'outputBinding'])
class CommandInputParameter(InputParameter):
    """
    An input parameter for a CommandLineTool.

    Parses ``id`` (mandatory), ``label``, ``secondaryFiles``,
    ``streamable``, ``doc``, ``format``, ``inputBinding``, ``default``
    and ``type``, collecting all field-level validation failures before
    raising a single ``ValidationException``.  ``save`` serializes the
    instance back to a plain dict.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Shallow-copy so the caller's mapping is left untouched.
        doc = copy.copy(_doc)
        # Carry over line/column metadata (presumably ruamel.yaml's `lc`)
        # so SourceLine can point error messages at the original text.
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []  # accumulated per-field validation messages
        self.loadingOptions = loadingOptions
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        # A parameter must be identified: fall back to docRoot, else fail.
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        # Subsequent relative references resolve against this parameter's id.
        baseuri = self.id
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'secondaryFiles' in doc:
            try:
                self.secondaryFiles = load_field(doc.get('secondaryFiles'), union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'secondaryFiles', str).makeError("the `secondaryFiles` field is not valid because:\n"+str(e)))
        else:
            self.secondaryFiles = None
        if 'streamable' in doc:
            try:
                self.streamable = load_field(doc.get('streamable'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'streamable', str).makeError("the `streamable` field is not valid because:\n"+str(e)))
        else:
            self.streamable = None
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        if 'format' in doc:
            try:
                self.format = load_field(doc.get('format'), uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'format', str).makeError("the `format` field is not valid because:\n"+str(e)))
        else:
            self.format = None
        if 'inputBinding' in doc:
            try:
                self.inputBinding = load_field(doc.get('inputBinding'), union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'inputBinding', str).makeError("the `inputBinding` field is not valid because:\n"+str(e)))
        else:
            self.inputBinding = None
        if 'default' in doc:
            try:
                self.default = load_field(doc.get('default'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'default', str).makeError("the `default` field is not valid because:\n"+str(e)))
        else:
            self.default = None
        if 'type' in doc:
            try:
                self.type = load_field(doc.get('type'), typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        else:
            self.type = None
        self.extension_fields = {}
        # Keys containing ':' are namespaced extension fields; any other
        # unknown key is an error.  NOTE(review): the `break` aborts the scan
        # at the first invalid key, so only one invalid-field error is
        # reported and later extension fields are dropped — confirm this
        # matches the generator's intent.
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'CommandInputParameter'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a JSON-compatible dict, omitting unset fields."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # `id` and `format` are URI-typed: written relative to base_url.
        if self.id is not None:
            r['id'] = relative_uri(self.id, base_url, True)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.secondaryFiles is not None:
            r['secondaryFiles'] = save(self.secondaryFiles, top=False, base_url=base_url)
        if self.streamable is not None:
            r['streamable'] = save(self.streamable, top=False, base_url=base_url)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.format is not None:
            r['format'] = relative_uri(self.format, base_url, True)
        if self.inputBinding is not None:
            r['inputBinding'] = save(self.inputBinding, top=False, base_url=base_url)
        if self.default is not None:
            r['default'] = save(self.default, top=False, base_url=base_url)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized (non-extension) field names for this record type.
    attrs = frozenset(['label', 'secondaryFiles', 'streamable', 'doc', 'id', 'format', 'inputBinding', 'default', 'type'])
class CommandOutputParameter(OutputParameter):
    """
    An output parameter for a CommandLineTool.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse the mapping `_doc` into a CommandOutputParameter.

        Each known field is loaded with its generated loader; per-field
        failures are collected in `errors` so a single ValidationException
        can report them all at the end.  `docRoot`, when given, supplies the
        `id` for an object at the document root.
        """
        # Shallow copy so the caller's document is never mutated; preserve
        # ruamel.yaml line/column info (`lc`) when the input carries it.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        # `id` is mandatory for this class: fall back to the document root
        # URI, otherwise fail immediately.
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        # Relative URIs in the remaining fields resolve against this id.
        baseuri = self.id
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'secondaryFiles' in doc:
            try:
                self.secondaryFiles = load_field(doc.get('secondaryFiles'), union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'secondaryFiles', str).makeError("the `secondaryFiles` field is not valid because:\n"+str(e)))
        else:
            self.secondaryFiles = None
        if 'streamable' in doc:
            try:
                self.streamable = load_field(doc.get('streamable'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'streamable', str).makeError("the `streamable` field is not valid because:\n"+str(e)))
        else:
            self.streamable = None
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        if 'outputBinding' in doc:
            try:
                self.outputBinding = load_field(doc.get('outputBinding'), union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'outputBinding', str).makeError("the `outputBinding` field is not valid because:\n"+str(e)))
        else:
            self.outputBinding = None
        if 'format' in doc:
            try:
                self.format = load_field(doc.get('format'), uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'format', str).makeError("the `format` field is not valid because:\n"+str(e)))
        else:
            self.format = None
        if 'type' in doc:
            try:
                self.type = load_field(doc.get('type'), typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'type', str).makeError("the `type` field is not valid because:\n"+str(e)))
        else:
            self.type = None
        # Any key that is not a schema field is either a namespaced extension
        # field (kept under its expanded URI) or a validation error.
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`" % (k)))
                    # NOTE(review): scanning stops at the first unrecognized
                    # field, so at most one invalid-field error is reported.
                    break
        if errors:
            raise ValidationException("Trying 'CommandOutputParameter'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; URI-valued fields (`id`, `format`)
        are re-relativized against `base_url`."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.id is not None:
            r['id'] = relative_uri(self.id, base_url, True)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.secondaryFiles is not None:
            r['secondaryFiles'] = save(self.secondaryFiles, top=False, base_url=base_url)
        if self.streamable is not None:
            r['streamable'] = save(self.streamable, top=False, base_url=base_url)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.outputBinding is not None:
            r['outputBinding'] = save(self.outputBinding, top=False, base_url=base_url)
        if self.format is not None:
            r['format'] = relative_uri(self.format, base_url, True)
        if self.type is not None:
            r['type'] = save(self.type, top=False, base_url=base_url)
        # Only a top-level document records the namespace prefix table.
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized field names; used by __init__ to detect extension fields.
    attrs = frozenset(['label', 'secondaryFiles', 'streamable', 'doc', 'id', 'outputBinding', 'format', 'type'])
class CommandLineTool(Process):
    """
    This defines the schema of the CWL Command Line Tool Description document.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse the mapping `_doc` into a CommandLineTool.

        The `class` discriminator must equal 'CommandLineTool'.  Per-field
        loader failures are collected in `errors` and reported together in a
        single ValidationException.  `docRoot`, when given, supplies the `id`
        for a document-root object; otherwise a blank-node UUID id is minted.
        """
        # Shallow copy so the caller's document is never mutated; preserve
        # ruamel.yaml line/column info (`lc`) when the input carries it.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'CommandLineTool':
            raise ValidationException("Not a CommandLineTool")
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        # `id` is optional here: fall back to the document root, then to a
        # freshly generated blank-node identifier.
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                self.id = "_:" + str(uuid.uuid4())
        # Relative URIs in the remaining fields resolve against this id.
        baseuri = self.id
        # `inputs` and `outputs` are required (no presence check).
        try:
            self.inputs = load_field(doc.get('inputs'), idmap_inputs_array_of_CommandInputParameterLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'inputs', str).makeError("the `inputs` field is not valid because:\n"+str(e)))
        try:
            self.outputs = load_field(doc.get('outputs'), idmap_outputs_array_of_CommandOutputParameterLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'outputs', str).makeError("the `outputs` field is not valid because:\n"+str(e)))
        if 'requirements' in doc:
            try:
                self.requirements = load_field(doc.get('requirements'), idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'requirements', str).makeError("the `requirements` field is not valid because:\n"+str(e)))
        else:
            self.requirements = None
        # `hints` stays loosely typed (Any) by design, unlike `requirements`.
        if 'hints' in doc:
            try:
                self.hints = load_field(doc.get('hints'), idmap_hints_union_of_None_type_or_array_of_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'hints', str).makeError("the `hints` field is not valid because:\n"+str(e)))
        else:
            self.hints = None
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        if 'cwlVersion' in doc:
            try:
                self.cwlVersion = load_field(doc.get('cwlVersion'), uri_union_of_None_type_or_CWLVersionLoader_False_True_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'cwlVersion', str).makeError("the `cwlVersion` field is not valid because:\n"+str(e)))
        else:
            self.cwlVersion = None
        if 'baseCommand' in doc:
            try:
                self.baseCommand = load_field(doc.get('baseCommand'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'baseCommand', str).makeError("the `baseCommand` field is not valid because:\n"+str(e)))
        else:
            self.baseCommand = None
        if 'arguments' in doc:
            try:
                self.arguments = load_field(doc.get('arguments'), union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'arguments', str).makeError("the `arguments` field is not valid because:\n"+str(e)))
        else:
            self.arguments = None
        if 'stdin' in doc:
            try:
                self.stdin = load_field(doc.get('stdin'), union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'stdin', str).makeError("the `stdin` field is not valid because:\n"+str(e)))
        else:
            self.stdin = None
        if 'stderr' in doc:
            try:
                self.stderr = load_field(doc.get('stderr'), union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'stderr', str).makeError("the `stderr` field is not valid because:\n"+str(e)))
        else:
            self.stderr = None
        if 'stdout' in doc:
            try:
                self.stdout = load_field(doc.get('stdout'), union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'stdout', str).makeError("the `stdout` field is not valid because:\n"+str(e)))
        else:
            self.stdout = None
        if 'successCodes' in doc:
            try:
                self.successCodes = load_field(doc.get('successCodes'), union_of_None_type_or_array_of_inttype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'successCodes', str).makeError("the `successCodes` field is not valid because:\n"+str(e)))
        else:
            self.successCodes = None
        if 'temporaryFailCodes' in doc:
            try:
                self.temporaryFailCodes = load_field(doc.get('temporaryFailCodes'), union_of_None_type_or_array_of_inttype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'temporaryFailCodes', str).makeError("the `temporaryFailCodes` field is not valid because:\n"+str(e)))
        else:
            self.temporaryFailCodes = None
        if 'permanentFailCodes' in doc:
            try:
                self.permanentFailCodes = load_field(doc.get('permanentFailCodes'), union_of_None_type_or_array_of_inttype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'permanentFailCodes', str).makeError("the `permanentFailCodes` field is not valid because:\n"+str(e)))
        else:
            self.permanentFailCodes = None
        # Any key that is not a schema field is either a namespaced extension
        # field (kept under its expanded URI) or a validation error.
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`" % (k)))
                    # NOTE(review): scanning stops at the first unrecognized
                    # field, so at most one invalid-field error is reported.
                    break
        if errors:
            raise ValidationException("Trying 'CommandLineTool'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; `id` and `cwlVersion` are
        re-relativized against `base_url`."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        # The class discriminator is always emitted.
        r['class'] = 'CommandLineTool'
        if self.id is not None:
            r['id'] = relative_uri(self.id, base_url, True)
        if self.inputs is not None:
            r['inputs'] = save(self.inputs, top=False, base_url=base_url)
        if self.outputs is not None:
            r['outputs'] = save(self.outputs, top=False, base_url=base_url)
        if self.requirements is not None:
            r['requirements'] = save(self.requirements, top=False, base_url=base_url)
        if self.hints is not None:
            r['hints'] = save(self.hints, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.cwlVersion is not None:
            r['cwlVersion'] = relative_uri(self.cwlVersion, base_url, False)
        if self.baseCommand is not None:
            r['baseCommand'] = save(self.baseCommand, top=False, base_url=base_url)
        if self.arguments is not None:
            r['arguments'] = save(self.arguments, top=False, base_url=base_url)
        if self.stdin is not None:
            r['stdin'] = save(self.stdin, top=False, base_url=base_url)
        if self.stderr is not None:
            r['stderr'] = save(self.stderr, top=False, base_url=base_url)
        if self.stdout is not None:
            r['stdout'] = save(self.stdout, top=False, base_url=base_url)
        if self.successCodes is not None:
            r['successCodes'] = save(self.successCodes, top=False, base_url=base_url)
        if self.temporaryFailCodes is not None:
            r['temporaryFailCodes'] = save(self.temporaryFailCodes, top=False, base_url=base_url)
        if self.permanentFailCodes is not None:
            r['permanentFailCodes'] = save(self.permanentFailCodes, top=False, base_url=base_url)
        # Only a top-level document records the namespace prefix table.
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized field names; used by __init__ to detect extension fields.
    attrs = frozenset(['id', 'inputs', 'outputs', 'requirements', 'hints', 'label', 'doc', 'cwlVersion', 'class', 'baseCommand', 'arguments', 'stdin', 'stderr', 'stdout', 'successCodes', 'temporaryFailCodes', 'permanentFailCodes'])
class DockerRequirement(ProcessRequirement):
    """
    Indicates that a workflow component should be run in a
    [Docker](http://docker.com) container, and specifies how to fetch or build
    the image.
    If a CommandLineTool lists `DockerRequirement` under
    `hints` (or `requirements`), it may (or must) be run in the specified Docker
    container.
    The platform must first acquire or install the correct Docker image as
    specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`.
    The platform must execute the tool in the container using `docker run` with
    the appropriate Docker image and tool command line.
    The workflow platform may provide input files and the designated output
    directory through the use of volume bind mounts. The platform may rewrite
    file paths in the input object to correspond to the Docker bind mounted
    locations.
    When running a tool contained in Docker, the workflow platform must not
    assume anything about the contents of the Docker container, such as the
    presence or absence of specific software, except to assume that the
    generated command line represents a valid command within the runtime
    environment of the container.
    ## Interaction with other requirements
    If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a
    DockerRequirement, the environment variables must be provided to Docker
    using `--env` or `--env-file` and interact with the container's preexisting
    environment as defined by Docker.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Parse the mapping `_doc` into a DockerRequirement.

        The `class` discriminator must equal 'DockerRequirement'.  All six
        docker* fields are optional strings; per-field loader failures are
        collected and reported together in one ValidationException.
        """
        # Shallow copy so the caller's document is never mutated; preserve
        # ruamel.yaml line/column info (`lc`) when the input carries it.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'DockerRequirement':
            raise ValidationException("Not a DockerRequirement")
        if 'dockerPull' in doc:
            try:
                self.dockerPull = load_field(doc.get('dockerPull'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'dockerPull', str).makeError("the `dockerPull` field is not valid because:\n"+str(e)))
        else:
            self.dockerPull = None
        if 'dockerLoad' in doc:
            try:
                self.dockerLoad = load_field(doc.get('dockerLoad'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'dockerLoad', str).makeError("the `dockerLoad` field is not valid because:\n"+str(e)))
        else:
            self.dockerLoad = None
        if 'dockerFile' in doc:
            try:
                self.dockerFile = load_field(doc.get('dockerFile'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'dockerFile', str).makeError("the `dockerFile` field is not valid because:\n"+str(e)))
        else:
            self.dockerFile = None
        if 'dockerImport' in doc:
            try:
                self.dockerImport = load_field(doc.get('dockerImport'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'dockerImport', str).makeError("the `dockerImport` field is not valid because:\n"+str(e)))
        else:
            self.dockerImport = None
        if 'dockerImageId' in doc:
            try:
                self.dockerImageId = load_field(doc.get('dockerImageId'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'dockerImageId', str).makeError("the `dockerImageId` field is not valid because:\n"+str(e)))
        else:
            self.dockerImageId = None
        if 'dockerOutputDirectory' in doc:
            try:
                self.dockerOutputDirectory = load_field(doc.get('dockerOutputDirectory'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'dockerOutputDirectory', str).makeError("the `dockerOutputDirectory` field is not valid because:\n"+str(e)))
        else:
            self.dockerOutputDirectory = None
        # Any key that is not a schema field is either a namespaced extension
        # field (kept under its expanded URI) or a validation error.
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`" % (k)))
                    # NOTE(review): scanning stops at the first unrecognized
                    # field, so at most one invalid-field error is reported.
                    break
        if errors:
            raise ValidationException("Trying 'DockerRequirement'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict with the class discriminator and
        every docker* field that is set."""
        r = {}
        for ef in self.extension_fields:
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        r['class'] = 'DockerRequirement'
        if self.dockerPull is not None:
            r['dockerPull'] = save(self.dockerPull, top=False, base_url=base_url)
        if self.dockerLoad is not None:
            r['dockerLoad'] = save(self.dockerLoad, top=False, base_url=base_url)
        if self.dockerFile is not None:
            r['dockerFile'] = save(self.dockerFile, top=False, base_url=base_url)
        if self.dockerImport is not None:
            r['dockerImport'] = save(self.dockerImport, top=False, base_url=base_url)
        if self.dockerImageId is not None:
            r['dockerImageId'] = save(self.dockerImageId, top=False, base_url=base_url)
        if self.dockerOutputDirectory is not None:
            r['dockerOutputDirectory'] = save(self.dockerOutputDirectory, top=False, base_url=base_url)
        # Only a top-level document records the namespace prefix table.
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Recognized field names; used by __init__ to detect extension fields.
    attrs = frozenset(['class', 'dockerPull', 'dockerLoad', 'dockerFile', 'dockerImport', 'dockerImageId', 'dockerOutputDirectory'])
class SoftwareRequirement(ProcessRequirement):
    """
    A list of software packages that should be configured in the environment of
    the defined process.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's document is untouched,
        # carrying over ruamel.yaml line/column info when present.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # The `class` discriminator must match exactly.
        if doc.get('class') != 'SoftwareRequirement':
            raise ValidationException("Not a SoftwareRequirement")
        # `packages` is the only (required) schema field.
        try:
            self.packages = load_field(doc.get('packages'), idmap_packages_array_of_SoftwarePackageLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'packages', str).makeError("the `packages` field is not valid because:\n"+str(e)))
        # Unknown keys: namespaced ones become extension fields, anything
        # else is a validation error (first one only).
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `class`, `packages`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'SoftwareRequirement'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        # Serialize back to a plain dict, extension fields first.
        serialized = {}
        vocab = self.loadingOptions.vocab
        for ext_key, ext_value in self.extension_fields.items():
            serialized[prefix_url(ext_key, vocab)] = ext_value
        serialized['class'] = 'SoftwareRequirement'
        if self.packages is not None:
            serialized['packages'] = save(self.packages, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            serialized["$namespaces"] = self.loadingOptions.namespaces
        return serialized
    attrs = frozenset(['class', 'packages'])
class SoftwarePackage(Savable):
    """A single software package entry: required `package` name plus
    optional `version` and `specs` lists."""
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's document is untouched,
        # carrying over ruamel.yaml line/column info when present.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # `package` is required; `version` and `specs` are optional.
        try:
            self.package = load_field(doc.get('package'), strtype, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'package', str).makeError("the `package` field is not valid because:\n"+str(e)))
        if 'version' not in doc:
            self.version = None
        else:
            try:
                self.version = load_field(doc.get('version'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'version', str).makeError("the `version` field is not valid because:\n"+str(e)))
        if 'specs' not in doc:
            self.specs = None
        else:
            try:
                self.specs = load_field(doc.get('specs'), union_of_None_type_or_array_of_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'specs', str).makeError("the `specs` field is not valid because:\n"+str(e)))
        # Unknown keys: namespaced ones become extension fields, anything
        # else is a validation error (first one only).
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `package`, `version`, `specs`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'SoftwarePackage'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        # Serialize back to a plain dict: extension fields first, then each
        # set schema field in declaration order.
        serialized = {}
        vocab = self.loadingOptions.vocab
        for ext_key, ext_value in self.extension_fields.items():
            serialized[prefix_url(ext_key, vocab)] = ext_value
        for field_name in ('package', 'version', 'specs'):
            field_value = getattr(self, field_name)
            if field_value is not None:
                serialized[field_name] = save(field_value, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            serialized["$namespaces"] = self.loadingOptions.namespaces
        return serialized
    attrs = frozenset(['package', 'version', 'specs'])
class Dirent(Savable):
    """
    Define a file or subdirectory that must be placed in the designated output
    directory prior to executing the command line tool. May be the result of
    executing an expression, such as building a configuration file from a
    template.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's document is untouched,
        # carrying over ruamel.yaml line/column info when present.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'entryname' not in doc:
            self.entryname = None
        else:
            try:
                self.entryname = load_field(doc.get('entryname'), union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'entryname', str).makeError("the `entryname` field is not valid because:\n"+str(e)))
        # `entry` is the only required field.
        try:
            self.entry = load_field(doc.get('entry'), union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'entry', str).makeError("the `entry` field is not valid because:\n"+str(e)))
        if 'writable' not in doc:
            self.writable = None
        else:
            try:
                self.writable = load_field(doc.get('writable'), union_of_None_type_or_booltype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'writable', str).makeError("the `writable` field is not valid because:\n"+str(e)))
        # Unknown keys: namespaced ones become extension fields, anything
        # else is a validation error (first one only).
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `entryname`, `entry`, `writable`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'Dirent'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        # Serialize back to a plain dict: extension fields first, then each
        # set schema field in declaration order.
        serialized = {}
        vocab = self.loadingOptions.vocab
        for ext_key, ext_value in self.extension_fields.items():
            serialized[prefix_url(ext_key, vocab)] = ext_value
        for field_name in ('entryname', 'entry', 'writable'):
            field_value = getattr(self, field_name)
            if field_value is not None:
                serialized[field_name] = save(field_value, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            serialized["$namespaces"] = self.loadingOptions.namespaces
        return serialized
    attrs = frozenset(['entryname', 'entry', 'writable'])
class InitialWorkDirRequirement(ProcessRequirement):
    """
    Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's document is untouched,
        # carrying over ruamel.yaml line/column info when present.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # The `class` discriminator must match exactly.
        if doc.get('class') != 'InitialWorkDirRequirement':
            raise ValidationException("Not a InitialWorkDirRequirement")
        # `listing` is the only (required) schema field.
        try:
            self.listing = load_field(doc.get('listing'), union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'listing', str).makeError("the `listing` field is not valid because:\n"+str(e)))
        # Unknown keys: namespaced ones become extension fields, anything
        # else is a validation error (first one only).
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `class`, `listing`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'InitialWorkDirRequirement'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        # Serialize back to a plain dict, extension fields first.
        serialized = {}
        vocab = self.loadingOptions.vocab
        for ext_key, ext_value in self.extension_fields.items():
            serialized[prefix_url(ext_key, vocab)] = ext_value
        serialized['class'] = 'InitialWorkDirRequirement'
        if self.listing is not None:
            serialized['listing'] = save(self.listing, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            serialized["$namespaces"] = self.loadingOptions.namespaces
        return serialized
    attrs = frozenset(['class', 'listing'])
class EnvVarRequirement(ProcessRequirement):
    """
    Define a list of environment variables which will be set in the
    execution environment of the tool. See `EnvironmentDef` for details.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's document is untouched,
        # carrying over ruamel.yaml line/column info when present.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # The `class` discriminator must match exactly.
        if doc.get('class') != 'EnvVarRequirement':
            raise ValidationException("Not a EnvVarRequirement")
        # `envDef` is the only (required) schema field.
        try:
            self.envDef = load_field(doc.get('envDef'), idmap_envDef_array_of_EnvironmentDefLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'envDef', str).makeError("the `envDef` field is not valid because:\n"+str(e)))
        # Unknown keys: namespaced ones become extension fields, anything
        # else is a validation error (first one only).
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `class`, `envDef`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'EnvVarRequirement'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        # Serialize back to a plain dict, extension fields first.
        serialized = {}
        vocab = self.loadingOptions.vocab
        for ext_key, ext_value in self.extension_fields.items():
            serialized[prefix_url(ext_key, vocab)] = ext_value
        serialized['class'] = 'EnvVarRequirement'
        if self.envDef is not None:
            serialized['envDef'] = save(self.envDef, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            serialized["$namespaces"] = self.loadingOptions.namespaces
        return serialized
    attrs = frozenset(['class', 'envDef'])
class ShellCommandRequirement(ProcessRequirement):
    """
    Modify the behavior of CommandLineTool to generate a single string
    containing a shell command line. Each item in the argument list must be
    joined into a string separated by single spaces and quoted to prevent
    intepretation by the shell, unless `CommandLineBinding` for that argument
    contains `shellQuote: false`. If `shellQuote: false` is specified, the
    argument is joined into the command string without quoting, which allows
    the use of shell metacharacters such as `|` for pipes.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        # Work on a shallow copy so the caller's document is untouched,
        # carrying over ruamel.yaml line/column info when present.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        # The `class` discriminator is the only schema field.
        if doc.get('class') != 'ShellCommandRequirement':
            raise ValidationException("Not a ShellCommandRequirement")
        # Unknown keys: namespaced ones become extension fields, anything
        # else is a validation error (first one only).
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" in key:
                expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[key]
            else:
                errors.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `class`" % (key)))
                break
        if errors:
            raise ValidationException("Trying 'ShellCommandRequirement'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        # Serialize back to a plain dict, extension fields first.
        serialized = {}
        vocab = self.loadingOptions.vocab
        for ext_key, ext_value in self.extension_fields.items():
            serialized[prefix_url(ext_key, vocab)] = ext_value
        serialized['class'] = 'ShellCommandRequirement'
        if top and self.loadingOptions.namespaces:
            serialized["$namespaces"] = self.loadingOptions.namespaces
        return serialized
    attrs = frozenset(['class'])
class ResourceRequirement(ProcessRequirement):
    """
    Specify basic hardware resource requirements.

    "min" is the minimum amount of a resource that must be reserved to
    schedule a job; if "min" cannot be satisfied, the job should not be run.
    "max" is the maximum amount of a resource the job shall be permitted to
    use.  If a node has sufficient resources, multiple jobs may be scheduled
    on a single node provided each job's "max" resource requirements are
    met; a job attempting to exceed its "max" allocation may be denied
    additional resources, which may result in job failure.

    If "min" is specified but "max" is not, then "max" == "min".
    If "max" is specified but "min" is not, then "min" == "max".
    It is an error if max < min, or if any of these values is negative.
    If neither "min" nor "max" is specified for a resource, an
    implementation may provide a default.
    """
    # All eight resource fields are optional and share the same value loader.
    _RESOURCE_FIELDS = ('coresMin', 'coresMax', 'ramMin', 'ramMax',
                        'tmpdirMin', 'tmpdirMax', 'outdirMin', 'outdirMax')

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Carry over line/column bookkeeping (ruamel-style `lc`) so
            # validation errors can point at the original source location.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        problems = []
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'ResourceRequirement':
            raise ValidationException("Not a ResourceRequirement")
        # Load every resource field, accumulating (not raising) validation errors.
        for name in self._RESOURCE_FIELDS:
            value = None
            if name in doc:
                try:
                    value = load_field(doc.get(name), union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, baseuri, loadingOptions)
                except ValidationException as e:
                    problems.append(SourceLine(doc, name, str).makeError("the `%s` field is not valid because:\n%s" % (name, str(e))))
            setattr(self, name, value)
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                # Unknown, non-namespaced key: record the error and stop scanning.
                problems.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`" % (key)))
                break
            expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]
        if problems:
            raise ValidationException("Trying 'ResourceRequirement'\n"+"\n".join(problems))

    def save(self, top=False, base_url=""):
        """Serialize this requirement back to a JSON-compatible dict."""
        result = {}
        for ext in self.extension_fields:
            result[prefix_url(ext, self.loadingOptions.vocab)] = self.extension_fields[ext]
        result['class'] = 'ResourceRequirement'
        for name in self._RESOURCE_FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = save(value, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            result["$namespaces"] = self.loadingOptions.namespaces
        return result
    # Recognized schema fields; anything else must be a namespaced extension.
    attrs = frozenset(['class', 'coresMin', 'coresMax', 'ramMin', 'ramMax', 'tmpdirMin', 'tmpdirMax', 'outdirMin', 'outdirMax'])
class ExpressionToolOutputParameter(OutputParameter):
    """An output parameter of an ExpressionTool (generated parser class)."""

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Carry over line/column bookkeeping (ruamel-style `lc`) so
            # validation errors can point at the original source location.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        problems = []
        self.loadingOptions = loadingOptions
        # `id` is handled explicitly: it is mandatory (docRoot is the only
        # fallback) and rebinds `baseuri` for every field loaded after it.
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        baseuri = self.id

        def grab(name, loader):
            # Load an optional field, accumulating errors instead of raising.
            if name not in doc:
                return None
            try:
                return load_field(doc.get(name), loader, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, name, str).makeError("the `%s` field is not valid because:\n%s" % (name, str(e))))
                return None

        self.label = grab('label', union_of_None_type_or_strtype)
        self.secondaryFiles = grab('secondaryFiles', union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader)
        self.streamable = grab('streamable', union_of_None_type_or_booltype)
        self.doc = grab('doc', union_of_None_type_or_strtype_or_array_of_strtype)
        self.outputBinding = grab('outputBinding', union_of_None_type_or_CommandOutputBindingLoader)
        self.format = grab('format', uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None)
        self.type = grab('type', typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2)
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                # Unknown, non-namespaced key: record the error and stop scanning.
                problems.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`" % (key)))
                break
            expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]
        if problems:
            raise ValidationException("Trying 'ExpressionToolOutputParameter'\n"+"\n".join(problems))

    def save(self, top=False, base_url=""):
        """Serialize this parameter back to a JSON-compatible dict."""
        result = {}
        for ext in self.extension_fields:
            result[prefix_url(ext, self.loadingOptions.vocab)] = self.extension_fields[ext]
        # URI-valued fields (`id`, `format`) are relativized; the rest recurse
        # through the module-level save().
        if self.id is not None:
            result['id'] = relative_uri(self.id, base_url, True)
        for name in ('label', 'secondaryFiles', 'streamable', 'doc', 'outputBinding'):
            value = getattr(self, name)
            if value is not None:
                result[name] = save(value, top=False, base_url=base_url)
        if self.format is not None:
            result['format'] = relative_uri(self.format, base_url, True)
        if self.type is not None:
            result['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            result["$namespaces"] = self.loadingOptions.namespaces
        return result
    # Recognized schema fields; anything else must be a namespaced extension.
    attrs = frozenset(['label', 'secondaryFiles', 'streamable', 'doc', 'id', 'outputBinding', 'format', 'type'])
class ExpressionTool(Process):
    """
    Execute an expression as a Workflow step.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Carry over line/column bookkeeping (ruamel-style `lc`) so
            # validation errors can point at the original source location.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        problems = []
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'ExpressionTool':
            raise ValidationException("Not a ExpressionTool")
        # `id` is handled explicitly: a missing id falls back to docRoot or a
        # fresh blank-node identifier, and rebinds `baseuri` for later fields.
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            self.id = docRoot if docRoot is not None else "_:" + str(uuid.uuid4())
        baseuri = self.id

        def grab(name, loader, required=False):
            # Load one field, accumulating errors instead of raising.
            # Required fields are loaded unconditionally (even if absent).
            if not required and name not in doc:
                return None
            try:
                return load_field(doc.get(name), loader, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, name, str).makeError("the `%s` field is not valid because:\n%s" % (name, str(e))))
                return None

        self.inputs = grab('inputs', idmap_inputs_array_of_InputParameterLoader, required=True)
        self.outputs = grab('outputs', idmap_outputs_array_of_ExpressionToolOutputParameterLoader, required=True)
        self.requirements = grab('requirements', idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader)
        self.hints = grab('hints', idmap_hints_union_of_None_type_or_array_of_Any_type)
        self.label = grab('label', union_of_None_type_or_strtype)
        self.doc = grab('doc', union_of_None_type_or_strtype)
        self.cwlVersion = grab('cwlVersion', uri_union_of_None_type_or_CWLVersionLoader_False_True_None)
        self.expression = grab('expression', union_of_strtype_or_ExpressionLoader, required=True)
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                # Unknown, non-namespaced key: record the error and stop scanning.
                problems.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `expression`" % (key)))
                break
            expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]
        if problems:
            raise ValidationException("Trying 'ExpressionTool'\n"+"\n".join(problems))

    def save(self, top=False, base_url=""):
        """Serialize this process back to a JSON-compatible dict."""
        result = {}
        for ext in self.extension_fields:
            result[prefix_url(ext, self.loadingOptions.vocab)] = self.extension_fields[ext]
        result['class'] = 'ExpressionTool'
        if self.id is not None:
            result['id'] = relative_uri(self.id, base_url, True)
        for name in ('inputs', 'outputs', 'requirements', 'hints', 'label', 'doc'):
            value = getattr(self, name)
            if value is not None:
                result[name] = save(value, top=False, base_url=base_url)
        if self.cwlVersion is not None:
            result['cwlVersion'] = relative_uri(self.cwlVersion, base_url, False)
        if self.expression is not None:
            result['expression'] = save(self.expression, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            result["$namespaces"] = self.loadingOptions.namespaces
        return result
    # Recognized schema fields; anything else must be a namespaced extension.
    attrs = frozenset(['id', 'inputs', 'outputs', 'requirements', 'hints', 'label', 'doc', 'cwlVersion', 'class', 'expression'])
class WorkflowOutputParameter(OutputParameter):
    """
    Describe an output parameter of a workflow.  The parameter must be
    connected (via `outputSource`) to one or more parameters defined in the
    workflow that will provide the value of the output parameter.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Carry over line/column bookkeeping (ruamel-style `lc`) so
            # validation errors can point at the original source location.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        problems = []
        self.loadingOptions = loadingOptions
        # `id` is handled explicitly: it is mandatory (docRoot is the only
        # fallback) and rebinds `baseuri` for every field loaded after it.
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        baseuri = self.id

        def grab(name, loader):
            # Load an optional field, accumulating errors instead of raising.
            if name not in doc:
                return None
            try:
                return load_field(doc.get(name), loader, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, name, str).makeError("the `%s` field is not valid because:\n%s" % (name, str(e))))
                return None

        self.label = grab('label', union_of_None_type_or_strtype)
        self.secondaryFiles = grab('secondaryFiles', union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader)
        self.streamable = grab('streamable', union_of_None_type_or_booltype)
        self.doc = grab('doc', union_of_None_type_or_strtype_or_array_of_strtype)
        self.outputBinding = grab('outputBinding', union_of_None_type_or_CommandOutputBindingLoader)
        self.format = grab('format', uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None)
        self.outputSource = grab('outputSource', uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0)
        self.linkMerge = grab('linkMerge', union_of_None_type_or_LinkMergeMethodLoader)
        self.type = grab('type', typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2)
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                # Unknown, non-namespaced key: record the error and stop scanning.
                problems.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `outputSource`, `linkMerge`, `type`" % (key)))
                break
            expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]
        if problems:
            raise ValidationException("Trying 'WorkflowOutputParameter'\n"+"\n".join(problems))

    def save(self, top=False, base_url=""):
        """Serialize this parameter back to a JSON-compatible dict."""
        result = {}
        for ext in self.extension_fields:
            result[prefix_url(ext, self.loadingOptions.vocab)] = self.extension_fields[ext]
        # URI-valued fields (`id`, `format`, `outputSource`) are relativized;
        # the rest recurse through the module-level save().
        if self.id is not None:
            result['id'] = relative_uri(self.id, base_url, True)
        for name in ('label', 'secondaryFiles', 'streamable', 'doc', 'outputBinding'):
            value = getattr(self, name)
            if value is not None:
                result[name] = save(value, top=False, base_url=base_url)
        if self.format is not None:
            result['format'] = relative_uri(self.format, base_url, True)
        if self.outputSource is not None:
            result['outputSource'] = relative_uri(self.outputSource, base_url, False)
        if self.linkMerge is not None:
            result['linkMerge'] = save(self.linkMerge, top=False, base_url=base_url)
        if self.type is not None:
            result['type'] = save(self.type, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            result["$namespaces"] = self.loadingOptions.namespaces
        return result
    # Recognized schema fields; anything else must be a namespaced extension.
    attrs = frozenset(['label', 'secondaryFiles', 'streamable', 'doc', 'id', 'outputBinding', 'format', 'outputSource', 'linkMerge', 'type'])
class Sink(Savable):
    """Marker base class for schema types that receive data links (see WorkflowStepInput)."""
    pass
class WorkflowStepInput(Sink):
    """
    The input of a workflow step connects an upstream parameter (from the
    workflow inputs, or the outputs of other workflow steps) with the input
    parameters of the underlying step.

    ## Input object

    A WorkflowStepInput object must contain an `id` field in the form
    `#fieldname` or `#prefix/fieldname`.  When the `id` field contains a
    slash `/` the field name consists of the characters following the final
    slash (the prefix portion may contain one or more slashes to indicate
    scope).  This defines a field of the workflow step input object with the
    value of the `source` parameter(s).

    ## Merging

    To merge multiple inbound data links,
    [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must
    be specified in the workflow or workflow step requirements.  If the sink
    parameter is an array, or named in a [workflow scatter](#WorkflowStep)
    operation, there may be multiple inbound data links listed in `source`.
    The values are merged per the `linkMerge` method (default
    "merge_nested"):

    * **merge_nested**: the input is an array with exactly one entry per
      input link; a single link's value is wrapped in a single-item list.
    * **merge_flattened**: source and sink types must be compatible (or the
      source compatible with one element of the sink's "items" type); array
      sources are concatenated, single-element sources are appended.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Carry over line/column bookkeeping (ruamel-style `lc`) so
            # validation errors can point at the original source location.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        problems = []
        self.loadingOptions = loadingOptions
        # `id` is handled explicitly: it is mandatory (docRoot is the only
        # fallback) and rebinds `baseuri` for every field loaded after it.
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        baseuri = self.id

        def grab(name, loader):
            # Load an optional field, accumulating errors instead of raising.
            if name not in doc:
                return None
            try:
                return load_field(doc.get(name), loader, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, name, str).makeError("the `%s` field is not valid because:\n%s" % (name, str(e))))
                return None

        self.source = grab('source', uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2)
        self.linkMerge = grab('linkMerge', union_of_None_type_or_LinkMergeMethodLoader)
        self.default = grab('default', union_of_None_type_or_Any_type)
        self.valueFrom = grab('valueFrom', union_of_None_type_or_strtype_or_ExpressionLoader)
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                # Unknown, non-namespaced key: record the error and stop scanning.
                problems.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `source`, `linkMerge`, `id`, `default`, `valueFrom`" % (key)))
                break
            expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]
        if problems:
            raise ValidationException("Trying 'WorkflowStepInput'\n"+"\n".join(problems))

    def save(self, top=False, base_url=""):
        """Serialize this step input back to a JSON-compatible dict."""
        result = {}
        for ext in self.extension_fields:
            result[prefix_url(ext, self.loadingOptions.vocab)] = self.extension_fields[ext]
        # URI-valued fields (`id`, `source`) are relativized; the rest recurse
        # through the module-level save().
        if self.id is not None:
            result['id'] = relative_uri(self.id, base_url, True)
        if self.source is not None:
            result['source'] = relative_uri(self.source, base_url, False)
        for name in ('linkMerge', 'default', 'valueFrom'):
            value = getattr(self, name)
            if value is not None:
                result[name] = save(value, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            result["$namespaces"] = self.loadingOptions.namespaces
        return result
    # Recognized schema fields; anything else must be a namespaced extension.
    attrs = frozenset(['source', 'linkMerge', 'id', 'default', 'valueFrom'])
class WorkflowStepOutput(Savable):
    """
    Associate an output parameter of the underlying process with a workflow
    parameter.  The workflow parameter (given in the `id` field) may be used
    as a `source` to connect with input parameters of other workflow steps,
    or with an output parameter of the process.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Carry over line/column bookkeeping (ruamel-style `lc`) so
            # validation errors can point at the original source location.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        problems = []
        self.loadingOptions = loadingOptions
        # `id` is the only schema field; it is mandatory (docRoot fallback).
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                problems.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                self.id = docRoot
            else:
                raise ValidationException("Missing id")
        baseuri = self.id
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                # Unknown, non-namespaced key: record the error and stop scanning.
                problems.append(SourceLine(doc, key, str).makeError("invalid field `%s`, expected one of: `id`" % (key)))
                break
            expanded = expand_url(key, u"", loadingOptions, scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]
        if problems:
            raise ValidationException("Trying 'WorkflowStepOutput'\n"+"\n".join(problems))

    def save(self, top=False, base_url=""):
        """Serialize this step output back to a JSON-compatible dict."""
        result = {}
        for ext in self.extension_fields:
            result[prefix_url(ext, self.loadingOptions.vocab)] = self.extension_fields[ext]
        if self.id is not None:
            result['id'] = relative_uri(self.id, base_url, True)
        if top and self.loadingOptions.namespaces:
            result["$namespaces"] = self.loadingOptions.namespaces
        return result
    # Recognized schema fields; anything else must be a namespaced extension.
    attrs = frozenset(['id'])
class WorkflowStep(Savable):
    """
    A workflow step is an executable element of a workflow. It specifies the
    underlying process implementation (such as `CommandLineTool` or another
    `Workflow`) in the `run` field and connects the input and output parameters
    of the underlying process to workflow parameters.
    # Scatter/gather
    To use scatter/gather,
    [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified
    in the workflow or workflow step requirements.
    A "scatter" operation specifies that the associated workflow step or
    subworkflow should execute separately over a list of input elements. Each
    job making up a scatter operation is independent and may be executed
    concurrently.
    The `scatter` field specifies one or more input parameters which will be
    scattered. An input parameter may be listed more than once. The declared
    type of each input parameter is implicitly becomes an array of items of the
    input parameter type. If a parameter is listed more than once, it becomes
    a nested array. As a result, upstream parameters which are connected to
    scattered parameters must be arrays.
    All output parameter types are also implicitly wrapped in arrays. Each job
    in the scatter results in an entry in the output array.
    If any scattered parameter runtime value is an empty array, all outputs are
    set to empty arrays and no work is done for the step, according to
    applicable scattering rules.
    If `scatter` declares more than one input parameter, `scatterMethod`
    describes how to decompose the input into a discrete set of jobs.
    * **dotproduct** specifies that each of the input arrays are aligned and one
    element taken from each array to construct each job. It is an error
    if all input arrays are not the same length.
    * **nested_crossproduct** specifies the Cartesian product of the inputs,
    producing a job for every combination of the scattered inputs. The
    output must be nested arrays for each level of scattering, in the
    order that the input arrays are listed in the `scatter` field.
    * **flat_crossproduct** specifies the Cartesian product of the inputs,
    producing a job for every combination of the scattered inputs. The
    output arrays must be flattened to a single level, but otherwise listed in the
    order that the input arrays are listed in the `scatter` field.
    # Subworkflows
    To specify a nested workflow as part of a workflow step,
    [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be
    specified in the workflow or workflow step requirements.
    It is a fatal error if a workflow directly or indirectly invokes itself as
    a subworkflow (recursive workflows are not allowed).
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Load a WorkflowStep from the mapping `_doc`, resolving URIs against `baseuri`.

        Field-level failures are accumulated in `errors` and raised together
        as a single ValidationException at the end, so one bad field does not
        hide problems in the others.
        """
        # Shallow-copy so the caller's mapping is not mutated during loading.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Preserve line/column metadata (ruamel.yaml-style `lc`) so
            # SourceLine can point errors at the original document text.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                # A document root without an explicit id takes its own URI.
                self.id = docRoot
            else:
                # Unlike Workflow, a step has no anonymous-id fallback.
                raise ValidationException("Missing id")
        # All further relative references resolve against this step's id.
        baseuri = self.id
        try:
            self.in_ = load_field(doc.get('in'), idmap_in__array_of_WorkflowStepInputLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'in', str).makeError("the `in` field is not valid because:\n"+str(e)))
        try:
            self.out = load_field(doc.get('out'), uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'out', str).makeError("the `out` field is not valid because:\n"+str(e)))
        if 'requirements' in doc:
            try:
                self.requirements = load_field(doc.get('requirements'), idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'requirements', str).makeError("the `requirements` field is not valid because:\n"+str(e)))
        else:
            self.requirements = None
        if 'hints' in doc:
            try:
                # Hints are deliberately loaded as Any: unknown hints must not
                # be a validation error.
                self.hints = load_field(doc.get('hints'), idmap_hints_union_of_None_type_or_array_of_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'hints', str).makeError("the `hints` field is not valid because:\n"+str(e)))
        else:
            self.hints = None
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        try:
            # `run` may stay a URI string or be loaded inline as a full
            # CommandLineTool / ExpressionTool / Workflow document.
            self.run = load_field(doc.get('run'), uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'run', str).makeError("the `run` field is not valid because:\n"+str(e)))
        if 'scatter' in doc:
            try:
                self.scatter = load_field(doc.get('scatter'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'scatter', str).makeError("the `scatter` field is not valid because:\n"+str(e)))
        else:
            self.scatter = None
        if 'scatterMethod' in doc:
            try:
                self.scatterMethod = load_field(doc.get('scatterMethod'), uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'scatterMethod', str).makeError("the `scatterMethod` field is not valid because:\n"+str(e)))
        else:
            self.scatterMethod = None
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    # Namespaced keys are extension fields, stored under their
                    # fully expanded URI.
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    # NOTE(review): only the first unknown plain field is
                    # reported because of this `break` (generated-code behavior).
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `id`, `in`, `out`, `requirements`, `hints`, `label`, `doc`, `run`, `scatter`, `scatterMethod`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'WorkflowStep'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize this WorkflowStep back to a plain dict, relativizing URIs
        against `base_url`. `top` marks the document root ($namespaces)."""
        r = {}
        for ef in self.extension_fields:
            # Compact expanded extension-field URIs back to prefixed form.
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        if self.id is not None:
            r['id'] = relative_uri(self.id, base_url, True)
        if self.in_ is not None:
            r['in'] = save(self.in_, top=False, base_url=base_url)
        if self.out is not None:
            # `out` holds URIs (scoped refs), not nested records.
            r['out'] = relative_uri(self.out, base_url, True)
        if self.requirements is not None:
            r['requirements'] = save(self.requirements, top=False, base_url=base_url)
        if self.hints is not None:
            r['hints'] = save(self.hints, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.run is not None:
            r['run'] = relative_uri(self.run, base_url, False)
        if self.scatter is not None:
            r['scatter'] = relative_uri(self.scatter, base_url, False)
        if self.scatterMethod is not None:
            r['scatterMethod'] = relative_uri(self.scatterMethod, base_url, False)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Known (non-extension) field names for this record type.
    attrs = frozenset(['id', 'in', 'out', 'requirements', 'hints', 'label', 'doc', 'run', 'scatter', 'scatterMethod'])
class Workflow(Process):
    """
    A workflow describes a set of **steps** and the **dependencies** between
    those steps. When a step produces output that will be consumed by a
    second step, the first step is a dependency of the second step.
    When there is a dependency, the workflow engine must execute the preceeding
    step and wait for it to successfully produce output before executing the
    dependent step. If two steps are defined in the workflow graph that
    are not directly or indirectly dependent, these steps are **independent**,
    and may execute in any order or execute concurrently. A workflow is
    complete when all steps have been executed.
    Dependencies between parameters are expressed using the `source` field on
    [workflow step input parameters](#WorkflowStepInput) and [workflow output
    parameters](#WorkflowOutputParameter).
    The `source` field expresses the dependency of one parameter on another
    such that when a value is associated with the parameter specified by
    `source`, that value is propagated to the destination parameter. When all
    data links inbound to a given step are fufilled, the step is ready to
    execute.
    ## Workflow success and failure
    A completed step must result in one of `success`, `temporaryFailure` or
    `permanentFailure` states. An implementation may choose to retry a step
    execution which resulted in `temporaryFailure`. An implementation may
    choose to either continue running other steps of a workflow, or terminate
    immediately upon `permanentFailure`.
    * If any step of a workflow execution results in `permanentFailure`, then
    the workflow status is `permanentFailure`.
    * If one or more steps result in `temporaryFailure` and all other steps
    complete `success` or are not executed, then the workflow status is
    `temporaryFailure`.
    * If all workflow steps are executed and complete with `success`, then the
    workflow status is `success`.
    # Extensions
    [ScatterFeatureRequirement](#ScatterFeatureRequirement) and
    [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are
    available as standard [extensions](#Extensions_and_Metadata) to core
    workflow semantics.
    """
    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Load a Workflow from the mapping `_doc`, resolving URIs against `baseuri`.

        Field-level failures are accumulated and raised together as one
        ValidationException; a wrong `class` value fails fast instead.
        """
        # Shallow-copy so the caller's mapping is not mutated during loading.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Preserve line/column metadata (ruamel.yaml-style `lc`) so
            # SourceLine can point errors at the original document text.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        errors = []
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'Workflow':
            raise ValidationException("Not a Workflow")
        if 'id' in doc:
            try:
                self.id = load_field(doc.get('id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'id', str).makeError("the `id` field is not valid because:\n"+str(e)))
        else:
            self.id = None
        if self.id is None:
            if docRoot is not None:
                # A document root without an explicit id takes its own URI.
                self.id = docRoot
            else:
                # Anonymous workflow: mint a blank-node style identifier.
                self.id = "_:" + str(uuid.uuid4())
        # All further relative references resolve against the workflow's id.
        baseuri = self.id
        try:
            self.inputs = load_field(doc.get('inputs'), idmap_inputs_array_of_InputParameterLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'inputs', str).makeError("the `inputs` field is not valid because:\n"+str(e)))
        try:
            self.outputs = load_field(doc.get('outputs'), idmap_outputs_array_of_WorkflowOutputParameterLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'outputs', str).makeError("the `outputs` field is not valid because:\n"+str(e)))
        if 'requirements' in doc:
            try:
                self.requirements = load_field(doc.get('requirements'), idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'requirements', str).makeError("the `requirements` field is not valid because:\n"+str(e)))
        else:
            self.requirements = None
        if 'hints' in doc:
            try:
                # Hints are deliberately loaded as Any: unknown hints must not
                # be a validation error.
                self.hints = load_field(doc.get('hints'), idmap_hints_union_of_None_type_or_array_of_Any_type, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'hints', str).makeError("the `hints` field is not valid because:\n"+str(e)))
        else:
            self.hints = None
        if 'label' in doc:
            try:
                self.label = load_field(doc.get('label'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'label', str).makeError("the `label` field is not valid because:\n"+str(e)))
        else:
            self.label = None
        if 'doc' in doc:
            try:
                self.doc = load_field(doc.get('doc'), union_of_None_type_or_strtype, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'doc', str).makeError("the `doc` field is not valid because:\n"+str(e)))
        else:
            self.doc = None
        if 'cwlVersion' in doc:
            try:
                self.cwlVersion = load_field(doc.get('cwlVersion'), uri_union_of_None_type_or_CWLVersionLoader_False_True_None, baseuri, loadingOptions)
            except ValidationException as e:
                errors.append(SourceLine(doc, 'cwlVersion', str).makeError("the `cwlVersion` field is not valid because:\n"+str(e)))
        else:
            self.cwlVersion = None
        try:
            self.steps = load_field(doc.get('steps'), idmap_steps_union_of_array_of_WorkflowStepLoader, baseuri, loadingOptions)
        except ValidationException as e:
            errors.append(SourceLine(doc, 'steps', str).makeError("the `steps` field is not valid because:\n"+str(e)))
        self.extension_fields = {}
        for k in doc.keys():
            if k not in self.attrs:
                if ":" in k:
                    # Namespaced keys are extension fields, stored under their
                    # fully expanded URI.
                    ex = expand_url(k, u"", loadingOptions, scoped_id=False, vocab_term=False)
                    self.extension_fields[ex] = doc[k]
                else:
                    # NOTE(review): only the first unknown plain field is
                    # reported because of this `break` (generated-code behavior).
                    errors.append(SourceLine(doc, k, str).makeError("invalid field `%s`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `steps`" % (k)))
                    break
        if errors:
            raise ValidationException("Trying 'Workflow'\n"+"\n".join(errors))
    def save(self, top=False, base_url=""):
        """Serialize this Workflow back to a plain dict, relativizing URIs
        against `base_url`. `top` marks the document root ($namespaces)."""
        r = {}
        for ef in self.extension_fields:
            # Compact expanded extension-field URIs back to prefixed form.
            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        r['class'] = 'Workflow'
        if self.id is not None:
            r['id'] = relative_uri(self.id, base_url, True)
        if self.inputs is not None:
            r['inputs'] = save(self.inputs, top=False, base_url=base_url)
        if self.outputs is not None:
            r['outputs'] = save(self.outputs, top=False, base_url=base_url)
        if self.requirements is not None:
            r['requirements'] = save(self.requirements, top=False, base_url=base_url)
        if self.hints is not None:
            r['hints'] = save(self.hints, top=False, base_url=base_url)
        if self.label is not None:
            r['label'] = save(self.label, top=False, base_url=base_url)
        if self.doc is not None:
            r['doc'] = save(self.doc, top=False, base_url=base_url)
        if self.cwlVersion is not None:
            # cwlVersion is a vocabulary term URI, not a nested record.
            r['cwlVersion'] = relative_uri(self.cwlVersion, base_url, False)
        if self.steps is not None:
            r['steps'] = save(self.steps, top=False, base_url=base_url)
        if top and self.loadingOptions.namespaces:
            r["$namespaces"] = self.loadingOptions.namespaces
        return r
    # Known (non-extension) field names for this record type.
    attrs = frozenset(['id', 'inputs', 'outputs', 'requirements', 'hints', 'label', 'doc', 'cwlVersion', 'class', 'steps'])
class SubworkflowFeatureRequirement(ProcessRequirement):
    """
    Indicates that the workflow platform must support nested workflows in
    the `run` field of [WorkflowStep](#WorkflowStep).
    """

    # Known (non-extension) field names for this record type.
    attrs = frozenset(['class'])

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Load the requirement from `_doc`; only `class` and namespaced
        extension fields are accepted."""
        # Work on a shallow copy so the caller's mapping stays untouched.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Carry over source line/column bookkeeping for error reporting.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'SubworkflowFeatureRequirement':
            raise ValidationException("Not a SubworkflowFeatureRequirement")
        self.extension_fields = {}
        for key in doc.keys():
            if key in self.attrs:
                continue
            if ":" not in key:
                # Unknown plain field: report the first offender and stop.
                message = SourceLine(doc, key, str).makeError(
                    "invalid field `%s`, expected one of: `class`" % (key))
                raise ValidationException(
                    "Trying 'SubworkflowFeatureRequirement'\n" + message)
            # Namespaced keys become extension fields under their full URI.
            expanded = expand_url(key, u"", loadingOptions,
                                  scoped_id=False, vocab_term=False)
            self.extension_fields[expanded] = doc[key]

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; the root also emits $namespaces."""
        result = {}
        for expanded, value in self.extension_fields.items():
            # Compact expanded URIs back to prefixed form.
            result[prefix_url(expanded, self.loadingOptions.vocab)] = value
        result['class'] = 'SubworkflowFeatureRequirement'
        if top and self.loadingOptions.namespaces:
            result["$namespaces"] = self.loadingOptions.namespaces
        return result
class ScatterFeatureRequirement(ProcessRequirement):
    """
    Indicates that the workflow platform must support the `scatter` and
    `scatterMethod` fields of [WorkflowStep](#WorkflowStep).
    """

    # Known (non-extension) field names for this record type.
    attrs = frozenset(['class'])

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Load the requirement from `_doc`; only `class` and namespaced
        extension fields are accepted."""
        # Shallow copy keeps the caller's mapping untouched.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Preserve line/column info so errors can cite the source.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'ScatterFeatureRequirement':
            raise ValidationException("Not a ScatterFeatureRequirement")
        self.extension_fields = {}
        for fieldname in doc.keys():
            if fieldname in self.attrs:
                continue
            if ":" in fieldname:
                # Namespaced keys are kept as extension fields under their
                # fully expanded URI.
                uri = expand_url(fieldname, u"", loadingOptions,
                                 scoped_id=False, vocab_term=False)
                self.extension_fields[uri] = doc[fieldname]
            else:
                # The first unrecognized plain field aborts the load.
                err = SourceLine(doc, fieldname, str).makeError(
                    "invalid field `%s`, expected one of: `class`" % (fieldname))
                raise ValidationException(
                    "Trying 'ScatterFeatureRequirement'\n" + err)

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; the root also emits $namespaces."""
        out = {}
        for uri, value in self.extension_fields.items():
            # Compact expanded URIs back to prefixed form.
            out[prefix_url(uri, self.loadingOptions.vocab)] = value
        out['class'] = 'ScatterFeatureRequirement'
        if top and self.loadingOptions.namespaces:
            out["$namespaces"] = self.loadingOptions.namespaces
        return out
class MultipleInputFeatureRequirement(ProcessRequirement):
    """
    Indicates that the workflow platform must support multiple inbound data links
    listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput).
    """

    # Known (non-extension) field names for this record type.
    attrs = frozenset(['class'])

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Load the requirement from `_doc`; only `class` and namespaced
        extension fields are accepted."""
        # Shallow copy so the caller's mapping is never mutated.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Keep source position metadata for error messages.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'MultipleInputFeatureRequirement':
            raise ValidationException("Not a MultipleInputFeatureRequirement")
        self.extension_fields = {}
        for name in doc.keys():
            if name in self.attrs:
                continue
            if ":" not in name:
                # Reject the first unknown plain field and stop loading.
                detail = SourceLine(doc, name, str).makeError(
                    "invalid field `%s`, expected one of: `class`" % (name))
                raise ValidationException(
                    "Trying 'MultipleInputFeatureRequirement'\n" + detail)
            # Namespaced keys become extension fields under their full URI.
            full_uri = expand_url(name, u"", loadingOptions,
                                  scoped_id=False, vocab_term=False)
            self.extension_fields[full_uri] = doc[name]

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; the root also emits $namespaces."""
        serialized = {}
        for full_uri, value in self.extension_fields.items():
            # Compact expanded URIs back to prefixed form.
            serialized[prefix_url(full_uri, self.loadingOptions.vocab)] = value
        serialized['class'] = 'MultipleInputFeatureRequirement'
        if top and self.loadingOptions.namespaces:
            serialized["$namespaces"] = self.loadingOptions.namespaces
        return serialized
class StepInputExpressionRequirement(ProcessRequirement):
    """
    Indicate that the workflow platform must support the `valueFrom` field
    of [WorkflowStepInput](#WorkflowStepInput).
    """

    # Known (non-extension) field names for this record type.
    attrs = frozenset(['class'])

    def __init__(self, _doc, baseuri, loadingOptions, docRoot=None):
        """Load the requirement from `_doc`; only `class` and namespaced
        extension fields are accepted."""
        # Copy shallowly so the input mapping is left unmodified.
        doc = copy.copy(_doc)
        if hasattr(_doc, 'lc'):
            # Retain line/column metadata for precise error reporting.
            doc.lc.data = _doc.lc.data
            doc.lc.filename = _doc.lc.filename
        self.loadingOptions = loadingOptions
        if doc.get('class') != 'StepInputExpressionRequirement':
            raise ValidationException("Not a StepInputExpressionRequirement")
        self.extension_fields = {}
        for field in doc.keys():
            if field in self.attrs:
                continue
            if ":" in field:
                # Namespaced keys become extension fields under their full URI.
                expanded = expand_url(field, u"", loadingOptions,
                                      scoped_id=False, vocab_term=False)
                self.extension_fields[expanded] = doc[field]
            else:
                # Fail on the first unknown plain field.
                problem = SourceLine(doc, field, str).makeError(
                    "invalid field `%s`, expected one of: `class`" % (field))
                raise ValidationException(
                    "Trying 'StepInputExpressionRequirement'\n" + problem)

    def save(self, top=False, base_url=""):
        """Serialize back to a plain dict; the root also emits $namespaces."""
        rendered = {}
        for expanded, value in self.extension_fields.items():
            # Compact expanded URIs back to prefixed form.
            rendered[prefix_url(expanded, self.loadingOptions.vocab)] = value
        rendered['class'] = 'StepInputExpressionRequirement'
        if top and self.loadingOptions.namespaces:
            rendered["$namespaces"] = self.loadingOptions.namespaces
        return rendered
# Forward vocabulary: maps short CWL/salad term names (type, class, and enum
# symbol names) to their fully-qualified concept URIs. Used by expand_url /
# load_field when resolving vocabulary terms; the inverse mapping is _rvocab.
_vocab = {
    "Any": "https://w3id.org/cwl/salad#Any",
    "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema",
    "CWLType": "https://w3id.org/cwl/cwl#CWLType",
    "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion",
    "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema",
    "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema",
    "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter",
    "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField",
    "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema",
    "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding",
    "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool",
    "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema",
    "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding",
    "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema",
    "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter",
    "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField",
    "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema",
    "Directory": "https://w3id.org/cwl/cwl#Directory",
    "Dirent": "https://w3id.org/cwl/cwl#Dirent",
    "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement",
    "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema",
    "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement",
    "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef",
    "Expression": "https://w3id.org/cwl/cwl#Expression",
    "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder",
    "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool",
    "ExpressionToolOutputParameter": "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter",
    "File": "https://w3id.org/cwl/cwl#File",
    "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement",
    "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement",
    "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema",
    "InputBinding": "https://w3id.org/cwl/cwl#InputBinding",
    "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema",
    "InputParameter": "https://w3id.org/cwl/cwl#InputParameter",
    "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField",
    "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema",
    "InputSchema": "https://w3id.org/cwl/cwl#InputSchema",
    "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod",
    "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement",
    "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema",
    "OutputBinding": "https://w3id.org/cwl/cwl#OutputBinding",
    "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema",
    "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter",
    "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField",
    "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema",
    "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema",
    "Parameter": "https://w3id.org/cwl/cwl#Parameter",
    "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType",
    "Process": "https://w3id.org/cwl/cwl#Process",
    "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement",
    "RecordField": "https://w3id.org/cwl/salad#RecordField",
    "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema",
    "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement",
    "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement",
    "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod",
    "SchemaBase": "https://w3id.org/cwl/cwl#SchemaBase",
    "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement",
    "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement",
    "Sink": "https://w3id.org/cwl/cwl#Sink",
    "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage",
    "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement",
    "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement",
    "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement",
    "Workflow": "https://w3id.org/cwl/cwl#Workflow",
    "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter",
    "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep",
    "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput",
    "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput",
    "array": "https://w3id.org/cwl/salad#array",
    "boolean": "http://www.w3.org/2001/XMLSchema#boolean",
    "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct",
    "double": "http://www.w3.org/2001/XMLSchema#double",
    "draft-2": "https://w3id.org/cwl/cwl#draft-2",
    "draft-3": "https://w3id.org/cwl/cwl#draft-3",
    "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1",
    "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2",
    "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3",
    "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4",
    "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5",
    "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1",
    "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2",
    "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3",
    "enum": "https://w3id.org/cwl/salad#enum",
    "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct",
    "float": "http://www.w3.org/2001/XMLSchema#float",
    "int": "http://www.w3.org/2001/XMLSchema#int",
    "long": "http://www.w3.org/2001/XMLSchema#long",
    "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened",
    "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested",
    "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct",
    "null": "https://w3id.org/cwl/salad#null",
    "record": "https://w3id.org/cwl/salad#record",
    "stderr": "https://w3id.org/cwl/cwl#stderr",
    "stdout": "https://w3id.org/cwl/cwl#stdout",
    "string": "http://www.w3.org/2001/XMLSchema#string",
    "v1.0": "https://w3id.org/cwl/cwl#v1.0",
    "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4",
}
_rvocab = { | |
"https://w3id.org/cwl/salad#Any": "Any", | |
"https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", | |
"https://w3id.org/cwl/cwl#CWLType": "CWLType", | |
"https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", | |
"https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", | |
"https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", | |
"https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", | |
"https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", | |
"https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", | |
"https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", | |
"https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", | |
"https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", | |
"https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", | |
"https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", | |
"https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", | |
"https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", | |
"https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", | |
"https://w3id.org/cwl/cwl#Directory": "Directory", | |
"https://w3id.org/cwl/cwl#Dirent": "Dirent", | |
"https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", | |
"https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", | |
"https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", | |
"https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", | |
"https://w3id.org/cwl/cwl#Expression": "Expression", | |
"https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", | |
"https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", | |
"https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", | |
"https://w3id.org/cwl/cwl#File": "File", | |
"https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", | |
"https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", | |
"https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", | |
"https://w3id.org/cwl/cwl#InputBinding": "InputBinding", | |
"https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", | |
"https://w3id.org/cwl/cwl#InputParameter": "InputParameter", | |
"https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", | |
"https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", | |
"https://w3id.org/cwl/cwl#InputSchema": "InputSchema", | |
"https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", | |
"https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", | |
"https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", | |
"https://w3id.org/cwl/cwl#OutputBinding": "OutputBinding", | |
"https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", | |
"https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", | |
"https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", | |
"https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", | |
"https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", | |
"https://w3id.org/cwl/cwl#Parameter": "Parameter", | |
"https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", | |
"https://w3id.org/cwl/cwl#Process": "Process", | |
"https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", | |
"https://w3id.org/cwl/salad#RecordField": "RecordField", | |
"https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", | |
"https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", | |
"https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", | |
"https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", | |
"https://w3id.org/cwl/cwl#SchemaBase": "SchemaBase", | |
"https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", | |
"https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", | |
"https://w3id.org/cwl/cwl#Sink": "Sink", | |
"https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", | |
"https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", | |
"https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", | |
"https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", | |
"https://w3id.org/cwl/cwl#Workflow": "Workflow", | |
"https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", | |
"https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", | |
"https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", | |
"https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", | |
"https://w3id.org/cwl/salad#array": "array", | |
"http://www.w3.org/2001/XMLSchema#boolean": "boolean", | |
"https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", | |
"http://www.w3.org/2001/XMLSchema#double": "double", | |
"https://w3id.org/cwl/cwl#draft-2": "draft-2", | |
"https://w3id.org/cwl/cwl#draft-3": "draft-3", | |
"https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", | |
"https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", | |
"https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", | |
"https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", | |
"https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", | |
"https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", | |
"https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", | |
"https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", | |
"https://w3id.org/cwl/salad#enum": "enum", | |
"https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", | |
"http://www.w3.org/2001/XMLSchema#float": "float", | |
"http://www.w3.org/2001/XMLSchema#int": "int", | |
"http://www.w3.org/2001/XMLSchema#long": "long", | |
"https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", | |
"https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", | |
"https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", | |
"https://w3id.org/cwl/salad#null": "null", | |
"https://w3id.org/cwl/salad#record": "record", | |
"https://w3id.org/cwl/cwl#stderr": "stderr", | |
"https://w3id.org/cwl/cwl#stdout": "stdout", | |
"http://www.w3.org/2001/XMLSchema#string": "string", | |
"https://w3id.org/cwl/cwl#v1.0": "v1.0", | |
"https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", | |
} | |
# --- Primitive and basic enum loaders ---------------------------------------
# NOTE(review): this section is machine-generated (schema-salad codegen for the
# CWL v1.0 object model).  The _PrimitiveLoader/_EnumLoader/_AnyLoader classes
# are defined earlier in this file and are not visible here; the notes below
# describe what the constructor arguments show and should be confirmed against
# those class definitions.
strtype = _PrimitiveLoader((str, six.text_type))  # str on py3, str/unicode on py2 (via six)
inttype = _PrimitiveLoader(int)
Any_type = _AnyLoader()  # presumably accepts any value -- confirm in _AnyLoader
booltype = _PrimitiveLoader(bool)
floattype = _PrimitiveLoader(float)
None_type = _PrimitiveLoader(type(None))  # matches only None; used to express optional fields in unions below
PrimitiveTypeLoader = _EnumLoader(("null", "boolean", "int", "long", "float", "double", "string",))
AnyLoader = _EnumLoader(("Any",))
# --- Record and enum loaders, one per CWL v1.0 schema class ------------------
# Each _RecordLoader(Cls) presumably parses a mapping into an instance of the
# referenced class (the classes are defined earlier in this generated file);
# each _EnumLoader((...)) restricts a field to the listed symbols.
# Salad base types:
RecordFieldLoader = _RecordLoader(RecordField)
RecordSchemaLoader = _RecordLoader(RecordSchema)
EnumSchemaLoader = _RecordLoader(EnumSchema)
ArraySchemaLoader = _RecordLoader(ArraySchema)
# All CWL version strings accepted by the `cwlVersion` field:
CWLVersionLoader = _EnumLoader(("draft-2", "draft-3.dev1", "draft-3.dev2", "draft-3.dev3", "draft-3.dev4", "draft-3.dev5", "draft-3", "draft-4.dev1", "draft-4.dev2", "draft-4.dev3", "v1.0.dev4", "v1.0",))
CWLTypeLoader = _EnumLoader(("File", "Directory",))
FileLoader = _RecordLoader(File)
DirectoryLoader = _RecordLoader(Directory)
SchemaBaseLoader = _RecordLoader(SchemaBase)
ParameterLoader = _RecordLoader(Parameter)
ExpressionLoader = _EnumLoader(("ExpressionPlaceholder",))
InputBindingLoader = _RecordLoader(InputBinding)
OutputBindingLoader = _RecordLoader(OutputBinding)
InputSchemaLoader = _RecordLoader(InputSchema)
OutputSchemaLoader = _RecordLoader(OutputSchema)
# Input/output schema variants (record/enum/array) and parameters:
InputRecordFieldLoader = _RecordLoader(InputRecordField)
InputRecordSchemaLoader = _RecordLoader(InputRecordSchema)
InputEnumSchemaLoader = _RecordLoader(InputEnumSchema)
InputArraySchemaLoader = _RecordLoader(InputArraySchema)
OutputRecordFieldLoader = _RecordLoader(OutputRecordField)
OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema)
OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema)
OutputArraySchemaLoader = _RecordLoader(OutputArraySchema)
InputParameterLoader = _RecordLoader(InputParameter)
OutputParameterLoader = _RecordLoader(OutputParameter)
ProcessRequirementLoader = _RecordLoader(ProcessRequirement)
ProcessLoader = _RecordLoader(Process)
# Process requirements:
InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement)
SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement)
EnvironmentDefLoader = _RecordLoader(EnvironmentDef)
# CommandLineTool-specific bindings, schemas and parameters:
CommandLineBindingLoader = _RecordLoader(CommandLineBinding)
CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding)
CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField)
CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema)
CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema)
CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema)
CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField)
CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema)
CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema)
CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema)
CommandInputParameterLoader = _RecordLoader(CommandInputParameter)
CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter)
stdoutLoader = _EnumLoader(("stdout",))
stderrLoader = _EnumLoader(("stderr",))
CommandLineToolLoader = _RecordLoader(CommandLineTool)
DockerRequirementLoader = _RecordLoader(DockerRequirement)
SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement)
SoftwarePackageLoader = _RecordLoader(SoftwarePackage)
DirentLoader = _RecordLoader(Dirent)
InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement)
EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement)
ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement)
ResourceRequirementLoader = _RecordLoader(ResourceRequirement)
# ExpressionTool and Workflow model classes:
ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter)
ExpressionToolLoader = _RecordLoader(ExpressionTool)
LinkMergeMethodLoader = _EnumLoader(("merge_nested", "merge_flattened",))
WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter)
SinkLoader = _RecordLoader(Sink)
WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput)
WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput)
ScatterMethodLoader = _EnumLoader(("dotproduct", "nested_crossproduct", "flat_crossproduct",))
WorkflowStepLoader = _RecordLoader(WorkflowStep)
WorkflowLoader = _RecordLoader(Workflow)
SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement)
ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement)
MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement)
StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement)
# --- Derived (composite) loaders --------------------------------------------
# Machine-generated combinators built from the loaders above.  The generated
# names encode their own construction:
#   union_of_A_or_B      = _UnionLoader((A, B))        -- first matching alternative
#   array_of_A           = _ArrayLoader(A)             -- list of A
#   uri_A_x_y_z          = _URILoader(A, x, y, z)      -- URI/identifier resolution
#                          (the three flags' meaning lives in _URILoader -- confirm there)
#   typedsl_A_n          = _TypeDSLLoader(A, n)        -- salad type-DSL shorthand, e.g. "int[]" / "int?"
#   idmap_field_A        = _IdMapLoader(A, subj, pred) -- map <-> list conversion keyed on `subj`
# A union whose first member is None_type presumably models an optional field.
uri_strtype_True_False_None = _URILoader(strtype, True, False, None)
union_of_None_type_or_strtype = _UnionLoader((None_type, strtype,))
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype,))
array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,))
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, 2)
array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader)
union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader((None_type, array_of_RecordFieldLoader,))
# `fields` may be written as a map keyed by field name with `type` as value:
idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_RecordFieldLoader, 'name', 'type')
Record_symbolLoader = _EnumLoader(("record",))
typedsl_Record_symbolLoader_2 = _TypeDSLLoader(Record_symbolLoader, 2)
array_of_strtype = _ArrayLoader(strtype)
uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None)
Enum_symbolLoader = _EnumLoader(("enum",))
typedsl_Enum_symbolLoader_2 = _TypeDSLLoader(Enum_symbolLoader, 2)
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, False, True, 2)
Array_symbolLoader = _EnumLoader(("array",))
typedsl_Array_symbolLoader_2 = _TypeDSLLoader(Array_symbolLoader, 2)
# File / Directory literals (`class: File` etc.):
File_classLoader = _EnumLoader(("File",))
uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None)
uri_union_of_None_type_or_strtype_False_False_None = _URILoader(union_of_None_type_or_strtype, False, False, None)
union_of_None_type_or_inttype = _UnionLoader((None_type, inttype,))
union_of_FileLoader_or_DirectoryLoader = _UnionLoader((FileLoader, DirectoryLoader,))
array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader(union_of_FileLoader_or_DirectoryLoader)
union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader((None_type, array_of_union_of_FileLoader_or_DirectoryLoader,))
uri_union_of_None_type_or_strtype_True_False_None = _URILoader(union_of_None_type_or_strtype, True, False, None)
Directory_classLoader = _EnumLoader(("Directory",))
uri_Directory_classLoader_False_True_None = _URILoader(Directory_classLoader, False, True, None)
union_of_strtype_or_ExpressionLoader = _UnionLoader((strtype, ExpressionLoader,))
array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader(union_of_strtype_or_ExpressionLoader)
union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader((None_type, strtype, ExpressionLoader, array_of_union_of_strtype_or_ExpressionLoader,))
union_of_None_type_or_booltype = _UnionLoader((None_type, booltype,))
union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader((None_type, strtype, array_of_strtype,))
# Loaders for the `type` field of input parameters (incl. the salad type DSL):
union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, InputRecordSchemaLoader, InputEnumSchemaLoader, InputArraySchemaLoader, strtype,))
array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader(union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype)
union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, InputRecordSchemaLoader, InputEnumSchemaLoader, InputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,))
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, 2)
union_of_None_type_or_CommandLineBindingLoader = _UnionLoader((None_type, CommandLineBindingLoader,))
array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader)
union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader((None_type, array_of_InputRecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_InputRecordFieldLoader, 'name', 'type')
uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, False, True, 2)
# Loaders for the `type` field of output parameters:
union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, OutputRecordSchemaLoader, OutputEnumSchemaLoader, OutputArraySchemaLoader, strtype,))
array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader(union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype)
union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, OutputRecordSchemaLoader, OutputEnumSchemaLoader, OutputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,))
typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, 2)
union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader((None_type, CommandOutputBindingLoader,))
array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader)
union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader((None_type, array_of_OutputRecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_OutputRecordFieldLoader, 'name', 'type')
uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, False, True, 2)
union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader((None_type, strtype, array_of_strtype, ExpressionLoader,))
uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, True, False, None)
union_of_None_type_or_Any_type = _UnionLoader((None_type, Any_type,))
union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader((None_type, CWLTypeLoader, InputRecordSchemaLoader, InputEnumSchemaLoader, InputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,))
typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, 2)
union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader((None_type, strtype, ExpressionLoader,))
uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = _URILoader(union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None)
# --- Derived loaders for Process fields (inputs/outputs/requirements/hints) --
# Generated combinators; naming convention is the same as the section above
# (union_of_* / array_of_* / uri_* / typedsl_* / idmap_*).
# `inputs`/`outputs` may be written as maps keyed by `id` with `type` as value:
array_of_InputParameterLoader = _ArrayLoader(InputParameterLoader)
idmap_inputs_array_of_InputParameterLoader = _IdMapLoader(array_of_InputParameterLoader, 'id', 'type')
array_of_OutputParameterLoader = _ArrayLoader(OutputParameterLoader)
idmap_outputs_array_of_OutputParameterLoader = _IdMapLoader(array_of_OutputParameterLoader, 'id', 'type')
# `requirements`: any of the twelve concrete ProcessRequirement subtypes,
# optionally given as a map keyed by `class`:
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader((InlineJavascriptRequirementLoader, SchemaDefRequirementLoader, DockerRequirementLoader, SoftwareRequirementLoader, InitialWorkDirRequirementLoader, EnvVarRequirementLoader, ShellCommandRequirementLoader, ResourceRequirementLoader, SubworkflowFeatureRequirementLoader, ScatterFeatureRequirementLoader, MultipleInputFeatureRequirementLoader, StepInputExpressionRequirementLoader,))
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader(union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader)
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader((None_type, array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader,))
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader(union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, 'class', 'None')
# `hints` are unvalidated (Any), also idmap-able on `class`:
array_of_Any_type = _ArrayLoader(Any_type)
union_of_None_type_or_array_of_Any_type = _UnionLoader((None_type, array_of_Any_type,))
idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader(union_of_None_type_or_array_of_Any_type, 'class', 'None')
union_of_None_type_or_CWLVersionLoader = _UnionLoader((None_type, CWLVersionLoader,))
uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader(union_of_None_type_or_CWLVersionLoader, False, True, None)
uri_strtype_False_True_None = _URILoader(strtype, False, True, None)
union_of_None_type_or_array_of_strtype = _UnionLoader((None_type, array_of_strtype,))
union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _UnionLoader((InputRecordSchemaLoader, InputEnumSchemaLoader, InputArraySchemaLoader,))
array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader(union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader)
union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader((None_type, strtype, ExpressionLoader, array_of_strtype,))
# CommandLineTool input `type` field (incl. the salad type DSL):
union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, CommandInputRecordSchemaLoader, CommandInputEnumSchemaLoader, CommandInputArraySchemaLoader, strtype,))
array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader(union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype)
union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, CommandInputRecordSchemaLoader, CommandInputEnumSchemaLoader, CommandInputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype,))
typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2)
array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader)
union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader((None_type, array_of_CommandInputRecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_CommandInputRecordFieldLoader, 'name', 'type')
uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, False, True, 2)
# CommandLineTool output `type` field:
union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, CommandOutputRecordSchemaLoader, CommandOutputEnumSchemaLoader, CommandOutputArraySchemaLoader, strtype,))
array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader(union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype)
union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader((CWLTypeLoader, CommandOutputRecordSchemaLoader, CommandOutputEnumSchemaLoader, CommandOutputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype,))
typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2)
array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader)
union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader((None_type, array_of_CommandOutputRecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, 'name', 'type')
uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, False, True, 2)
union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader((None_type, CWLTypeLoader, CommandInputRecordSchemaLoader, CommandInputEnumSchemaLoader, CommandInputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype,))
typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2)
# Command output `type` additionally accepts the special `stdout`/`stderr` markers:
union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader((None_type, CWLTypeLoader, stdoutLoader, stderrLoader, CommandOutputRecordSchemaLoader, CommandOutputEnumSchemaLoader, CommandOutputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype,))
typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2)
array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader)
idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader(array_of_CommandInputParameterLoader, 'id', 'type')
array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader)
idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader(array_of_CommandOutputParameterLoader, 'id', 'type')
# `arguments`, resource hints, SoftwareRequirement packages, InitialWorkDir
# listings, EnvVarRequirement definitions:
union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader((strtype, ExpressionLoader, CommandLineBindingLoader,))
array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader)
union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader((None_type, array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader,))
array_of_inttype = _ArrayLoader(inttype)
union_of_None_type_or_array_of_inttype = _UnionLoader((None_type, array_of_inttype,))
array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader)
idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader(array_of_SoftwarePackageLoader, 'package', 'specs')
union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader((FileLoader, DirectoryLoader, DirentLoader, strtype, ExpressionLoader,))
array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader(union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader)
union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader((array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader, strtype, ExpressionLoader,))
array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader)
idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader(array_of_EnvironmentDefLoader, 'envName', 'envValue')
union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader = _UnionLoader((None_type, inttype, strtype, ExpressionLoader,))
union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader((None_type, CWLTypeLoader, OutputRecordSchemaLoader, OutputEnumSchemaLoader, OutputArraySchemaLoader, strtype, array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,))
typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, 2)
array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) | |
idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader(array_of_ExpressionToolOutputParameterLoader, 'id', 'type') | |
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0) | |
union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader((None_type, LinkMergeMethodLoader,)) | |
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2) | |
array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) | |
idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader(array_of_WorkflowStepInputLoader, 'id', 'source') | |
union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader((strtype, WorkflowStepOutputLoader,)) | |
array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader(union_of_strtype_or_WorkflowStepOutputLoader) | |
union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader((array_of_union_of_strtype_or_WorkflowStepOutputLoader,)) | |
uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader(union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, True, False, None) | |
union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader((strtype, CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader,)) | |
uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader(union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, False, False, None) | |
union_of_None_type_or_ScatterMethodLoader = _UnionLoader((None_type, ScatterMethodLoader,)) | |
uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader(union_of_None_type_or_ScatterMethodLoader, False, True, None) | |
array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) | |
idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_WorkflowOutputParameterLoader, 'id', 'type') | |
array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) | |
union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) | |
idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader(union_of_array_of_WorkflowStepLoader, 'id', 'None') | |
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader((CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader,)) | |
array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader(union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader) | |
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader((CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,)) | |
def load_document(doc, baseuri=None, loadingOptions=None):
    """Load a CWL document (a CommandLineTool, ExpressionTool, Workflow,
    or a list of those) from *doc*.

    doc            -- the document to load (passed through to _document_load).
    baseuri        -- base URI for resolving relative references; defaults
                     to a file:// URI for the current working directory.
    loadingOptions -- shared LoadingOptions; a fresh instance is created
                     when none is supplied.
    """
    effective_baseuri = file_uri(os.getcwd()) + "/" if baseuri is None else baseuri
    effective_options = LoadingOptions() if loadingOptions is None else loadingOptions
    return _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        doc, effective_baseuri, effective_options)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
import sys | |
import CommonWorkflowLanguage as cwl | |
def main():
    """Entry point: load the TranscriptsAnnotation workflow and walk it,
    printing every DockerRequirement found along the way."""
    workflow_url = (
        "https://raw.githubusercontent.com/mscheremetjew/workflow-is-cwl/"
        "master/workflows/TranscriptsAnnotation-wf.cwl")
    traverse(cwl.load_document(workflow_url))
def extract_software_packages(process: cwl.Process):
    """Print the id of *process* for each DockerRequirement it carries,
    then print that requirement's details.

    NOTE(review): the name looks like a holdover from the
    SoftwareRequirement variant of this script -- it actually reports
    DockerRequirements. Kept unchanged because callers use this name.
    """
    for docker_req in extract_docker_reqs(process):
        print(process.id)
        process_docker_requirement(docker_req)
def extract_docker_reqs(process: cwl.Process):
    """Yield every DockerRequirement attached to *process*.

    Requirements are already-typed objects and are yielded directly;
    hints arrive as raw mappings and are loaded into DockerRequirement
    objects first.
    """
    for requirement in process.requirements or []:
        if isinstance(requirement, cwl.DockerRequirement):
            yield requirement
    for hint in process.hints or []:
        if hint['class'] == "DockerRequirement":
            yield cwl.load_field(hint, cwl.DockerRequirementLoader,
                                 process.id, process.loadingOptions)
def process_docker_requirement(req: 'cwl.DockerRequirement'):
    """Print the dockerPull field of a DockerRequirement.

    Fix: the parameter was annotated ``cwl.SoftwarePackage`` (a copy-paste
    from the SoftwareRequirement variant of this script), but every caller
    passes a DockerRequirement and the body reads ``req.dockerPull``.
    The annotation is quoted (a forward reference) so it is not evaluated
    at definition time; runtime behavior is unchanged.
    """
    print("dockerPull: {}".format(req.dockerPull))
def traverse(process: cwl.Process):
    """Report the docker requirements of *process*; if it is a Workflow,
    recurse into its steps as well."""
    extract_software_packages(process)
    if not isinstance(process, cwl.Workflow):
        return
    traverse_workflow(process)
def get_process_from_step(step: cwl.WorkflowStep):
    """Return the Process run by *step*, loading it first when ``step.run``
    is a string reference rather than an embedded process."""
    run = step.run
    if isinstance(run, str):
        run = cwl.load_document(run)
    return run
def traverse_workflow(workflow: cwl.Workflow):
    """Visit each step of *workflow*: report requirements attached to the
    step itself, then recurse into the process that the step runs."""
    for workflow_step in workflow.steps:
        extract_software_packages(workflow_step)
        step_process = get_process_from_step(workflow_step)
        traverse(step_process)
if __name__ == "__main__":
    # Propagate main()'s return value to the shell; main() returns None,
    # which sys.exit treats as exit status 0 (success).
    sys.exit(main())
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
ruamel.yaml | |
six | |
requests | |
cachecontrol | |
schema_salad |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.