more type improvements #72

Merged
merged 1 commit on Apr 28, 2016

2 changes: 1 addition & 1 deletion cwltool/__main__.py
@@ -1,4 +1,4 @@
import main
from . import main
import sys

sys.exit(main.main())
16 changes: 8 additions & 8 deletions cwltool/builder.py
@@ -33,20 +33,20 @@ class Builder(object):

def __init__(self): # type: () -> None
self.names = None # type: avro.schema.Names
self.schemaDefs = None # type: Dict[str,Dict[str,str]]
self.files = None # type: List[str]
self.schemaDefs = None # type: Dict[str,Dict[unicode, Any]]
self.files = None # type: List[Dict[str, str]]
self.fs_access = None # type: StdFsAccess
self.job = None # type: Dict[str,str]
self.job = None # type: Dict[str, Any]
self.requirements = None # type: List[Dict[str,Any]]
self.outdir = None # type: str
self.tmpdir = None # type: str
self.resources = None # type: Dict[str,str]
self.bindings = [] # type: List[Dict[str,str]]
self.resources = None # type: Dict[str, Union[int, str]]
self.bindings = [] # type: List[Dict[str, Any]]
self.timeout = None # type: int
self.pathmapper = None # type: PathMapper

def bind_input(self, schema, datum, lead_pos=[], tail_pos=[]):
# type: (Dict[str,Any], Any, List[int], List[int]) -> List[Dict[str,str]]
# type: (Dict[unicode, Any], Any, List[int], List[int]) -> List[Dict[str, Any]]
bindings = [] # type: List[Dict[str,str]]
binding = None # type: Dict[str,Any]
if "inputBinding" in schema and isinstance(schema["inputBinding"], dict):
@@ -64,7 +64,7 @@ def bind_input(self, schema, datum, lead_pos=[], tail_pos=[]):
# Handle union types
if isinstance(schema["type"], list):
for t in schema["type"]:
if isinstance(t, basestring) and self.names.has_name(t, ""):
if isinstance(t, (str, unicode)) and self.names.has_name(t, ""):
avsc = self.names.get_name(t, "")
elif isinstance(t, dict) and "name" in t and self.names.has_name(t["name"], ""):
avsc = self.names.get_name(t["name"], "")
@@ -148,7 +148,7 @@ def _capture_files(f):

return bindings

def tostr(self, value): # type(Any) -> str
def tostr(self, value): # type: (Any) -> str
if isinstance(value, dict) and value.get("class") == "File":
if "path" not in value:
raise WorkflowException(u"File object must have \"path\": %s" % (value))
19 changes: 11 additions & 8 deletions cwltool/cwlrdf.py
@@ -1,16 +1,19 @@
import json
import urlparse
from schema_salad.ref_resolver import Loader
from rdflib import Graph, plugin, URIRef
from rdflib.serializer import Serializer
from typing import Any, Union, Dict, IO

def makerdf(workflow, wf, ctx):
# type: (str, Dict[str,Any], Dict[str,Union[str, Dict[str,str]]]) -> Graph
# type: (str, Dict[str,Any], Loader.ContextType) -> Graph
prefixes = {}
for k,v in ctx.iteritems():
if isinstance(v, dict):
v = v["@id"]
doc_url, frg = urlparse.urldefrag(v)
url = v["@id"]
else:
url = v
doc_url, frg = urlparse.urldefrag(url)
if "/" in frg:
p, _ = frg.split("/")
prefixes[p] = u"%s#%s/" % (doc_url, p)
@@ -22,13 +25,13 @@ def makerdf(workflow, wf, ctx):
for s,p,o in g.triples((None, URIRef("@id"), None)):
g.remove((s, p, o))

for k,v in prefixes.iteritems():
g.namespace_manager.bind(k, v)
for k2,v2 in prefixes.iteritems():
g.namespace_manager.bind(k2, v2)

return g

def printrdf(workflow, wf, ctx, sr, stdout):
# type: (str, Dict[str,Any], Dict[str,Union[str, Dict[str,str]]], str, IO[Any]) -> None
# type: (str, Dict[str,Any], Loader.ContextType, str, IO[Any]) -> None
stdout.write(makerdf(workflow, wf, ctx).serialize(format=sr))

def lastpart(uri): # type: (Any) -> str
@@ -158,7 +161,7 @@ def dot_without_parameters(g, stdout): # type: (Graph, IO[Any]) -> None
}""")

for src, sink, srcrun, sinkrun in qres:
attr = ""
attr = u""
if srcrun in clusternode:
attr += u'ltail="%s"' % dotname[srcrun]
src = clusternode[srcrun]
@@ -169,7 +172,7 @@ def dot_without_parameters(g, stdout): # type: (Graph, IO[Any]) -> None


def printdot(workflow, wf, ctx, stdout, include_parameters=False):
# type: (str, Dict[str,Any], Dict[str,Union[str, Dict[str,str]]], Any, bool) -> None
# type: (str, Dict[str,Any], Loader.ContextType, Any, bool) -> None
g = makerdf(workflow, wf, ctx)

stdout.write("digraph {")
9 changes: 6 additions & 3 deletions cwltool/cwltest.py
@@ -165,9 +165,12 @@ def main(): # type: () -> int
tests = []
for t in alltests:
loader = schema_salad.ref_resolver.Loader({"id": "@id"})
cwl, _ = loader.resolve_ref(t["tool"])
if cwl["class"] == "CommandLineTool":
tests.append(t)
cwl = loader.resolve_ref(t["tool"])[0]
if isinstance(cwl, dict):
if cwl["class"] == "CommandLineTool":
tests.append(t)
else:
raise Exception("Unexpected code path.")

if args.l:
for i, t in enumerate(tests):
8 changes: 4 additions & 4 deletions cwltool/draft2tool.py
@@ -100,7 +100,7 @@ def __init__(self, toolpath_object, **kwargs):
# type: (Dict[str,Any], **Any) -> None
super(CommandLineTool, self).__init__(toolpath_object, **kwargs)

def makeJobRunner(self):
def makeJobRunner(self): # type: () -> CommandLineJob
return CommandLineJob()

def makePathMapper(self, reffiles, input_basedir, **kwargs):
@@ -267,7 +267,7 @@ def collect_output_ports(self, ports, builder, outdir):
raise WorkflowException("Error validating output record, " + str(e) + "\n in " + json.dumps(ret, indent=4))

def collect_output(self, schema, builder, outdir):
# type: (Dict[str,Any],Builder,str) -> Union[Dict[str,Any],List[Union[Dict[str,Any],str]]]
# type: (Dict[str,Any], Builder, str) -> Union[Dict[str, Any], List[Union[Dict[str, Any], str]]]
r = [] # type: List[Any]
if "outputBinding" in schema:
binding = schema["outputBinding"]
@@ -372,9 +372,9 @@ def collect_output(self, schema, builder, outdir):

if (not r and isinstance(schema["type"], dict) and
schema["type"]["type"] == "record"):
out = {} # type: Dict[str, Any]
out = {}
for f in schema["type"]["fields"]:
out[shortname(f["name"])] = self.collect_output(
out[shortname(f["name"])] = self.collect_output( # type: ignore
f, builder, outdir)
return out
return r
8 changes: 4 additions & 4 deletions cwltool/expression.py
@@ -15,14 +15,14 @@
_logger = logging.getLogger("cwltool")

def jshead(engineConfig, rootvars):
# type: (List[str],Dict[str,str]) -> str
return "\n".join(engineConfig + [u"var %s = %s;" % (k, json.dumps(v, indent=4)) for k, v in rootvars.items()])
# type: (List[unicode],Dict[str,str]) -> unicode
return u"\n".join(engineConfig + [u"var %s = %s;" % (k, json.dumps(v, indent=4)) for k, v in rootvars.items()])

def exeval(ex, jobinput, requirements, outdir, tmpdir, context, pull_image):
# type: (Dict[str,Any], Dict[str,str], List[Dict[str, Any]], str, str, Any, bool) -> sandboxjs.JSON

if ex["engine"] == "https://w3id.org/cwl/cwl#JavascriptEngine":
engineConfig = [] # type: List[str]
engineConfig = [] # type: List[unicode]
for r in reversed(requirements):
if r["class"] == "ExpressionEngineRequirement" and r["id"] == "https://w3id.org/cwl/cwl#JavascriptEngine":
engineConfig = r.get("engineConfig", [])
@@ -126,7 +126,7 @@ def param_interpolate(ex, obj, strip=True):

def do_eval(ex, jobinput, requirements, outdir, tmpdir, resources,
context=None, pull_image=True, timeout=None):
# type: (Any, Dict[str,str], List[Dict[str,Any]], str, str, Dict[str,str], Any, bool, int) -> Any
# type: (Any, Dict[str,str], List[Dict[str,Any]], str, str, Dict[str, Union[int, str]], Any, bool, int) -> Any

runtime = resources.copy()
runtime["tmpdir"] = tmpdir
8 changes: 4 additions & 4 deletions cwltool/job.py
@@ -48,10 +48,10 @@ def __init__(self): # type: () -> None
self.successCodes = None # type: Iterable[int]
self.temporaryFailCodes = None # type: Iterable[int]
self.permanentFailCodes = None # type: Iterable[int]
self.requirements = None # type: Dict[str,str]
self.requirements = None # type: List[Dict[str, str]]
self.hints = None # type: Dict[str,str]
self.name = None # type: str
self.command_line = None # type: List[str]
self.name = None # type: unicode
self.command_line = None # type: List[unicode]
self.pathmapper = None # type: PathMapper
self.collect_outputs = None # type: Union[Callable[[Any], Any],functools.partial[Any]]
self.output_callback = None # type: Callable[[Any, Any], Any]
@@ -69,7 +69,7 @@ def run(self, dry_run=False, pull_image=True, rm_container=True,
#with open(os.path.join(outdir, "cwl.input.json"), "w") as fp:
# json.dump(self.joborder, fp)

runtime = [] # type: List[str]
runtime = [] # type: List[unicode]
env = {"TMPDIR": self.tmpdir} # type: Mapping[str,str]

(docker_req, docker_is_req) = get_feature(self, "DockerRequirement")
23 changes: 14 additions & 9 deletions cwltool/main.py
@@ -22,7 +22,7 @@
from .process import shortname, Process
import rdflib
from .utils import aslist
from typing import Union, Any, cast, Callable, Tuple, IO
from typing import Union, Any, cast, Callable, Dict, Tuple, IO

_logger = logging.getLogger("cwltool")

@@ -294,15 +294,15 @@ def load_tool(argsworkflow, updateonly, strict, makeTool, debug,
rdf_serializer=None,
stdout=sys.stdout,
urifrag=None):
# type: (Union[str,unicode,dict[str,Any]], bool, bool, Callable[...,Process], bool, bool, bool, bool, bool, bool, Any, Any, Any) -> Any
# type: (Union[str,unicode,dict[unicode,Any]], bool, bool, Callable[...,Process], bool, bool, bool, bool, bool, bool, Any, Any, Any) -> Any
(document_loader, avsc_names, schema_metadata) = process.get_schema()

if isinstance(avsc_names, Exception):
raise avsc_names

jobobj = None
uri = None # type: str
workflowobj = None # type: Dict[str, Any]
workflowobj = None # type: Dict[unicode, Any]
if isinstance(argsworkflow, (basestring)):
split = urlparse.urlsplit(cast(str, argsworkflow))
if split.scheme:
@@ -343,9 +343,11 @@ def load_tool(argsworkflow, updateonly, strict, makeTool, debug,
return 0

try:
processobj, metadata = schema_salad.schema.load_and_validate(document_loader, avsc_names, workflowobj, strict)
processobj, metadata = schema_salad.schema.load_and_validate(
document_loader, avsc_names, workflowobj, strict)
except (schema_salad.validate.ValidationException, RuntimeError) as e:
_logger.error(u"Tool definition failed validation:\n%s", e, exc_info=(e if debug else False))
_logger.error(u"Tool definition failed validation:\n%s", e,
exc_info=(e if debug else False))
return 1

if print_pre:
@@ -401,7 +403,10 @@ def load_job_order(args, t, parser, stdin, print_input_deps=False, relative_deps
if args.conformance_test:
loader = Loader({})
else:
jobloaderctx = {"path": {"@type": "@id"}, "format": {"@type": "@id"}, "id": "@id"}
jobloaderctx = {
"path": {"@type": "@id"},
"format": {"@type": "@id"},
"id": "@id"}
jobloaderctx.update(t.metadata.get("$namespaces", {}))
loader = Loader(jobloaderctx)

@@ -478,7 +483,7 @@ def load_job_order(args, t, parser, stdin, print_input_deps=False, relative_deps


def printdeps(obj, document_loader, stdout, relative_deps, basedir=None):
# type: (Dict[str,Any], Loader, IO[Any], bool, str) -> None
# type: (Dict[unicode, Any], Loader, IO[Any], bool, str) -> None
deps = {"class": "File",
"path": obj.get("id", "#")}

Expand Down Expand Up @@ -507,7 +512,7 @@ def makeRelative(u):
stdout.write(json.dumps(deps, indent=4))

def versionstring():
# type: () -> str
# type: () -> unicode
pkg = pkg_resources.require("cwltool")
if pkg:
return u"%s %s" % (sys.argv[0], pkg[0].version)
@@ -524,7 +529,7 @@ def main(argsl=None,
stdout=sys.stdout,
stderr=sys.stderr,
versionfunc=versionstring):
# type: (List[str],Callable[...,Union[str,Dict[str,str]]],Callable[...,Process],Callable[[Dict[str,int]],Dict[str,int]],argparse.ArgumentParser,IO[Any],IO[Any],IO[Any],Callable[[],str]) -> int
# type: (List[str],Callable[...,Union[str,Dict[str,str]]],Callable[...,Process],Callable[[Dict[str,int]],Dict[str,int]],argparse.ArgumentParser,IO[Any],IO[Any],IO[Any],Callable[[],unicode]) -> int

_logger.removeHandler(defaultStreamHandler)
_logger.addHandler(logging.StreamHandler(stderr))
40 changes: 20 additions & 20 deletions cwltool/pathmapper.py
@@ -20,13 +20,12 @@ class PathMapper(object):
"""Mapping of files from relative path provided in the file to a tuple of
(absolute local path, absolute container path)"""

def __new__(cls, referenced_files, basedir, *args, **kwargs):
# type: (Set[str], str) -> Any
instance = super(PathMapper,cls).__new__(cls)
instance._pathmap = {} # type: Dict[str, Tuple[str, str]]
return instance

def __init__(self, referenced_files, basedir):
# type: (Set[str], str) -> None
self._pathmap = {} # type: Dict[str, Tuple[str, str]]
self.setup(referenced_files, basedir)

def setup(self, referenced_files, basedir):
# type: (Set[str], str) -> None
for src in referenced_files:
ab = abspath(src, basedir)
@@ -52,30 +51,28 @@ def reversemap(self, target): # type: (str) -> Tuple[str, str]

class DockerPathMapper(PathMapper):

def __new__(cls, referenced_files, basedir):
# type: (Set[str], str) -> None
instance = super(DockerPathMapper,cls).__new__(cls, referenced_files, basedir)
instance.dirs = {} # type: Dict[str, Union[bool, str]]
return instance

def __init__(self, referenced_files, basedir):
# type: (Set[str], str) -> None
self.dirs = {} # type: Dict[str, Union[bool, str]]
super(DockerPathMapper, self).__init__(referenced_files, basedir)

def setup(self, referenced_files, basedir):
for src in referenced_files:
ab = abspath(src, basedir)
dir, fn = os.path.split(ab)
dirn, fn = os.path.split(ab)

subdir = False
for d in self.dirs:
if dir.startswith(d):
if dirn.startswith(d):
subdir = True
break

if not subdir:
for d in list(self.dirs):
if d.startswith(dir):
# 'dir' is a parent of 'd'
if d.startswith(dirn):
# 'dirn' is a parent of 'd'
del self.dirs[d]
self.dirs[dir] = True
self.dirs[dirn] = True

prefix = "job" + str(random.randint(1, 1000000000)) + "_"

@@ -85,7 +82,8 @@ def __init__(self, referenced_files, basedir):
i = 1
while name in names:
i += 1
name = os.path.join("/var/lib/cwl", prefix + os.path.basename(d) + str(i))
name = os.path.join("/var/lib/cwl",
prefix + os.path.basename(d) + str(i))
names.add(name)
self.dirs[d] = name

@@ -96,9 +94,11 @@ def __init__(self, referenced_files, basedir):
st = os.lstat(deref)
while stat.S_ISLNK(st.st_mode):
rl = os.readlink(deref)
deref = rl if os.path.isabs(rl) else os.path.join(os.path.dirname(deref), rl)
deref = rl if os.path.isabs(rl) else os.path.join(
os.path.dirname(deref), rl)
st = os.lstat(deref)

for d in self.dirs:
if ab.startswith(d):
self._pathmap[src] = (deref, os.path.join(self.dirs[d], ab[len(d)+1:]))
self._pathmap[src] = (deref, os.path.join(
self.dirs[d], ab[len(d)+1:]))