Skip to content

Commit

Permalink
WIP continued, some fixes
Browse files Browse the repository at this point in the history
Signed-off-by: Dan Ryan <dan@danryan.co>
  • Loading branch information
techalchemy committed Nov 25, 2018
1 parent 2798733 commit bd78dae
Showing 1 changed file with 41 additions and 24 deletions.
65 changes: 41 additions & 24 deletions pipenv/utils.py
Original file line number Diff line number Diff line change
def resolve_separate(req):
    """Recursively collect constraint lines for a non-editable, non-VCS requirement.

    This includes non-editable urls to zip or tarballs, non-editable paths, etc.

    :param req: A requirementslib ``Requirement``-like object exposing
        ``is_file_or_url``, ``is_vcs`` and ``run_requires()``.
    :return: The set of constraint line strings declared by the requirement's
        setup metadata, with URL-based sub-dependencies resolved recursively.
    :rtype: set
    """

    constraints = set()
    if req.is_file_or_url and not req.is_vcs:
        setup_info = req.run_requires()
        # "requires" maps dependency names to requirement objects parsed from
        # the package's setup metadata; iterate the values directly instead of
        # copying them through an identity comprehension.
        for r in setup_info.get("requires", {}).values():
            if getattr(r, "url", None) and not getattr(r, "editable", False):
                # Deferred import: only needed when we actually encounter a
                # non-editable URL-based sub-dependency.
                from .vendor.requirementslib.models.requirements import Requirement
                # Collapse internal whitespace so the stringified requirement
                # parses as a single requirement line.
                requirement = Requirement.from_line("".join(str(r).split()))
                constraints |= resolve_separate(requirement)
                continue
            constraints.add(str(r))
    return constraints


def get_resolver_metadata(deps, index_lookup, markers_lookup, project, sources):
from .vendor.requirementslib.models.requirements import Requirement
constraints = []
constraints = set()
skipped = {}
for dep in deps:
if not dep:
continue
Expand All @@ -245,9 +246,11 @@ def get_resolver_metadata(deps, index_lookup, markers_lookup, project, sources):
req = Requirement.from_line(dep)
if req.is_file_or_url and not req.is_vcs:
# TODO: This is a significant hack, should probably be reworked
constraints.extend(resolve_separate(req))
constraints |= resolve_separate(req)
name, entry = req.pipfile_entry
skipped[name] = entry
continue
constraints.append(req.constraint_line)
constraints.add(req.constraint_line)

if url:
index_lookup[req.name] = project.get_source(url=url).get("name")
Expand All @@ -256,7 +259,7 @@ def get_resolver_metadata(deps, index_lookup, markers_lookup, project, sources):
# eg pypiwin32
if req.markers:
markers_lookup[req.name] = req.markers.replace('"', "'")
return constraints
return constraints, skipped


class Resolver(object):
Expand Down Expand Up @@ -492,17 +495,31 @@ def actually_resolve_deps(
req_dir=None,
):
from pipenv.vendor.vistir.path import create_tracked_tempdir
from pipenv.vendor.requirementslib.models.requirements import Requirement
from pipenv.vendor import pip_shims

if not req_dir:
req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-")
warning_list = []

with warnings.catch_warnings(record=True) as warning_list:
constraints = get_resolver_metadata(
constraints, skipped = get_resolver_metadata(
deps, index_lookup, markers_lookup, project, sources,
)
resolver = Resolver(constraints, req_dir, project, sources, clear=clear, pre=pre)
resolved_tree = resolver.resolve()
for k, v in skipped.items():
url = v.get("file")
req = Requirement.from_pipfile(k, v)
is_url = url and not url.startswith("file:")
path = v.get("path")
if not is_url and not path:
path = pip_shims.shims.url_to_path(url)
if is_url or (path and os.path.exists(path) and not os.path.isdir(path)):
existing = next(iter(req for req in resolved_tree if req.name == k), None)
if existing:
resolved_tree.remove(existing)
resolved_tree.add(req.as_ireq())
hashes = resolver.resolve_hashes()

for warning in warning_list:
Expand Down Expand Up @@ -630,14 +647,12 @@ def venv_resolve_deps(
results = []
pipfile_section = "dev_packages" if dev else "packages"
lockfile_section = "develop" if dev else "default"
# TODO: We can only use all of the requirements here because we weed them out later
# in `get_resolver_metadata` via `resolve_separate` which uses requirementslib
# to handle the resolution of special case dependencies (including local paths and
# file urls). We should probably rework this at some point.
deps = project._pipfile.dev_requirements if dev else project._pipfile.requirements
vcs_deps = [r for r in deps if r.is_vcs]
vcs_section = "vcs_{0}".format(pipfile_section)
vcs_deps = getattr(project, vcs_section, {})
if project.pipfile_exists:
deps = project._pipfile.dev_requirements if dev else project._pipfile.requirements
vcs_deps = [r for r in deps if r.is_vcs]
else:
vcs_deps = getattr(project, vcs_section, {})
if not deps and not vcs_deps:
return {}

Expand Down Expand Up @@ -714,8 +729,12 @@ def venv_resolve_deps(
raise RuntimeError("There was a problem with locking.")
lockfile[lockfile_section] = prepare_lockfile(results, pipfile, lockfile[lockfile_section])
for k, v in vcs_lockfile.items():
if k in getattr(project, vcs_section, {}) or k not in lockfile[lockfile_section]:
if k in getattr(project, vcs_section, {}):
lockfile[lockfile_section][k].update(v)
print(v)
else:
lockfile[lockfile_section][k] = v
print(v)


def resolve_deps(
Expand Down Expand Up @@ -795,6 +814,12 @@ def resolve_deps(
collected_hashes = []
if result in hashes:
collected_hashes = list(hashes.get(result))
elif resolver._should_include_hashes(result):
try:
hash_map = resolver.resolver.resolve_hashes([result])
collected_hashes = next(iter(hash_map.values()), [])
except (ValueError, KeyError, IndexError, ConnectionError):
pass
elif any(
"python.org" in source["url"] or "pypi.org" in source["url"]
for source in sources
Expand All @@ -819,14 +844,6 @@ def resolve_deps(
crayons.red("Warning", bold=True), name
), err=True
)
# # Collect un-collectable hashes (should work with devpi).
# try:
# collected_hashes = collected_hashes + list(
# list(resolver.resolve_hashes([result]).items())[0][1]
# )
# except (ValueError, KeyError, ConnectionError, IndexError):
# if verbose:
# print('Error generating hash for {}'.format(name))
req.hashes = sorted(set(collected_hashes))
name, _entry = req.pipfile_entry
entry = {}
Expand Down

0 comments on commit bd78dae

Please sign in to comment.