Skip to content

Fix sequential sync losing track of pip install processes #3537

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits into from
Jun 4, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions news/3537.bugfix.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fix ``sync --sequential`` ignoring ``pip install`` errors and logs.
35 changes: 18 additions & 17 deletions pipenv/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -645,10 +645,10 @@ def do_where(virtualenv=False, bare=True):
click.echo(location)


def _cleanup_procs(procs, concurrent, failed_deps_queue, retry=True):
def _cleanup_procs(procs, failed_deps_queue, retry=True):
while not procs.empty():
c = procs.get()
if concurrent:
if not c.blocking:
c.block()
failed = False
if c.return_code != 0:
Expand Down Expand Up @@ -682,7 +682,7 @@ def _cleanup_procs(procs, concurrent, failed_deps_queue, retry=True):
def batch_install(deps_list, procs, failed_deps_queue,
requirements_dir, no_deps=False, ignore_hashes=False,
allow_global=False, blocking=False, pypi_mirror=None,
nprocs=PIPENV_MAX_SUBPROCESS, retry=True):
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We support passing nprocs here to allow us to fall back to --sequential on failure. Please revert this aspect of the change.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm sorry I overlooked the fallback mode case. Eliminating the issue requires removing redundancy between nprocs and maxsize though. I'd propose just dropping the maxsize then:

diff --git a/pipenv/core.py b/pipenv/core.py
index 7daff2be..ba5b3dc2 100644
--- a/pipenv/core.py
+++ b/pipenv/core.py
@@ -747,11 +747,10 @@ def batch_install(deps_list, procs, failed_deps_queue,
             )
             if dep.is_vcs or dep.editable:
                 c.block()
-            if procs.qsize() < nprocs:
-                c.dep = dep
-                procs.put(c)
+            c.dep = dep
+            procs.put(c)
 
-            if procs.full() or procs.qsize() == len(deps_list):
+            if procs.qsize() >= nprocs:
                 _cleanup_procs(procs, not blocking, failed_deps_queue, retry=retry)
 
 
@@ -812,7 +811,7 @@ def do_install_dependencies(
         )
         sys.exit(0)
 
-    procs = queue.Queue(maxsize=PIPENV_MAX_SUBPROCESS)
+    procs = queue.Queue()
     failed_deps_queue = queue.Queue()
     if skip_lock:
         ignore_hashes = True

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hm that makes sense, i'm good with this approach. Thanks for your patience

retry=True):
from .vendor.requirementslib.models.utils import strip_extras_markers_from_requirement
failed = (not retry)
if not failed:
Expand Down Expand Up @@ -753,12 +753,13 @@ def batch_install(deps_list, procs, failed_deps_queue,
extra_indexes=extra_indexes,
use_pep517=not failed,
)
if procs.qsize() < nprocs:
c.dep = dep
procs.put(c)
c.dep = dep
if dep.is_vcs or dep.editable:
c.block()

procs.put(c)
if procs.full() or procs.qsize() == len(deps_list):
_cleanup_procs(procs, not blocking, failed_deps_queue, retry=retry)
_cleanup_procs(procs, failed_deps_queue, retry=retry)


def do_install_dependencies(
Expand All @@ -782,7 +783,6 @@ def do_install_dependencies(
from six.moves import queue
if requirements:
bare = True
blocking = not concurrent
# Load the lockfile if it exists, or if only is being used (e.g. lock is being used).
if skip_lock or only or not project.lockfile_exists:
if not bare:
Expand Down Expand Up @@ -818,27 +818,27 @@ def do_install_dependencies(
)
sys.exit(0)

procs = queue.Queue(maxsize=PIPENV_MAX_SUBPROCESS)
if concurrent:
nprocs = PIPENV_MAX_SUBPROCESS
else:
nprocs = 1
procs = queue.Queue(maxsize=nprocs)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

so you have essentially dropped an argument, moved a queue definition, and redefined something that was passed as a kwarg to be a variable? There is a lot of unrelated code movement here when I think this is the only line related to the actual change described

Copy link
Contributor Author

@gkoz gkoz Mar 7, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't get it. This line relies on the four lines above it and the queue definition didn't move anywhere.

failed_deps_queue = queue.Queue()
if skip_lock:
ignore_hashes = True

install_kwargs = {
"no_deps": no_deps, "ignore_hashes": ignore_hashes, "allow_global": allow_global,
"blocking": blocking, "pypi_mirror": pypi_mirror
"blocking": not concurrent, "pypi_mirror": pypi_mirror
}
if concurrent:
install_kwargs["nprocs"] = PIPENV_MAX_SUBPROCESS
else:
install_kwargs["nprocs"] = 1

# with project.environment.activated():
batch_install(
deps_list, procs, failed_deps_queue, requirements_dir, **install_kwargs
)

if not procs.empty():
_cleanup_procs(procs, concurrent, failed_deps_queue)
_cleanup_procs(procs, failed_deps_queue)

# Iterate over the hopefully-poorly-packaged dependencies…
if not failed_deps_queue.empty():
Expand All @@ -850,15 +850,14 @@ def do_install_dependencies(
failed_dep = failed_deps_queue.get()
retry_list.append(failed_dep)
install_kwargs.update({
"nprocs": 1,
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is specified as an argument to the function and re-specified here because we intentionally install with --sequential if there is a failure. This functionality needs to stay as is

"retry": False,
"blocking": True,
})
batch_install(
retry_list, procs, failed_deps_queue, requirements_dir, **install_kwargs
)
if not procs.empty():
_cleanup_procs(procs, False, failed_deps_queue, retry=False)
_cleanup_procs(procs, failed_deps_queue, retry=False)


def convert_three_to_python(three, python):
Expand Down Expand Up @@ -2507,6 +2506,8 @@ def do_run(command, args, three=None, python=False, pypi_mirror=None):
previous_pipenv_active_value = os.environ.get("PIPENV_ACTIVE")
os.environ["PIPENV_ACTIVE"] = vistir.misc.fs_str("1")

os.environ.pop("PIP_SHIMS_BASE_MODULE", None)

try:
script = project.build_script(command, args)
cmd_string = ' '.join([script.command] + script.args)
Expand Down
44 changes: 44 additions & 0 deletions tests/integration/test_sync.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
import json
import os

import pytest
Expand Down Expand Up @@ -68,3 +69,46 @@ def test_sync_should_not_lock(PipenvInstance, pypi):
c = p.pipenv('sync')
assert c.return_code == 0
assert lockfile_content == p.lockfile


@pytest.mark.sync
@pytest.mark.lock
def test_sync_sequential_detect_errors(PipenvInstance, pypi):
    """`sync --sequential` must propagate a failed pip install as a nonzero exit.

    Regression test: sequential mode previously lost track of the pip
    subprocess and reported success even when installation failed.
    """
    with PipenvInstance(pypi=pypi) as p:
        pipfile_source = """
[packages]
requests = "*"
            """.strip()
        with open(p.pipfile_path, 'w') as f:
            f.write(pipfile_source)

        result = p.pipenv('lock')
        assert result.return_code == 0

        # Corrupt the locked hash for `requests` so the subsequent
        # install is guaranteed to fail with a hash mismatch.
        lockfile_data = p.lockfile
        lockfile_data['default']['requests']['hashes'] = ['sha256:' + '0' * 64]
        with open(p.lockfile_path, 'w') as f:
            json.dump(lockfile_data, f)

        result = p.pipenv('sync --sequential')
        assert result.return_code != 0


@pytest.mark.sync
@pytest.mark.lock
def test_sync_sequential_verbose(PipenvInstance, pypi):
    # NOTE(review): verifies that `sync --sequential --verbose` surfaces the
    # underlying pip install logs — presumably these were swallowed in
    # sequential mode before this fix; confirm against the PR description.
    with PipenvInstance(pypi=pypi) as p:
        with open(p.pipfile_path, 'w') as f:
            contents = """
[packages]
requests = "*"
            """.strip()
            f.write(contents)

        c = p.pipenv('lock')
        assert c.return_code == 0

        c = p.pipenv('sync --sequential --verbose')
        # pip's per-package success message must appear in the captured
        # output for every package locked in the default section.
        for package in p.lockfile['default']:
            assert 'Successfully installed {}'.format(package) in c.out