Fix sequential sync losing track of pip install processes #3537
Changes from all commits: 81502e0, 57389a2, 4231d47, 403a651, aa70ddb, 06f3cae
@@ -0,0 +1 @@
+Fix ``sync --sequential`` ignoring ``pip install`` errors and logs.
@@ -645,10 +645,10 @@ def do_where(virtualenv=False, bare=True):
             click.echo(location)


-def _cleanup_procs(procs, concurrent, failed_deps_queue, retry=True):
+def _cleanup_procs(procs, failed_deps_queue, retry=True):
     while not procs.empty():
         c = procs.get()
-        if concurrent:
+        if not c.blocking:
             c.block()
         failed = False
         if c.return_code != 0:
@@ -682,7 +682,7 @@ def _cleanup_procs(procs, concurrent, failed_deps_queue, retry=True):
 def batch_install(deps_list, procs, failed_deps_queue,
                   requirements_dir, no_deps=False, ignore_hashes=False,
                   allow_global=False, blocking=False, pypi_mirror=None,
-                  nprocs=PIPENV_MAX_SUBPROCESS, retry=True):
+                  retry=True):
     from .vendor.requirementslib.models.utils import strip_extras_markers_from_requirement
     failed = (not retry)
     if not failed:
@@ -753,12 +753,13 @@ def batch_install(deps_list, procs, failed_deps_queue,
             extra_indexes=extra_indexes,
             use_pep517=not failed,
         )
-        if procs.qsize() < nprocs:
-            c.dep = dep
-            procs.put(c)
+        c.dep = dep
+        if dep.is_vcs or dep.editable:
+            c.block()

+        procs.put(c)
         if procs.full() or procs.qsize() == len(deps_list):
-            _cleanup_procs(procs, not blocking, failed_deps_queue, retry=retry)
+            _cleanup_procs(procs, failed_deps_queue, retry=retry)


 def do_install_dependencies(
@@ -782,7 +783,6 @@ def do_install_dependencies(
     from six.moves import queue
     if requirements:
         bare = True
-    blocking = not concurrent
     # Load the lockfile if it exists, or if only is being used (e.g. lock is being used).
     if skip_lock or only or not project.lockfile_exists:
         if not bare:
@@ -818,27 +818,27 @@ def do_install_dependencies(
         )
         sys.exit(0)

-    procs = queue.Queue(maxsize=PIPENV_MAX_SUBPROCESS)
+    if concurrent:
+        nprocs = PIPENV_MAX_SUBPROCESS
+    else:
+        nprocs = 1
+    procs = queue.Queue(maxsize=nprocs)
Reviewer: so you have essentially dropped an argument, moved a queue definition, and redefined something that was passed as a …

Author: I don't get it. This line relies on the four lines above it and the queue definition didn't move anywhere.
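For readers following this thread, the lines under discussion derive nprocs from concurrent and then size the worker queue with it, so batch_install no longer receives nprocs directly. A minimal sketch of that flow, using a simplified stand-in constant and a hypothetical wrapper function rather than the real pipenv code:

from six.moves import queue

PIPENV_MAX_SUBPROCESS = 8  # stand-in value for illustration; pipenv reads this from its environment settings

def make_install_queue(concurrent):
    # Hypothetical helper, not a pipenv function: the concurrency limit now
    # lives in the queue's maxsize, which is why the separate nprocs argument
    # to batch_install was dropped in this change.
    nprocs = PIPENV_MAX_SUBPROCESS if concurrent else 1
    return queue.Queue(maxsize=nprocs)

procs = make_install_queue(concurrent=True)             # bounded at PIPENV_MAX_SUBPROCESS
sequential_procs = make_install_queue(concurrent=False)  # holds at most one process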
     failed_deps_queue = queue.Queue()
     if skip_lock:
         ignore_hashes = True

     install_kwargs = {
         "no_deps": no_deps, "ignore_hashes": ignore_hashes, "allow_global": allow_global,
-        "blocking": blocking, "pypi_mirror": pypi_mirror
+        "blocking": not concurrent, "pypi_mirror": pypi_mirror
     }
-    if concurrent:
-        install_kwargs["nprocs"] = PIPENV_MAX_SUBPROCESS
-    else:
-        install_kwargs["nprocs"] = 1

     # with project.environment.activated():
     batch_install(
         deps_list, procs, failed_deps_queue, requirements_dir, **install_kwargs
     )

     if not procs.empty():
-        _cleanup_procs(procs, concurrent, failed_deps_queue)
+        _cleanup_procs(procs, failed_deps_queue)

     # Iterate over the hopefully-poorly-packaged dependencies…
     if not failed_deps_queue.empty():
@@ -850,15 +850,14 @@ def do_install_dependencies(
         failed_dep = failed_deps_queue.get()
         retry_list.append(failed_dep)
     install_kwargs.update({
-        "nprocs": 1,
Reviewer: This is specified as an argument to the function and re-specified here because we intentionally install with …
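In the updated code the explicit "nprocs": 1 becomes redundant because the retry pass already sets "blocking": True, so each retried install is waited on before the next one starts. A rough sketch of that sequential retry behaviour, using a hypothetical run_pip_install stand-in rather than pipenv's real helper:

from six.moves import queue

def retry_failed_deps(retry_list, run_pip_install):
    # run_pip_install is a hypothetical stand-in for pipenv's pip_install call.
    procs = queue.Queue()
    for dep in retry_list:
        # blocking=True means the call only returns once pip has exited,
        # so retries are serialized without needing an nprocs=1 limit.
        c = run_pip_install(dep, blocking=True)
        c.dep = dep
        procs.put(c)  # kept so _cleanup_procs can still report failures
    return procs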
         "retry": False,
         "blocking": True,
     })
     batch_install(
         retry_list, procs, failed_deps_queue, requirements_dir, **install_kwargs
     )
     if not procs.empty():
-        _cleanup_procs(procs, False, failed_deps_queue, retry=False)
+        _cleanup_procs(procs, failed_deps_queue, retry=False)


 def convert_three_to_python(three, python):
@@ -2507,6 +2506,8 @@ def do_run(command, args, three=None, python=False, pypi_mirror=None):
     previous_pipenv_active_value = os.environ.get("PIPENV_ACTIVE")
     os.environ["PIPENV_ACTIVE"] = vistir.misc.fs_str("1")

+    os.environ.pop("PIP_SHIMS_BASE_MODULE", None)
+
     try:
         script = project.build_script(command, args)
         cmd_string = ' '.join([script.command] + script.args)
Reviewer: We support passing `nprocs` here to allow us to fall back to `--sequential` on failure. Please revert this aspect of the change.

Author: I'm sorry, I overlooked the fallback mode case. Eliminating the issue requires removing the redundancy between `nprocs` and `maxsize`, though. I'd propose just dropping the `maxsize` then:

Reviewer: hm, that makes sense, I'm good with this approach. Thanks for your patience.
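The code suggestion that followed that proposal is not preserved in this view. A plausible reading of "dropping the maxsize", offered here only as an assumption about the intent, is to keep nprocs as the single source of truth and create the queue unbounded:

from six.moves import queue

# Assumed shape of the proposal: nprocs stays a batch_install argument (so the
# --sequential fallback can still pass nprocs=1) and the queue is created
# without a maxsize, removing the redundancy between the two values.
procs = queue.Queue()

def should_drain(procs, nprocs, total_deps):
    # Hypothetical helper, not actual pipenv code: with an unbounded queue,
    # the drain check compares qsize() to nprocs instead of using procs.full().
    return procs.qsize() >= nprocs or procs.qsize() == total_deps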