Refactor operations.prepare.prepare_linked_requirement
This addresses the TODO by breaking the method into smaller
(not small enough IMHO) ones.
McSinyx committed Jun 9, 2020
1 parent 19e739c commit 152a04b
Showing 1 changed file with 72 additions and 99 deletions.
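
In outline, the commit replaces one monolithic method with two focused helpers plus a slimmer prepare_linked_requirement that delegates to them. A condensed sketch of the resulting shape (simplified from the diff below; bodies elided):

    class RequirementPreparer(object):
        def _log_preparing_link(self, req, link):
            """Log the way the link is being prepared."""

        def _get_linked_req_hashes(self, req):
            """Return the Hashes object that downloads are checked against."""

        def prepare_linked_requirement(self, req):
            """Prepare a requirement to be obtained from req.link,
            delegating logging and hash policy to the helpers above."""
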
171 changes: 72 additions & 99 deletions src/pip/_internal/operations/prepare.py
@@ -376,131 +376,104 @@ def _download_should_save(self):
"Could not find or access download directory '{}'"
.format(self.download_dir))

def prepare_linked_requirement(
self,
req, # type: InstallRequirement
):
# type: (...) -> AbstractDistribution
"""Prepare a requirement that would be obtained from req.link
"""
assert req.link
link = req.link

# TODO: Breakup into smaller functions
if link.scheme == 'file':
def _log_preparing_link(self, req, link):
# type: (InstallRequirement, Link) -> None
"""Log the way the link prepared."""
if link.is_file:
path = link.file_path
logger.info('Processing %s', display_path(path))
else:
logger.info('Collecting %s', req.req or req)

download_dir = self.download_dir
def _get_linked_req_hashes(self, req):
# type: (InstallRequirement) -> Hashes
"""Return"""
if not self.require_hashes:
return req.hashes(trust_internet=True)
# We could check these first 2 conditions inside unpack_url
# and save repetition of conditions, but then we would
# report less-useful error messages for unhashable
# requirements, complaining that there's no hash provided.
if req.link.is_vcs:
raise VcsHashUnsupported()
elif req.link.is_existing_dir():
raise DirectoryUrlHashUnsupported()
if req.original_link is None and not req.is_pinned:
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the future.
# file:/// URLs aren't pinnable, so don't complain about
# them not being pinned.
raise HashUnpinned()
# If known-good hashes are missing for this requirement,
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
return req.hashes(trust_internet=False) or MissingHashes()

def prepare_linked_requirement(self, req):
# type: (InstallRequirement) -> AbstractDistribution
"""Prepare a requirement to be obtained from req.link."""
# TODO: Break up into smaller functions
assert req.link
link = req.link
self._log_preparing_link(req, link)
if link.is_wheel and self.wheel_download_dir:
# when doing 'pip wheel` we download wheels to a
# dedicated dir.
# When doing `pip wheel` we download wheels to a dedicated dir.
download_dir = self.wheel_download_dir

if link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
else:
# We always delete unpacked sdists after pip runs.
autodelete_unpacked = True
download_dir = self.download_dir

# We always delete unpacked sdists after pip runs.
# Unpacked wheels are deleted too, except when installing:
# then the unpacked wheel itself is installed, whereas when
# downloading it is only needed to read metadata.
autodelete_unpacked = not link.is_wheel or bool(download_dir)
# source_dir is only set for editable requirements.
assert req.source_dir is None
req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)

# If a checkout exists, it's unwise to keep going.
# Version inconsistencies are logged later,
# but do not fail the installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a "
"pre-existing build directory ({}). This is likely "
"due to a previous installation that failed. pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again.".format(req, req.source_dir))

with indent_log():
# Since source_dir is only set for editable requirements.
assert req.source_dir is None
req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
" pre-existing build directory ({}). This is "
"likely due to a previous installation that failed"
". pip is being responsible and not assuming it "
"can delete this. Please delete it and try again."
.format(req, req.source_dir)
)

# Now that we have the real link, we can tell what kind of
# requirements we have and raise some more informative errors
# than otherwise. (For example, we can raise VcsHashUnsupported
# for a VCS URL rather than HashMissing.)
if self.require_hashes:
# We could check these first 2 conditions inside
# unpack_url and save repetition of conditions, but then
# we would report less-useful error messages for
# unhashable requirements, complaining that there's no
# hash provided.
if link.is_vcs:
raise VcsHashUnsupported()
elif link.is_existing_dir():
raise DirectoryUrlHashUnsupported()
if not req.original_link and not req.is_pinned:
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the
# future.
#
# file:/// URLs aren't pinnable, so don't complain
# about them not being pinned.
raise HashUnpinned()

hashes = req.hashes(trust_internet=not self.require_hashes)
if self.require_hashes and not hashes:
# Known-good hashes are missing for this requirement, so
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
hashes = MissingHashes()

try:
local_file = unpack_url(
link, req.source_dir, self.downloader, download_dir,
hashes=hashes,
)
hashes=self._get_linked_req_hashes(req))
except requests.HTTPError as exc:
logger.critical(
'Could not install requirement %s because of error %s',
req,
exc,
)
raise InstallationError(
'Could not install requirement {} because of HTTP '
'error {} for URL {}'.format(req, exc, link)
)

# For use in later processing, preserve the file path on the
# requirement.
if local_file:
req.local_file_path = local_file.path
'error {} for URL {}'.format(req, exc, link))
else:
# For use in later processing,
# preserve the file path on the requirement.
if local_file:
req.local_file_path = local_file.path

abstract_dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
)
req, self.req_tracker, self.finder, self.build_isolation)

if download_dir:
if link.is_existing_dir():
logger.info('Link is a directory, ignoring download_dir')
elif local_file:
download_location = os.path.join(
download_dir, link.filename
)
download_dir, link.filename)
if not os.path.exists(download_location):
shutil.copy(local_file.path, download_location)
logger.info(
'Saved %s', display_path(download_location)
)

download_path = display_path(download_location)
logger.info('Saved %s', download_path)
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if link.is_vcs:
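
One subtle spot in the rewrite is collapsing the old three-branch autodelete logic into a single boolean expression. A minimal standalone check of the equivalence (hypothetical helper name; the real code computes this inline from link.is_wheel and download_dir):

    def autodelete_unpacked(is_wheel, has_download_dir):
        # Single-expression form of the old if/else branching.
        return not is_wheel or has_download_dir

    # The three cases the old branches encoded:
    assert autodelete_unpacked(False, False)     # sdist: always deleted after the run
    assert autodelete_unpacked(True, True)       # wheel being downloaded: unpacked only for metadata
    assert not autodelete_unpacked(True, False)  # wheel being installed: the unpacked wheel is used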
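
The other helper, _get_linked_req_hashes, encodes pip's hash-checking policy as an early-exit ladder. A minimal sketch of the same decision order, assuming plain booleans in place of InstallRequirement/Link attributes and one stand-in exception for pip's VcsHashUnsupported, DirectoryUrlHashUnsupported and HashUnpinned:

    class HashPolicyError(Exception):
        """Stand-in for pip's per-case hash exceptions."""

    def linked_req_hashes(require_hashes, is_vcs, is_existing_dir,
                          is_pinned, known_hashes):
        if not require_hashes:
            # Outside --require-hashes mode, index-provided hashes are trusted.
            return known_hashes
        # Checked before downloading so the error can say the requirement
        # is unhashable instead of just "no hash provided".
        if is_vcs:
            raise HashPolicyError("VCS URLs cannot be hash-checked")
        if is_existing_dir:
            raise HashPolicyError("directory URLs cannot be hash-checked")
        if not is_pinned:
            # A new upload could silently change what gets installed.
            raise HashPolicyError("requirement must be pinned with ==")
        # An empty result here is where the real method substitutes a
        # MissingHashes() facade, which later raises HashMissing with the
        # digest the user should pin.
        return known_hashes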
