From 8b244bef510a86aa63531322453f349a7ad2969d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 29 Mar 2019 00:33:18 -0400 Subject: [PATCH 01/18] Update locking to respect pep517 and isolation - Fix verbosity and logging - Clean minor fixes Signed-off-by: Dan Ryan --- pipenv/patched/piptools/repositories/pypi.py | 13 ++- pipenv/utils.py | 84 ++++++++++++++----- .../vendoring/patches/patched/piptools.patch | 46 +++++----- 3 files changed, 93 insertions(+), 50 deletions(-) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index 9d81bd5542..10a0e46954 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -248,8 +248,12 @@ def gen(ireq): def resolve_reqs(self, download_dir, ireq, wheel_cache): results = None - ireq.isolated = False + ireq.isolated = self.build_isolation ireq._wheel_cache = wheel_cache + if ireq and not ireq.link: + ireq.populate_link(self.finder, False, False) + if ireq.link and not ireq.link.is_wheel: + ireq.ensure_has_source_dir(self.source_dir) try: from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer except ImportError: @@ -273,7 +277,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache): 'download_dir': download_dir, 'wheel_download_dir': self._wheel_download_dir, 'progress_bar': 'off', - 'build_isolation': False, + 'build_isolation': self.build_isolation, } resolver_kwargs = { 'finder': self.finder, @@ -284,9 +288,10 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache): 'ignore_requires_python': True, 'ignore_installed': True, 'ignore_compatibility': False, - 'isolated': False, + 'isolated': True, 'wheel_cache': wheel_cache, - 'use_user_site': False + 'use_user_site': False, + 'use_pep517': True } resolver = None preparer = None diff --git a/pipenv/utils.py b/pipenv/utils.py index 060b85325d..18c7dd0142 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -340,15 +340,19 @@ def 
__repr__(self): @staticmethod @lru_cache() def _get_pip_command(): - from .vendor.pip_shims.shims import Command + from .vendor.pip_shims.shims import Command, cmdoptions class PipCommand(Command): """Needed for pip-tools.""" name = "PipCommand" - from pipenv.patched.piptools.scripts.compile import get_pip_command - return get_pip_command() + from pipenv.patched.piptools.pip import get_pip_command + pip_cmd = get_pip_command() + pip_cmd.parser.add_option(cmdoptions.no_use_pep517()) + pip_cmd.parser.add_option(cmdoptions.use_pep517()) + pip_cmd.parser.add_option(cmdoptions.no_build_isolation()) + return pip_cmd @classmethod def get_metadata( @@ -574,16 +578,29 @@ def pip_command(self): self._pip_command = self._get_pip_command() return self._pip_command - def prepare_pip_args(self): + def prepare_pip_args(self, use_pep517=True, build_isolation=True): pip_args = [] if self.sources: pip_args = prepare_pip_source_args(self.sources, pip_args) + if not use_pep517: + pip_args.append("--no-use-pep517") + if not build_isolation: + pip_args.append("--no-build-isolation") + pip_args.extend(["--cache-dir", environments.PIPENV_CACHE_DIR]) return pip_args @property def pip_args(self): + use_pep517 = False if ( + os.environ.get("PIP_NO_USE_PEP517", None) is not None + ) else (True if os.environ.get("PIP_USE_PEP517", None) is not None else None) + build_isolation = False if ( + os.environ.get("PIP_NO_BUILD_ISOLATION", None) is not None + ) else (True if os.environ.get("PIP_BUILD_ISOLATION", None) is not None else None) if self._pip_args is None: - self._pip_args = self.prepare_pip_args() + self._pip_args = self.prepare_pip_args( + use_pep517=use_pep517, build_isolation=build_isolation + ) return self._pip_args def prepare_constraint_file(self): @@ -595,8 +612,13 @@ def prepare_constraint_file(self): dir=self.req_dir, delete=False, ) + skip_args = ("build-isolation", "use-pep517", "cache-dir") + args_to_add = [ + arg for arg in self.pip_args + if not any(bad_arg in arg for bad_arg 
in skip_args) + ] if self.sources: - requirementstxt_sources = " ".join(self.pip_args) if self.pip_args else "" + requirementstxt_sources = " ".join(args_to_add) if args_to_add else "" requirementstxt_sources = requirementstxt_sources.replace(" --", "\n--") constraints_file.write(u"{0}\n".format(requirementstxt_sources)) constraints = self.initial_constraints @@ -633,7 +655,8 @@ def repository(self): if self._repository is None: from pipenv.patched.piptools.repositories.pypi import PyPIRepository self._repository = PyPIRepository( - pip_options=self.pip_options, use_json=False, session=self.session + pip_options=self.pip_options, use_json=False, session=self.session, + build_isolation=self.pip_options.build_isolation ) return self._repository @@ -672,22 +695,24 @@ def resolve(self): from pipenv.patched.piptools.exceptions import NoCandidateFound from pipenv.patched.piptools.cache import CorruptCacheError from .exceptions import CacheError, ResolutionFailure - try: - results = self.resolver.resolve(max_rounds=environments.PIPENV_MAX_ROUNDS) - except CorruptCacheError as e: - if environments.PIPENV_IS_CI or self.clear: - if self._retry_attempts < 3: - self.get_resolver(clear=True, pre=self.pre) - self._retry_attempts += 1 - self.resolve() + with temp_environ(): + os.environ["PIP_NO_USE_PEP517"] = str("") + try: + results = self.resolver.resolve(max_rounds=environments.PIPENV_MAX_ROUNDS) + except CorruptCacheError as e: + if environments.PIPENV_IS_CI or self.clear: + if self._retry_attempts < 3: + self.get_resolver(clear=True, pre=self.pre) + self._retry_attempts += 1 + self.resolve() + else: + raise CacheError(e.path) + except (NoCandidateFound, DistributionNotFound, HTTPError) as e: + raise ResolutionFailure(message=str(e)) else: - raise CacheError(e.path) - except (NoCandidateFound, DistributionNotFound, HTTPError) as e: - raise ResolutionFailure(message=str(e)) - else: - self.results = results - self.resolved_tree.update(results) - return self.resolved_tree + 
self.results = results + self.resolved_tree.update(results) + return self.resolved_tree @lru_cache(maxsize=1024) def fetch_candidate(self, ireq): @@ -939,6 +964,11 @@ def actually_resolve_deps( req_dir=None, ): from pipenv.vendor.vistir.path import create_tracked_tempdir + from pipenv.vendor.requirementslib.models.requirements import Requirement + import pipenv.patched.piptools.logging + + if environments.is_verbose(): + pipenv.patched.piptools.logging.log.verbosity = 1 if not req_dir: req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-") @@ -1014,6 +1044,9 @@ def resolve(cmd, sp): echo(out, err=True) echo(c.err.strip(), err=True) sys.exit(c.return_code) + if environments.is_verbose(): + for ln in c.err.strip(): + sp.hide_and_write(ln) return c @@ -1171,7 +1204,12 @@ def venv_resolve_deps( sp.write(decode_for_output("Resolving dependencies...")) c = resolve(cmd, sp) results = c.out.strip() - sp.green.ok(environments.PIPENV_SPINNER_OK_TEXT.format("Success!")) + if c.ok: + sp.green.ok(environments.PIPENV_SPINNER_OK_TEXT.format("Success!")) + else: + sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!")) + click_echo("Output: {0}".format(c.out.strip()), err=True) + click_echo("Error: {0}".format(c.err.strip()), err=True) try: with open(target_file.name, "r") as fh: results = json.load(fh) diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index c220eb4bed..52d66f513e 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -161,9 +161,9 @@ index e54ae08..75b8208 100644 +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet, Specifier + -+os.environ["PIP_SHIMS_BASE_MODULE"] = str("pip") ++os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") +import pip_shims -+from pip_shims.shims import VcsSupport, WheelCache, InstallationError ++from pip_shims.shims import 
VcsSupport, WheelCache, InstallationError, pip_version + + from .._compat import ( @@ -255,7 +255,7 @@ index e54ae08..75b8208 100644 # pip 19.0 has removed process_dependency_links from the PackageFinder constructor - if pkg_resources.parse_version(pip.__version__) < pkg_resources.parse_version('19.0'): -+ if pkg_resources.parse_version(pip_shims.shims.pip_version) < pkg_resources.parse_version('19.0'): ++ if pkg_resources.parse_version(pip_version) < pkg_resources.parse_version('19.0'): finder_kwargs["process_dependency_links"] = pip_options.process_dependency_links self.finder = PackageFinder(**finder_kwargs) @@ -287,7 +287,7 @@ index e54ae08..75b8208 100644 # Reuses pip's internal candidate sort key to sort matching_candidates = [candidates_by_version[ver] for ver in matching_versions] -@@ -135,14 +187,71 @@ class PyPIRepository(BaseRepository): +@@ -135,14 +187,75 @@ class PyPIRepository(BaseRepository): # Turn the candidate into a pinned InstallRequirement return make_install_requirement( @@ -353,15 +353,19 @@ index e54ae08..75b8208 100644 + def resolve_reqs(self, download_dir, ireq, wheel_cache): results = None -+ ireq.isolated = False ++ ireq.isolated = self.build_isolation + ireq._wheel_cache = wheel_cache ++ if ireq and not ireq.link: ++ ireq.populate_link(self.finder, False, False) ++ if ireq.link and not ireq.link.is_wheel: ++ ireq.ensure_has_source_dir(self.source_dir) try: from pip._internal.operations.prepare import RequirementPreparer - from pip._internal.resolve import Resolver as PipResolver except ImportError: # Pip 9 and below reqset = RequirementSet( -@@ -151,9 +260,11 @@ class PyPIRepository(BaseRepository): +@@ -151,9 +264,11 @@ class PyPIRepository(BaseRepository): download_dir=download_dir, wheel_download_dir=self._wheel_download_dir, session=self.session, @@ -374,27 +378,24 @@ index e54ae08..75b8208 100644 else: # pip >= 10 preparer_kwargs = { -@@ -162,7 +273,7 @@ class PyPIRepository(BaseRepository): - 'download_dir': download_dir, - 
'wheel_download_dir': self._wheel_download_dir, - 'progress_bar': 'off', -- 'build_isolation': self.build_isolation, -+ 'build_isolation': False, - } - resolver_kwargs = { - 'finder': self.finder, -@@ -170,8 +281,9 @@ class PyPIRepository(BaseRepository): +@@ -170,11 +285,13 @@ class PyPIRepository(BaseRepository): 'upgrade_strategy': "to-satisfy-only", 'force_reinstall': False, 'ignore_dependencies': False, - 'ignore_requires_python': False, + 'ignore_requires_python': True, 'ignore_installed': True, +- 'isolated': False, + 'ignore_compatibility': False, - 'isolated': False, ++ 'isolated': True, 'wheel_cache': wheel_cache, - 'use_user_site': False -@@ -186,15 +298,22 @@ class PyPIRepository(BaseRepository): +- 'use_user_site': False ++ 'use_user_site': False, ++ 'use_pep517': True + } + resolver = None + preparer = None +@@ -186,15 +303,21 @@ class PyPIRepository(BaseRepository): resolver_kwargs['preparer'] = preparer reqset = RequirementSet() ireq.is_direct = True @@ -413,7 +414,6 @@ index e54ae08..75b8208 100644 + cleanup_fn() + except OSError: + pass -+ + results = set(results) if results else set() + return results, ireq @@ -422,7 +422,7 @@ index e54ae08..75b8208 100644 """ Given a pinned or an editable InstallRequirement, returns a set of dependencies (also InstallRequirements, but not necessarily pinned). 
-@@ -223,7 +342,8 @@ class PyPIRepository(BaseRepository): +@@ -223,7 +346,8 @@ class PyPIRepository(BaseRepository): wheel_cache = WheelCache(CACHE_DIR, self.pip_options.format_control) prev_tracker = os.environ.get('PIP_REQ_TRACKER') try: @@ -432,7 +432,7 @@ index e54ae08..75b8208 100644 finally: if 'PIP_REQ_TRACKER' in os.environ: if prev_tracker: -@@ -245,6 +365,10 @@ class PyPIRepository(BaseRepository): +@@ -245,6 +369,10 @@ class PyPIRepository(BaseRepository): if ireq.editable: return set() @@ -443,7 +443,7 @@ index e54ae08..75b8208 100644 if not is_pinned_requirement(ireq): raise TypeError( "Expected pinned requirement, got {}".format(ireq)) -@@ -252,24 +376,16 @@ class PyPIRepository(BaseRepository): +@@ -252,24 +380,16 @@ class PyPIRepository(BaseRepository): # We need to get all of the candidates that match our current version # pin, these will represent all of the files that could possibly # satisfy this constraint. From 83868c8f1ac1a565c1d9ed89b5117766d45b455d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 22 Jun 2019 21:27:56 -0400 Subject: [PATCH 02/18] Cut over to devpi for tests and improve logging - Verbose logs will now write gracefully to the terminal even while the spinner is running (i.e. during locking) - This PR also cuts over to Devpi for a backing cache rather than a hacked httpbin instance - Inclues a refactor of `pip_install` to deduplicate logic - Attempts to switch back to relying on native editable installs in pip as the current implementation is broken on master (i.e. 
nothing is installed in the virtualenv) - Fixes #3809 Signed-off-by: Dan Ryan --- .gitmodules | 4 +- Makefile | 46 ++ Pipfile.lock | 373 +++++++++++++++- news/3809.bugfix.rst | 1 + news/3810.feature.rst | 1 + pipenv/__init__.py | 2 + pipenv/core.py | 505 +++++++++++----------- pipenv/utils.py | 28 +- pytest.ini | 1 + setup.py | 2 +- tasks/release.py | 2 +- tests/integration/conftest.py | 88 +++- tests/integration/test_cli.py | 32 +- tests/integration/test_dot_venv.py | 16 +- tests/integration/test_install_basic.py | 109 ++--- tests/integration/test_install_markers.py | 55 +-- tests/integration/test_install_twists.py | 46 +- tests/integration/test_install_uri.py | 46 +- tests/integration/test_lock.py | 118 ++--- tests/integration/test_pipenv.py | 12 +- tests/integration/test_project.py | 24 +- tests/integration/test_sync.py | 20 +- tests/integration/test_uninstall.py | 14 +- tests/integration/test_windows.py | 16 +- tests/pypi | 2 +- tests/test_artifacts/git/requests | 2 +- tests/test_artifacts/git/six | 2 +- 27 files changed, 1025 insertions(+), 542 deletions(-) create mode 100644 news/3809.bugfix.rst create mode 100644 news/3810.feature.rst diff --git a/.gitmodules b/.gitmodules index fb727deeb2..e2f779afa6 100644 --- a/.gitmodules +++ b/.gitmodules @@ -6,7 +6,7 @@ url = https://github.com/pinax/pinax.git [submodule "tests/test_artifacts/git/requests"] path = tests/test_artifacts/git/requests - url = https://github.com/requests/requests.git + url = https://github.com/kennethreitz/requests.git [submodule "tests/test_artifacts/git/six"] path = tests/test_artifacts/git/six url = https://github.com/benjaminp/six.git @@ -24,7 +24,7 @@ url = https://github.com/pallets/flask.git [submodule "tests/test_artifacts/git/requests-2.18.4"] path = tests/test_artifacts/git/requests-2.18.4 - url = https://github.com/requests/requests + url = https://github.com/kennethreitz/requests [submodule "tests/pypi"] path = tests/pypi url = 
https://github.com/sarugaku/pipenv-test-artifacts.git diff --git a/Makefile b/Makefile index 2722bdb47e..059945bbb1 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,50 @@ +get_venv_dir:=$(shell mktemp -d 2>/dev/null || mktemp -d -t 'tmpvenv') +venv_dir := $(get_venv_dir)/pipenv_venv +venv_file := $(CURDIR)/.test_venv +get_venv_path =$(file < $(venv_file)) + format: black pipenv/*.py test: docker-compose up + +.PHONY: ramdisk +ramdisk: + sudo mkdir -p /mnt/ramdisk + sudo mount -t tmpfs -o size=2g tmpfs /mnt/ramdisk + sudo chown -R ${USER}:${USER} /mnt/ramdisk + +.PHONY: ramdisk-virtualenv +ramdisk-virtualenv: ramdisk + [ ! -e "/mnt/ramdisk/.venv/bin/activate" ] && \ + python -m virtualenv /mnt/ramdisk/.venv + @echo "/mnt/ramdisk/.venv" >> $(venv_file) + +.PHONY: virtualenv +virtualenv: + [ ! -e $(venv_dir) ] && rm -rf $(venv_file) && python -m virtualenv $(venv_dir) + @echo $(venv_dir) >> $(venv_file) + +.PHONY: test-install +test-install: virtualenv + . $(get_venv_path)/bin/activate && \ + python -m pip install --upgrade pip virtualenv -e .[tests,dev] && \ + pipenv install --dev + +.PHONY: submodules +submodules: + git submodule sync + git submodule update --init --recursive + +.PHONY: tests +tests: virtualenv submodules test-install + . $(get_venv_path)/bin/activate && \ + pipenv run pytest -ra -vvv --full-trace --tb=long + +.PHONY: test-specific +test-specific: submodules virtualenv test-install + . $(get_venv_path)/bin/activate && pipenv run pytest -ra -k '$(tests)' + +.PHONY: retest +retest: virtualenv submodules test-install + . 
$(get_venv_path)/bin/activate && pipenv run pytest -ra -k 'test_check_unused or test_install_editable_git_tag or test_get_vcs_refs or test_skip_requirements_when_pipfile or test_editable_vcs_install or test_basic_vcs_install or test_git_vcs_install or test_ssh_vcs_install or test_vcs_can_use_markers' -vvv --full-trace --tb=long diff --git a/Pipfile.lock b/Pipfile.lock index 1675ad80ab..94693e77f2 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -37,6 +37,29 @@ ], "version": "==1.4.3" }, + "argon2-cffi": { + "hashes": [ + "sha256:1029fef2f7808a89e3baa306f5ace36e768a2d847ee7b056399adcd7707f6256", + "sha256:206857d870c6ca3c92514ca70a3c371be47383f7ae6a448f5a16aa17baa550ba", + "sha256:3558a7e22b886efad0c99b23b9be24880213b4e2d1630095459978cfcae570e2", + "sha256:457fd6de741859aa91c750ffad97f12675c4356047e43392c5fb21f5d9f48b24", + "sha256:4a1daa9f6960cdbdb865efcabac4158693459f52e7582c9f8a7c92dc61cdc8e1", + "sha256:4bfb603184ea678563c0f1f1872367e81a3d2b70646a627d38ccede68d7b9194", + "sha256:5d7493ed10e384b84b6dac862fe96c443297a25b991a8364d94a67b6cd1e9569", + "sha256:5fb080047517add8d27baeb38a314814b5ab9c72630606788909b3f60a8f054a", + "sha256:7453b16496b5629005a43c5f5707ef8a31fcfa5bb0ed34b5ba7b86a3cc9d02f2", + "sha256:81548a27b919861040cb928a350733f4f9455dd67c7d1ba92eb5960a1d7f8b26", + "sha256:84fd768d523f87097d572cdfb98e868cdbdc8e80e3d444787fd32e7f6ae25b02", + "sha256:8b4cf6c0298f33b92fcd50f19899175b7421690fc8bc6ac68368320c158cbf51", + "sha256:af6a4799411eee3f7133fead973727f5fefacd18ea23f51039e70cae51ceb109", + "sha256:df7d60a4cf58dc08319fedc0506b42ec0fa5221c6e1f9e2e89fcddff92507390", + "sha256:f9072e9f70185a57e36228d34aad4bb644e6a8b4fd6a45f856c666f38f6de96c", + "sha256:fbae1d08b52f9a791500c650ab51ba00e374eaeccb5dbaa41b99dab4fd4115e8", + "sha256:fe91e3bd95aeae70366693dcc970db03a71619d19df6fbaabf662c3b3c54cdf8", + "sha256:fec86ee6f913154846171f66ee30c893c0cde3d434911f8b31c1f84a9aea410e" + ], + "version": "==19.1.0" + }, "arpeggio": { "hashes": [ 
"sha256:a5258b84f76661d558492fa87e42db634df143685a0e51802d59cae7daad8732", @@ -112,6 +135,39 @@ ], "version": "==2019.6.16" }, + "cffi": { + "hashes": [ + "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774", + "sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d", + "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90", + "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b", + "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63", + "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45", + "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25", + "sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3", + "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b", + "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647", + "sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016", + "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4", + "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb", + "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753", + "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7", + "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9", + "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f", + "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8", + "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f", + "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc", + "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42", + "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3", + "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909", + 
"sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45", + "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d", + "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512", + "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff", + "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201" + ], + "version": "==1.12.3" + }, "chardet": { "hashes": [ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", @@ -119,6 +175,14 @@ ], "version": "==3.0.4" }, + "check-manifest": { + "hashes": [ + "sha256:8754cc8efd7c062a3705b442d1c23ff702d4477b41a269c2e354b25e1f5535a4", + "sha256:a4c555f658a7c135b8a22bd26c2e55cfaf5876e4d5962d8c25652f2addd556bc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.39" + }, "click": { "hashes": [ "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", @@ -127,6 +191,13 @@ "index": "pypi", "version": "==7.0" }, + "colorama": { + "hashes": [ + "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", + "sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48" + ], + "version": "==0.4.1" + }, "configparser": { "hashes": [ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", @@ -143,6 +214,34 @@ "markers": "python_version < '3'", "version": "==0.5.5" }, + "decorator": { + "hashes": [ + "sha256:86156361c50488b84a3f148056ea716ca587df2f0de1d34750d35c21312725de", + "sha256:f069f3a01830ca754ba5258fde2278454a0b5b79e0d7f5c13b3b97e57d4acff6" + ], + "version": "==4.4.0" + }, + "devpi-client": { + "hashes": [ + "sha256:05398402d4335a8173d9118df409d16e085a6dacf54300851212d2f6370e1497", + "sha256:e9e19e87c61a75bc93137553db69554c69efb82cfc7ee83e0305cf6abfc91aa0" + ], + "version": "==4.4.0" + }, + "devpi-common": { + "hashes": [ + "sha256:2c7a6471c0f5b07ac9257adec3b3c3a89193ee672fdeb0a6f29487dc9d675e0c", + 
"sha256:c743abd38447258e27cdb733fa905c275e5dd3eeae25bab9ff59182a1083ed91" + ], + "version": "==3.3.2" + }, + "devpi-server": { + "hashes": [ + "sha256:96ab6390ea8aa2e80b4acdcf8e37f105af6ce7d0cb4efa18ba82de8e8b6c91ad", + "sha256:e92de95c869927ba628d25b024c460b63740d52ebba11dae79146d8cc3b6a033" + ], + "version": "==4.9.0" + }, "docutils": { "hashes": [ "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", @@ -177,6 +276,13 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.6.0" }, + "filelock": { + "hashes": [ + "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59", + "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836" + ], + "version": "==3.0.12" + }, "flake8": { "hashes": [ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661", @@ -205,7 +311,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "functools32": { @@ -216,6 +322,13 @@ "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, + "future": { + "hashes": [ + "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.17.1" + }, "futures": { "hashes": [ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265", @@ -224,6 +337,14 @@ "markers": "python_version < '3.2'", "version": "==3.2.0" }, + "hupper": { + "hashes": [ + "sha256:5869ec2a46ba8ad481b0a27ca68f3e01dc7d3424925b7c872d9fcdff44b43442", + "sha256:8532d116fef1f89add74dbd8d5e6541cb3278b04f4fe9780a1356cb6adba1141" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.8.1" + }, "idna": { "hashes": [ 
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", @@ -350,6 +471,13 @@ ], "version": "==5.0.0" }, + "orderedmultidict": { + "hashes": [ + "sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b", + "sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5" + ], + "version": "==1.0" + }, "packaging": { "hashes": [ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", @@ -378,6 +506,31 @@ "ref": "a2ba0b30c86339cae5ef3a03046fc9c583452c40", "version": "==0.3.1.dev0" }, + "passlib": { + "extras": [ + "argon2" + ], + "hashes": [ + "sha256:3d948f64138c25633613f303bcc471126eae67c04d5e3f6b7b8ce6242f8653e0", + "sha256:43526aea08fa32c6b6dbbbe9963c4c767285b78147b7437597f992812f69d280" + ], + "version": "==1.7.1" + }, + "pastedeploy": { + "hashes": [ + "sha256:d423fb9d51fdcf853aa4ff43ac7ec469b643ea19590f67488122d6d0d772350a", + "sha256:fe53697ec2754703096b75d0ba29112b0590b4ce46726fe4f9408fd006e4eefc" + ], + "version": "==2.0.1" + }, + "path.py": { + "hashes": [ + "sha256:9f2169633403aa0423f6ec000e8701dd1819526c62465f5043952f92527fea0f", + "sha256:e107a3a8834a97be2a047f4b641822afc76a2b78352610102782732e6b389aa3" + ], + "markers": "python_version >= '3.5'", + "version": "==12.0.1" + }, "pathlib2": { "hashes": [ "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", @@ -408,6 +561,22 @@ ], "version": "==1.5.0.1" }, + "plaster": { + "hashes": [ + "sha256:215c921a438b5349931fd7df9a5a11a3572947f20f4bc6dd622ac08f1c3ba249", + "sha256:8351c7c7efdf33084c1de88dd0f422cbe7342534537b553c49b857b12d98c8c3" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.0" + }, + "plaster-pastedeploy": { + "hashes": [ + "sha256:391d93a4e1ff81fc3bae27508ebb765b61f1724ae6169f83577f06b6357be7fd", + "sha256:7c8aa37c917b615c70bf942b24dc1e0455c49f62f1a2214b1a0dd98871644bbb" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 
3.3'", + "version": "==0.7" + }, "pluggy": { "hashes": [ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", @@ -416,6 +585,21 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.12.0" }, + "psutil": { + "hashes": [ + "sha256:028a1ec3c6197eadd11e7b46e8cc2f0720dc18ac6d7aabdb8e8c0d6c9704f000", + "sha256:503e4b20fa9d3342bcf58191bbc20a4a5ef79ca7df8972e6197cc14c5513e73d", + "sha256:863a85c1c0a5103a12c05a35e59d336e1d665747e531256e061213e2e90f63f3", + "sha256:954f782608bfef9ae9f78e660e065bd8ffcfaea780f9f2c8a133bb7cb9e826d7", + "sha256:b6e08f965a305cd84c2d07409bc16fbef4417d67b70c53b299116c5b895e3f45", + "sha256:bc96d437dfbb8865fc8828cf363450001cb04056bbdcdd6fc152c436c8a74c61", + "sha256:cf49178021075d47c61c03c0229ac0c60d5e2830f8cab19e2d88e579b18cdb76", + "sha256:d5350cb66690915d60f8b233180f1e49938756fb2d501c93c44f8fb5b970cc63", + "sha256:eba238cf1989dfff7d483c029acb0ac4fcbfc15de295d682901f0e2497e6781a" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==5.6.3" + }, "py": { "hashes": [ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", @@ -432,6 +616,12 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.5.0" }, + "pycparser": { + "hashes": [ + "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" + ], + "version": "==2.19" + }, "pyflakes": { "hashes": [ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", @@ -456,6 +646,14 @@ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.0" }, + "pyramid": { + "hashes": [ + "sha256:51bf64647345237c00d2fe558935e0e4938c156e29f17e203457fd8e1d757dc7", + "sha256:d80ccb8cfa550139b50801591d4ca8a5575334adb493c402fce2312f55d07d66" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.10.4" + 
}, "pytest": { "hashes": [ "sha256:4a784f1d4f2ef198fe9b7aef793e9fa1a3b2f84e822d9b3a64a181293a572d45", @@ -464,6 +662,24 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==4.6.3" }, + "pytest-devpi-server": { + "hashes": [ + "sha256:1e1a4f4783f6833e49ae72956e68694b26de85e094d39c9cc6e5900774ecf2b9", + "sha256:a8f37e448d0a8c11c10f51b48a2bae4007431786223c39c3e22343e41f3aa8ee", + "sha256:c54e2ad77f9e2031d0284038b2bd1044debfcd1965be81c3d990df39e80503a3", + "sha256:d5e06acdd89966417d781d93cd08a2f1c21265bc06d5e4c1dd9309cdd0af988f" + ], + "version": "==1.7.0" + }, + "pytest-fixture-config": { + "hashes": [ + "sha256:1413e5e2c6572a3d7709de7ad69dc35004393d777a7883c8431b6f78a2e28fd0", + "sha256:41a17417721f6862ce6b40e3280fddd8e1659b2c306ec46b237d7021fec5218e", + "sha256:9bda6a817a3ac91a118dd42274cb3cc42dc0290a11317a7217d17eaae82800c5", + "sha256:a0e35e239e70fa12614bbe9ca51d3238fbeb89519deb80cd365b487665a666b0" + ], + "version": "==1.7.0" + }, "pytest-forked": { "hashes": [ "sha256:5fe33fbd07d7b1302c95310803a5e5726a4ff7f19d5a542b7ce57c76fed8135f", @@ -475,6 +691,24 @@ "editable": true, "path": "./tests/pytest-pypi" }, + "pytest-server-fixtures": { + "hashes": [ + "sha256:0fa5b1be6a84180e50ff91a58580e81ad3eb45828878a07942fbe384fcd86d1f", + "sha256:3d93f2ca4bb0a949a55cbdd3598fc44bc3199277dd6b31be39df7f7ebb7a3280", + "sha256:42a6020e60fd0c362dae0a594777b85e6b4a6d84ff3972ac3261e7de5f2f27fc", + "sha256:716e8911e0184d0fd41aa04c2980f04f7bf1d603d90d40de4817b8d6f7b5c7d6" + ], + "version": "==1.7.0" + }, + "pytest-shutil": { + "hashes": [ + "sha256:03c67282a0c520a790ca8db6f65e18851fae3786f45e3ae34e8d9fccbf266a72", + "sha256:343a6902a8ed0cbd29cf8954e2726382228a2ad2f5f7eac589b0d0dff878d806", + "sha256:b3568a675cb092c9b15c789ebd3046b79cfaca476868939748729d14557a98ff", + "sha256:d8165261de76e7508505c341d94c02b113dc963f274543abca74dbfabd021261" + ], + "version": "==1.7.0" + }, "pytest-tap": { "hashes": [ 
"sha256:3b05ec931424bbe44e944726b68f7ef185bb6d25ce9ce21ac52c9af7ffa9b506", @@ -490,6 +724,14 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.29.0" }, + "python-dateutil": { + "hashes": [ + "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", + "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.0" + }, "pytz": { "hashes": [ "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda", @@ -504,6 +746,13 @@ ], "version": "==24.0" }, + "repoze.lru": { + "hashes": [ + "sha256:0429a75e19380e4ed50c0694e26ac8819b4ea7851ee1fc7583c8572db80aff77", + "sha256:f77bf0e1096ea445beadd35f3479c5cff2aa1efe604a133e67150bc8630a62ea" + ], + "version": "==0.7" + }, "requests": { "hashes": [ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", @@ -519,6 +768,13 @@ ], "version": "==0.9.1" }, + "retry": { + "hashes": [ + "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606", + "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4" + ], + "version": "==0.9.2" + }, "rope": { "hashes": [ "sha256:6b728fdc3e98a83446c27a91fc5d56808a004f8beab7a31ab1d7224cecc7d969", @@ -528,6 +784,30 @@ "index": "pypi", "version": "==0.14.0" }, + "ruamel.yaml": { + "hashes": [ + "sha256:17dbf6b7362e7aee8494f7a0f5cffd44902a6331fe89ef0853b855a7930ab845", + "sha256:23731c9efb79f3f5609dedffeb6c5c47a68125fd3d4b157d9fc71b1cd49076a9", + "sha256:2bbdd598ae57bac20968cf9028cc67d37d83bdb7942a94b9478110bc72193148", + "sha256:34586084cdd60845a3e1bece2b58f0a889be25450db8cc0ea143ddf0f40557a2", + "sha256:35957fedbb287b01313bb5c556ffdc70c0277c3500213b5e73dfd8716f748d77", + "sha256:414cb87a40974a575830b406ffab4ab8c6cbd82eeb73abd2a9d1397c1f0223e1", + "sha256:428775be75db68d908b17e4e8dda424c410222f170dc173246aa63e972d094b3", + 
"sha256:514f670f7d36519bda504d507edfe63e3c20489f86c86d42bc4d9a6dbdf82c7b", + "sha256:5cb962c1ac6887c5da29138fbbe3b4b7705372eb54e599907fa63d4cd743246d", + "sha256:5f6e30282cf70fb7754e1a5f101e27b5240009766376e131b31ab49f14fe81be", + "sha256:86f8e010af6af0b4f42de2d0d9b19cb441e61d3416082186f9dd03c8552d13ad", + "sha256:8d47ed1e557d546bd2dfe54f504d7274274602ff7a0652cde84c258ad6c2d96d", + "sha256:98668876720bce1ac08562d8b93a564a80e3397e442c7ea19cebdcdf73da7f74", + "sha256:9e1f0ddc18d8355dcf5586a5d90417df56074f237812b8682a93b62cca9d2043", + "sha256:a7bc812a72a79d6b7dbb96fa5bee3950464b65ec055d3abc4db6572f2373a95c", + "sha256:b72e13f9f206ee103247b07afd5a39c8b1aa98e8eba80ddba184d030337220ba", + "sha256:bcff8ea9d916789e85e24beed8830c157fb8bc7c313e554733a8151540e66c01", + "sha256:c76e78b3bab652069b8d6f7889b0e72f3455c2b854b2e0a8818393d149ad0a0d" + ], + "markers": "python_version >= '3.5'", + "version": "==0.15.97" + }, "scandir": { "hashes": [ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", @@ -598,6 +878,12 @@ "markers": "sys_platform == 'linux'", "version": "==0.8.5" }, + "strictyaml": { + "hashes": [ + "sha256:06d7100587695a0edfabd772a6c6fb69071fc38c413df599e22dfd40e52f5fad" + ], + "version": "==1.0.1" + }, "tap.py": { "hashes": [ "sha256:8ad62ba6898fcef4913c67d468d0c4beae3109b74c03363538145e31b1840b29", @@ -605,6 +891,12 @@ ], "version": "==2.5" }, + "termcolor": { + "hashes": [ + "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b" + ], + "version": "==1.1.0" + }, "toml": { "hashes": [ "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", @@ -620,6 +912,14 @@ ], "version": "==19.2.0" }, + "tox": { + "hashes": [ + "sha256:f5c8e446b51edd2ea97df31d4ded8c8b72e7d6c619519da6bb6084b9dd5770f9", + "sha256:f87fd33892a2df0950e5e034def9468988b8d008c7e9416be665fcc0dd45b14f" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==3.12.1" + }, "tqdm": { "hashes": [ 
"sha256:0a860bf2683fdbb4812fe539a6c22ea3f1777843ea985cb8c3807db448a0f7ab", @@ -628,6 +928,13 @@ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==4.32.1" }, + "translationstring": { + "hashes": [ + "sha256:4ee44cfa58c52ade8910ea0ebc3d2d84bdcad9fa0422405b1801ec9b9a65b72d", + "sha256:e26c7bf383413234ed442e0980a2ebe192b95e3745288a8fd2805156d27515b4" + ], + "version": "==1.3" + }, "twine": { "hashes": [ "sha256:0fb0bfa3df4f62076cab5def36b1a71a2e4acb4d1fa5c97475b048117b1a6446", @@ -653,6 +960,13 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", "version": "==1.25.3" }, + "venusian": { + "hashes": [ + "sha256:2f2d077a1eedc3fda40425f65687c8c494da7e83d7c23bc2c4d1a40eb3ca5b6d", + "sha256:64ec8285b80b110d0ae5db4280e90e31848a59db98db1aba4d7d46f48ce91e3e" + ], + "version": "==1.2.0" + }, "virtualenv": { "hashes": [ "sha256:b7335cddd9260a3dd214b73a2521ffc09647bde3e9457fcca31dc3be3999d04a", @@ -669,6 +983,13 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.5.3" }, + "waitress": { + "hashes": [ + "sha256:4e2a6e6fca56d6d3c279f68a2b2cc9b4798d834ea3c3a9db3e2b76b6d66f4526", + "sha256:90fe750cd40b282fae877d3c866255d485de18e8a232e93de42ebd9fb750eebb" + ], + "version": "==1.3.0" + }, "wcwidth": { "hashes": [ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", @@ -683,6 +1004,14 @@ ], "version": "==0.5.1" }, + "webob": { + "hashes": [ + "sha256:05aaab7975e0ee8af2026325d656e5ce14a71f1883c52276181821d6d5bf7086", + "sha256:36db8203c67023d68c1b00208a7bf55e3b10de2aa317555740add29c619de12b" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.8.5" + }, "werkzeug": { "hashes": [ "sha256:865856ebb55c4dcd0630cdd8f3331a1847a819dda7e8c750d3db6f2aa6c0209c", @@ -698,6 +1027,48 @@ ], "markers": "python_version >= '2.7'", "version": "==0.5.1" + }, + 
"zope.deprecation": { + "hashes": [ + "sha256:0d453338f04bacf91bbfba545d8bcdf529aa829e67b705eac8c1a7fdce66e2df", + "sha256:f1480b74995958b24ce37b0ef04d3663d2683e5d6debc96726eff18acf4ea113" + ], + "version": "==4.4.0" + }, + "zope.interface": { + "hashes": [ + "sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c", + "sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b", + "sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02", + "sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f", + "sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5", + "sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375", + "sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487", + "sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2", + "sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0", + "sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b", + "sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63", + "sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39", + "sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745", + "sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc", + "sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2", + "sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa", + "sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1", + "sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc", + "sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98", + "sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97", + "sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab", + "sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127", + 
"sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d", + "sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe", + "sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891", + "sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1", + "sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b", + "sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966", + "sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==4.6.0" } } } diff --git a/news/3809.bugfix.rst b/news/3809.bugfix.rst new file mode 100644 index 0000000000..bd603aaf9d --- /dev/null +++ b/news/3809.bugfix.rst @@ -0,0 +1 @@ +Fixed several bugs which could prevent editable VCS dependencies from being installed into target environments, even when reporting successful installation. diff --git a/news/3810.feature.rst b/news/3810.feature.rst new file mode 100644 index 0000000000..335037790c --- /dev/null +++ b/news/3810.feature.rst @@ -0,0 +1 @@ +Improved verbose logging output during ``pipenv lock`` will now stream output to the console while maintaining a spinner. 
diff --git a/pipenv/__init__.py b/pipenv/__init__.py index 695a493912..31d49fc1a4 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -37,11 +37,13 @@ pass from pipenv.vendor.vistir.misc import get_text_stream + stdout = get_text_stream("stdout") stderr = get_text_stream("stderr") if os.name == "nt": from pipenv.vendor.vistir.misc import _can_use_color, _wrap_for_color + if _can_use_color(stdout): stdout = _wrap_for_color(stdout) if _can_use_color(stderr): diff --git a/pipenv/core.py b/pipenv/core.py index e0df54e375..0991347430 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1,5 +1,6 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function +import io import json as simplejson import logging import os @@ -13,7 +14,7 @@ import urllib3.util as urllib3_util import vistir -import click_completion +from click_completion import init as init_completion import delegator import dotenv import pipfile @@ -26,7 +27,8 @@ PIPENV_CACHE_DIR, PIPENV_COLORBLIND, PIPENV_DEFAULT_PYTHON_VERSION, PIPENV_DONT_USE_PYENV, PIPENV_HIDE_EMOJIS, PIPENV_MAX_SUBPROCESS, PIPENV_PYUP_API_KEY, PIPENV_SHELL_FANCY, PIPENV_SKIP_VALIDATION, - PIPENV_YES, SESSION_IS_INTERACTIVE, PIP_EXISTS_ACTION, PIPENV_RESOLVE_VCS + PIPENV_YES, SESSION_IS_INTERACTIVE, PIP_EXISTS_ACTION, PIPENV_RESOLVE_VCS, + is_type_checking ) from .project import Project, SourceNotFound from .utils import ( @@ -39,6 +41,12 @@ ) +if is_type_checking(): + from typing import Dict, List, Mapping, Optional, Union + from pipenv.vendor.requirementslib.models.requirements import Requirement + TSourceDict = Dict[str, Union[str, bool]] + + # Packages that should be ignored later. BAD_PACKAGES = ( "distribute", @@ -73,7 +81,7 @@ INSTALL_LABEL2 = " " STARTING_LABEL = " " # Enable shell completion. -click_completion.init() +init_completion() # Disable colors, for the color blind and others who do not prefer colors. 
if PIPENV_COLORBLIND: crayons.disable() @@ -737,12 +745,6 @@ def batch_install(deps_list, procs, failed_deps_queue, os.environ["PIP_USER"] = vistir.compat.fs_str("0") if "PYTHONHOME" in os.environ: del os.environ["PYTHONHOME"] - if not install_deps and not environments.PIPENV_RESOLVE_VCS: - link = getattr(dep.req, "link", None) - is_wheel = False - if link: - is_wheel = link.is_wheel - install_deps = dep.is_file_or_url and not (is_wheel or dep.editable) c = pip_install( dep, ignore_hashes=any([ignore_hashes, dep.editable, dep.is_vcs]), @@ -1281,12 +1283,168 @@ def do_init( ) +def get_pip_args( + pre=False, # type: bool + verbose=False, # type: bool, + upgrade=False, # type: bool, + require_hashes=False, # type: bool, + no_build_isolation=False, # type: bool, + no_use_pep517=False, # type: bool, + no_deps=False, # type: bool, + selective_upgrade=False, # type: bool + src_dir=None, # type: Optional[str] +): + # type: (...) -> List[str] + from .vendor.packaging.version import parse as parse_version + arg_map = { + "pre": ["--pre"], + "verbose": ["--verbose"], + "upgrade": ["--upgrade"], + "require_hashes": ["--require-hashes"], + "no_build_isolation": ["--no-build-isolation"], + "no_use_pep517": [], + "no_deps": ["--no-deps"], + "selective_upgrade": [ + "--upgrade-strategy=only-if-needed", "--exists_action={0}".format(PIP_EXISTS_ACTION or "i") + ], + "src_dir": src_dir, + } + if project.environment.pip_version >= parse_version("19.0"): + arg_map["no_use_pep517"].append("--no-use-pep517") + if project.environment.pip_version < parse_version("19.1"): + arg_map["no_use_pep517"].append("--no-build-isolation") + arg_set = [] + for key in arg_map.keys(): + if key in locals() and locals().get(key): + arg_set.extend(arg_map.get(key)) + return list(vistir.misc.dedup(arg_set)) + + +def get_project_index(index=None, trusted_hosts=None): + # type: (Optional[Union[str, TSourceDict]], Optional[List[str]]) -> TSourceDict + from .vendor.urllib3.util import parse_url + if 
trusted_hosts is None: + trusted_hosts = [] + if isinstance(index, vistir.compat.Mapping): + return index + try: + source = project.find_source(index) + except SourceNotFound: + index_url = parse_url(index) + src_name = project.src_name_from_url(index) + verify_ssl = index_url.host not in trusted_hosts + source = {"url": index, "verify_ssl": verify_ssl, "name": src_name} + return source + + +def get_source_list( + index=None, # type: Optional[Union[str, TSourceDict]] + extra_indexes=None, # type: Optional[List[str]] + trusted_hosts=None, # type: Optional[List[str]] + pypi_mirror=None, # type: Optional[str] +): + # type: (...) -> List[TSourceDict] + sources = [] # type: List[TSourceDict] + if index: + sources.append(get_project_index(index)) + if extra_indexes: + if isinstance(extra_indexes, six.string_types): + extra_indexes = [extra_indexes,] + for source in extra_indexes: + extra_src = get_project_index(source) + if not sources or extra_src["url"] != sources[0]["url"]: + sources.append(extra_src) + else: + for source in project.pipfile_sources: + if not sources or source["url"] != sources[0]["url"]: + sources.append(source) + if not sources: + sources = project.pipfile_sources + if pypi_mirror: + sources = [ + create_mirror_source(pypi_mirror) if is_pypi_url(source["url"]) else source + for source in sources + ] + return sources + + +def get_requirement_line( + requirement, # type: Requirement + src_dir=None, # type: Optional[str] + include_hashes=True, # type: bool + format_for_file=False, # type: bool +): + # type: (...) 
-> Union[List[str], str] + line = None + if requirement.vcs or requirement.is_file_or_url: + if src_dir and requirement.line_instance.wheel_kwargs: + requirement.line_instance._wheel_kwargs.update({ + "src_dir": src_dir + }) + # if requirement.vcs and requirement.editable: + # repo = requirement.req.get_vcs_repo(src_dir=src_dir) + # requirement.line_instance.vcsrepo + # line = repo.url + # name = requirement.name + # line = "{0}+".format(requirement.vcs) if requirement.vcs else "" + # if requirement.extras: + # name = "{0}{1}".format(name, requirement.extras_as_pip) + # line = "{0}{1}#egg={2}".format( + # line, vistir.path.path_to_url(repo.checkout_directory), requirement.name + # ) + # if repo.subdirectory: + # line = "{0}&subdirectory={1}".format(line, repo.subdirectory) + # else: + line = requirement.line_instance.line + if requirement.line_instance.markers: + line = '{0}; {1}'.format(line, requirement.line_instance.markers) + if not format_for_file: + line = '"{0}"'.format(line) + if requirement.editable: + if not format_for_file: + return ["-e", line] + return '-e {0}'.format(line) + if not format_for_file: + return [line,] + return line + return requirement.as_line(include_hashes=include_hashes, as_list=not format_for_file) + + +def write_requirement_to_file( + requirement, # type: Requirement + requirements_dir=None, # type: Optional[str] + src_dir=None, # type: Optional[str] + include_hashes=True # type: bool +): + # type: (...) 
-> str + if not requirements_dir: + requirements_dir = vistir.path.create_tracked_tempdir( + prefix="pipenv", suffix="requirements") + line = get_requirement_line( + requirement, src_dir, include_hashes=include_hashes, format_for_file=True + ) + + f = vistir.compat.NamedTemporaryFile( + prefix="pipenv-", suffix="-requirement.txt", dir=requirements_dir, + delete=False + ) + if environments.is_verbose(): + click.echo( + "Writing supplied requirement line to temporary file: {0!r}".format(line), + err=True + ) + f.write(vistir.misc.to_bytes(line)) + r = f.name + f.close() + return r + + def pip_install( requirement=None, r=None, allow_global=False, ignore_hashes=False, - no_deps=True, + no_deps=None, block=True, index=None, pre=False, @@ -1298,18 +1456,38 @@ def pip_install( use_pep517=True ): from pipenv.patched.notpip._internal import logger as piplogger - from .vendor.vistir.compat import Mapping - from .vendor.urllib3.util import parse_url - src = [] - write_to_tmpfile = False - if requirement: - needs_hashes = not requirement.editable and not ignore_hashes and r is None - has_subdir = requirement.is_vcs and requirement.req.subdirectory - write_to_tmpfile = needs_hashes or has_subdir - + src_dir = None if not trusted_hosts: trusted_hosts = [] + trusted_hosts.extend(os.environ.get("PIP_TRUSTED_HOSTS", [])) + if not allow_global: + src_dir = project.virtualenv_src_location + # src_dir = os.getenv("PIP_SRC", os.getenv("PIP_SRC_DIR", project.virtualenv_src_location)) + else: + src_dir = os.getenv("PIP_SRC", os.getenv("PIP_SRC_DIR")) + if requirement: + if requirement.editable or not requirement.hashes: + ignore_hashes = True + elif not (requirement.is_vcs or requirement.editable or requirement.vcs): + ignore_hashes = False + line = None + if requirement.vcs and not requirement.line_instance.markers: + line = get_requirement_line(requirement, src_dir, include_hashes=not ignore_hashes, format_for_file=False) + else: + r = write_requirement_to_file( + requirement, 
requirements_dir=requirements_dir, src_dir=src_dir, + include_hashes=not ignore_hashes + ) + # Try installing for each source in project.sources. + sources = get_source_list( + index, extra_indexes=extra_indexes, trusted_hosts=trusted_hosts, + pypi_mirror=pypi_mirror + ) + if r: + with io.open(r, "r") as fh: + if "--hash" not in fh.read(): + ignore_hashes = True if environments.is_verbose(): piplogger.setLevel(logging.INFO) if requirement: @@ -1318,214 +1496,52 @@ def pip_install( err=True, ) - if requirement: - ignore_hashes = True if not requirement.hashes else ignore_hashes - - # Create files for hash mode. - if write_to_tmpfile: - if not requirements_dir: - requirements_dir = vistir.path.create_tracked_tempdir( - prefix="pipenv", suffix="requirements") - f = vistir.compat.NamedTemporaryFile( - prefix="pipenv-", suffix="-requirement.txt", dir=requirements_dir, - delete=False - ) - line = requirement.as_line(include_hashes=not ignore_hashes) - if environments.is_verbose(): - click.echo( - "Writing requirement line to temporary file: {0!r}".format(line), - err=True - ) - f.write(vistir.misc.to_bytes(line)) - r = f.name - f.close() - if requirement and requirement.vcs: # Install dependencies when a package is a non-editable VCS dependency. # Don't specify a source directory when using --system. - if not allow_global and ("PIP_SRC" not in os.environ): - src.extend(["--src", "{0}".format(project.virtualenv_src_location)]) - - # Try installing for each source in project.sources. 
- if index: - if isinstance(index, (Mapping, dict)): - index_source = index - else: - try: - index_source = project.find_source(index) - index_source = index_source.copy() - except SourceNotFound: - src_name = project.src_name_from_url(index) - index_url = parse_url(index) - verify_ssl = index_url.host not in trusted_hosts - index_source = {"url": index, "verify_ssl": verify_ssl, "name": src_name} - sources = [index_source.copy(),] - if extra_indexes: - if isinstance(extra_indexes, six.string_types): - extra_indexes = [extra_indexes,] - for idx in extra_indexes: - extra_src = None - if isinstance(idx, (Mapping, dict)): - extra_src = idx - try: - extra_src = project.find_source(idx) if not extra_src else extra_src - except SourceNotFound: - src_name = project.src_name_from_url(idx) - src_url = parse_url(idx) - verify_ssl = src_url.host not in trusted_hosts - extra_src = {"url": idx, "verify_ssl": verify_ssl, "name": extra_src} - if extra_src["url"] != index_source["url"]: - sources.append(extra_src) - else: - for idx in project.pipfile_sources: - if idx["url"] != sources[0]["url"]: - sources.append(idx) - else: - sources = project.pipfile_sources - if pypi_mirror: - sources = [ - create_mirror_source(pypi_mirror) if is_pypi_url(source["url"]) else source - for source in sources - ] - - line_kwargs = {"as_list": True, "include_hashes": not ignore_hashes} - - # Install dependencies when a package is a VCS dependency. - if requirement and requirement.vcs: - ignore_hashes = True - # Don't specify a source directory when using --system. - src_dir = None - if "PIP_SRC" in os.environ: - src_dir = os.environ["PIP_SRC"] - src = ["--src", os.environ["PIP_SRC"]] - if not requirement.editable and not environments.PIPENV_RESOLVE_VCS: + if not requirement.editable and no_deps is not True: # Leave this off becauase old lockfiles don't have all deps included # TODO: When can it be turned back on? 
no_deps = False - - if src_dir is not None: - if environments.is_verbose(): - click.echo("Using source directory: {0!r}".format(src_dir)) - repo = requirement.req.get_vcs_repo(src_dir=src_dir) - else: - repo = requirement.req.get_vcs_repo() - write_to_tmpfile = True - line_kwargs["include_markers"] = False - line_kwargs["include_hashes"] = False - if not requirements_dir: - requirements_dir = vistir.path.create_tracked_tempdir(prefix="pipenv", - suffix="requirements") - f = vistir.compat.NamedTemporaryFile( - prefix="pipenv-", suffix="-requirement.txt", dir=requirements_dir, - delete=False - ) - line = "-e " if requirement.editable else "" - if requirement.editable or requirement.name is not None: - name = requirement.name - if requirement.extras: - name = "{0}{1}".format(name, requirement.extras_as_pip) - line = "{0}{1}#egg={2}".format( - line, vistir.path.path_to_url(repo.checkout_directory), requirement.name - ) - if repo.subdirectory: - line = "{0}&subdirectory={1}".format(line, repo.subdirectory) - else: - line = requirement.as_line(**line_kwargs) - if environments.is_verbose(): - click.echo( - "Writing requirement line to temporary file: {0!r}".format(line), - err=True - ) - f.write(vistir.misc.to_bytes(line)) - r = f.name - f.close() - - # Create files for hash mode. 
- if write_to_tmpfile and not r: - if not requirements_dir: - requirements_dir = vistir.path.create_tracked_tempdir( - prefix="pipenv", suffix="requirements") - f = vistir.compat.NamedTemporaryFile( - prefix="pipenv-", suffix="-requirement.txt", dir=requirements_dir, - delete=False - ) - ignore_hashes = True if not requirement.hashes else ignore_hashes - line = requirement.as_line(include_hashes=not ignore_hashes) - line = "{0} {1}".format(line, " ".join(src)) - if environments.is_verbose(): - click.echo( - "Writing requirement line to temporary file: {0!r}".format(line), - err=True - ) - f.write(vistir.misc.to_bytes(line)) - r = f.name - f.close() - - if (requirement and requirement.editable) and not r: - line_kwargs["include_markers"] = False - line_kwargs["include_hashes"] = False - install_reqs = requirement.as_line(**line_kwargs) - if requirement.editable and install_reqs[0].startswith("-e "): - req, install_reqs = install_reqs[0], install_reqs[1:] - possible_hashes = install_reqs[:] - editable_opt, req = req.split(" ", 1) - install_reqs = [editable_opt, req] + install_reqs - - # hashes must be passed via a file - ignore_hashes = True - elif r: - install_reqs = ["-r", r] - with open(r) as f: - if "--hash" not in f.read(): - ignore_hashes = True - else: - ignore_hashes = True if not requirement.hashes else ignore_hashes - install_reqs = requirement.as_line(as_list=True, include_hashes=not ignore_hashes) - if not requirement.markers: - install_reqs = [escape_cmd(r) for r in install_reqs] - elif len(install_reqs) > 1: - install_reqs = install_reqs[0] + [escape_cmd(r) for r in install_reqs[1:]] + elif requirement.editable and no_deps is None: + no_deps = True pip_command = [which_pip(allow_global=allow_global), "install"] - if pre: - pip_command.append("--pre") - if src: - pip_command.extend(src) - if environments.is_verbose(): - pip_command.append("--verbose") - pip_command.append("--upgrade") - if selective_upgrade: - 
pip_command.append("--upgrade-strategy=only-if-needed") - if no_deps: - pip_command.append("--no-deps") - pip_command.extend(install_reqs) + pip_args = get_pip_args( + pre=pre, verbose=environments.is_verbose(), upgrade=True, + selective_upgrade=selective_upgrade, no_use_pep517=not use_pep517, + no_deps=no_deps, require_hashes=not ignore_hashes + ) + pip_command.extend(pip_args) + if r: + pip_command.extend(["-r", r]) + elif line: + pip_command.extend(line) pip_command.extend(prepare_pip_source_args(sources)) - if not ignore_hashes: - pip_command.append("--require-hashes") - if not use_pep517: - from .vendor.packaging.version import parse as parse_version - pip_command.append("--no-build-isolation") - if project.environment.pip_version >= parse_version("19.0"): - pip_command.append("--no-use-pep517") if environments.is_verbose(): click.echo("$ {0}".format(pip_command), err=True) cache_dir = vistir.compat.Path(PIPENV_CACHE_DIR) + DEFAULT_EXISTS_ACTION = "w" + if selective_upgrade: + DEFAULT_EXISTS_ACTION = "i" + exists_action = vistir.misc.fs_str(PIP_EXISTS_ACTION or DEFAULT_EXISTS_ACTION) pip_config = { "PIP_CACHE_DIR": vistir.misc.fs_str(cache_dir.as_posix()), "PIP_WHEEL_DIR": vistir.misc.fs_str(cache_dir.joinpath("wheels").as_posix()), "PIP_DESTINATION_DIR": vistir.misc.fs_str( cache_dir.joinpath("pkgs").as_posix() ), - "PIP_EXISTS_ACTION": vistir.misc.fs_str(PIP_EXISTS_ACTION or "w"), + "PIP_EXISTS_ACTION": exists_action, "PATH": vistir.misc.fs_str(os.environ.get("PATH")), } - if src: + if src_dir: + if environments.is_verbose(): + click.echo("Using source directory: {0!r}".format(src_dir), err=True) pip_config.update( - {"PIP_SRC": vistir.misc.fs_str(project.virtualenv_src_location)} + {"PIP_SRC": vistir.misc.fs_str(src_dir)} ) cmd = Script.parse(pip_command) pip_command = cmd.cmdify() c = None - # with project.environment.activated(): c = delegator.run(pip_command, block=block, env=pip_config) return c @@ -2091,57 +2107,60 @@ def do_install( pkg_requirement = 
Requirement.from_line(pkg_line) except ValueError as e: sp.write_err(vistir.compat.fs_str("{0}: {1}".format(crayons.red("WARNING"), e))) - sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Installation Failed")) + sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Installation Failed")) sys.exit(1) if index_url: pkg_requirement.index = index_url - deps = [] - if pkg_requirement.is_vcs and PIPENV_RESOLVE_VCS: - deps = pkg_requirement.req.dependencies - to_install = [pkg_requirement,] + # deps = [] + # if pkg_requirement.is_vcs and PIPENV_RESOLVE_VCS: + # if not allow_global and ( + # pkg_requirement.line_instance and pkg_requirement.line_instance.wheel_kwargs + # ): + # pkg_requirement.line_instance._wheel_kwargs["src_dir"] = project.virtualenv_src_location + # pkg_setupinfo = pkg_requirement.line_instance.setup_info + # deps = pkg_setupinfo.requires no_deps = False sp.text = "Installing..." try: - if deps: - to_install.extend([ - Requirement.from_line(d) for d in list(deps[0].values()) - ]) - no_deps = True - for dep in to_install: - sp.text = "Installing {0}...".format(dep.name) - c = pip_install( - dep, - ignore_hashes=True, - allow_global=system, - selective_upgrade=selective_upgrade, - no_deps=no_deps, - pre=pre, - requirements_dir=requirements_directory, - index=index_url, - extra_indexes=extra_index_url, - pypi_mirror=pypi_mirror, - ) - if not c.ok: - sp.write_err(vistir.compat.fs_str( - "{0}: {1}".format( - crayons.red("WARNING"), - "Failed installing package {0}".format(pkg_line) - ), - )) - sp.write_err(vistir.compat.fs_str( - "Error text: {0}".format(c.out) - )) - raise RuntimeError(c.err) - if environments.is_verbose(): - click.echo(crayons.blue(format_pip_output(c.out))) + sp.text = "Installing {0}...".format(pkg_requirement.name) + if environments.is_verbose(): + sp.hide_and_write("Installing package: {0}".format(pkg_requirement.as_line(include_hashes=False))) + c = pip_install( + pkg_requirement, + ignore_hashes=True, + allow_global=system, 
+ selective_upgrade=selective_upgrade, + no_deps=no_deps, + pre=pre, + requirements_dir=requirements_directory, + index=index_url, + extra_indexes=extra_index_url, + pypi_mirror=pypi_mirror, + ) + if not c.ok: + sp.write_err(vistir.compat.fs_str( + "{0}: {1}".format( + crayons.red("WARNING"), + "Failed installing package {0}".format(pkg_line) + ), + )) + sp.write_err(vistir.compat.fs_str( + "Error text: {0}".format(c.out) + )) + sp.write_err(vistir.compat.fs_str( + "{0}".format(c.err) + )) + raise RuntimeError(c.err) + if environments.is_verbose(): + click.echo(crayons.blue(format_pip_output(c.out))) except (ValueError, RuntimeError) as e: sp.write_err(vistir.compat.fs_str( "{0}: {1}".format(crayons.red("WARNING"), e), )) - sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format( + sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format( "Installation Failed", )) - sys.exit(1) + # sys.exit(1) # Warn if --editable wasn't passed. if pkg_requirement.is_vcs and not pkg_requirement.editable and not PIPENV_RESOLVE_VCS: sp.write_err( @@ -2182,7 +2201,7 @@ def do_install( # Add the package to the Pipfile. 
try: project.add_package_to_pipfile(pkg_requirement, dev) - except ValueError as e: + except ValueError: import traceback sp.write_err( "{0} {1}".format( diff --git a/pipenv/utils.py b/pipenv/utils.py index 18c7dd0142..cfda803a28 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -965,10 +965,6 @@ def actually_resolve_deps( ): from pipenv.vendor.vistir.path import create_tracked_tempdir from pipenv.vendor.requirementslib.models.requirements import Requirement - import pipenv.patched.piptools.logging - - if environments.is_verbose(): - pipenv.patched.piptools.logging.log.verbosity = 1 if not req_dir: req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-") @@ -1022,17 +1018,21 @@ def resolve(cmd, sp): result = None try: result = c.expect(u"\n", timeout=environments.PIPENV_INSTALL_TIMEOUT) - except (EOF, TIMEOUT): + except TIMEOUT: pass - _out = c.subprocess.before - if _out: - _out = decode_output("{0}\n".format(_out)) + except EOF: + break + except KeyboardInterrupt: + c.kill() + break + if result: + _out = c.subprocess.before + _out = decode_output("{0}".format(_out)) out += _out - sp.text = to_native_string("{0}".format(_out[:100])) + # sp.text = to_native_string("{0}".format(_out[:100])) if environments.is_verbose(): - sp.hide_and_write(_out.rstrip()) - _out = to_native_string("") - if not result and not _out: + sp.hide_and_write(out.splitlines()[-1].rstrip()) + else: break c.block() if c.return_code != 0: @@ -1042,11 +1042,9 @@ def resolve(cmd, sp): echo(c.out.strip(), err=True) if not environments.is_verbose(): echo(out, err=True) - echo(c.err.strip(), err=True) sys.exit(c.return_code) if environments.is_verbose(): - for ln in c.err.strip(): - sp.hide_and_write(ln) + echo(c.err.strip(), err=True) return c diff --git a/pytest.ini b/pytest.ini index 61b492e331..8bbd020839 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,6 @@ [pytest] addopts = -ra -n auto +plugins = pytest_devpi_server xdist testpaths = tests ; Add vendor and patched in 
addition to the default list of ignored dirs ; Additionally, ignore tasks, news, test subdirectories and peeps directory diff --git a/setup.py b/setup.py index d262eb354d..178b3d3a91 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ "parver", "invoke", ], - "tests": ["pytest", "pytest-tap", "pytest-xdist", "flaky", "mock"], + "tests": ["pytest", "pytest-devpi-server", "pytest-tap", "pytest-xdist", "flaky", "mock"], } # https://pypi.python.org/pypi/stdeb/0.8.5#quickstart-2-just-tell-me-the-fastest-way-to-make-a-deb diff --git a/tasks/release.py b/tasks/release.py index dc76a5f175..375d73020c 100644 --- a/tasks/release.py +++ b/tasks/release.py @@ -129,7 +129,7 @@ def build_dists(ctx): log('Building sdist using %s ....' % executable) os.environ["PIPENV_PYTHON"] = py_version ctx.run('pipenv install --dev', env=env) - ctx.run('pipenv run pip install -e . --upgrade --upgrade-strategy=eager --no-use-pep517', env=env) + ctx.run('pipenv run pip install -e . --upgrade --upgrade-strategy=eager', env=env) log('Building wheel using python %s ....' 
% py_version) if py_version == '3.6': ctx.run('pipenv run python setup.py sdist bdist_wheel', env=env) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 0f20261483..e7c720b391 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -7,21 +7,23 @@ import sys import warnings -from shutil import rmtree as _rmtree +from shutil import copyfileobj, rmtree as _rmtree import pytest +import requests -from vistir.compat import ResourceWarning, fs_str, fs_encode, FileNotFoundError, PermissionError, TemporaryDirectory -from vistir.misc import run -from vistir.contextmanagers import temp_environ -from vistir.path import mkdir_p, create_tracked_tempdir, handle_remove_readonly +from pipenv.vendor.vistir.compat import ResourceWarning, fs_str, fs_encode, FileNotFoundError, PermissionError, TemporaryDirectory +from pipenv.vendor.vistir.misc import run +from pipenv.vendor.vistir.contextmanagers import temp_environ, open_file +from pipenv.vendor.vistir.path import mkdir_p, create_tracked_tempdir, handle_remove_readonly from pipenv._compat import Path from pipenv.cmdparse import Script from pipenv.exceptions import VirtualenvActivationException from pipenv.vendor import delegator, requests, toml, tomlkit from pytest_pypi.app import prepare_fixtures -from pytest_pypi.app import prepare_packages as prepare_pypi_packages +from pytest_shutil.workspace import Workspace +from _pytest_devpi_server import DevpiServer warnings.simplefilter("default", category=ResourceWarning) @@ -93,10 +95,21 @@ def check_for_mercurial(): TESTS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, 'pypi') WE_HAVE_HG = check_for_mercurial() -prepare_pypi_packages(PYPI_VENDOR_DIR) prepare_fixtures(os.path.join(PYPI_VENDOR_DIR, "fixtures")) +@pytest.fixture(scope="session") +def pipenv_devpi_server(): + with DevpiServer(offline=False) as server: + server.start() + server.api("index", "-c", "pipenv", 
"bases=root/pypi", "volatile=False") + server.index = "pipenv" + for path in Path(PYPI_VENDOR_DIR).iterdir(): + if path.is_dir(): + server.api("upload", "--from-dir", path.as_posix()) + yield server + + def pytest_runtest_setup(item): if item.get_closest_marker('needs_internet') is not None and not WE_HAVE_INTERNET: pytest.skip('requires internet') @@ -257,11 +270,12 @@ def get_url(cls, pkg=None, filename=None): file_path = os.path.join(pkg, filename) if filename and not pkg: pkg = os.path.basename(filename) - if pypi: + fixture_pypi = os.getenv("ARTIFACT_PYPI_URL") + if fixture_pypi: if pkg and not filename: - url = "{0}/artifacts/{1}".format(pypi, pkg) + url = "{0}/artifacts/{1}".format(fixture_pypi, pkg) else: - url = "{0}/artifacts/{1}/{2}".format(pypi, pkg, filename) + url = "{0}/artifacts/{1}/{2}".format(fixture_pypi, pkg, filename) return url if pkg and not filename: return cls.get_fixture_path(file_path).as_uri() @@ -273,7 +287,13 @@ def __init__( self, pypi=None, pipfile=True, chdir=False, path=None, home_dir=None, venv_root=None, ignore_virtualenvs=True, venv_in_project=True, name=None ): - self.pypi = pypi + self.index_url = os.getenv("PIPENV_TEST_INDEX") + self.pypi = None + if pypi: + self.pypi = pypi.url + elif self.index_url is not None: + self.pypi, _, _ = self.index_url.rpartition("/") if self.index_url else "" + self.index = os.getenv("PIPENV_PYPI_INDEX") os.environ["PYTHONWARNINGS"] = "ignore:DEPRECATION" if ignore_virtualenvs: os.environ["PIPENV_IGNORE_VIRTUALENVS"] = fs_str("1") @@ -312,8 +332,9 @@ def __init__( self.chdir = chdir if self.pypi: - os.environ['PIPENV_PYPI_URL'] = fs_str('{0}'.format(self.pypi.url)) - os.environ['PIPENV_TEST_INDEX'] = fs_str('{0}/simple'.format(self.pypi.url)) + os.environ['PIPENV_PYPI_URL'] = fs_str('{0}'.format(self.pypi)) + # os.environ['PIPENV_PYPI_URL'] = fs_str('{0}'.format(self.pypi.url)) + # os.environ['PIPENV_TEST_INDEX'] = fs_str('{0}/simple'.format(self.pypi.url)) if pipfile: p_path = 
os.sep.join([self.path, 'Pipfile']) @@ -401,13 +422,28 @@ def _rmtree_func(path, ignore_errors=True, onerror=None): @pytest.fixture() -def PipenvInstance(monkeypatch): +def pip_src_dir(request, vistir_tmpdir): + old_src_dir = os.environ.get('PIP_SRC', '') + os.environ['PIP_SRC'] = vistir_tmpdir.as_posix() + + def finalize(): + os.environ['PIP_SRC'] = fs_str(old_src_dir) + + request.addfinalizer(finalize) + return request + + +@pytest.fixture() +def PipenvInstance(pip_src_dir, monkeypatch, pipenv_devpi_server, pypi): with temp_environ(), monkeypatch.context() as m: m.setattr(shutil, "rmtree", _rmtree_func) original_umask = os.umask(0o007) os.environ["PIPENV_NOSPIN"] = fs_str("1") os.environ["CI"] = fs_str("1") os.environ['PIPENV_DONT_USE_PYENV'] = fs_str('1') + os.environ["PIPENV_TEST_INDEX"] = "{0}/{1}/{2}/+simple".format(pipenv_devpi_server.uri, pipenv_devpi_server.user, pipenv_devpi_server.index) + os.environ["PIPENV_PYPI_INDEX"] = pipenv_devpi_server.index + os.environ["ARTIFACT_PYPI_URL"] = pypi.url warnings.simplefilter("ignore", category=ResourceWarning) warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*") try: @@ -416,16 +452,20 @@ def PipenvInstance(monkeypatch): os.umask(original_umask) -@pytest.fixture(autouse=True) -def pip_src_dir(request, vistir_tmpdir): - old_src_dir = os.environ.get('PIP_SRC', '') - os.environ['PIP_SRC'] = vistir_tmpdir.as_posix() - - def finalize(): - os.environ['PIP_SRC'] = fs_str(old_src_dir) - - request.addfinalizer(finalize) - return request +@pytest.fixture() +def PipenvInstance_NoPyPI(monkeypatch, pip_src_dir): + with temp_environ(), monkeypatch.context() as m: + m.setattr(shutil, "rmtree", _rmtree_func) + original_umask = os.umask(0o007) + os.environ["PIPENV_NOSPIN"] = fs_str("1") + os.environ["CI"] = fs_str("1") + os.environ['PIPENV_DONT_USE_PYENV'] = fs_str('1') + warnings.simplefilter("ignore", category=ResourceWarning) + warnings.filterwarnings("ignore", category=ResourceWarning, 
message="unclosed.*") + try: + yield _PipenvInstance + finally: + os.umask(original_umask) @pytest.fixture() diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index 6153658dad..a682d0dda2 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -14,8 +14,8 @@ @pytest.mark.cli -def test_pipenv_where(PipenvInstance, pypi_secure): - with PipenvInstance(pypi=pypi_secure) as p: +def test_pipenv_where(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv("--where") assert c.ok assert normalize_drive(p.path) in c.out @@ -82,8 +82,8 @@ def test_pipenv_rm(PipenvInstance): @pytest.mark.cli -def test_pipenv_graph(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_pipenv_graph(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv('install requests') assert c.ok graph = p.pipenv("graph") @@ -98,8 +98,8 @@ def test_pipenv_graph(PipenvInstance, pypi): @pytest.mark.cli -def test_pipenv_graph_reverse(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_pipenv_graph_reverse(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv('install requests==2.18.4') assert c.ok c = p.pipenv('graph --reverse') @@ -128,8 +128,8 @@ def test_pipenv_graph_reverse(PipenvInstance, pypi): @pytest.mark.cli @pytest.mark.needs_internet(reason='required by check') @flaky -def test_pipenv_check(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_pipenv_check(PipenvInstance): + with PipenvInstance() as p: p.pipenv('install requests==1.0.0') c = p.pipenv('check') assert c.return_code != 0 @@ -197,8 +197,8 @@ def test_man(PipenvInstance): @pytest.mark.cli -def test_install_parse_error(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_install_parse_error(PipenvInstance): + with PipenvInstance() as p: # Make sure unparseable packages don't wind up in the pipfile # Escape $ for shell input @@ -219,21 +219,21 @@ def test_install_parse_error(PipenvInstance, pypi): 
@pytest.mark.unused @pytest.mark.skip_osx @pytest.mark.needs_internet(reason='required by check') -def test_check_unused(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_check_unused(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open('__init__.py', 'w') as f: contents = """ -import tablib +import fake_package import records import flask """.strip() f.write(contents) - p.pipenv('install requests tablib flask') + p.pipenv('install requests fake_package flask') - assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'tablib', 'flask']) + assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'fake_package', 'flask']) c = p.pipenv('check --unused .') - assert 'tablib' not in c.out + assert 'fake_package' not in c.out assert 'flask' not in c.out diff --git a/tests/integration/test_dot_venv.py b/tests/integration/test_dot_venv.py index 840560f910..aa52dd5eab 100644 --- a/tests/integration/test_dot_venv.py +++ b/tests/integration/test_dot_venv.py @@ -11,10 +11,10 @@ @pytest.mark.dotvenv -def test_venv_in_project(PipenvInstance, pypi): +def test_venv_in_project(PipenvInstance): with temp_environ(): os.environ['PIPENV_VENV_IN_PROJECT'] = '1' - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: c = p.pipenv('install requests') assert c.return_code == 0 assert normalize_drive(p.path) in p.pipenv('--venv').out @@ -36,8 +36,8 @@ def test_venv_at_project_root(PipenvInstance): @pytest.mark.dotvenv -def test_reuse_previous_venv(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_reuse_previous_venv(PipenvInstance): + with PipenvInstance(chdir=True) as p: os.mkdir('.venv') c = p.pipenv('install requests') assert c.return_code == 0 @@ -46,11 +46,11 @@ def test_reuse_previous_venv(PipenvInstance, pypi): @pytest.mark.dotvenv @pytest.mark.parametrize('venv_name', ('test-venv', os.path.join('foo', 'test-venv'))) -def test_venv_file(venv_name, PipenvInstance, pypi): +def 
test_venv_file(venv_name, PipenvInstance): """Tests virtualenv creation when a .venv file exists at the project root and contains a venv name. """ - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: file_path = os.path.join(p.path, '.venv') with open(file_path, 'w') as f: f.write(venv_name) @@ -79,11 +79,11 @@ def test_venv_file(venv_name, PipenvInstance, pypi): @pytest.mark.dotvenv -def test_venv_file_with_path(PipenvInstance, pypi): +def test_venv_file_with_path(PipenvInstance): """Tests virtualenv creation when a .venv file exists at the project root and contains an absolute path. """ - with temp_environ(), PipenvInstance(chdir=True, pypi=pypi) as p: + with temp_environ(), PipenvInstance(chdir=True) as p: with TemporaryDirectory( prefix='pipenv-', suffix='-test_venv' ) as venv_path: diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index ea6efb3ed2..4fb936b9dd 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -14,8 +14,8 @@ @pytest.mark.install @pytest.mark.setup -def test_basic_setup(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_basic_setup(PipenvInstance): + with PipenvInstance() as p: with PipenvInstance(pipfile=False) as p: c = p.pipenv("install requests") assert c.return_code == 0 @@ -31,8 +31,8 @@ def test_basic_setup(PipenvInstance, pypi): @flaky @pytest.mark.install @pytest.mark.skip_osx -def test_basic_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_basic_install(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv("install requests") assert c.return_code == 0 assert "requests" in p.pipfile["packages"] @@ -45,8 +45,8 @@ def test_basic_install(PipenvInstance, pypi): @flaky @pytest.mark.install -def test_mirror_install(PipenvInstance, pypi): - with temp_environ(), PipenvInstance(chdir=True, pypi=pypi) as p: +def test_mirror_install(PipenvInstance): + with 
temp_environ(), PipenvInstance(chdir=True) as p: mirror_url = os.environ.pop( "PIPENV_TEST_INDEX", "https://pypi.python.org/simple" ) @@ -72,7 +72,7 @@ def test_mirror_install(PipenvInstance, pypi): @flaky @pytest.mark.install @pytest.mark.needs_internet -def test_bad_mirror_install(PipenvInstance, pypi): +def test_bad_mirror_install(PipenvInstance): with temp_environ(), PipenvInstance(chdir=True) as p: # This demonstrates that the mirror parameter is being used os.environ.pop("PIPENV_TEST_INDEX", None) @@ -83,8 +83,8 @@ def test_bad_mirror_install(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.complex @pytest.mark.skip(reason="Does not work unless you can explicitly install into py2") -def test_complex_lock(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_complex_lock(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv("install apscheduler") assert c.return_code == 0 assert "apscheduler" in p.pipfile["packages"] @@ -95,8 +95,8 @@ def test_complex_lock(PipenvInstance, pypi): @flaky @pytest.mark.dev @pytest.mark.run -def test_basic_dev_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_basic_dev_install(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv("install requests --dev") assert c.return_code == 0 assert "requests" in p.pipfile["dev-packages"] @@ -113,9 +113,9 @@ def test_basic_dev_install(PipenvInstance, pypi): @flaky @pytest.mark.dev @pytest.mark.install -def test_install_without_dev(PipenvInstance, pypi): +def test_install_without_dev(PipenvInstance): """Ensure that running `pipenv install` doesn't install dev packages""" - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, "w") as f: contents = """ [packages] @@ -139,8 +139,8 @@ def test_install_without_dev(PipenvInstance, pypi): @flaky @pytest.mark.install -def test_install_without_dev_section(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def 
test_install_without_dev_section(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, "w") as f: contents = """ [packages] @@ -160,8 +160,8 @@ def test_install_without_dev_section(PipenvInstance, pypi): @flaky @pytest.mark.extras @pytest.mark.install -def test_extras_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_extras_install(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv("install requests[socks]") assert c.return_code == 0 assert "requests" in p.pipfile["packages"] @@ -177,8 +177,8 @@ def test_extras_install(PipenvInstance, pypi): @flaky @pytest.mark.pin @pytest.mark.install -def test_windows_pinned_pipfile(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_windows_pinned_pipfile(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, "w") as f: contents = """ [packages] @@ -195,8 +195,8 @@ def test_windows_pinned_pipfile(PipenvInstance, pypi): @pytest.mark.install @pytest.mark.resolver @pytest.mark.backup_resolver -def test_backup_resolver(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_backup_resolver(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, "w") as f: contents = """ [packages] @@ -212,8 +212,8 @@ def test_backup_resolver(PipenvInstance, pypi): @flaky @pytest.mark.run @pytest.mark.alt -def test_alternative_version_specifier(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_alternative_version_specifier(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, "w") as f: contents = """ [packages] @@ -237,8 +237,8 @@ def test_alternative_version_specifier(PipenvInstance, pypi): @flaky @pytest.mark.run @pytest.mark.alt -def test_outline_table_specifier(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_outline_table_specifier(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, "w") as f: contents = """ 
[packages.requests] @@ -261,8 +261,8 @@ def test_outline_table_specifier(PipenvInstance, pypi): @pytest.mark.bad @pytest.mark.install -def test_bad_packages(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_bad_packages(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv("install NotAPackage") assert c.return_code > 0 @@ -271,9 +271,9 @@ def test_bad_packages(PipenvInstance, pypi): @pytest.mark.install @pytest.mark.requirements @pytest.mark.skip(reason="Not mocking this.") -def test_requirements_to_pipfile(PipenvInstance, pypi): +def test_requirements_to_pipfile(PipenvInstance): - with PipenvInstance(pipfile=False, chdir=True, pypi=pypi) as p: + with PipenvInstance(pipfile=False, chdir=True) as p: # Write a requirements file with open("requirements.txt", "w") as f: @@ -300,13 +300,13 @@ def test_requirements_to_pipfile(PipenvInstance, pypi): @pytest.mark.install @pytest.mark.skip_osx @pytest.mark.requirements -def test_skip_requirements_when_pipfile(PipenvInstance, pypi): +def test_skip_requirements_when_pipfile(PipenvInstance): """Ensure requirements.txt is NOT imported when 1. We do `pipenv install [package]` 2. A Pipfile already exists when we run `pipenv install`. 
""" - with PipenvInstance(chdir=True, pypi=pypi) as p: + with PipenvInstance(chdir=True) as p: with open("requirements.txt", "w") as f: f.write("requests==2.18.1\n") c = p.pipenv("install six") @@ -315,13 +315,13 @@ def test_skip_requirements_when_pipfile(PipenvInstance, pypi): contents = """ [packages] six = "*" -tablib = "<0.12" +"fake_package" = "<0.12" """.strip() f.write(contents) c = p.pipenv("install") assert c.ok - assert "tablib" in p.pipfile["packages"] - assert "tablib" in p.lockfile["default"] + assert "fake_package" in p.pipfile["packages"] + assert "fake_package" in p.lockfile["default"] assert "six" in p.pipfile["packages"] assert "six" in p.lockfile["default"] assert "requests" not in p.pipfile["packages"] @@ -330,24 +330,25 @@ def test_skip_requirements_when_pipfile(PipenvInstance, pypi): @pytest.mark.cli @pytest.mark.clean -def test_clean_on_empty_venv(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_clean_on_empty_venv(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv("clean") assert c.return_code == 0 @pytest.mark.install -def test_install_does_not_extrapolate_environ(PipenvInstance, pypi): +def test_install_does_not_extrapolate_environ(PipenvInstance): """Ensure environment variables are not expanded in lock file. """ - with temp_environ(), PipenvInstance(pypi=pypi, chdir=True) as p: - os.environ["PYPI_URL"] = pypi.url + with temp_environ(), PipenvInstance(chdir=True) as p: + # os.environ["PYPI_URL"] = pypi.url + os.environ["PYPI_URL"] = p.pypi with open(p.pipfile_path, "w") as f: f.write( """ [[source]] -url = '${PYPI_URL}/simple' +url = '${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple' verify_ssl = true name = 'mockpi' """ @@ -356,14 +357,14 @@ def test_install_does_not_extrapolate_environ(PipenvInstance, pypi): # Ensure simple install does not extrapolate. 
c = p.pipenv("install") assert c.return_code == 0 - assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/simple" - assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/simple" + assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" + assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" # Ensure package install does not extrapolate. c = p.pipenv("install six") assert c.return_code == 0 - assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/simple" - assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/simple" + assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" + assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" @pytest.mark.editable @@ -378,10 +379,10 @@ def test_editable_no_args(PipenvInstance): @pytest.mark.install @pytest.mark.virtualenv -def test_install_venv_project_directory(PipenvInstance, pypi): +def test_install_venv_project_directory(PipenvInstance): """Test the project functionality during virtualenv creation. 
""" - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: with temp_environ(), TemporaryDirectory( prefix="pipenv-", suffix="temp_workon_home" ) as workon_home: @@ -402,8 +403,8 @@ def test_install_venv_project_directory(PipenvInstance, pypi): @pytest.mark.deploy @pytest.mark.system -def test_system_and_deploy_work(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_system_and_deploy_work(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv("install six requests") assert c.return_code == 0 c = p.pipenv("--rm") @@ -438,24 +439,24 @@ def test_install_creates_pipfile(PipenvInstance): @pytest.mark.install -def test_install_non_exist_dep(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_install_non_exist_dep(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv("install dateutil") assert not c.ok assert "dateutil" not in p.pipfile["packages"] @pytest.mark.install -def test_install_package_with_dots(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_install_package_with_dots(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv("install backports.html") assert c.ok assert "backports.html" in p.pipfile["packages"] @pytest.mark.install -def test_rewrite_outline_table(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_rewrite_outline_table(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index 87770ed778..379731a417 100644 --- a/tests/integration/test_install_markers.py +++ b/tests/integration/test_install_markers.py @@ -12,33 +12,34 @@ from pipenv.utils import temp_environ -@pytest.mark.markers @flaky -def test_package_environment_markers(PipenvInstance, pypi): +@pytest.mark.markers +def 
test_package_environment_markers(PipenvInstance): - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] -tablib = {version = "*", markers="os_name=='splashwear'"} +fake_package = {version = "*", markers="os_name=='splashwear'"} """.strip() f.write(contents) c = p.pipenv('install') assert c.return_code == 0 assert 'Ignoring' in c.out - assert 'markers' in p.lockfile['default']['tablib'], p.lockfile["default"]["tablib"] + assert 'markers' in p.lockfile['default']['fake_package'], p.lockfile["default"]["fake_package"] - c = p.pipenv('run python -c "import tablib;"') + c = p.pipenv('run python -c "import fake_package;"') assert c.return_code == 1 -@pytest.mark.markers + @flaky -def test_platform_python_implementation_marker(PipenvInstance, pypi): +@pytest.mark.markers +def test_platform_python_implementation_marker(PipenvInstance): """Markers should be converted during locking to help users who input this incorrectly. 
""" - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -57,17 +58,17 @@ def test_platform_python_implementation_marker(PipenvInstance, pypi): "platform_python_implementation == 'CPython'" +@flaky @pytest.mark.run @pytest.mark.alt @pytest.mark.install -@flaky -def test_specific_package_environment_markers(PipenvInstance, pypi): +def test_specific_package_environment_markers(PipenvInstance): - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] -tablib = {version = "*", os_name = "== 'splashwear'"} +fake_package = {version = "*", os_name = "== 'splashwear'"} """.strip() f.write(contents) @@ -75,18 +76,18 @@ def test_specific_package_environment_markers(PipenvInstance, pypi): assert c.return_code == 0 assert 'Ignoring' in c.out - assert 'markers' in p.lockfile['default']['tablib'] + assert 'markers' in p.lockfile['default']['fake_package'] - c = p.pipenv('run python -c "import tablib;"') + c = p.pipenv('run python -c "import fake_package;"') assert c.return_code == 1 -@pytest.mark.markers @flaky -def test_top_level_overrides_environment_markers(PipenvInstance, pypi): +@pytest.mark.markers +def test_top_level_overrides_environment_markers(PipenvInstance): """Top-level environment markers should take precedence. """ - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -101,17 +102,17 @@ def test_top_level_overrides_environment_markers(PipenvInstance, pypi): assert p.lockfile['default']['funcsigs']['markers'] == "os_name == 'splashwear'", p.lockfile['default']['funcsigs'] +@flaky @pytest.mark.markers @pytest.mark.install -@flaky -def test_global_overrides_environment_markers(PipenvInstance, pypi): +def test_global_overrides_environment_markers(PipenvInstance): """Empty (unconditional) dependency should take precedence. 
If a dependency is specified without environment markers, it should override dependencies with environment markers. In this example, APScheduler requires funcsigs only on Python 2, but since funcsigs is also specified as an unconditional dep, its markers should be empty. """ - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -126,12 +127,12 @@ def test_global_overrides_environment_markers(PipenvInstance, pypi): assert p.lockfile['default']['funcsigs'].get('markers', '') == '' +@flaky @pytest.mark.lock @pytest.mark.complex @pytest.mark.py3_only @pytest.mark.lte_py36 -@flaky -def test_resolver_unique_markers(PipenvInstance, pypi): +def test_resolver_unique_markers(PipenvInstance): """vcrpy has a dependency on `yarl` which comes with a marker of 'python version in "3.4, 3.5, 3.6" - this marker duplicates itself: @@ -139,7 +140,7 @@ def test_resolver_unique_markers(PipenvInstance, pypi): This verifies that we clean that successfully. 
""" - with PipenvInstance(chdir=True, pypi=pypi) as p: + with PipenvInstance(chdir=True) as p: c = p.pipenv('install vcrpy==2.0.1') assert c.return_code == 0 c = p.pipenv('lock') @@ -151,10 +152,10 @@ def test_resolver_unique_markers(PipenvInstance, pypi): assert yarl['markers'] in ["python_version in '3.4, 3.5, 3.6'", "python_version >= '3.4'"] -@pytest.mark.project @flaky -def test_environment_variable_value_does_not_change_hash(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +@pytest.mark.project +def test_environment_variable_value_does_not_change_hash(PipenvInstance): + with PipenvInstance(chdir=True) as p: with temp_environ(): with open(p.pipfile_path, 'w') as f: f.write(""" diff --git a/tests/integration/test_install_twists.py b/tests/integration/test_install_twists.py index a231c17d5c..44973df5a3 100644 --- a/tests/integration/test_install_twists.py +++ b/tests/integration/test_install_twists.py @@ -17,10 +17,10 @@ @pytest.mark.extras @pytest.mark.install @pytest.mark.local -def test_local_extras_install(PipenvInstance, pypi): +def test_local_extras_install(PipenvInstance): """Ensure -e .[extras] installs. """ - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: setup_py = os.path.join(p.path, "setup.py") with open(setup_py, "w") as fh: contents = """ @@ -102,10 +102,10 @@ def helper_dependency_links_install_test(pipenv_instance, deplink): assert "version" in pipenv_instance.lockfile["default"]["test-private-dependency"] assert "0.1" in pipenv_instance.lockfile["default"]["test-private-dependency"]["version"] - def test_https_dependency_links_install(self, PipenvInstance, pypi): + def test_https_dependency_links_install(self, PipenvInstance): """Ensure dependency_links are parsed and installed (needed for private repo dependencies). 
""" - with temp_environ(), PipenvInstance(pypi=pypi, chdir=True) as p: + with temp_environ(), PipenvInstance(chdir=True) as p: os.environ["PIP_NO_BUILD_ISOLATION"] = '1' TestDirectDependencies.helper_dependency_links_install_test( p, @@ -113,8 +113,8 @@ def test_https_dependency_links_install(self, PipenvInstance, pypi): ) @pytest.mark.needs_github_ssh - def test_ssh_dependency_links_install(self, PipenvInstance, pypi): - with temp_environ(), PipenvInstance(pypi=pypi, chdir=True) as p: + def test_ssh_dependency_links_install(self, PipenvInstance): + with temp_environ(), PipenvInstance(chdir=True) as p: os.environ['PIP_PROCESS_DEPENDENCY_LINKS'] = '1' os.environ["PIP_NO_BUILD_ISOLATION"] = '1' TestDirectDependencies.helper_dependency_links_install_test( @@ -140,11 +140,11 @@ def test_e_dot(PipenvInstance, pip_src_dir): @pytest.mark.install @flaky -def test_multiprocess_bug_and_install(PipenvInstance, pypi): +def test_multiprocess_bug_and_install(PipenvInstance): with temp_environ(): os.environ["PIPENV_MAX_SUBPROCESS"] = "2" - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, "w") as f: contents = """ [packages] @@ -168,9 +168,9 @@ def test_multiprocess_bug_and_install(PipenvInstance, pypi): @pytest.mark.sequential @pytest.mark.install @flaky -def test_sequential_mode(PipenvInstance, pypi): +def test_sequential_mode(PipenvInstance): - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, "w") as f: contents = """ [packages] @@ -193,8 +193,8 @@ def test_sequential_mode(PipenvInstance, pypi): @pytest.mark.install @pytest.mark.run -def test_normalize_name_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_normalize_name_install(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, "w") as f: contents = """ # Pre comment @@ -222,18 +222,18 @@ def test_normalize_name_install(PipenvInstance, pypi): 
assert "# Inline comment" in contents +@flaky @pytest.mark.files @pytest.mark.resolver @pytest.mark.eggs -@flaky -def test_local_package(PipenvInstance, pip_src_dir, pypi, testsroot): +def test_local_package(PipenvInstance, pip_src_dir, testsroot): """This test ensures that local packages (directories with a setup.py) installed in editable mode have their dependencies resolved as well""" file_name = "requests-2.19.1.tar.gz" package = "requests-2.19.1" # Not sure where travis/appveyor run tests from source_path = os.path.abspath(os.path.join(testsroot, "test_artifacts", file_name)) - with PipenvInstance(chdir=True, pypi=pypi) as p: + with PipenvInstance(chdir=True) as p: # This tests for a bug when installing a zipfile in the current dir copy_to = os.path.join(p.path, file_name) shutil.copy(source_path, copy_to) @@ -251,12 +251,12 @@ def test_local_package(PipenvInstance, pip_src_dir, pypi, testsroot): @pytest.mark.files @flaky -def test_local_zipfiles(PipenvInstance, pypi, testsroot): +def test_local_zipfiles(PipenvInstance, testsroot): file_name = "requests-2.19.1.tar.gz" # Not sure where travis/appveyor run tests from source_path = os.path.abspath(os.path.join(testsroot, "test_artifacts", file_name)) - with PipenvInstance(chdir=True, pypi=pypi) as p: + with PipenvInstance(chdir=True) as p: # This tests for a bug when installing a zipfile in the current dir shutil.copy(source_path, os.path.join(p.path, file_name)) @@ -276,11 +276,11 @@ def test_local_zipfiles(PipenvInstance, pypi, testsroot): @pytest.mark.files @flaky -def test_relative_paths(PipenvInstance, pypi, testsroot): +def test_relative_paths(PipenvInstance, testsroot): file_name = "requests-2.19.1.tar.gz" source_path = os.path.abspath(os.path.join(testsroot, "test_artifacts", file_name)) - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: artifact_dir = "artifacts" artifact_path = os.path.join(p.path, artifact_dir) mkdir_p(artifact_path) @@ -299,8 +299,8 @@ def 
test_relative_paths(PipenvInstance, pypi, testsroot): @pytest.mark.install @pytest.mark.local_file @flaky -def test_install_local_file_collision(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_install_local_file_collision(PipenvInstance): + with PipenvInstance() as p: target_package = "alembic" fake_file = os.path.join(p.path, target_package) with open(fake_file, "w") as f: @@ -339,7 +339,7 @@ def test_install_local_uri_special_character(PipenvInstance, testsroot): @pytest.mark.files @pytest.mark.install @pytest.mark.run -def test_multiple_editable_packages_should_not_race(PipenvInstance, pypi, testsroot): +def test_multiple_editable_packages_should_not_race(PipenvInstance, testsroot): """Test for a race condition that can occur when installing multiple 'editable' packages at once, and which causes some of them to not be importable. @@ -356,7 +356,7 @@ def test_multiple_editable_packages_should_not_race(PipenvInstance, pypi, testsr [packages] """ - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: for pkg_name in pkgs: source_path = p._pipfile.get_fixture_path("git/{0}/".format(pkg_name)).as_posix() c = delegator.run("git clone {0} ./{1}".format(source_path, pkg_name)) diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index 3edc1e1154..58df07bd02 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -13,8 +13,8 @@ @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_basic_vcs_install(PipenvInstance, pip_src_dir, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_basic_vcs_install(PipenvInstance): # ! 
This is failing + with PipenvInstance(chdir=True) as p: c = p.pipenv("install git+https://github.com/benjaminp/six.git@1.11.0#egg=six") assert c.return_code == 0 # edge case where normal package starts with VCS name shouldn't be flagged as vcs @@ -34,8 +34,8 @@ def test_basic_vcs_install(PipenvInstance, pip_src_dir, pypi): @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_git_vcs_install(PipenvInstance, pip_src_dir, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_git_vcs_install(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv("install git+git://github.com/benjaminp/six.git@1.11.0#egg=six") assert c.return_code == 0 assert "six" in p.pipfile["packages"] @@ -52,8 +52,8 @@ def test_git_vcs_install(PipenvInstance, pip_src_dir, pypi): @pytest.mark.install @pytest.mark.needs_internet @pytest.mark.needs_github_ssh -def test_ssh_vcs_install(PipenvInstance, pip_src_dir, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_ssh_vcs_install(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv("install git+ssh://git@github.com/benjaminp/six.git@1.11.0#egg=six") assert c.return_code == 0 assert "six" in p.pipfile["packages"] @@ -69,8 +69,8 @@ def test_ssh_vcs_install(PipenvInstance, pip_src_dir, pypi): @pytest.mark.urls @pytest.mark.files @pytest.mark.needs_internet -def test_urls_work(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_urls_work(PipenvInstance): + with PipenvInstance(chdir=True) as p: # the library this installs is "django-cms" path = p._pipfile.get_url("django", "3.4.x.zip") c = p.pipenv( @@ -107,10 +107,10 @@ def test_file_urls_work(PipenvInstance, pip_src_dir): @pytest.mark.urls @pytest.mark.files @pytest.mark.needs_internet -def test_local_vcs_urls_work(PipenvInstance, pypi, tmpdir): +def test_local_vcs_urls_work(PipenvInstance, tmpdir): six_dir = tmpdir.join("six") six_path = Path(six_dir.strpath) - with 
PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: c = delegator.run( "git clone https://github.com/benjaminp/six.git {0}".format(six_dir.strpath) ) @@ -125,10 +125,10 @@ def test_local_vcs_urls_work(PipenvInstance, pypi, tmpdir): @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_editable_vcs_install(PipenvInstance, pip_src_dir, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_editable_vcs_install(PipenvInstance_NoPyPI): # ! This is failing + with PipenvInstance_NoPyPI(chdir=True) as p: c = p.pipenv( - "install -e git+https://github.com/requests/requests.git#egg=requests" + "install -e git+https://github.com/kennethreitz/requests.git#egg=requests --verbose" ) assert c.return_code == 0 assert "requests" in p.pipfile["packages"] @@ -145,12 +145,12 @@ def test_editable_vcs_install(PipenvInstance, pip_src_dir, pypi): @pytest.mark.tablib @pytest.mark.install @pytest.mark.needs_internet -def test_install_editable_git_tag(PipenvInstance, pypi): +def test_install_editable_git_tag(PipenvInstance_NoPyPI): # ! This is failing # This uses the real PyPI since we need Internet to access the Git # dependency anyway. 
- with PipenvInstance(pypi=pypi) as p: + with PipenvInstance_NoPyPI(chdir=True) as p: c = p.pipenv( - "install -e git+https://github.com/benjaminp/six.git@1.11.0#egg=six" + "install -e git+https://github.com/benjaminp/six.git@1.11.0#egg=six --verbose" ) assert c.return_code == 0 assert "six" in p.pipfile["packages"] @@ -193,7 +193,7 @@ def test_install_named_index_alias(PipenvInstance): @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_install_local_vcs_not_in_lockfile(PipenvInstance, pip_src_dir): +def test_install_local_vcs_not_in_lockfile(PipenvInstance): with PipenvInstance(chdir=True) as p: # six_path = os.path.join(p.path, "six") six_path = p._pipfile.get_fixture_path("git/six/").as_posix() @@ -209,10 +209,10 @@ def test_install_local_vcs_not_in_lockfile(PipenvInstance, pip_src_dir): @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_get_vcs_refs(PipenvInstance): - with PipenvInstance(chdir=True) as p: +def test_get_vcs_refs(PipenvInstance_NoPyPI): # ! this is failing + with PipenvInstance_NoPyPI(chdir=True) as p: c = p.pipenv( - "install -e git+https://github.com/benjaminp/six.git@1.9.0#egg=six" + "install -e git+https://github.com/benjaminp/six.git@1.9.0#egg=six --verbose" ) assert c.return_code == 0 assert "six" in p.pipfile["packages"] @@ -238,7 +238,7 @@ def test_get_vcs_refs(PipenvInstance): @pytest.mark.install @pytest.mark.needs_internet @pytest.mark.skip_py27_win -def test_vcs_entry_supersedes_non_vcs(PipenvInstance, pip_src_dir): +def test_vcs_entry_supersedes_non_vcs(PipenvInstance): """See issue #2181 -- non-editable VCS dep was specified, but not showing up in the lockfile -- due to not running pip install before locking and not locking the resolution graph of non-editable vcs dependencies. 
@@ -275,8 +275,8 @@ def test_vcs_entry_supersedes_non_vcs(PipenvInstance, pip_src_dir): @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_vcs_can_use_markers(PipenvInstance, pip_src_dir, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_vcs_can_use_markers(PipenvInstance): + with PipenvInstance(chdir=True) as p: path = p._pipfile.get_fixture_path("git/six/.git") p._pipfile.install("six", {"git": "{0}".format(path.as_uri()), "markers": "sys_platform == 'linux'"}) assert "six" in p.pipfile["packages"] diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index f9b208263e..44f68bfe05 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -14,7 +14,7 @@ @pytest.mark.lock @pytest.mark.requirements -def test_lock_handle_eggs(PipenvInstance, pypi): +def test_lock_handle_eggs(PipenvInstance): """Ensure locking works with packages provoding egg formats. """ with PipenvInstance() as p: @@ -31,9 +31,9 @@ def test_lock_handle_eggs(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.requirements -def test_lock_requirements_file(PipenvInstance, pypi): +def test_lock_requirements_file(PipenvInstance): - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -61,9 +61,9 @@ def test_lock_requirements_file(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.keep_outdated -def test_lock_keep_outdated(PipenvInstance, pypi): +def test_lock_keep_outdated(PipenvInstance): - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -99,8 +99,8 @@ def test_lock_keep_outdated(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.keep_outdated -def test_keep_outdated_doesnt_remove_lockfile_entries(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_keep_outdated_doesnt_remove_lockfile_entries(PipenvInstance): + 
with PipenvInstance(chdir=True) as p: p._pipfile.add("requests", "==2.18.4") p._pipfile.add("colorama", {"version": "*", "markers": "os_name=='FakeOS'"}) p.pipenv("install") @@ -112,8 +112,8 @@ def test_keep_outdated_doesnt_remove_lockfile_entries(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.keep_outdated -def test_keep_outdated_doesnt_upgrade_pipfile_pins(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_keep_outdated_doesnt_upgrade_pipfile_pins(PipenvInstance): + with PipenvInstance(chdir=True) as p: p._pipfile.add("urllib3", "==1.21.1") c = p.pipenv("install") assert c.ok @@ -126,8 +126,8 @@ def test_keep_outdated_doesnt_upgrade_pipfile_pins(PipenvInstance, pypi): assert p.lockfile["default"]["urllib3"]["version"] == "==1.21.1" -def test_keep_outdated_keeps_markers_not_removed(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_keep_outdated_keeps_markers_not_removed(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv("install six click") assert c.ok lockfile = Path(p.lockfile_path) @@ -143,8 +143,8 @@ def test_keep_outdated_keeps_markers_not_removed(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.keep_outdated -def test_keep_outdated_doesnt_update_satisfied_constraints(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_keep_outdated_doesnt_update_satisfied_constraints(PipenvInstance): + with PipenvInstance(chdir=True) as p: p._pipfile.add("requests", "==2.18.4") c = p.pipenv("install") assert c.ok @@ -174,7 +174,7 @@ def test_complex_lock_with_vcs_deps(PipenvInstance, pip_src_dir): click = "==6.7" [dev-packages] -requests = {git = "https://github.com/requests/requests.git"} +requests = {git = "https://github.com/kennethreitz/requests.git"} """.strip() f.write(contents) @@ -198,9 +198,9 @@ def test_complex_lock_with_vcs_deps(PipenvInstance, pip_src_dir): @pytest.mark.lock @pytest.mark.requirements -def 
test_lock_with_prereleases(PipenvInstance, pypi): +def test_lock_with_prereleases(PipenvInstance): - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -221,9 +221,9 @@ def test_lock_with_prereleases(PipenvInstance, pypi): @pytest.mark.complex @pytest.mark.needs_internet @flaky -def test_complex_deps_lock_and_install_properly(PipenvInstance, pip_src_dir, pypi): +def test_complex_deps_lock_and_install_properly(PipenvInstance, pip_src_dir): # This uses the real PyPI because Maya has too many dependencies... - with PipenvInstance(chdir=True, pypi=pypi) as p: + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -240,8 +240,8 @@ def test_complex_deps_lock_and_install_properly(PipenvInstance, pip_src_dir, pyp @pytest.mark.lock @pytest.mark.extras -def test_lock_extras_without_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_lock_extras_without_install(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -265,11 +265,11 @@ def test_lock_extras_without_install(PipenvInstance, pypi): @pytest.mark.complex @pytest.mark.needs_internet @pytest.mark.skip(reason='Needs numpy to be mocked') -def test_complex_lock_deep_extras(PipenvInstance, pypi): +def test_complex_lock_deep_extras(PipenvInstance): # records[pandas] requires tablib[pandas] which requires pandas. # This uses the real PyPI; Pandas has too many requirements to mock. 
- with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -387,21 +387,22 @@ def test_private_index_mirror_lock_requirements(PipenvInstance): @pytest.mark.index @pytest.mark.install -def test_lock_updated_source(PipenvInstance, pypi): +def test_lock_updated_source(PipenvInstance): - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] -url = "{url}/${{MY_ENV_VAR}}" +url = "{url}/${{MY_ENV_VAR}}/+simple" [packages] requests = "==2.14.0" - """.strip().format(url=pypi.url) + """.strip().format(url=p.pypi) + # """.strip().format(url=pypi.url) f.write(contents) with temp_environ(): - os.environ['MY_ENV_VAR'] = 'simple' + os.environ['MY_ENV_VAR'] = p.index c = p.pipenv('lock') assert c.return_code == 0 assert 'requests' in p.lockfile['default'] @@ -409,11 +410,12 @@ def test_lock_updated_source(PipenvInstance, pypi): with open(p.pipfile_path, 'w') as f: contents = """ [[source]] -url = "{url}/simple" +url = "{url}" [packages] requests = "==2.14.0" - """.strip().format(url=pypi.url) + """.strip().format(url=p.index_url) + # """.strip().format(url=pypi.url) f.write(contents) c = p.pipenv('lock') @@ -424,12 +426,12 @@ def test_lock_updated_source(PipenvInstance, pypi): @pytest.mark.vcs @pytest.mark.lock @pytest.mark.needs_internet -def test_lock_editable_vcs_without_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_editable_vcs_without_install(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] -requests = {git = "https://github.com/requests/requests.git", ref = "master", editable = true} +requests = {git = "https://github.com/kennethreitz/requests.git", ref = "master", editable = true} """.strip()) c = p.pipenv('lock') assert c.return_code == 0 @@ -443,16 +445,16 @@ def 
test_lock_editable_vcs_without_install(PipenvInstance, pypi): @pytest.mark.vcs @pytest.mark.lock @pytest.mark.needs_internet -def test_lock_editable_vcs_with_ref_in_git(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_editable_vcs_with_ref_in_git(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] -requests = {git = "https://github.com/requests/requests.git@883caaf", editable = true} +requests = {git = "https://github.com/kennethreitz/requests.git@883caaf", editable = true} """.strip()) c = p.pipenv('lock') assert c.return_code == 0 - assert p.lockfile['default']['requests']['git'] == 'https://github.com/requests/requests.git' + assert p.lockfile['default']['requests']['git'] == 'https://github.com/kennethreitz/requests.git' assert p.lockfile['default']['requests']['ref'] == '883caaf145fbe93bd0d208a6b864de9146087312' c = p.pipenv('install') assert c.return_code == 0 @@ -461,16 +463,16 @@ def test_lock_editable_vcs_with_ref_in_git(PipenvInstance, pypi): @pytest.mark.vcs @pytest.mark.lock @pytest.mark.needs_internet -def test_lock_editable_vcs_with_ref(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_editable_vcs_with_ref(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] -requests = {git = "https://github.com/requests/requests.git", ref = "883caaf", editable = true} +requests = {git = "https://github.com/kennethreitz/requests.git", ref = "883caaf", editable = true} """.strip()) c = p.pipenv('lock') assert c.return_code == 0 - assert p.lockfile['default']['requests']['git'] == 'https://github.com/requests/requests.git' + assert p.lockfile['default']['requests']['git'] == 'https://github.com/kennethreitz/requests.git' assert p.lockfile['default']['requests']['ref'] == '883caaf145fbe93bd0d208a6b864de9146087312' c = p.pipenv('install') assert c.return_code == 0 
@@ -480,12 +482,12 @@ def test_lock_editable_vcs_with_ref(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.extras @pytest.mark.needs_internet -def test_lock_editable_vcs_with_extras_without_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_editable_vcs_with_extras_without_install(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] -requests = {git = "https://github.com/requests/requests.git", editable = true, extras = ["socks"]} +requests = {git = "https://github.com/kennethreitz/requests.git", editable = true, extras = ["socks"]} """.strip()) c = p.pipenv('lock') assert c.return_code == 0 @@ -500,12 +502,12 @@ def test_lock_editable_vcs_with_extras_without_install(PipenvInstance, pypi): @pytest.mark.vcs @pytest.mark.lock @pytest.mark.needs_internet -def test_lock_editable_vcs_with_markers_without_install(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_editable_vcs_with_markers_without_install(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] -requests = {git = "https://github.com/requests/requests.git", ref = "master", editable = true, markers = "python_version >= '2.6'"} +requests = {git = "https://github.com/kennethreitz/requests.git", ref = "master", editable = true, markers = "python_version >= '2.6'"} """.strip()) c = p.pipenv('lock') assert c.return_code == 0 @@ -518,8 +520,8 @@ def test_lock_editable_vcs_with_markers_without_install(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.skip(reason="This doesn't work for some reason.") -def test_lock_respecting_python_version(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_respecting_python_version(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] @@ -562,8 +564,8 @@ def 
test_lockfile_with_empty_dict(PipenvInstance): @pytest.mark.lock @pytest.mark.install @pytest.mark.skip_lock -def test_lock_with_incomplete_source(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_with_incomplete_source(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [[source]] @@ -581,8 +583,8 @@ def test_lock_with_incomplete_source(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.install -def test_lock_no_warnings(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_no_warnings(PipenvInstance): + with PipenvInstance(chdir=True) as p: os.environ["PYTHONWARNINGS"] = str("once") c = p.pipenv("install six") assert c.return_code == 0 @@ -596,7 +598,7 @@ def test_lock_no_warnings(PipenvInstance, pypi): @pytest.mark.lock @pytest.mark.install @pytest.mark.skipif(sys.version_info >= (3, 5), reason="scandir doesn't get installed on python 3.5+") -def test_lock_missing_cache_entries_gets_all_hashes(PipenvInstance, pypi, tmpdir): +def test_lock_missing_cache_entries_gets_all_hashes(PipenvInstance, tmpdir): """ Test locking pathlib2 on python2.7 which needs `scandir`, but fails to resolve when using a fresh dependency cache. 
@@ -604,7 +606,7 @@ def test_lock_missing_cache_entries_gets_all_hashes(PipenvInstance, pypi, tmpdir with temp_environ(): os.environ["PIPENV_CACHE_DIR"] = str(tmpdir.strpath) - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: p._pipfile.add("pathlib2", "*") assert "pathlib2" in p.pipfile["packages"] c = p.pipenv("install") @@ -619,10 +621,10 @@ def test_lock_missing_cache_entries_gets_all_hashes(PipenvInstance, pypi, tmpdir @pytest.mark.vcs @pytest.mark.lock -def test_vcs_lock_respects_top_level_pins(PipenvInstance, pypi): +def test_vcs_lock_respects_top_level_pins(PipenvInstance): """Test that locking VCS dependencies respects top level packages pinned in Pipfiles""" - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: requests_uri = p._pipfile.get_fixture_path("git/requests").as_uri() p._pipfile.add("requests", { "editable": True, "git": "{0}".format(requests_uri), @@ -638,8 +640,8 @@ def test_vcs_lock_respects_top_level_pins(PipenvInstance, pypi): @pytest.mark.lock -def test_lock_after_update_source_name(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_lock_after_update_source_name(PipenvInstance): + with PipenvInstance(chdir=True) as p: contents = """ [[source]] url = "https://test.pypi.org/simple" diff --git a/tests/integration/test_pipenv.py b/tests/integration/test_pipenv.py index 23ebfc4c5f..b050a72eba 100644 --- a/tests/integration/test_pipenv.py +++ b/tests/integration/test_pipenv.py @@ -28,9 +28,9 @@ def test_code_import_manual(PipenvInstance): @pytest.mark.lock @pytest.mark.deploy @pytest.mark.cli -def test_deploy_works(PipenvInstance, pypi): +def test_deploy_works(PipenvInstance): - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -61,9 +61,9 @@ def test_deploy_works(PipenvInstance, pypi): @pytest.mark.update @pytest.mark.lock -def 
test_update_locks(PipenvInstance, pypi): +def test_update_locks(PipenvInstance): - with PipenvInstance(pypi=pypi) as p: + with PipenvInstance() as p: c = p.pipenv('install requests==2.14.0') assert c.return_code == 0 with open(p.pipfile_path, 'r') as fh: @@ -82,8 +82,8 @@ def test_update_locks(PipenvInstance, pypi): @pytest.mark.project @pytest.mark.proper_names -def test_proper_names_unamanged_virtualenv(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi): +def test_proper_names_unamanged_virtualenv(PipenvInstance): + with PipenvInstance(chdir=True): c = delegator.run('python -m virtualenv .venv') assert c.return_code == 0 project = Project() diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py index 436f11c135..efdc03b698 100644 --- a/tests/integration/test_project.py +++ b/tests/integration/test_project.py @@ -39,8 +39,8 @@ def test_pipfile_envvar_expansion(PipenvInstance): @pytest.mark.project @pytest.mark.sources @pytest.mark.parametrize('lock_first', [True, False]) -def test_get_source(PipenvInstance, pypi, lock_first): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_get_source(PipenvInstance, lock_first): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] @@ -86,8 +86,8 @@ def test_get_source(PipenvInstance, pypi, lock_first): @pytest.mark.install @pytest.mark.project @pytest.mark.parametrize('newlines', [u'\n', u'\r\n']) -def test_maintain_file_line_endings(PipenvInstance, pypi, newlines): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_maintain_file_line_endings(PipenvInstance, newlines): + with PipenvInstance(chdir=True) as p: # Initial pipfile + lockfile generation c = p.pipenv('install pytz') assert c.return_code == 0 @@ -122,8 +122,8 @@ def test_maintain_file_line_endings(PipenvInstance, pypi, newlines): @pytest.mark.project @pytest.mark.sources -def test_many_indexes(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, 
chdir=True) as p: +def test_many_indexes(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] @@ -154,11 +154,11 @@ def test_many_indexes(PipenvInstance, pypi): @pytest.mark.install @pytest.mark.project -def test_include_editable_packages(PipenvInstance, pypi, testsroot, pathlib_tmpdir): +def test_include_editable_packages(PipenvInstance, testsroot, pathlib_tmpdir): file_name = "requests-2.19.1.tar.gz" package = pathlib_tmpdir.joinpath("requests-2.19.1") source_path = os.path.abspath(os.path.join(testsroot, "test_artifacts", file_name)) - with PipenvInstance(chdir=True, pypi=pypi) as p: + with PipenvInstance(chdir=True) as p: with tarfile.open(source_path, "r:gz") as tarinfo: tarinfo.extractall(path=str(pathlib_tmpdir)) c = p.pipenv('install -e {}'.format(package)) @@ -172,8 +172,8 @@ def test_include_editable_packages(PipenvInstance, pypi, testsroot, pathlib_tmpd @pytest.mark.project @pytest.mark.virtualenv -def test_run_in_virtualenv_with_global_context(PipenvInstance, pypi, virtualenv): - with PipenvInstance(chdir=True, pypi=pypi, venv_root=virtualenv.as_posix(), ignore_virtualenvs=False, venv_in_project=False) as p: +def test_run_in_virtualenv_with_global_context(PipenvInstance, virtualenv): + with PipenvInstance(chdir=True, venv_root=virtualenv.as_posix(), ignore_virtualenvs=False, venv_in_project=False) as p: c = delegator_run( "pipenv run pip freeze", cwd=os.path.abspath(p.path), env=os.environ.copy() @@ -210,8 +210,8 @@ def test_run_in_virtualenv_with_global_context(PipenvInstance, pypi, virtualenv) @pytest.mark.project @pytest.mark.virtualenv -def test_run_in_virtualenv(PipenvInstance, pypi): - with PipenvInstance(chdir=True, pypi=pypi) as p: +def test_run_in_virtualenv(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv('run pip freeze') assert c.return_code == 0 assert 'Creating a virtualenv' in c.err diff --git a/tests/integration/test_sync.py 
b/tests/integration/test_sync.py index 1300bf5dec..d085aaf421 100644 --- a/tests/integration/test_sync.py +++ b/tests/integration/test_sync.py @@ -9,8 +9,8 @@ @pytest.mark.sync -def test_sync_error_without_lockfile(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_sync_error_without_lockfile(PipenvInstance): + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] @@ -23,8 +23,8 @@ def test_sync_error_without_lockfile(PipenvInstance, pypi): @pytest.mark.sync @pytest.mark.lock -def test_mirror_lock_sync(PipenvInstance, pypi): - with temp_environ(), PipenvInstance(chdir=True, pypi=pypi) as p: +def test_mirror_lock_sync(PipenvInstance): + with temp_environ(), PipenvInstance(chdir=True) as p: mirror_url = os.environ.pop('PIPENV_TEST_INDEX', "https://pypi.kennethreitz.org/simple") assert 'pypi.org' not in mirror_url with open(p.pipfile_path, 'w') as f: @@ -45,10 +45,10 @@ def test_mirror_lock_sync(PipenvInstance, pypi): @pytest.mark.sync @pytest.mark.lock -def test_sync_should_not_lock(PipenvInstance, pypi): +def test_sync_should_not_lock(PipenvInstance): """Sync should not touch the lock file, even if Pipfile is changed. 
""" - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: f.write(""" [packages] @@ -73,8 +73,8 @@ def test_sync_should_not_lock(PipenvInstance, pypi): @pytest.mark.sync @pytest.mark.lock -def test_sync_sequential_detect_errors(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_sync_sequential_detect_errors(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] @@ -97,8 +97,8 @@ def test_sync_sequential_detect_errors(PipenvInstance, pypi): @pytest.mark.sync @pytest.mark.lock -def test_sync_sequential_verbose(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_sync_sequential_verbose(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] diff --git a/tests/integration/test_uninstall.py b/tests/integration/test_uninstall.py index d850ed0cbf..ef7fb688e6 100644 --- a/tests/integration/test_uninstall.py +++ b/tests/integration/test_uninstall.py @@ -11,8 +11,8 @@ @pytest.mark.run @pytest.mark.uninstall @pytest.mark.install -def test_uninstall(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_uninstall(PipenvInstance): + with PipenvInstance() as p: c = p.pipenv("install requests") assert c.return_code == 0 assert "requests" in p.pipfile["packages"] @@ -38,7 +38,7 @@ def test_uninstall(PipenvInstance, pypi): @pytest.mark.run @pytest.mark.uninstall @pytest.mark.install -def test_mirror_uninstall(PipenvInstance, pypi): +def test_mirror_uninstall(PipenvInstance): with temp_environ(), PipenvInstance(chdir=True) as p: mirror_url = os.environ.pop( @@ -102,8 +102,8 @@ def test_uninstall_all_local_files(PipenvInstance, testsroot): @pytest.mark.run @pytest.mark.uninstall @pytest.mark.install -def test_uninstall_all_dev(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_uninstall_all_dev(PipenvInstance): + with 
PipenvInstance() as p: c = p.pipenv("install --dev requests six") assert c.return_code == 0 @@ -135,8 +135,8 @@ def test_uninstall_all_dev(PipenvInstance, pypi): @pytest.mark.uninstall @pytest.mark.run -def test_normalize_name_uninstall(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi) as p: +def test_normalize_name_uninstall(PipenvInstance): + with PipenvInstance() as p: with open(p.pipfile_path, "w") as f: contents = """ # Pre comment diff --git a/tests/integration/test_windows.py b/tests/integration/test_windows.py index 80fc4053c9..b303d0ab11 100644 --- a/tests/integration/test_windows.py +++ b/tests/integration/test_windows.py @@ -13,10 +13,10 @@ @pytest.mark.project -def test_case_changes_windows(PipenvInstance, pypi): +def test_case_changes_windows(PipenvInstance): """Test project matching for case changes on Windows. """ - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: c = p.pipenv('install pytz') assert c.return_code == 0 @@ -40,7 +40,7 @@ def test_case_changes_windows(PipenvInstance, pypi): @pytest.mark.files -def test_local_path_windows(PipenvInstance, pypi): +def test_local_path_windows(PipenvInstance): whl = ( Path(__file__).parent.parent .joinpath('pypi', 'six', 'six-1.11.0-py2.py3-none-any.whl') @@ -49,13 +49,13 @@ def test_local_path_windows(PipenvInstance, pypi): whl = whl.resolve() except OSError: whl = whl.absolute() - with PipenvInstance(pypi=pypi, chdir=True) as p: + with PipenvInstance(chdir=True) as p: c = p.pipenv('install "{0}"'.format(whl)) assert c.return_code == 0 @pytest.mark.files -def test_local_path_windows_forward_slash(PipenvInstance, pypi): +def test_local_path_windows_forward_slash(PipenvInstance): whl = ( Path(__file__).parent.parent .joinpath('pypi', 'six', 'six-1.11.0-py2.py3-none-any.whl') @@ -64,14 +64,14 @@ def test_local_path_windows_forward_slash(PipenvInstance, pypi): whl = whl.resolve() except OSError: whl = whl.absolute() - with PipenvInstance(pypi=pypi, chdir=True) as p: 
+ with PipenvInstance(chdir=True) as p: c = p.pipenv('install "{0}"'.format(whl.as_posix())) assert c.return_code == 0 @pytest.mark.cli -def test_pipenv_clean_windows(PipenvInstance, pypi): - with PipenvInstance(pypi=pypi, chdir=True) as p: +def test_pipenv_clean_windows(PipenvInstance): + with PipenvInstance(chdir=True) as p: c = p.pipenv('install requests') assert c.return_code == 0 c = p.pipenv('run pip install click') diff --git a/tests/pypi b/tests/pypi index fbd3539075..2c4b6de4d8 160000 --- a/tests/pypi +++ b/tests/pypi @@ -1 +1 @@ -Subproject commit fbd3539075d67494119b0c642707a449bcbd0bd4 +Subproject commit 2c4b6de4d88d7d5732bdf0c9345ad10f8336abd3 diff --git a/tests/test_artifacts/git/requests b/tests/test_artifacts/git/requests index 57d7284c1a..4983a9bde3 160000 --- a/tests/test_artifacts/git/requests +++ b/tests/test_artifacts/git/requests @@ -1 +1 @@ -Subproject commit 57d7284c1a245cf9fbcecb594f50471d86e879f7 +Subproject commit 4983a9bde39c6320aa4f3e34e50dac6e263dab6f diff --git a/tests/test_artifacts/git/six b/tests/test_artifacts/git/six index e114efceea..aa4e90bcd7 160000 --- a/tests/test_artifacts/git/six +++ b/tests/test_artifacts/git/six @@ -1 +1 @@ -Subproject commit e114efceea962fb143c909c904157ca994246fd2 +Subproject commit aa4e90bcd7b7bc13a71dfaebcb2021f4caaa8432 From 1b42124ea02aaff5849ded8b1de32d61d4421a91 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 25 Jun 2019 01:00:49 -0400 Subject: [PATCH 03/18] Fix VCS installs Signed-off-by: Dan Ryan --- pipenv/core.py | 37 ++--- pipenv/utils.py | 1 + pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/exceptions.py | 80 +++++---- .../requirementslib/models/dependencies.py | 132 +++++++++------ .../vendor/requirementslib/models/markers.py | 19 +-- .../vendor/requirementslib/models/pipfile.py | 6 +- .../vendor/requirementslib/models/project.py | 39 ++--- .../requirementslib/models/requirements.py | 154 ++++++++++-------- pipenv/vendor/requirementslib/models/utils.py | 
22 ++- pipenv/vendor/requirementslib/utils.py | 2 +- 11 files changed, 268 insertions(+), 226 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 0991347430..b5bad11ac1 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -745,6 +745,9 @@ def batch_install(deps_list, procs, failed_deps_queue, os.environ["PIP_USER"] = vistir.compat.fs_str("0") if "PYTHONHOME" in os.environ: del os.environ["PYTHONHOME"] + if "GIT_CONFIG" in os.environ and dep.is_vcs: + del os.environ["GIT_CONFIG"] + c = pip_install( dep, ignore_hashes=any([ignore_hashes, dep.editable, dep.is_vcs]), @@ -1381,20 +1384,7 @@ def get_requirement_line( requirement.line_instance._wheel_kwargs.update({ "src_dir": src_dir }) - # if requirement.vcs and requirement.editable: - # repo = requirement.req.get_vcs_repo(src_dir=src_dir) - # requirement.line_instance.vcsrepo - # line = repo.url - # name = requirement.name - # line = "{0}+".format(requirement.vcs) if requirement.vcs else "" - # if requirement.extras: - # name = "{0}{1}".format(name, requirement.extras_as_pip) - # line = "{0}{1}#egg={2}".format( - # line, vistir.path.path_to_url(repo.checkout_directory), requirement.name - # ) - # if repo.subdirectory: - # line = "{0}&subdirectory={1}".format(line, repo.subdirectory) - # else: + requirement.line_instance.vcsrepo line = requirement.line_instance.line if requirement.line_instance.markers: line = '{0}; {1}'.format(line, requirement.line_instance.markers) @@ -1420,8 +1410,8 @@ def write_requirement_to_file( if not requirements_dir: requirements_dir = vistir.path.create_tracked_tempdir( prefix="pipenv", suffix="requirements") - line = get_requirement_line( - requirement, src_dir, include_hashes=include_hashes, format_for_file=True + line = requirement.line_instance.get_line( + with_prefix=True, with_hashes=include_hashes, with_markers=True, as_list=False ) f = vistir.compat.NamedTemporaryFile( @@ -1472,8 +1462,10 @@ def pip_install( elif not (requirement.is_vcs or requirement.editable or 
requirement.vcs): ignore_hashes = False line = None - if requirement.vcs and not requirement.line_instance.markers: - line = get_requirement_line(requirement, src_dir, include_hashes=not ignore_hashes, format_for_file=False) + if requirement.vcs: + line = requirement.line_instance.get_line( + with_prefix=True, with_hashes=False, with_markers=True, as_list=True + ) else: r = write_requirement_to_file( requirement, requirements_dir=requirements_dir, src_dir=src_dir, @@ -1543,6 +1535,7 @@ def pip_install( pip_command = cmd.cmdify() c = None c = delegator.run(pip_command, block=block, env=pip_config) + c.env = pip_config return c @@ -2111,14 +2104,6 @@ def do_install( sys.exit(1) if index_url: pkg_requirement.index = index_url - # deps = [] - # if pkg_requirement.is_vcs and PIPENV_RESOLVE_VCS: - # if not allow_global and ( - # pkg_requirement.line_instance and pkg_requirement.line_instance.wheel_kwargs - # ): - # pkg_requirement.line_instance._wheel_kwargs["src_dir"] = project.virtualenv_src_location - # pkg_setupinfo = pkg_requirement.line_instance.setup_info - # deps = pkg_setupinfo.requires no_deps = False sp.text = "Installing..." 
try: diff --git a/pipenv/utils.py b/pipenv/utils.py index cfda803a28..64333e2697 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -451,6 +451,7 @@ def get_deps_from_req(cls, req, resolver=None): from .vendor.requirementslib.models.utils import _requirement_to_str_lowercase_name from .vendor.requirementslib.models.requirements import Requirement from requirementslib.utils import is_installable_dir + # TODO: this is way too complex, refactor this constraints = set() # type: Set[str] locked_deps = dict() # type: Dict[str, Dict[str, Union[str, bool, List[str]]]] if (req.is_file_or_url or req.is_vcs) and not req.is_wheel: diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 7f039c0751..70b604a82e 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -10,7 +10,7 @@ from .models.pipfile import Pipfile from .models.requirements import Requirement -__version__ = "1.5.1" +__version__ = "1.5.2.dev0" logger = logging.getLogger(__name__) diff --git a/pipenv/vendor/requirementslib/exceptions.py b/pipenv/vendor/requirementslib/exceptions.py index 17b884eb46..d11dbce9b4 100644 --- a/pipenv/vendor/requirementslib/exceptions.py +++ b/pipenv/vendor/requirementslib/exceptions.py @@ -1,19 +1,21 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function + import errno import os -import six import sys - +import six from vistir.compat import FileNotFoundError - if six.PY2: + class FileExistsError(OSError): def __init__(self, *args, **kwargs): self.errno = errno.EEXIST super(FileExistsError, self).__init__(*args, **kwargs) + + else: from six.moves.builtins import FileExistsError @@ -24,8 +26,15 @@ class RequirementError(Exception): class MissingParameter(Exception): def __init__(self, param): - Exception.__init__(self) - print("Missing parameter: %s" % param, file=sys.stderr, flush=True) + self.message = self.get_message(param) + super(MissingParameter, 
self).__init__(self.message) + + @classmethod + def get_message(cls, param): + return "Missing Parameter: %s" % param + + def show(self, param): + print(self.message, file=sys.stderr, flush=True) class FileCorruptException(OSError): @@ -35,58 +44,67 @@ def __init__(self, path, *args, **kwargs): if not backup_path and args: args = reversed(args) backup_path = args.pop() - if not isinstance(backup_path, six.string_types) or not os.path.exists(os.path.abspath(os.path.dirname(backup_path))): + if not isinstance(backup_path, six.string_types) or not os.path.exists( + os.path.abspath(os.path.dirname(backup_path)) + ): args.append(backup_path) backup_path = None if args: args = reversed(args) - self.path = path - self.backup_path = backup_path - self.show(self.path, self.backup_path) - OSError.__init__(self, path, *args, **kwargs) + self.message = self.get_message(path, backup_path=backup_path) + super(FileCorruptException, self).__init__(self.message) - @classmethod - def show(cls, path, backup_path=None): - print("ERROR: Failed to load file at %s" % path, file=sys.stderr, flush=True) + def get_message(self, path, backup_path=None): + message = "ERROR: Failed to load file at %s" % path if backup_path: msg = "it will be backed up to %s and removed" % backup_path else: - msg = "it will be removed and replaced." - print("The file is corrupt, %s" % msg, file=sys.stderr, flush=True) + msg = "it will be removed and replaced on the next lock." 
+ message = "{0}\nYour lockfile is corrupt, {1}".format(message, msg) + return message + + def show(self): + print(self.message, file=sys.stderr, flush=True) class LockfileCorruptException(FileCorruptException): + def __init__(self, path, backup_path=None): + self.message = self.get_message(path, backup_path=backup_path) + super(LockfileCorruptException, self).__init__(self.message) - @classmethod - def show(cls, path, backup_path=None): - print("ERROR: Failed to load lockfile at %s" % path, file=sys.stderr, flush=True) + def get_message(self, path, backup_path=None): + message = "ERROR: Failed to load lockfile at %s" % path if backup_path: msg = "it will be backed up to %s and removed" % backup_path else: msg = "it will be removed and replaced on the next lock." - print("Your lockfile is corrupt, %s" % msg, file=sys.stderr, flush=True) + message = "{0}\nYour lockfile is corrupt, {1}".format(message, msg) + return message + + def show(self, path, backup_path=None): + print(self.message, file=sys.stderr, flush=True) class PipfileCorruptException(FileCorruptException): + def __init__(self, path, backup_path=None): + self.message = self.get_message(path, backup_path=backup_path) + super(PipfileCorruptException, self).__init__(self.message) - @classmethod - def show(cls, path, backup_path=None): - print("ERROR: Failed to load Pipfile at %s" % path, file=sys.stderr, flush=True) + def get_message(self, path, backup_path=None): + message = "ERROR: Failed to load Pipfile at %s" % path if backup_path: msg = "it will be backed up to %s and removed" % backup_path else: msg = "it will be removed and replaced on the next lock." 
- print("Your Pipfile is corrupt, %s" % msg, file=sys.stderr, flush=True) + message = "{0}\nYour Pipfile is corrupt, {1}".format(message, msg) + return message + + def show(self, path, backup_path=None): + print(self.message, file=sys.stderr, flush=True) class PipfileNotFound(FileNotFoundError): def __init__(self, path, *args, **kwargs): self.errno = errno.ENOENT - self.path = path - self.show(path) - super(PipfileNotFound, self).__init__(*args, **kwargs) - - @classmethod - def show(cls, path): - print("ERROR: The file could not be found: %s" % path, file=sys.stderr, flush=True) - print("Aborting...", file=sys.stderr, flush=True) + self.filename = path + super(PipfileNotFound, self).__init__(self.filename) diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py index 44f34edb48..82eaba5f3e 100644 --- a/pipenv/vendor/requirementslib/models/dependencies.py +++ b/pipenv/vendor/requirementslib/models/dependencies.py @@ -9,34 +9,56 @@ import attr import packaging.markers import packaging.version +import pip_shims.shims import requests - from first import first from packaging.utils import canonicalize_name - -import pip_shims.shims -from vistir.compat import JSONDecodeError, fs_str, ResourceWarning +from vistir.compat import JSONDecodeError, fs_str from vistir.contextmanagers import cd, temp_environ from vistir.misc import partialclass from vistir.path import create_tracked_tempdir -from ..environment import MYPY_RUNNING -from ..utils import prepare_pip_source_args, _ensure_dir from .cache import CACHE_DIR, DependencyCache from .utils import ( - clean_requires_python, fix_requires_python_marker, format_requirement, - full_groupby, is_pinned_requirement, key_from_ireq, - make_install_requirement, name_from_req, version_from_ireq + clean_requires_python, + fix_requires_python_marker, + format_requirement, + full_groupby, + is_pinned_requirement, + key_from_ireq, + make_install_requirement, + name_from_req, + 
version_from_ireq, ) - +from ..environment import MYPY_RUNNING +from ..utils import _ensure_dir, prepare_pip_source_args if MYPY_RUNNING: - from typing import Any, Dict, List, Generator, Optional, Union, Tuple, TypeVar, Text, Set, AnyStr - from pip_shims.shims import InstallRequirement, InstallationCandidate, PackageFinder, Command + from typing import ( + Any, + Dict, + List, + Generator, + Optional, + Union, + Tuple, + TypeVar, + Text, + Set, + ) + from pip_shims.shims import ( + InstallRequirement, + InstallationCandidate, + PackageFinder, + Command, + ) from packaging.requirements import Requirement as PackagingRequirement + TRequirement = TypeVar("TRequirement") - RequirementType = TypeVar('RequirementType', covariant=True, bound=PackagingRequirement) - MarkerType = TypeVar('MarkerType', covariant=True, bound=Marker) + RequirementType = TypeVar( + "RequirementType", covariant=True, bound=PackagingRequirement + ) + MarkerType = TypeVar("MarkerType", covariant=True, bound=Marker) STRING_TYPE = Union[str, bytes, Text] S = TypeVar("S", bytes, str, Text) @@ -67,7 +89,6 @@ def find_all_matches(finder, ireq, pre=False): :rtype: list[:class:`~pip._internal.index.InstallationCandidate`] """ - candidates = clean_requires_python(finder.find_all_candidates(ireq.name)) versions = {candidate.version for candidate in candidates} allowed_versions = _get_filtered_versions(ireq, versions, pre) @@ -158,10 +179,14 @@ def compatible_abstract_dep(self, other): elif len(other.candidates) == 1 and first(other.candidates).editable: return other new_specifiers = self.specifiers & other.specifiers - markers = set(self.markers,) if self.markers else set() + markers = set(self.markers) if self.markers else set() if other.markers: markers.add(other.markers) - new_markers = packaging.markers.Marker(" or ".join(str(m) for m in sorted(markers))) + new_markers = None + if markers: + new_markers = packaging.markers.Marker( + " or ".join(str(m) for m in sorted(markers)) + ) new_ireq = 
copy.deepcopy(self.requirement.ireq) new_ireq.req.specifier = new_specifiers new_ireq.req.marker = new_markers @@ -187,7 +212,7 @@ def compatible_abstract_dep(self, other): requirement=new_requirement, parent=self.parent, dep_dict=dep_dict, - finder=self.finder + finder=self.finder, ) def get_deps(self, candidate): @@ -204,7 +229,7 @@ def get_deps(self, candidate): from .requirements import Requirement req = Requirement.from_line(key) - req.merge_markers(self.markers) + req = req.merge_markers(self.markers) self.dep_dict[key] = req.get_abstract_dependencies() return self.dep_dict[key] @@ -230,13 +255,18 @@ def from_requirement(cls, requirement, parent=None): if not is_pinned and not requirement.editable: for r in requirement.find_all_matches(finder=finder): req = make_install_requirement( - name, r.version, extras=extras, markers=markers, constraint=is_constraint, + name, + r.version, + extras=extras, + markers=markers, + constraint=is_constraint, ) req.req.link = r.location req.parent = parent candidates.append(req) candidates = sorted( - set(candidates), key=lambda k: packaging.version.parse(version_from_ireq(k)), + set(candidates), + key=lambda k: packaging.version.parse(version_from_ireq(k)), ) else: candidates = [requirement.ireq] @@ -279,9 +309,7 @@ def get_abstract_dependencies(reqs, sources=None, parent=None): for req in reqs: if isinstance(req, pip_shims.shims.InstallRequirement): - requirement = Requirement.from_line( - "{0}{1}".format(req.name, req.specifier) - ) + requirement = Requirement.from_line("{0}{1}".format(req.name, req.specifier)) if req.link: requirement.req.link = req.link requirement.markers = req.markers @@ -311,27 +339,26 @@ def get_dependencies(ireq, sources=None, parent=None): :rtype: set(str) """ if not isinstance(ireq, pip_shims.shims.InstallRequirement): - name = getattr( - ireq, "project_name", - getattr(ireq, "project", ireq.name), - ) + name = getattr(ireq, "project_name", getattr(ireq, "project", ireq.name)) version = 
getattr(ireq, "version", None) if not version: ireq = pip_shims.shims.InstallRequirement.from_line("{0}".format(name)) else: - ireq = pip_shims.shims.InstallRequirement.from_line("{0}=={1}".format(name, version)) + ireq = pip_shims.shims.InstallRequirement.from_line( + "{0}=={1}".format(name, version) + ) pip_options = get_pip_options(sources=sources) getters = [ get_dependencies_from_cache, get_dependencies_from_wheel_cache, get_dependencies_from_json, - functools.partial(get_dependencies_from_index, pip_options=pip_options) + functools.partial(get_dependencies_from_index, pip_options=pip_options), ] for getter in getters: deps = getter(ireq) if deps is not None: return deps - raise RuntimeError('failed to get dependencies for {}'.format(ireq)) + raise RuntimeError("failed to get dependencies for {}".format(ireq)) def get_dependencies_from_wheel_cache(ireq): @@ -389,7 +416,7 @@ def gen(ireq): finally: session.close() requires_dist = info.get("requires_dist", info.get("requires")) - if not requires_dist: # The API can return None for this. + if not requires_dist: # The API can return None for this. return for requires in requires_dist: i = pip_shims.shims.InstallRequirement.from_line(requires) @@ -430,9 +457,9 @@ def get_dependencies_from_cache(ireq): dep_ireq = pip_shims.shims.InstallRequirement.from_line(line) name = canonicalize_name(dep_ireq.name) if _marker_contains_extra(dep_ireq): - broken = True # The "extra =" marker breaks everything. + broken = True # The "extra =" marker breaks everything. elif name == canonicalize_name(ireq.name): - broken = True # A package cannot depend on itself. + broken = True # A package cannot depend on itself. 
if broken: break except Exception: @@ -446,7 +473,7 @@ def get_dependencies_from_cache(ireq): def is_python(section): - return section.startswith('[') and ':' in section + return section.startswith("[") and ":" in section def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache=None): @@ -468,12 +495,15 @@ def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache reqset.add_requirement(dep) requirements = None setup_requires = {} - with temp_environ(), start_resolver(finder=finder, wheel_cache=wheel_cache) as resolver: - os.environ['PIP_EXISTS_ACTION'] = 'i' + with temp_environ(), start_resolver( + finder=finder, wheel_cache=wheel_cache + ) as resolver: + os.environ["PIP_EXISTS_ACTION"] = "i" dist = None if dep.editable and not dep.prepared and not dep.req: with cd(dep.setup_py_dir): from setuptools.dist import distutils + try: dist = distutils.core.run_setup(dep.setup_py) except (ImportError, TypeError, AttributeError): @@ -504,7 +534,7 @@ def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache add_marker = fix_requires_python_marker(requires_python) reqset.remove(dep) if dep.req.marker: - dep.req.marker._markers.extend(['and',].extend(add_marker._markers)) + dep.req.marker._markers.extend(["and"].extend(add_marker._markers)) else: dep.req.marker = add_marker reqset.add(dep) @@ -512,7 +542,7 @@ def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache for r in results: if requires_python: if r.req.marker: - r.req.marker._markers.extend(['and',].extend(add_marker._markers)) + r.req.marker._markers.extend(["and"].extend(add_marker._markers)) else: r.req.marker = add_marker requirements.add(format_requirement(r)) @@ -531,10 +561,16 @@ def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache else: not_python = True - if ':' not in value and not_python: + if ":" not in value and not_python: try: - requirement_str = "{0}{1}".format(value, 
python_version).replace(":", ";") - requirements.add(format_requirement(make_install_requirement(requirement_str).ireq)) + requirement_str = "{0}{1}".format(value, python_version).replace( + ":", ";" + ) + requirements.add( + format_requirement( + make_install_requirement(requirement_str).ireq + ) + ) # Anything could go wrong here -- can't be too careful. except Exception: pass @@ -559,9 +595,7 @@ def get_pip_options(args=[], sources=None, pip_command=None): if not pip_command: pip_command = get_pip_command() if not sources: - sources = [ - {"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True} - ] + sources = [{"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True}] _ensure_dir(CACHE_DIR) pip_args = args pip_args = prepare_pip_source_args(sources, pip_args) @@ -587,9 +621,7 @@ def get_finder(sources=None, pip_command=None, pip_options=None): if not pip_command: pip_command = get_pip_command() if not sources: - sources = [ - {"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True} - ] + sources = [{"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True}] if not pip_options: pip_options = get_pip_options(sources=sources, pip_command=pip_command) session = pip_command._build_session(pip_options) @@ -652,7 +684,9 @@ def start_resolver(finder=None, wheel_cache=None): use_user_site=False, ) try: - if packaging.version.parse(pip_shims.shims.pip_version) >= packaging.version.parse('18'): + if packaging.version.parse( + pip_shims.shims.pip_version + ) >= packaging.version.parse("18"): with pip_shims.shims.RequirementTracker() as req_tracker: preparer = preparer(req_tracker=req_tracker) yield resolver(preparer=preparer) diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index 5e66511422..fc85fbdd51 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -7,7 +7,7 @@ import packaging.version 
import six from packaging.markers import InvalidMarker, Marker -from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet +from packaging.specifiers import Specifier, SpecifierSet from vistir.compat import Mapping, Set, lru_cache from vistir.misc import dedup @@ -19,18 +19,7 @@ if MYPY_RUNNING: - from typing import ( - Optional, - List, - Type, - Any, - Tuple, - Union, - Set, - AnyStr, - Text, - Iterator, - ) + from typing import Optional, List, Type, Any, Tuple, Union, AnyStr, Text, Iterator STRING_TYPE = Union[str, bytes, Text] @@ -277,8 +266,8 @@ def cleanup_pyspecs(specs, joiner="or"): }, # leave these the same no matter what operator we use ("!=", "==", "~=", "==="): { - "or": lambda x: get_sorted_version_string(x), - "and": lambda x: get_sorted_version_string(x), + "or": get_sorted_version_string, + "and": get_sorted_version_string, }, } op_translations = { diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index 3f7b20c2e6..e55ad741a6 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -242,7 +242,11 @@ def __getattr__(self, k, *args, **kwargs): @property def requires_python(self): # type: () -> bool - return self._pipfile.requires.requires_python + return getattr( + self._pipfile.requires, + "python_version", + getattr(self._pipfile.requires, "python_full_version", None), + ) @property def allow_prereleases(self): diff --git a/pipenv/vendor/requirementslib/models/project.py b/pipenv/vendor/requirementslib/models/project.py index 28afcf0ba5..7c1b0e8100 100644 --- a/pipenv/vendor/requirementslib/models/project.py +++ b/pipenv/vendor/requirementslib/models/project.py @@ -1,6 +1,6 @@ # -*- coding=utf-8 -*- -from __future__ import absolute_import, unicode_literals, print_function +from __future__ import absolute_import, print_function, unicode_literals import collections import io @@ -13,14 +13,10 @@ import plette.models 
import six import tomlkit +from vistir.compat import FileNotFoundError - -SectionDifference = collections.namedtuple("SectionDifference", [ - "inthis", "inthat", -]) -FileDifference = collections.namedtuple("FileDifference", [ - "default", "develop", -]) +SectionDifference = collections.namedtuple("SectionDifference", ["inthis", "inthat"]) +FileDifference = collections.namedtuple("FileDifference", ["default", "develop"]) def _are_pipfile_entries_equal(a, b): @@ -52,12 +48,15 @@ def preferred_newlines(f): class ProjectFile(object): """A file in the Pipfile project. """ + location = attr.ib() line_ending = attr.ib() model = attr.ib() @classmethod def read(cls, location, model_cls, invalid_ok=False): + if not os.path.exists(location) and not invalid_ok: + raise FileNotFoundError(location) try: with io.open(location, encoding="utf-8") as f: model = model_cls.load(f) @@ -89,14 +88,9 @@ class Project(object): def __attrs_post_init__(self): self.root = root = os.path.abspath(self.root) - self._p = ProjectFile.read( - os.path.join(root, "Pipfile"), - plette.Pipfile, - ) + self._p = ProjectFile.read(os.path.join(root, "Pipfile"), plette.Pipfile) self._l = ProjectFile.read( - os.path.join(root, "Pipfile.lock"), - plette.Lockfile, - invalid_ok=True, + os.path.join(root, "Pipfile.lock"), plette.Lockfile, invalid_ok=True ) @property @@ -138,14 +132,17 @@ def contains_key_in_pipfile(self, key): self._get_pipfile_section(develop=True, insert=False), ] return any( - (packaging.utils.canonicalize_name(name) == - packaging.utils.canonicalize_name(key)) + ( + packaging.utils.canonicalize_name(name) + == packaging.utils.canonicalize_name(key) + ) for section in sections for name in section ) def add_line_to_pipfile(self, line, develop): from requirementslib import Requirement + requirement = Requirement.from_line(line) section = self._get_pipfile_section(develop=develop) key = requirement.normalized_name @@ -164,13 +161,9 @@ def remove_keys_from_pipfile(self, keys, default, develop): 
keys = {packaging.utils.canonicalize_name(key) for key in keys} sections = [] if default: - sections.append(self._get_pipfile_section( - develop=False, insert=False, - )) + sections.append(self._get_pipfile_section(develop=False, insert=False)) if develop: - sections.append(self._get_pipfile_section( - develop=True, insert=False, - )) + sections.append(self._get_pipfile_section(develop=True, insert=False)) for section in sections: removals = set() for name in section: diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 559ab424d1..40a49d48c0 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -190,7 +190,10 @@ def __hash__(self): tuple(self.extras), tuple(self.hashes), self.vcs, - self.ireq, + self.uri, + self.path, + self.name, + self._requirement, ) ) @@ -208,6 +211,58 @@ def __repr__(self): except Exception: return "".format(self.__dict__.values()) + def __str__(self): + # type: () -> str + if self.markers: + return "{0}; {1}".format(self.get_line(), self.markers) + return self.get_line() + + def get_line( + self, with_prefix=False, with_markers=False, with_hashes=True, as_list=False + ): + # type: (bool, bool, bool, bool) -> Union[STRING_TYPE, List[STRING_TYPE]] + line = self.line + extras_str = extras_to_string(self.extras) + with_hashes = False if self.editable or self.is_vcs else with_hashes + hash_list = ["--hash={0}".format(h) for h in self.hashes] + if self.is_named: + line = self.name_and_specifier + elif self.is_direct_url: + line = self.link.url + elif extras_str: + if self.is_vcs: + line = self.link.url + if "git+file:/" in line and "git+file:///" not in line: + line = line.replace("git+file:/", "git+file:///") + elif extras_str not in line: + line = "{0}{1}".format(line, extras_str) + # XXX: For using markers on vcs or url requirements, they can be used + # as normal (i.e. 
no space between the requirement and the semicolon) + # and no additional quoting as long as they are not editable requirements + # HOWEVER, for editable requirements, the requirement+marker must be quoted + # We do this here for the line-formatted versions, but leave it up to the + # `Script.parse()` functionality in pipenv, for instance, to handle that + # in a cross-platform manner for the `as_list` approach since that is how + # we anticipate this will be used if passing directly to the command line + # for pip. + if with_markers and self.markers: + line = "{0}; {1}".format(line, self.markers) + if with_prefix and self.editable and not as_list: + line = '"{0}"'.format(line) + if as_list: + result_list = [] + if with_prefix and self.editable: + result_list.append("-e") + result_list.append(line) + if with_hashes: + result_list.extend(self.hashes) + return result_list + if with_prefix and self.editable: + line = "-e {0}".format(line) + if with_hashes and hash_list: + line = "{0} {1}".format(line, " ".join(hash_list)) + return line + @property def name_and_specifier(self): name_str, spec_str = "", "" @@ -240,22 +295,7 @@ def split_hashes(cls, line): @property def line_with_prefix(self): # type: () -> STRING_TYPE - line = self.line - if self.is_named: - return self.name_and_specifier - extras_str = extras_to_string(self.extras) - if self.is_direct_url: - line = self.link.url - elif extras_str: - if self.is_vcs: - line = self.link.url - if "git+file:/" in line and "git+file:///" not in line: - line = line.replace("git+file:/", "git+file:///") - elif extras_str not in line: - line = "{0}{1}".format(line, extras_str) - if self.editable: - return "-e {0}".format(line) - return line + return self.get_line(with_prefix=True, with_hashes=False) @property def line_for_ireq(self): @@ -2570,37 +2610,33 @@ def update_name_from_path(self, path): if self.req._setup_info and self.req._setup_info.name is None: self.req._setup_info.name = name + def get_line_instance(self): + # 
type: () -> Line + line_parts = [] + if self.req: + line_parts.append(self.req.line_part) + if not self.is_vcs and not self.vcs and self.extras_as_pip: + line_parts.append(self.extras_as_pip) + if self._specifiers and not (self.is_file_or_url or self.is_vcs): + line_parts.append(self._specifiers) + if self.markers: + line_parts.append("; {0}".format(self.markers)) + if self.hashes_as_pip: + line_parts.append(self.hashes_as_pip) + line = "".join(line_parts) + return Line(line) + @property def line_instance(self): # type: () -> Optional[Line] if self._line_instance is None: - if self.req is not None and self.req._parsed_line is not None: - self._line_instance = self.req._parsed_line - else: - include_extras = True - include_specifiers = True - if self.is_vcs: - include_extras = False - if self.is_file_or_url or self.is_vcs or not self._specifiers: - include_specifiers = False - line_part = "" # type: STRING_TYPE - if self.req and self.req.line_part: - line_part = "{0!s}".format(self.req.line_part) - parts = [] # type: List[STRING_TYPE] - parts = [ - line_part, - self.extras_as_pip if include_extras else "", - self._specifiers if include_specifiers and self._specifiers else "", - self.markers_as_pip, - ] - line = "".join(parts) - self._line_instance = Line(line) + self.line_instance = self.get_line_instance() return self._line_instance @line_instance.setter def line_instance(self, line_instance): # type: (Line) -> None - if self.req and not self.req._parsed_line: + if self.req: self.req._parsed_line = line_instance self._line_instance = line_instance @@ -2834,29 +2870,14 @@ def as_line( in the requirement line. 
""" - include_specifiers = True if self.specifiers else False - if self.is_vcs: - include_extras = False - if self.is_file_or_url or self.is_vcs: - include_specifiers = False - parts = [ - self.req.line_part, - self.extras_as_pip if include_extras else "", - self.specifiers if include_specifiers else "", - self.markers_as_pip if include_markers else "", - ] - if as_list: - # This is used for passing to a subprocess call - parts = ["".join(parts)] - if include_hashes: - hashes = self.get_hashes_as_pip(as_list=as_list) - if as_list: - parts.extend(hashes) - else: - parts.append(hashes) - - is_local = self.is_file_or_url and self.req and self.req.is_local - if sources and self.requirement and not (is_local or self.vcs): + assert self.line_instance is not None + parts = self.line_instance.get_line( + with_prefix=True, + with_hashes=include_hashes, + with_markers=include_markers, + as_list=as_list, + ) + if sources and self.requirement and not (self.line_instance.is_local or self.vcs): from ..utils import prepare_pip_source_args if self.index: @@ -2866,11 +2887,8 @@ def as_line( parts.extend(sources) else: index_string = " ".join(source_list) - parts.extend([" ", index_string]) - if as_list: - return parts - line = "".join(parts) - return line + parts = "{0} {1}".format(parts, index_string) + return parts def get_markers(self): # type: () -> Marker @@ -3093,6 +3111,8 @@ def run_requires(self, sources=None, finder=None): def merge_markers(self, markers): # type: (Union[AnyStr, Marker]) -> None + if not markers: + return self if not isinstance(markers, Marker): markers = Marker(markers) _markers = [] # type: List[Marker] diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index fd5567a8ab..1c1c320d93 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -829,7 +829,9 @@ def name_from_req(req): return req.name -def make_install_requirement(name, version, 
extras, markers, constraint=False): +def make_install_requirement( + name, version=None, extras=None, markers=None, constraint=False +): """ Generates an :class:`~pip._internal.req.req_install.InstallRequirement`. @@ -853,19 +855,16 @@ def make_install_requirement(name, version, extras, markers, constraint=False): from pip_shims.shims import install_req_from_line extras_string = "" + requirement_string = "{0}".format(name) if extras: # Sort extras for stability extras_string = "[{}]".format(",".join(sorted(extras))) - - if not markers: - return install_req_from_line( - str("{}{}=={}".format(name, extras_string, version)), constraint=constraint - ) - else: - return install_req_from_line( - str("{}{}=={}; {}".format(name, extras_string, version, str(markers))), - constraint=constraint, - ) + requirement_string = "{0}{1}".format(requirement_string, extras_string) + if version: + requirement_string = "{0}=={1}".format(requirement_string, str(version)) + if markers: + requirement_string = "{0}; {1}".format(requirement_string, str(markers)) + return install_req_from_line(requirement_string, constraint=constraint) def version_from_ireq(ireq): @@ -986,7 +985,6 @@ def read_source(path, encoding="utf-8"): return fp.read() - SETUPTOOLS_SHIM = ( "import setuptools, tokenize;__file__=%r;" "f=getattr(tokenize, 'open', open)(__file__);" diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index 3769dbac1f..503a13d071 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -121,7 +121,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): # type: (S) -> S - """Cleans VCS uris from pipenv.patched.notpip format""" + """Cleans VCS uris from pip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: From e1e8364eeb69e671138a7723ae248732b06765ac Mon Sep 17 00:00:00 2001 From: Dan 
Ryan Date: Tue, 25 Jun 2019 04:01:06 -0400 Subject: [PATCH 04/18] Update lockfile to support 2.7 Signed-off-by: Dan Ryan --- Pipfile | 1 + Pipfile.lock | 34 +++++++++++++++++----------------- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/Pipfile b/Pipfile index 826df207d7..95d10d5174 100644 --- a/Pipfile +++ b/Pipfile @@ -2,6 +2,7 @@ pipenv = {path = ".", editable = true, extras = ["tests", "dev"]} sphinx-click = "*" click = "*" +"path.py" = "<12.0" pytest_pypi = {path = "./tests/pytest-pypi", editable = true} stdeb = {version="*", markers="sys_platform == 'linux'"} jedi = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 94693e77f2..f70c7ca198 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "f4d89c0aab5c4e865f8c96ba24613fb1e66bae803a3ceaeadb6abf0061898091" + "sha256": "44189c3f4dc6499d20a5c0b5d1f875b0dc242943175e3e38a6e9719fc5860f7d" }, "pipfile-spec": 6, "requires": {}, @@ -488,10 +488,10 @@ }, "parso": { "hashes": [ - "sha256:17cc2d7a945eb42c3569d4564cdf49bde221bc2b552af3eca9c1aad517dcdd33", - "sha256:2e9574cb12e7112a87253e14e2c380ce312060269d04bd018478a3c92ea9a376" + "sha256:5052bb33be034cba784193e74b1cde6ebf29ae8b8c1e4ad94df0c4209bfc4826", + "sha256:db5881df1643bf3e66c097bfd8935cf03eae73f4cb61ae4433c9ea4fb6613446" ], - "version": "==0.4.0" + "version": "==0.5.0" }, "parver": { "hashes": [ @@ -525,11 +525,11 @@ }, "path.py": { "hashes": [ - "sha256:9f2169633403aa0423f6ec000e8701dd1819526c62465f5043952f92527fea0f", - "sha256:e107a3a8834a97be2a047f4b641822afc76a2b78352610102782732e6b389aa3" + "sha256:de7cd643affbc23e56533a6e8d551ecdee4983501a08c24e4e71565202d8cdaa", + "sha256:ea40833e76c50485fffd3e094d52e9e8701ba8c62a3b8f67c655c28a9538aac1" ], - "markers": "python_version >= '3.5'", - "version": "==12.0.1" + "index": "pypi", + "version": "==11.5.2" }, "pathlib2": { "hashes": [ @@ -541,10 +541,10 @@ }, "pbr": { "hashes": [ - "sha256:089ccb087e9bd8f278caedfa6c2c5d461381437eda3db750b6834e78b319f404", 
- "sha256:9fb1c3371344cd617eb073c6c00872e9b0e5a7fefed6cd29f327a1b26ab5c498" + "sha256:9181e2a34d80f07a359ff1d0504fad3a47e00e1cf2c475b0aa7dcb030af54c40", + "sha256:94bdc84da376b3dd5061aa0c3b6faffe943ee2e56fa4ff9bd63e1643932f34fc" ], - "version": "==5.3.0" + "version": "==5.3.1" }, "pipenv": { "editable": true, @@ -842,10 +842,10 @@ }, "soupsieve": { "hashes": [ - "sha256:6898e82ecb03772a0d82bd0d0a10c0d6dcc342f77e0701d0ec4a8271be465ece", - "sha256:b20eff5e564529711544066d7dc0f7661df41232ae263619dede5059799cdfca" + "sha256:72b5f1aea9101cf720a36bb2327ede866fd6f1a07b1e87c92a1cc18113cbc946", + "sha256:e4e9c053d59795e440163733a7fec6c5972210e1790c507e4c7b051d6c5259de" ], - "version": "==1.9.1" + "version": "==1.9.2" }, "sphinx": { "hashes": [ @@ -922,11 +922,11 @@ }, "tqdm": { "hashes": [ - "sha256:0a860bf2683fdbb4812fe539a6c22ea3f1777843ea985cb8c3807db448a0f7ab", - "sha256:e288416eecd4df19d12407d0c913cbf77aa8009d7fddb18f632aded3bdbdda6b" + "sha256:14a285392c32b6f8222ecfbcd217838f88e11630affe9006cd0e94c7eff3cb61", + "sha256:25d4c0ea02a305a688e7e9c2cdc8f862f989ef2a4701ab28ee963295f5b109ab" ], "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==4.32.1" + "version": "==4.32.2" }, "translationstring": { "hashes": [ From edbb6321f453273bc345a09b2c231ddb9d70a30b Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 25 Jun 2019 17:25:18 -0400 Subject: [PATCH 05/18] Add windows support to test devpi server Signed-off-by: Dan Ryan --- tests/integration/conftest.py | 53 +++++++++++++++++++++++++++++++---- 1 file changed, 47 insertions(+), 6 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index e7c720b391..d651ea85a3 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -2,9 +2,12 @@ from __future__ import absolute_import, print_function import errno import json +import logging import os import shutil +import socket import sys +import time import warnings from shutil 
import copyfileobj, rmtree as _rmtree @@ -14,18 +17,16 @@ from pipenv.vendor.vistir.compat import ResourceWarning, fs_str, fs_encode, FileNotFoundError, PermissionError, TemporaryDirectory from pipenv.vendor.vistir.misc import run -from pipenv.vendor.vistir.contextmanagers import temp_environ, open_file +from pipenv.vendor.vistir.contextmanagers import temp_environ from pipenv.vendor.vistir.path import mkdir_p, create_tracked_tempdir, handle_remove_readonly from pipenv._compat import Path -from pipenv.cmdparse import Script from pipenv.exceptions import VirtualenvActivationException -from pipenv.vendor import delegator, requests, toml, tomlkit +from pipenv.vendor import delegator, toml, tomlkit from pytest_pypi.app import prepare_fixtures -from pytest_shutil.workspace import Workspace -from _pytest_devpi_server import DevpiServer - +from _pytest_devpi_server import DevpiServer as _DevpiServer +log = logging.getLogger(__name__) warnings.simplefilter("default", category=ResourceWarning) @@ -37,6 +38,46 @@ def try_internet(url="http://httpbin.org/ip", timeout=1.5): resp.raise_for_status() +class ServerNotDead(Exception): + pass + + +class DevpiServer(_DevpiServer): + def _find_and_kill(self, retries, signal): + log.debug("Killing server running at {}:{} using signal {}".format(self.hostname, self.port, signal)) + for _ in range(retries): + cd_path = "/" + if sys.platform == "darwin": + netstat_cmd = "lsof -n -i:{} | grep LISTEN | awk '{{ print $2 }}'".format(self.port) + elif sys.platform == "linux": + netstat_cmd = ("netstat -anp 2>/dev/null | grep %s:%s | grep LISTEN | " + "awk '{ print $7 }' | cut -d'/' -f1" % (socket.gethostbyname(self.hostname), self.port)) + else: + netstat_cmd = 'for /f "usebackq tokens=5" %%i IN (`netstat -aon ^| findstr "{0}"`) do @echo %%~nxi'.format(self.port) + pids = [p.strip() for p in self.run(netstat_cmd, capture=True, cd=cd_path).split('\n') if p.strip()] + + if not pids: + # No PIDs remaining, server has died. 
+ break + + for pid in pids: + try: + pid = int(pid) + except ValueError: + log.error("Can't determine port, process shutting down or owned by someone else") + else: + try: + os.kill(pid, signal) + except OSError as oe: + if oe.errno == errno.ESRCH: # Process doesn't appear to exist. + log.error("For some reason couldn't find PID {} to kill.".format(p)) + else: + raise + time.sleep(self.kill_retry_delay) + else: + raise ServerNotDead("Server not dead after %d retries" % retries) + + def check_internet(): has_internet = False for url in ("http://httpbin.org/ip", "http://clients3.google.com/generate_204"): From 4b75c6490dbdbfde5ed19c62a3cd041a5c9b8e92 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 25 Jun 2019 23:52:49 -0400 Subject: [PATCH 06/18] Fix devpi server to work on windows Signed-off-by: Dan Ryan --- tests/integration/conftest.py | 41 +++++++++++++++++++++++++++++++++-- 1 file changed, 39 insertions(+), 2 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index d651ea85a3..e5088d1922 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -5,6 +5,7 @@ import logging import os import shutil +import signal import socket import sys import time @@ -43,18 +44,33 @@ class ServerNotDead(Exception): class DevpiServer(_DevpiServer): + + term_signal = signal.SIGTERM if not os.name == "nt" else signal.CTRL_C_EVENT + kill_signal = signal.SIGKILL if not os.name == "nt" else signal.CTRL_BREAK_EVENT + def _find_and_kill(self, retries, signal): log.debug("Killing server running at {}:{} using signal {}".format(self.hostname, self.port, signal)) for _ in range(retries): cd_path = "/" + pids = [] + netstat_cmd = "" if sys.platform == "darwin": netstat_cmd = "lsof -n -i:{} | grep LISTEN | awk '{{ print $2 }}'".format(self.port) elif sys.platform == "linux": netstat_cmd = ("netstat -anp 2>/dev/null | grep %s:%s | grep LISTEN | " "awk '{ print $7 }' | cut -d'/' -f1" % (socket.gethostbyname(self.hostname), 
self.port)) else: - netstat_cmd = 'for /f "usebackq tokens=5" %%i IN (`netstat -aon ^| findstr "{0}"`) do @echo %%~nxi'.format(self.port) - pids = [p.strip() for p in self.run(netstat_cmd, capture=True, cd=cd_path).split('\n') if p.strip()] + procs = self.run('tasklist /NH /FI "IMAGENAME eq devpi-server.exe"', capture=True) + pids = [ + task.strip().split()[1] for task in procs.strip().splitlines() + if "No tasks are running" not in task.strip() + ] + if netstat_cmd: + pids = [ + p.strip() for p in + self.run(netstat_cmd, capture=True, cd=cd_path).split('\n') + if p.strip() + ] if not pids: # No PIDs remaining, server has died. @@ -73,10 +89,31 @@ def _find_and_kill(self, retries, signal): log.error("For some reason couldn't find PID {} to kill.".format(p)) else: raise + self.run("taskkill /f /pid %s" % pid, capture=False, check_rc=False) time.sleep(self.kill_retry_delay) else: raise ServerNotDead("Server not dead after %d retries" % retries) + def kill(self, retries=5): + """Kill all running versions of this server. + Just killing the thread.server pid isn't good enough, it may have spawned children.
+ """ + # Prevent traceback printed when the server goes away as we kill it + if self.server: + self.server.exit = True + + if self.dead: + return + + try: + self._find_and_kill(retries, self.term_signal) + except ServerNotDead: + log.error("Server not dead after %d retries, trying with SIGKILL" % retries) + try: + self._find_and_kill(retries, self.kill_signal) + except ServerNotDead: + log.error("Server still not dead, giving up") + def check_internet(): has_internet = False From 5ac676711af2a4f1e3265cd0303af9af58f88876 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 5 Jul 2019 02:06:43 -0400 Subject: [PATCH 07/18] Fix editable VCS dependency resolution - Fixes #3809 Signed-off-by: Dan Ryan --- Pipfile | 1 - pipenv/cli/command.py | 6 +- pipenv/environment.py | 204 +++++++++++++++-- pipenv/project.py | 10 +- pipenv/utils.py | 116 +++++++++- .../requirementslib/models/requirements.py | 28 ++- pipenv/vendor/requirementslib/models/utils.py | 4 + tests/fixtures/fake-package/.coveragerc | 27 +++ tests/fixtures/fake-package/.editorconfig | 27 +++ tests/fixtures/fake-package/.gitignore | 108 +++++++++ .../fake-package/.pre-commit-config.yaml | 20 ++ tests/fixtures/fake-package/.travis.yml | 38 ++++ tests/fixtures/fake-package/LICENSE | 13 ++ tests/fixtures/fake-package/MANIFEST.in | 19 ++ tests/fixtures/fake-package/Pipfile | 14 ++ tests/fixtures/fake-package/README.rst | 3 + tests/fixtures/fake-package/appveyor.yml | 61 +++++ tests/fixtures/fake-package/docs/conf.py | 208 ++++++++++++++++++ .../fake-package/docs/requirements.txt | 2 + tests/fixtures/fake-package/news/.gitignore | 1 + tests/fixtures/fake-package/pyproject.toml | 50 +++++ tests/fixtures/fake-package/setup.cfg | 120 ++++++++++ tests/fixtures/fake-package/setup.py | 35 +++ .../fake-package/src/fake_package/__init__.py | 1 + .../fake-package/tasks/CHANGELOG.rst.jinja2 | 40 ++++ tests/fixtures/fake-package/tasks/__init__.py | 175 +++++++++++++++ tests/fixtures/fake-package/tox.ini | 37 ++++ 
tests/integration/test_cli.py | 8 +- tests/integration/test_install_basic.py | 4 +- tests/integration/test_install_markers.py | 6 +- tests/integration/test_install_uri.py | 4 +- tests/integration/test_lock.py | 14 +- tests/pypi | 2 +- tests/pytest-pypi/pytest_pypi/app.py | 36 ++- .../pytest_pypi/templates/package_pypi.html | 4 + 35 files changed, 1386 insertions(+), 60 deletions(-) create mode 100644 tests/fixtures/fake-package/.coveragerc create mode 100644 tests/fixtures/fake-package/.editorconfig create mode 100644 tests/fixtures/fake-package/.gitignore create mode 100644 tests/fixtures/fake-package/.pre-commit-config.yaml create mode 100644 tests/fixtures/fake-package/.travis.yml create mode 100644 tests/fixtures/fake-package/LICENSE create mode 100644 tests/fixtures/fake-package/MANIFEST.in create mode 100644 tests/fixtures/fake-package/Pipfile create mode 100644 tests/fixtures/fake-package/README.rst create mode 100644 tests/fixtures/fake-package/appveyor.yml create mode 100644 tests/fixtures/fake-package/docs/conf.py create mode 100644 tests/fixtures/fake-package/docs/requirements.txt create mode 100644 tests/fixtures/fake-package/news/.gitignore create mode 100644 tests/fixtures/fake-package/pyproject.toml create mode 100644 tests/fixtures/fake-package/setup.cfg create mode 100644 tests/fixtures/fake-package/setup.py create mode 100644 tests/fixtures/fake-package/src/fake_package/__init__.py create mode 100644 tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2 create mode 100644 tests/fixtures/fake-package/tasks/__init__.py create mode 100644 tests/fixtures/fake-package/tox.ini create mode 100644 tests/pytest-pypi/pytest_pypi/templates/package_pypi.html diff --git a/Pipfile b/Pipfile index 95d10d5174..826df207d7 100644 --- a/Pipfile +++ b/Pipfile @@ -2,7 +2,6 @@ pipenv = {path = ".", editable = true, extras = ["tests", "dev"]} sphinx-click = "*" click = "*" -"path.py" = "<12.0" pytest_pypi = {path = "./tests/pytest-pypi", editable = true} stdeb = 
{version="*", markers="sys_platform == 'linux'"} jedi = "*" diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index a897cff00a..d1bcaa939d 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -8,9 +8,9 @@ argument, echo, edit, group, option, pass_context, secho, version_option ) -import click_completion -import crayons -import delegator +from ..vendor import click_completion +from ..vendor import delegator +from ..patched import crayons from ..__version__ import __version__ from .options import ( diff --git a/pipenv/environment.py b/pipenv/environment.py index a8489d955b..4a6940197e 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -1,7 +1,9 @@ # -*- coding=utf-8 -*- +from __future__ import absolute_import, print_function import contextlib import importlib +import io import json import operator import os @@ -18,7 +20,7 @@ import pipenv from .vendor.cached_property import cached_property -import vistir +from .vendor import vistir from .utils import normalize_path, make_posix @@ -46,6 +48,9 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No self.extra_dists = [] prefix = prefix if prefix else sys.prefix self.prefix = vistir.compat.Path(prefix) + self._base_paths = {} + if self.is_venv: + self._base_paths = self.get_paths() self.sys_paths = get_paths() def safe_import(self, name): @@ -117,6 +122,13 @@ def find_libdir(self): @property def python_info(self): include_dir = self.prefix / "include" + if not os.path.exists(include_dir): + include_dirs = self.get_include_path() + if include_dirs: + include_path = include_dirs.get("include", include_dirs.get("platinclude")) + if not include_path: + return {} + include_dir = vistir.compat.Path(include_path) python_path = next(iter(list(include_dir.iterdir())), None) if python_path and python_path.name.startswith("python"): python_version = python_path.name.replace("python", "") @@ -165,17 +177,39 @@ def base_paths(self): """ prefix = 
make_posix(self.prefix.as_posix()) - install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' - paths = get_paths(install_scheme, vars={ - 'base': prefix, - 'platbase': prefix, - }) - current_version = get_python_version() - for k in list(paths.keys()): - if not os.path.exists(paths[k]): - paths[k] = self._replace_parent_version(paths[k], current_version) + paths = {} + if self._base_paths: + paths = self._base_paths.copy() + else: + try: + paths = self.get_paths() + except Exception: + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + paths = get_paths(install_scheme, vars={ + 'base': prefix, + 'platbase': prefix, + }) + current_version = get_python_version() + try: + for k in list(paths.keys()): + if not os.path.exists(paths[k]): + paths[k] = self._replace_parent_version(paths[k], current_version) + except OSError: + # Sometimes virtualenvs are made using virtualenv interpreters and there is no + # include directory, which will cause this approach to fail. This failsafe + # will make sure we fall back to the shell execution to find the real include path + paths = self.get_include_path() + paths.update(self.get_lib_paths()) + paths["scripts"] = self.script_basedir + if not paths: + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + paths = get_paths(install_scheme, vars={ + 'base': prefix, + 'platbase': prefix, + }) if not os.path.exists(paths["purelib"]) and not os.path.exists(paths["platlib"]): - paths = self.get_paths() + lib_paths = self.get_lib_paths() + paths.update(lib_paths) paths["PATH"] = paths["scripts"] + os.pathsep + os.defpath if "prefix" not in paths: paths["prefix"] = prefix @@ -232,6 +266,47 @@ def sys_path(self): path = sys.path return path + def build_command(self, python_lib=False, python_inc=False, scripts=False, py_version=False): + """Build the text for running a command in the given environment + + :param python_lib: Whether to include the python lib dir commands, defaults to False + :type python_lib: 
bool, optional + :param python_inc: Whether to include the python include dir commands, defaults to False + :type python_inc: bool, optional + :param scripts: Whether to include the scripts directory, defaults to False + :type scripts: bool, optional + :param py_version: Whether to include the python version info, defaults to False + :type py_version: bool, optional + :return: A string representing the command to run + """ + pylib_lines = [] + pyinc_lines = [] + py_command = ( + "import sysconfig, distutils.sysconfig, io, json, sys; paths = {{" + "%s }}; value = u'{{0}}'.format(json.dumps(paths));" + "fh = io.open('{0}', 'w'); fh.write(value); fh.close()" + ) + distutils_line = "distutils.sysconfig.get_python_{0}(plat_specific={1})" + sysconfig_line = "sysconfig.get_path('{0}')" + if python_lib: + for key, var, val in (("pure", "lib", "0"), ("plat", "lib", "1")): + dist_prefix = "{0}lib".format(key) + # XXX: We need to get 'stdlib' or 'platstdlib' + sys_prefix = "{0}stdlib".format("" if key == "pure" else key) + pylib_lines.append("u'%s': u'{{0}}'.format(%s)" % (dist_prefix, distutils_line.format(var, val))) + pylib_lines.append("u'%s': u'{{0}}'.format(%s)" % (sys_prefix, sysconfig_line.format(sys_prefix))) + if python_inc: + for key, var, val in (("include", "inc", "0"), ("platinclude", "inc", "1")): + pylib_lines.append("u'%s': u'{{0}}'.format(%s)" % (key, distutils_line.format(var, val))) + lines = pylib_lines + pyinc_lines + if scripts: + lines.append("u'scripts': u'{{0}}'.format(%s)" % sysconfig_line.format("scripts")) + if py_version: + lines.append("u'py_version_short': u'{{0}}'.format(distutils.sysconfig.get_python_version()),") + lines_as_str = u",".join(lines) + py_command = py_command % lines_as_str + return py_command + def get_paths(self): """ Get the paths for the environment by running a subcommand @@ -239,21 +314,108 @@ def get_paths(self): :return: The python paths for the environment :rtype: Dict[str, str] """ - prefix = 
make_posix(self.prefix.as_posix()) - install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + tmpfile = vistir.path.create_tracked_tempfile(suffix=".json") + tmpfile.close() + tmpfile_path = make_posix(tmpfile.name) + py_command = self.build_command(python_lib=True, python_inc=True, scripts=True, py_version=True) + command = [self.python, "-c", py_command.format(tmpfile_path)] + c = vistir.misc.run( + command, return_object=True, block=True, nospin=True, write_to_stdout=False + ) + if c.returncode == 0: + paths = {} + with io.open(tmpfile_path, "r", encoding="utf-8") as fh: + paths = json.load(fh) + if "purelib" in paths: + paths["libdir"] = paths["purelib"] = make_posix(paths["purelib"]) + for key in ("platlib", "scripts", "platstdlib", "stdlib", "include", "platinclude"): + if key in paths: + paths[key] = make_posix(paths[key]) + return paths + else: + vistir.misc.echo("Failed to load paths: {0}".format(c.err), fg="yellow") + vistir.misc.echo("Output: {0}".format(c.out), fg="yellow") + return None + + def get_lib_paths(self): + """Get the lib paths for the environment + + :return: The python lib paths for the environment + :rtype: Dict[str, str] + """ + tmpfile = vistir.path.create_tracked_tempfile(suffix=".json") + tmpfile.close() + tmpfile_path = make_posix(tmpfile.name) + py_command = self.build_command(python_lib=True) + command = [self.python, "-c", py_command.format(tmpfile_path)] + c = vistir.misc.run( + command, return_object=True, block=True, nospin=True, write_to_stdout=False + ) + paths = None + if c.returncode == 0: + paths = {} + with io.open(tmpfile_path, "r", encoding="utf-8") as fh: + paths = json.load(fh) + if "purelib" in paths: + paths["libdir"] = paths["purelib"] = make_posix(paths["purelib"]) + for key in ("platlib", "platstdlib", "stdlib"): + if key in paths: + paths[key] = make_posix(paths[key]) + return paths + else: + vistir.misc.echo("Failed to load paths: {0}".format(c.err), fg="yellow") + vistir.misc.echo("Output: 
{0}".format(c.out), fg="yellow") + if not paths: + if not self.prefix.joinpath("lib").exists(): + return {} + stdlib_path = next(iter([ + p for p in self.prefix.joinpath("lib").iterdir() + if p.name.startswith("python") + ]), None) + lib_path = None + if stdlib_path: + lib_path = next(iter([ + p.as_posix() for p in stdlib_path.iterdir() + if p.name == "site-packages" + ])) + paths = {"stdlib": stdlib_path.as_posix()} + if lib_path: + paths["purelib"] = lib_path + return paths + return {} + + def get_include_path(self): + """Get the include path for the environment + + :return: The python include path for the environment + :rtype: Dict[str, str] + """ + tmpfile = vistir.path.create_tracked_tempfile(suffix=".json") + tmpfile.close() + tmpfile_path = make_posix(tmpfile.name) py_command = ( - "import sysconfig, json, distutils.sysconfig;" - "paths = sysconfig.get_paths('{0}', vars={{'base': '{1}', 'platbase': '{1}'}}" - ");paths['purelib'] = distutils.sysconfig.get_python_lib(plat_specific=0, " - "prefix='{1}');paths['platlib'] = distutils.sysconfig.get_python_lib(" - "plat_specific=1, prefix='{1}');print(json.dumps(paths))" + "import distutils.sysconfig, io, json, sys; paths = {{u'include': " + "u'{{0}}'.format(distutils.sysconfig.get_python_inc(plat_specific=0)), " + "u'platinclude': u'{{0}}'.format(distutils.sysconfig.get_python_inc(" + "plat_specific=1)) }}; value = u'{{0}}'.format(json.dumps(paths));" + "fh = io.open('{0}', 'w'); fh.write(value); fh.close()" ) - command = [self.python, "-c", py_command.format(install_scheme, prefix)] + command = [self.python, "-c", py_command.format(tmpfile_path)] c = vistir.misc.run( command, return_object=True, block=True, nospin=True, write_to_stdout=False ) - paths = json.loads(vistir.misc.to_text(c.out.strip())) - return paths + if c.returncode == 0: + paths = [] + with io.open(tmpfile_path, "r", encoding="utf-8") as fh: + paths = json.load(fh) + for key in ("include", "platinclude"): + if key in paths: + paths[key] = 
make_posix(paths[key]) + return paths + else: + vistir.misc.echo("Failed to load paths: {0}".format(c.err), fg="yellow") + vistir.misc.echo("Output: {0}".format(c.out), fg="yellow") + return None @cached_property def sys_prefix(self): diff --git a/pipenv/project.py b/pipenv/project.py index 1545aebd30..d0d668bfc8 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -27,7 +27,7 @@ from .environments import ( PIPENV_DEFAULT_PYTHON_VERSION, PIPENV_IGNORE_VIRTUALENVS, PIPENV_MAX_DEPTH, PIPENV_PIPFILE, PIPENV_PYTHON, PIPENV_TEST_INDEX, PIPENV_VENV_IN_PROJECT, - is_in_virtualenv + is_in_virtualenv, is_type_checking ) from .vendor.requirementslib.models.utils import get_default_pyproject_backend from .utils import ( @@ -38,6 +38,10 @@ safe_expandvars, get_pipenv_dist ) +if is_type_checking(): + from typing import Dict, Text, Union + TSource = Dict[Text, Union[Text, bool]] + def _normalized(p): if p is None: @@ -851,6 +855,10 @@ def sources(self): else: return self.pipfile_sources + @property + def index_urls(self): + return [src.get("url") for src in self.sources] + def find_source(self, source): """ Given a source, find it. 
diff --git a/pipenv/utils.py b/pipenv/utils.py index 64333e2697..2b0a8cdbf9 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -20,12 +20,8 @@ import tomlkit from click import echo as click_echo -six.add_move(six.MovedAttribute("Mapping", "collections", "collections.abc")) # noqa -six.add_move(six.MovedAttribute("Sequence", "collections", "collections.abc")) # noqa -six.add_move(six.MovedAttribute("Set", "collections", "collections.abc")) # noqa -from six.moves import Mapping, Sequence, Set from six.moves.urllib.parse import urlparse -from .vendor.vistir.compat import ResourceWarning, lru_cache +from .vendor.vistir.compat import ResourceWarning, lru_cache, Mapping, Sequence, Set from .vendor.vistir.misc import fs_str, run import crayons @@ -42,9 +38,10 @@ if environments.MYPY_RUNNING: from typing import Tuple, Dict, Any, List, Union, Optional, Text from .vendor.requirementslib.models.requirements import Requirement, Line + from .vendor.requirementslib.models.pipfile import Pipfile from .vendor.packaging.markers import Marker from .vendor.packaging.specifiers import Specifier - from .project import Project + from .project import Project, TSource logging.basicConfig(level=logging.ERROR) @@ -285,6 +282,88 @@ def prepare_pip_source_args(sources, pip_args=None): return pip_args +def get_project_index(index=None, trusted_hosts=None, project=None): + # type: (Optional[Union[str, TSource]], Optional[List[str]], Optional[Project]) -> TSource + from .project import SourceNotFound + if not project: + from .core import project + if trusted_hosts is None: + trusted_hosts = [] + if isinstance(index, Mapping): + return project.find_source(index.get("url")) + try: + source = project.find_source(index) + except SourceNotFound: + index_url = urllib3_util.parse_url(index) + src_name = project.src_name_from_url(index) + verify_ssl = index_url.host not in trusted_hosts + source = {"url": index, "verify_ssl": verify_ssl, "name": src_name} + return source + + +def get_source_list( + 
index=None, # type: Optional[Union[str, TSource]] + extra_indexes=None, # type: Optional[List[str]] + trusted_hosts=None, # type: Optional[List[str]] + pypi_mirror=None, # type: Optional[str] + project=None, # type: Optional[Project] +): + # type: (...) -> List[TSource] + sources = [] # type: List[TSource] + if not project: + from .core import project + if index: + sources.append(get_project_index(index)) + if extra_indexes: + if isinstance(extra_indexes, six.string_types): + extra_indexes = [extra_indexes,] + for source in extra_indexes: + extra_src = get_project_index(source) + if not sources or extra_src["url"] != sources[0]["url"]: + sources.append(extra_src) + else: + for source in project.pipfile_sources: + if not sources or source["url"] != sources[0]["url"]: + sources.append(source) + if not sources: + sources = project.pipfile_sources[:] + if pypi_mirror: + sources = [ + create_mirror_source(pypi_mirror) if is_pypi_url(source["url"]) else source + for source in sources + ] + return sources + + +def get_indexes_from_requirement(req, project=None, index=None, extra_indexes=None, trusted_hosts=None, pypi_mirror=None): + # type: (Requirement, Optional[Project], Optional[Text], Optional[List[Text]], Optional[List[Text]], Optional[Text]) -> Tuple[TSource, List[TSource], List[Text]] + if not project: + from .core import project + index_sources = [] # type: List[TSource] + if not trusted_hosts: + trusted_hosts = [] # type: List[Text] + if extra_indexes is None: + extra_indexes = [] + project_indexes = project.pipfile_sources[:] + indexes = [] + if req.index: + indexes.append(req.index) + if getattr(req, "extra_indexes", None): + if not isinstance(req.extra_indexes, list): + indexes.append(req.extra_indexes) + else: + indexes.extend(req.extra_indexes) + indexes.extend(project_indexes) + if len(indexes) > 1: + index, extra_indexes = indexes[0], indexes[1:] + index_sources = get_source_list(index=index, extra_indexes=extra_indexes, trusted_hosts=trusted_hosts, 
pypi_mirror=pypi_mirror, project=project) + if len(index_sources) > 1: + index_source, extra_index_sources = index_sources[0], index_sources[1:] + else: + index_source, extra_index_sources = index_sources[0], [] + return index_source, extra_index_sources + + @lru_cache() def get_pipenv_sitedir(): # type: () -> Optional[str] @@ -573,6 +652,29 @@ def create( markers_lookup=markers_lookup, skipped=skipped, clear=clear, pre=pre ) + @classmethod + def from_pipfile(cls, project=None, pipfile=None, dev=False, pre=False, clear=False): + # type: (Optional[Project], Optional[Pipfile], bool, bool, bool) -> "Resolver" + from pipenv.vendor.vistir.path import create_tracked_tempdir + if not project: + from pipenv.core import project + if not pipfile: + pipfile = project._pipfile + req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-") + index_lookup, markers_lookup = {}, {} + deps = set() + if dev: + deps.update(set([req.as_line() for req in pipfile.dev_packages])) + deps.update(set([req.as_line() for req in pipfile.packages])) + constraints, skipped, index_lookup, markers_lookup = cls.get_metadata( + list(deps), index_lookup, markers_lookup, project, project.sources, + req_dir=req_dir, pre=pre, clear=clear + ) + return Resolver( + constraints, req_dir, project, project.sources, index_lookup=index_lookup, + markers_lookup=markers_lookup, skipped=skipped, clear=clear, pre=pre + ) + @property def pip_command(self): if self._pip_command is None: @@ -945,6 +1047,8 @@ def format_requirement_for_lockfile(req, markers_lookup, index_lookup, hashes=No if markers: entry.update({"markers": markers}) entry = translate_markers(entry) + if req.vcs or req.editable and entry.get("index"): + del entry["index"] return name, entry diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 40a49d48c0..dc76661999 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ 
b/pipenv/vendor/requirementslib/models/requirements.py @@ -1156,7 +1156,8 @@ def parse_link(self): def parse_markers(self): # type: () -> None if self.markers: - markers = PackagingRequirement("fakepkg; {0}".format(self.markers)).marker + marker_str = self.markers.replace('"', "'") + markers = PackagingRequirement("fakepkg; {0}".format(marker_str)).marker self.parsed_marker = markers @property @@ -1229,7 +1230,12 @@ def line_is_installable(self): def parse(self): # type: () -> None + self.line = self.line.strip() + if self.line.startswith('"'): + self.line = self.line.strip('"') self.line, self.markers = split_markers_from_line(self.parse_hashes().line) + if self.markers: + self.markers = self.markers.replace('"', "'") self.parse_extras() self.line = self.line.strip('"').strip("'").strip() if self.line.startswith("git+file:/") and not self.line.startswith( @@ -2614,16 +2620,28 @@ def get_line_instance(self): # type: () -> Line line_parts = [] if self.req: - line_parts.append(self.req.line_part) + if self.req.line_part.startswith("-e "): + line_parts.extend(self.req.line_part.split(" ", 1)) + else: + line_parts.append(self.req.line_part) if not self.is_vcs and not self.vcs and self.extras_as_pip: line_parts.append(self.extras_as_pip) if self._specifiers and not (self.is_file_or_url or self.is_vcs): line_parts.append(self._specifiers) if self.markers: - line_parts.append("; {0}".format(self.markers)) - if self.hashes_as_pip: + line_parts.append("; {0}".format(self.markers.replace('"', "'"))) + if self.hashes_as_pip and not (self.editable or self.vcs or self.is_vcs): line_parts.append(self.hashes_as_pip) - line = "".join(line_parts) + if self.editable: + if line_parts[0] == "-e": + line = "".join(line_parts[1:]) + else: + line = "".join(line_parts) + if self.markers: + line = '"{0}"'.format(line) + line = "-e {0}".format(line) + else: + line = "".join(line_parts) return Line(line) @property diff --git a/pipenv/vendor/requirementslib/models/utils.py 
b/pipenv/vendor/requirementslib/models/utils.py index 1c1c320d93..4b4979549d 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -504,6 +504,10 @@ def get_pyproject(path): def split_markers_from_line(line): # type: (AnyStr) -> Tuple[AnyStr, Optional[AnyStr]] """Split markers from a dependency""" + quote_chars = ["'", '"'] + line_quote = next(iter(quote for quote in quote_chars if line.startswith(quote)), None) + if line_quote and line.endswith(line_quote): + line = line.strip(line_quote) if not any(line.startswith(uri_prefix) for uri_prefix in SCHEME_LIST): marker_sep = ";" else: diff --git a/tests/fixtures/fake-package/.coveragerc b/tests/fixtures/fake-package/.coveragerc new file mode 100644 index 0000000000..1b3a057198 --- /dev/null +++ b/tests/fixtures/fake-package/.coveragerc @@ -0,0 +1,27 @@ +[run] +branch = True +parallel = True +source = src/fake_package/ + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +[html] +directory = htmlcov + +[xml] +output = coverage.xml diff --git a/tests/fixtures/fake-package/.editorconfig b/tests/fixtures/fake-package/.editorconfig new file mode 100644 index 0000000000..7470e9dbe1 --- /dev/null +++ b/tests/fixtures/fake-package/.editorconfig @@ -0,0 +1,27 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false + +[*.toml] +indent_size = 2 + +[*.{yaml,yml}] +indent_size = 2 + +# Makefiles always use tabs for indentation. 
+[Makefile] +indent_style = tab + +# Batch files use tabs for indentation, and old Notepad hates LF. +[*.bat] +indent_style = tab +end_of_line = crlf diff --git a/tests/fixtures/fake-package/.gitignore b/tests/fixtures/fake-package/.gitignore new file mode 100644 index 0000000000..ab621d861a --- /dev/null +++ b/tests/fixtures/fake-package/.gitignore @@ -0,0 +1,108 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.typeshed + +.vscode/ + +pip-wheel-metadata diff --git a/tests/fixtures/fake-package/.pre-commit-config.yaml b/tests/fixtures/fake-package/.pre-commit-config.yaml new file mode 100644 index 0000000000..7ecca7dc4e --- /dev/null +++ b/tests/fixtures/fake-package/.pre-commit-config.yaml @@ -0,0 +1,20 @@ +repos: + - repo: 
https://github.com/ambv/black + rev: stable + hooks: + - id: black + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.0.0 + hooks: + - id: flake8 + + - repo: https://github.com/asottile/seed-isort-config + rev: v1.7.0 + hooks: + - id: seed-isort-config + + - repo: https://github.com/pre-commit/mirrors-isort + rev: v4.3.9 + hooks: + - id: isort diff --git a/tests/fixtures/fake-package/.travis.yml b/tests/fixtures/fake-package/.travis.yml new file mode 100644 index 0000000000..84bf6c0397 --- /dev/null +++ b/tests/fixtures/fake-package/.travis.yml @@ -0,0 +1,38 @@ +language: python +sudo: false +cache: pip +dist: trusty + +matrix: + fast_finish: true + +install: + - "python -m pip install --upgrade pip pytest-timeout" + - "python -m pip install -e .[tests]" +script: + - "python -m pytest -v -n 8 tests/" + +jobs: + include: + - stage: test + - python: "3.7" + dist: xenial + sudo: required + - python: "3.6" + - python: "2.7" + - python: "3.5" + - python: "3.4" + - stage: packaging + python: "3.6" + install: + - "pip install --upgrade twine readme-renderer[md]" + script: + - "python setup.py sdist" + - "twine check dist/*" + - stage: coverage + python: "3.6" + install: + - "python -m pip install --upgrade pip pytest-timeout pytest-cov" + - "python -m pip install --upgrade -e .[tests]" + script: + - "python -m pytest -n auto --timeout 300 --cov=fake_package --cov-report=term-missing --cov-report=xml --cov-report=html tests" diff --git a/tests/fixtures/fake-package/LICENSE b/tests/fixtures/fake-package/LICENSE new file mode 100644 index 0000000000..0beb71e0a4 --- /dev/null +++ b/tests/fixtures/fake-package/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2019, Dan Ryan + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/tests/fixtures/fake-package/MANIFEST.in b/tests/fixtures/fake-package/MANIFEST.in new file mode 100644 index 0000000000..7ab57a2180 --- /dev/null +++ b/tests/fixtures/fake-package/MANIFEST.in @@ -0,0 +1,19 @@ +include LICENSE* README* +include CHANGELOG.rst +include pyproject.toml + +exclude .editorconfig +exclude .coveragerc +exclude .travis.yml +exclude tox.ini +exclude appveyor.yml +exclude Pipfile* + +recursive-include docs Makefile *.rst *.py *.bat +recursive-exclude docs requirements*.txt + +prune .github +prune docs/build +prune news +prune tasks +prune tests diff --git a/tests/fixtures/fake-package/Pipfile b/tests/fixtures/fake-package/Pipfile new file mode 100644 index 0000000000..284b77988c --- /dev/null +++ b/tests/fixtures/fake-package/Pipfile @@ -0,0 +1,14 @@ +[packages] +fake_package = { path = '.', editable = true, extras = ["dev", "tests"] } + +[dev-packages] +towncrier = '*' +sphinx = '*' +sphinx-rtd-theme = '*' + +[scripts] +release = 'inv release' +tests = "pytest -v tests" +draft = "towncrier --draft" +changelog = "towncrier" +build = "setup.py sdist bdist_wheel" diff --git a/tests/fixtures/fake-package/README.rst b/tests/fixtures/fake-package/README.rst new file mode 100644 index 0000000000..4256cd1f8e --- /dev/null +++ b/tests/fixtures/fake-package/README.rst @@ -0,0 +1,3 @@ +=============================================================================== +fake_package: A fake python package. 
+=============================================================================== diff --git a/tests/fixtures/fake-package/appveyor.yml b/tests/fixtures/fake-package/appveyor.yml new file mode 100644 index 0000000000..758f4cfcfd --- /dev/null +++ b/tests/fixtures/fake-package/appveyor.yml @@ -0,0 +1,61 @@ +build: off +version: 1.0.{build} +skip_branch_with_pr: true + +init: +- ps: >- + + git config --global core.sharedRepository true + + git config --global core.longpaths true + + git config --global core.autocrlf input + + if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod ` + https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | ` + Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { ` + Write-Host "There are newer queued builds for this pull request, skipping build." + Exit-AppveyorBuild + } + + If (($env:SKIP_NOTAG -eq "true") -and ($env:APPVEYOR_REPO_TAG -ne "true")) { + Write-Host "Skipping build, not at a tag." + Exit-AppveyorBuild + } + +environment: + GIT_ASK_YESNO: 'false' + APPVEYOR_SAVE_CACHE_ON_ERROR: 'true' + APPVEYOR_SKIP_FINALIZE_ON_EXIT: 'true' + SHELL: 'windows' + PYTHON_ARCH: '64' + PYTHONIOENCODING: 'utf-8' + + matrix: + # Unit and integration tests. + - PYTHON: "C:\\Python27" + RUN_INTEGRATION_TESTS: "True" + - PYTHON: "C:\\Python37-x64" + RUN_INTEGRATION_TESTS: "True" + # Unit tests only. 
+ - PYTHON: "C:\\Python36-x64" + - PYTHON: "C:\\Python34-x64" + - PYTHON: "C:\\Python35-x64" + +cache: +- '%LocalAppData%\pip\cache' +- '%LocalAppData%\pipenv\cache' + +install: + - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" + - "python --version" + - "python -m pip install --upgrade pip pytest-timeout" + - "python -m pip install -e .[tests]" + + +test_script: + # Shorten paths, workaround https://bugs.python.org/issue18199 + - "subst T: %TEMP%" + - "set TEMP=T:\\" + - "set TMP=T:\\" + - "python -m pytest -n auto -v tests" diff --git a/tests/fixtures/fake-package/docs/conf.py b/tests/fixtures/fake-package/docs/conf.py new file mode 100644 index 0000000000..3aded982fe --- /dev/null +++ b/tests/fixtures/fake-package/docs/conf.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +PACKAGE_DIR = os.path.join(ROOT, "src/fake_package") +sys.path.insert(0, PACKAGE_DIR) + + +# -- Project information ----------------------------------------------------- + +project = 'fake_package' +copyright = '2019, Dan Ryan ' +author = 'Dan Ryan ' + +# The short X.Y version +version = '0.0' +# The full version, including alpha/beta/rc tags +release = '0.0.0.dev0' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. 
+# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.viewcode', + 'sphinx.ext.todo', + 'sphinx.ext.intersphinx', + 'sphinx.ext.autosummary' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . +exclude_patterns = ['_build', '_man', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' +autosummary_generate = True + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". 
+html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'fake_packagedoc' +extlinks = { + 'issue': ('https://github.com/sarugaku/fake_package/issues/%s', '#'), + 'pull': ('https://github.com/sarugaku/fake_package/pull/%s', 'PR #'), +} +html_theme_options = { + 'display_version': True, + 'prev_next_buttons_location': 'bottom', + 'style_external_links': True, + 'vcs_pageview_mode': '', + # Toc options + 'collapse_navigation': True, + 'sticky_navigation': True, + 'navigation_depth': 4, + 'includehidden': True, + 'titles_only': False +} + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'fake_package.tex', 'fake_package Documentation', + 'Dan Ryan \\textless{}dan@danryan.co\\textgreater{}', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + (master_doc, 'fake_package', 'fake_package Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'fake_package', 'fake_package Documentation', + author, 'fake_package', 'A fake python package.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project +epub_author = author +epub_publisher = author +epub_copyright = copyright + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- + +# -- Options for todo extension ---------------------------------------------- + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True +intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} diff --git a/tests/fixtures/fake-package/docs/requirements.txt b/tests/fixtures/fake-package/docs/requirements.txt new file mode 100644 index 0000000000..82133027c9 --- /dev/null +++ b/tests/fixtures/fake-package/docs/requirements.txt @@ -0,0 +1,2 @@ +sphinx +sphinx_rtd_theme diff --git a/tests/fixtures/fake-package/news/.gitignore b/tests/fixtures/fake-package/news/.gitignore new file mode 100644 index 0000000000..f935021a8f --- /dev/null +++ b/tests/fixtures/fake-package/news/.gitignore @@ -0,0 +1 @@ +!.gitignore diff --git a/tests/fixtures/fake-package/pyproject.toml b/tests/fixtures/fake-package/pyproject.toml new file mode 100644 index 0000000000..e157956b39 --- /dev/null +++ b/tests/fixtures/fake-package/pyproject.toml @@ -0,0 +1,50 @@ +[build-system] +requires = ['setuptools>=40.8.0', 'wheel>=0.33.0'] + +[tool.black] +line-length = 90 +include = '\.pyi?$' +exclude = ''' +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.pyre_configuration + | \.venv + | _build + | buck-out + | build + | dist +) +''' + +[tool.towncrier] +package = 'fake-package' +package_dir = 'src' +filename = 'CHANGELOG.rst' +directory = 'news/' +title_format = '{version} ({project_date})' +issue_format = '`#{issue} `_' +template = 'tasks/CHANGELOG.rst.jinja2' + + [[tool.towncrier.type]] + directory = 'feature' + name = 'Features' + showcontent = true + + [[tool.towncrier.type]] + directory = 'bugfix' + name = 'Bug Fixes' + showcontent = true + + [[tool.towncrier.type]] + directory = 'trivial' + name = 'Trivial Changes' + showcontent = false + + [[tool.towncrier.type]] + directory = 'removal' + name = 'Removals and Deprecations' + showcontent = true diff --git a/tests/fixtures/fake-package/setup.cfg b/tests/fixtures/fake-package/setup.cfg new file mode 100644 index 0000000000..c357cea97a --- /dev/null +++ b/tests/fixtures/fake-package/setup.cfg @@ -0,0 +1,120 @@ +[metadata] +name 
= fake_package +description = A fake python package. +url = https://github.com/sarugaku/fake_package +author = Dan Ryan +author_email = dan@danryan.co +long_description = file: README.rst +license = ISC License +keywords = fake package test +classifier = + Development Status :: 1 - Planning + License :: OSI Approved :: ISC License (ISCL) + Operating System :: OS Independent + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.6 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.4 + Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Topic :: Software Development :: Libraries :: Python Modules + +[options.extras_require] +tests = + pytest + pytest-xdist + pytest-cov + pytest-timeout + readme-renderer[md] + twine +dev = + black;python_version>="3.6" + flake8 + flake8-bugbear;python_version>="3.5" + invoke + isort + mypy;python_version>="3.5" + parver + pre-commit + rope + wheel + +[options] +zip_safe = true +python_requires = >=2.6,!=3.0,!=3.1,!=3.2,!=3.3 +setup_requires = setuptools>=40.8.0 +install_requires = + invoke + attrs + +[bdist_wheel] +universal = 1 + +[tool:pytest] +strict = true +plugins = cov flake8 +addopts = -ra +testpaths = tests/ +norecursedirs = .* build dist news tasks docs +flake8-ignore = + docs/source/* ALL + tests/*.py ALL + setup.py ALL +filterwarnings = + ignore::DeprecationWarning + ignore::PendingDeprecationWarning + +[isort] +atomic = true +not_skip = __init__.py +line_length = 90 +indent = ' ' +multi_line_output = 3 +known_third_party = invoke,parver,pytest,setuptools,towncrier +known_first_party = + fake_package + tests +combine_as_imports=True +include_trailing_comma = True +force_grid_wrap=0 + +[flake8] +max-line-length = 90 +select = C,E,F,W,B,B950 +ignore = + # The default ignore list: + D203,F401,E123,E203,W503,E501,E402 + #E121,E123,E126,E226,E24,E704, + # Our additions: + # 
E123: closing bracket does not match indentation of opening bracket’s line + # E203: whitespace before ‘:’ + # E129: visually indented line with same indent as next logical line + # E222: multiple spaces after operator + # E231: missing whitespace after ',' + # D203: 1 blank line required before class docstring + # E402: module level import not at top of file + # E501: line too long (using B950 from flake8-bugbear) + # F401: Module imported but unused + # W503: line break before binary operator (not a pep8 issue, should be ignored) +exclude = + .tox, + .git, + __pycache__, + docs/source/*, + build, + dist, + tests/*, + *.pyc, + *.egg-info, + .cache, + .eggs, + setup.py, +max-complexity=13 + +[mypy] +ignore_missing_imports=true +follow_imports=skip +html_report=mypyhtml +python_version=2.7 diff --git a/tests/fixtures/fake-package/setup.py b/tests/fixtures/fake-package/setup.py new file mode 100644 index 0000000000..1d2c88f859 --- /dev/null +++ b/tests/fixtures/fake-package/setup.py @@ -0,0 +1,35 @@ +import ast +import os + +from setuptools import find_packages, setup + + +ROOT = os.path.dirname(__file__) + +PACKAGE_NAME = 'fake_package' + +VERSION = None + +with open(os.path.join(ROOT, 'src', PACKAGE_NAME.replace("-", "_"), '__init__.py')) as f: + for line in f: + if line.startswith('__version__ = '): + VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) + break +if VERSION is None: + raise EnvironmentError('failed to read version') + + +# Put everything in setup.cfg, except those that don't actually work? +setup( + # These really don't work. + package_dir={'': 'src'}, + packages=find_packages('src'), + + # I don't know how to specify an empty key in setup.cfg. + package_data={ + '': ['LICENSE*', 'README*'], + }, + + # I need this to be dynamic. 
+ version=VERSION, +) diff --git a/tests/fixtures/fake-package/src/fake_package/__init__.py b/tests/fixtures/fake-package/src/fake_package/__init__.py new file mode 100644 index 0000000000..b8023d8bc0 --- /dev/null +++ b/tests/fixtures/fake-package/src/fake_package/__init__.py @@ -0,0 +1 @@ +__version__ = '0.0.1' diff --git a/tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2 b/tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2 new file mode 100644 index 0000000000..8aff205734 --- /dev/null +++ b/tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2 @@ -0,0 +1,40 @@ +{% for section in sections %} +{% set underline = "-" %} +{% if section %} +{{section}} +{{ underline * section|length }}{% set underline = "~" %} + +{% endif %} +{% if sections[section] %} +{% for category, val in definitions.items() if category in sections[section] and category != 'trivial' %} + +{{ definitions[category]['name'] }} +{{ underline * definitions[category]['name']|length }} + +{% if definitions[category]['showcontent'] %} +{% for text, values in sections[section][category]|dictsort(by='value') %} +- {{ text }}{% if category != 'process' %} + {{ values|sort|join(',\n ') }} + {% endif %} + +{% endfor %} +{% else %} +- {{ sections[section][category]['']|sort|join(', ') }} + + +{% endif %} +{% if sections[section][category]|length == 0 %} + +No significant changes. + + +{% else %} +{% endif %} +{% endfor %} +{% else %} + +No significant changes. 
+ + +{% endif %} +{% endfor %} diff --git a/tests/fixtures/fake-package/tasks/__init__.py b/tests/fixtures/fake-package/tasks/__init__.py new file mode 100644 index 0000000000..a8cedab4ec --- /dev/null +++ b/tests/fixtures/fake-package/tasks/__init__.py @@ -0,0 +1,175 @@ +import pathlib +import shutil +import subprocess + +import invoke +import parver + +from towncrier._builder import ( + find_fragments, render_fragments, split_fragments, +) +from towncrier._settings import load_config + + +ROOT = pathlib.Path(__file__).resolve().parent.parent + +PACKAGE_NAME = 'fake_package' + +INIT_PY = ROOT.joinpath('src', PACKAGE_NAME, '__init__.py') + + +@invoke.task() +def typecheck(ctx): + src_dir = ROOT / "src" / PACKAGE_NAME + src_dir = src_dir.as_posix() + config_file = ROOT / "setup.cfg" + env = {"MYPYPATH": src_dir} + ctx.run(f"mypy {src_dir} --config-file={config_file}", env=env) + + +@invoke.task() +def clean(ctx): + """Clean previously built package artifacts. + """ + ctx.run(f'python setup.py clean') + dist = ROOT.joinpath('dist') + print(f'[clean] Removing {dist}') + if dist.exists(): + shutil.rmtree(str(dist)) + + +def _read_version(): + out = subprocess.check_output(['git', 'tag'], encoding='ascii') + try: + version = max(parver.Version.parse(v).normalize() for v in ( + line.strip() for line in out.split('\n') + ) if v) + except ValueError: + version = parver.Version.parse('0.0.0') + return version + + +def _write_version(v): + lines = [] + with INIT_PY.open() as f: + for line in f: + if line.startswith("__version__ = "): + line = f"__version__ = {repr(str(v))}\n".replace("'", '"') + lines.append(line) + with INIT_PY.open("w", newline="\n") as f: + f.write("".join(lines)) + + +def _render_log(): + """Totally tap into Towncrier internals to get an in-memory result. 
+ """ + config = load_config(ROOT) + definitions = config["types"] + fragments, fragment_filenames = find_fragments( + pathlib.Path(config["directory"]).absolute(), + config["sections"], + None, + definitions, + ) + rendered = render_fragments( + pathlib.Path(config["template"]).read_text(encoding="utf-8"), + config["issue_format"], + split_fragments(fragments, definitions), + definitions, + config["underlines"][1:], + False, # Don't add newlines to wrapped text. + ) + return rendered + + +REL_TYPES = ("major", "minor", "patch", "post") + + +def _bump_release(version, type_): + if type_ not in REL_TYPES: + raise ValueError(f"{type_} not in {REL_TYPES}") + index = REL_TYPES.index(type_) + next_version = version.base_version().bump_release(index=index) + print(f"[bump] {version} -> {next_version}") + return next_version + + +def _prebump(version, prebump): + next_version = version.bump_release(index=prebump).bump_dev() + print(f"[bump] {version} -> {next_version}") + return next_version + + +PREBUMP = 'patch' + + +@invoke.task(pre=[clean]) +def release(ctx, type_, repo, prebump=PREBUMP): + """Make a new release. + """ + if prebump not in REL_TYPES: + raise ValueError(f'{type_} not in {REL_TYPES}') + prebump = REL_TYPES.index(prebump) + + version = _read_version() + version = _bump_release(version, type_) + _write_version(version) + + # Needs to happen before Towncrier deletes fragment files. + tag_content = _render_log() + + ctx.run('towncrier') + + ctx.run(f'git commit -am "Release {version}"') + + tag_content = tag_content.replace('"', '\\"') + ctx.run(f'git tag -a {version} -m "Version {version}\n\n{tag_content}"') + + ctx.run(f'python setup.py sdist bdist_wheel') + + dist_pattern = f'{PACKAGE_NAME.replace("-", "[-_]")}-*' + artifacts = list(ROOT.joinpath('dist').glob(dist_pattern)) + filename_display = '\n'.join(f' {a}' for a in artifacts) + print(f'[release] Will upload:\n{filename_display}') + try: + input('[release] Release ready. 
ENTER to upload, CTRL-C to abort: ') + except KeyboardInterrupt: + print('\nAborted!') + return + + arg_display = ' '.join(f'"{n}"' for n in artifacts) + ctx.run(f'twine upload --repository="{repo}" {arg_display}') + + version = _prebump(version, prebump) + _write_version(version) + + ctx.run(f'git commit -am "Prebump to {version}"') + + +@invoke.task +def build_docs(ctx): + _current_version = _read_version() + minor = [str(i) for i in _current_version.release[:2]] + docs_folder = (ROOT / 'docs').as_posix() + if not docs_folder.endswith('/'): + docs_folder = '{0}/'.format(docs_folder) + args = ["--ext-autodoc", "--ext-viewcode", "-o", docs_folder] + args.extend(["-A", "'Dan Ryan '"]) + args.extend(["-R", str(_current_version)]) + args.extend(["-V", ".".join(minor)]) + args.extend(["-e", "-M", "-F", f"src/{PACKAGE_NAME}"]) + print("Building docs...") + ctx.run("sphinx-apidoc {0}".format(" ".join(args))) + + +@invoke.task +def clean_mdchangelog(ctx): + changelog = ROOT / "CHANGELOG.md" + content = changelog.read_text() + content = re.sub( + r"([^\n]+)\n?\s+\[[\\]+(#\d+)\]\(https://github\.com/sarugaku/[\w\-]+/issues/\d+\)", + r"\1 \2", + content, + flags=re.MULTILINE, + ) + changelog.write_text(content) diff --git a/tests/fixtures/fake-package/tox.ini b/tests/fixtures/fake-package/tox.ini new file mode 100644 index 0000000000..2bc8e1d286 --- /dev/null +++ b/tests/fixtures/fake-package/tox.ini @@ -0,0 +1,37 @@ +[tox] +envlist = + docs, packaging, py27, py35, py36, py37, coverage-report + +[testenv] +passenv = CI GIT_SSL_CAINFO +setenv = + LC_ALL = en_US.UTF-8 +deps = + coverage + -e .[tests] +commands = coverage run --parallel -m pytest --timeout 300 [] +install_command = python -m pip install {opts} {packages} +usedevelop = True + +[testenv:coverage-report] +deps = coverage +skip_install = true +commands = + coverage combine + coverage report + +[testenv:docs] +deps = + -r{toxinidir}/docs/requirements.txt + -e .[tests] +commands = + sphinx-build -d 
{envtmpdir}/doctrees -b html docs docs/build/html + sphinx-build -d {envtmpdir}/doctrees -b man docs docs/build/man + +[testenv:packaging] +deps = + check-manifest + readme_renderer +commands = + check-manifest + python setup.py check -m -r -s diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index a682d0dda2..6d936a7faf 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -223,17 +223,17 @@ def test_check_unused(PipenvInstance): with PipenvInstance(chdir=True) as p: with open('__init__.py', 'w') as f: contents = """ -import fake_package +import click import records import flask """.strip() f.write(contents) - p.pipenv('install requests fake_package flask') + p.pipenv('install requests click flask') - assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'fake_package', 'flask']) + assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'click', 'flask']), p.pipfile["packages"] c = p.pipenv('check --unused .') - assert 'fake_package' not in c.out + assert 'click' not in c.out assert 'flask' not in c.out diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index 4fb936b9dd..6d9f1368de 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -315,13 +315,13 @@ def test_skip_requirements_when_pipfile(PipenvInstance): contents = """ [packages] six = "*" -"fake_package" = "<0.12" +fake_package = "<0.12" """.strip() f.write(contents) c = p.pipenv("install") assert c.ok assert "fake_package" in p.pipfile["packages"] - assert "fake_package" in p.lockfile["default"] + assert "fake-package" in p.lockfile["default"] assert "six" in p.pipfile["packages"] assert "six" in p.lockfile["default"] assert "requests" not in p.pipfile["packages"] diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index 379731a417..00f9c7897b 100644 --- a/tests/integration/test_install_markers.py +++ 
b/tests/integration/test_install_markers.py @@ -27,7 +27,7 @@ def test_package_environment_markers(PipenvInstance): c = p.pipenv('install') assert c.return_code == 0 assert 'Ignoring' in c.out - assert 'markers' in p.lockfile['default']['fake_package'], p.lockfile["default"]["fake_package"] + assert 'markers' in p.lockfile['default']['fake-package'], p.lockfile["default"] c = p.pipenv('run python -c "import fake_package;"') assert c.return_code == 1 @@ -68,7 +68,7 @@ def test_specific_package_environment_markers(PipenvInstance): with open(p.pipfile_path, 'w') as f: contents = """ [packages] -fake_package = {version = "*", os_name = "== 'splashwear'"} +fake-package = {version = "*", os_name = "== 'splashwear'"} """.strip() f.write(contents) @@ -76,7 +76,7 @@ def test_specific_package_environment_markers(PipenvInstance): assert c.return_code == 0 assert 'Ignoring' in c.out - assert 'markers' in p.lockfile['default']['fake_package'] + assert 'markers' in p.lockfile['default']['fake-package'] c = p.pipenv('run python -c "import fake_package;"') assert c.return_code == 1 diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index 58df07bd02..7a69e59581 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -166,8 +166,8 @@ def test_install_editable_git_tag(PipenvInstance_NoPyPI): # ! 
This is failing @pytest.mark.index @pytest.mark.install @pytest.mark.needs_internet -def test_install_named_index_alias(PipenvInstance): - with PipenvInstance() as p: +def test_install_named_index_alias(PipenvInstance_NoPyPI): + with PipenvInstance_NoPyPI() as p: with open(p.pipfile_path, "w") as f: contents = """ [[source]] diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 44f68bfe05..a69d580e64 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -289,8 +289,8 @@ def test_complex_lock_deep_extras(PipenvInstance): @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.skip_lock @pytest.mark.needs_internet -def test_private_index_skip_lock(PipenvInstance): - with PipenvInstance() as p: +def test_private_index_skip_lock(PipenvInstance_NoPyPI): + with PipenvInstance_NoPyPI() as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] @@ -317,9 +317,9 @@ def test_private_index_skip_lock(PipenvInstance): @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.requirements @pytest.mark.needs_internet -def test_private_index_lock_requirements(PipenvInstance): +def test_private_index_lock_requirements(PipenvInstance_NoPyPI): # Don't use the local fake pypi - with PipenvInstance() as p: + with PipenvInstance_NoPyPI() as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] @@ -350,9 +350,9 @@ def test_private_index_lock_requirements(PipenvInstance): @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.requirements @pytest.mark.needs_internet -def test_private_index_mirror_lock_requirements(PipenvInstance): +def test_private_index_mirror_lock_requirements(PipenvInstance_NoPyPI): # Don't use the local fake pypi - with temp_environ(), PipenvInstance(chdir=True) as p: + with temp_environ(), PipenvInstance_NoPyPI(chdir=True) as p: # Using pypi.python.org as pipenv-test-public-package is not # 
included in the local pypi mirror mirror_url = os.environ.pop('PIPENV_TEST_INDEX', "https://pypi.kennethreitz.org/simple") @@ -371,7 +371,7 @@ def test_private_index_mirror_lock_requirements(PipenvInstance): [packages] six = {version = "*", index = "testpypi"} -requests = "*" +fake-package = "*" """.strip() f.write(contents) c = p.pipenv('install --pypi-mirror {0}'.format(mirror_url)) diff --git a/tests/pypi b/tests/pypi index 2c4b6de4d8..0801b3aecf 160000 --- a/tests/pypi +++ b/tests/pypi @@ -1 +1 @@ -Subproject commit 2c4b6de4d88d7d5732bdf0c9345ad10f8336abd3 +Subproject commit 0801b3aecfbe8385ea879860fb36477a13a4278b diff --git a/tests/pytest-pypi/pytest_pypi/app.py b/tests/pytest-pypi/pytest_pypi/app.py index fa494ea8cd..607d219933 100644 --- a/tests/pytest-pypi/pytest_pypi/app.py +++ b/tests/pytest-pypi/pytest_pypi/app.py @@ -3,17 +3,31 @@ import io import sys -import requests -from flask import Flask, redirect, abort, render_template, send_file, jsonify +if sys.version_info[:2] >= (3, 0): + from xmlrpc.client import ServerProxy +else: + from xmlrpclib import ServerProxy + from zipfile import is_zipfile from tarfile import is_tarfile +import requests +from flask import Flask, redirect, abort, render_template, send_file, jsonify + + app = Flask(__name__) session = requests.Session() packages = {} ARTIFACTS = {} + +def get_pypi_package_names(): + client = ServerProxy("https://pypi.org/pypi") + pypi_packages = set(client.list_packages()) + return pypi_packages + + class Package(object): """Package represents a collection of releases from one or more directories""" @@ -107,16 +121,22 @@ def prepare_packages(path): if not (os.path.exists(path) and os.path.isdir(path)): raise ValueError("{} is not a directory!".format(path)) for root, dirs, files in os.walk(path): + if all([setup_file in list(files) for setup_file in ("setup.py", "setup.cfg")]): + continue for file in files: if not file.startswith('.') and not file.endswith('.json'): package_name = 
os.path.basename(root) if package_name and package_name == "fixtures": prepare_fixtures(root) continue + package_name = package_name.replace("_", "-") if package_name not in packages: packages[package_name] = Package(package_name) packages[package_name].add_release(os.path.join(root, file)) + remaining = get_pypi_package_names() - set(list(packages.keys())) + for pypi_pkg in remaining: + packages[pypi_pkg] = Package(pypi_pkg) @app.route('/') @@ -136,10 +156,18 @@ def artifacts(): @app.route('/simple//') def simple_package(package): - if package in packages: + if package in packages and packages[package].releases: return render_template('package.html', package=packages[package]) else: - abort(404) + try: + r = requests.get("https://pypi.org/simple/{0}".format(package)) + r.raise_for_status() + except Exception: + abort(404) + else: + return render_template( + 'package_pypi.html', package_contents=r.text + ) @app.route('/artifacts//') diff --git a/tests/pytest-pypi/pytest_pypi/templates/package_pypi.html b/tests/pytest-pypi/pytest_pypi/templates/package_pypi.html new file mode 100644 index 0000000000..217d8aa0e8 --- /dev/null +++ b/tests/pytest-pypi/pytest_pypi/templates/package_pypi.html @@ -0,0 +1,4 @@ + +{% autoescape false %} + {{ package_contents }} +{% endautoescape %} From 9c3ee2a72b11dab3f151b686896362e307d91c8b Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 5 Jul 2019 02:55:47 -0400 Subject: [PATCH 08/18] Get rid of devpi Signed-off-by: Dan Ryan --- Pipfile.lock | 324 +----------------------- pipenv/core.py | 221 +++++++--------- pytest.ini | 2 +- tests/integration/conftest.py | 117 ++------- tests/integration/test_install_basic.py | 10 +- tests/integration/test_lock.py | 10 +- 6 files changed, 117 insertions(+), 567 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index f70c7ca198..b2586e8db5 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": 
"44189c3f4dc6499d20a5c0b5d1f875b0dc242943175e3e38a6e9719fc5860f7d" + "sha256": "f4d89c0aab5c4e865f8c96ba24613fb1e66bae803a3ceaeadb6abf0061898091" }, "pipfile-spec": 6, "requires": {}, @@ -37,29 +37,6 @@ ], "version": "==1.4.3" }, - "argon2-cffi": { - "hashes": [ - "sha256:1029fef2f7808a89e3baa306f5ace36e768a2d847ee7b056399adcd7707f6256", - "sha256:206857d870c6ca3c92514ca70a3c371be47383f7ae6a448f5a16aa17baa550ba", - "sha256:3558a7e22b886efad0c99b23b9be24880213b4e2d1630095459978cfcae570e2", - "sha256:457fd6de741859aa91c750ffad97f12675c4356047e43392c5fb21f5d9f48b24", - "sha256:4a1daa9f6960cdbdb865efcabac4158693459f52e7582c9f8a7c92dc61cdc8e1", - "sha256:4bfb603184ea678563c0f1f1872367e81a3d2b70646a627d38ccede68d7b9194", - "sha256:5d7493ed10e384b84b6dac862fe96c443297a25b991a8364d94a67b6cd1e9569", - "sha256:5fb080047517add8d27baeb38a314814b5ab9c72630606788909b3f60a8f054a", - "sha256:7453b16496b5629005a43c5f5707ef8a31fcfa5bb0ed34b5ba7b86a3cc9d02f2", - "sha256:81548a27b919861040cb928a350733f4f9455dd67c7d1ba92eb5960a1d7f8b26", - "sha256:84fd768d523f87097d572cdfb98e868cdbdc8e80e3d444787fd32e7f6ae25b02", - "sha256:8b4cf6c0298f33b92fcd50f19899175b7421690fc8bc6ac68368320c158cbf51", - "sha256:af6a4799411eee3f7133fead973727f5fefacd18ea23f51039e70cae51ceb109", - "sha256:df7d60a4cf58dc08319fedc0506b42ec0fa5221c6e1f9e2e89fcddff92507390", - "sha256:f9072e9f70185a57e36228d34aad4bb644e6a8b4fd6a45f856c666f38f6de96c", - "sha256:fbae1d08b52f9a791500c650ab51ba00e374eaeccb5dbaa41b99dab4fd4115e8", - "sha256:fe91e3bd95aeae70366693dcc970db03a71619d19df6fbaabf662c3b3c54cdf8", - "sha256:fec86ee6f913154846171f66ee30c893c0cde3d434911f8b31c1f84a9aea410e" - ], - "version": "==19.1.0" - }, "arpeggio": { "hashes": [ "sha256:a5258b84f76661d558492fa87e42db634df143685a0e51802d59cae7daad8732", @@ -135,39 +112,6 @@ ], "version": "==2019.6.16" }, - "cffi": { - "hashes": [ - "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774", - 
"sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d", - "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90", - "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b", - "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63", - "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45", - "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25", - "sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3", - "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b", - "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647", - "sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016", - "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4", - "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb", - "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753", - "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7", - "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9", - "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f", - "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8", - "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f", - "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc", - "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42", - "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3", - "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909", - "sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45", - "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d", - "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512", - "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff", 
- "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201" - ], - "version": "==1.12.3" - }, "chardet": { "hashes": [ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", @@ -221,27 +165,6 @@ ], "version": "==4.4.0" }, - "devpi-client": { - "hashes": [ - "sha256:05398402d4335a8173d9118df409d16e085a6dacf54300851212d2f6370e1497", - "sha256:e9e19e87c61a75bc93137553db69554c69efb82cfc7ee83e0305cf6abfc91aa0" - ], - "version": "==4.4.0" - }, - "devpi-common": { - "hashes": [ - "sha256:2c7a6471c0f5b07ac9257adec3b3c3a89193ee672fdeb0a6f29487dc9d675e0c", - "sha256:c743abd38447258e27cdb733fa905c275e5dd3eeae25bab9ff59182a1083ed91" - ], - "version": "==3.3.2" - }, - "devpi-server": { - "hashes": [ - "sha256:96ab6390ea8aa2e80b4acdcf8e37f105af6ce7d0cb4efa18ba82de8e8b6c91ad", - "sha256:e92de95c869927ba628d25b024c460b63740d52ebba11dae79146d8cc3b6a033" - ], - "version": "==4.9.0" - }, "docutils": { "hashes": [ "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", @@ -276,13 +199,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.6.0" }, - "filelock": { - "hashes": [ - "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59", - "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836" - ], - "version": "==3.0.12" - }, "flake8": { "hashes": [ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661", @@ -337,14 +253,6 @@ "markers": "python_version < '3.2'", "version": "==3.2.0" }, - "hupper": { - "hashes": [ - "sha256:5869ec2a46ba8ad481b0a27ca68f3e01dc7d3424925b7c872d9fcdff44b43442", - "sha256:8532d116fef1f89add74dbd8d5e6541cb3278b04f4fe9780a1356cb6adba1141" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.8.1" - }, "idna": { "hashes": [ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", @@ -506,31 +414,6 @@ "ref": 
"a2ba0b30c86339cae5ef3a03046fc9c583452c40", "version": "==0.3.1.dev0" }, - "passlib": { - "extras": [ - "argon2" - ], - "hashes": [ - "sha256:3d948f64138c25633613f303bcc471126eae67c04d5e3f6b7b8ce6242f8653e0", - "sha256:43526aea08fa32c6b6dbbbe9963c4c767285b78147b7437597f992812f69d280" - ], - "version": "==1.7.1" - }, - "pastedeploy": { - "hashes": [ - "sha256:d423fb9d51fdcf853aa4ff43ac7ec469b643ea19590f67488122d6d0d772350a", - "sha256:fe53697ec2754703096b75d0ba29112b0590b4ce46726fe4f9408fd006e4eefc" - ], - "version": "==2.0.1" - }, - "path.py": { - "hashes": [ - "sha256:de7cd643affbc23e56533a6e8d551ecdee4983501a08c24e4e71565202d8cdaa", - "sha256:ea40833e76c50485fffd3e094d52e9e8701ba8c62a3b8f67c655c28a9538aac1" - ], - "index": "pypi", - "version": "==11.5.2" - }, "pathlib2": { "hashes": [ "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", @@ -561,22 +444,6 @@ ], "version": "==1.5.0.1" }, - "plaster": { - "hashes": [ - "sha256:215c921a438b5349931fd7df9a5a11a3572947f20f4bc6dd622ac08f1c3ba249", - "sha256:8351c7c7efdf33084c1de88dd0f422cbe7342534537b553c49b857b12d98c8c3" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.0" - }, - "plaster-pastedeploy": { - "hashes": [ - "sha256:391d93a4e1ff81fc3bae27508ebb765b61f1724ae6169f83577f06b6357be7fd", - "sha256:7c8aa37c917b615c70bf942b24dc1e0455c49f62f1a2214b1a0dd98871644bbb" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.7" - }, "pluggy": { "hashes": [ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", @@ -585,21 +452,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.12.0" }, - "psutil": { - "hashes": [ - "sha256:028a1ec3c6197eadd11e7b46e8cc2f0720dc18ac6d7aabdb8e8c0d6c9704f000", - "sha256:503e4b20fa9d3342bcf58191bbc20a4a5ef79ca7df8972e6197cc14c5513e73d", - 
"sha256:863a85c1c0a5103a12c05a35e59d336e1d665747e531256e061213e2e90f63f3", - "sha256:954f782608bfef9ae9f78e660e065bd8ffcfaea780f9f2c8a133bb7cb9e826d7", - "sha256:b6e08f965a305cd84c2d07409bc16fbef4417d67b70c53b299116c5b895e3f45", - "sha256:bc96d437dfbb8865fc8828cf363450001cb04056bbdcdd6fc152c436c8a74c61", - "sha256:cf49178021075d47c61c03c0229ac0c60d5e2830f8cab19e2d88e579b18cdb76", - "sha256:d5350cb66690915d60f8b233180f1e49938756fb2d501c93c44f8fb5b970cc63", - "sha256:eba238cf1989dfff7d483c029acb0ac4fcbfc15de295d682901f0e2497e6781a" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==5.6.3" - }, "py": { "hashes": [ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", @@ -616,12 +468,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.5.0" }, - "pycparser": { - "hashes": [ - "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" - ], - "version": "==2.19" - }, "pyflakes": { "hashes": [ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", @@ -646,14 +492,6 @@ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.0" }, - "pyramid": { - "hashes": [ - "sha256:51bf64647345237c00d2fe558935e0e4938c156e29f17e203457fd8e1d757dc7", - "sha256:d80ccb8cfa550139b50801591d4ca8a5575334adb493c402fce2312f55d07d66" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.10.4" - }, "pytest": { "hashes": [ "sha256:4a784f1d4f2ef198fe9b7aef793e9fa1a3b2f84e822d9b3a64a181293a572d45", @@ -662,24 +500,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==4.6.3" }, - "pytest-devpi-server": { - "hashes": [ - "sha256:1e1a4f4783f6833e49ae72956e68694b26de85e094d39c9cc6e5900774ecf2b9", - "sha256:a8f37e448d0a8c11c10f51b48a2bae4007431786223c39c3e22343e41f3aa8ee", - 
"sha256:c54e2ad77f9e2031d0284038b2bd1044debfcd1965be81c3d990df39e80503a3", - "sha256:d5e06acdd89966417d781d93cd08a2f1c21265bc06d5e4c1dd9309cdd0af988f" - ], - "version": "==1.7.0" - }, - "pytest-fixture-config": { - "hashes": [ - "sha256:1413e5e2c6572a3d7709de7ad69dc35004393d777a7883c8431b6f78a2e28fd0", - "sha256:41a17417721f6862ce6b40e3280fddd8e1659b2c306ec46b237d7021fec5218e", - "sha256:9bda6a817a3ac91a118dd42274cb3cc42dc0290a11317a7217d17eaae82800c5", - "sha256:a0e35e239e70fa12614bbe9ca51d3238fbeb89519deb80cd365b487665a666b0" - ], - "version": "==1.7.0" - }, "pytest-forked": { "hashes": [ "sha256:5fe33fbd07d7b1302c95310803a5e5726a4ff7f19d5a542b7ce57c76fed8135f", @@ -691,24 +511,6 @@ "editable": true, "path": "./tests/pytest-pypi" }, - "pytest-server-fixtures": { - "hashes": [ - "sha256:0fa5b1be6a84180e50ff91a58580e81ad3eb45828878a07942fbe384fcd86d1f", - "sha256:3d93f2ca4bb0a949a55cbdd3598fc44bc3199277dd6b31be39df7f7ebb7a3280", - "sha256:42a6020e60fd0c362dae0a594777b85e6b4a6d84ff3972ac3261e7de5f2f27fc", - "sha256:716e8911e0184d0fd41aa04c2980f04f7bf1d603d90d40de4817b8d6f7b5c7d6" - ], - "version": "==1.7.0" - }, - "pytest-shutil": { - "hashes": [ - "sha256:03c67282a0c520a790ca8db6f65e18851fae3786f45e3ae34e8d9fccbf266a72", - "sha256:343a6902a8ed0cbd29cf8954e2726382228a2ad2f5f7eac589b0d0dff878d806", - "sha256:b3568a675cb092c9b15c789ebd3046b79cfaca476868939748729d14557a98ff", - "sha256:d8165261de76e7508505c341d94c02b113dc963f274543abca74dbfabd021261" - ], - "version": "==1.7.0" - }, "pytest-tap": { "hashes": [ "sha256:3b05ec931424bbe44e944726b68f7ef185bb6d25ce9ce21ac52c9af7ffa9b506", @@ -724,14 +526,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.29.0" }, - "python-dateutil": { - "hashes": [ - "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", - "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" - ], - "markers": "python_version >= '2.7' and python_version not in 
'3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.0" - }, "pytz": { "hashes": [ "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda", @@ -746,13 +540,6 @@ ], "version": "==24.0" }, - "repoze.lru": { - "hashes": [ - "sha256:0429a75e19380e4ed50c0694e26ac8819b4ea7851ee1fc7583c8572db80aff77", - "sha256:f77bf0e1096ea445beadd35f3479c5cff2aa1efe604a133e67150bc8630a62ea" - ], - "version": "==0.7" - }, "requests": { "hashes": [ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", @@ -784,30 +571,6 @@ "index": "pypi", "version": "==0.14.0" }, - "ruamel.yaml": { - "hashes": [ - "sha256:17dbf6b7362e7aee8494f7a0f5cffd44902a6331fe89ef0853b855a7930ab845", - "sha256:23731c9efb79f3f5609dedffeb6c5c47a68125fd3d4b157d9fc71b1cd49076a9", - "sha256:2bbdd598ae57bac20968cf9028cc67d37d83bdb7942a94b9478110bc72193148", - "sha256:34586084cdd60845a3e1bece2b58f0a889be25450db8cc0ea143ddf0f40557a2", - "sha256:35957fedbb287b01313bb5c556ffdc70c0277c3500213b5e73dfd8716f748d77", - "sha256:414cb87a40974a575830b406ffab4ab8c6cbd82eeb73abd2a9d1397c1f0223e1", - "sha256:428775be75db68d908b17e4e8dda424c410222f170dc173246aa63e972d094b3", - "sha256:514f670f7d36519bda504d507edfe63e3c20489f86c86d42bc4d9a6dbdf82c7b", - "sha256:5cb962c1ac6887c5da29138fbbe3b4b7705372eb54e599907fa63d4cd743246d", - "sha256:5f6e30282cf70fb7754e1a5f101e27b5240009766376e131b31ab49f14fe81be", - "sha256:86f8e010af6af0b4f42de2d0d9b19cb441e61d3416082186f9dd03c8552d13ad", - "sha256:8d47ed1e557d546bd2dfe54f504d7274274602ff7a0652cde84c258ad6c2d96d", - "sha256:98668876720bce1ac08562d8b93a564a80e3397e442c7ea19cebdcdf73da7f74", - "sha256:9e1f0ddc18d8355dcf5586a5d90417df56074f237812b8682a93b62cca9d2043", - "sha256:a7bc812a72a79d6b7dbb96fa5bee3950464b65ec055d3abc4db6572f2373a95c", - "sha256:b72e13f9f206ee103247b07afd5a39c8b1aa98e8eba80ddba184d030337220ba", - "sha256:bcff8ea9d916789e85e24beed8830c157fb8bc7c313e554733a8151540e66c01", - "sha256:c76e78b3bab652069b8d6f7889b0e72f3455c2b854b2e0a8818393d149ad0a0d" - ], - 
"markers": "python_version >= '3.5'", - "version": "==0.15.97" - }, "scandir": { "hashes": [ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", @@ -878,12 +641,6 @@ "markers": "sys_platform == 'linux'", "version": "==0.8.5" }, - "strictyaml": { - "hashes": [ - "sha256:06d7100587695a0edfabd772a6c6fb69071fc38c413df599e22dfd40e52f5fad" - ], - "version": "==1.0.1" - }, "tap.py": { "hashes": [ "sha256:8ad62ba6898fcef4913c67d468d0c4beae3109b74c03363538145e31b1840b29", @@ -912,14 +669,6 @@ ], "version": "==19.2.0" }, - "tox": { - "hashes": [ - "sha256:f5c8e446b51edd2ea97df31d4ded8c8b72e7d6c619519da6bb6084b9dd5770f9", - "sha256:f87fd33892a2df0950e5e034def9468988b8d008c7e9416be665fcc0dd45b14f" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==3.12.1" - }, "tqdm": { "hashes": [ "sha256:14a285392c32b6f8222ecfbcd217838f88e11630affe9006cd0e94c7eff3cb61", @@ -928,13 +677,6 @@ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==4.32.2" }, - "translationstring": { - "hashes": [ - "sha256:4ee44cfa58c52ade8910ea0ebc3d2d84bdcad9fa0422405b1801ec9b9a65b72d", - "sha256:e26c7bf383413234ed442e0980a2ebe192b95e3745288a8fd2805156d27515b4" - ], - "version": "==1.3" - }, "twine": { "hashes": [ "sha256:0fb0bfa3df4f62076cab5def36b1a71a2e4acb4d1fa5c97475b048117b1a6446", @@ -960,13 +702,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", "version": "==1.25.3" }, - "venusian": { - "hashes": [ - "sha256:2f2d077a1eedc3fda40425f65687c8c494da7e83d7c23bc2c4d1a40eb3ca5b6d", - "sha256:64ec8285b80b110d0ae5db4280e90e31848a59db98db1aba4d7d46f48ce91e3e" - ], - "version": "==1.2.0" - }, "virtualenv": { "hashes": [ "sha256:b7335cddd9260a3dd214b73a2521ffc09647bde3e9457fcca31dc3be3999d04a", @@ -983,13 +718,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.5.3" }, - 
"waitress": { - "hashes": [ - "sha256:4e2a6e6fca56d6d3c279f68a2b2cc9b4798d834ea3c3a9db3e2b76b6d66f4526", - "sha256:90fe750cd40b282fae877d3c866255d485de18e8a232e93de42ebd9fb750eebb" - ], - "version": "==1.3.0" - }, "wcwidth": { "hashes": [ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", @@ -1004,14 +732,6 @@ ], "version": "==0.5.1" }, - "webob": { - "hashes": [ - "sha256:05aaab7975e0ee8af2026325d656e5ce14a71f1883c52276181821d6d5bf7086", - "sha256:36db8203c67023d68c1b00208a7bf55e3b10de2aa317555740add29c619de12b" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.8.5" - }, "werkzeug": { "hashes": [ "sha256:865856ebb55c4dcd0630cdd8f3331a1847a819dda7e8c750d3db6f2aa6c0209c", @@ -1027,48 +747,6 @@ ], "markers": "python_version >= '2.7'", "version": "==0.5.1" - }, - "zope.deprecation": { - "hashes": [ - "sha256:0d453338f04bacf91bbfba545d8bcdf529aa829e67b705eac8c1a7fdce66e2df", - "sha256:f1480b74995958b24ce37b0ef04d3663d2683e5d6debc96726eff18acf4ea113" - ], - "version": "==4.4.0" - }, - "zope.interface": { - "hashes": [ - "sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c", - "sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b", - "sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02", - "sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f", - "sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5", - "sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375", - "sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487", - "sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2", - "sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0", - "sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b", - "sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63", - 
"sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39", - "sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745", - "sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc", - "sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2", - "sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa", - "sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1", - "sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc", - "sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98", - "sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97", - "sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab", - "sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127", - "sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d", - "sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe", - "sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891", - "sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1", - "sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b", - "sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966", - "sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==4.6.0" } } } diff --git a/pipenv/core.py b/pipenv/core.py index b5bad11ac1..49b27afe6e 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -37,14 +37,15 @@ get_canonical_names, is_pinned, is_pypi_url, is_required_version, is_star, is_valid_url, parse_indexes, pep423_name, prepare_pip_source_args, proper_case, python_version, venv_resolve_deps, run_command, - is_python_command, find_python, make_posix, interrupt_handled_subprocess + is_python_command, find_python, make_posix, 
interrupt_handled_subprocess, + get_indexes_from_requirement, get_source_list, get_project_index, ) if is_type_checking(): - from typing import Dict, List, Mapping, Optional, Union + from typing import Dict, List, Mapping, Optional, Union, Text from pipenv.vendor.requirementslib.models.requirements import Requirement - TSourceDict = Dict[str, Union[str, bool]] + TSourceDict = Dict[Text, Union[Text, bool]] # Packages that should be ignored later. @@ -690,8 +691,10 @@ def _cleanup_procs(procs, failed_deps_queue, retry=True): def batch_install(deps_list, procs, failed_deps_queue, requirements_dir, no_deps=True, ignore_hashes=False, allow_global=False, blocking=False, pypi_mirror=None, - retry=True): + retry=True, sequential_deps=None): from .vendor.requirementslib.models.utils import strip_extras_markers_from_requirement + if sequential_deps is None: + sequential_deps = [] failed = (not retry) install_deps = not no_deps if not failed: @@ -699,31 +702,30 @@ def batch_install(deps_list, procs, failed_deps_queue, else: label = INSTALL_LABEL2 + deps_to_install = deps_list[:] + deps_to_install.extend(sequential_deps) + sequential_dep_names = [d.name for d in sequential_deps] + deps_list_bar = progress.bar( - deps_list, width=32, + deps_to_install, width=32, label=label ) + + indexes = [] trusted_hosts = [] # Install these because for dep in deps_list_bar: + extra_indexes = [] if dep.req.req: dep.req.req = strip_extras_markers_from_requirement(dep.req.req) if dep.markers: dep.markers = str(strip_extras_markers_from_requirement(dep.get_markers())) - index = None - if dep.index: - index = project.find_source(dep.index) - indexes.append(index) - if not index.get("verify_ssl", False): - trusted_hosts.append(urllib3_util.parse_url(index.get("url")).host) # Install the module. 
is_artifact = False if no_deps: link = getattr(dep.req, "link", None) - is_wheel = False - if link: - is_wheel = link.is_wheel + is_wheel = getattr(link, "is_wheel", False) if link else False if dep.is_file_or_url and (dep.is_direct_url or any( dep.req.uri.endswith(ext) for ext in ["zip", "tar.gz"] )): @@ -734,12 +736,6 @@ def batch_install(deps_list, procs, failed_deps_queue, install_deps = True no_deps = False - extra_indexes = [] - if not index and indexes: - index = next(iter(indexes)) - if len(indexes) > 1: - extra_indexes = indexes[1:] - with vistir.contextmanagers.temp_environ(): if not allow_global: os.environ["PIP_USER"] = vistir.compat.fs_str("0") @@ -754,7 +750,7 @@ def batch_install(deps_list, procs, failed_deps_queue, allow_global=allow_global, no_deps=not install_deps, block=any([dep.editable, dep.is_vcs, blocking]), - index=index, + index=dep.index, requirements_dir=requirements_dir, pypi_mirror=pypi_mirror, trusted_hosts=trusted_hosts, @@ -762,11 +758,13 @@ def batch_install(deps_list, procs, failed_deps_queue, use_pep517=not failed, ) c.dep = dep - if dep.is_vcs or dep.editable: + # if dep.is_vcs or dep.editable: + is_sequential = sequential_deps and dep.name in sequential_dep_names + if is_sequential: c.block() procs.put(c) - if procs.full() or procs.qsize() == len(deps_list): + if procs.full() or procs.qsize() == len(deps_list) or is_sequential: _cleanup_procs(procs, failed_deps_queue, retry=retry) @@ -834,20 +832,33 @@ def do_install_dependencies( failed_deps_queue = queue.Queue() if skip_lock: ignore_hashes = True - + editable_or_vcs_deps = [dep for dep in deps_list if (dep.editable or dep.vcs)] + normal_deps = [dep for dep in deps_list if not (dep.editable or dep.vcs)] install_kwargs = { "no_deps": no_deps, "ignore_hashes": ignore_hashes, "allow_global": allow_global, - "blocking": not concurrent, "pypi_mirror": pypi_mirror + "blocking": not concurrent, "pypi_mirror": pypi_mirror, + "sequential_deps": editable_or_vcs_deps } - # with 
project.environment.activated(): batch_install( - deps_list, procs, failed_deps_queue, requirements_dir, **install_kwargs + normal_deps, procs, failed_deps_queue, requirements_dir, **install_kwargs ) if not procs.empty(): _cleanup_procs(procs, failed_deps_queue) + # click.echo(crayons.normal( + # decode_for_output("Installing editable and vcs dependencies…"), bold=True + # )) + + # install_kwargs.update({"blocking": True}) + # # XXX: All failed and editable/vcs deps should be installed in sequential mode! + # procs = queue.Queue(maxsize=1) + # batch_install( + # editable_or_vcs_deps, procs, failed_deps_queue, requirements_dir, + # **install_kwargs + # ) + # Iterate over the hopefully-poorly-packaged dependencies… if not failed_deps_queue.empty(): click.echo( @@ -857,10 +868,7 @@ def do_install_dependencies( while not failed_deps_queue.empty(): failed_dep = failed_deps_queue.get() retry_list.append(failed_dep) - install_kwargs.update({ - "retry": False, - "blocking": True, - }) + install_kwargs.update({"retry": False}) batch_install( retry_list, procs, failed_deps_queue, requirements_dir, **install_kwargs ) @@ -1323,54 +1331,6 @@ def get_pip_args( return list(vistir.misc.dedup(arg_set)) -def get_project_index(index=None, trusted_hosts=None): - # type: (Optional[Union[str, TSourceDict]], Optional[List[str]]) -> TSourceDict - from .vendor.urllib3.util import parse_url - if trusted_hosts is None: - trusted_hosts = [] - if isinstance(index, vistir.compat.Mapping): - return index - try: - source = project.find_source(index) - except SourceNotFound: - index_url = parse_url(index) - src_name = project.src_name_from_url(index) - verify_ssl = index_url.host not in trusted_hosts - source = {"url": index, "verify_ssl": verify_ssl, "name": src_name} - return source - - -def get_source_list( - index=None, # type: Optional[Union[str, TSourceDict]] - extra_indexes=None, # type: Optional[List[str]] - trusted_hosts=None, # type: Optional[List[str]] - pypi_mirror=None, # type: 
Optional[str] -): - # type: (...) -> List[TSourceDict] - sources = [] # type: List[TSourceDict] - if index: - sources.append(get_project_index(index)) - if extra_indexes: - if isinstance(extra_indexes, six.string_types): - extra_indexes = [extra_indexes,] - for source in extra_indexes: - extra_src = get_project_index(source) - if not sources or extra_src["url"] != sources[0]["url"]: - sources.append(extra_src) - else: - for source in project.pipfile_sources: - if not sources or source["url"] != sources[0]["url"]: - sources.append(source) - if not sources: - sources = project.pipfile_sources - if pypi_mirror: - sources = [ - create_mirror_source(pypi_mirror) if is_pypi_url(source["url"]) else source - for source in sources - ] - return sources - - def get_requirement_line( requirement, # type: Requirement src_dir=None, # type: Optional[str] @@ -1452,8 +1412,7 @@ def pip_install( trusted_hosts.extend(os.environ.get("PIP_TRUSTED_HOSTS", [])) if not allow_global: - src_dir = project.virtualenv_src_location - # src_dir = os.getenv("PIP_SRC", os.getenv("PIP_SRC_DIR", project.virtualenv_src_location)) + src_dir = os.getenv("PIP_SRC", os.getenv("PIP_SRC_DIR", project.virtualenv_src_location)) else: src_dir = os.getenv("PIP_SRC", os.getenv("PIP_SRC_DIR")) if requirement: @@ -1462,16 +1421,26 @@ def pip_install( elif not (requirement.is_vcs or requirement.editable or requirement.vcs): ignore_hashes = False line = None - if requirement.vcs: - line = requirement.line_instance.get_line( - with_prefix=True, with_hashes=False, with_markers=True, as_list=True - ) - else: - r = write_requirement_to_file( - requirement, requirements_dir=requirements_dir, src_dir=src_dir, - include_hashes=not ignore_hashes - ) # Try installing for each source in project.sources. 
+ if not index and requirement.index: + index = requirement.index + if index and not extra_indexes: + extra_indexes = list(project.sources) + if requirement and requirement.vcs or requirement.editable: + requirement.index = None + # Install dependencies when a package is a non-editable VCS dependency. + # Don't specify a source directory when using --system. + if not requirement.editable and no_deps is not True: + # Leave this off becauase old lockfiles don't have all deps included + # TODO: When can it be turned back on? + no_deps = False + elif requirement.editable and no_deps is None: + no_deps = True + + r = write_requirement_to_file( + requirement, requirements_dir=requirements_dir, src_dir=src_dir, + include_hashes=not ignore_hashes + ) sources = get_source_list( index, extra_indexes=extra_indexes, trusted_hosts=trusted_hosts, pypi_mirror=pypi_mirror @@ -1481,22 +1450,13 @@ def pip_install( if "--hash" not in fh.read(): ignore_hashes = True if environments.is_verbose(): - piplogger.setLevel(logging.INFO) + piplogger.setLevel(logging.WARN) if requirement: click.echo( crayons.normal("Installing {0!r}".format(requirement.name), bold=True), err=True, ) - if requirement and requirement.vcs: - # Install dependencies when a package is a non-editable VCS dependency. - # Don't specify a source directory when using --system. - if not requirement.editable and no_deps is not True: - # Leave this off becauase old lockfiles don't have all deps included - # TODO: When can it be turned back on? 
- no_deps = False - elif requirement.editable and no_deps is None: - no_deps = True pip_command = [which_pip(allow_global=allow_global), "install"] pip_args = get_pip_args( pre=pre, verbose=environments.is_verbose(), upgrade=True, @@ -2068,7 +2028,7 @@ def do_install( from .vendor.requirementslib.models.requirements import Requirement # make a tuple of (display_name, entry) - pkg_list = packages + ["-e {0}".format(pkg) for pkg in editable_packages] + pkg_list = packages + ['-e {0}'.format(pkg) for pkg in editable_packages] if not system and not project.virtualenv_exists: do_init( dev=dev, @@ -2123,21 +2083,33 @@ def do_install( pypi_mirror=pypi_mirror, ) if not c.ok: - sp.write_err(vistir.compat.fs_str( - "{0}: {1}".format( - crayons.red("WARNING"), - "Failed installing package {0}".format(pkg_line) + sp.write_err(u"{0}: {1}".format( + crayons.red("WARNING"), + vistir.compat.fs_str("Failed installing package {0}".format(pkg_line))) + ) + sp.write_err( + vistir.compat.fs_str(u"Error text: {0}".format(c.out)) + ) + sp.write_err( + vistir.compat.fs_str(u"{0}".format(c.err)) + ) + sp.write_err( + u"{0} An error occurred while installing {1}!".format( + crayons.red(u"Error: ", bold=True), crayons.green(pkg_line) ), - )) - sp.write_err(vistir.compat.fs_str( - "Error text: {0}".format(c.out) - )) - sp.write_err(vistir.compat.fs_str( - "{0}".format(c.err) - )) - raise RuntimeError(c.err) - if environments.is_verbose(): - click.echo(crayons.blue(format_pip_output(c.out))) + ) + sp.write_err(crayons.blue(vistir.compat.fs_str(format_pip_error(c.err)))) + if environments.is_verbose(): + sp.write_err(crayons.blue(vistir.compat.fs_str(format_pip_output(c.out)))) + if "setup.py egg_info" in c.err: + sp.write_err(vistir.compat.fs_str( + "This is likely caused by a bug in {0}. 
" + "Report this to its maintainers.".format( + crayons.green(pkg_requirement.name) + ) + )) + sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Installation Failed")) + sys.exit(1) except (ValueError, RuntimeError) as e: sp.write_err(vistir.compat.fs_str( "{0}: {1}".format(crayons.red("WARNING"), e), @@ -2145,7 +2117,7 @@ def do_install( sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format( "Installation Failed", )) - # sys.exit(1) + sys.exit(1) # Warn if --editable wasn't passed. if pkg_requirement.is_vcs and not pkg_requirement.editable and not PIPENV_RESOLVE_VCS: sp.write_err( @@ -2157,23 +2129,6 @@ def do_install( crayons.red("$ pipenv lock"), ) ) - # Ensure that package was successfully installed. - if c.return_code != 0: - sp.write_err(vistir.compat.fs_str( - "{0} An error occurred while installing {1}!".format( - crayons.red("Error: ", bold=True), crayons.green(pkg_line) - ), - )) - sp.write_err(vistir.compat.fs_str(crayons.blue(format_pip_error(c.err)))) - if "setup.py egg_info" in c.err: - sp.write_err(vistir.compat.fs_str( - "This is likely caused by a bug in {0}. 
" - "Report this to its maintainers.".format( - crayons.green(pkg_requirement.name) - ) - )) - sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Installation Failed")) - sys.exit(1) sp.write(vistir.compat.fs_str( u"{0} {1} {2} {3}{4}".format( crayons.normal(u"Adding", bold=True), diff --git a/pytest.ini b/pytest.ini index 8bbd020839..da966ec968 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,6 @@ [pytest] addopts = -ra -n auto -plugins = pytest_devpi_server xdist +plugins = xdist testpaths = tests ; Add vendor and patched in addition to the default list of ignored dirs ; Additionally, ignore tasks, news, test subdirectories and peeps directory diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index e5088d1922..3579cd84e3 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -24,8 +24,7 @@ from pipenv._compat import Path from pipenv.exceptions import VirtualenvActivationException from pipenv.vendor import delegator, toml, tomlkit -from pytest_pypi.app import prepare_fixtures -from _pytest_devpi_server import DevpiServer as _DevpiServer +from pytest_pypi.app import prepare_fixtures, prepare_packages as prepare_pypi_packages log = logging.getLogger(__name__) warnings.simplefilter("default", category=ResourceWarning) @@ -43,78 +42,6 @@ class ServerNotDead(Exception): pass -class DevpiServer(_DevpiServer): - - term_signal = signal.SIGTERM if not os.name == "nt" else signal.CTRL_C_EVENT - kill_signal = signal.SIGKILL if not os.name == "nt" else signal.CTRL_BREAK_EVENT - - def _find_and_kill(self, retries, signal): - log.debug("Killing server running at {}:{} using signal {}".format(self.hostname, self.port, signal)) - for _ in range(retries): - cd_path = "/" - pids = [] - netstat_cmd = "" - if sys.platform == "darwin": - netstat_cmd = "lsof -n -i:{} | grep LISTEN | awk '{{ print $2 }}'".format(self.port) - elif sys.platform == "linux": - netstat_cmd = ("netstat -anp 2>/dev/null | grep %s:%s | grep LISTEN | " - 
"awk '{ print $7 }' | cut -d'/' -f1" % (socket.gethostbyname(self.hostname), self.port)) - else: - procs = self.run("tasklist /NH /FI devpi-server.exe", capture=True) - pids = [ - task.strip().split()[1] for task in procs.strip().splitlines() - if "No tasks are running" not in task.strip() - ] - if netstat_cmd: - pids = [ - p.strip() for p in - self.run(netstat_cmd, capture=True, cd=cd_path).split('\n') - if p.strip() - ] - - if not pids: - # No PIDs remaining, server has died. - break - - for pid in pids: - try: - pid = int(pid) - except ValueError: - log.error("Can't determine port, process shutting down or owned by someone else") - else: - try: - os.kill(pid, signal) - except OSError as oe: - if oe.errno == errno.ESRCH: # Process doesn't appear to exist. - log.error("For some reason couldn't find PID {} to kill.".format(p)) - else: - raise - self.run("taskkill /f /pid %s" % pid, capture=False, check_rc=False) - time.sleep(self.kill_retry_delay) - else: - raise ServerNotDead("Server not dead after %d retries" % retries) - - def kill(self, retries=5): - """Kill all running versions of this server. - Just killing the thread.server pid isn't good enough, it may have spawned children. 
- """ - # Prevent traceback printed when the server goes away as we kill it - if self.server: - self.server.exit = True - - if self.dead: - return - - try: - self._find_and_kill(retries, self.term_signal) - except ServerNotDead: - log.error("Server not dead after %d retries, trying with SIGKILL" % retries) - try: - self._find_and_kill(retries, self.kill_signal) - except ServerNotDead: - log.error("Server still not dead, giving up") - - def check_internet(): has_internet = False for url in ("http://httpbin.org/ip", "http://clients3.google.com/generate_204"): @@ -174,18 +101,7 @@ def check_for_mercurial(): PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, 'pypi') WE_HAVE_HG = check_for_mercurial() prepare_fixtures(os.path.join(PYPI_VENDOR_DIR, "fixtures")) - - -@pytest.fixture(scope="session") -def pipenv_devpi_server(): - with DevpiServer(offline=False) as server: - server.start() - server.api("index", "-c", "pipenv", "bases=root/pypi", "volatile=False") - server.index = "pipenv" - for path in Path(PYPI_VENDOR_DIR).iterdir(): - if path.is_dir(): - server.api("upload", "--from-dir", path.as_posix()) - yield server +prepare_pypi_packages(PYPI_VENDOR_DIR) def pytest_runtest_setup(item): @@ -263,7 +179,7 @@ def isolate(create_tmpdir): fp.write( b"[user]\n\tname = pipenv\n\temail = pipenv@pipenv.org\n" ) - os.environ["GIT_CONFIG"] = fs_str(git_config_file) + # os.environ["GIT_CONFIG"] = fs_str(git_config_file) os.environ["GIT_CONFIG_NOSYSTEM"] = fs_str("1") os.environ["GIT_AUTHOR_NAME"] = fs_str("pipenv") os.environ["GIT_AUTHOR_EMAIL"] = fs_str("pipenv@pipenv.org") @@ -409,7 +325,7 @@ def __init__( self.pipfile_path = None self.chdir = chdir - if self.pypi: + if self.pypi and "PIPENV_PYPI_URL" not in os.environ: os.environ['PIPENV_PYPI_URL'] = fs_str('{0}'.format(self.pypi)) # os.environ['PIPENV_PYPI_URL'] = fs_str('{0}'.format(self.pypi.url)) # os.environ['PIPENV_TEST_INDEX'] = fs_str('{0}/simple'.format(self.pypi.url)) @@ -512,16 +428,17 @@ def finalize(): @pytest.fixture() 
-def PipenvInstance(pip_src_dir, monkeypatch, pipenv_devpi_server, pypi): +def PipenvInstance(pip_src_dir, monkeypatch, pypi): with temp_environ(), monkeypatch.context() as m: m.setattr(shutil, "rmtree", _rmtree_func) original_umask = os.umask(0o007) - os.environ["PIPENV_NOSPIN"] = fs_str("1") - os.environ["CI"] = fs_str("1") - os.environ['PIPENV_DONT_USE_PYENV'] = fs_str('1') - os.environ["PIPENV_TEST_INDEX"] = "{0}/{1}/{2}/+simple".format(pipenv_devpi_server.uri, pipenv_devpi_server.user, pipenv_devpi_server.index) - os.environ["PIPENV_PYPI_INDEX"] = pipenv_devpi_server.index - os.environ["ARTIFACT_PYPI_URL"] = pypi.url + m.setenv("PIPENV_NOSPIN", fs_str("1")) + m.setenv("CI", fs_str("1")) + m.setenv('PIPENV_DONT_USE_PYENV', fs_str('1')) + m.setenv("PIPENV_TEST_INDEX", "{0}/simple".format(pypi.url)) + m.setenv("PIPENV_PYPI_INDEX", "simple") + m.setenv("ARTIFACT_PYPI_URL", pypi.url) + m.setenv("PIPENV_PYPI_URL", pypi.url) warnings.simplefilter("ignore", category=ResourceWarning) warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*") try: @@ -531,13 +448,15 @@ def PipenvInstance(pip_src_dir, monkeypatch, pipenv_devpi_server, pypi): @pytest.fixture() -def PipenvInstance_NoPyPI(monkeypatch, pip_src_dir): +def PipenvInstance_NoPyPI(monkeypatch, pip_src_dir, pypi): with temp_environ(), monkeypatch.context() as m: m.setattr(shutil, "rmtree", _rmtree_func) original_umask = os.umask(0o007) - os.environ["PIPENV_NOSPIN"] = fs_str("1") - os.environ["CI"] = fs_str("1") - os.environ['PIPENV_DONT_USE_PYENV'] = fs_str('1') + m.setenv("PIPENV_NOSPIN", fs_str("1")) + m.setenv("CI", fs_str("1")) + m.setenv('PIPENV_DONT_USE_PYENV', fs_str('1')) + m.setenv("PIPENV_TEST_INDEX", "{0}/simple".format(pypi.url)) + m.setenv("ARTIFACT_PYPI_URL", pypi.url) warnings.simplefilter("ignore", category=ResourceWarning) warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*") try: diff --git a/tests/integration/test_install_basic.py 
b/tests/integration/test_install_basic.py index 6d9f1368de..c2709f8536 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -348,7 +348,7 @@ def test_install_does_not_extrapolate_environ(PipenvInstance): f.write( """ [[source]] -url = '${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple' +url = '${PYPI_URL}/simple' verify_ssl = true name = 'mockpi' """ @@ -357,14 +357,14 @@ def test_install_does_not_extrapolate_environ(PipenvInstance): # Ensure simple install does not extrapolate. c = p.pipenv("install") assert c.return_code == 0 - assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" - assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" + assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/simple" + assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/simple" # Ensure package install does not extrapolate. c = p.pipenv("install six") assert c.return_code == 0 - assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" - assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/${PIPENV_PYPI_INDEX}/+simple" + assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/simple" + assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/simple" @pytest.mark.editable diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index a69d580e64..4c22739573 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -393,16 +393,15 @@ def test_lock_updated_source(PipenvInstance): with open(p.pipfile_path, 'w') as f: contents = """ [[source]] -url = "{url}/${{MY_ENV_VAR}}/+simple" +url = "{url}/${{MY_ENV_VAR}}" [packages] requests = "==2.14.0" """.strip().format(url=p.pypi) - # """.strip().format(url=pypi.url) f.write(contents) with temp_environ(): - os.environ['MY_ENV_VAR'] = p.index + os.environ['MY_ENV_VAR'] = 'simple' c = p.pipenv('lock') assert c.return_code == 0 assert 
'requests' in p.lockfile['default'] @@ -410,12 +409,11 @@ def test_lock_updated_source(PipenvInstance): with open(p.pipfile_path, 'w') as f: contents = """ [[source]] -url = "{url}" +url = "{url}/simple" [packages] requests = "==2.14.0" - """.strip().format(url=p.index_url) - # """.strip().format(url=pypi.url) + """.strip().format(url=p.pypi) f.write(contents) c = p.pipenv('lock') From e57898ac585ccc5ddea6bb6e13c40f8f3fe78f47 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 5 Jul 2019 23:51:10 -0400 Subject: [PATCH 09/18] Fix windows test failure and leaky session from xmlrpc call Signed-off-by: Dan Ryan --- tests/integration/test_project.py | 2 +- tests/pytest-pypi/pytest_pypi/app.py | 34 ++++++++++++++++++++-------- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py index efdc03b698..cf1efa63f9 100644 --- a/tests/integration/test_project.py +++ b/tests/integration/test_project.py @@ -161,7 +161,7 @@ def test_include_editable_packages(PipenvInstance, testsroot, pathlib_tmpdir): with PipenvInstance(chdir=True) as p: with tarfile.open(source_path, "r:gz") as tarinfo: tarinfo.extractall(path=str(pathlib_tmpdir)) - c = p.pipenv('install -e {}'.format(package)) + c = p.pipenv('install -e {0!r}'.format(package)) assert c.return_code == 0 project = Project() assert "requests" in [ diff --git a/tests/pytest-pypi/pytest_pypi/app.py b/tests/pytest-pypi/pytest_pypi/app.py index 607d219933..fd7b64a11c 100644 --- a/tests/pytest-pypi/pytest_pypi/app.py +++ b/tests/pytest-pypi/pytest_pypi/app.py @@ -1,19 +1,22 @@ -import os -import json +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function +import contextlib import io +import json +import os import sys -if sys.version_info[:2] >= (3, 0): - from xmlrpc.client import ServerProxy -else: - from xmlrpclib import ServerProxy - -from zipfile import is_zipfile from tarfile import is_tarfile +from zipfile import is_zipfile 
import requests + from flask import Flask, redirect, abort, render_template, send_file, jsonify +if sys.version_info[:2] >= (3, 0): + from xmlrpc.client import ServerProxy +else: + from xmlrpclib import ServerProxy app = Flask(__name__) session = requests.Session() @@ -22,9 +25,20 @@ ARTIFACTS = {} +@contextlib.contextmanager +def xml_pypi_server(server): + session = requests.Session() + client = ServerProxy(server, session) + try: + yield client + finally: + session.close() + + def get_pypi_package_names(): - client = ServerProxy("https://pypi.org/pypi") - pypi_packages = set(client.list_packages()) + pypi_packages = set() + with xml_pypi_server("https://pypi.org/pypi") as client: + pypi_packages = set(client.list_packages()) return pypi_packages From 3cf15898c725ee3d116fb92769ae1d456f80f336 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 00:58:46 -0400 Subject: [PATCH 10/18] Remove devpi dependency Signed-off-by: Dan Ryan --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 178b3d3a91..d262eb354d 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ "parver", "invoke", ], - "tests": ["pytest", "pytest-devpi-server", "pytest-tap", "pytest-xdist", "flaky", "mock"], + "tests": ["pytest", "pytest-tap", "pytest-xdist", "flaky", "mock"], } # https://pypi.python.org/pypi/stdeb/0.8.5#quickstart-2-just-tell-me-the-fastest-way-to-make-a-deb From 8abaa06c2387c141b09dcfeee4146af2a12b4f0d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 01:02:16 -0400 Subject: [PATCH 11/18] Remove extraneous exception added to conftest.py Signed-off-by: Dan Ryan --- tests/integration/conftest.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 3579cd84e3..b05eacfa13 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -38,10 +38,6 @@ def try_internet(url="http://httpbin.org/ip", timeout=1.5): 
resp.raise_for_status() -class ServerNotDead(Exception): - pass - - def check_internet(): has_internet = False for url in ("http://httpbin.org/ip", "http://clients3.google.com/generate_204"): From 1630da82c0170a05f47a5aa7dec8a32f2a30b9a7 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 01:36:43 -0400 Subject: [PATCH 12/18] Pin pytest<5 Signed-off-by: Dan Ryan --- setup.py | 2 +- tests/integration/test_install_uri.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/setup.py b/setup.py index d262eb354d..d86d85e040 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ "parver", "invoke", ], - "tests": ["pytest", "pytest-tap", "pytest-xdist", "flaky", "mock"], + "tests": ["pytest<5.0", "pytest-tap", "pytest-xdist", "flaky", "mock"], } # https://pypi.python.org/pypi/stdeb/0.8.5#quickstart-2-just-tell-me-the-fastest-way-to-make-a-deb diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index 7a69e59581..b71df9651c 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -125,10 +125,10 @@ def test_local_vcs_urls_work(PipenvInstance, tmpdir): @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_editable_vcs_install(PipenvInstance_NoPyPI): # ! This is failing +def test_editable_vcs_install(PipenvInstance_NoPyPI): with PipenvInstance_NoPyPI(chdir=True) as p: c = p.pipenv( - "install -e git+https://github.com/kennethreitz/requests.git#egg=requests --verbose" + "install -e git+https://github.com/kennethreitz/requests.git#egg=requests" ) assert c.return_code == 0 assert "requests" in p.pipfile["packages"] @@ -145,12 +145,12 @@ def test_editable_vcs_install(PipenvInstance_NoPyPI): # ! This is failing @pytest.mark.tablib @pytest.mark.install @pytest.mark.needs_internet -def test_install_editable_git_tag(PipenvInstance_NoPyPI): # ! 
This is failing +def test_install_editable_git_tag(PipenvInstance_NoPyPI): # This uses the real PyPI since we need Internet to access the Git # dependency anyway. with PipenvInstance_NoPyPI(chdir=True) as p: c = p.pipenv( - "install -e git+https://github.com/benjaminp/six.git@1.11.0#egg=six --verbose" + "install -e git+https://github.com/benjaminp/six.git@1.11.0#egg=six" ) assert c.return_code == 0 assert "six" in p.pipfile["packages"] @@ -209,10 +209,10 @@ def test_install_local_vcs_not_in_lockfile(PipenvInstance): @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_get_vcs_refs(PipenvInstance_NoPyPI): # ! this is failing +def test_get_vcs_refs(PipenvInstance_NoPyPI): with PipenvInstance_NoPyPI(chdir=True) as p: c = p.pipenv( - "install -e git+https://github.com/benjaminp/six.git@1.9.0#egg=six --verbose" + "install -e git+https://github.com/benjaminp/six.git@1.9.0#egg=six" ) assert c.return_code == 0 assert "six" in p.pipfile["packages"] From e559b00a90080cf1d95d28c6620cca3981288467 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 02:01:48 -0400 Subject: [PATCH 13/18] Fix xmlrpc implementation Signed-off-by: Dan Ryan fix xmlrpc implementation Signed-off-by: Dan Ryan --- tests/pytest-pypi/pytest_pypi/app.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/tests/pytest-pypi/pytest_pypi/app.py b/tests/pytest-pypi/pytest_pypi/app.py index fd7b64a11c..95dbd076f4 100644 --- a/tests/pytest-pypi/pytest_pypi/app.py +++ b/tests/pytest-pypi/pytest_pypi/app.py @@ -10,13 +10,10 @@ from zipfile import is_zipfile import requests +from six.moves import xmlrpc_client from flask import Flask, redirect, abort, render_template, send_file, jsonify -if sys.version_info[:2] >= (3, 0): - from xmlrpc.client import ServerProxy -else: - from xmlrpclib import ServerProxy app = Flask(__name__) session = requests.Session() @@ -27,12 +24,12 @@ @contextlib.contextmanager def xml_pypi_server(server): - session = requests.Session() 
- client = ServerProxy(server, session) + transport = xmlrpc_client.Transport() + client = xmlrpc_client.ServerProxy(server, transport) try: yield client finally: - session.close() + transport.close() def get_pypi_package_names(): From e2b38d274131c901d51b2358c3a6f2faf6236a77 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 03:26:51 -0400 Subject: [PATCH 14/18] Update pythonfinder Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/__init__.py | 2 +- .../_vendor/pep514tools/environment.py | 3 +- pipenv/vendor/pythonfinder/models/path.py | 4 +- pipenv/vendor/pythonfinder/models/python.py | 37 ++++++++++++++----- pipenv/vendor/pythonfinder/models/windows.py | 11 ++++-- pipenv/vendor/pythonfinder/utils.py | 34 +++++++++++++++++ 6 files changed, 74 insertions(+), 17 deletions(-) diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index d1f70c3b3c..428599bdcd 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ b/pipenv/vendor/pythonfinder/__init__.py @@ -10,7 +10,7 @@ from .models import SystemPath, WindowsFinder from .pythonfinder import Finder -__version__ = "1.2.1" +__version__ = "1.2.2.dev0" logger = logging.getLogger(__name__) diff --git a/pipenv/vendor/pythonfinder/_vendor/pep514tools/environment.py b/pipenv/vendor/pythonfinder/_vendor/pep514tools/environment.py index 2c09ccbcab..e201d0b59e 100644 --- a/pipenv/vendor/pythonfinder/_vendor/pep514tools/environment.py +++ b/pipenv/vendor/pythonfinder/_vendor/pep514tools/environment.py @@ -15,7 +15,8 @@ # These tags are treated specially when the Company is 'PythonCore' _PYTHONCORE_COMPATIBILITY_TAGS = { '2.0', '2.1', '2.2', '2.3', '2.4', '2.5', '2.6', '2.7', - '3.0', '3.1', '3.2', '3.3', '3.4' + '3.0', '3.1', '3.2', '3.3', '3.4', '3.5', '3.6', '3.7', + '3.8' } _IS_64BIT_OS = None diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 34559f7dc1..b3b8d71280 100644 --- 
a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -14,8 +14,6 @@ from vistir.compat import Path, fs_str from vistir.misc import dedup -from .mixins import BaseFinder, BasePath -from .python import PythonVersion from ..environment import ( ASDF_DATA_DIR, ASDF_INSTALLED, @@ -42,6 +40,8 @@ split_version_and_name, unnest, ) +from .mixins import BaseFinder, BasePath +from .python import PythonVersion if MYPY_RUNNING: from typing import ( diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 8e5eecd6e9..49fce70914 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -13,7 +13,6 @@ from packaging.version import Version from vistir.compat import Path, lru_cache -from .mixins import BaseFinder, BasePath from ..environment import ASDF_DATA_DIR, MYPY_RUNNING, PYENV_ROOT, SYSTEM_ARCH from ..exceptions import InvalidPythonVersion from ..utils import ( @@ -21,14 +20,17 @@ _filter_none, ensure_path, get_python_version, + guess_company, is_in_path, looks_like_python, optional_instance_of, parse_asdf_version_order, parse_pyenv_version_order, parse_python_version, + path_is_pythoncore, unnest, ) +from .mixins import BaseFinder, BasePath if MYPY_RUNNING: from typing import ( @@ -114,7 +116,8 @@ def get_version_order(self): versions[v] for v in parse_asdf_version_order() if v in versions ] for version in version_order: - version_paths.remove(version) + if version in version_paths: + version_paths.remove(version) if version_order: version_order += version_paths else: @@ -351,6 +354,7 @@ class PythonVersion(object): architecture = attr.ib(default=None) # type: Optional[str] comes_from = attr.ib(default=None) # type: Optional[PathEntry] executable = attr.ib(default=None) # type: Optional[str] + company = attr.ib(default="PythonCore") # type: Optional[str] name = attr.ib(default=None, type=str) def __getattribute__(self, key): @@ 
-381,11 +385,14 @@ def version_sort(self): """ A tuple for sorting against other instances of the same class. - Returns a tuple of the python version but includes a point for non-dev, - and a point for non-prerelease versions. So released versions will have 2 points - for this value. E.g. `(3, 6, 6, 2)` is a release, `(3, 6, 6, 1)` is a prerelease, - `(3, 6, 6, 0)` is a dev release, and `(3, 6, 6, 3)` is a postrelease. + Returns a tuple of the python version but includes points for core python, + non-dev, and non-prerelease versions. So released versions will have 2 points + for this value. E.g. ``(1, 3, 6, 6, 2)`` is a release, ``(1, 3, 6, 6, 1)`` is a + prerelease, ``(1, 3, 6, 6, 0)`` is a dev release, and ``(1, 3, 6, 6, 3)`` is a + postrelease. ``(0, 3, 7, 3, 2)`` represents a non-core python release, e.g. by + a repackager of python like Continuum. """ + company_sort = 1 if self.company == "PythonCore" else 0 release_sort = 2 if self.is_postrelease: release_sort = 3 @@ -395,7 +402,13 @@ def version_sort(self): release_sort = 0 elif self.is_debug: release_sort = 1 - return (self.major, self.minor, self.patch if self.patch else 0, release_sort) + return ( + company_sort, + self.major, + self.minor, + self.patch if self.patch else 0, + release_sort, + ) @property def version_tuple(self): @@ -473,6 +486,7 @@ def as_dict(self): "is_devrelease": self.is_devrelease, "is_debug": self.is_debug, "version": self.version, + "company": self.company, } def update_metadata(self, metadata): @@ -532,8 +546,8 @@ def get_architecture(self): return self.architecture @classmethod - def from_path(cls, path, name=None, ignore_unsupported=True): - # type: (Union[str, PathEntry], Optional[str], bool) -> PythonVersion + def from_path(cls, path, name=None, ignore_unsupported=True, company=None): + # type: (Union[str, PathEntry], Optional[str], bool, Optional[str]) -> PythonVersion """ Parses a python version from a system path. 
@@ -544,6 +558,7 @@ def from_path(cls, path, name=None, ignore_unsupported=True): :type path: str or :class:`~pythonfinder.models.path.PathEntry` instance :param str name: Name of the python distribution in question :param bool ignore_unsupported: Whether to ignore or error on unsupported paths. + :param Optional[str] company: The company or vendor packaging the distribution. :return: An instance of a PythonVersion. :rtype: :class:`~pythonfinder.models.python.PythonVersion` """ @@ -576,6 +591,8 @@ def from_path(cls, path, name=None, ignore_unsupported=True): instance_dict = cls.parse_executable(path.path.absolute().as_posix()) if name is None: name = path_name + if company is None: + company = guess_company(path.path.as_posix()) instance_dict.update( {"comes_from": path, "name": name, "executable": path.path.as_posix()} ) @@ -622,6 +639,7 @@ def from_windows_launcher(cls, launcher_entry, name=None): exe_path = ensure_path( getattr(launcher_entry.info.install_path, "executable_path", default_path) ) + company = getattr(launcher_entry, "company", "PythonCore") creation_dict.update( { "architecture": getattr( @@ -629,6 +647,7 @@ def from_windows_launcher(cls, launcher_entry, name=None): ), "executable": exe_path, "name": name, + "company": company, } ) py_version = cls.create(**creation_dict) diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py index e96e10815b..92da74c5e0 100644 --- a/pipenv/vendor/pythonfinder/models/windows.py +++ b/pipenv/vendor/pythonfinder/models/windows.py @@ -6,12 +6,12 @@ import attr -from .mixins import BaseFinder -from .path import PathEntry -from .python import PythonVersion, VersionMap from ..environment import MYPY_RUNNING from ..exceptions import InvalidPythonVersion from ..utils import ensure_path +from .mixins import BaseFinder +from .path import PathEntry +from .python import PythonVersion, VersionMap if MYPY_RUNNING: from typing import DefaultDict, Tuple, List, Optional, Union, 
TypeVar, Type, Any @@ -81,6 +81,7 @@ def get_versions(self): path = None for version_object in env_versions: install_path = getattr(version_object.info, "install_path", None) + name = getattr(version_object, "tag", None) if install_path is None: continue try: @@ -88,7 +89,9 @@ def get_versions(self): except AttributeError: continue try: - py_version = PythonVersion.from_windows_launcher(version_object) + py_version = PythonVersion.from_windows_launcher( + version_object, name=name + ) except InvalidPythonVersion: continue if py_version is None: diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index bbab53811f..477f366860 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -239,6 +239,40 @@ def path_is_python(path): return path_is_executable(path) and looks_like_python(path.name) +@lru_cache(maxsize=1024) +def guess_company(path): + # type: (str) -> Optional[str] + """Given a path to python, guess the company who created it + + :param str path: The path to guess about + :return: The guessed company + :rtype: Optional[str] + """ + non_core_pythons = [impl for impl in PYTHON_IMPLEMENTATIONS if impl != "python"] + return next( + iter(impl for impl in non_core_pythons if impl in path.lower()), "PythonCore" + ) + + +@lru_cache(maxsize=1024) +def path_is_pythoncore(path): + # type: (str) -> bool + """Given a path, determine whether it appears to be pythoncore. + + Does not verify whether the path is in fact a path to python, but simply + does an exclusionary check on the possible known python implementations + to see if their names are present in the path (fairly dumb check). 
+ + :param str path: The path to check + :return: Whether that path is a PythonCore path or not + :rtype: bool + """ + company = guess_company(path) + if company: + return company == "PythonCore" + return False + + @lru_cache(maxsize=1024) def ensure_path(path): # type: (Union[vistir.compat.Path, str]) -> vistir.compat.Path From f1bfee0375edc42b6bd337de56c589f02a156d4c Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 03:40:00 -0400 Subject: [PATCH 15/18] Allow better name metadata Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/python.py | 10 ++++++---- pipenv/vendor/pythonfinder/models/windows.py | 3 ++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 49fce70914..15fc9b5f60 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -381,7 +381,7 @@ def __getattribute__(self, key): @property def version_sort(self): - # type: () -> Tuple[Optional[int], Optional[int], int, int] + # type: () -> Tuple[int, int, Optional[int], int, int] """ A tuple for sorting against other instances of the same class. @@ -620,11 +620,13 @@ def parse_executable(cls, path): return result_dict @classmethod - def from_windows_launcher(cls, launcher_entry, name=None): - # type: (Environment, Optional[str]) -> PythonVersion + def from_windows_launcher(cls, launcher_entry, name=None, company=None): + # type: (Environment, Optional[str], Optional[str]) -> PythonVersion """Create a new PythonVersion instance from a Windows Launcher Entry :param launcher_entry: A python launcher environment object. + :param Optional[str] name: The name of the distribution. + :param Optional[str] company: The name of the distributing company. :return: An instance of a PythonVersion. 
:rtype: :class:`~pythonfinder.models.python.PythonVersion` """ @@ -639,7 +641,7 @@ def from_windows_launcher(cls, launcher_entry, name=None): exe_path = ensure_path( getattr(launcher_entry.info.install_path, "executable_path", default_path) ) - company = getattr(launcher_entry, "company", "PythonCore") + company = getattr(launcher_entry, "company", guess_company(exe_path.as_posix())) creation_dict.update( { "architecture": getattr( diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py index 92da74c5e0..a0e69b034e 100644 --- a/pipenv/vendor/pythonfinder/models/windows.py +++ b/pipenv/vendor/pythonfinder/models/windows.py @@ -82,6 +82,7 @@ def get_versions(self): for version_object in env_versions: install_path = getattr(version_object.info, "install_path", None) name = getattr(version_object, "tag", None) + company = getattr(version_object, "company", None) if install_path is None: continue try: @@ -90,7 +91,7 @@ def get_versions(self): continue try: py_version = PythonVersion.from_windows_launcher( - version_object, name=name + version_object, name=name, company=company ) except InvalidPythonVersion: continue From 85d01523619e597d5c6733a69c5596c2e155e92d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 03:50:23 -0400 Subject: [PATCH 16/18] Fix windows specific sort order quirks Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/python.py | 4 ++-- pipenv/vendor/pythonfinder/pythonfinder.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 15fc9b5f60..427eb694ff 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -354,7 +354,7 @@ class PythonVersion(object): architecture = attr.ib(default=None) # type: Optional[str] comes_from = attr.ib(default=None) # type: Optional[PathEntry] executable = attr.ib(default=None) # type: 
Optional[str] - company = attr.ib(default="PythonCore") # type: Optional[str] + company = attr.ib(default=None) # type: Optional[str] name = attr.ib(default=None, type=str) def __getattribute__(self, key): @@ -392,7 +392,7 @@ def version_sort(self): postrelease. ``(0, 3, 7, 3, 2)`` represents a non-core python release, e.g. by a repackager of python like Continuum. """ - company_sort = 1 if self.company == "PythonCore" else 0 + company_sort = 1 if (self.company and self.company == "PythonCore") else 0 release_sort = 2 if self.is_postrelease: release_sort = 3 diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index b0097c2236..400a317005 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -308,6 +308,7 @@ def find_all_python_versions( ) if not isinstance(versions, Iterable): versions = [versions] + # This list has already been mostly sorted on windows, we don't need to reverse it again path_list = sorted(versions, key=version_sort, reverse=True) path_map = {} # type: Dict[str, PathEntry] for path in path_list: @@ -317,4 +318,4 @@ def find_all_python_versions( resolved_path = path.path.absolute() if not path_map.get(resolved_path.as_posix()): path_map[resolved_path.as_posix()] = path - return list(path_map.values()) + return path_list From b105ea534ba9e04965a6b9e39fcf893eef5a605f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 14:47:39 -0400 Subject: [PATCH 17/18] Fix test for formatting paths Signed-off-by: Dan Ryan --- tests/integration/test_project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py index cf1efa63f9..d366ed9d90 100644 --- a/tests/integration/test_project.py +++ b/tests/integration/test_project.py @@ -161,7 +161,7 @@ def test_include_editable_packages(PipenvInstance, testsroot, pathlib_tmpdir): with PipenvInstance(chdir=True) as p: with 
tarfile.open(source_path, "r:gz") as tarinfo: tarinfo.extractall(path=str(pathlib_tmpdir)) - c = p.pipenv('install -e {0!r}'.format(package)) + c = p.pipenv('install -e {0}'.format(package.as_posix())) assert c.return_code == 0 project = Project() assert "requests" in [ From 13a34d20d922906a67538d31d69af99acf45423d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Jul 2019 17:20:55 -0400 Subject: [PATCH 18/18] Fix pythonfinder performance Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/mixins.py | 130 ++++++++++++++++---- pipenv/vendor/pythonfinder/models/path.py | 33 ++++- 2 files changed, 132 insertions(+), 31 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/mixins.py b/pipenv/vendor/pythonfinder/models/mixins.py index b725f7f9f5..a3637e1261 100644 --- a/pipenv/vendor/pythonfinder/models/mixins.py +++ b/pipenv/vendor/pythonfinder/models/mixins.py @@ -41,21 +41,42 @@ BaseFinderType = TypeVar("BaseFinderType") -@attr.s +@attr.s(slots=True) class BasePath(object): path = attr.ib(default=None) # type: Path - _children = attr.ib(default=attr.Factory(dict)) # type: Dict[str, PathEntry] + _children = attr.ib( + default=attr.Factory(dict), cmp=False + ) # type: Dict[str, PathEntry] only_python = attr.ib(default=False) # type: bool name = attr.ib(type=str) - _py_version = attr.ib(default=None) # type: Optional[PythonVersion] + _py_version = attr.ib(default=None, cmp=False) # type: Optional[PythonVersion] _pythons = attr.ib( - default=attr.Factory(defaultdict) + default=attr.Factory(defaultdict), cmp=False ) # type: DefaultDict[str, PathEntry] + _is_dir = attr.ib(default=None, cmp=False) # type: Optional[bool] + _is_executable = attr.ib(default=None, cmp=False) # type: Optional[bool] + _is_python = attr.ib(default=None, cmp=False) # type: Optional[bool] def __str__(self): # type: () -> str return fs_str("{0}".format(self.path.as_posix())) + def __lt__(self, other): + # type: ("BasePath") -> bool + return self.path.as_posix() < 
other.path.as_posix() + + def __lte__(self, other): + # type: ("BasePath") -> bool + return self.path.as_posix() <= other.path.as_posix() + + def __gt__(self, other): + # type: ("BasePath") -> bool + return self.path.as_posix() > other.path.as_posix() + + def __gte__(self, other): + # type: ("BasePath") -> bool + return self.path.as_posix() >= other.path.as_posix() + def which(self, name): # type: (str) -> Optional[PathEntry] """Search in this path for an executable. @@ -83,9 +104,12 @@ def which(self, name): return found def __del__(self): - for key in ["as_python", "is_dir", "is_python", "is_executable", "py_version"]: - if key in self.__dict__: - del self.__dict__[key] + for key in ["_is_dir", "_is_python", "_is_executable", "_py_version"]: + if getattr(self, key, None): + try: + delattr(self, key) + except Exception: + print("failed deleting key: {0}".format(key)) self._children = {} for key in list(self._pythons.keys()): del self._pythons[key] @@ -100,7 +124,7 @@ def children(self): return {} return self._children - @cached_property + @property def as_python(self): # type: () -> PythonVersion py_version = None @@ -117,6 +141,7 @@ def as_python(self): pass if py_version is None: pass + self.py_version = py_version return py_version # type: ignore @name.default @@ -126,30 +151,72 @@ def get_name(self): return self.path.name return None - @cached_property + @property def is_dir(self): # type: () -> bool - if not self.path: - return False - try: - ret_val = self.path.is_dir() - except OSError: - ret_val = False - return ret_val - - @cached_property + if self._is_dir is None: + if not self.path: + ret_val = False + try: + ret_val = self.path.is_dir() + except OSError: + ret_val = False + self._is_dir = ret_val + return self._is_dir + + @is_dir.setter + def is_dir(self, val): + # type: (bool) -> None + self._is_dir = val + + @is_dir.deleter + def is_dir(self): + # type: () -> None + self._is_dir = None + + # @cached_property + @property def is_executable(self): # 
type: () -> bool - if not self.path: - return False - return path_is_known_executable(self.path) + if self._is_executable is None: + if not self.path: + self._is_executable = False + else: + self._is_executable = path_is_known_executable(self.path) + return self._is_executable + + @is_executable.setter + def is_executable(self, val): + # type: (bool) -> None + self._is_executable = val + + @is_executable.deleter + def is_executable(self): + # type: () -> None + self._is_executable = None - @cached_property + # @cached_property + @property def is_python(self): # type: () -> bool - if not self.path: - return False - return self.is_executable and (looks_like_python(self.path.name)) + if self._is_python is None: + if not self.path: + self._is_python = False + else: + self._is_python = self.is_executable and ( + looks_like_python(self.path.name) + ) + return self._is_python + + @is_python.setter + def is_python(self, val): + # type: (bool) -> None + self._is_python = val + + @is_python.deleter + def is_python(self): + # type: () -> None + self._is_python = None def get_py_version(self): # type: () -> Optional[PythonVersion] @@ -173,7 +240,8 @@ def get_py_version(self): return py_version return None - @cached_property + # @cached_property + @property def py_version(self): # type: () -> Optional[PythonVersion] if not self._py_version: @@ -183,6 +251,16 @@ def py_version(self): py_version = self._py_version return py_version + @py_version.setter + def py_version(self, val): + # type: (Optional[PythonVersion]) -> None + self._py_version = val + + @py_version.deleter + def py_version(self): + # type: () -> None + self._py_version = None + def _iter_pythons(self): # type: () -> Iterator if self.is_dir: diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index b3b8d71280..80d5ac5982 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -688,11 +688,23 @@ def create( 
@attr.s(slots=True) class PathEntry(BasePath): - is_root = attr.ib(default=True, type=bool) + is_root = attr.ib(default=True, type=bool, cmp=False) + + def __lt__(self, other): + return self.path.as_posix() < other.path.as_posix() + + def __lte__(self, other): + return self.path.as_posix() <= other.path.as_posix() + + def __gt__(self, other): + return self.path.as_posix() > other.path.as_posix() + + def __gte__(self, other): + return self.path.as_posix() >= other.path.as_posix() def __del__(self): - if "_children" in self.__dict__: - del self.__dict__["_children"] + if getattr(self, "_children"): + del self._children BasePath.__del__(self) def _filter_children(self): @@ -730,16 +742,27 @@ def _gen_children(self): yield (child.as_posix(), entry) return - @cached_property + # @cached_property + @property def children(self): # type: () -> Dict[str, PathEntry] children = getattr(self, "_children", {}) # type: Dict[str, PathEntry] if not children: for child_key, child_val in self._gen_children(): children[child_key] = child_val - self._children = children + self.children = children return self._children + @children.setter + def children(self, val): + # type: (Dict[str, PathEntry]) -> None + self._children = val + + @children.deleter + def children(self): + # type: () -> None + del self._children + @classmethod def create(cls, path, is_root=False, only_python=False, pythons=None, name=None): # type: (Union[str, Path], bool, bool, Dict[str, PythonVersion], Optional[str]) -> PathEntry