diff --git a/.gitignore b/.gitignore index bea156313d..dc9fff3933 100644 --- a/.gitignore +++ b/.gitignore @@ -167,3 +167,4 @@ snap/ prime/ stage/ pip-wheel-metadata/ +.vim/ diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c00ba932a4..82859246eb 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,236 @@ +2020.5.28 (2020-05-28) +====================== + +Features & Improvements +----------------------- + +- ``pipenv install`` and ``pipenv sync`` will no longer attempt to install satisfied dependencies during installation. `#3057 `_, + `#3506 `_ +- Added support for resolution of direct-url dependencies in ``setup.py`` files to respect ``PEP-508`` style URL dependencies. `#3148 `_ +- Added full support for resolution of all dependency types including direct URLs, zip archives, tarballs, etc. + + - Improved error handling and formatting. + + - Introduced improved cross platform stream wrappers for better ``stdout`` and ``stderr`` consistency. `#3298 `_ +- For consistency with other commands and the ``--dev`` option + description, ``pipenv lock --requirements --dev`` now emits + both default and development dependencies. + The new ``--dev-only`` option requests the previous + behaviour (e.g. to generate a ``dev-requirements.txt`` file). `#3316 `_ +- Pipenv will now successfully recursively lock VCS sub-dependencies. `#3328 `_ +- Added support for ``--verbose`` output to ``pipenv run``. `#3348 `_ +- Pipenv will now discover and resolve the intrinsic dependencies of **all** VCS dependencies, whether they are editable or not, to prevent resolution conflicts. `#3368 `_ +- Added a new environment variable, ``PIPENV_RESOLVE_VCS``, to toggle dependency resolution off for non-editable VCS, file, and URL based dependencies. `#3577 `_ +- Added the ability for Windows users to enable emojis by setting ``PIPENV_HIDE_EMOJIS=0``. `#3595 `_ +- Allow overriding PIPENV_INSTALL_TIMEOUT environment variable (in seconds). 
`#3652 `_ +- Allow overriding PIP_EXISTS_ACTION environment variable (value is passed to pip install). + Possible values here: https://pip.pypa.io/en/stable/reference/pip/#exists-action-option + Useful when you need to ``PIP_EXISTS_ACTION=i`` (ignore existing packages) - great for CI environments, where you need really fast setup. `#3738 `_ +- Pipenv will no longer forcibly override ``PIP_NO_DEPS`` on all vcs and file dependencies as resolution happens on these in a pre-lock step. `#3763 `_ +- Improved verbose logging output during ``pipenv lock`` will now stream output to the console while maintaining a spinner. `#3810 `_ +- Added support for automatic python installs via ``asdf`` and associated ``PIPENV_DONT_USE_ASDF`` environment variable. `#4018 `_ +- Pyenv/asdf can now be used whether or not they are available on PATH. Setting PYENV_ROOT/ASDF_DIR in a Pipenv's .env allows Pipenv to install an interpreter without any shell customizations, so long as pyenv/asdf is installed. `#4245 `_ +- Added ``--key`` command line parameter for including personal PyUp.io API tokens when running ``pipenv check``. `#4257 `_ + +Behavior Changes +---------------- + +- Make conservative checks of known exceptions when subprocess returns output, so user won't see the whole traceback - just the error. `#2553 `_ +- Do not touch Pipfile early and rely on it so that one can do ``pipenv sync`` without a Pipfile. `#3386 `_ +- Re-enable ``--help`` option for ``pipenv run`` command. `#3844 `_ +- Make sure ``pipenv lock -r --pypi-mirror {MIRROR_URL}`` will respect the pypi-mirror in requirements output. `#4199 `_ + +Bug Fixes +--------- + +- Raise ``PipenvUsageError`` when [[source]] does not contain url field. `#2373 `_ +- Fixed a bug which caused editable package resolution to sometimes fail with an unhelpful setuptools-related error message. 
`#2722 `_ +- Fixed an issue which caused errors due to reliance on the system utilities ``which`` and ``where`` which may not always exist on some systems. + - Fixed a bug which caused periodic failures in python discovery when executables named ``python`` were not present on the target ``$PATH``. `#2783 `_ +- Dependency resolution now writes hashes for local and remote files to the lockfile. `#3053 `_ +- Fixed a bug which prevented ``pipenv graph`` from correctly showing all dependencies when running from within ``pipenv shell``. `#3071 `_ +- Fixed resolution of direct-url dependencies in ``setup.py`` files to respect ``PEP-508`` style URL dependencies. `#3148 `_ +- Fixed a bug which caused failures in warning reporting when running pipenv inside a virtualenv under some circumstances. + + - Fixed a bug with package discovery when running ``pipenv clean``. `#3298 `_ +- Quote command arguments with carets (``^``) on Windows to work around unintended shell escapes. `#3307 `_ +- Handle alternate names for UTF-8 encoding. `#3313 `_ +- Abort pipenv before adding the non-exist package to Pipfile. `#3318 `_ +- Don't normalize the package name user passes in. `#3324 `_ +- Fix a bug where custom virtualenv can not be activated with pipenv shell `#3339 `_ +- Fix a bug that ``--site-packages`` flag is not recognized. `#3351 `_ +- Fix a bug where pipenv --clear is not working `#3353 `_ +- Fix unhashable type error during ``$ pipenv install --selective-upgrade`` `#3384 `_ +- Dependencies with direct ``PEP508`` compliant VCS URLs specified in their ``install_requires`` will now be successfully locked during the resolution process. `#3396 `_ +- Fixed a keyerror which could occur when locking VCS dependencies in some cases. `#3404 `_ +- Fixed a bug that ``ValidationError`` is thrown when some fields are missing in source section. `#3427 `_ +- Updated the index names in lock file when source name in Pipfile is changed. 
`#3449 `_ +- Fixed an issue which caused ``pipenv install --help`` to show duplicate entries for ``--pre``. `#3479 `_ +- Fix bug causing ``[SSL: CERTIFICATE_VERIFY_FAILED]`` when Pipfile ``[[source]]`` has verify_ssl=false and url with custom port. `#3502 `_ +- Fix ``sync --sequential`` ignoring ``pip install`` errors and logs. `#3537 `_ +- Fix the issue that lock file can't be created when ``PIPENV_PIPFILE`` is not under working directory. `#3584 `_ +- Pipenv will no longer inadvertently set ``editable=True`` on all vcs dependencies. `#3647 `_ +- The ``--keep-outdated`` argument to ``pipenv install`` and ``pipenv lock`` will now drop specifier constraints when encountering editable dependencies. + - In addition, ``--keep-outdated`` will retain specifiers that would otherwise be dropped from any entries that have not been updated. `#3656 `_ +- Fixed a bug which sometimes caused pipenv to fail to respect the ``--site-packages`` flag when passed with ``pipenv install``. `#3718 `_ +- Normalize the package names to lowercase when comparing used and in-Pipfile packages. `#3745 `_ +- ``pipenv update --outdated`` will now correctly handle comparisons between pre/post-releases and normal releases. `#3766 `_ +- Fixed a ``KeyError`` which could occur when pinning outdated VCS dependencies via ``pipenv lock --keep-outdated``. `#3768 `_ +- Resolved an issue which caused resolution to fail when encountering poorly formatted ``python_version`` markers in ``setup.py`` and ``setup.cfg`` files. `#3786 `_ +- Fix a bug that installation errors are displayed as a list. `#3794 `_ +- Update ``pythonfinder`` to fix a problem that ``python.exe`` will be mistakenly chosen for + virtualenv creation under WSL. `#3807 `_ +- Fixed several bugs which could prevent editable VCS dependencies from being installed into target environments, even when reporting successful installation. 
`#3809 `_ +- ``pipenv check --system`` should find the correct Python interpreter when ``python`` does not exist on the system. `#3819 `_ +- Resolve the symlinks when the path is absolute. `#3842 `_ +- Pass ``--pre`` and ``--clear`` options to ``pipenv update --outdated``. `#3879 `_ +- Fixed a bug which prevented resolution of direct URL dependencies which have PEP508 style direct url VCS sub-dependencies with subdirectories. `#3976 `_ +- Honor PIPENV_SPINNER environment variable `#4045 `_ +- Fixed an issue with ``pipenv check`` failing due to an invalid API key from ``pyup.io``. `#4188 `_ +- Fixed a bug which caused versions from VCS dependencies to be included in ``Pipfile.lock`` inadvertently. `#4217 `_ +- Fixed a bug which caused pipenv to search non-existent virtual environments for ``pip`` when installing using ``--system``. `#4220 `_ +- ``Requires-Python`` values specifying constraint versions of python starting from ``1.x`` will now be parsed successfully. `#4226 `_ +- Fix a bug of ``pipenv update --outdated`` that can't print output correctly. `#4229 `_ +- Fixed a bug which caused pipenv to prefer source distributions over wheels from ``PyPI`` during the dependency resolution phase. + Fixed an issue which prevented proper build isolation using ``pep517`` based builders during dependency resolution. `#4231 `_ +- Don't fallback to system Python when no matching Python version is found. 
`#4232 `_ + +Vendored Libraries +------------------ + +- Updated vendored dependencies: + + - **attrs**: ``18.2.0`` => ``19.1.0`` + - **certifi**: ``2018.10.15`` => ``2019.3.9`` + - **cached_property**: ``1.4.3`` => ``1.5.1`` + - **cerberus**: ``1.2.0`` => ``1.3.1`` + - **click-completion**: ``0.5.0`` => ``0.5.1`` + - **colorama**: ``0.3.9`` => ``0.4.1`` + - **distlib**: ``0.2.8`` => ``0.2.9`` + - **idna**: ``2.7`` => ``2.8`` + - **jinja2**: ``2.10.0`` => ``2.10.1`` + - **markupsafe**: ``1.0`` => ``1.1.1`` + - **orderedmultidict**: ``(new)`` => ``1.0`` + - **packaging**: ``18.0`` => ``19.0`` + - **parse**: ``1.9.0`` => ``1.12.0`` + - **pathlib2**: ``2.3.2`` => ``2.3.3`` + - **pep517**: ``(new)`` => ``0.5.0`` + - **pexpect**: ``4.6.0`` => ``4.7.0`` + - **pipdeptree**: ``0.13.0`` => ``0.13.2`` + - **pyparsing**: ``2.2.2`` => ``2.3.1`` + - **python-dotenv**: ``0.9.1`` => ``0.10.2`` + - **pythonfinder**: ``1.1.10`` => ``1.2.1`` + - **pytoml**: ``(new)`` => ``0.1.20`` + - **requests**: ``2.20.1`` => ``2.21.0`` + - **requirementslib**: ``1.3.3`` => ``1.5.0`` + - **scandir**: ``1.9.0`` => ``1.10.0`` + - **shellingham**: ``1.2.7`` => ``1.3.1`` + - **six**: ``1.11.0`` => ``1.12.0`` + - **tomlkit**: ``0.5.2`` => ``0.5.3`` + - **urllib3**: ``1.24`` => ``1.25.2`` + - **vistir**: ``0.3.0`` => ``0.4.1`` + - **yaspin**: ``0.14.0`` => ``0.14.3`` + + - Removed vendored dependency **cursor**. `#3298 `_ +- Updated ``pip_shims`` to support ``--outdated`` with new pip versions. 
`#3766 `_ +- Update vendored dependencies and invocations + + - Update vendored and patched dependencies + - Update patches on ``piptools``, ``pip``, ``pip-shims``, ``tomlkit` + - Fix invocations of dependencies + - Fix custom ``InstallCommand` instantiation + - Update ``PackageFinder` usage + - Fix ``Bool` stringify attempts from ``tomlkit` + + Updated vendored dependencies: + - **attrs**: ```18.2.0`` => ```19.1.0`` + - **certifi**: ```2018.10.15`` => ```2019.3.9`` + - **cached_property**: ```1.4.3`` => ```1.5.1`` + - **cerberus**: ```1.2.0`` => ```1.3.1`` + - **click**: ```7.0.0`` => ```7.1.1`` + - **click-completion**: ```0.5.0`` => ```0.5.1`` + - **colorama**: ```0.3.9`` => ```0.4.3`` + - **contextlib2**: ```(new)`` => ```0.6.0.post1`` + - **distlib**: ```0.2.8`` => ```0.2.9`` + - **funcsigs**: ```(new)`` => ```1.0.2`` + - **importlib_metadata** ```1.3.0`` => ```1.5.1`` + - **importlib-resources**: ```(new)`` => ```1.4.0`` + - **idna**: ```2.7`` => ```2.9`` + - **jinja2**: ```2.10.0`` => ```2.11.1`` + - **markupsafe**: ```1.0`` => ```1.1.1`` + - **more-itertools**: ```(new)`` => ```5.0.0`` + - **orderedmultidict**: ```(new)`` => ```1.0`` + - **packaging**: ```18.0`` => ```19.0`` + - **parse**: ```1.9.0`` => ```1.15.0`` + - **pathlib2**: ```2.3.2`` => ```2.3.3`` + - **pep517**: ```(new)`` => ```0.5.0`` + - **pexpect**: ```4.6.0`` => ```4.8.0`` + - **pip-shims**: ```0.2.0`` => ```0.5.1`` + - **pipdeptree**: ```0.13.0`` => ```0.13.2`` + - **pyparsing**: ```2.2.2`` => ```2.4.6`` + - **python-dotenv**: ```0.9.1`` => ```0.10.2`` + - **pythonfinder**: ```1.1.10`` => ```1.2.2`` + - **pytoml**: ```(new)`` => ```0.1.20`` + - **requests**: ```2.20.1`` => ```2.23.0`` + - **requirementslib**: ```1.3.3`` => ```1.5.4`` + - **scandir**: ```1.9.0`` => ```1.10.0`` + - **shellingham**: ```1.2.7`` => ```1.3.2`` + - **six**: ```1.11.0`` => ```1.14.0`` + - **tomlkit**: ```0.5.2`` => ```0.5.11`` + - **urllib3**: ```1.24`` => ```1.25.8`` + - **vistir**: ```0.3.0`` => ```0.5.0`` + - 
**yaspin**: ```0.14.0`` => ```0.14.3`` + - **zipp**: ```0.6.0`` + + - Removed vendored dependency **cursor**. `#4169 `_ +- Add and update vendored dependencies to accommodate ``safety`` vendoring: + - **safety** ``(none)`` => ``1.8.7`` + - **dparse** ``(none)`` => ``0.5.0`` + - **pyyaml** ``(none)`` => ``5.3.1`` + - **urllib3** ``1.25.8`` => ``1.25.9`` + - **certifi** ``2019.11.28`` => ``2020.4.5.1`` + - **pyparsing** ``2.4.6`` => ``2.4.7`` + - **resolvelib** ``0.2.2`` => ``0.3.0`` + - **importlib-metadata** ``1.5.1`` => ``1.6.0`` + - **pip-shims** ``0.5.1`` => ``0.5.2`` + - **requirementslib** ``1.5.5`` => ``1.5.6`` `#4188 `_ +- Updated vendored ``pip`` => ``20.0.2`` and ``pip-tools`` => ``5.0.0``. `#4215 `_ +- Updated vendored dependencies to latest versions for security and bug fixes: + + - **requirementslib** ``1.5.8`` => ``1.5.9`` + - **vistir** ``0.5.0`` => ``0.5.1`` + - **jinja2** ``2.11.1`` => ``2.11.2`` + - **click** ``7.1.1`` => ``7.1.2`` + - **dateutil** ``(none)`` => ``2.8.1`` + - **backports.functools_lru_cache** ``1.5.0`` => ``1.6.1`` + - **enum34** ``1.1.6`` => ``1.1.10`` + - **toml** ``0.10.0`` => ``0.10.1`` + - **importlib_resources** ``1.4.0`` => ``1.5.0`` `#4226 `_ +- Changed attrs import path in vendored dependencies to always import from ``pipenv.vendor``. `#4267 `_ + +Improved Documentation +---------------------- + +- Added documenation about variable expansion in ``Pipfile`` entries. `#2317 `_ +- Consolidate all contributing docs in the rst file `#3120 `_ +- Update the out-dated manual page. `#3246 `_ +- Move CLI docs to its own page. `#3346 `_ +- Replace (non-existant) video on docs index.rst with equivalent gif. `#3499 `_ +- Clarify wording in Basic Usage example on using double quotes to escape shell redirection `#3522 `_ +- Ensure docs show navigation on small-screen devices `#3527 `_ +- Added a link to the TOML Spec under General Recommendations & Version Control to clarify how Pipfiles should be written. 
`#3629 `_ +- Updated the documentation with the new ``pytest`` entrypoint. `#3759 `_ +- Fix link to GIF in README.md demonstrating Pipenv's usage, and add descriptive alt text. `#3911 `_ +- Added a line describing potential issues in fancy extension. `#3912 `_ +- Documental description of how Pipfile works and association with Pipenv. `#3913 `_ +- Clarify the proper value of ``python_version`` and ``python_full_version``. `#3914 `_ +- Write description for --deploy extension and few extensions differences. `#3915 `_ +- More documentation for ``.env`` files `#4100 `_ +- Updated documentation to point to working links. `#4137 `_ +- Replace docs.pipenv.org with pipenv.pypa.io `#4167 `_ +- Added functionality to check spelling in documentation and cleaned up existing typographical issues. `#4209 `_ + + 2018.11.26 (2018-11-26) ======================= @@ -10,14 +243,14 @@ Bug Fixes - Fixed a bug which could cause failures to occur when parsing python entries from global pyenv version files. `#3224 `_ - Fixed an issue which prevented the parsing of named extras sections from certain ``setup.py`` files. `#3230 `_ - Correctly detect the virtualenv location inside an activated virtualenv. `#3231 `_ -- Fixed a bug which caused spinner frames to be written to stdout during locking operations which could cause redirection pipes to fail. `#3239 `_ -- Fixed a bug that editable pacakges can't be uninstalled correctly. `#3240 `_ +- Fixed a bug which caused spinner frames to be written to standard output during locking operations which could cause redirection pipes to fail. `#3239 `_ +- Fixed a bug that editable packages can't be uninstalled correctly. `#3240 `_ - Corrected an issue with installation timeouts which caused dependency resolution to fail for longer duration resolution steps. `#3244 `_ - Adding normal pep 508 compatible markers is now fully functional when using VCS dependencies. `#3249 `_ -- Updated ``requirementslib`` and ``pythonfinder`` for multiple bugfixes. 
`#3254 `_ +- Updated ``requirementslib`` and ``pythonfinder`` for multiple bug fixes. `#3254 `_ - Pipenv will now ignore hashes when installing with ``--skip-lock``. `#3255 `_ - Fixed an issue where pipenv could crash when multiple pipenv processes attempted to create the same directory. `#3257 `_ -- Fixed an issue which sometimes prevented successful creation of project pipfiles. `#3260 `_ +- Fixed an issue which sometimes prevented successful creation of a project Pipfile. `#3260 `_ - ``pipenv install`` will now unset the ``PYTHONHOME`` environment variable when not combined with ``--system``. `#3261 `_ - Pipenv will ensure that warnings do not interfere with the resolution process by suppressing warnings' usage of standard output and writing to standard error instead. `#3273 `_ - Fixed an issue which prevented variables from the environment, such as ``PIPENV_DEV`` or ``PIPENV_SYSTEM``, from being parsed and implemented correctly. `#3278 `_ @@ -43,16 +276,16 @@ Features & Improvements - Improved exceptions and error handling on failures. `#1977 `_ - Added persistent settings for all CLI flags via ``PIPENV_{FLAG_NAME}`` environment variables by enabling ``auto_envvar_prefix=PIPENV`` in click (implements PEEP-0002). `#2200 `_ - Added improved messaging about available but skipped updates due to dependency conflicts when running ``pipenv update --outdated``. `#2411 `_ -- Added environment variable `PIPENV_PYUP_API_KEY` to add ability - to override the bundled pyup.io API key. `#2825 `_ -- Added additional output to ``pipenv update --outdated`` to indicate that the operation succeded and all packages were already up to date. `#2828 `_ +- Added environment variable ``PIPENV_PYUP_API_KEY`` to add ability + to override the bundled PyUP.io API key. `#2825 `_ +- Added additional output to ``pipenv update --outdated`` to indicate that the operation succeeded and all packages were already up to date. 
`#2828 `_ - Updated ``crayons`` patch to enable colors on native powershell but swap native blue for magenta. `#3020 `_ - Added support for ``--bare`` to ``pipenv clean``, and fixed ``pipenv sync --bare`` to actually reduce output. `#3041 `_ - Added windows-compatible spinner via upgraded ``vistir`` dependency. `#3089 `_ - - Added support for python installations managed by ``asdf``. `#3096 `_ - Improved runtime performance of no-op commands such as ``pipenv --venv`` by around 2/3. `#3158 `_ - Do not show error but success for running ``pipenv uninstall --all`` in a fresh virtual environment. `#3170 `_ -- Improved asynchronous installation and error handling via queued subprocess paralleization. `#3217 `_ +- Improved asynchronous installation and error handling via queued subprocess parallelization. `#3217 `_ Bug Fixes --------- @@ -60,7 +293,7 @@ Bug Fixes - Remote non-PyPI artifacts and local wheels and artifacts will now include their own hashes rather than including hashes from ``PyPI``. `#2394 `_ - Non-ascii characters will now be handled correctly when parsed by pipenv's ``ToML`` parsers. `#2737 `_ - Updated ``pipenv uninstall`` to respect the ``--skip-lock`` argument. `#2848 `_ -- Fixed a bug which caused uninstallation to sometimes fail to successfullly remove packages from ``Pipfiles`` with comments on preceding or following lines. `#2885 `_, +- Fixed a bug which caused uninstallation to sometimes fail to successfully remove packages from ``Pipfiles`` with comments on preceding or following lines. `#2885 `_, `#3099 `_ - Pipenv will no longer fail when encountering python versions on Windows that have been uninstalled. `#2983 `_ - Fixed unnecessary extras are added when translating markers `#3026 `_ @@ -79,7 +312,7 @@ Bug Fixes - Fixed an issue which caused ``pipenv clean`` to sometimes clean packages from the base ``site-packages`` folder or fail entirely. 
`#3113 `_ - Updated ``pythonfinder`` to correct an issue with unnesting of nested paths when searching for python versions. `#3121 `_ - Added additional logic for ignoring and replacing non-ascii characters when formatting console output on non-UTF-8 systems. `#3131 `_ -- Fix virtual environment discovery when `PIPENV_VENV_IN_PROJECT` is set, but the in-project `.venv` is a file. `#3134 `_ +- Fix virtual environment discovery when ``PIPENV_VENV_IN_PROJECT`` is set, but the in-project `.venv` is a file. `#3134 `_ - Hashes for remote and local non-PyPI artifacts will now be included in ``Pipfile.lock`` during resolution. `#3145 `_ - Fix project path hashing logic in purpose to prevent collisions of virtual environments. `#3151 `_ - Fix package installation when the virtual environment path contains parentheses. `#3158 `_ @@ -166,7 +399,7 @@ Features & Improvements - Added environment variables `PIPENV_VERBOSE` and `PIPENV_QUIET` to control output verbosity without needing to pass options. `#2527 `_ -- Updated test-pypi addon to better support json-api access (forward compatibility). +- Updated test-PyPI add-on to better support json-API access (forward compatibility). Improved testing process for new contributors. `#2568 `_ - Greatly enhanced python discovery functionality: @@ -202,11 +435,11 @@ Behavior Changes Bug Fixes --------- -- Fixed a bug which prevented installation of editable requirements using ``ssh://`` style urls `#1393 `_ +- Fixed a bug which prevented installation of editable requirements using ``ssh://`` style URLs `#1393 `_ - VCS Refs for locked local editable dependencies will now update appropriately to the latest hash when running ``pipenv update``. `#1690 `_ -- ``.tar.gz`` and ``.zip`` artifacts will now have dependencies installed even when they are missing from the lockfile. `#2173 `_ +- ``.tar.gz`` and ``.zip`` artifacts will now have dependencies installed even when they are missing from the Lockfile. 
`#2173 `_ - The command line parser will now handle multiple ``-e/--editable`` dependencies properly via click's option parser to help mitigate future parsing issues. `#2279 `_ @@ -230,12 +463,12 @@ Bug Fixes - Fix subshell invocation on Windows for Python 2. `#2515 `_ -- Fixed a bug which sometimes caused pipenv to throw a ``TypeError`` or to run into encoding issues when writing lockfiles on python 2. `#2561 `_ +- Fixed a bug which sometimes caused pipenv to throw a ``TypeError`` or to run into encoding issues when writing a Lockfile on python 2. `#2561 `_ - Improve quoting logic for ``pipenv run`` so it works better with Windows built-in commands. `#2563 `_ -- Fixed a bug related to parsing vcs requirements with both extras and subdirectory fragments. +- Fixed a bug related to parsing VCS requirements with both extras and subdirectory fragments. Corrected an issue in the ``requirementslib`` parser which led to some markers being discarded rather than evaluated. `#2564 `_ - Fixed multiple issues with finding the correct system python locations. `#2582 `_ @@ -264,7 +497,7 @@ Bug Fixes - Fixed a bug which could cause the ``-e/--editable`` argument on a dependency to be accidentally parsed as a dependency itself. `#2714 `_ -- Correctly pass `verbose` and `debug` flags to the resolver subprocess so it generates appropriate output. This also resolves a bug introduced by the fix to #2527. `#2732 `_ +- Correctly pass ``verbose`` and ``debug`` flags to the resolver subprocess so it generates appropriate output. This also resolves a bug introduced by the fix to #2527. `#2732 `_ - All markers are now included in ``pipenv lock --requirements`` output. `#2748 `_ @@ -273,7 +506,7 @@ Bug Fixes - Fixed a bug in the dependency resolver which caused regular issues when handling ``setup.py`` based dependency resolution. 
`#2766 `_ - Updated vendored dependencies: - - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibilty) + - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibility) - ``pip 10.0.1 => 18.0`` - ``click 6.7 => 7.0`` - ``toml 0.9.4 => 0.10.0`` @@ -327,7 +560,7 @@ Vendored Libraries - ``python-dotenv`` to ``0.9.1`` `#2639 `_ - Updated vendored dependencies: - - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibilty) + - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibility) - ``pip 10.0.1 => 18.0`` - ``click 6.7 => 7.0`` - ``toml 0.9.4 => 0.10.0`` @@ -353,7 +586,7 @@ Improved Documentation - Simplified the test configuration process. `#2568 `_ -- Updated documentation to use working fortune cookie addon. `#2644 `_ +- Updated documentation to use working fortune cookie add-on. `#2644 `_ - Added additional information about troubleshooting ``pipenv shell`` by using the the ``$PIPENV_SHELL`` environment variable. `#2671 `_ @@ -364,7 +597,7 @@ Improved Documentation - Stopped recommending `--system` for Docker contexts. `#2762 `_ - Fixed the example url for doing "pipenv install -e - some-repo-url#egg=something", it was missing the "egg=" in the fragment + some-repository-url#egg=something", it was missing the "egg=" in the fragment identifier. `#2792 `_ - Fixed link to the "be cordial" essay in the contribution documentation. `#2793 `_ @@ -382,37 +615,37 @@ Features & Improvements - All calls to ``pipenv shell`` are now implemented from the ground up using `shellingham `_, a custom library which was purpose built to handle edge cases and shell detection. `#2371 `_ -- Added support for python 3.7 via a few small compatibility / bugfixes. `#2427 `_, +- Added support for python 3.7 via a few small compatibility / bug fixes. `#2427 `_, `#2434 `_, `#2436 `_ - Added new flag ``pipenv --support`` to replace the diagnostic command ``python -m pipenv.help``. 
`#2477 `_, `#2478 `_ -- Improved import times and CLI runtimes with minor tweaks. `#2485 `_ +- Improved import times and CLI run times with minor tweaks. `#2485 `_ Bug Fixes --------- -- Fixed an ongoing bug which sometimes resolved incompatible versions into lockfiles. `#1901 `_ +- Fixed an ongoing bug which sometimes resolved incompatible versions into the project Lockfile. `#1901 `_ - Fixed a bug which caused errors when creating virtualenvs which contained leading dash characters. `#2415 `_ -- Fixed a logic error which caused ``--deploy --system`` to overwrite editable vcs packages in the pipfile before installing, which caused any installation to fail by default. `#2417 `_ +- Fixed a logic error which caused ``--deploy --system`` to overwrite editable vcs packages in the Pipfile before installing, which caused any installation to fail by default. `#2417 `_ - Updated requirementslib to fix an issue with properly quoting markers in VCS requirements. `#2419 `_ - Installed new vendored jinja2 templates for ``click-completion`` which were causing template errors for users with completion enabled. `#2422 `_ -- Added support for python 3.7 via a few small compatibility / bugfixes. `#2427 `_ +- Added support for python 3.7 via a few small compatibility / bug fixes. `#2427 `_ - Fixed an issue reading package names from ``setup.py`` files in projects which imported utilities such as ``versioneer``. `#2433 `_ - Pipenv will now ensure that its internal package names registry files are written with unicode strings. `#2450 `_ - Fixed a bug causing requirements input as relative paths to be output as absolute paths or URIs. - Fixed a bug affecting normalization of ``git+git@host`` uris. `#2453 `_ + Fixed a bug affecting normalization of ``git+git@host`` URLs. `#2453 `_ - Pipenv will now always use ``pathlib2`` for ``Path`` based filesystem interactions by default on ``python<3.5``. `#2454 `_ @@ -479,7 +712,7 @@ Features & Improvements patched piptools version. 
`#2255 `_ -- PyPI mirror URLs can now be set to override instances of PyPI urls by passing +- PyPI mirror URLs can now be set to override instances of PyPI URLs by passing the ``--pypi-mirror`` argument from the command line or setting the ``PIPENV_PYPI_MIRROR`` environment variable. `#2281 `_ @@ -518,7 +751,7 @@ Behavior Changes - Pipenv will now parse & capitalize ``platform_python_implementation`` markers .. warning:: This could cause an issue if you have an out of date ``Pipfile`` - which lowercases the comparison value (e.g. ``cpython`` instead of + which lower-cases the comparison value (e.g. ``cpython`` instead of ``CPython``). `#2123 `_ - Pipenv will now only search for ``requirements.txt`` files when creating new @@ -572,7 +805,7 @@ Bug Fixes - Fixed a bug causing pipenv graph to fail to display sometimes. `#2268 `_ -- Updated ``requirementslib`` to fix a bug in pipfile parsing affecting +- Updated ``requirementslib`` to fix a bug in Pipfile parsing affecting relative path conversions. `#2269 `_ @@ -590,11 +823,11 @@ Bug Fixes requested ref. `#2304 `_ - Added error handling functionality to properly cope with single-digit - ``Requires-Python`` metatdata with no specifiers. `#2377 + ``Requires-Python`` metadata with no specifiers. `#2377 `_ - ``pipenv update`` will now always run the resolver and lock before ensuring - your dependencies are in sync with your lockfile. `#2379 + dependencies are in sync with project Lockfile. `#2379 `_ - Resolved a bug in our patched resolvers which could cause nondeterministic @@ -631,7 +864,7 @@ Vendored Libraries patched piptools version. `#2255 `_ -- Updated ``requirementslib`` to fix a bug in pipfile parsing affecting +- Updated ``requirementslib`` to fix a bug in Pipfile parsing affecting relative path conversions. `#2269 `_ diff --git a/Makefile b/Makefile index 16964d5c1b..74fe2a82c1 100644 --- a/Makefile +++ b/Makefile @@ -98,6 +98,9 @@ pre-bump: lint: flake8 . 
+man: + $(MAKE) -C docs $@ + .PHONY: check check: build.stamp pipenv run twine check dist/* && pipenv run check-manifest . diff --git a/Pipfile b/Pipfile index 826df207d7..f46243c7c9 100644 --- a/Pipfile +++ b/Pipfile @@ -8,6 +8,7 @@ jedi = "*" isort = "*" rope = "*" passa = {git = "https://github.com/sarugaku/passa.git"} +sphinxcontrib-spelling = "<4.3.0" [packages] diff --git a/Pipfile.lock b/Pipfile.lock index 913f12e849..1a936698cb 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "f4d89c0aab5c4e865f8c96ba24613fb1e66bae803a3ceaeadb6abf0061898091" + "sha256": "e3e8319381b7b60d8c841564b12ed69b9ab85a81de1241e0361a8e9f7486f162" }, "pipfile-spec": 6, "requires": {}, @@ -554,6 +554,21 @@ ], "version": "==5.4.5" }, + "pep517": { + "hashes": [ + "sha256:576c480be81f3e1a70a16182c762311eb80d1f8a7b0d11971e5234967d7a342c", + "sha256:8e6199cf1288d48a0c44057f112acf18aa5ebabbf73faa242f598fbe145ba29e" + ], + "version": "==0.8.2" + }, + "pip-shims": { + "hashes": [ + "sha256:39193b8c4aa5e4cb82e250be58df4d5eaebe931a33b0df43b369f4ae92ee5753", + "sha256:423978c27d0e24e8ecb3e82b4a6c1f607e2e364153e73d0803c671d48b23195e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.5.2" + }, "pipenv": { "editable": true, "extras": [ @@ -612,6 +627,15 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, + "pyenchant": { + "hashes": [ + "sha256:e8000144e61551fcab9cd1b6fdccdded20e577e8d6d0985533f0b2b9c38fd952", + "sha256:b9526fc2c5f1ba0637e50200b645a7c20fb6644dbc6f6322027e7d2fbf1084a5", + "sha256:fc31cda72ace001da8fe5d42f11c26e514a91fa8c70468739216ddd8de64e2a0", + "sha256:9a66aa441535e27d228baca320f7feed1b08d0c5e6167d5e5cf455b545b7c2cd" + ], + "version": "==2.0.0" + }, "pyflakes": { "hashes": [ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", @@ -734,6 +758,21 @@ ], "version": "==0.9.1" }, + "requirementslib": { 
+ "hashes": [ + "sha256:4999223a26504e0a3cedf9b58def69eae3a93d39db945a85e2135e0239e28fa8", + "sha256:b9989e4815ada8ed71f5d4059e4e6be6f864fb57de744c04ac3d0c744df52304" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.5.7" + }, + "resolvelib": { + "hashes": [ + "sha256:8ea817c951ffced489e71be10310c99eee36ff3dca02c24cd0dfe9e42d366da6", + "sha256:9781c2038be2ba3377d075dd3aa8f5f0f7b508b6f59779b1414bea08ed402f1e" + ], + "version": "==0.3.0" + }, "retry": { "hashes": [ "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606", @@ -821,6 +860,14 @@ "index": "pypi", "version": "==2.3.2" }, + "sphinxcontrib-spelling": { + "hashes": [ + "sha256:7bcbaabef7aa9c176b81d960b20d0f67817ccea5e098968c366d2db4ad76d476", + "sha256:d76b113d538ad55b9e9e5a8e68d3734473926306edfdad3f707cece44d9b5d29" + ], + "index": "pypi", + "version": "==4.2.1" + }, "sphinxcontrib-websupport": { "hashes": [ "sha256:1501befb0fdf1d1c29a800fdbf4ef5dc5369377300ddbdd16d2cd40e54c6eefc", @@ -850,6 +897,14 @@ ], "version": "==0.10.0" }, + "tomlkit": { + "hashes": [ + "sha256:74f976908030ff164c0aa1edabe3bf83ea004b3daa5b0940b9c86a060c004e9a", + "sha256:e5d5f20809c2b09276a6c5d98fb0202325aee441a651db84ac12e0812ab7e569" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.6.0" + }, "towncrier": { "hashes": [ "sha256:48251a1ae66d2cf7e6fa5552016386831b3e12bb3b2d08eb70374508c17a8196", @@ -933,6 +988,17 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.5.4" }, + "vistir": { + "extras": [ + "spinner" + ], + "hashes": [ + "sha256:33f8e905d40a77276b3d5310c8b57c1479a4e46930042b4894fcf7ed60ad76c4", + "sha256:e47afdec8baf35032a8d17116765f751ecd2f2146d47e5af457c5de1fe5a334e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.5.0" + }, "wcwidth": { "hashes": [ 
"sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1", diff --git a/RELEASING.md b/RELEASING.md index 3ba623174f..379fd9754f 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -105,6 +105,20 @@ $ export PIPENV_PYTHON=2.7 $ pipenv install --dev && pytest -ra tests ``` +## Check Spelling in Documentation + +Pipenv now leverages `sphinxcontrib.spelling` to help ensure documentation does not contain typographical mistakes. To validate documentation, please make sure to rebuild and rectify any documentation issues before pushing the new release: + +```console +$ pipenv shell +$ cd docs +$ make clean && make html +$ make spelling +``` + +Validate the results, adding any new exceptions to `docs/spelling_wordlist.txt`. + + ## Releasing 1. Set a version: `pipenv run inv release.bump-version --trunc-month --pre --tag=a` - this will truncate the current month, creating an alpha pre-release, e.g. `2020.4.1a1` diff --git a/docs/advanced.rst b/docs/advanced.rst index 25bbdf6d60..b2944da964 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -41,7 +41,7 @@ Very fancy. ☤ Using a PyPI Mirror ---------------------------- -If you'd like to override the default PyPI index urls with the url for a PyPI mirror, you can use the following:: +If you would like to override the default PyPI index URLs with the URL for a PyPI mirror, you can use the following:: $ pipenv install --pypi-mirror @@ -149,7 +149,9 @@ Anaconda uses Conda to manage packages. To reuse Conda–installed Python packag ☤ Generating a ``requirements.txt`` ----------------------------------- -You can convert a ``Pipfile`` and ``Pipfile.lock`` into a ``requirements.txt`` file very easily, and get all the benefits of extras and other goodies we have included. +You can convert a ``Pipfile`` and ``Pipfile.lock`` into a ``requirements.txt`` +file very easily, and get all the benefits of extras and other goodies we have +included. 
Let's take this ``Pipfile``:: @@ -160,7 +162,10 @@ Let's take this ``Pipfile``:: [packages] requests = {version="*"} -And generate a ``requirements.txt`` out of it:: + [dev-packages] + pytest = {version="*"} + +And generate a set of requirements out of it with only the default dependencies:: $ pipenv lock -r chardet==3.0.4 @@ -169,22 +174,41 @@ And generate a ``requirements.txt`` out of it:: idna==2.6 urllib3==1.22 -If you wish to generate a ``requirements.txt`` with only the development requirements you can do that too! Let's take the following ``Pipfile``:: +As with other commands, passing ``--dev`` will include both the default and +development dependencies:: - [[source]] - url = "https://pypi.python.org/simple" - verify_ssl = true + $ pipenv lock -r --dev + chardet==3.0.4 + requests==2.18.4 + certifi==2017.7.27.1 + idna==2.6 + urllib3==1.22 + py==1.4.34 + pytest==3.2.3 - [dev-packages] - pytest = {version="*"} +Finally, if you wish to generate a requirements file with only the +development requirements you can do that too, using the ``--dev-only`` +flag:: -And generate a ``requirements.txt`` out of it:: + $ pipenv lock -r --dev-only + py==1.4.34 + pytest==3.2.3 - $ pipenv lock -r --dev +The locked requirements are written to stdout, with shell output redirection +used to write them to a file:: + + $ pipenv lock -r > requirements.txt + $ pipenv lock -r --dev-only > dev-requirements.txt + $ cat requirements.txt + chardet==3.0.4 + requests==2.18.4 + certifi==2017.7.27.1 + idna==2.6 + urllib3==1.22 + $ cat dev-requirements.txt py==1.4.34 pytest==3.2.3 -Very fancy. ☤ Detection of Security Vulnerabilities --------------------------------------- @@ -467,7 +491,7 @@ In addition, you can also have Pipenv stick the virtualenv in ``project/.venv`` Pipenv is being used in projects like `Requests`_ for declaring development dependencies and running the test suite. -We've currently tested deployments with both `Travis-CI`_ and `tox`_ with success. 
+We have successfully tested deployments with both `Travis-CI`_ and `tox`_. Travis CI ///////// @@ -549,11 +573,11 @@ A 3rd party plugin, `tox-pipenv`_ is also available to use Pipenv natively with ☤ Shell Completion ------------------ -To enable completion in fish, add this to your config:: +To enable completion in fish, add this to your configuration:: eval (pipenv --completion) -Alternatively, with bash or zsh, add this to your config:: +Alternatively, with bash or zsh, add this to your configuration:: eval "$(pipenv --completion)" @@ -587,9 +611,9 @@ at all, use the `PIP_IGNORE_INSTALLED` setting:: There is a subtle but very important distinction to be made between **applications** and **libraries**. This is a very common source of confusion in the Python community. -Libraries provide reusable functionality to other libraries and applications (let's use the umbrella term **projects** here). They are required to work alongside other libraries, all with their own set of subdependencies. They define **abstract dependencies**. To avoid version conflicts in subdependencies of different libraries within a project, libraries should never ever pin dependency versions. Although they may specify lower or (less frequently) upper bounds, if they rely on some specific feature/fix/bug. Library dependencies are specified via ``install_requires`` in ``setup.py``. +Libraries provide reusable functionality to other libraries and applications (let's use the umbrella term **projects** here). They are required to work alongside other libraries, all with their own set of sub-dependencies. They define **abstract dependencies**. To avoid version conflicts in sub-dependencies of different libraries within a project, libraries should never ever pin dependency versions. Although they may specify lower or (less frequently) upper bounds, if they rely on some specific feature/fix/bug. Library dependencies are specified via ``install_requires`` in ``setup.py``. 
-Libraries are ultimately meant to be used in some **application**. Applications are different in that they usually are not depended on by other projects. They are meant to be deployed into some specific environment and only then should the exact versions of all their dependencies and subdependencies be made concrete. To make this process easier is currently the main goal of Pipenv. +Libraries are ultimately meant to be used in some **application**. Applications are different in that they usually are not depended on by other projects. They are meant to be deployed into some specific environment and only then should the exact versions of all their dependencies and sub-dependencies be made concrete. To make this process easier is currently the main goal of Pipenv. To summarize: diff --git a/docs/conf.py b/docs/conf.py index 90e16afeef..4031c337c9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,7 +18,6 @@ # import os - # Path hackery to get current version number. here = os.path.abspath(os.path.dirname(__file__)) @@ -41,6 +40,7 @@ 'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'sphinx_click.ext', + 'sphinxcontrib.spelling', ] # Add any paths that contain templates here, relative to this directory. @@ -87,6 +87,8 @@ # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True +spelling_word_list_filename = "spelling_wordlist.txt" +spelling_show_suggestions = True # -- Options for HTML output ---------------------------------------------- diff --git a/docs/dev/contributing.rst b/docs/dev/contributing.rst index fb116696c1..5979924508 100644 --- a/docs/dev/contributing.rst +++ b/docs/dev/contributing.rst @@ -118,10 +118,10 @@ To get your development environment setup, run: pipenv install --dev -This will install the repo version of Pipenv and then install the development +This will install the repository version of Pipenv and then install the development dependencies. Once that has completed, you can start developing. 
-The repo version of Pipenv must be installed over other global versions to +The repository version of Pipenv must be installed over other global versions to resolve conflicts with the ``pipenv`` folder being implicitly added to ``sys.path``. See `pypa/pipenv#2557`_ for more details. @@ -152,7 +152,7 @@ tests, the standard pytest filters are available, such as: Code Review ~~~~~~~~~~~ -Contributions will not be merged until they've been code reviewed. You should +Contributions will not be merged until they have been code reviewed. You should implement any code review feedback unless you strongly object to it. In the event that you object to the code review feedback, you should make your case clearly and calmly. If, after doing so, the feedback is judged to still apply, @@ -205,7 +205,7 @@ be aware of the following things when filing bug reports: to check whether your bug report or feature request has been mentioned in the past. Duplicate bug reports and feature requests are a huge maintenance burden on the limited resources of the project. If it is clear from your - report that you would have struggled to find the original, that's ok, but + report that you would have struggled to find the original, that's okay, but if searching for a selection of words in your issue title would have found the duplicate then the issue will likely be closed extremely abruptly. 2. When filing bug reports about exceptions or tracebacks, please include the diff --git a/docs/dev/philosophy.rst b/docs/dev/philosophy.rst index 3f4c0bd00e..b3461c35f3 100644 --- a/docs/dev/philosophy.rst +++ b/docs/dev/philosophy.rst @@ -13,7 +13,7 @@ Management Style `Dan Ryan `__, `Tzu-ping Chung `__, and `Nate Prewitt `__ are the core contributors. They are responsible for triaging bug reports, reviewing pull requests and ensuring that Kenneth is kept up to speed with developments around the library. -The day-to-day managing of the project is done by the core contributors. 
They are responsible for making judgements about whether or not a feature request is +The day-to-day managing of the project is done by the core contributors. They are responsible for making judgments about whether or not a feature request is likely to be accepted by Kenneth. Values diff --git a/docs/diagnose.rst b/docs/diagnose.rst index 5cc47f4c54..b6b4fa9fdf 100644 --- a/docs/diagnose.rst +++ b/docs/diagnose.rst @@ -29,7 +29,7 @@ usually one of the following locations: * ``%LOCALAPPDATA%\pipenv\pipenv\Cache`` (Windows) * ``~/.cache/pipenv`` (other operating systems) -Pipenv does not install prereleases (i.e. a version with an alpha/beta/etc. +Pipenv does not install pre-releases (i.e. a version with an alpha/beta/etc. suffix, such as *1.0b1*) by default. You will need to pass the ``--pre`` flag in your command, or set @@ -124,7 +124,7 @@ for more information. --------------------------------------------- When you configure a supervisor program's ``command`` with ``pipenv run ...``, you -need to set locale enviroment variables properly to make it work. +need to set locale environment variables properly to make it work. Add this line under ``[supervisord]`` section in ``/etc/supervisor/supervisord.conf``:: diff --git a/docs/index.rst b/docs/index.rst index 26bfde19b2..799d778d33 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -84,7 +84,7 @@ User Testimonials - Automatically finds your project home, recursively, by looking for a ``Pipfile``. - Automatically generates a ``Pipfile``, if one doesn't exist. - Automatically creates a virtualenv in a standard location. -- Automatically adds/removes packages to a ``Pipfile`` when they are un/installed. +- Automatically adds/removes packages to a ``Pipfile`` when they are installed or uninstalled. - Automatically loads ``.env`` files, if they exist. The main commands are ``install``, ``uninstall``, and ``lock``, which generates a ``Pipfile.lock``. 
These are intended to replace ``$ pip install`` usage, as well as manual virtualenv management (to activate a virtualenv, run ``$ pipenv shell``). diff --git a/docs/install.rst b/docs/install.rst index 6aceba0c1a..97e756981c 100644 --- a/docs/install.rst +++ b/docs/install.rst @@ -71,7 +71,7 @@ Homebrew/Linuxbrew installer takes care of pip for you. =================== Pipenv is a dependency manager for Python projects. If you're familiar -with Node.js' `npm`_ or Ruby's `bundler`_, it is similar in spirit to those +with Node\.js's `npm`_ or Ruby's `bundler`_, it is similar in spirit to those tools. While pip can install Python packages, Pipenv is recommended as it's a higher-level tool that simplifies dependency management for common use cases. @@ -101,7 +101,7 @@ To upgrade pipenv at any time:: ☤ Pragmatic Installation of Pipenv ---------------------------------- -If you have a working installation of pip, and maintain certain "toolchain" type Python modules as global utilities in your user environment, pip `user installs `_ allow for installation into your home directory. Note that due to interaction between dependencies, you should limit tools installed in this way to basic building blocks for a Python workflow like virtualenv, pipenv, tox, and similar software. +If you have a working installation of pip, and maintain certain "tool-chain" type Python modules as global utilities in your user environment, pip `user installs `_ allow for installation into your home directory. Note that due to interaction between dependencies, you should limit tools installed in this way to basic building blocks for a Python workflow like virtualenv, pipenv, tox, and similar software. 
To install:: diff --git a/docs/requirements.txt b/docs/requirements.txt index ee44a2ca5b..0e349f9cd1 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -20,6 +20,7 @@ six==1.11.0 snowballstemmer==1.2.1 Sphinx==1.6.3 sphinx-click==1.3.0 +sphinxcontrib-spelling==4.2.1 sphinxcontrib-websupport==1.1.0 urllib3==1.24.1 virtualenv==16.1.0 diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt new file mode 100644 index 0000000000..219984ec6b --- /dev/null +++ b/docs/spelling_wordlist.txt @@ -0,0 +1,87 @@ +appdir +ascii +asdf +backport +bashrc +bundler +canonicalized +cmder +Cmder +codebase +Conda +CPython +cygwin +Deduplicate +Devops +eval +filesystem +Homebrew +ini +installable +Integrations +io +js +json +Linuxbrew +lockfile +macOS +Makefile +manpage +metadata +mingw +misconfiguration +misconfigured +msys +natively +npm +parallelization +parsers +pathlib +pexpect +pipenv +Pipenv +Pipfile +Pipfiles +piptools +powershell +Powershell +pre +py +pyenv +pypi +PyPI +pythonfinder +resolvers +runtime +runtimes +sayers +scandir +sha +stateful +subdirectory +subprocess +subprocesses +subshell +supervisord +tox +Tox +tracebacks +triaging +txt +unicode +uninstallation +unnesting +untrusted +url +urls +UTF +vcs +vendored +Vendored +venv +virtualenv +virtualenvs +Virtualenv +Virtualenvs +zsh +zshrc diff --git a/news/2317.doc.rst b/news/2317.doc.rst deleted file mode 100644 index ff56fe4df3..0000000000 --- a/news/2317.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Added documenation about variable expansion in ``Pipfile`` entries. diff --git a/news/2373.bugfix.rst b/news/2373.bugfix.rst deleted file mode 100644 index 9b42add116..0000000000 --- a/news/2373.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Raise `PipenvUsageError` when [[source]] does not contain url field. 
diff --git a/news/2553.behavior.rst b/news/2553.behavior.rst deleted file mode 100644 index d66edfa2fa..0000000000 --- a/news/2553.behavior.rst +++ /dev/null @@ -1 +0,0 @@ -Make conservative checks of known exceptions when subprocess returns output, so user won't see the whole traceback - just the error. \ No newline at end of file diff --git a/news/2722.bugfix.rst b/news/2722.bugfix.rst deleted file mode 100644 index 8c26df8d32..0000000000 --- a/news/2722.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug which caused editable package resolution to sometimes fail with an unhelpful setuptools-related error message. diff --git a/news/2783.bugfix.rst b/news/2783.bugfix.rst deleted file mode 100644 index 7fa3cfd1f3..0000000000 --- a/news/2783.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed an issue which caused errors due to reliance on the system utilities ``which`` and ``where`` which may not always exist on some systems. -- Fixed a bug which caused periodic failures in python discovery when executables named ``python`` were not present on the target ``$PATH``. diff --git a/news/3053.bugfix.rst b/news/3053.bugfix.rst deleted file mode 100644 index 21134f59f9..0000000000 --- a/news/3053.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Dependency resolution now writes hashes for local and remote files to the lockfile. diff --git a/news/3071.bugfix.rst b/news/3071.bugfix.rst deleted file mode 100644 index dd4145ea67..0000000000 --- a/news/3071.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug which prevented ``pipenv graph`` from correctly showing all dependencies when running from within ``pipenv shell``. 
diff --git a/news/3120.doc.rst b/news/3120.doc.rst deleted file mode 100644 index a2f8ae6cd2..0000000000 --- a/news/3120.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Consolidate all contributing docs in the rst file diff --git a/news/3148.bugfix.rst b/news/3148.bugfix.rst deleted file mode 100644 index 1f0f4a62f4..0000000000 --- a/news/3148.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed resolution of direct-url dependencies in ``setup.py`` files to respect ``PEP-508`` style URL dependencies. diff --git a/news/3148.feature.rst b/news/3148.feature.rst deleted file mode 100644 index e33434db9b..0000000000 --- a/news/3148.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for resolution of direct-url dependencies in ``setup.py`` files to respect ``PEP-508`` style URL dependencies. diff --git a/news/3246.doc.rst b/news/3246.doc.rst deleted file mode 100644 index 284ecd0aee..0000000000 --- a/news/3246.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Update the out-dated manual page. diff --git a/news/3292.trivial.rst b/news/3292.trivial.rst deleted file mode 100644 index 9cab5de14c..0000000000 --- a/news/3292.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Update pytest-pypi documentation not to be pytest-httpbin documentation. diff --git a/news/3298.bugfix.rst b/news/3298.bugfix.rst deleted file mode 100644 index aa378723e4..0000000000 --- a/news/3298.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed a bug which caused failures in warning reporting when running pipenv inside a virtualenv under some circumstances. - -- Fixed a bug with package discovery when running ``pipenv clean``. diff --git a/news/3298.feature.rst b/news/3298.feature.rst deleted file mode 100644 index 65a49424bc..0000000000 --- a/news/3298.feature.rst +++ /dev/null @@ -1,5 +0,0 @@ -Added full support for resolution of all dependency types including direct URLs, zip archives, tarballs, etc. - -- Improved error handling and formatting. 
- -- Introduced improved cross platform stream wrappers for better ``stdout`` and ``stderr`` consistency. diff --git a/news/3298.vendor.rst b/news/3298.vendor.rst deleted file mode 100644 index cab9a50bfb..0000000000 --- a/news/3298.vendor.rst +++ /dev/null @@ -1,34 +0,0 @@ -Updated vendored dependencies: - - - **attrs**: ``18.2.0`` => ``19.1.0`` - - **certifi**: ``2018.10.15`` => ``2019.3.9`` - - **cached_property**: ``1.4.3`` => ``1.5.1`` - - **cerberus**: ``1.2.0`` => ``1.3.1`` - - **click-completion**: ``0.5.0`` => ``0.5.1`` - - **colorama**: ``0.3.9`` => ``0.4.1`` - - **distlib**: ``0.2.8`` => ``0.2.9`` - - **idna**: ``2.7`` => ``2.8`` - - **jinja2**: ``2.10.0`` => ``2.10.1`` - - **markupsafe**: ``1.0`` => ``1.1.1`` - - **orderedmultidict**: ``(new)`` => ``1.0`` - - **packaging**: ``18.0`` => ``19.0`` - - **parse**: ``1.9.0`` => ``1.12.0`` - - **pathlib2**: ``2.3.2`` => ``2.3.3`` - - **pep517**: ``(new)`` => ``0.5.0`` - - **pexpect**: ``4.6.0`` => ``4.7.0`` - - **pipdeptree**: ``0.13.0`` => ``0.13.2`` - - **pyparsing**: ``2.2.2`` => ``2.3.1`` - - **python-dotenv**: ``0.9.1`` => ``0.10.2`` - - **pythonfinder**: ``1.1.10`` => ``1.2.1`` - - **pytoml**: ``(new)`` => ``0.1.20`` - - **requests**: ``2.20.1`` => ``2.21.0`` - - **requirementslib**: ``1.3.3`` => ``1.5.0`` - - **scandir**: ``1.9.0`` => ``1.10.0`` - - **shellingham**: ``1.2.7`` => ``1.3.1`` - - **six**: ``1.11.0`` => ``1.12.0`` - - **tomlkit**: ``0.5.2`` => ``0.5.3`` - - **urllib3**: ``1.24`` => ``1.25.2`` - - **vistir**: ``0.3.0`` => ``0.4.1`` - - **yaspin**: ``0.14.0`` => ``0.14.3`` - -- Removed vendored dependency **cursor**. diff --git a/news/3307.bugfix.rst b/news/3307.bugfix.rst deleted file mode 100644 index 0f095c1afd..0000000000 --- a/news/3307.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Quote command arguments with carets (``^``) on Windows to work around unintended shell escapes. 
diff --git a/news/3313.bugfix.rst b/news/3313.bugfix.rst deleted file mode 100644 index 2f7a6ffc54..0000000000 --- a/news/3313.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Handle alternate names for UTF-8 encoding. diff --git a/news/3318.bugfix.rst b/news/3318.bugfix.rst deleted file mode 100644 index b56f75dd40..0000000000 --- a/news/3318.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Abort pipenv before adding the non-exist package to Pipfile. diff --git a/news/3324.bugfix.rst b/news/3324.bugfix.rst deleted file mode 100644 index d13a8d468f..0000000000 --- a/news/3324.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Don't normalize the package name user passes in. diff --git a/news/3328.feature.rst b/news/3328.feature.rst deleted file mode 100644 index 7e92d39fdb..0000000000 --- a/news/3328.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Pipenv will now successfully recursively lock VCS sub-dependencies. diff --git a/news/3339.bugfix b/news/3339.bugfix deleted file mode 100644 index 8e67e36f96..0000000000 --- a/news/3339.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug where custom virtualenv can not be activated with pipenv shell diff --git a/news/3346.doc.rst b/news/3346.doc.rst deleted file mode 100644 index c985f001f4..0000000000 --- a/news/3346.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Move CLI docs to its own page. diff --git a/news/3348.feature.rst b/news/3348.feature.rst deleted file mode 100644 index 50547a3d79..0000000000 --- a/news/3348.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for ``--verbose`` output to ``pipenv run``. \ No newline at end of file diff --git a/news/3351.bugfix.rst b/news/3351.bugfix.rst deleted file mode 100644 index d2d9c675f0..0000000000 --- a/news/3351.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a bug that ``--site-packages`` flag is not recognized. 
diff --git a/news/3353.bugfix b/news/3353.bugfix deleted file mode 100644 index 23e2b6af8a..0000000000 --- a/news/3353.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug where pipenv --clear is not working diff --git a/news/3362.trivial.rst b/news/3362.trivial.rst deleted file mode 100644 index 2216b071d2..0000000000 --- a/news/3362.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -The inline tables won't be rewritten now. diff --git a/news/3368.feature.rst b/news/3368.feature.rst deleted file mode 100644 index a998fce185..0000000000 --- a/news/3368.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Pipenv will now discover and resolve the intrinsic dependencies of **all** VCS dependencies, whether they are editable or not, to prevent resolution conflicts. diff --git a/news/3384.bugfix.rst b/news/3384.bugfix.rst deleted file mode 100644 index f85cd16898..0000000000 --- a/news/3384.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix unhashable type error during ``$ pipenv install --selective-upgrade`` diff --git a/news/3386.behavior.rst b/news/3386.behavior.rst deleted file mode 100644 index 8ddc27c6b2..0000000000 --- a/news/3386.behavior.rst +++ /dev/null @@ -1 +0,0 @@ -Do not touch Pipfile early and rely on it so that one can do ``pipenv sync`` without a Pipfile. diff --git a/news/3404.bugfix.rst b/news/3404.bugfix.rst deleted file mode 100644 index fa678d6ab8..0000000000 --- a/news/3404.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a keyerror which could occur when locking VCS dependencies in some cases. diff --git a/news/3427.bugfix.rst b/news/3427.bugfix.rst deleted file mode 100644 index 76aeb48985..0000000000 --- a/news/3427.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug that ``ValidationError`` is thrown when some fields are missing in source section. 
diff --git a/news/3434.trivial.rst b/news/3434.trivial.rst deleted file mode 100644 index 622b52db4a..0000000000 --- a/news/3434.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Improve the error message when one tries to initialize a Pipenv project under ``/``. diff --git a/news/3446.trivial.rst b/news/3446.trivial.rst deleted file mode 100644 index c3f6a00062..0000000000 --- a/news/3446.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the wrong order of old and new hashes in message. diff --git a/news/3449.bugfix.rst b/news/3449.bugfix.rst deleted file mode 100644 index 4ed0704686..0000000000 --- a/news/3449.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Updated the index names in lock file when source name in Pipfile is changed. diff --git a/news/3479.bugfix.rst b/news/3479.bugfix.rst deleted file mode 100644 index 15e8e0f652..0000000000 --- a/news/3479.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an issue which caused ``pipenv install --help`` to show duplicate entries for ``--pre``. diff --git a/news/3499.doc.rst b/news/3499.doc.rst deleted file mode 100644 index d98b0f1597..0000000000 --- a/news/3499.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Replace (non-existant) video on docs index.rst with equivalent gif. diff --git a/news/3502.bugfix.rst b/news/3502.bugfix.rst deleted file mode 100644 index 2700a7400e..0000000000 --- a/news/3502.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix bug causing ``[SSL: CERTIFICATE_VERIFY_FAILED]`` when Pipfile ``[[source]]`` has verify_ssl=false and url with custom port. 
diff --git a/news/3522.doc.rst b/news/3522.doc.rst deleted file mode 100644 index 3d71061fd6..0000000000 --- a/news/3522.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Clarify wording in Basic Usage example on using double quotes to escape shell redirection diff --git a/news/3527.doc.rst b/news/3527.doc.rst deleted file mode 100644 index b6043a08d9..0000000000 --- a/news/3527.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Ensure docs show navigation on small-screen devices diff --git a/news/3537.bugfix.rst b/news/3537.bugfix.rst deleted file mode 100644 index 779b9d7cc5..0000000000 --- a/news/3537.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``sync --sequential`` ignoring ``pip install`` errors and logs. diff --git a/news/3577.feature.rst b/news/3577.feature.rst deleted file mode 100644 index 7944c09878..0000000000 --- a/news/3577.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added a new environment variable, ``PIPENV_RESOLVE_VCS``, to toggle dependency resolution off for non-editable VCS, file, and URL based dependencies. diff --git a/news/3584.bugfix.rst b/news/3584.bugfix.rst deleted file mode 100644 index 09684d1dcb..0000000000 --- a/news/3584.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the issue that lock file can't be created when ``PIPENV_PIPFILE`` is not under working directory. diff --git a/news/3595.feature.rst b/news/3595.feature.rst deleted file mode 100644 index 30b755b974..0000000000 --- a/news/3595.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added the ability for Windows users to enable emojis by setting ``PIPENV_HIDE_EMOJIS=0``. 
diff --git a/news/3621.trivial.rst b/news/3621.trivial.rst deleted file mode 100644 index 4d38a31ef9..0000000000 --- a/news/3621.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Removed unused vendored package shutilwhich diff --git a/news/3629.doc.rst b/news/3629.doc.rst deleted file mode 100644 index 4d878c4023..0000000000 --- a/news/3629.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Added a link to the TOML Spec under General Recommendations & Version Control to clarify how Pipfiles should be written. diff --git a/news/3640.trivial.rst b/news/3640.trivial.rst deleted file mode 100644 index eb9b718d69..0000000000 --- a/news/3640.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Removed unused vendored package blindspin diff --git a/news/3644.trivial.rst b/news/3644.trivial.rst deleted file mode 100644 index 5a7db2e18b..0000000000 --- a/news/3644.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Use tablib instead of requests in tests to avoid failures when vendored diff --git a/news/3647.bugfix.rst b/news/3647.bugfix.rst deleted file mode 100644 index cb64edc1bc..0000000000 --- a/news/3647.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Pipenv will no longer inadvertently set ``editable=True`` on all vcs dependencies. diff --git a/news/3652.feature.rst b/news/3652.feature.rst deleted file mode 100644 index 7e5becb965..0000000000 --- a/news/3652.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Allow overriding PIPENV_INSTALL_TIMEOUT environment variable (in seconds). diff --git a/news/3656.bugfix.rst b/news/3656.bugfix.rst deleted file mode 100644 index 58df202019..0000000000 --- a/news/3656.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -The ``--keep-outdated`` argument to ``pipenv install`` and ``pipenv lock`` will now drop specifier constraints when encountering editable dependencies. -- In addition, ``--keep-outdated`` will retain specifiers that would otherwise be dropped from any entries that have not been updated. 
diff --git a/news/3669.trivial.rst b/news/3669.trivial.rst deleted file mode 100644 index 86ff928050..0000000000 --- a/news/3669.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Allow KeyboardInterrupt to cancel test suite checks for working internet and ssh diff --git a/news/3684.trivial.rst b/news/3684.trivial.rst deleted file mode 100644 index 64561ec747..0000000000 --- a/news/3684.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Cleaned up some conditional logic that would always evaluate ``True``. diff --git a/news/3711.trivial.rst b/news/3711.trivial.rst deleted file mode 100644 index 48c531b210..0000000000 --- a/news/3711.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Add installation instructions for Debian Buster+ in README diff --git a/news/3718.bugfix.rst b/news/3718.bugfix.rst deleted file mode 100644 index 7a90ea50bc..0000000000 --- a/news/3718.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug which sometimes caused pipenv to fail to respect the ``--site-packages`` flag when passed with ``pipenv install``. diff --git a/news/3724.trivial.rst b/news/3724.trivial.rst deleted file mode 100644 index 63a550133c..0000000000 --- a/news/3724.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Update pytest configuration to support pytest 4. diff --git a/news/3738.feature.rst b/news/3738.feature.rst deleted file mode 100644 index bb8237e7ad..0000000000 --- a/news/3738.feature.rst +++ /dev/null @@ -1,3 +0,0 @@ -Allow overriding PIP_EXISTS_ACTION evironment variable (value is passed to pip install). -Possible values here: https://pip.pypa.io/en/stable/reference/pip/#exists-action-option -Useful when you need to `PIP_EXISTS_ACTION=i` (ignore existing packages) - great for CI environments, where you need really fast setup. diff --git a/news/3745.bugfix.rst b/news/3745.bugfix.rst deleted file mode 100644 index 229047a40d..0000000000 --- a/news/3745.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Normalize the package names to lowercase when comparing used and in-Pipfile packages. 
diff --git a/news/3753.trivial.rst b/news/3753.trivial.rst deleted file mode 100644 index 2ab71d388f..0000000000 --- a/news/3753.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Improve the error message of ``pipenv --py`` when virtualenv can't be found. diff --git a/news/3759.doc.rst b/news/3759.doc.rst deleted file mode 100644 index 5aebd29e78..0000000000 --- a/news/3759.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Updated the documentation with the new ``pytest`` entrypoint. diff --git a/news/3763.feature.rst b/news/3763.feature.rst deleted file mode 100644 index 544a1ace42..0000000000 --- a/news/3763.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Pipenv will no longer forcibly override ``PIP_NO_DEPS`` on all vcs and file dependencies as resolution happens on these in a pre-lock step. diff --git a/news/3766.bugfix.rst b/news/3766.bugfix.rst deleted file mode 100644 index f7f7d304f7..0000000000 --- a/news/3766.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -``pipenv update --outdated`` will now correctly handle comparisons between pre/post-releases and normal releases. diff --git a/news/3766.vendor.rst b/news/3766.vendor.rst deleted file mode 100644 index 16ebbed917..0000000000 --- a/news/3766.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Updated ``pip_shims`` to support ``--outdated`` with new pip versions. diff --git a/news/3768.bugfix.rst b/news/3768.bugfix.rst deleted file mode 100644 index 8efe019787..0000000000 --- a/news/3768.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a ``KeyError`` which could occur when pinning outdated VCS dependencies via ``pipenv lock --keep-outdated``. diff --git a/news/3786.bugfix.rst b/news/3786.bugfix.rst deleted file mode 100644 index 210f7973ee..0000000000 --- a/news/3786.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Resolved an issue which caused resolution to fail when encountering poorly formatted ``python_version`` markers in ``setup.py`` and ``setup.cfg`` files. 
diff --git a/news/3794.bugfix.rst b/news/3794.bugfix.rst deleted file mode 100644 index a2999fdd6b..0000000000 --- a/news/3794.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a bug that installation errors are displayed as a list. diff --git a/news/3807.bugfix.rst b/news/3807.bugfix.rst deleted file mode 100644 index 6330c6bce9..0000000000 --- a/news/3807.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update ``pythonfinder`` to fix a problem that ``python.exe`` will be mistakenly chosen for -virtualenv creation under WSL. diff --git a/news/3809.bugfix.rst b/news/3809.bugfix.rst deleted file mode 100644 index bd603aaf9d..0000000000 --- a/news/3809.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed several bugs which could prevent editable VCS dependencies from being installed into target environments, even when reporting successful installation. diff --git a/news/3810.feature.rst b/news/3810.feature.rst deleted file mode 100644 index 335037790c..0000000000 --- a/news/3810.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Improved verbose logging output during ``pipenv lock`` will now stream output to the console while maintaining a spinner. diff --git a/news/3819.bugfix.rst b/news/3819.bugfix.rst deleted file mode 100644 index a6e05fb5da..0000000000 --- a/news/3819.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -``pipenv check --system`` should find the correct Python interpreter when ``python`` does not exist on the system. diff --git a/news/3842.bugfix.rst b/news/3842.bugfix.rst deleted file mode 100644 index fb21be89de..0000000000 --- a/news/3842.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Resolve the symlinks when the path is absolute. diff --git a/news/3844.behavior.rst b/news/3844.behavior.rst deleted file mode 100644 index 2648e8398b..0000000000 --- a/news/3844.behavior.rst +++ /dev/null @@ -1 +0,0 @@ -Re-enable ``--help`` option for ``pipenv run`` command. 
diff --git a/news/3879.bugfix.rst b/news/3879.bugfix.rst deleted file mode 100644 index 95413ca515..0000000000 --- a/news/3879.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Pass ``--pre`` and ``--clear`` options to ``pipenv update --outdated``. diff --git a/news/3885.trivial.rst b/news/3885.trivial.rst deleted file mode 100644 index 7782e0c97f..0000000000 --- a/news/3885.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Remove a misleading code comment from Specifying Versions documentation. diff --git a/news/3911.doc.rst b/news/3911.doc.rst deleted file mode 100644 index a5ab134f9a..0000000000 --- a/news/3911.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Fix link to GIF in README.md demonstrating Pipenv's usage, and add descriptive alt text. diff --git a/news/3912.doc.rst b/news/3912.doc.rst deleted file mode 100644 index 24598d19f2..0000000000 --- a/news/3912.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Added a line describing potential issues in fancy extension. diff --git a/news/3913.doc.rst b/news/3913.doc.rst deleted file mode 100644 index 54fbbfe8e4..0000000000 --- a/news/3913.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Documental description of how Pipfile works and association with Pipenv. diff --git a/news/3914.doc.rst b/news/3914.doc.rst deleted file mode 100644 index ae37d61d86..0000000000 --- a/news/3914.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Clarify the proper value of `python_version` and `python_full_version`. diff --git a/news/3915.doc.rst b/news/3915.doc.rst deleted file mode 100644 index 2cc94a205b..0000000000 --- a/news/3915.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Write description for --deploy extension and few extensions differences. diff --git a/news/4018.feature.rst b/news/4018.feature.rst deleted file mode 100644 index fcd6a2a91d..0000000000 --- a/news/4018.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for automatic python installs via ``asdf`` and associated ``PIPENV_DONT_USE_ASDF`` environment variable. 
diff --git a/news/4045.bugfix.rst b/news/4045.bugfix.rst deleted file mode 100644 index 6558018c03..0000000000 --- a/news/4045.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Honor PIPENV_SPINNER environment variable diff --git a/news/4100.doc.rst b/news/4100.doc.rst deleted file mode 100644 index 050bdcca96..0000000000 --- a/news/4100.doc.rst +++ /dev/null @@ -1 +0,0 @@ -More documentation for ``.env`` files diff --git a/news/4137.doc b/news/4137.doc deleted file mode 100644 index 45de74d280..0000000000 --- a/news/4137.doc +++ /dev/null @@ -1 +0,0 @@ -Updated documentation to point to working links. \ No newline at end of file diff --git a/news/4167.doc.rst b/news/4167.doc.rst deleted file mode 100644 index da71ef026d..0000000000 --- a/news/4167.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Replace docs.pipenv.org with pipenv.pypa.io diff --git a/news/4169.vendor.rst b/news/4169.vendor.rst deleted file mode 100644 index d128f56cbc..0000000000 --- a/news/4169.vendor.rst +++ /dev/null @@ -1,50 +0,0 @@ -Update vendored dependencies and invocations - -- Update vendored and patched dependencies - - Update patches on `piptools`, `pip`, `pip-shims`, `tomlkit` -- Fix invocations of dependencies - - Fix custom `InstallCommand` instantiation - - Update `PackageFinder` usage - - Fix `Bool` stringify attempts from `tomlkit` - -Updated vendored dependencies: - - **attrs**: ``18.2.0`` => ``19.1.0`` - - **certifi**: ``2018.10.15`` => ``2019.3.9`` - - **cached_property**: ``1.4.3`` => ``1.5.1`` - - **cerberus**: ``1.2.0`` => ``1.3.1`` - - **click**: ``7.0.0`` => ``7.1.1`` - - **click-completion**: ``0.5.0`` => ``0.5.1`` - - **colorama**: ``0.3.9`` => ``0.4.3`` - - **contextlib2**: ``(new)`` => ``0.6.0.post1`` - - **distlib**: ``0.2.8`` => ``0.2.9`` - - **funcsigs**: ``(new)`` => ``1.0.2`` - - **importlib_metadata** ``1.3.0`` => ``1.5.1`` - - **importlib-resources**: ``(new)`` => ``1.4.0`` - - **idna**: ``2.7`` => ``2.9`` - - **jinja2**: ``2.10.0`` => ``2.11.1`` - - **markupsafe**: ``1.0`` => 
``1.1.1`` - - **more-itertools**: ``(new)`` => ``5.0.0`` - - **orderedmultidict**: ``(new)`` => ``1.0`` - - **packaging**: ``18.0`` => ``19.0`` - - **parse**: ``1.9.0`` => ``1.15.0`` - - **pathlib2**: ``2.3.2`` => ``2.3.3`` - - **pep517**: ``(new)`` => ``0.5.0`` - - **pexpect**: ``4.6.0`` => ``4.8.0`` - - **pip-shims**: ``0.2.0`` => ``0.5.1`` - - **pipdeptree**: ``0.13.0`` => ``0.13.2`` - - **pyparsing**: ``2.2.2`` => ``2.4.6`` - - **python-dotenv**: ``0.9.1`` => ``0.10.2`` - - **pythonfinder**: ``1.1.10`` => ``1.2.2`` - - **pytoml**: ``(new)`` => ``0.1.20`` - - **requests**: ``2.20.1`` => ``2.23.0`` - - **requirementslib**: ``1.3.3`` => ``1.5.4`` - - **scandir**: ``1.9.0`` => ``1.10.0`` - - **shellingham**: ``1.2.7`` => ``1.3.2`` - - **six**: ``1.11.0`` => ``1.14.0`` - - **tomlkit**: ``0.5.2`` => ``0.5.11`` - - **urllib3**: ``1.24`` => ``1.25.8`` - - **vistir**: ``0.3.0`` => ``0.5.0`` - - **yaspin**: ``0.14.0`` => ``0.14.3`` - - **zipp**: ``0.6.0`` - -- Removed vendored dependency **cursor**. diff --git a/news/4188.bugfix.rst b/news/4188.bugfix.rst deleted file mode 100644 index 0ea2c94331..0000000000 --- a/news/4188.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an issue with ``pipenv check`` failing due to an invalid API key from ``pyup.io``. 
diff --git a/news/4188.vendor.rst b/news/4188.vendor.rst deleted file mode 100644 index e24a45fdfb..0000000000 --- a/news/4188.vendor.rst +++ /dev/null @@ -1,11 +0,0 @@ -Add and update vendored dependencies to accommodate ``safety`` vendoring: -- **safety** ``(none)`` => ``1.8.7`` -- **dparse** ``(none)`` => ``0.5.0`` -- **pyyaml** ``(none)`` => ``5.3.1`` -- **urllib3** ``1.25.8`` => ``1.25.9`` -- **certifi** ``2019.11.28`` => ``2020.4.5.1`` -- **pyparsing** ``2.4.6`` => ``2.4.7`` -- **resolvelib** ``0.2.2`` => ``0.3.0`` -- **importlib-metadata** ``1.5.1`` => ``1.6.0`` -- **pip-shims** ``0.5.1`` => ``0.5.2`` -- **requirementslib** ``1.5.5`` => ``1.5.6`` diff --git a/news/4199.behavior.rst b/news/4199.behavior.rst deleted file mode 100644 index eb1dae3c3f..0000000000 --- a/news/4199.behavior.rst +++ /dev/null @@ -1 +0,0 @@ -Make sure `pipenv lock -r --pypi-mirror {MIRROR_URL}` will respect the pypi-mirror in requirements output. diff --git a/news/4210.trivial.rst b/news/4210.trivial.rst deleted file mode 100644 index 3116af9ed1..0000000000 --- a/news/4210.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Updated PyUp.io information to reflect current situation. diff --git a/news/4215.vendor.rst b/news/4215.vendor.rst deleted file mode 100644 index 20a34b0244..0000000000 --- a/news/4215.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Updated vendored ``pip`` => ``20.0.2`` and ``pip-tools`` => ``5.0.0``. 
diff --git a/pipenv/__version__.py b/pipenv/__version__.py index edda169d15..24299f3f32 100644 --- a/pipenv/__version__.py +++ b/pipenv/__version__.py @@ -2,4 +2,4 @@ # // ) ) / / // ) ) //___) ) // ) ) || / / # //___/ / / / //___/ / // // / / || / / # // / / // ((____ // / / ||/ / -__version__ = "2020.4.1b1" +__version__ = "2020.5.28" diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index a2f6cbc9a3..718475b2ad 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -43,7 +43,7 @@ "--completion", is_flag=True, default=False, - help="Output completion (to be eval'd).", + help="Output completion (to be executed by the shell).", ) @option("--man", is_flag=True, default=False, help="Display manpage.") @option( @@ -237,7 +237,7 @@ def install( lock=not state.installstate.skip_lock, ignore_pipfile=state.installstate.ignore_pipfile, skip_lock=state.installstate.skip_lock, - requirements=state.installstate.requirementstxt, + requirementstxt=state.installstate.requirementstxt, sequential=state.installstate.sequential, pre=state.installstate.pre, code=state.installstate.code, @@ -255,14 +255,14 @@ def install( @cli.command( - short_help="Un-installs a provided package and removes it from Pipfile.", + short_help="Uninstalls a provided package and removes it from Pipfile.", context_settings=subcommand_context ) @option( "--all-dev", is_flag=True, default=False, - help="Un-install all package from [dev-packages].", + help="Uninstall all package from [dev-packages].", ) @option( "--all", @@ -280,7 +280,7 @@ def uninstall( all=False, **kwargs ): - """Un-installs a provided package and removes it from Pipfile.""" + """Uninstalls a provided package and removes it from Pipfile.""" from ..core import do_uninstall retcode = do_uninstall( packages=state.installstate.packages, @@ -298,6 +298,19 @@ def uninstall( if retcode: sys.exit(retcode) +LOCK_HEADER = """\ +# +# These requirements were autogenerated by pipenv +# To regenerate from the project's Pipfile, run: +# 
+# pipenv lock {options} +# +""" + +LOCK_DEV_NOTE="""\ +# Note: in pipenv 2020.x, "--dev" changed to emit both default and development +# requirements. To emit only development requirements, pass "--dev-only". +""" @cli.command(short_help="Generates Pipfile.lock.", context_settings=CONTEXT_SETTINGS) @lock_options @@ -317,13 +330,37 @@ def lock( three=state.three, python=state.python, pypi_mirror=state.pypi_mirror, warn=(not state.quiet), site_packages=state.site_packages, ) - if state.installstate.requirementstxt: + emit_requirements = state.lockoptions.emit_requirements + dev = state.installstate.dev + dev_only = state.lockoptions.dev_only + pre = state.installstate.pre + if emit_requirements: + # Emit requirements file header (unless turned off with --no-header) + if state.lockoptions.emit_requirements_header: + header_options = ["--requirements"] + if dev_only: + header_options.append("--dev-only") + elif dev: + header_options.append("--dev") + click.echo(LOCK_HEADER.format(options=" ".join(header_options))) + # TODO: Emit pip-compile style header + if dev and not dev_only: + click.echo(LOCK_DEV_NOTE) + # Setting "emit_requirements=True" means do_init() just emits the + # install requirements file to stdout, it doesn't install anything do_init( - dev=state.installstate.dev, - requirements=state.installstate.requirementstxt, + dev=dev, + dev_only=dev_only, + emit_requirements=emit_requirements, pypi_mirror=state.pypi_mirror, pre=state.installstate.pre, ) + elif state.lockoptions.dev_only: + raise exceptions.PipenvOptionsError( + "--dev-only", + "--dev-only is only permitted in combination with --requirements. " + "Aborting." 
+ ) do_lock( ctx=ctx, clear=state.clear, @@ -349,7 +386,7 @@ def lock( "--anyway", is_flag=True, default=False, - help="Always spawn a subshell, even if one is already spawned.", + help="Always spawn a sub-shell, even if one is already spawned.", ) @argument("shell_args", nargs=-1) @pypi_mirror_option @@ -410,7 +447,8 @@ def run(state, command, args): @cli.command( - short_help="Checks for security vulnerabilities and against PEP 508 markers provided in Pipfile.", + short_help="Checks for PyUp Safety security vulnerabilities and against" + " PEP 508 markers provided in Pipfile.", context_settings=subcommand_context ) @option( @@ -423,24 +461,31 @@ def run(state, command, args): "--db", nargs=1, default=lambda: os.environ.get('PIPENV_SAFETY_DB', False), - help="Path to a local vulnerability database. Default: ENV PIPENV_SAFETY_DB or None", + help="Path to a local PyUp Safety vulnerabilities database." + " Default: ENV PIPENV_SAFETY_DB or None.", ) @option( "--ignore", "-i", multiple=True, - help="Ignore specified vulnerability during safety checks.", + help="Ignore specified vulnerability during PyUp Safety checks.", ) @option( "--output", type=Choice(["default", "json", "full-report", "bare"]), default="default", - help="Translates to --json, --full-report or --bare from safety check", + help="Translates to --json, --full-report or --bare from PyUp Safety check", +) +@option( + "--key", + help="Safety API key from PyUp.io for scanning dependencies against a live" + " vulnerabilities database. Leave blank for scanning against a" + " database that only updates once a month.", ) @option( "--quiet", is_flag=True, - help="Quiet stdout except vulnerability report." + help="Quiet standard output, except vulnerability report." 
) @common_options @system_option @@ -453,11 +498,12 @@ def check( style=False, ignore=None, output="default", + key=None, quiet=False, args=None, **kwargs ): - """Checks for security vulnerabilities and against PEP 508 markers provided in Pipfile.""" + """Checks for PyUp Safety security vulnerabilities and against PEP 508 markers provided in Pipfile.""" from ..core import do_check do_check( @@ -468,6 +514,7 @@ def check( db=db, ignore=ignore, output=output, + key=key, quiet=quiet, args=args, pypi_mirror=state.pypi_mirror, diff --git a/pipenv/cli/options.py b/pipenv/cli/options.py index fc45256f1f..30a6882fde 100644 --- a/pipenv/cli/options.py +++ b/pipenv/cli/options.py @@ -65,6 +65,7 @@ def __init__(self): self.clear = False self.system = False self.installstate = InstallState() + self.lockoptions = LockOptions() class InstallState(object): @@ -82,6 +83,11 @@ def __init__(self): self.packages = [] self.editables = [] +class LockOptions(object): + def __init__(self): + self.dev_only = False + self.emit_requirements = False + self.emit_requirements_header = False pass_state = make_pass_decorator(State, ensure=True) @@ -102,7 +108,7 @@ def callback(ctx, param, value): state.extra_index_urls.extend(list(value)) return value return option("--extra-index-url", multiple=True, expose_value=False, - help=u"URLs to the extra PyPI compatible indexes to query for package lookups.", + help=u"URLs to the extra PyPI compatible indexes to query for package look-ups.", callback=callback, envvar="PIP_EXTRA_INDEX_URL")(f) @@ -112,8 +118,10 @@ def callback(ctx, param, value): state.installstate.editables.extend(value) return value return option('-e', '--editable', expose_value=False, multiple=True, - help='An editable python package URL or path, often to a VCS repo.', - callback=callback, type=click.types.STRING)(f) + callback=callback, type=click.types.STRING, help=( + "An editable Python package URL or path, often to a VCS " + "repository." 
+ ))(f) def sequential_option(f): @@ -167,16 +175,28 @@ def callback(ctx, param, value): callback=callback, type=click.types.BOOL, show_envvar=True)(f) -def dev_option(f): +def _dev_option(f, help_text): def callback(ctx, param, value): state = ctx.ensure_object(State) state.installstate.dev = value return value return option("--dev", "-d", is_flag=True, default=False, type=click.types.BOOL, - help="Install both develop and default packages.", callback=callback, + help=help_text, callback=callback, expose_value=False, show_envvar=True)(f) +def install_dev_option(f): + return _dev_option(f, "Install both develop and default packages") + + +def lock_dev_option(f): + return _dev_option(f, "Generate both develop and default requirements") + + +def uninstall_dev_option(f): + return _dev_option(f, "Deprecated (as it has no effect). May be removed in a future release.") + + def pre_option(f): def callback(ctx, param, value): state = ctx.ensure_object(State) @@ -300,15 +320,32 @@ def callback(ctx, param, value): help="Import a requirements.txt file.", callback=callback)(f) -def requirements_flag(f): +def emit_requirements_flag(f): def callback(ctx, param, value): state = ctx.ensure_object(State) if value: - state.installstate.requirementstxt = value + state.lockoptions.emit_requirements = value return value return option("--requirements", "-r", default=False, is_flag=True, expose_value=False, help="Generate output in requirements.txt format.", callback=callback)(f) +def emit_requirements_header_flag(f): + def callback(ctx, param, value): + state = ctx.ensure_object(State) + if value: + state.lockoptions.emit_requirements_header = value + return value + return option("--header/--no-header", default=True, is_flag=True, expose_value=False, + help="Add header to generated requirements", callback=callback)(f) + +def dev_only_flag(f): + def callback(ctx, param, value): + state = ctx.ensure_object(State) + if value: + state.lockoptions.dev_only = value + return value + return 
option("--dev-only", default=False, is_flag=True, expose_value=False, + help="Emit development dependencies *only* (overrides --dev)", callback=callback)(f) def code_option(f): def callback(ctx, param, value): @@ -380,7 +417,6 @@ def common_options(f): def install_base_options(f): f = common_options(f) - f = dev_option(f) f = pre_option(f) f = keep_outdated_option(f) return f @@ -388,6 +424,7 @@ def install_base_options(f): def uninstall_options(f): f = install_base_options(f) + f = uninstall_dev_option(f) f = skip_lock_option(f) f = editable_option(f) f = package_arg(f) @@ -396,12 +433,15 @@ def uninstall_options(f): def lock_options(f): f = install_base_options(f) - f = requirements_flag(f) + f = lock_dev_option(f) + f = emit_requirements_flag(f) + f = dev_only_flag(f) return f def sync_options(f): f = install_base_options(f) + f = install_dev_option(f) f = sequential_option(f) return f diff --git a/pipenv/core.py b/pipenv/core.py index 3b549c3537..1938e7f587 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -164,6 +164,7 @@ def load_dot_env(): err=True, ) dotenv.load_dotenv(dotenv_file, override=True) + six.moves.reload_module(environments) def add_to_path(p): @@ -352,15 +353,16 @@ def find_a_system_python(line): def ensure_python(three=None, python=None): - # Support for the PIPENV_PYTHON environment variable. - from .environments import PIPENV_PYTHON + # Runtime import is necessary due to the possibility that the environments module may have been reloaded. 
+ from .environments import PIPENV_PYTHON, PIPENV_YES if PIPENV_PYTHON and python is False and three is None: python = PIPENV_PYTHON - def abort(): + def abort(msg=''): click.echo( - "You can specify specific versions of Python with:\n {0}".format( + "{0}\nYou can specify specific versions of Python with:\n{1}".format( + crayons.red(msg), crayons.red( "$ pipenv --python {0}".format( os.sep.join(("path", "to", "python")) @@ -395,21 +397,25 @@ def abort(): err=True, ) # check for python installers - from .vendor.pythonfinder.environment import PYENV_INSTALLED, ASDF_INSTALLED - from .installers import Pyenv, Asdf, InstallerError + from .installers import Pyenv, Asdf, InstallerError, InstallerNotFound # prefer pyenv if both pyenv and asdf are installed as it's # dedicated to python installs so probably the preferred # method of the user for new python installs. - if PYENV_INSTALLED and not PIPENV_DONT_USE_PYENV: - installer = Pyenv("pyenv") - elif ASDF_INSTALLED and not PIPENV_DONT_USE_ASDF: - installer = Asdf("asdf") - else: - installer = None + installer = None + if not PIPENV_DONT_USE_PYENV: + try: + installer = Pyenv() + except InstallerNotFound: + pass + if installer is None and not PIPENV_DONT_USE_ASDF: + try: + installer = Asdf() + except InstallerNotFound: + pass if not installer: - abort() + abort("Neither 'pyenv' nor 'asdf' could be found to install Python.") else: if SESSION_IS_INTERACTIVE or PIPENV_YES: try: @@ -417,9 +423,7 @@ def abort(): except ValueError: abort() except InstallerError as e: - click.echo(fix_utf8("Something went wrong…")) - click.echo(crayons.blue(e.err), err=True) - abort() + abort('Something went wrong while installing Python:\n{}'.format(e.err)) s = "{0} {1} {2}".format( "Would you like us to install", crayons.green("CPython {0}".format(version)), @@ -434,7 +438,7 @@ def abort(): u"{0} {1} {2} {3}{4}".format( crayons.normal(u"Installing", bold=True), crayons.green(u"CPython {0}".format(version), bold=True), - crayons.normal(u"with 
{0}".format(installer), bold=True), + crayons.normal(u"with {0}".format(installer.cmd), bold=True), crayons.normal(u"(this may take a few minutes)"), crayons.normal(fix_utf8("…"), bold=True), ) @@ -732,6 +736,9 @@ def batch_install(deps_list, procs, failed_deps_queue, deps_to_install = deps_list[:] deps_to_install.extend(sequential_deps) + deps_to_install = [ + dep for dep in deps_to_install if not project.environment.is_satisfied(dep) + ] sequential_dep_names = [d.name for d in sequential_deps] deps_list_bar = progress.bar( @@ -796,9 +803,9 @@ def batch_install(deps_list, procs, failed_deps_queue, def do_install_dependencies( dev=False, - only=False, + dev_only=False, bare=False, - requirements=False, + emit_requirements=False, allow_global=False, ignore_hashes=False, skip_lock=False, @@ -809,14 +816,14 @@ def do_install_dependencies( """" Executes the install functionality. - If requirements is True, simply spits out a requirements format to stdout. + If emit_requirements is True, simply spits out a requirements format to stdout. """ from six.moves import queue - if requirements: + if emit_requirements: bare = True - # Load the lockfile if it exists, or if only is being used (e.g. lock is being used). - if skip_lock or only or not project.lockfile_exists: + # Load the lockfile if it exists, or if dev_only is being used. + if skip_lock or not project.lockfile_exists: if not bare: click.echo( crayons.normal(fix_utf8("Installing dependencies from Pipfile…"), bold=True) @@ -836,14 +843,14 @@ def do_install_dependencies( ) # Allow pip to resolve dependencies when in skip-lock mode. 
no_deps = not skip_lock # skip_lock true, no_deps False, pip resolves deps - deps_list = list(lockfile.get_requirements(dev=dev, only=requirements)) - if requirements: + dev = dev or dev_only + deps_list = list(lockfile.get_requirements(dev=dev, only=dev_only)) + if emit_requirements: index_args = prepare_pip_source_args(get_source_list(pypi_mirror=pypi_mirror, project=project)) index_args = " ".join(index_args).replace(" -", "\n-") deps = [ req.as_line(sources=False, include_hashes=False) for req in deps_list ] - # Output only default dependencies click.echo(index_args) click.echo( "\n".join(sorted(deps)) @@ -1195,7 +1202,8 @@ def do_purge(bare=False, downloads=False, allow_global=False): def do_init( dev=False, - requirements=False, + dev_only=False, + emit_requirements=False, allow_global=False, ignore_pipfile=False, skip_lock=False, @@ -1300,7 +1308,8 @@ def do_init( ) do_install_dependencies( dev=dev, - requirements=requirements, + dev_only=dev_only, + emit_requirements=emit_requirements, allow_global=allow_global, skip_lock=skip_lock, concurrent=concurrent, @@ -1489,7 +1498,7 @@ def pip_install( pip_args = get_pip_args( pre=pre, verbose=environments.is_verbose(), upgrade=True, selective_upgrade=selective_upgrade, no_use_pep517=not use_pep517, - no_deps=no_deps, require_hashes=not ignore_hashes + no_deps=no_deps, require_hashes=not ignore_hashes, ) pip_command.extend(pip_args) if r: @@ -1812,7 +1821,10 @@ def do_outdated(pypi_mirror=None, pre=False, clear=False): (pkg.project_name, pkg.parsed_version, pkg.latest_version) for pkg in project.environment.get_outdated_packages() } - reverse_deps = project.environment.reverse_dependencies() + reverse_deps = { + canonicalize_name(name): deps + for name, deps in project.environment.reverse_dependencies().items() + } for result in installed_packages: dep = Requirement.from_line(str(result.as_requirement())) packages.update(dep.as_pipfile()) @@ -1842,9 +1854,9 @@ def do_outdated(pypi_mirror=None, pre=False, 
clear=False): version = None if name_in_pipfile: version = get_version(project.packages[name_in_pipfile]) - reverse_deps = reverse_deps.get(name_in_pipfile) - if isinstance(reverse_deps, Mapping) and "required" in reverse_deps: - required = " {0} required".format(reverse_deps["required"]) + rdeps = reverse_deps.get(canonicalize_name(package)) + if isinstance(rdeps, Mapping) and "required" in rdeps: + required = " {0} required".format(rdeps["required"]) if version: pipfile_version_text = " ({0} set in Pipfile)".format(version) else: @@ -1882,7 +1894,7 @@ def do_install( lock=True, ignore_pipfile=False, skip_lock=False, - requirements=False, + requirementstxt=False, sequential=False, pre=False, code=False, @@ -1905,7 +1917,7 @@ def do_install( package_args = [p for p in packages if p] + [p for p in editable_packages if p] skip_requirements = False # Don't search for requirements.txt files if the user provides one - if requirements or package_args or project.pipfile_exists: + if requirementstxt or package_args or project.pipfile_exists: skip_requirements = True concurrent = not sequential # Ensure that virtualenv is available and pipfile are available @@ -1930,7 +1942,7 @@ def do_install( pre = project.settings.get("allow_prereleases") if not keep_outdated: keep_outdated = project.settings.get("keep_outdated") - remote = requirements and is_valid_url(requirements) + remote = requirementstxt and is_valid_url(requirementstxt) # Warn and exit if --system is used without a pipfile. 
if (system and package_args) and not (PIPENV_VIRTUALENV): raise exceptions.SystemUsageError @@ -1949,17 +1961,17 @@ def do_install( prefix="pipenv-", suffix="-requirement.txt", dir=requirements_directory ) temp_reqs = fd.name - requirements_url = requirements + requirements_url = requirementstxt # Download requirements file try: - download_file(requirements, temp_reqs) + download_file(requirements_url, temp_reqs) except IOError: fd.close() os.unlink(temp_reqs) click.echo( crayons.red( u"Unable to find requirements file at {0}.".format( - crayons.normal(requirements) + crayons.normal(requirements_url) ) ), err=True, @@ -1968,9 +1980,9 @@ def do_install( finally: fd.close() # Replace the url with the temporary requirements file - requirements = temp_reqs + requirementstxt = temp_reqs remote = True - if requirements: + if requirementstxt: error, traceback = None, None click.echo( crayons.normal( @@ -1979,10 +1991,10 @@ def do_install( err=True, ) try: - import_requirements(r=project.path_to(requirements), dev=dev) + import_requirements(r=project.path_to(requirementstxt), dev=dev) except (UnicodeDecodeError, PipError) as e: # Don't print the temp file path if remote since it will be deleted. - req_path = requirements_url if remote else project.path_to(requirements) + req_path = requirements_url if remote else project.path_to(requirementstxt) error = ( u"Unexpected syntax in {0}. 
Are you sure this is a " "requirements.txt style file?".format(req_path) @@ -2559,6 +2571,7 @@ def do_check( db=False, ignore=None, output="default", + key=None, quiet=False, args=None, pypi_mirror=None @@ -2676,8 +2689,8 @@ def do_check( if not quiet and not environments.is_quiet(): click.echo(crayons.normal("Using local database {}".format(db))) cmd.append("--db={0}".format(db)) - if PIPENV_PYUP_API_KEY and not db: - cmd = cmd + ["--key={0}".format(PIPENV_PYUP_API_KEY)] + elif key or PIPENV_PYUP_API_KEY: + cmd = cmd + ["--key={0}".format(key or PIPENV_PYUP_API_KEY)] if ignored: for cve in ignored: cmd += cve diff --git a/pipenv/environment.py b/pipenv/environment.py index 7538ea9efa..09ea73a3f4 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -19,12 +19,14 @@ import pipenv from .vendor.cached_property import cached_property +from .vendor.packaging.utils import canonicalize_name from .vendor import vistir from .utils import normalize_path, make_posix BASE_WORKING_SET = pkg_resources.WorkingSet(sys.path) +# TODO: Unittests for this class class Environment(object): @@ -712,6 +714,33 @@ def is_installed(self, pkgname): return any(d for d in self.get_distributions() if d.project_name == pkgname) + def is_satisfied(self, req): + match = next( + iter( + d for d in self.get_distributions() + if canonicalize_name(d.project_name) == req.normalized_name + ), None + ) + if match is not None: + if req.editable and req.line_instance.is_local and self.find_egg(match): + requested_path = req.line_instance.path + return requested_path and vistir.compat.samefile(requested_path, match.location) + elif match.has_metadata("direct_url.json"): + direct_url_metadata = json.loads(match.get_metadata("direct_url.json")) + commit_id = direct_url_metadata.get("vcs_info", {}).get("commit_id", "") + vcs_type = direct_url_metadata.get("vcs_info", {}).get("vcs", "") + _, pipfile_part = req.as_pipfile().popitem() + return ( + vcs_type == req.vcs and commit_id == req.commit_hash + 
and direct_url_metadata["url"] == pipfile_part[req.vcs] + ) + elif req.line_instance.specifiers is not None: + return req.line_instance.specifiers.contains( + match.version, prereleases=True + ) + return True + return False + def run(self, cmd, cwd=os.curdir): """Run a command with :class:`~subprocess.Popen` in the context of the environment diff --git a/pipenv/environments.py b/pipenv/environments.py index 848fec8774..887522312c 100644 --- a/pipenv/environments.py +++ b/pipenv/environments.py @@ -14,6 +14,25 @@ # HACK: avoid resolver.py uses the wrong byte code files. # I hope I can remove this one day. os.environ["PYTHONDONTWRITEBYTECODE"] = fs_str("1") +_false_values = ("0", "false", "no", "off") +_true_values = ("1", "true", "yes", "on") + + +def env_to_bool(val): + """ + Convert **val** to boolean, returning True if truthy or False if falsey + + :param Any val: The value to convert + :return: False if Falsey, True if truthy + :rtype: bool + """ + if isinstance(val, bool): + return val + if val.lower() in _false_values: + return False + if val.lower() in _true_values: + return True + raise ValueError("Value is not a valid boolean-like: {0}".format(val)) def _is_env_truthy(name): @@ -21,7 +40,41 @@ def _is_env_truthy(name): """ if name not in os.environ: return False - return os.environ.get(name).lower() not in ("0", "false", "no", "off") + return os.environ.get(name).lower() not in _false_values + + +def get_from_env(arg, prefix="PIPENV", check_for_negation=True): + """ + Check the environment for a variable, returning its truthy or stringified value + + For example, setting ``PIPENV_NO_RESOLVE_VCS=1`` would mean that + ``get_from_env("RESOLVE_VCS", prefix="PIPENV")`` would return ``False``. 
+ + :param str arg: The name of the variable to look for + :param str prefix: The prefix to attach to the variable, defaults to "PIPENV" + :param bool check_for_negation: Whether to check for ``_NO_``, defaults + to True + :return: The value from the environment if available + :rtype: Optional[Union[str, bool]] + """ + negative_lookup = "NO_{0}".format(arg) + positive_lookup = arg + if prefix: + positive_lookup = "{0}_{1}".format(prefix, arg) + negative_lookup = "{0}_{1}".format(prefix, negative_lookup) + if positive_lookup in os.environ: + value = os.environ[positive_lookup] + try: + return env_to_bool(value) + except ValueError: + return value + if check_for_negation and negative_lookup in os.environ: + value = os.environ[negative_lookup] + try: + return not env_to_bool(value) + except ValueError: + return value + return None PIPENV_IS_CI = bool("CI" in os.environ or "TF_BUILD" in os.environ) @@ -160,7 +213,7 @@ def _is_env_truthy(name): PIPENV_SPINNER = os.environ.get("PIPENV_SPINNER", PIPENV_SPINNER) """Sets the default spinner type. 
-Spinners are identitcal to the node.js spinners and can be found at +Spinners are identical to the ``node.js`` spinners and can be found at https://github.com/sindresorhus/cli-spinners """ @@ -244,7 +297,7 @@ def _is_env_truthy(name): PIP_EXISTS_ACTION = os.environ.get("PIP_EXISTS_ACTION", "w") """Specifies the value for pip's --exists-action option -Defaullts to (w)ipe +Defaults to ``(w)ipe`` """ PIPENV_RESOLVE_VCS = ( diff --git a/pipenv/installers.py b/pipenv/installers.py index cd8e49a24a..f16cdacdb2 100644 --- a/pipenv/installers.py +++ b/pipenv/installers.py @@ -1,8 +1,12 @@ +import os import operator import re +from abc import ABCMeta, abstractmethod + from .environments import PIPENV_INSTALL_TIMEOUT from .vendor import attr, delegator +from .utils import find_windows_executable @attr.s @@ -48,6 +52,10 @@ def matches_minor(self, other): return (self.major, self.minor) == (other.major, other.minor) +class InstallerNotFound(RuntimeError): + pass + + class InstallerError(RuntimeError): def __init__(self, desc, c): super(InstallerError, self).__init__(desc) @@ -56,29 +64,70 @@ def __init__(self, desc, c): class Installer(object): + __metaclass__ = ABCMeta - def __init__(self, cmd): - self._cmd = cmd + def __init__(self): + self.cmd = self._find_installer() + super(Installer, self).__init__() - def __str__(self): - return self._cmd + @abstractmethod + def _find_installer(self): + pass + + @staticmethod + def _find_python_installer_by_name_and_env(name, env_var): + """ + Given a python installer (pyenv or asdf), try to locate the binary for that + installer. + + pyenv/asdf are not always present on PATH. Both installers also support a + custom environment variable (PYENV_ROOT or ASDF_DIR) which allows them to + be installed into a non-default location (the default/suggested source + install location is in ~/.pyenv or ~/.asdf). + + For systems without the installers on PATH, and with a custom location + (e.g. 
/opt/pyenv), Pipenv can use those installers without modifications to + PATH, if an installer's respective environment variable is present in an + environment's .env file. + + This function searches for installer binaries in the following locations, + by precedence: + 1. On PATH, equivalent to which(1). + 2. In the "bin" subdirectory of PYENV_ROOT or ASDF_DIR, depending on the + installer. + 3. In ~/.pyenv/bin or ~/.asdf/bin, depending on the installer. + """ + for candidate in ( + # Look for the Python installer using the equivalent of 'which'. On + # Homebrew-installed systems, the env var may not be set, but this + # strategy will work. + find_windows_executable('', name), + # Check for explicitly set install locations (e.g. PYENV_ROOT, ASDF_DIR). + os.path.join(os.path.expanduser(os.getenv(env_var, '/dev/null')), 'bin', name), + # Check the pyenv/asdf-recommended from-source install locations + os.path.join(os.path.expanduser('~/.{}'.format(name)), 'bin', name), + ): + if candidate is not None and os.path.isfile(candidate) and os.access(candidate, os.X_OK): + return candidate + raise InstallerNotFound() def _run(self, *args, **kwargs): timeout = kwargs.pop('timeout', delegator.TIMEOUT) if kwargs: k = list(kwargs.keys())[0] raise TypeError('unexpected keyword argument {0!r}'.format(k)) - args = (self._cmd,) + tuple(args) + args = (self.cmd,) + tuple(args) c = delegator.run(args, block=False, timeout=timeout) c.block() if c.return_code != 0: - raise InstallerError('faild to run {0}'.format(args), c) + raise InstallerError('failed to run {0}'.format(args), c) return c + @abstractmethod def iter_installable_versions(self): """Iterate through CPython versions available for Pipenv to install. """ - raise NotImplementedError + pass def find_version_to_install(self, name): """Find a version in the installer from the version supplied. 
@@ -100,6 +149,7 @@ def find_version_to_install(self, name): ) return best_match + @abstractmethod def install(self, version): """Install the given version with runner implementation. @@ -109,11 +159,14 @@ def install(self, version): A ValueError is raised if the given version does not have a match in the runner. A InstallerError is raised if the runner command fails. """ - raise NotImplementedError + pass class Pyenv(Installer): + def _find_installer(self): + return self._find_python_installer_by_name_and_env('pyenv', 'PYENV_ROOT') + def iter_installable_versions(self): """Iterate through CPython versions available for Pipenv to install. """ @@ -140,6 +193,9 @@ def install(self, version): class Asdf(Installer): + def _find_installer(self): + return self._find_python_installer_by_name_and_env('asdf', 'ASDF_DIR') + def iter_installable_versions(self): """Iterate through CPython versions available for asdf to install. """ diff --git a/pipenv/patched/crayons.py b/pipenv/patched/crayons.py index de735dafe9..d7644a216a 100644 --- a/pipenv/patched/crayons.py +++ b/pipenv/patched/crayons.py @@ -12,8 +12,8 @@ import re import sys -import shellingham -import colorama +from pipenv.vendor import shellingham +from pipenv.vendor import colorama PY3 = sys.version_info[0] >= 3 diff --git a/pipenv/patched/notpip/__main__.py b/pipenv/patched/notpip/__main__.py index 56f669fafa..3c2161897b 100644 --- a/pipenv/patched/notpip/__main__.py +++ b/pipenv/patched/notpip/__main__.py @@ -11,7 +11,9 @@ # Resulting path is the name of the wheel itself # Add that to sys.path so we can import pipenv.patched.notpip path = os.path.dirname(os.path.dirname(__file__)) + pipenv = os.path.dirname(os.path.dirname(path)) sys.path.insert(0, path) + sys.path.insert(0, pipenv) from pipenv.patched.notpip._internal.cli.main import main as _main # isort:skip # noqa diff --git a/pipenv/patched/notpip/_internal/index/package_finder.py b/pipenv/patched/notpip/_internal/index/package_finder.py index 
e8a806a448..8c3e98957d 100644 --- a/pipenv/patched/notpip/_internal/index/package_finder.py +++ b/pipenv/patched/notpip/_internal/index/package_finder.py @@ -535,7 +535,7 @@ def _sort_key(self, candidate, ignore_compatibility=True): ) if self._prefer_binary: binary_preference = 1 - tags = self.valid_tags if not ignore_compatibility else None + tags = valid_tags try: pri = -(wheel.support_index_min(tags=tags)) except TypeError: diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py index 28ece192c4..1123fb64b6 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py @@ -76,7 +76,7 @@ def simplify_markers(ireq): def clean_requires_python(candidates): """Get a cleaned list of all the candidates with valid specifiers in the `requires_python` attributes.""" all_candidates = [] - py_version = parse_version(os.environ.get('PIP_PYTHON_VERSION', '.'.join(map(str, sys.version_info[:3])))) + py_version = parse_version(os.environ.get('PIPENV_REQUESTED_PYTHON_VERSION', '.'.join(map(str, sys.version_info[:3])))) for c in candidates: if getattr(c, "requires_python", None): # Old specifications had people setting this to single digits @@ -181,6 +181,8 @@ def format_requirement(ireq, marker=None, hashes=None): """ if ireq.editable: line = "-e {}".format(ireq.link.url) + elif ireq.link and ireq.link.is_vcs: + line = str(ireq.req) elif is_url_requirement(ireq): line = ireq.link.url else: diff --git a/pipenv/pipenv.1 b/pipenv/pipenv.1 index 7dd63f5864..ed8758a0dd 100644 --- a/pipenv/pipenv.1 +++ b/pipenv/pipenv.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH "PIPENV" "1" "Jul 14, 2019" "2018.11.27.dev0" "pipenv" +.TH "PIPENV" "1" "May 28, 2020" "2020.5.28" "pipenv" .SH NAME pipenv \- pipenv Documentation . @@ -30,7 +30,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-\fI\%\fP\fI\%\fP\fI\%\fP\fI\%\fP +\fI\%\fP\fI\%\fP\fI\%\fP .sp .ce ---- @@ -178,7 +178,7 @@ Homebrew/Linuxbrew installer takes care of pip for you. .SS ☤ Installing Pipenv .sp Pipenv is a dependency manager for Python projects. If you\(aqre familiar -with Node.js\(aq \fI\%npm\fP or Ruby\(aqs \fI\%bundler\fP, it is similar in spirit to those +with Node.js\(aqs \fI\%npm\fP or Ruby\(aqs \fI\%bundler\fP, it is similar in spirit to those tools. While pip can install Python packages, Pipenv is recommended as it\(aqs a higher\-level tool that simplifies dependency management for common use cases. @@ -215,7 +215,7 @@ $ brew upgrade pipenv .UNINDENT .SS ☤ Pragmatic Installation of Pipenv .sp -If you have a working installation of pip, and maintain certain "toolchain" type Python modules as global utilities in your user environment, pip \fI\%user installs\fP allow for installation into your home directory. Note that due to interaction between dependencies, you should limit tools installed in this way to basic building blocks for a Python workflow like virtualenv, pipenv, tox, and similar software. +If you have a working installation of pip, and maintain certain "tool\-chain" type Python modules as global utilities in your user environment, pip \fI\%user installs\fP allow for installation into your home directory. Note that due to interaction between dependencies, you should limit tools installed in this way to basic building blocks for a Python workflow like virtualenv, pipenv, tox, and similar software. .sp To install: .INDENT 0.0 @@ -396,6 +396,439 @@ You might want to set \fBexport PIPENV_VENV_IN_PROJECT=1\fP in your .bashrc/.zsh .sp Congratulations, you now know how to install and use Python packages! ✨ 🍰 ✨ .SS Release and Version History +.SS 2020.5.28 (2020\-05\-28) +.SS Features & Improvements +.INDENT 0.0 +.IP \(bu 2 +\fBpipenv install\fP and \fBpipenv sync\fP will no longer attempt to install satisfied dependencies during installation. 
\fI\%#3057\fP, +\fI\%#3506\fP +.IP \(bu 2 +Added support for resolution of direct\-url dependencies in \fBsetup.py\fP files to respect \fBPEP\-508\fP style URL dependencies. \fI\%#3148\fP +.IP \(bu 2 +Added full support for resolution of all dependency types including direct URLs, zip archives, tarballs, etc. +.INDENT 2.0 +.IP \(bu 2 +Improved error handling and formatting. +.IP \(bu 2 +Introduced improved cross platform stream wrappers for better \fBstdout\fP and \fBstderr\fP consistency. \fI\%#3298\fP +.UNINDENT +.IP \(bu 2 +For consistency with other commands and the \fB\-\-dev\fP option +description, \fBpipenv lock \-\-requirements \-\-dev\fP now emits +both default and development dependencies. +The new \fB\-\-dev\-only\fP option requests the previous +behaviour (e.g. to generate a \fBdev\-requirements.txt\fP file). \fI\%#3316\fP +.IP \(bu 2 +Pipenv will now successfully recursively lock VCS sub\-dependencies. \fI\%#3328\fP +.IP \(bu 2 +Added support for \fB\-\-verbose\fP output to \fBpipenv run\fP\&. \fI\%#3348\fP +.IP \(bu 2 +Pipenv will now discover and resolve the intrinsic dependencies of \fBall\fP VCS dependencies, whether they are editable or not, to prevent resolution conflicts. \fI\%#3368\fP +.IP \(bu 2 +Added a new environment variable, \fBPIPENV_RESOLVE_VCS\fP, to toggle dependency resolution off for non\-editable VCS, file, and URL based dependencies. \fI\%#3577\fP +.IP \(bu 2 +Added the ability for Windows users to enable emojis by setting \fBPIPENV_HIDE_EMOJIS=0\fP\&. \fI\%#3595\fP +.IP \(bu 2 +Allow overriding PIPENV_INSTALL_TIMEOUT environment variable (in seconds). \fI\%#3652\fP +.IP \(bu 2 +Allow overriding PIP_EXISTS_ACTION environment variable (value is passed to pip install). +Possible values here: \fI\%https://pip.pypa.io/en/stable/reference/pip/#exists\-action\-option\fP +Useful when you need to \fIPIP_EXISTS_ACTION=i\fP (ignore existing packages) \- great for CI environments, where you need really fast setup. 
\fI\%#3738\fP +.IP \(bu 2 +Pipenv will no longer forcibly override \fBPIP_NO_DEPS\fP on all vcs and file dependencies as resolution happens on these in a pre\-lock step. \fI\%#3763\fP +.IP \(bu 2 +Improved verbose logging output during \fBpipenv lock\fP will now stream output to the console while maintaining a spinner. \fI\%#3810\fP +.IP \(bu 2 +Added support for automatic python installs via \fBasdf\fP and associated \fBPIPENV_DONT_USE_ASDF\fP environment variable. \fI\%#4018\fP +.IP \(bu 2 +Pyenv/asdf can now be used whether or not they are available on PATH. Setting PYENV_ROOT/ASDF_DIR in a Pipenv\(aqs .env allows Pipenv to install an interpreter without any shell customizations, so long as pyenv/asdf is installed. \fI\%#4245\fP +.IP \(bu 2 +Added \fB\-\-key\fP command line parameter for including personal PyUp.io API tokens when running \fBpipenv check\fP\&. \fI\%#4257\fP +.UNINDENT +.SS Behavior Changes +.INDENT 0.0 +.IP \(bu 2 +Make conservative checks of known exceptions when subprocess returns output, so user won\(aqt see the whole traceback \- just the error. \fI\%#2553\fP +.IP \(bu 2 +Do not touch Pipfile early and rely on it so that one can do \fBpipenv sync\fP without a Pipfile. \fI\%#3386\fP +.IP \(bu 2 +Re\-enable \fB\-\-help\fP option for \fBpipenv run\fP command. \fI\%#3844\fP +.IP \(bu 2 +Make sure \fBpipenv lock \-r \-\-pypi\-mirror {MIRROR_URL}\fP will respect the pypi\-mirror in requirements output. \fI\%#4199\fP +.UNINDENT +.SS Bug Fixes +.INDENT 0.0 +.IP \(bu 2 +Raise \fIPipenvUsageError\fP when [[source]] does not contain url field. \fI\%#2373\fP +.IP \(bu 2 +Fixed a bug which caused editable package resolution to sometimes fail with an unhelpful setuptools\-related error message. \fI\%#2722\fP +.IP \(bu 2 +Fixed an issue which caused errors due to reliance on the system utilities \fBwhich\fP and \fBwhere\fP which may not always exist on some systems. 
+\- Fixed a bug which caused periodic failures in python discovery when executables named \fBpython\fP were not present on the target \fB$PATH\fP\&. \fI\%#2783\fP +.IP \(bu 2 +Dependency resolution now writes hashes for local and remote files to the lockfile. \fI\%#3053\fP +.IP \(bu 2 +Fixed a bug which prevented \fBpipenv graph\fP from correctly showing all dependencies when running from within \fBpipenv shell\fP\&. \fI\%#3071\fP +.IP \(bu 2 +Fixed resolution of direct\-url dependencies in \fBsetup.py\fP files to respect \fBPEP\-508\fP style URL dependencies. \fI\%#3148\fP +.IP \(bu 2 +Fixed a bug which caused failures in warning reporting when running pipenv inside a virtualenv under some circumstances. +.INDENT 2.0 +.IP \(bu 2 +Fixed a bug with package discovery when running \fBpipenv clean\fP\&. \fI\%#3298\fP +.UNINDENT +.IP \(bu 2 +Quote command arguments with carets (\fB^\fP) on Windows to work around unintended shell escapes. \fI\%#3307\fP +.IP \(bu 2 +Handle alternate names for UTF\-8 encoding. \fI\%#3313\fP +.IP \(bu 2 +Abort pipenv before adding the non\-exist package to Pipfile. \fI\%#3318\fP +.IP \(bu 2 +Don\(aqt normalize the package name user passes in. \fI\%#3324\fP +.IP \(bu 2 +Fix a bug where custom virtualenv can not be activated with pipenv shell \fI\%#3339\fP +.IP \(bu 2 +Fix a bug that \fB\-\-site\-packages\fP flag is not recognized. \fI\%#3351\fP +.IP \(bu 2 +Fix a bug where pipenv \-\-clear is not working \fI\%#3353\fP +.IP \(bu 2 +Fix unhashable type error during \fB$ pipenv install \-\-selective\-upgrade\fP \fI\%#3384\fP +.IP \(bu 2 +Dependencies with direct \fBPEP508\fP compliant VCS URLs specified in their \fBinstall_requires\fP will now be successfully locked during the resolution process. \fI\%#3396\fP +.IP \(bu 2 +Fixed a keyerror which could occur when locking VCS dependencies in some cases. \fI\%#3404\fP +.IP \(bu 2 +Fixed a bug that \fBValidationError\fP is thrown when some fields are missing in source section. 
\fI\%#3427\fP +.IP \(bu 2 +Updated the index names in lock file when source name in Pipfile is changed. \fI\%#3449\fP +.IP \(bu 2 +Fixed an issue which caused \fBpipenv install \-\-help\fP to show duplicate entries for \fB\-\-pre\fP\&. \fI\%#3479\fP +.IP \(bu 2 +Fix bug causing \fB[SSL: CERTIFICATE_VERIFY_FAILED]\fP when Pipfile \fB[[source]]\fP has verify_ssl=false and url with custom port. \fI\%#3502\fP +.IP \(bu 2 +Fix \fBsync \-\-sequential\fP ignoring \fBpip install\fP errors and logs. \fI\%#3537\fP +.IP \(bu 2 +Fix the issue that lock file can\(aqt be created when \fBPIPENV_PIPFILE\fP is not under working directory. \fI\%#3584\fP +.IP \(bu 2 +Pipenv will no longer inadvertently set \fBeditable=True\fP on all vcs dependencies. \fI\%#3647\fP +.IP \(bu 2 +The \fB\-\-keep\-outdated\fP argument to \fBpipenv install\fP and \fBpipenv lock\fP will now drop specifier constraints when encountering editable dependencies. +\- In addition, \fB\-\-keep\-outdated\fP will retain specifiers that would otherwise be dropped from any entries that have not been updated. \fI\%#3656\fP +.IP \(bu 2 +Fixed a bug which sometimes caused pipenv to fail to respect the \fB\-\-site\-packages\fP flag when passed with \fBpipenv install\fP\&. \fI\%#3718\fP +.IP \(bu 2 +Normalize the package names to lowercase when comparing used and in\-Pipfile packages. \fI\%#3745\fP +.IP \(bu 2 +\fBpipenv update \-\-outdated\fP will now correctly handle comparisons between pre/post\-releases and normal releases. \fI\%#3766\fP +.IP \(bu 2 +Fixed a \fBKeyError\fP which could occur when pinning outdated VCS dependencies via \fBpipenv lock \-\-keep\-outdated\fP\&. \fI\%#3768\fP +.IP \(bu 2 +Resolved an issue which caused resolution to fail when encountering poorly formatted \fBpython_version\fP markers in \fBsetup.py\fP and \fBsetup.cfg\fP files. \fI\%#3786\fP +.IP \(bu 2 +Fix a bug that installation errors are displayed as a list. 
\fI\%#3794\fP +.IP \(bu 2 +Update \fBpythonfinder\fP to fix a problem that \fBpython.exe\fP will be mistakenly chosen for +virtualenv creation under WSL. \fI\%#3807\fP +.IP \(bu 2 +Fixed several bugs which could prevent editable VCS dependencies from being installed into target environments, even when reporting successful installation. \fI\%#3809\fP +.IP \(bu 2 +\fBpipenv check \-\-system\fP should find the correct Python interpreter when \fBpython\fP does not exist on the system. \fI\%#3819\fP +.IP \(bu 2 +Resolve the symlinks when the path is absolute. \fI\%#3842\fP +.IP \(bu 2 +Pass \fB\-\-pre\fP and \fB\-\-clear\fP options to \fBpipenv update \-\-outdated\fP\&. \fI\%#3879\fP +.IP \(bu 2 +Fixed a bug which prevented resolution of direct URL dependencies which have PEP508 style direct url VCS sub\-dependencies with subdirectories. \fI\%#3976\fP +.IP \(bu 2 +Honor PIPENV_SPINNER environment variable \fI\%#4045\fP +.IP \(bu 2 +Fixed an issue with \fBpipenv check\fP failing due to an invalid API key from \fBpyup.io\fP\&. \fI\%#4188\fP +.IP \(bu 2 +Fixed a bug which caused versions from VCS dependencies to be included in \fBPipfile.lock\fP inadvertently. \fI\%#4217\fP +.IP \(bu 2 +Fixed a bug which caused pipenv to search non\-existent virtual environments for \fBpip\fP when installing using \fB\-\-system\fP\&. \fI\%#4220\fP +.IP \(bu 2 +\fBRequires\-Python\fP values specifying constraint versions of python starting from \fB1.x\fP will now be parsed successfully. \fI\%#4226\fP +.IP \(bu 2 +Fix a bug of \fBpipenv update \-\-outdated\fP that can\(aqt print output correctly. \fI\%#4229\fP +.IP \(bu 2 +Fixed a bug which caused pipenv to prefer source distributions over wheels from \fBPyPI\fP during the dependency resolution phase. +Fixed an issue which prevented proper build isolation using \fBpep517\fP based builders during dependency resolution. \fI\%#4231\fP +.IP \(bu 2 +Don\(aqt fallback to system Python when no matching Python version is found. 
\fI\%#4232\fP +.UNINDENT +.SS Vendored Libraries +.INDENT 0.0 +.IP \(bu 2 +Updated vendored dependencies: +.INDENT 2.0 +.INDENT 3.5 +.INDENT 0.0 +.IP \(bu 2 +\fBattrs\fP: \fB18.2.0\fP => \fB19.1.0\fP +.IP \(bu 2 +\fBcertifi\fP: \fB2018.10.15\fP => \fB2019.3.9\fP +.IP \(bu 2 +\fBcached_property\fP: \fB1.4.3\fP => \fB1.5.1\fP +.IP \(bu 2 +\fBcerberus\fP: \fB1.2.0\fP => \fB1.3.1\fP +.IP \(bu 2 +\fBclick\-completion\fP: \fB0.5.0\fP => \fB0.5.1\fP +.IP \(bu 2 +\fBcolorama\fP: \fB0.3.9\fP => \fB0.4.1\fP +.IP \(bu 2 +\fBdistlib\fP: \fB0.2.8\fP => \fB0.2.9\fP +.IP \(bu 2 +\fBidna\fP: \fB2.7\fP => \fB2.8\fP +.IP \(bu 2 +\fBjinja2\fP: \fB2.10.0\fP => \fB2.10.1\fP +.IP \(bu 2 +\fBmarkupsafe\fP: \fB1.0\fP => \fB1.1.1\fP +.IP \(bu 2 +\fBorderedmultidict\fP: \fB(new)\fP => \fB1.0\fP +.IP \(bu 2 +\fBpackaging\fP: \fB18.0\fP => \fB19.0\fP +.IP \(bu 2 +\fBparse\fP: \fB1.9.0\fP => \fB1.12.0\fP +.IP \(bu 2 +\fBpathlib2\fP: \fB2.3.2\fP => \fB2.3.3\fP +.IP \(bu 2 +\fBpep517\fP: \fB(new)\fP => \fB0.5.0\fP +.IP \(bu 2 +\fBpexpect\fP: \fB4.6.0\fP => \fB4.7.0\fP +.IP \(bu 2 +\fBpipdeptree\fP: \fB0.13.0\fP => \fB0.13.2\fP +.IP \(bu 2 +\fBpyparsing\fP: \fB2.2.2\fP => \fB2.3.1\fP +.IP \(bu 2 +\fBpython\-dotenv\fP: \fB0.9.1\fP => \fB0.10.2\fP +.IP \(bu 2 +\fBpythonfinder\fP: \fB1.1.10\fP => \fB1.2.1\fP +.IP \(bu 2 +\fBpytoml\fP: \fB(new)\fP => \fB0.1.20\fP +.IP \(bu 2 +\fBrequests\fP: \fB2.20.1\fP => \fB2.21.0\fP +.IP \(bu 2 +\fBrequirementslib\fP: \fB1.3.3\fP => \fB1.5.0\fP +.IP \(bu 2 +\fBscandir\fP: \fB1.9.0\fP => \fB1.10.0\fP +.IP \(bu 2 +\fBshellingham\fP: \fB1.2.7\fP => \fB1.3.1\fP +.IP \(bu 2 +\fBsix\fP: \fB1.11.0\fP => \fB1.12.0\fP +.IP \(bu 2 +\fBtomlkit\fP: \fB0.5.2\fP => \fB0.5.3\fP +.IP \(bu 2 +\fBurllib3\fP: \fB1.24\fP => \fB1.25.2\fP +.IP \(bu 2 +\fBvistir\fP: \fB0.3.0\fP => \fB0.4.1\fP +.IP \(bu 2 +\fByaspin\fP: \fB0.14.0\fP => \fB0.14.3\fP +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 2.0 +.IP \(bu 2 +Removed vendored dependency \fBcursor\fP\&. 
\fI\%#3298\fP +.UNINDENT +.IP \(bu 2 +Updated \fBpip_shims\fP to support \fB\-\-outdated\fP with new pip versions. \fI\%#3766\fP +.IP \(bu 2 +Update vendored dependencies and invocations +.INDENT 2.0 +.IP \(bu 2 +Update vendored and patched dependencies +\- Update patches on \fBpiptools\fP, \fBpip\fP, \fBpip\-shims\fP, +.nf +\(ga\(ga +.fi +tomlkit\(ga +.IP \(bu 2 +Fix invocations of dependencies +\- Fix custom +.nf +\(ga\(ga +.fi +InstallCommand\(ga instantiation +\- Update +.nf +\(ga\(ga +.fi +PackageFinder\(ga usage +\- Fix +.nf +\(ga\(ga +.fi +Bool\(ga stringify attempts from +.nf +\(ga\(ga +.fi +tomlkit\(ga +.UNINDENT +.INDENT 2.0 +.TP +.B Updated vendored dependencies: +.INDENT 7.0 +.IP \(bu 2 +\fBattrs\fP: \fB\(ga18.2.0\fP => \fB\(ga19.1.0\fP +.IP \(bu 2 +\fBcertifi\fP: \fB\(ga2018.10.15\fP => \fB\(ga2019.3.9\fP +.IP \(bu 2 +\fBcached_property\fP: \fB\(ga1.4.3\fP => \fB\(ga1.5.1\fP +.IP \(bu 2 +\fBcerberus\fP: \fB\(ga1.2.0\fP => \fB\(ga1.3.1\fP +.IP \(bu 2 +\fBclick\fP: \fB\(ga7.0.0\fP => \fB\(ga7.1.1\fP +.IP \(bu 2 +\fBclick\-completion\fP: \fB\(ga0.5.0\fP => \fB\(ga0.5.1\fP +.IP \(bu 2 +\fBcolorama\fP: \fB\(ga0.3.9\fP => \fB\(ga0.4.3\fP +.IP \(bu 2 +\fBcontextlib2\fP: \fB\(ga(new)\fP => \fB\(ga0.6.0.post1\fP +.IP \(bu 2 +\fBdistlib\fP: \fB\(ga0.2.8\fP => \fB\(ga0.2.9\fP +.IP \(bu 2 +\fBfuncsigs\fP: \fB\(ga(new)\fP => \fB\(ga1.0.2\fP +.IP \(bu 2 +\fBimportlib_metadata\fP \fB\(ga1.3.0\fP => \fB\(ga1.5.1\fP +.IP \(bu 2 +\fBimportlib\-resources\fP: \fB\(ga(new)\fP => \fB\(ga1.4.0\fP +.IP \(bu 2 +\fBidna\fP: \fB\(ga2.7\fP => \fB\(ga2.9\fP +.IP \(bu 2 +\fBjinja2\fP: \fB\(ga2.10.0\fP => \fB\(ga2.11.1\fP +.IP \(bu 2 +\fBmarkupsafe\fP: \fB\(ga1.0\fP => \fB\(ga1.1.1\fP +.IP \(bu 2 +\fBmore\-itertools\fP: \fB\(ga(new)\fP => \fB\(ga5.0.0\fP +.IP \(bu 2 +\fBorderedmultidict\fP: \fB\(ga(new)\fP => \fB\(ga1.0\fP +.IP \(bu 2 +\fBpackaging\fP: \fB\(ga18.0\fP => \fB\(ga19.0\fP +.IP \(bu 2 +\fBparse\fP: \fB\(ga1.9.0\fP => \fB\(ga1.15.0\fP +.IP \(bu 2 +\fBpathlib2\fP: 
\fB\(ga2.3.2\fP => \fB\(ga2.3.3\fP +.IP \(bu 2 +\fBpep517\fP: \fB\(ga(new)\fP => \fB\(ga0.5.0\fP +.IP \(bu 2 +\fBpexpect\fP: \fB\(ga4.6.0\fP => \fB\(ga4.8.0\fP +.IP \(bu 2 +\fBpip\-shims\fP: \fB\(ga0.2.0\fP => \fB\(ga0.5.1\fP +.IP \(bu 2 +\fBpipdeptree\fP: \fB\(ga0.13.0\fP => \fB\(ga0.13.2\fP +.IP \(bu 2 +\fBpyparsing\fP: \fB\(ga2.2.2\fP => \fB\(ga2.4.6\fP +.IP \(bu 2 +\fBpython\-dotenv\fP: \fB\(ga0.9.1\fP => \fB\(ga0.10.2\fP +.IP \(bu 2 +\fBpythonfinder\fP: \fB\(ga1.1.10\fP => \fB\(ga1.2.2\fP +.IP \(bu 2 +\fBpytoml\fP: \fB\(ga(new)\fP => \fB\(ga0.1.20\fP +.IP \(bu 2 +\fBrequests\fP: \fB\(ga2.20.1\fP => \fB\(ga2.23.0\fP +.IP \(bu 2 +\fBrequirementslib\fP: \fB\(ga1.3.3\fP => \fB\(ga1.5.4\fP +.IP \(bu 2 +\fBscandir\fP: \fB\(ga1.9.0\fP => \fB\(ga1.10.0\fP +.IP \(bu 2 +\fBshellingham\fP: \fB\(ga1.2.7\fP => \fB\(ga1.3.2\fP +.IP \(bu 2 +\fBsix\fP: \fB\(ga1.11.0\fP => \fB\(ga1.14.0\fP +.IP \(bu 2 +\fBtomlkit\fP: \fB\(ga0.5.2\fP => \fB\(ga0.5.11\fP +.IP \(bu 2 +\fBurllib3\fP: \fB\(ga1.24\fP => \fB\(ga1.25.8\fP +.IP \(bu 2 +\fBvistir\fP: \fB\(ga0.3.0\fP => \fB\(ga0.5.0\fP +.IP \(bu 2 +\fByaspin\fP: \fB\(ga0.14.0\fP => \fB\(ga0.14.3\fP +.IP \(bu 2 +\fBzipp\fP: \fB\(ga0.6.0\fP +.UNINDENT +.UNINDENT +.INDENT 2.0 +.IP \(bu 2 +Removed vendored dependency \fBcursor\fP\&. \fI\%#4169\fP +.UNINDENT +.IP \(bu 2 +Add and update vendored dependencies to accommodate \fBsafety\fP vendoring: +\- \fBsafety\fP \fB(none)\fP => \fB1.8.7\fP +\- \fBdparse\fP \fB(none)\fP => \fB0.5.0\fP +\- \fBpyyaml\fP \fB(none)\fP => \fB5.3.1\fP +\- \fBurllib3\fP \fB1.25.8\fP => \fB1.25.9\fP +\- \fBcertifi\fP \fB2019.11.28\fP => \fB2020.4.5.1\fP +\- \fBpyparsing\fP \fB2.4.6\fP => \fB2.4.7\fP +\- \fBresolvelib\fP \fB0.2.2\fP => \fB0.3.0\fP +\- \fBimportlib\-metadata\fP \fB1.5.1\fP => \fB1.6.0\fP +\- \fBpip\-shims\fP \fB0.5.1\fP => \fB0.5.2\fP +\- \fBrequirementslib\fP \fB1.5.5\fP => \fB1.5.6\fP \fI\%#4188\fP +.IP \(bu 2 +Updated vendored \fBpip\fP => \fB20.0.2\fP and \fBpip\-tools\fP => \fB5.0.0\fP\&. 
\fI\%#4215\fP +.IP \(bu 2 +Updated vendored dependencies to latest versions for security and bug fixes: +.INDENT 2.0 +.IP \(bu 2 +\fBrequirementslib\fP \fB1.5.8\fP => \fB1.5.9\fP +.IP \(bu 2 +\fBvistir\fP \fB0.5.0\fP => \fB0.5.1\fP +.IP \(bu 2 +\fBjinja2\fP \fB2.11.1\fP => \fB2.11.2\fP +.IP \(bu 2 +\fBclick\fP \fB7.1.1\fP => \fB7.1.2\fP +.IP \(bu 2 +\fBdateutil\fP \fB(none)\fP => \fB2.8.1\fP +.IP \(bu 2 +\fBbackports.functools_lru_cache\fP \fB1.5.0\fP => \fB1.6.1\fP +.IP \(bu 2 +\fBenum34\fP \fB1.1.6\fP => \fB1.1.10\fP +.IP \(bu 2 +\fBtoml\fP \fB0.10.0\fP => \fB0.10.1\fP +.IP \(bu 2 +\fBimportlib_resources\fP \fB1.4.0\fP => \fB1.5.0\fP \fI\%#4226\fP +.UNINDENT +.IP \(bu 2 +Changed attrs import path in vendored dependencies to always import from \fBpipenv.vendor\fP\&. \fI\%#4267\fP +.UNINDENT +.SS Improved Documentation +.INDENT 0.0 +.IP \(bu 2 +Added documentation about variable expansion in \fBPipfile\fP entries. \fI\%#2317\fP +.IP \(bu 2 +Consolidate all contributing docs in the rst file \fI\%#3120\fP +.IP \(bu 2 +Update the out\-dated manual page. \fI\%#3246\fP +.IP \(bu 2 +Move CLI docs to its own page. \fI\%#3346\fP +.IP \(bu 2 +Replace (non\-existent) video on docs index.rst with equivalent gif. \fI\%#3499\fP +.IP \(bu 2 +Clarify wording in Basic Usage example on using double quotes to escape shell redirection \fI\%#3522\fP +.IP \(bu 2 +Ensure docs show navigation on small\-screen devices \fI\%#3527\fP +.IP \(bu 2 +Added a link to the TOML Spec under General Recommendations & Version Control to clarify how Pipfiles should be written. \fI\%#3629\fP +.IP \(bu 2 +Updated the documentation with the new \fBpytest\fP entrypoint. \fI\%#3759\fP +.IP \(bu 2 +Fix link to GIF in README.md demonstrating Pipenv\(aqs usage, and add descriptive alt text. \fI\%#3911\fP +.IP \(bu 2 +Added a line describing potential issues in fancy extension. \fI\%#3912\fP +.IP \(bu 2 +Added a description of how Pipfile works and its association with Pipenv. 
\fI\%#3913\fP +.IP \(bu 2 +Clarify the proper value of \fBpython_version\fP and \fBpython_full_version\fP\&. \fI\%#3914\fP +.IP \(bu 2 +Write description for \-\-deploy extension and few extensions differences. \fI\%#3915\fP +.IP \(bu 2 +More documentation for \fB\&.env\fP files \fI\%#4100\fP +.IP \(bu 2 +Updated documentation to point to working links. \fI\%#4137\fP +.IP \(bu 2 +Replace docs.pipenv.org with pipenv.pypa.io \fI\%#4167\fP +.IP \(bu 2 +Added functionality to check spelling in documentation and cleaned up existing typographical issues. \fI\%#4209\fP +.UNINDENT .SS 2018.11.26 (2018\-11\-26) .SS Bug Fixes .INDENT 0.0 @@ -412,21 +845,21 @@ Fixed an issue which prevented the parsing of named extras sections from certain .IP \(bu 2 Correctly detect the virtualenv location inside an activated virtualenv. \fI\%#3231\fP .IP \(bu 2 -Fixed a bug which caused spinner frames to be written to stdout during locking operations which could cause redirection pipes to fail. \fI\%#3239\fP +Fixed a bug which caused spinner frames to be written to standard output during locking operations which could cause redirection pipes to fail. \fI\%#3239\fP .IP \(bu 2 -Fixed a bug that editable pacakges can\(aqt be uninstalled correctly. \fI\%#3240\fP +Fixed a bug that editable packages can\(aqt be uninstalled correctly. \fI\%#3240\fP .IP \(bu 2 Corrected an issue with installation timeouts which caused dependency resolution to fail for longer duration resolution steps. \fI\%#3244\fP .IP \(bu 2 Adding normal pep 508 compatible markers is now fully functional when using VCS dependencies. \fI\%#3249\fP .IP \(bu 2 -Updated \fBrequirementslib\fP and \fBpythonfinder\fP for multiple bugfixes. \fI\%#3254\fP +Updated \fBrequirementslib\fP and \fBpythonfinder\fP for multiple bug fixes. \fI\%#3254\fP .IP \(bu 2 Pipenv will now ignore hashes when installing with \fB\-\-skip\-lock\fP\&. 
\fI\%#3255\fP .IP \(bu 2 Fixed an issue where pipenv could crash when multiple pipenv processes attempted to create the same directory. \fI\%#3257\fP .IP \(bu 2 -Fixed an issue which sometimes prevented successful creation of project pipfiles. \fI\%#3260\fP +Fixed an issue which sometimes prevented successful creation of a project Pipfile. \fI\%#3260\fP .IP \(bu 2 \fBpipenv install\fP will now unset the \fBPYTHONHOME\fP environment variable when not combined with \fB\-\-system\fP\&. \fI\%#3261\fP .IP \(bu 2 @@ -466,10 +899,10 @@ Added persistent settings for all CLI flags via \fBPIPENV_{FLAG_NAME}\fP environ .IP \(bu 2 Added improved messaging about available but skipped updates due to dependency conflicts when running \fBpipenv update \-\-outdated\fP\&. \fI\%#2411\fP .IP \(bu 2 -Added environment variable \fIPIPENV_PYUP_API_KEY\fP to add ability -to override the bundled pyup.io API key. \fI\%#2825\fP +Added environment variable \fBPIPENV_PYUP_API_KEY\fP to add ability +to override the bundled PyUP.io API key. \fI\%#2825\fP .IP \(bu 2 -Added additional output to \fBpipenv update \-\-outdated\fP to indicate that the operation succeded and all packages were already up to date. \fI\%#2828\fP +Added additional output to \fBpipenv update \-\-outdated\fP to indicate that the operation succeeded and all packages were already up to date. \fI\%#2828\fP .IP \(bu 2 Updated \fBcrayons\fP patch to enable colors on native powershell but swap native blue for magenta. \fI\%#3020\fP .IP \(bu 2 @@ -486,7 +919,7 @@ Improved runtime performance of no\-op commands such as \fBpipenv \-\-venv\fP by .IP \(bu 2 Do not show error but success for running \fBpipenv uninstall \-\-all\fP in a fresh virtual environment. \fI\%#3170\fP .IP \(bu 2 -Improved asynchronous installation and error handling via queued subprocess paralleization. \fI\%#3217\fP +Improved asynchronous installation and error handling via queued subprocess parallelization. 
\fI\%#3217\fP .UNINDENT .SS Bug Fixes .INDENT 0.0 @@ -497,7 +930,7 @@ Non\-ascii characters will now be handled correctly when parsed by pipenv\(aqs \ .IP \(bu 2 Updated \fBpipenv uninstall\fP to respect the \fB\-\-skip\-lock\fP argument. \fI\%#2848\fP .IP \(bu 2 -Fixed a bug which caused uninstallation to sometimes fail to successfullly remove packages from \fBPipfiles\fP with comments on preceding or following lines. \fI\%#2885\fP, +Fixed a bug which caused uninstallation to sometimes fail to successfully remove packages from \fBPipfiles\fP with comments on preceding or following lines. \fI\%#2885\fP, \fI\%#3099\fP .IP \(bu 2 Pipenv will no longer fail when encountering python versions on Windows that have been uninstalled. \fI\%#2983\fP @@ -535,7 +968,7 @@ Updated \fBpythonfinder\fP to correct an issue with unnesting of nested paths wh .IP \(bu 2 Added additional logic for ignoring and replacing non\-ascii characters when formatting console output on non\-UTF\-8 systems. \fI\%#3131\fP .IP \(bu 2 -Fix virtual environment discovery when \fIPIPENV_VENV_IN_PROJECT\fP is set, but the in\-project \fI\&.venv\fP is a file. \fI\%#3134\fP +Fix virtual environment discovery when \fBPIPENV_VENV_IN_PROJECT\fP is set, but the in\-project \fI\&.venv\fP is a file. \fI\%#3134\fP .IP \(bu 2 Hashes for remote and local non\-PyPI artifacts will now be included in \fBPipfile.lock\fP during resolution. \fI\%#3145\fP .IP \(bu 2 @@ -652,7 +1085,7 @@ Upgraded \fBpythonfinder => 1.1.1\fP and \fBvistir => 0.1.7\fP\&. \fI\%#3007\fP Added environment variables \fIPIPENV_VERBOSE\fP and \fIPIPENV_QUIET\fP to control output verbosity without needing to pass options. \fI\%#2527\fP .IP \(bu 2 -Updated test\-pypi addon to better support json\-api access (forward compatibility). +Updated test\-PyPI add\-on to better support json\-API access (forward compatibility). Improved testing process for new contributors. 
\fI\%#2568\fP .IP \(bu 2 Greatly enhanced python discovery functionality: @@ -689,11 +1122,11 @@ Fallback to shell mode if \fIrun\fP fails with Windows error 193 to handle non\- .SS Bug Fixes .INDENT 0.0 .IP \(bu 2 -Fixed a bug which prevented installation of editable requirements using \fBssh://\fP style urls \fI\%#1393\fP +Fixed a bug which prevented installation of editable requirements using \fBssh://\fP style URLs \fI\%#1393\fP .IP \(bu 2 VCS Refs for locked local editable dependencies will now update appropriately to the latest hash when running \fBpipenv update\fP\&. \fI\%#1690\fP .IP \(bu 2 -\fB\&.tar.gz\fP and \fB\&.zip\fP artifacts will now have dependencies installed even when they are missing from the lockfile. \fI\%#2173\fP +\fB\&.tar.gz\fP and \fB\&.zip\fP artifacts will now have dependencies installed even when they are missing from the Lockfile. \fI\%#2173\fP .IP \(bu 2 The command line parser will now handle multiple \fB\-e/\-\-editable\fP dependencies properly via click\(aqs option parser to help mitigate future parsing issues. \fI\%#2279\fP .IP \(bu 2 @@ -717,12 +1150,12 @@ Fixed non\-deterministic resolution issues related to changes to the internal pa .IP \(bu 2 Fix subshell invocation on Windows for Python 2. \fI\%#2515\fP .IP \(bu 2 -Fixed a bug which sometimes caused pipenv to throw a \fBTypeError\fP or to run into encoding issues when writing lockfiles on python 2. \fI\%#2561\fP +Fixed a bug which sometimes caused pipenv to throw a \fBTypeError\fP or to run into encoding issues when writing a Lockfile on python 2. \fI\%#2561\fP .IP \(bu 2 Improve quoting logic for \fBpipenv run\fP so it works better with Windows built\-in commands. \fI\%#2563\fP .IP \(bu 2 -Fixed a bug related to parsing vcs requirements with both extras and subdirectory fragments. +Fixed a bug related to parsing VCS requirements with both extras and subdirectory fragments. 
Corrected an issue in the \fBrequirementslib\fP parser which led to some markers being discarded rather than evaluated. \fI\%#2564\fP .IP \(bu 2 Fixed multiple issues with finding the correct system python locations. \fI\%#2582\fP @@ -751,7 +1184,7 @@ Fixed virtualenv creation failure when a .venv file is present in the project ro .IP \(bu 2 Fixed a bug which could cause the \fB\-e/\-\-editable\fP argument on a dependency to be accidentally parsed as a dependency itself. \fI\%#2714\fP .IP \(bu 2 -Correctly pass \fIverbose\fP and \fIdebug\fP flags to the resolver subprocess so it generates appropriate output. This also resolves a bug introduced by the fix to #2527. \fI\%#2732\fP +Correctly pass \fBverbose\fP and \fBdebug\fP flags to the resolver subprocess so it generates appropriate output. This also resolves a bug introduced by the fix to #2527. \fI\%#2732\fP .IP \(bu 2 All markers are now included in \fBpipenv lock \-\-requirements\fP output. \fI\%#2748\fP .IP \(bu 2 @@ -764,7 +1197,7 @@ Fixed a bug in the dependency resolver which caused regular issues when handling .B Updated vendored dependencies: .INDENT 7.0 .IP \(bu 2 -\fBpip\-tools\fP (updated and patched to latest w/ \fBpip 18.0\fP compatibilty) +\fBpip\-tools\fP (updated and patched to latest w/ \fBpip 18.0\fP compatibility) .IP \(bu 2 \fBpip 10.0.1 => 18.0\fP .IP \(bu 2 @@ -844,7 +1277,7 @@ Update vendored libraries: .B Updated vendored dependencies: .INDENT 7.0 .IP \(bu 2 -\fBpip\-tools\fP (updated and patched to latest w/ \fBpip 18.0\fP compatibilty) +\fBpip\-tools\fP (updated and patched to latest w/ \fBpip 18.0\fP compatibility) .IP \(bu 2 \fBpip 10.0.1 => 18.0\fP .IP \(bu 2 @@ -889,7 +1322,7 @@ Update vendored libraries: .IP \(bu 2 Simplified the test configuration process. \fI\%#2568\fP .IP \(bu 2 -Updated documentation to use working fortune cookie addon. \fI\%#2644\fP +Updated documentation to use working fortune cookie add\-on. 
\fI\%#2644\fP .IP \(bu 2 Added additional information about troubleshooting \fBpipenv shell\fP by using the the \fB$PIPENV_SHELL\fP environment variable. \fI\%#2671\fP .IP \(bu 2 @@ -900,7 +1333,7 @@ Added simple example to README.md for installing from git. \fI\%#2685\fP Stopped recommending \fI\-\-system\fP for Docker contexts. \fI\%#2762\fP .IP \(bu 2 Fixed the example url for doing "pipenv install \-e -some\-repo\-url#egg=something", it was missing the "egg=" in the fragment +some\-repository\-url#egg=something", it was missing the "egg=" in the fragment identifier. \fI\%#2792\fP .IP \(bu 2 Fixed link to the "be cordial" essay in the contribution documentation. \fI\%#2793\fP @@ -915,36 +1348,36 @@ Replace reference to uservoice with PEEP\-000 \fI\%#2909\fP .IP \(bu 2 All calls to \fBpipenv shell\fP are now implemented from the ground up using \fI\%shellingham\fP, a custom library which was purpose built to handle edge cases and shell detection. \fI\%#2371\fP .IP \(bu 2 -Added support for python 3.7 via a few small compatibility / bugfixes. \fI\%#2427\fP, +Added support for python 3.7 via a few small compatibility / bug fixes. \fI\%#2427\fP, \fI\%#2434\fP, \fI\%#2436\fP .IP \(bu 2 Added new flag \fBpipenv \-\-support\fP to replace the diagnostic command \fBpython \-m pipenv.help\fP\&. \fI\%#2477\fP, \fI\%#2478\fP .IP \(bu 2 -Improved import times and CLI runtimes with minor tweaks. \fI\%#2485\fP +Improved import times and CLI run times with minor tweaks. \fI\%#2485\fP .UNINDENT .SS Bug Fixes .INDENT 0.0 .IP \(bu 2 -Fixed an ongoing bug which sometimes resolved incompatible versions into lockfiles. \fI\%#1901\fP +Fixed an ongoing bug which sometimes resolved incompatible versions into the project Lockfile. \fI\%#1901\fP .IP \(bu 2 Fixed a bug which caused errors when creating virtualenvs which contained leading dash characters. 
\fI\%#2415\fP .IP \(bu 2 -Fixed a logic error which caused \fB\-\-deploy \-\-system\fP to overwrite editable vcs packages in the pipfile before installing, which caused any installation to fail by default. \fI\%#2417\fP +Fixed a logic error which caused \fB\-\-deploy \-\-system\fP to overwrite editable vcs packages in the Pipfile before installing, which caused any installation to fail by default. \fI\%#2417\fP .IP \(bu 2 Updated requirementslib to fix an issue with properly quoting markers in VCS requirements. \fI\%#2419\fP .IP \(bu 2 Installed new vendored jinja2 templates for \fBclick\-completion\fP which were causing template errors for users with completion enabled. \fI\%#2422\fP .IP \(bu 2 -Added support for python 3.7 via a few small compatibility / bugfixes. \fI\%#2427\fP +Added support for python 3.7 via a few small compatibility / bug fixes. \fI\%#2427\fP .IP \(bu 2 Fixed an issue reading package names from \fBsetup.py\fP files in projects which imported utilities such as \fBversioneer\fP\&. \fI\%#2433\fP .IP \(bu 2 Pipenv will now ensure that its internal package names registry files are written with unicode strings. \fI\%#2450\fP .IP \(bu 2 Fixed a bug causing requirements input as relative paths to be output as absolute paths or URIs. -Fixed a bug affecting normalization of \fBgit+git@host\fP uris. \fI\%#2453\fP +Fixed a bug affecting normalization of \fBgit+git@host\fP URLs. \fI\%#2453\fP .IP \(bu 2 Pipenv will now always use \fBpathlib2\fP for \fBPath\fP based filesystem interactions by default on \fBpython<3.5\fP\&. \fI\%#2454\fP .IP \(bu 2 @@ -1001,7 +1434,7 @@ Added nested JSON output to the \fBpipenv graph\fP command. \fI\%#2199\fP Dropped vendored pip 9 and vendored, patched, and migrated to pip 10. Updated patched piptools version. 
\fI\%#2255\fP .IP \(bu 2 -PyPI mirror URLs can now be set to override instances of PyPI urls by passing +PyPI mirror URLs can now be set to override instances of PyPI URLs by passing the \fB\-\-pypi\-mirror\fP argument from the command line or setting the \fBPIPENV_PYPI_MIRROR\fP environment variable. \fI\%#2281\fP .IP \(bu 2 @@ -1033,8 +1466,8 @@ specific CVEs. \fI\%#2408\fP .INDENT 0.0 .IP \(bu 2 Pipenv will now parse & capitalize \fBplatform_python_implementation\fP markers -.. warning:: This could cause an issue if you have an out of date \fBPipfile\fP -which lowercases the comparison value (e.g. \fBcpython\fP instead of +\&.. warning:: This could cause an issue if you have an out of date \fBPipfile\fP +which lower\-cases the comparison value (e.g. \fBcpython\fP instead of \fBCPython\fP). \fI\%#2123\fP .IP \(bu 2 Pipenv will now only search for \fBrequirements.txt\fP files when creating new @@ -1081,7 +1514,7 @@ locked have been fixed. \fI\%#2267\fP .IP \(bu 2 Fixed a bug causing pipenv graph to fail to display sometimes. \fI\%#2268\fP .IP \(bu 2 -Updated \fBrequirementslib\fP to fix a bug in pipfile parsing affecting +Updated \fBrequirementslib\fP to fix a bug in Pipfile parsing affecting relative path conversions. \fI\%#2269\fP .IP \(bu 2 Windows executable discovery now leverages \fBos.pathext\fP\&. \fI\%#2298\fP @@ -1096,10 +1529,10 @@ VCS dependencies are now manually obtained only if they do not match the requested ref. \fI\%#2304\fP .IP \(bu 2 Added error handling functionality to properly cope with single\-digit -\fBRequires\-Python\fP metatdata with no specifiers. \fI\%#2377\fP +\fBRequires\-Python\fP metadata with no specifiers. \fI\%#2377\fP .IP \(bu 2 \fBpipenv update\fP will now always run the resolver and lock before ensuring -your dependencies are in sync with your lockfile. \fI\%#2379\fP +dependencies are in sync with project Lockfile. 
\fI\%#2379\fP .IP \(bu 2 Resolved a bug in our patched resolvers which could cause nondeterministic resolution failures in certain conditions. Running \fBpipenv install\fP with no @@ -1129,7 +1562,7 @@ custom certificate settings. \fI\%#2193\fP Dropped vendored pip 9 and vendored, patched, and migrated to pip 10. Updated patched piptools version. \fI\%#2255\fP .IP \(bu 2 -Updated \fBrequirementslib\fP to fix a bug in pipfile parsing affecting +Updated \fBrequirementslib\fP to fix a bug in Pipfile parsing affecting relative path conversions. \fI\%#2269\fP .IP \(bu 2 Added custom shell detection library \fBshellingham\fP, a port of our changes @@ -1190,7 +1623,7 @@ Automatically generates a \fBPipfile\fP, if one doesn\(aqt exist. .IP \(bu 2 Automatically creates a virtualenv in a standard location. .IP \(bu 2 -Automatically adds/removes packages to a \fBPipfile\fP when they are un/installed. +Automatically adds/removes packages to a \fBPipfile\fP when they are installed or uninstalled. .IP \(bu 2 Automatically loads \fB\&.env\fP files, if they exist. .UNINDENT @@ -1227,6 +1660,12 @@ Otherwise, whatever virtualenv defaults to will be the default. This document covers some of Pipenv\(aqs more basic features. .SS ☤ Example Pipfile & Pipfile.lock .sp +Pipfiles contain information for the dependencies of the project, and supersedes +the requirements.txt file used in most Python projects. You should add a Pipfile in the +Git repository letting users who clone the repository know the only thing required would be +installing Pipenv in the machine and typing \fBpipenv install\fP\&. Pipenv is a reference +implementation for using Pipfile. +.sp Here is a simple example of a \fBPipfile\fP and the resulting \fBPipfile.lock\fP\&. .SS Example Pipfile .INDENT 0.0 @@ -1348,7 +1787,7 @@ Generally, keep both \fBPipfile\fP and \fBPipfile.lock\fP in version control. .IP \(bu 2 Do not keep \fBPipfile.lock\fP in version control if multiple versions of Python are being targeted. 
.IP \(bu 2 -Specify your target Python version in your \fIPipfile\fP\(aqs \fB[requires]\fP section. Ideally, you should only have one target Python version, as this is a deployment tool. +Specify your target Python version in your \fIPipfile\fP\(aqs \fB[requires]\fP section. Ideally, you should only have one target Python version, as this is a deployment tool. \fBpython_version\fP should be in the format \fBX.Y\fP and \fBpython_full_version\fP should be in \fBX.Y.Z\fP format. .IP \(bu 2 \fBpipenv install\fP is fully compatible with \fBpip install\fP syntax, for which the full documentation can be found \fI\%here\fP\&. .IP \(bu 2 @@ -1446,7 +1885,7 @@ For example, to install requests you can use: .sp .nf .ft C -$ pipenv install requests~=1.2 # equivalent to requests~=1.2.0 +$ pipenv install requests~=1.2 .ft P .fi .UNINDENT @@ -1657,6 +2096,8 @@ is unique. .IP \(bu 2 \fB\-\-system\fP — Use the system \fBpip\fP command rather than the one from your virtualenv. .IP \(bu 2 +\fB\-\-deploy\fP — Make sure the packages are properly locked in Pipfile.lock, and abort if the lock file is out\-of\-date. +.IP \(bu 2 \fB\-\-ignore\-pipfile\fP — Ignore the \fBPipfile\fP and install from the \fBPipfile.lock\fP\&. .IP \(bu 2 \fB\-\-skip\-lock\fP — Ignore the \fBPipfile.lock\fP and install from the \fBPipfile\fP\&. In addition, do not write out a \fBPipfile.lock\fP reflecting changes to the \fBPipfile\fP\&. @@ -1794,7 +2235,7 @@ If you\(aqd like a specific package to be installed with a specific package inde .nf .ft C [[source]] -url = "https://pypi.python.org/simple" +url = "https://pypi.org/simple" verify_ssl = true name = "pypi" @@ -1817,7 +2258,7 @@ records = "*" Very fancy. 
.SS ☤ Using a PyPI Mirror .sp -If you\(aqd like to override the default PyPI index urls with the url for a PyPI mirror, you can use the following: +If you would like to override the default PyPI index URLs with the URL for a PyPI mirror, you can use the following: .INDENT 0.0 .INDENT 3.5 .sp @@ -1994,7 +2435,9 @@ $ pipenv \-\-python=/path/to/python \-\-site\-packages .UNINDENT .SS ☤ Generating a \fBrequirements.txt\fP .sp -You can convert a \fBPipfile\fP and \fBPipfile.lock\fP into a \fBrequirements.txt\fP file very easily, and get all the benefits of extras and other goodies we have included. +You can convert a \fBPipfile\fP and \fBPipfile.lock\fP into a \fBrequirements.txt\fP +file very easily, and get all the benefits of extras and other goodies we have +included. .sp Let\(aqs take this \fBPipfile\fP: .INDENT 0.0 @@ -2008,12 +2451,15 @@ verify_ssl = true [packages] requests = {version="*"} + +[dev\-packages] +pytest = {version="*"} .ft P .fi .UNINDENT .UNINDENT .sp -And generate a \fBrequirements.txt\fP out of it: +And generate a set of requirements out of it with only the default dependencies: .INDENT 0.0 .INDENT 3.5 .sp @@ -2030,30 +2476,35 @@ urllib3==1.22 .UNINDENT .UNINDENT .sp -If you wish to generate a \fBrequirements.txt\fP with only the development requirements you can do that too! 
Let\(aqs take the following \fBPipfile\fP: +As with other commands, passing \fB\-\-dev\fP will include both the default and +development dependencies: .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -[[source]] -url = "https://pypi.python.org/simple" -verify_ssl = true - -[dev\-packages] -pytest = {version="*"} +$ pipenv lock \-r \-\-dev +chardet==3.0.4 +requests==2.18.4 +certifi==2017.7.27.1 +idna==2.6 +urllib3==1.22 +py==1.4.34 +pytest==3.2.3 .ft P .fi .UNINDENT .UNINDENT .sp -And generate a \fBrequirements.txt\fP out of it: +Finally, if you wish to generate a requirements file with only the +development requirements you can do that too, using the \fB\-\-dev\-only\fP +flag: .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -$ pipenv lock \-r \-\-dev +$ pipenv lock \-r \-\-dev\-only py==1.4.34 pytest==3.2.3 .ft P @@ -2061,23 +2512,44 @@ pytest==3.2.3 .UNINDENT .UNINDENT .sp -Very fancy. -.SS ☤ Detection of Security Vulnerabilities -.sp -Pipenv includes the \fI\%safety\fP package, and will use it to scan your dependency graph -for known security vulnerabilities! -.sp -Example: +The locked requirements are written to stdout, with shell output redirection +used to write them to a file: .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -$ cat Pipfile -[packages] -django = "==1.10.1" - -$ pipenv check +$ pipenv lock \-r > requirements.txt +$ pipenv lock \-r \-\-dev\-only > dev\-requirements.txt +$ cat requirements.txt +chardet==3.0.4 +requests==2.18.4 +certifi==2017.7.27.1 +idna==2.6 +urllib3==1.22 +$ cat dev\-requirements.txt +py==1.4.34 +pytest==3.2.3 +.ft P +.fi +.UNINDENT +.UNINDENT +.SS ☤ Detection of Security Vulnerabilities +.sp +Pipenv includes the \fI\%safety\fP package, and will use it to scan your dependency graph +for known security vulnerabilities! +.sp +Example: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +$ cat Pipfile +[packages] +django = "==1.10.1" + +$ pipenv check Checking PEP 508 requirements… Passed! 
Checking installed package safety… @@ -2121,16 +2593,15 @@ hardened for production use and should be used only as a development aid. \fBNOTE:\fP .INDENT 0.0 .INDENT 3.5 -In order to enable this functionality while maintaining its permissive -copyright license, \fIpipenv\fP embeds an API client key for the backend -Safety API operated by pyup.io rather than including a full copy of the -CC\-BY\-NC\-SA licensed Safety\-DB database. This embedded client key is -shared across all \fIpipenv check\fP users, and hence will be subject to -API access throttling based on overall usage rather than individual -client usage. +Each month, \fIPyUp.io\fP updates the \fBsafety\fP database of +insecure Python packages and \fI\%makes it available to the +community for free\fP\&. Pipenv +makes an API call to retrieve those results and use them +each time you run \fBpipenv check\fP to show you vulnerable +dependencies. .sp -You can also use your own safety API key by setting the -environment variable \fBPIPENV_PYUP_API_KEY\fP\&. +For more up\-to\-date vulnerability data, you may also use your own safety +API key by setting the environment variable \fBPIPENV_PYUP_API_KEY\fP\&. .UNINDENT .UNINDENT .SS ☤ Community Integrations @@ -2259,6 +2730,28 @@ Type "help", "copyright", "credits" or "license" for more information. .UNINDENT .UNINDENT .sp +Shell like variable expansion is available in \fB\&.env\fP files using \fI${VARNAME}\fP syntax.: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +$ cat .env +CONFIG_PATH=${HOME}/.config/foo + +$ pipenv run python +Loading .env environment variables… +Python 3.7.6 (default, Dec 19 2019, 22:52:49) +[GCC 9.2.1 20190827 (Red Hat 9.2.1\-1)] on linux +Type "help", "copyright", "credits" or "license" for more information. +>>> import os +>>> os.environ[\(aqCONFIG_PATH\(aq] +\(aq/home/kennethreitz/.config/foo\(aq +.ft P +.fi +.UNINDENT +.UNINDENT +.sp This is very useful for keeping production credentials out of your codebase. 
We do not recommend committing \fB\&.env\fP files into source control! .sp @@ -2285,6 +2778,8 @@ $ PIPENV_DONT_LOAD_ENV=1 pipenv shell .fi .UNINDENT .UNINDENT +.sp +See \fI\%theskumar/python\-dotenv\fP for more information on \fB\&.env\fP files. .SS ☤ Custom Script Shortcuts .sp Pipenv supports creating custom shortcuts in the (optional) \fB[scripts]\fP section of your Pipfile. @@ -2325,6 +2820,17 @@ For example: .sp .nf .ft C +[scripts] +echospam = "echo I am really a very silly example" +.ft P +.fi +.UNINDENT +.UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C $ pipenv run echospam "indeed" I am really a very silly example indeed .ft P @@ -2367,7 +2873,7 @@ variables. To activate them, simply create the variable in your shell and pipenv will detect it. .INDENT 0.0 .TP -.B pipenv.environments.PIPENV_CACHE_DIR = \(aq/Users/fming/Library/Caches/pipenv\(aq +.B pipenv.environments.PIPENV_CACHE_DIR = \(aq/home/techalchemy/.cache/pipenv\(aq Location for Pipenv to store it\(aqs package cache. .sp Default is to use appdir\(aqs user cache directory. @@ -2382,7 +2888,7 @@ to show colors. .UNINDENT .INDENT 0.0 .TP -.B pipenv.environments.PIPENV_DEFAULT_PYTHON_VERSION = None +.B pipenv.environments.PIPENV_DEFAULT_PYTHON_VERSION = \(aq3.8\(aq Use this Python version when creating new virtual environments by default. .sp This can be set to a version string, e.g. \fB3.6\fP, or a path. Default is to use @@ -2399,6 +2905,13 @@ Default is to load \fB\&.env\fP for \fBrun\fP and \fBshell\fP commands. .UNINDENT .INDENT 0.0 .TP +.B pipenv.environments.PIPENV_DONT_USE_ASDF = False +If set, Pipenv does not attempt to install Python with asdf. +.sp +Default is to install Python automatically via asdf when needed, if possible. +.UNINDENT +.INDENT 0.0 +.TP .B pipenv.environments.PIPENV_DONT_USE_PYENV = False If set, Pipenv does not attempt to install Python with pyenv. .sp @@ -2500,7 +3013,7 @@ See also \fBPIPENV_MAX_DEPTH\fP\&. 
.UNINDENT .INDENT 0.0 .TP -.B pipenv.environments.PIPENV_PYPI_MIRROR = \(aqhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple\(aq +.B pipenv.environments.PIPENV_PYPI_MIRROR = None If set, tells pipenv to override PyPI index urls with a mirror. .sp Default is to not mirror PyPI, i.e. use the real one, pypi.org. The @@ -2508,7 +3021,7 @@ Default is to not mirror PyPI, i.e. use the real one, pypi.org. The .UNINDENT .INDENT 0.0 .TP -.B pipenv.environments.PIPENV_RESOLVE_VCS = False +.B pipenv.environments.PIPENV_RESOLVE_VCS = True Tells Pipenv whether to resolve all VCS dependencies in full. .sp As of Pipenv 2018.11.26, only editable VCS dependencies were resolved in full. @@ -2517,7 +3030,7 @@ approach, you may set this to \(aq0\(aq, \(aqoff\(aq, or \(aqfalse\(aq. .UNINDENT .INDENT 0.0 .TP -.B pipenv.environments.PIPENV_SHELL = \(aq/bin/zsh\(aq +.B pipenv.environments.PIPENV_SHELL = \(aq/usr/bin/fish\(aq An absolute path to the preferred shell for \fBpipenv shell\fP\&. .sp Default is to detect automatically what shell is currently in use. @@ -2546,7 +3059,7 @@ NOTE: This only affects the \fBinstall\fP and \fBuninstall\fP commands. .B pipenv.environments.PIPENV_SPINNER = \(aqdots\(aq Sets the default spinner type. .sp -Spinners are identitcal to the node.js spinners and can be found at +Spinners are identical to the \fBnode.js\fP spinners and can be found at \fI\%https://github.com/sindresorhus/cli\-spinners\fP .UNINDENT .INDENT 0.0 @@ -2576,7 +3089,50 @@ if interactive. 
.B pipenv.environments.PIP_EXISTS_ACTION = \(aqw\(aq Specifies the value for pip\(aqs \-\-exists\-action option .sp -Defaullts to (w)ipe +Defaults to \fB(w)ipe\fP +.UNINDENT +.INDENT 0.0 +.TP +.B pipenv.environments.env_to_bool(val) +Convert \fBval\fP to boolean, returning True if truthy or False if falsey +.INDENT 7.0 +.TP +.B Parameters +\fBval\fP (\fIAny\fP) \-\- The value to convert +.TP +.B Returns +False if Falsey, True if truthy +.TP +.B Return type +bool +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B pipenv.environments.get_from_env(arg, prefix=\(aqPIPENV\(aq, check_for_negation=True) +Check the environment for a variable, returning its truthy or stringified value +.sp +For example, setting \fBPIPENV_NO_RESOLVE_VCS=1\fP would mean that +\fBget_from_env("RESOLVE_VCS", prefix="PIPENV")\fP would return \fBFalse\fP\&. +.INDENT 7.0 +.TP +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBarg\fP (\fIstr\fP) \-\- The name of the variable to look for +.IP \(bu 2 +\fBprefix\fP (\fIstr\fP) \-\- The prefix to attach to the variable, defaults to "PIPENV" +.IP \(bu 2 +\fBcheck_for_negation\fP (\fIbool\fP) \-\- Whether to check for \fB_NO_\fP, defaults +to True +.UNINDENT +.TP +.B Returns +The value from the environment if available +.TP +.B Return type +Optional[Union[str, bool]] +.UNINDENT .UNINDENT .INDENT 0.0 .TP @@ -2628,7 +3184,7 @@ In addition, you can also have Pipenv stick the virtualenv in \fBproject/.venv\f .sp Pipenv is being used in projects like \fI\%Requests\fP for declaring development dependencies and running the test suite. .sp -We\(aqve currently tested deployments with both \fI\%Travis\-CI\fP and \fI\%tox\fP with success. +We have currently tested deployments with both \fI\%Travis\-CI\fP and \fI\%tox\fP with success. .SS Travis CI .sp An example Travis CI setup can be found in \fI\%Requests\fP\&. The project uses a Makefile to @@ -2722,7 +3278,7 @@ probably a good idea in any case. 
A 3rd party plugin, \fI\%tox\-pipenv\fP is also available to use Pipenv natively with tox. .SS ☤ Shell Completion .sp -To enable completion in fish, add this to your config: +To enable completion in fish, add this to your configuration: .INDENT 0.0 .INDENT 3.5 .sp @@ -2734,7 +3290,7 @@ eval (pipenv \-\-completion) .UNINDENT .UNINDENT .sp -Alternatively, with bash or zsh, add this to your config: +Alternatively, with bash or zsh, add this to your configuration: .INDENT 0.0 .INDENT 3.5 .sp @@ -2785,9 +3341,9 @@ $ PIP_IGNORE_INSTALLED=1 pipenv install \-\-dev .sp There is a subtle but very important distinction to be made between \fBapplications\fP and \fBlibraries\fP\&. This is a very common source of confusion in the Python community. .sp -Libraries provide reusable functionality to other libraries and applications (let\(aqs use the umbrella term \fBprojects\fP here). They are required to work alongside other libraries, all with their own set of subdependencies. They define \fBabstract dependencies\fP\&. To avoid version conflicts in subdependencies of different libraries within a project, libraries should never ever pin dependency versions. Although they may specify lower or (less frequently) upper bounds, if they rely on some specific feature/fix/bug. Library dependencies are specified via \fBinstall_requires\fP in \fBsetup.py\fP\&. +Libraries provide reusable functionality to other libraries and applications (let\(aqs use the umbrella term \fBprojects\fP here). They are required to work alongside other libraries, all with their own set of sub\-dependencies. They define \fBabstract dependencies\fP\&. To avoid version conflicts in sub\-dependencies of different libraries within a project, libraries should never ever pin dependency versions. Although they may specify lower or (less frequently) upper bounds, if they rely on some specific feature/fix/bug. Library dependencies are specified via \fBinstall_requires\fP in \fBsetup.py\fP\&. 
.sp -Libraries are ultimately meant to be used in some \fBapplication\fP\&. Applications are different in that they usually are not depended on by other projects. They are meant to be deployed into some specific environment and only then should the exact versions of all their dependencies and subdependencies be made concrete. To make this process easier is currently the main goal of Pipenv. +Libraries are ultimately meant to be used in some \fBapplication\fP\&. Applications are different in that they usually are not depended on by other projects. They are meant to be deployed into some specific environment and only then should the exact versions of all their dependencies and sub\-dependencies be made concrete. To make this process easier is currently the main goal of Pipenv. .sp To summarize: .INDENT 0.0 @@ -2820,520 +3376,339 @@ You can force Pipenv to use a different cache location by setting the environmen .SS ☤ Changing Default Python Versions .sp By default, Pipenv will initialize a project using whatever version of python the python3 is. Besides starting a project with the \fB\-\-three\fP or \fB\-\-two\fP flags, you can also use \fBPIPENV_DEFAULT_PYTHON_VERSION\fP to specify what version to use when starting a project when \fB\-\-three\fP or \fB\-\-two\fP aren\(aqt used. -.SS Frequently Encountered Pipenv Problems -.sp -Pipenv is constantly being improved by volunteers, but is still a very young -project with limited resources, and has some quirks that needs to be dealt -with. We need everyone’s help (including yours!). -.sp -Here are some common questions people have using Pipenv. Please take a look -below and see if they resolve your problem. -.sp -\fBNOTE:\fP -.INDENT 0.0 -.INDENT 3.5 -\fBMake sure you’re running the newest Pipenv version first!\fP -.UNINDENT -.UNINDENT -.SS ☤ Your dependencies could not be resolved -.sp -Make sure your dependencies actually \fIdo\fP resolve. If you’re confident they -are, you may need to clear your resolver cache. 
Run the following command: +.SS Pipenv CLI Reference +.SS pipenv .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pipenv lock \-\-clear +pipenv [OPTIONS] COMMAND [ARGS]... .ft P .fi .UNINDENT .UNINDENT -.sp -and try again. -.sp -If this does not work, try manually deleting the whole cache directory. It is -usually one of the following locations: +Options.INDENT 0.0 +.TP +.B \-\-where +Output project home information. +.UNINDENT .INDENT 0.0 -.IP \(bu 2 -\fB~/Library/Caches/pipenv\fP (macOS) -.IP \(bu 2 -\fB%LOCALAPPDATA%\epipenv\epipenv\eCache\fP (Windows) -.IP \(bu 2 -\fB~/.cache/pipenv\fP (other operating systems) +.TP +.B \-\-venv +Output virtualenv information. .UNINDENT -.sp -Pipenv does not install prereleases (i.e. a version with an alpha/beta/etc. -suffix, such as \fI1.0b1\fP) by default. You will need to pass the \fB\-\-pre\fP flag -in your command, or set .INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -[pipenv] -allow_prereleases = true -.ft P -.fi +.TP +.B \-\-py +Output Python interpreter information. .UNINDENT +.INDENT 0.0 +.TP +.B \-\-envs +Output Environment Variable options. .UNINDENT -.sp -in your Pipfile. -.SS ☤ No module named -.sp -This is usually a result of mixing Pipenv with system packages. We \fIstrongly\fP -recommend installing Pipenv in an isolated environment. Uninstall all existing -Pipenv installations, and see installing\-pipenv to choose one of the -recommended way to install Pipenv instead. -.SS ☤ My pyenv\-installed Python is not found -.sp -Make sure you have \fBPYENV_ROOT\fP set correctly. Pipenv only supports CPython -distributions, with version name like \fB3.6.4\fP or similar. -.SS ☤ Pipenv does not respect pyenv’s global and local Python versions -.sp -Pipenv by default uses the Python it is installed against to create the -virtualenv. You can set the \fB\-\-python\fP option, or -\fB$PYENV_ROOT/shims/python\fP to let it consult pyenv when choosing the -interpreter. See specifying_versions for more information. 
-.sp -If you want Pipenv to automatically “do the right thing”, you can set the -environment variable \fBPIPENV_PYTHON\fP to \fB$PYENV_ROOT/shims/python\fP\&. This -will make Pipenv use pyenv’s active Python version to create virtual -environments by default. -.SS ☤ ValueError: unknown locale: UTF\-8 -.sp -macOS has a bug in its locale detection that prevents us from detecting your -shell encoding correctly. This can also be an issue on other systems if the -locale variables do not specify an encoding. -.sp -The workaround is to set the following two environment variables to a standard -localization format: .INDENT 0.0 -.IP \(bu 2 -\fBLC_ALL\fP -.IP \(bu 2 -\fBLANG\fP +.TP +.B \-\-rm +Remove the virtualenv. .UNINDENT -.sp -For Bash, for example, you can add the following to your \fB~/.bash_profile\fP: .INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -export LC_ALL=\(aqen_US.UTF\-8\(aq -export LANG=\(aqen_US.UTF\-8\(aq -.ft P -.fi +.TP +.B \-\-bare +Minimal output. .UNINDENT +.INDENT 0.0 +.TP +.B \-\-completion +Output completion (to be executed by the shell). .UNINDENT -.sp -For Zsh, the file to edit is \fB~/.zshrc\fP\&. -.sp -\fBNOTE:\fP .INDENT 0.0 -.INDENT 3.5 -You can change both the \fBen_US\fP and \fBUTF\-8\fP part to the -language/locale and encoding you use. +.TP +.B \-\-man +Display manpage. .UNINDENT +.INDENT 0.0 +.TP +.B \-\-support +Output diagnostic information for use in GitHub issues. .UNINDENT -.SS ☤ /bin/pip: No such file or directory -.sp -This may be related to your locale setting. See \fI\%☤ ValueError: unknown locale: UTF\-8\fP -for a possible solution. -.SS ☤ \fBshell\fP does not show the virtualenv’s name in prompt +.INDENT 0.0 +.TP +.B \-\-site\-packages, \-\-no\-site\-packages +Enable site\-packages for the virtualenv. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-python +Specify which version of Python virtualenv should use. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-three, \-\-two +Use Python 3/2 when creating virtualenv. 
+.UNINDENT +.INDENT 0.0 +.TP +.B \-\-clear +Clears caches (pipenv, pip, and pip\-tools). +.UNINDENT +.INDENT 0.0 +.TP +.B \-v, \-\-verbose +Verbose mode. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-pypi\-mirror +Specify a PyPI mirror. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-version +Show the version and exit. +.UNINDENT +.SS check .sp -This is intentional. You can do it yourself with either shell plugins, or -clever \fBPS1\fP configuration. If you really want it back, use +Checks for PyUp Safety security vulnerabilities and against PEP 508 markers provided in Pipfile. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pipenv shell \-c +pipenv check [OPTIONS] [ARGS]... .ft P .fi .UNINDENT .UNINDENT +Options.INDENT 0.0 +.TP +.B \-\-unused +Given a code path, show potentially unused dependencies. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-db +Path to a local PyUp Safety vulnerabilities database. Default: ENV PIPENV_SAFETY_DB or None. +.UNINDENT +.INDENT 0.0 +.TP +.B \-i, \-\-ignore +Ignore specified vulnerability during PyUp Safety checks. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-output +Translates to \-\-json, \-\-full\-report or \-\-bare from PyUp Safety check +.INDENT 7.0 +.TP +.B Options +default|json|full\-report|bare +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-key +Safety API key from PyUp.io for scanning dependencies against a live vulnerabilities database. Leave blank for scanning against a database that only updates once a month. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-quiet +Quiet standard output, except vulnerability report. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-python +Specify which version of Python virtualenv should use. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-three, \-\-two +Use Python 3/2 when creating virtualenv. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-clear +Clears caches (pipenv, pip, and pip\-tools). +.UNINDENT +.INDENT 0.0 +.TP +.B \-v, \-\-verbose +Verbose mode. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-pypi\-mirror +Specify a PyPI mirror. 
+.UNINDENT +.INDENT 0.0 +.TP +.B \-\-system +System pip management. +.UNINDENT +Arguments.INDENT 0.0 +.TP +.B ARGS +Optional argument(s) +.UNINDENT +.SS clean .sp -instead (not available on Windows). -.SS ☤ Pipenv does not respect dependencies in setup.py -.sp -No, it does not, intentionally. Pipfile and setup.py serve different purposes, -and should not consider each other by default. See pipfile\-vs\-setuppy -for more information. -.SS ☤ Using \fBpipenv run\fP in Supervisor program -.sp -When you configure a supervisor program\(aqs \fBcommand\fP with \fBpipenv run ...\fP, you -need to set locale enviroment variables properly to make it work. -.sp -Add this line under \fB[supervisord]\fP section in \fB/etc/supervisor/supervisord.conf\fP: +Uninstalls all packages not specified in Pipfile.lock. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -[supervisord] -environment=LC_ALL=\(aqen_US.UTF\-8\(aq,LANG=\(aqen_US.UTF\-8\(aq +pipenv clean [OPTIONS] .ft P .fi .UNINDENT .UNINDENT -.SS ☤ An exception is raised during \fBLocking dependencies…\fP -.sp -Run \fBpipenv lock \-\-clear\fP and try again. The lock sequence caches results -to speed up subsequent runs. The cache may contain faulty results if a bug -causes the format to corrupt, even after the bug is fixed. \fB\-\-clear\fP flushes -the cache, and therefore removes the bad results. -.SH CONTRIBUTION GUIDES -.SS Development Philosophy -.sp -Pipenv is an open but opinionated tool, created by an open but opinionated developer. -.SS Management Style -.sp -\fI\%Kenneth Reitz\fP is the BDFL. He has final say in any decision related to the Pipenv project. Kenneth is responsible for the direction and form of the library, as well as its presentation. In addition to making decisions based on technical merit, he is responsible for making decisions based on the development philosophy of Pipenv. -.sp -\fI\%Dan Ryan\fP, \fI\%Tzu\-ping Chung\fP, and \fI\%Nate Prewitt\fP are the core contributors. 
-They are responsible for triaging bug reports, reviewing pull requests and ensuring that Kenneth is kept up to speed with developments around the library. -The day\-to\-day managing of the project is done by the core contributors. They are responsible for making judgements about whether or not a feature request is -likely to be accepted by Kenneth. -.SS Values +Options.INDENT 0.0 +.TP +.B \-\-bare +Minimal output. +.UNINDENT .INDENT 0.0 -.IP \(bu 2 -Simplicity is always better than functionality. -.IP \(bu 2 -Listen to everyone, then disregard it. -.IP \(bu 2 -The API is all that matters. Everything else is secondary. -.IP \(bu 2 -Fit the 90% use\-case. Ignore the nay\-sayers. +.TP +.B \-\-dry\-run +Just output unneeded packages. .UNINDENT -.SS Contributing to Pipenv -.sp -If you\(aqre reading this, you\(aqre probably interested in contributing to Pipenv. -Thank you very much! Open source projects live\-and\-die based on the support -they receive from others, and the fact that you\(aqre even considering -contributing to the Pipenv project is \fIvery\fP generous of you. -.sp -This document lays out guidelines and advice for contributing to this project. -If you\(aqre thinking of contributing, please start by reading this document and -getting a feel for how contributing to this project works. If you have any -questions, feel free to reach out to either \fI\%Dan Ryan\fP, \fI\%Tzu\-ping Chung\fP, -or \fI\%Nate Prewitt\fP, the primary maintainers. -.sp -The guide is split into sections based on the type of contribution you\(aqre -thinking of making, with a section that covers general guidelines for all -contributors. -.SS General Guidelines -.SS Be Cordial .INDENT 0.0 -.INDENT 3.5 -\fBBe cordial or be on your way\fP\&. \fI—Kenneth Reitz\fP +.TP +.B \-v, \-\-verbose +Verbose mode. .UNINDENT +.INDENT 0.0 +.TP +.B \-\-three, \-\-two +Use Python 3/2 when creating virtualenv. 
.UNINDENT -.sp -Pipenv has one very important rule governing all forms of contribution, -including reporting bugs or requesting features. This golden rule is -"\fI\%be cordial or be on your way\fP". -.sp -\fBAll contributions are welcome\fP, as long as -everyone involved is treated with respect. -.SS Get Early Feedback -.sp -If you are contributing, do not feel the need to sit on your contribution until -it is perfectly polished and complete. It helps everyone involved for you to -seek feedback as early as you possibly can. Submitting an early, unfinished -version of your contribution for feedback in no way prejudices your chances of -getting that contribution accepted, and can save you from putting a lot of work -into a contribution that is not suitable for the project. -.SS Contribution Suitability -.sp -Our project maintainers have the last word on whether or not a contribution is -suitable for Pipenv. All contributions will be considered carefully, but from -time to time, contributions will be rejected because they do not suit the -current goals or needs of the project. -.sp -If your contribution is rejected, don\(aqt despair! As long as you followed these -guidelines, you will have a much better chance of getting your next -contribution accepted. -.SS Questions -.sp -The GitHub issue tracker is for \fIbug reports\fP and \fIfeature requests\fP\&. Please do -not use it to ask questions about how to use Pipenv. These questions should -instead be directed to \fI\%Stack Overflow\fP\&. Make sure that your question is tagged -with the \fBpipenv\fP tag when asking it on Stack Overflow, to ensure that it is -answered promptly and accurately. -.SS Code Contributions -.SS Steps for Submitting Code -.sp -When contributing code, you\(aqll want to follow this checklist: .INDENT 0.0 -.IP 1. 3 -Understand our \fI\%development philosophy\fP\&. -.IP 2. 3 -Fork the repository on GitHub. -.IP 3. 3 -Set up your \fI\%Development Setup\fP -.IP 4. 
3 -Run the tests (\fI\%Testing\fP) to confirm they all pass on your system. -If they don\(aqt, you\(aqll need to investigate why they fail. If you\(aqre unable -to diagnose this yourself, raise it as a bug report by following the guidelines -in this document: \fI\%Bug Reports\fP\&. -.IP 5. 3 -Write tests that demonstrate your bug or feature. Ensure that they fail. -.IP 6. 3 -Make your change. -.IP 7. 3 -Run the entire test suite again, confirming that all tests pass \fIincluding -the ones you just added\fP\&. -.IP 8. 3 -Send a GitHub Pull Request to the main repository\(aqs \fBmaster\fP branch. -GitHub Pull Requests are the expected method of code collaboration on this -project. +.TP +.B \-\-python +Specify which version of Python virtualenv should use. .UNINDENT +.SS graph .sp -The following sub\-sections go into more detail on some of the points above. -.SS Development Setup -.sp -To get your development environment setup, run: +Displays currently\-installed dependency graph information. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pip install \-e . -pipenv install \-\-dev +pipenv graph [OPTIONS] .ft P .fi .UNINDENT .UNINDENT +Options.INDENT 0.0 +.TP +.B \-\-bare +Minimal output. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-json +Output JSON. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-json\-tree +Output JSON in nested tree. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-reverse +Reversed dependency graph. +.UNINDENT +.SS install .sp -This will install the repo version of Pipenv and then install the development -dependencies. Once that has completed, you can start developing. -.sp -The repo version of Pipenv must be installed over other global versions to -resolve conflicts with the \fBpipenv\fP folder being implicitly added to \fBsys.path\fP\&. -See \fI\%pypa/pipenv#2557\fP for more details. 
-.SS Testing -.sp -Tests are written in \fBpytest\fP style and can be run very simply: +Installs provided packages and adds them to Pipfile, or (if no packages are given), installs all packages from Pipfile. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pytest +pipenv install [OPTIONS] [PACKAGES]... .ft P .fi .UNINDENT .UNINDENT -.sp -This will run all Pipenv tests, which can take awhile. To run a subset of the -tests, the standard pytest filters are available, such as: +Options.INDENT 0.0 +.TP +.B \-\-system +System pip management. +.UNINDENT .INDENT 0.0 -.IP \(bu 2 -provide a directory or file: \fBpytest tests/unit\fP or \fBpytest tests/unit/test_cmdparse.py\fP -.IP \(bu 2 -provide a keyword expression: \fBpytest \-k test_lock_editable_vcs_without_install\fP -.IP \(bu 2 -provide a nodeid: \fBpytest tests/unit/test_cmdparse.py::test_parse\fP -.IP \(bu 2 -provide a test marker: \fBpytest \-m lock\fP +.TP +.B \-c, \-\-code +Install packages automatically discovered from import statements. .UNINDENT -.SS Code Review -.sp -Contributions will not be merged until they\(aqve been code reviewed. You should -implement any code review feedback unless you strongly object to it. In the -event that you object to the code review feedback, you should make your case -clearly and calmly. If, after doing so, the feedback is judged to still apply, -you must either apply the feedback or withdraw your contribution. -.SS Package Index -.sp -To speed up testing, tests that rely on a package index for locking and -installing use a local server that contains vendored packages in the -\fBtests/pypi\fP directory. Each vendored package should have it\(aqs own folder -containing the necessary releases. When adding a release for a package, it is -easiest to use either the \fB\&.tar.gz\fP or universal wheels (ex: \fBpy2.py3\-none\fP). If -a \fB\&.tar.gz\fP or universal wheel is not available, add wheels for all available -architectures and platforms. 
-.SS Documentation Contributions -.sp -Documentation improvements are always welcome! The documentation files live in -the \fBdocs/\fP directory of the codebase. They\(aqre written in -\fI\%reStructuredText\fP, and use \fI\%Sphinx\fP to generate the full suite of -documentation. -.sp -When contributing documentation, please do your best to follow the style of the -documentation files. This means a soft\-limit of 79 characters wide in your text -files and a semi\-formal, yet friendly and approachable, prose style. -.sp -When presenting Python code, use single\-quoted strings (\fB\(aqhello\(aq\fP instead of -\fB"hello"\fP). -.SS Bug Reports -.sp -Bug reports are hugely important! They are recorded as \fI\%GitHub issues\fP\&. Please -be aware of the following things when filing bug reports: -.INDENT 0.0 -.IP 1. 3 -Avoid raising duplicate issues. \fIPlease\fP use the GitHub issue search feature -to check whether your bug report or feature request has been mentioned in -the past. Duplicate bug reports and feature requests are a huge maintenance -burden on the limited resources of the project. If it is clear from your -report that you would have struggled to find the original, that\(aqs ok, but -if searching for a selection of words in your issue title would have found -the duplicate then the issue will likely be closed extremely abruptly. -.IP 2. 3 -When filing bug reports about exceptions or tracebacks, please include the -\fIcomplete\fP traceback. Partial tracebacks, or just the exception text, are -not helpful. Issues that do not contain complete tracebacks may be closed -without warning. -.IP 3. 3 -Make sure you provide a suitable amount of information to work with. This -means you should provide: -.INDENT 3.0 -.IP \(bu 2 -Guidance on \fBhow to reproduce the issue\fP\&. Ideally, this should be a -\fIsmall\fP code sample that can be run immediately by the maintainers. 
-Failing that, let us know what you\(aqre doing, how often it happens, what -environment you\(aqre using, etc. Be thorough: it prevents us needing to ask -further questions. -.IP \(bu 2 -Tell us \fBwhat you expected to happen\fP\&. When we run your example code, -what are we expecting to happen? What does "success" look like for your -code? -.IP \(bu 2 -Tell us \fBwhat actually happens\fP\&. It\(aqs not helpful for you to say "it -doesn\(aqt work" or "it fails". Tell us \fIhow\fP it fails: do you get an -exception? A hang? The packages installed seem incorrect? -How was the actual result different from your expected result? -.IP \(bu 2 -Tell us \fBwhat version of Pipenv you\(aqre using\fP, and -\fBhow you installed it\fP\&. Different versions of Pipenv behave -differently and have different bugs, and some distributors of Pipenv -ship patches on top of the code we supply. -.UNINDENT -.sp -If you do not provide all of these things, it will take us much longer to -fix your problem. If we ask you to clarify these and you never respond, we -will close your issue without fixing it. -.UNINDENT -.SS Run the tests -.sp -Three ways of running the tests are as follows: .INDENT 0.0 -.IP 1. 3 -\fBmake test\fP (which uses \fBdocker\fP) -.IP 2. 3 -\fB\&./run\-tests.sh\fP or \fBrun\-tests.bat\fP -.IP 3. 3 -Using pipenv: +.TP +.B \-\-deploy +Abort if the Pipfile.lock is out\-of\-date, or Python version is wrong. .UNINDENT .INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -$ git clone https://github.com/pypa/pipenv.git -$ cd pipenv -$ git submodule sync && git submodule update \-\-init \-\-recursive -$ pipenv install \-\-dev -$ pipenv run pytest -.ft P -.fi -.UNINDENT +.TP +.B \-\-site\-packages, \-\-no\-site\-packages +Enable site\-packages for the virtualenv. 
.UNINDENT -.sp -For the last two, it is important that your environment is setup correctly, and -this may take some work, for example, on a specific Mac installation, the following -steps may be needed: .INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -# Make sure the tests can access github -if [ "$SSH_AGENT_PID" = "" ] -then - eval \(gassh\-agent\(ga - ssh\-add -fi - -# Use unix like utilities, installed with brew, -# e.g. brew install coreutils -for d in /usr/local/opt/*/libexec/gnubin /usr/local/opt/python/libexec/bin -do - [[ ":$PATH:" != *":$d:"* ]] && PATH="$d:${PATH}" -done - -export PATH - -# PIP_FIND_LINKS currently breaks test_uninstall.py -unset PIP_FIND_LINKS -.ft P -.fi -.UNINDENT +.TP +.B \-\-skip\-lock +Skip locking mechanisms and use the Pipfile instead during operation. .UNINDENT -.SH ☤ PIPENV USAGE -.SS pipenv .INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -pipenv [OPTIONS] COMMAND [ARGS]... -.ft P -.fi -.UNINDENT -.UNINDENT -Options.INDENT 0.0 .TP -.B \-\-where -Output project home information. +.B \-e, \-\-editable +An editable Python package URL or path, often to a VCS repository. .UNINDENT .INDENT 0.0 .TP -.B \-\-venv -Output virtualenv information. +.B \-\-ignore\-pipfile +Ignore Pipfile when installing, using the Pipfile.lock. .UNINDENT .INDENT 0.0 .TP -.B \-\-py -Output Python interpreter information. +.B \-\-selective\-upgrade +Update specified packages. .UNINDENT .INDENT 0.0 .TP -.B \-\-envs -Output Environment Variable options. +.B \-r, \-\-requirements +Import a requirements.txt file. .UNINDENT .INDENT 0.0 .TP -.B \-\-rm -Remove the virtualenv. +.B \-\-extra\-index\-url +URLs to the extra PyPI compatible indexes to query for package look\-ups. .UNINDENT .INDENT 0.0 .TP -.B \-\-bare -Minimal output. +.B \-i, \-\-index +Target PyPI\-compatible package index url. .UNINDENT .INDENT 0.0 .TP -.B \-\-completion -Output completion (to be eval\(aqd). +.B \-\-sequential +Install dependencies one\-at\-a\-time, instead of concurrently. 
.UNINDENT .INDENT 0.0 .TP -.B \-\-man -Display manpage. +.B \-d, \-\-dev +Install both develop and default packages .UNINDENT .INDENT 0.0 .TP -.B \-\-support -Output diagnostic information for use in GitHub issues. +.B \-\-keep\-outdated +Keep out\-dated dependencies from being updated in Pipfile.lock. .UNINDENT .INDENT 0.0 .TP -.B \-\-site\-packages -Enable site\-packages for the virtualenv. +.B \-\-pre +Allow pre\-releases. .UNINDENT .INDENT 0.0 .TP @@ -3360,33 +3735,75 @@ Verbose mode. .B \-\-pypi\-mirror Specify a PyPI mirror. .UNINDENT +Arguments.INDENT 0.0 +.TP +.B PACKAGES +Optional argument(s) +.UNINDENT +Environment variables.INDENT 0.0 +.TP +.B PIPENV_SKIP_LOCK +.INDENT 7.0 +.INDENT 3.5 +Provide a default for \fI\%\-\-skip\-lock\fP +.UNINDENT +.UNINDENT +.UNINDENT .INDENT 0.0 .TP -.B \-\-version -Show the version and exit. +.B PIP_EXTRA_INDEX_URL +.INDENT 7.0 +.INDENT 3.5 +Provide a default for \fI\%\-\-extra\-index\-url\fP .UNINDENT -.SS check +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B PIP_INDEX_URL +.INDENT 7.0 +.INDENT 3.5 +Provide a default for \fI\%\-i\fP +.UNINDENT +.UNINDENT +.UNINDENT +.SS lock .sp -Checks for security vulnerabilities and against PEP 508 markers provided in Pipfile. +Generates Pipfile.lock. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pipenv check [OPTIONS] [ARGS]... +pipenv lock [OPTIONS] .ft P .fi .UNINDENT .UNINDENT Options.INDENT 0.0 .TP -.B \-\-unused -Given a code path, show potentially unused dependencies. +.B \-\-dev\-only +Emit development dependencies \fIonly\fP (overrides \-\-dev) .UNINDENT .INDENT 0.0 .TP -.B \-i, \-\-ignore -Ignore specified vulnerability during safety checks. +.B \-r, \-\-requirements +Generate output in requirements.txt format. +.UNINDENT +.INDENT 0.0 +.TP +.B \-d, \-\-dev +Generate both develop and default requirements +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-keep\-outdated +Keep out\-dated dependencies from being updated in Pipfile.lock. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-pre +Allow pre\-releases. 
.UNINDENT .INDENT 0.0 .TP @@ -3413,38 +3830,41 @@ Verbose mode. .B \-\-pypi\-mirror Specify a PyPI mirror. .UNINDENT +.SS open +.sp +View a given module in your editor. +.sp +This uses the EDITOR environment variable. You can temporarily override it, +for example: .INDENT 0.0 -.TP -.B \-\-system -System pip management. +.INDENT 3.5 +EDITOR=atom pipenv open requests .UNINDENT -Arguments.INDENT 0.0 -.TP -.B ARGS -Optional argument(s) .UNINDENT -.SS clean -.sp -Uninstalls all packages not specified in Pipfile.lock. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pipenv clean [OPTIONS] +pipenv open [OPTIONS] MODULE .ft P .fi .UNINDENT .UNINDENT Options.INDENT 0.0 .TP -.B \-\-bare -Minimal output. +.B \-\-python +Specify which version of Python virtualenv should use. .UNINDENT .INDENT 0.0 .TP -.B \-\-dry\-run -Just output unneeded packages. +.B \-\-three, \-\-two +Use Python 3/2 when creating virtualenv. +.UNINDENT +.INDENT 0.0 +.TP +.B \-\-clear +Clears caches (pipenv, pip, and pip\-tools). .UNINDENT .INDENT 0.0 .TP @@ -3453,114 +3873,132 @@ Verbose mode. .UNINDENT .INDENT 0.0 .TP -.B \-\-three, \-\-two -Use Python 3/2 when creating virtualenv. +.B \-\-pypi\-mirror +Specify a PyPI mirror. .UNINDENT -.INDENT 0.0 +Arguments.INDENT 0.0 .TP -.B \-\-python -Specify which version of Python virtualenv should use. +.B MODULE +Required argument .UNINDENT -.SS graph +.SS run .sp -Displays currently\-installed dependency graph information. +Spawns a command installed into the virtualenv. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pipenv graph [OPTIONS] +pipenv run [OPTIONS] COMMAND [ARGS]... .ft P .fi .UNINDENT .UNINDENT Options.INDENT 0.0 .TP -.B \-\-bare -Minimal output. +.B \-\-python +Specify which version of Python virtualenv should use. .UNINDENT .INDENT 0.0 .TP -.B \-\-json -Output JSON. +.B \-\-three, \-\-two +Use Python 3/2 when creating virtualenv. .UNINDENT .INDENT 0.0 .TP -.B \-\-json\-tree -Output JSON in nested tree. +.B \-\-clear +Clears caches (pipenv, pip, and pip\-tools). 
.UNINDENT .INDENT 0.0 .TP -.B \-\-reverse -Reversed dependency graph. +.B \-v, \-\-verbose +Verbose mode. .UNINDENT -.SS install +.INDENT 0.0 +.TP +.B \-\-pypi\-mirror +Specify a PyPI mirror. +.UNINDENT +Arguments.INDENT 0.0 +.TP +.B COMMAND +Required argument +.UNINDENT +.INDENT 0.0 +.TP +.B ARGS +Optional argument(s) +.UNINDENT +.SS shell .sp -Installs provided packages and adds them to Pipfile, or (if no packages are given), installs all packages from Pipfile. +Spawns a shell within the virtualenv. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pipenv install [OPTIONS] [PACKAGES]... +pipenv shell [OPTIONS] [SHELL_ARGS]... .ft P .fi .UNINDENT .UNINDENT Options.INDENT 0.0 .TP -.B \-\-system -System pip management. +.B \-\-fancy +Run in shell in fancy mode. Make sure the shell have no path manipulating scripts. Run $pipenv shell for issues with compatibility mode. .UNINDENT .INDENT 0.0 .TP -.B \-c, \-\-code -Install packages automatically discovered from import statements. +.B \-\-anyway +Always spawn a sub\-shell, even if one is already spawned. .UNINDENT .INDENT 0.0 .TP -.B \-\-deploy -Abort if the Pipfile.lock is out\-of\-date, or Python version is wrong. +.B \-\-pypi\-mirror +Specify a PyPI mirror. .UNINDENT .INDENT 0.0 .TP -.B \-\-skip\-lock -Skip locking mechanisms and use the Pipfile instead during operation. +.B \-\-three, \-\-two +Use Python 3/2 when creating virtualenv. .UNINDENT .INDENT 0.0 .TP -.B \-e, \-\-editable -An editable python package URL or path, often to a VCS repo. +.B \-\-python +Specify which version of Python virtualenv should use. .UNINDENT -.INDENT 0.0 +Arguments.INDENT 0.0 .TP -.B \-\-ignore\-pipfile -Ignore Pipfile when installing, using the Pipfile.lock. +.B SHELL_ARGS +Optional argument(s) .UNINDENT +.SS sync +.sp +Installs all packages specified in Pipfile.lock. .INDENT 0.0 -.TP -.B \-\-selective\-upgrade -Update specified packages. 
+.INDENT 3.5 +.sp +.nf +.ft C +pipenv sync [OPTIONS] +.ft P +.fi .UNINDENT -.INDENT 0.0 -.TP -.B \-r, \-\-requirements -Import a requirements.txt file. .UNINDENT -.INDENT 0.0 +Options.INDENT 0.0 .TP -.B \-\-extra\-index\-url -URLs to the extra PyPI compatible indexes to query for package lookups. +.B \-\-bare +Minimal output. .UNINDENT .INDENT 0.0 .TP -.B \-i, \-\-index -Target PyPI\-compatible package index url. +.B \-\-sequential +Install dependencies one\-at\-a\-time, instead of concurrently. .UNINDENT .INDENT 0.0 .TP -.B \-\-sequential -Install dependencies one\-at\-a\-time, instead of concurrently. +.B \-d, \-\-dev +Install both develop and default packages .UNINDENT .INDENT 0.0 .TP @@ -3574,11 +4012,6 @@ Allow pre\-releases. .UNINDENT .INDENT 0.0 .TP -.B \-d, \-\-dev -Install both develop and default packages. -.UNINDENT -.INDENT 0.0 -.TP .B \-\-python Specify which version of Python virtualenv should use. .UNINDENT @@ -3602,55 +4035,43 @@ Verbose mode. .B \-\-pypi\-mirror Specify a PyPI mirror. .UNINDENT -Arguments.INDENT 0.0 -.TP -.B PACKAGES -Optional argument(s) -.UNINDENT -Environment variables.INDENT 0.0 -.TP -.B PIPENV_SKIP_LOCK -.INDENT 7.0 +.SS uninstall +.sp +Uninstalls a provided package and removes it from Pipfile. +.INDENT 0.0 .INDENT 3.5 -Provide a default for \fI\%\-\-skip\-lock\fP +.sp +.nf +.ft C +pipenv uninstall [OPTIONS] [PACKAGES]... +.ft P +.fi .UNINDENT .UNINDENT +Options.INDENT 0.0 +.TP +.B \-\-all\-dev +Uninstall all package from [dev\-packages]. .UNINDENT .INDENT 0.0 .TP -.B PIP_EXTRA_INDEX_URL -.INDENT 7.0 -.INDENT 3.5 -Provide a default for \fI\%\-\-extra\-index\-url\fP -.UNINDENT -.UNINDENT +.B \-\-all +Purge all package(s) from virtualenv. Does not edit Pipfile. .UNINDENT .INDENT 0.0 .TP -.B PIP_INDEX_URL -.INDENT 7.0 -.INDENT 3.5 -Provide a default for \fI\%\-i\fP -.UNINDENT -.UNINDENT +.B \-e, \-\-editable +An editable Python package URL or path, often to a VCS repository. .UNINDENT -.SS lock -.sp -Generates Pipfile.lock. 
.INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -pipenv lock [OPTIONS] -.ft P -.fi -.UNINDENT +.TP +.B \-\-skip\-lock +Skip locking mechanisms and use the Pipfile instead during operation. .UNINDENT -Options.INDENT 0.0 +.INDENT 0.0 .TP -.B \-r, \-\-requirements -Generate output in requirements.txt format. +.B \-d, \-\-dev +Deprecated (as it has no effect). May be removed in a future release. .UNINDENT .INDENT 0.0 .TP @@ -3664,11 +4085,6 @@ Allow pre\-releases. .UNINDENT .INDENT 0.0 .TP -.B \-d, \-\-dev -Install both develop and default packages. -.UNINDENT -.INDENT 0.0 -.TP .B \-\-python Specify which version of Python virtualenv should use. .UNINDENT @@ -3692,170 +4108,87 @@ Verbose mode. .B \-\-pypi\-mirror Specify a PyPI mirror. .UNINDENT -.SS open -.sp -View a given module in your editor. -.sp -This uses the EDITOR environment variable. You can temporarily override it, -for example: -.INDENT 0.0 +Arguments.INDENT 0.0 +.TP +.B PACKAGES +Optional argument(s) +.UNINDENT +Environment variables.INDENT 0.0 +.TP +.B PIPENV_SKIP_LOCK +.INDENT 7.0 .INDENT 3.5 -EDITOR=atom pipenv open requests +Provide a default for \fI\%\-\-skip\-lock\fP .UNINDENT .UNINDENT +.UNINDENT +.SS update +.sp +Runs lock, then sync. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -pipenv open [OPTIONS] MODULE +pipenv update [OPTIONS] [PACKAGES]... .ft P .fi .UNINDENT .UNINDENT Options.INDENT 0.0 .TP -.B \-\-python -Specify which version of Python virtualenv should use. +.B \-\-bare +Minimal output. .UNINDENT .INDENT 0.0 .TP -.B \-\-three, \-\-two -Use Python 3/2 when creating virtualenv. +.B \-\-outdated +List out\-of\-date dependencies. .UNINDENT .INDENT 0.0 .TP -.B \-\-clear -Clears caches (pipenv, pip, and pip\-tools). +.B \-\-dry\-run +List out\-of\-date dependencies. .UNINDENT .INDENT 0.0 .TP -.B \-v, \-\-verbose -Verbose mode. +.B \-e, \-\-editable +An editable Python package URL or path, often to a VCS repository. .UNINDENT .INDENT 0.0 .TP -.B \-\-pypi\-mirror -Specify a PyPI mirror. 
-.UNINDENT -Arguments.INDENT 0.0 -.TP -.B MODULE -Required argument +.B \-\-ignore\-pipfile +Ignore Pipfile when installing, using the Pipfile.lock. .UNINDENT -.SS run -.sp -Spawns a command installed into the virtualenv. .INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -pipenv run [OPTIONS] COMMAND [ARGS]... -.ft P -.fi -.UNINDENT -.UNINDENT -Options.INDENT 0.0 .TP -.B \-\-python -Specify which version of Python virtualenv should use. +.B \-\-selective\-upgrade +Update specified packages. .UNINDENT .INDENT 0.0 .TP -.B \-\-three, \-\-two -Use Python 3/2 when creating virtualenv. +.B \-r, \-\-requirements +Import a requirements.txt file. .UNINDENT .INDENT 0.0 .TP -.B \-\-clear -Clears caches (pipenv, pip, and pip\-tools). +.B \-\-extra\-index\-url +URLs to the extra PyPI compatible indexes to query for package look\-ups. .UNINDENT .INDENT 0.0 .TP -.B \-v, \-\-verbose -Verbose mode. +.B \-i, \-\-index +Target PyPI\-compatible package index url. .UNINDENT .INDENT 0.0 .TP -.B \-\-pypi\-mirror -Specify a PyPI mirror. -.UNINDENT -Arguments.INDENT 0.0 -.TP -.B COMMAND -Required argument -.UNINDENT -.INDENT 0.0 -.TP -.B ARGS -Optional argument(s) -.UNINDENT -.SS shell -.sp -Spawns a shell within the virtualenv. -.INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -pipenv shell [OPTIONS] [SHELL_ARGS]... -.ft P -.fi -.UNINDENT -.UNINDENT -Options.INDENT 0.0 -.TP -.B \-\-fancy -Run in shell in fancy mode (for elegantly configured shells). -.UNINDENT -.INDENT 0.0 -.TP -.B \-\-anyway -Always spawn a subshell, even if one is already spawned. -.UNINDENT -.INDENT 0.0 -.TP -.B \-\-pypi\-mirror -Specify a PyPI mirror. -.UNINDENT -.INDENT 0.0 -.TP -.B \-\-three, \-\-two -Use Python 3/2 when creating virtualenv. -.UNINDENT -.INDENT 0.0 -.TP -.B \-\-python -Specify which version of Python virtualenv should use. -.UNINDENT -Arguments.INDENT 0.0 -.TP -.B SHELL_ARGS -Optional argument(s) -.UNINDENT -.SS sync -.sp -Installs all packages specified in Pipfile.lock. 
-.INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -pipenv sync [OPTIONS] -.ft P -.fi -.UNINDENT -.UNINDENT -Options.INDENT 0.0 -.TP -.B \-\-bare -Minimal output. +.B \-\-sequential +Install dependencies one\-at\-a\-time, instead of concurrently. .UNINDENT .INDENT 0.0 .TP -.B \-\-sequential -Install dependencies one\-at\-a\-time, instead of concurrently. +.B \-d, \-\-dev +Install both develop and default packages .UNINDENT .INDENT 0.0 .TP @@ -3869,11 +4202,6 @@ Allow pre\-releases. .UNINDENT .INDENT 0.0 .TP -.B \-d, \-\-dev -Install both develop and default packages. -.UNINDENT -.INDENT 0.0 -.TP .B \-\-python Specify which version of Python virtualenv should use. .UNINDENT @@ -3897,217 +4225,485 @@ Verbose mode. .B \-\-pypi\-mirror Specify a PyPI mirror. .UNINDENT -.SS uninstall -.sp -Un\-installs a provided package and removes it from Pipfile. -.INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -pipenv uninstall [OPTIONS] [PACKAGES]... -.ft P -.fi -.UNINDENT -.UNINDENT -Options.INDENT 0.0 +Arguments.INDENT 0.0 .TP -.B \-\-all\-dev -Un\-install all package from [dev\-packages]. +.B PACKAGES +Optional argument(s) .UNINDENT -.INDENT 0.0 +Environment variables.INDENT 0.0 .TP -.B \-\-all -Purge all package(s) from virtualenv. Does not edit Pipfile. +.B PIP_EXTRA_INDEX_URL +.INDENT 7.0 +.INDENT 3.5 +Provide a default for \fI\%\-\-extra\-index\-url\fP .UNINDENT -.INDENT 0.0 -.TP -.B \-e, \-\-editable -An editable python package URL or path, often to a VCS repo. .UNINDENT -.INDENT 0.0 -.TP -.B \-\-skip\-lock -Skip locking mechanisms and use the Pipfile instead during operation. .UNINDENT .INDENT 0.0 .TP -.B \-\-keep\-outdated -Keep out\-dated dependencies from being updated in Pipfile.lock. +.B PIP_INDEX_URL +.INDENT 7.0 +.INDENT 3.5 +Provide a default for \fI\%\-i\fP .UNINDENT -.INDENT 0.0 -.TP -.B \-\-pre -Allow pre\-releases. .UNINDENT -.INDENT 0.0 -.TP -.B \-d, \-\-dev -Install both develop and default packages. 
.UNINDENT
+.SS Frequently Encountered Pipenv Problems
+.sp
+Pipenv is constantly being improved by volunteers, but is still a very young
+project with limited resources, and has some quirks that need to be dealt
+with. We need everyone’s help (including yours!).
+.sp
+Here are some common questions people have using Pipenv. Please take a look
+below and see if they resolve your problem.
+.sp
+\fBNOTE:\fP
.INDENT 0.0
-.TP
-.B \-\-python
-Specify which version of Python virtualenv should use.
+.INDENT 3.5
+\fBMake sure you’re running the newest Pipenv version first!\fP
.UNINDENT
-.INDENT 0.0
-.TP
-.B \-\-three, \-\-two
-Use Python 3/2 when creating virtualenv.
.UNINDENT
+.SS ☤ Your dependencies could not be resolved
+.sp
+Make sure your dependencies actually \fIdo\fP resolve. If you’re confident they
+are, you may need to clear your resolver cache. Run the following command:
.INDENT 0.0
-.TP
-.B \-\-clear
-Clears caches (pipenv, pip, and pip\-tools).
+.INDENT 3.5
+.sp
+.nf
+.ft C
+pipenv lock \-\-clear
+.ft P
+.fi
.UNINDENT
-.INDENT 0.0
-.TP
-.B \-v, \-\-verbose
-Verbose mode.
.UNINDENT
+.sp
+and try again.
+.sp
+If this does not work, try manually deleting the whole cache directory. It is
+usually one of the following locations:
.INDENT 0.0
-.TP
-.B \-\-pypi\-mirror
-Specify a PyPI mirror.
-.UNINDENT
-Arguments.INDENT 0.0
-.TP
-.B PACKAGES
-Optional argument(s)
+.IP \(bu 2
+\fB~/Library/Caches/pipenv\fP (macOS)
+.IP \(bu 2
+\fB%LOCALAPPDATA%\epipenv\epipenv\eCache\fP (Windows)
+.IP \(bu 2
+\fB~/.cache/pipenv\fP (other operating systems)
.UNINDENT
-Environment variables.INDENT 0.0
-.TP
-.B PIPENV_SKIP_LOCK
-.INDENT 7.0
+.sp
+Pipenv does not install pre\-releases (i.e. a version with an alpha/beta/etc.
+suffix, such as \fI1.0b1\fP) by default. 
You will need to pass the \fB\-\-pre\fP flag
+in your command, or set
+.INDENT 0.0
.INDENT 3.5
-Provide a default for \fI\%\-\-skip\-lock\fP
+.sp
+.nf
+.ft C
+[pipenv]
+allow_prereleases = true
+.ft P
+.fi
.UNINDENT
.UNINDENT
+.sp
+in your Pipfile.
+.SS ☤ No module named 
+.sp
+This is usually a result of mixing Pipenv with system packages. We \fIstrongly\fP
+recommend installing Pipenv in an isolated environment. Uninstall all existing
+Pipenv installations, and see installing\-pipenv to choose one of the
+recommended ways to install Pipenv instead.
+.SS ☤ My pyenv\-installed Python is not found
+.sp
+Make sure you have \fBPYENV_ROOT\fP set correctly. Pipenv only supports CPython
+distributions, with version name like \fB3.6.4\fP or similar.
+.SS ☤ Pipenv does not respect pyenv’s global and local Python versions
+.sp
+Pipenv by default uses the Python it is installed against to create the
+virtualenv. You can set the \fB\-\-python\fP option, or
+\fB$PYENV_ROOT/shims/python\fP to let it consult pyenv when choosing the
+interpreter. See specifying_versions for more information.
+.sp
+If you want Pipenv to automatically “do the right thing”, you can set the
+environment variable \fBPIPENV_PYTHON\fP to \fB$PYENV_ROOT/shims/python\fP\&. This
+will make Pipenv use pyenv’s active Python version to create virtual
+environments by default.
+.SS ☤ ValueError: unknown locale: UTF\-8
+.sp
+macOS has a bug in its locale detection that prevents us from detecting your
+shell encoding correctly. This can also be an issue on other systems if the
+locale variables do not specify an encoding.
+.sp
+The workaround is to set the following two environment variables to a standard
+localization format:
+.INDENT 0.0
+.IP \(bu 2
+\fBLC_ALL\fP
+.IP \(bu 2
+\fBLANG\fP
.UNINDENT
-.SS update
.sp
-Runs lock, then sync.
+For Bash, for example, you can add the following to your \fB~/.bash_profile\fP:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
-pipenv update [OPTIONS] [PACKAGES]... 
+export LC_ALL=\(aqen_US.UTF\-8\(aq +export LANG=\(aqen_US.UTF\-8\(aq .ft P .fi .UNINDENT .UNINDENT -Options.INDENT 0.0 -.TP -.B \-\-bare -Minimal output. -.UNINDENT +.sp +For Zsh, the file to edit is \fB~/.zshrc\fP\&. +.sp +\fBNOTE:\fP .INDENT 0.0 -.TP -.B \-\-outdated -List out\-of\-date dependencies. +.INDENT 3.5 +You can change both the \fBen_US\fP and \fBUTF\-8\fP part to the +language/locale and encoding you use. .UNINDENT -.INDENT 0.0 -.TP -.B \-\-dry\-run -List out\-of\-date dependencies. .UNINDENT +.SS ☤ /bin/pip: No such file or directory +.sp +This may be related to your locale setting. See \fI\%☤ ValueError: unknown locale: UTF\-8\fP +for a possible solution. +.SS ☤ \fBshell\fP does not show the virtualenv’s name in prompt +.sp +This is intentional. You can do it yourself with either shell plugins, or +clever \fBPS1\fP configuration. If you really want it back, use .INDENT 0.0 -.TP -.B \-e, \-\-editable -An editable python package URL or path, often to a VCS repo. +.INDENT 3.5 +.sp +.nf +.ft C +pipenv shell \-c +.ft P +.fi .UNINDENT -.INDENT 0.0 -.TP -.B \-\-ignore\-pipfile -Ignore Pipfile when installing, using the Pipfile.lock. .UNINDENT +.sp +instead (not available on Windows). +.SS ☤ Pipenv does not respect dependencies in setup.py +.sp +No, it does not, intentionally. Pipfile and setup.py serve different purposes, +and should not consider each other by default. See pipfile\-vs\-setuppy +for more information. +.SS ☤ Using \fBpipenv run\fP in Supervisor program +.sp +When you configure a supervisor program\(aqs \fBcommand\fP with \fBpipenv run ...\fP, you +need to set locale environment variables properly to make it work. +.sp +Add this line under \fB[supervisord]\fP section in \fB/etc/supervisor/supervisord.conf\fP: .INDENT 0.0 -.TP -.B \-\-selective\-upgrade -Update specified packages. 
+.INDENT 3.5 +.sp +.nf +.ft C +[supervisord] +environment=LC_ALL=\(aqen_US.UTF\-8\(aq,LANG=\(aqen_US.UTF\-8\(aq +.ft P +.fi .UNINDENT -.INDENT 0.0 -.TP -.B \-r, \-\-requirements -Import a requirements.txt file. .UNINDENT +.SS ☤ An exception is raised during \fBLocking dependencies…\fP +.sp +Run \fBpipenv lock \-\-clear\fP and try again. The lock sequence caches results +to speed up subsequent runs. The cache may contain faulty results if a bug +causes the format to corrupt, even after the bug is fixed. \fB\-\-clear\fP flushes +the cache, and therefore removes the bad results. +.SH CONTRIBUTION GUIDES +.SS Development Philosophy +.sp +Pipenv is an open but opinionated tool, created by an open but opinionated developer. +.SS Management Style .INDENT 0.0 -.TP -.B \-\-extra\-index\-url -URLs to the extra PyPI compatible indexes to query for package lookups. +.INDENT 3.5 +\fBTo be updated (as of March 2020)\fP\&. .UNINDENT -.INDENT 0.0 -.TP -.B \-i, \-\-index -Target PyPI\-compatible package index url. .UNINDENT +.sp +\fI\%Kenneth Reitz\fP is the BDFL. He has final say in any decision related to the Pipenv project. Kenneth is responsible for the direction and form of the library, as well as its presentation. In addition to making decisions based on technical merit, he is responsible for making decisions based on the development philosophy of Pipenv. +.sp +\fI\%Dan Ryan\fP, \fI\%Tzu\-ping Chung\fP, and \fI\%Nate Prewitt\fP are the core contributors. +They are responsible for triaging bug reports, reviewing pull requests and ensuring that Kenneth is kept up to speed with developments around the library. +The day\-to\-day managing of the project is done by the core contributors. They are responsible for making judgments about whether or not a feature request is +likely to be accepted by Kenneth. +.SS Values .INDENT 0.0 -.TP -.B \-\-sequential -Install dependencies one\-at\-a\-time, instead of concurrently. +.IP \(bu 2 +Simplicity is always better than functionality. 
+.IP \(bu 2 +Listen to everyone, then disregard it. +.IP \(bu 2 +The API is all that matters. Everything else is secondary. +.IP \(bu 2 +Fit the 90% use\-case. Ignore the nay\-sayers. .UNINDENT +.SS Contributing to Pipenv +.sp +If you\(aqre reading this, you\(aqre probably interested in contributing to Pipenv. +Thank you very much! Open source projects live\-and\-die based on the support +they receive from others, and the fact that you\(aqre even considering +contributing to the Pipenv project is \fIvery\fP generous of you. +.sp +This document lays out guidelines and advice for contributing to this project. +If you\(aqre thinking of contributing, please start by reading this document and +getting a feel for how contributing to this project works. If you have any +questions, feel free to reach out to either \fI\%Dan Ryan\fP, \fI\%Tzu\-ping Chung\fP, +or \fI\%Nate Prewitt\fP, the primary maintainers. +.sp +The guide is split into sections based on the type of contribution you\(aqre +thinking of making, with a section that covers general guidelines for all +contributors. +.SS General Guidelines +.SS Be Cordial .INDENT 0.0 -.TP -.B \-\-keep\-outdated -Keep out\-dated dependencies from being updated in Pipfile.lock. +.INDENT 3.5 +\fBBe cordial or be on your way\fP\&. \fI—Kenneth Reitz\fP .UNINDENT -.INDENT 0.0 -.TP -.B \-\-pre -Allow pre\-releases. .UNINDENT +.sp +Pipenv has one very important rule governing all forms of contribution, +including reporting bugs or requesting features. This golden rule is +"\fI\%be cordial or be on your way\fP". +.sp +\fBAll contributions are welcome\fP, as long as +everyone involved is treated with respect. +.SS Get Early Feedback +.sp +If you are contributing, do not feel the need to sit on your contribution until +it is perfectly polished and complete. It helps everyone involved for you to +seek feedback as early as you possibly can. 
Submitting an early, unfinished +version of your contribution for feedback in no way prejudices your chances of +getting that contribution accepted, and can save you from putting a lot of work +into a contribution that is not suitable for the project. +.SS Contribution Suitability +.sp +Our project maintainers have the last word on whether or not a contribution is +suitable for Pipenv. All contributions will be considered carefully, but from +time to time, contributions will be rejected because they do not suit the +current goals or needs of the project. +.sp +If your contribution is rejected, don\(aqt despair! As long as you followed these +guidelines, you will have a much better chance of getting your next +contribution accepted. +.SS Questions +.sp +The GitHub issue tracker is for \fIbug reports\fP and \fIfeature requests\fP\&. Please do +not use it to ask questions about how to use Pipenv. These questions should +instead be directed to \fI\%Stack Overflow\fP\&. Make sure that your question is tagged +with the \fBpipenv\fP tag when asking it on Stack Overflow, to ensure that it is +answered promptly and accurately. +.SS Code Contributions +.SS Steps for Submitting Code +.sp +When contributing code, you\(aqll want to follow this checklist: .INDENT 0.0 -.TP -.B \-d, \-\-dev -Install both develop and default packages. +.IP 1. 3 +Understand our \fI\%development philosophy\fP\&. +.IP 2. 3 +Fork the repository on GitHub. +.IP 3. 3 +Set up your \fI\%Development Setup\fP +.IP 4. 3 +Run the tests (\fI\%Testing\fP) to confirm they all pass on your system. +If they don\(aqt, you\(aqll need to investigate why they fail. If you\(aqre unable +to diagnose this yourself, raise it as a bug report by following the guidelines +in this document: \fI\%Bug Reports\fP\&. +.IP 5. 3 +Write tests that demonstrate your bug or feature. Ensure that they fail. +.IP 6. 3 +Make your change. +.IP 7. 
3 +Run the entire test suite again, confirming that all tests pass \fIincluding +the ones you just added\fP\&. +.IP 8. 3 +Send a GitHub Pull Request to the main repository\(aqs \fBmaster\fP branch. +GitHub Pull Requests are the expected method of code collaboration on this +project. .UNINDENT +.sp +The following sub\-sections go into more detail on some of the points above. +.SS Development Setup +.sp +To get your development environment setup, run: .INDENT 0.0 -.TP -.B \-\-python -Specify which version of Python virtualenv should use. +.INDENT 3.5 +.sp +.nf +.ft C +pip install \-e . +pipenv install \-\-dev +.ft P +.fi .UNINDENT -.INDENT 0.0 -.TP -.B \-\-three, \-\-two -Use Python 3/2 when creating virtualenv. .UNINDENT +.sp +This will install the repository version of Pipenv and then install the development +dependencies. Once that has completed, you can start developing. +.sp +The repository version of Pipenv must be installed over other global versions to +resolve conflicts with the \fBpipenv\fP folder being implicitly added to \fBsys.path\fP\&. +See \fI\%pypa/pipenv#2557\fP for more details. +.SS Testing +.sp +Tests are written in \fBpytest\fP style and can be run very simply: .INDENT 0.0 -.TP -.B \-\-clear -Clears caches (pipenv, pip, and pip\-tools). +.INDENT 3.5 +.sp +.nf +.ft C +pytest +.ft P +.fi +.UNINDENT .UNINDENT +.sp +This will run all Pipenv tests, which can take awhile. To run a subset of the +tests, the standard pytest filters are available, such as: .INDENT 0.0 -.TP -.B \-v, \-\-verbose -Verbose mode. +.IP \(bu 2 +provide a directory or file: \fBpytest tests/unit\fP or \fBpytest tests/unit/test_cmdparse.py\fP +.IP \(bu 2 +provide a keyword expression: \fBpytest \-k test_lock_editable_vcs_without_install\fP +.IP \(bu 2 +provide a nodeid: \fBpytest tests/unit/test_cmdparse.py::test_parse\fP +.IP \(bu 2 +provide a test marker: \fBpytest \-m lock\fP .UNINDENT +.SS Code Review +.sp +Contributions will not be merged until they have been code reviewed. 
You should +implement any code review feedback unless you strongly object to it. In the +event that you object to the code review feedback, you should make your case +clearly and calmly. If, after doing so, the feedback is judged to still apply, +you must either apply the feedback or withdraw your contribution. +.SS Package Index +.sp +To speed up testing, tests that rely on a package index for locking and +installing use a local server that contains vendored packages in the +\fBtests/pypi\fP directory. Each vendored package should have it\(aqs own folder +containing the necessary releases. When adding a release for a package, it is +easiest to use either the \fB\&.tar.gz\fP or universal wheels (ex: \fBpy2.py3\-none\fP). If +a \fB\&.tar.gz\fP or universal wheel is not available, add wheels for all available +architectures and platforms. +.SS Documentation Contributions +.sp +Documentation improvements are always welcome! The documentation files live in +the \fBdocs/\fP directory of the codebase. They\(aqre written in +\fI\%reStructuredText\fP, and use \fI\%Sphinx\fP to generate the full suite of +documentation. +.sp +When contributing documentation, please do your best to follow the style of the +documentation files. This means a soft\-limit of 79 characters wide in your text +files and a semi\-formal, yet friendly and approachable, prose style. +.sp +When presenting Python code, use single\-quoted strings (\fB\(aqhello\(aq\fP instead of +\fB"hello"\fP). +.SS Bug Reports +.sp +Bug reports are hugely important! They are recorded as \fI\%GitHub issues\fP\&. Please +be aware of the following things when filing bug reports: .INDENT 0.0 -.TP -.B \-\-pypi\-mirror -Specify a PyPI mirror. +.IP 1. 3 +Avoid raising duplicate issues. \fIPlease\fP use the GitHub issue search feature +to check whether your bug report or feature request has been mentioned in +the past. 
Duplicate bug reports and feature requests are a huge maintenance +burden on the limited resources of the project. If it is clear from your +report that you would have struggled to find the original, that\(aqs okay, but +if searching for a selection of words in your issue title would have found +the duplicate then the issue will likely be closed extremely abruptly. +.IP 2. 3 +When filing bug reports about exceptions or tracebacks, please include the +\fIcomplete\fP traceback. Partial tracebacks, or just the exception text, are +not helpful. Issues that do not contain complete tracebacks may be closed +without warning. +.IP 3. 3 +Make sure you provide a suitable amount of information to work with. This +means you should provide: +.INDENT 3.0 +.IP \(bu 2 +Guidance on \fBhow to reproduce the issue\fP\&. Ideally, this should be a +\fIsmall\fP code sample that can be run immediately by the maintainers. +Failing that, let us know what you\(aqre doing, how often it happens, what +environment you\(aqre using, etc. Be thorough: it prevents us needing to ask +further questions. +.IP \(bu 2 +Tell us \fBwhat you expected to happen\fP\&. When we run your example code, +what are we expecting to happen? What does "success" look like for your +code? +.IP \(bu 2 +Tell us \fBwhat actually happens\fP\&. It\(aqs not helpful for you to say "it +doesn\(aqt work" or "it fails". Tell us \fIhow\fP it fails: do you get an +exception? A hang? The packages installed seem incorrect? +How was the actual result different from your expected result? +.IP \(bu 2 +Tell us \fBwhat version of Pipenv you\(aqre using\fP, and +\fBhow you installed it\fP\&. Different versions of Pipenv behave +differently and have different bugs, and some distributors of Pipenv +ship patches on top of the code we supply. .UNINDENT -Arguments.INDENT 0.0 -.TP -.B PACKAGES -Optional argument(s) +.sp +If you do not provide all of these things, it will take us much longer to +fix your problem. 
If we ask you to clarify these and you never respond, we +will close your issue without fixing it. .UNINDENT -Environment variables.INDENT 0.0 -.TP -.B PIP_EXTRA_INDEX_URL -.INDENT 7.0 -.INDENT 3.5 -Provide a default for \fI\%\-\-extra\-index\-url\fP +.SS Run the tests +.sp +Three ways of running the tests are as follows: +.INDENT 0.0 +.IP 1. 3 +\fBmake test\fP (which uses \fBdocker\fP) +.IP 2. 3 +\fB\&./run\-tests.sh\fP or \fBrun\-tests.bat\fP +.IP 3. 3 +Using pipenv: .UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +$ git clone https://github.com/pypa/pipenv.git +$ cd pipenv +$ git submodule sync && git submodule update \-\-init \-\-recursive +$ pipenv install \-\-dev +$ pipenv run pytest +.ft P +.fi .UNINDENT .UNINDENT +.sp +For the last two, it is important that your environment is setup correctly, and +this may take some work, for example, on a specific Mac installation, the following +steps may be needed: .INDENT 0.0 -.TP -.B PIP_INDEX_URL -.INDENT 7.0 .INDENT 3.5 -Provide a default for \fI\%\-i\fP -.UNINDENT +.sp +.nf +.ft C +# Make sure the tests can access github +if [ "$SSH_AGENT_PID" = "" ] +then + eval \(gassh\-agent\(ga + ssh\-add +fi + +# Use unix like utilities, installed with brew, +# e.g. brew install coreutils +for d in /usr/local/opt/*/libexec/gnubin /usr/local/opt/python/libexec/bin +do + [[ ":$PATH:" != *":$d:"* ]] && PATH="$d:${PATH}" +done + +export PATH + +# PIP_FIND_LINKS currently breaks test_uninstall.py +unset PIP_FIND_LINKS +.ft P +.fi .UNINDENT .UNINDENT .INDENT 0.0 @@ -4119,8 +4715,8 @@ modindex search .UNINDENT .SH AUTHOR -Kenneth Reitz +Python Packaging Authority .SH COPYRIGHT -2017. A project founded by Kenneth Reitz +2020. A project founded by Kenneth Reitz .\" Generated by docutils manpage writer. . 
diff --git a/pipenv/project.py b/pipenv/project.py index 4cb2ff9c51..40b6b26364 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -25,7 +25,7 @@ from .environments import ( PIPENV_DEFAULT_PYTHON_VERSION, PIPENV_IGNORE_VIRTUALENVS, PIPENV_MAX_DEPTH, PIPENV_PIPFILE, PIPENV_PYTHON, PIPENV_TEST_INDEX, PIPENV_VENV_IN_PROJECT, - is_in_virtualenv, is_type_checking + PIPENV_USE_SYSTEM, is_in_virtualenv, is_type_checking ) from .vendor.requirementslib.models.utils import get_default_pyproject_backend from .utils import ( @@ -328,21 +328,30 @@ def pipfile_package_names(self): "combined": dev_keys | default_keys } + def get_environment(self, allow_global=False): + # type: (bool) -> Environment + if allow_global: + prefix = sys.prefix + else: + prefix = self.virtualenv_location + is_venv = is_in_virtualenv() + sources = self.sources if self.sources else [DEFAULT_SOURCE] + environment = Environment( + prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile, + project=self + ) + pipenv_dist = get_pipenv_dist(pkg="pipenv") + if pipenv_dist: + environment.extend_dists(pipenv_dist) + else: + environment.add_dist("pipenv") + return environment + @property def environment(self): if not self._environment: - prefix = self.virtualenv_location - is_venv = is_in_virtualenv() - sources = self.sources if self.sources else [DEFAULT_SOURCE] - self._environment = Environment( - prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile, - project=self - ) - pipenv_dist = get_pipenv_dist(pkg="pipenv") - if pipenv_dist: - self._environment.extend_dists(pipenv_dist) - else: - self._environment.add_dist("pipenv") + allow_global = os.environ.get("PIPENV_USE_SYSTEM", PIPENV_USE_SYSTEM) + self._environment = self.get_environment(allow_global=allow_global) return self._environment def get_outdated_packages(self): @@ -790,7 +799,7 @@ def write_toml(self, data, path=None): except Exception: document = tomlkit.document() for section in ("packages", 
"dev-packages"): - document[section] = tomlkit.container.Table() + document[section] = tomlkit.table() # Convert things to inline tables — fancy :) for package in data.get(section, {}): if hasattr(data[section][package], "keys"): diff --git a/pipenv/resolver.py b/pipenv/resolver.py index cd04fccb0f..733b28d5da 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -771,7 +771,7 @@ def resolve(packages, pre, project, sources, clear, system, requirements_dir=Non def _main(pre, clear, verbose, system, write, requirements_dir, packages, parse_only=False): - os.environ["PIP_PYTHON_VERSION"] = ".".join([str(s) for s in sys.version_info[:3]]) + os.environ["PIPENV_REQUESTED_PYTHON_VERSION"] = ".".join([str(s) for s in sys.version_info[:3]]) os.environ["PIP_PYTHON_PATH"] = str(sys.executable) if parse_only: parse_packages( diff --git a/pipenv/utils.py b/pipenv/utils.py index eb1f6519e8..d008e76186 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -236,14 +236,14 @@ def __init__(self, python_version, python_path): def __enter__(self): # Only inject when the value is valid if self.python_version: - os.environ["PIP_PYTHON_VERSION"] = str(self.python_version) + os.environ["PIPENV_REQUESTED_PYTHON_VERSION"] = str(self.python_version) if self.python_path: os.environ["PIP_PYTHON_PATH"] = str(self.python_path) def __exit__(self, *args): # Restore original Python version information. 
try: - del os.environ["PIP_PYTHON_VERSION"] + del os.environ["PIPENV_REQUESTED_PYTHON_VERSION"] except KeyError: pass @@ -586,7 +586,7 @@ def get_deps_from_req(cls, req, resolver=None): constraints.add(line) # ensure the top level entry remains as provided # note that we shouldn't pin versions for editable vcs deps - if (not req.is_vcs or (req.is_vcs and not req.editable)): + if not req.is_vcs: if req.specifiers: locked_deps[name]["version"] = req.specifiers elif parsed_line.setup_info and parsed_line.setup_info.version: @@ -682,25 +682,21 @@ def pip_command(self): self._pip_command = self._get_pip_command() return self._pip_command - def prepare_pip_args(self, use_pep517=True, build_isolation=True): + def prepare_pip_args(self, use_pep517=False, build_isolation=True): pip_args = [] if self.sources: pip_args = prepare_pip_source_args(self.sources, pip_args) - if not use_pep517: + if use_pep517 is False: pip_args.append("--no-use-pep517") - if not build_isolation: + if build_isolation is False: pip_args.append("--no-build-isolation") pip_args.extend(["--cache-dir", environments.PIPENV_CACHE_DIR]) return pip_args @property def pip_args(self): - use_pep517 = False if ( - os.environ.get("PIP_NO_USE_PEP517", None) is not None - ) else (True if os.environ.get("PIP_USE_PEP517", None) is not None else None) - build_isolation = False if ( - os.environ.get("PIP_NO_BUILD_ISOLATION", None) is not None - ) else (True if os.environ.get("PIP_BUILD_ISOLATION", None) is not None else None) + use_pep517 = environments.get_from_env("USE_PEP517", prefix="PIP") + build_isolation = environments.get_from_env("BUILD_ISOLATION", prefix="PIP") if self._pip_args is None: self._pip_args = self.prepare_pip_args( use_pep517=use_pep517, build_isolation=build_isolation @@ -741,6 +737,10 @@ def pip_options(self): if self._pip_options is None: pip_options, _ = self.pip_command.parser.parse_args(self.pip_args) pip_options.cache_dir = environments.PIPENV_CACHE_DIR + 
pip_options.no_python_version_warning = True + pip_options.no_input = True + pip_options.progress_bar = "off" + pip_options.ignore_requires_python = True self._pip_options = pip_options return self._pip_options @@ -786,6 +786,7 @@ def get_resolver(self, clear=False, pre=False): self._resolver = PiptoolsResolver( constraints=self.parsed_constraints, repository=self.repository, cache=DependencyCache(environments.PIPENV_CACHE_DIR), clear_caches=clear, + # TODO: allow users to toggle the 'allow unsafe' flag to resolve setuptools? prereleases=pre, allow_unsafe=False ) @@ -996,6 +997,8 @@ def clean_results(self): for req, ireq in reqs: if (req.vcs and req.editable and not req.is_direct_url): continue + elif req.normalized_name in self.skipped.keys(): + continue collected_hashes = self.collect_hashes(ireq) req = req.add_hashes(collected_hashes) if not collected_hashes and self._should_include_hash(ireq): @@ -1040,20 +1043,24 @@ def format_requirement_for_lockfile(req, markers_lookup, index_lookup, hashes=No entry["version"] = pf_entry.lstrip("=") else: entry.update(pf_entry) - if version is not None: + if version is not None and not req.is_vcs: entry["version"] = version - if req.line_instance.is_direct_url: + if req.line_instance.is_direct_url and not req.is_vcs: entry["file"] = req.req.uri if hashes: entry["hashes"] = sorted(set(hashes)) entry["name"] = name - if index: # and index != next(iter(project.sources), {}).get("name"): + if index: entry.update({"index": index}) if markers: entry.update({"markers": markers}) entry = translate_markers(entry) - if req.vcs or req.editable and entry.get("index"): - del entry["index"] + if req.vcs or req.editable: + for key in ("index", "version", "file"): + try: + del entry[key] + except KeyError: + pass return name, entry @@ -1874,11 +1881,6 @@ def get_vcs_deps( lockfile[name] = requirement.pipfile_entry[1] lockfile[name]['ref'] = commit_hash result.append(requirement) - version = requirement.specifiers - if not version and 
requirement.specifiers: - version = requirement.specifiers - if version: - lockfile[name]['version'] = version except OSError: continue return result, lockfile @@ -2200,8 +2202,7 @@ def find_python(finder, line=None): if not result and not line.startswith("python"): line = "python{0}".format(line) result = find_python(finder, line) - if not result: - result = next(iter(finder.find_all_python_versions()), None) + if result: if not isinstance(result, six.string_types): return result.path.as_posix() diff --git a/pipenv/vendor/appdirs.py b/pipenv/vendor/appdirs.py index ae67001af8..2acd1debeb 100644 --- a/pipenv/vendor/appdirs.py +++ b/pipenv/vendor/appdirs.py @@ -13,8 +13,8 @@ # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html -__version_info__ = (1, 4, 3) -__version__ = '.'.join(map(str, __version_info__)) +__version__ = "1.4.4" +__version_info__ = tuple(int(segment) for segment in __version__.split(".")) import sys diff --git a/pipenv/vendor/backports/enum/__init__.py b/pipenv/vendor/backports/enum/__init__.py index d6ffb3a40f..51f3cf2470 100644 --- a/pipenv/vendor/backports/enum/__init__.py +++ b/pipenv/vendor/backports/enum/__init__.py @@ -4,7 +4,7 @@ __all__ = ['Enum', 'IntEnum', 'unique'] -version = 1, 1, 6 +version = 1, 1, 10 pyver = float('%s.%s' % _sys.version_info[:2]) @@ -183,7 +183,8 @@ def __new__(metacls, cls, bases, classdict): else: del classdict['_order_'] if pyver < 3.0: - _order_ = _order_.replace(',', ' ').split() + if isinstance(_order_, basestring): + _order_ = _order_.replace(',', ' ').split() aliases = [name for name in members if name not in _order_] _order_ += aliases @@ -463,7 +464,7 @@ def _create_(cls, class_name, names=None, module=None, type=None, start=1): _order_.append(member_name) # only set _order_ in classdict if name/value was not from a mapping if not isinstance(item, basestring): - 
classdict['_order_'] = ' '.join(_order_) + classdict['_order_'] = _order_ enum_class = metacls.__new__(metacls, class_name, bases, classdict) # TODO: replace the frame hack if a blessed way to know the calling diff --git a/pipenv/vendor/backports/functools_lru_cache.py b/pipenv/vendor/backports/functools_lru_cache.py index 707c6c766d..e0b19d951a 100644 --- a/pipenv/vendor/backports/functools_lru_cache.py +++ b/pipenv/vendor/backports/functools_lru_cache.py @@ -8,10 +8,12 @@ @functools.wraps(functools.update_wrapper) -def update_wrapper(wrapper, - wrapped, - assigned = functools.WRAPPER_ASSIGNMENTS, - updated = functools.WRAPPER_UPDATES): +def update_wrapper( + wrapper, + wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES, +): """ Patch two bugs in functools.update_wrapper. """ @@ -34,10 +36,17 @@ def __hash__(self): return self.hashvalue -def _make_key(args, kwds, typed, - kwd_mark=(object(),), - fasttypes=set([int, str, frozenset, type(None)]), - sorted=sorted, tuple=tuple, type=type, len=len): +def _make_key( + args, + kwds, + typed, + kwd_mark=(object(),), + fasttypes=set([int, str, frozenset, type(None)]), + sorted=sorted, + tuple=tuple, + type=type, + len=len, +): 'Make a cache key from optionally typed positional and keyword arguments' key = args if kwds: @@ -82,16 +91,16 @@ def lru_cache(maxsize=100, typed=False): def decorating_function(user_function): cache = dict() - stats = [0, 0] # make statistics updateable non-locally - HITS, MISSES = 0, 1 # names for the stats fields + stats = [0, 0] # make statistics updateable non-locally + HITS, MISSES = 0, 1 # names for the stats fields make_key = _make_key - cache_get = cache.get # bound method to lookup key or return None - _len = len # localize the global len() function - lock = RLock() # because linkedlist updates aren't threadsafe - root = [] # root of the circular doubly linked list - root[:] = [root, root, None, None] # initialize by pointing to self - nonlocal_root = 
[root] # make updateable non-locally - PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields + cache_get = cache.get # bound method to lookup key or return None + _len = len # localize the global len() function + lock = RLock() # because linkedlist updates aren't threadsafe + root = [] # root of the circular doubly linked list + root[:] = [root, root, None, None] # initialize by pointing to self + nonlocal_root = [root] # make updateable non-locally + PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields if maxsize == 0: @@ -106,7 +115,9 @@ def wrapper(*args, **kwds): def wrapper(*args, **kwds): # simple caching without ordering or size limit key = make_key(args, kwds, typed) - result = cache_get(key, root) # root used here as a unique not-found sentinel + result = cache_get( + key, root + ) # root used here as a unique not-found sentinel if result is not root: stats[HITS] += 1 return result @@ -123,7 +134,8 @@ def wrapper(*args, **kwds): with lock: link = cache_get(key) if link is not None: - # record recent use of the key by moving it to the front of the list + # record recent use of the key by moving it + # to the front of the list root, = nonlocal_root link_prev, link_next, key, result = link link_prev[NEXT] = link_next diff --git a/pipenv/vendor/click/__init__.py b/pipenv/vendor/click/__init__.py index 3910b80323..2b6008f2dd 100644 --- a/pipenv/vendor/click/__init__.py +++ b/pipenv/vendor/click/__init__.py @@ -76,4 +76,4 @@ # literals. 
disable_unicode_literals_warning = False -__version__ = "7.1.1" +__version__ = "7.1.2" diff --git a/pipenv/vendor/click/_compat.py b/pipenv/vendor/click/_compat.py index ed57a18f95..60cb115bc5 100644 --- a/pipenv/vendor/click/_compat.py +++ b/pipenv/vendor/click/_compat.py @@ -174,8 +174,6 @@ def seekable(self): iteritems = lambda x: x.iteritems() range_type = xrange - from pipes import quote as shlex_quote - def is_bytes(x): return isinstance(x, (buffer, bytearray)) @@ -284,8 +282,6 @@ def filename_to_ui(value): isidentifier = lambda x: x.isidentifier() iteritems = lambda x: iter(x.items()) - from shlex import quote as shlex_quote - def is_bytes(x): return isinstance(x, (bytes, memoryview, bytearray)) diff --git a/pipenv/vendor/click/_termui_impl.py b/pipenv/vendor/click/_termui_impl.py index c6e86cc010..88bec37701 100644 --- a/pipenv/vendor/click/_termui_impl.py +++ b/pipenv/vendor/click/_termui_impl.py @@ -17,7 +17,6 @@ from ._compat import isatty from ._compat import open_stream from ._compat import range_type -from ._compat import shlex_quote from ._compat import strip_ansi from ._compat import term_len from ._compat import WIN @@ -346,10 +345,7 @@ def pager(generator, color=None): fd, filename = tempfile.mkstemp() os.close(fd) try: - if ( - hasattr(os, "system") - and os.system("more {}".format(shlex_quote(filename))) == 0 - ): + if hasattr(os, "system") and os.system('more "{}"'.format(filename)) == 0: return _pipepager(generator, "more", color) return _nullpager(stdout, generator, color) finally: @@ -418,7 +414,7 @@ def _tempfilepager(generator, cmd, color): with open_stream(filename, "wb")[0] as f: f.write(text.encode(encoding)) try: - os.system("{} {}".format(shlex_quote(cmd), shlex_quote(filename))) + os.system('{} "{}"'.format(cmd, filename)) finally: os.unlink(filename) @@ -463,9 +459,7 @@ def edit_file(self, filename): environ = None try: c = subprocess.Popen( - "{} {}".format(shlex_quote(editor), shlex_quote(filename)), - env=environ, - shell=True, + 
'{} "{}"'.format(editor, filename), env=environ, shell=True, ) exit_code = c.wait() if exit_code != 0: @@ -536,16 +530,18 @@ def _unquote_file(url): elif WIN: if locate: url = _unquote_file(url) - args = "explorer /select,{}".format(shlex_quote(url)) + args = 'explorer /select,"{}"'.format(_unquote_file(url.replace('"', ""))) else: - args = 'start {} "" {}'.format("/WAIT" if wait else "", shlex_quote(url)) + args = 'start {} "" "{}"'.format( + "/WAIT" if wait else "", url.replace('"', "") + ) return os.system(args) elif CYGWIN: if locate: url = _unquote_file(url) - args = "cygstart {}".format(shlex_quote(os.path.dirname(url))) + args = 'cygstart "{}"'.format(os.path.dirname(url).replace('"', "")) else: - args = "cygstart {} {}".format("-w" if wait else "", shlex_quote(url)) + args = 'cygstart {} "{}"'.format("-w" if wait else "", url.replace('"', "")) return os.system(args) try: diff --git a/pipenv/vendor/dateutil/LICENSE b/pipenv/vendor/dateutil/LICENSE new file mode 100644 index 0000000000..1e65815cf0 --- /dev/null +++ b/pipenv/vendor/dateutil/LICENSE @@ -0,0 +1,54 @@ +Copyright 2017- Paul Ganssle +Copyright 2017- dateutil contributors (see AUTHORS file) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +The above license applies to all contributions after 2017-12-01, as well as +all contributions that have been re-licensed (see AUTHORS file for the list of +contributors who have re-licensed their code). 
+-------------------------------------------------------------------------------- +dateutil - Extensions to the standard Python datetime module. + +Copyright (c) 2003-2011 - Gustavo Niemeyer +Copyright (c) 2012-2014 - Tomi Pieviläinen +Copyright (c) 2014-2016 - Yaron de Leeuw +Copyright (c) 2015- - Paul Ganssle +Copyright (c) 2015- - dateutil contributors (see AUTHORS file) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The above BSD License Applies to all code, even that also covered by Apache 2.0. 
\ No newline at end of file diff --git a/pipenv/vendor/dateutil/__init__.py b/pipenv/vendor/dateutil/__init__.py new file mode 100644 index 0000000000..0defb82e21 --- /dev/null +++ b/pipenv/vendor/dateutil/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +try: + from ._version import version as __version__ +except ImportError: + __version__ = 'unknown' + +__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', + 'utils', 'zoneinfo'] diff --git a/pipenv/vendor/dateutil/_common.py b/pipenv/vendor/dateutil/_common.py new file mode 100644 index 0000000000..4eb2659bd2 --- /dev/null +++ b/pipenv/vendor/dateutil/_common.py @@ -0,0 +1,43 @@ +""" +Common code used in multiple modules. +""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + def __hash__(self): + return hash(( + self.weekday, + self.n, + )) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) + +# vim:ts=4:sw=4:et diff --git a/pipenv/vendor/dateutil/_version.py b/pipenv/vendor/dateutil/_version.py new file mode 100644 index 0000000000..eac1209698 --- /dev/null +++ b/pipenv/vendor/dateutil/_version.py @@ -0,0 +1,4 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '2.8.1' diff --git a/pipenv/vendor/dateutil/easter.py b/pipenv/vendor/dateutil/easter.py new file mode 100644 index 0000000000..53b7c78938 --- /dev/null +++ b/pipenv/vendor/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a 
generic easter computing method for any given year, using +Western, Orthodox or Julian algorithms. +""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different easter + calculation methods: + + 1 - Original calculation in Julian calendar, valid in + dates after 326 AD + 2 - Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3 - Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. 
+ + More about the algorithm may be found at: + + `GM Arts: Easter Algorithms `_ + + and + + `The Calendar FAQ: Easter `_ + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/pipenv/vendor/dateutil/parser/__init__.py b/pipenv/vendor/dateutil/parser/__init__.py new file mode 100644 index 0000000000..d174b0e4dc --- /dev/null +++ b/pipenv/vendor/dateutil/parser/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from ._parser import parse, parser, parserinfo, ParserError +from ._parser import DEFAULTPARSER, DEFAULTTZPARSER +from ._parser import UnknownTimezoneWarning + +from ._parser import __doc__ + +from .isoparser import isoparser, isoparse + +__all__ = ['parse', 'parser', 'parserinfo', + 'isoparse', 'isoparser', + 'ParserError', + 'UnknownTimezoneWarning'] + + +### +# Deprecate portions of the private interface so that downstream code that +# is improperly relying on it is 
given *some* notice. + + +def __deprecated_private_func(f): + from functools import wraps + import warnings + + msg = ('{name} is a private function and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=f.__name__) + + @wraps(f) + def deprecated_func(*args, **kwargs): + warnings.warn(msg, DeprecationWarning) + return f(*args, **kwargs) + + return deprecated_func + +def __deprecate_private_class(c): + import warnings + + msg = ('{name} is a private class and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=c.__name__) + + class private_class(c): + __doc__ = c.__doc__ + + def __init__(self, *args, **kwargs): + warnings.warn(msg, DeprecationWarning) + super(private_class, self).__init__(*args, **kwargs) + + private_class.__name__ = c.__name__ + + return private_class + + +from ._parser import _timelex, _resultbase +from ._parser import _tzparser, _parsetz + +_timelex = __deprecate_private_class(_timelex) +_tzparser = __deprecate_private_class(_tzparser) +_resultbase = __deprecate_private_class(_resultbase) +_parsetz = __deprecated_private_func(_parsetz) diff --git a/pipenv/vendor/dateutil/parser/_parser.py b/pipenv/vendor/dateutil/parser/_parser.py new file mode 100644 index 0000000000..458aa6a329 --- /dev/null +++ b/pipenv/vendor/dateutil/parser/_parser.py @@ -0,0 +1,1609 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. + +This module attempts to be forgiving with regards to unlikely input formats, +returning a datetime object even for dates which are ambiguous. If an element +of a date/time stamp is omitted, the following rules are applied: + +- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour + on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is + specified. 
+- If a time zone is omitted, a timezone-naive datetime is returned. + +If any other elements are missing, they are taken from the +:class:`datetime.datetime` object passed to the parameter ``default``. If this +results in a day number exceeding the valid number of days per month, the +value falls back to the end of the month. + +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + `_ +- `W3C Date and Time Formats `_ +- `Time Formats (Planetary Rings Node) `_ +- `CPAN ParseDate module + `_ +- `Java SimpleDateFormat Class + `_ +""" +from __future__ import unicode_literals + +import datetime +import re +import string +import time +import warnings + +from calendar import monthrange +from io import StringIO + +import six +from six import integer_types, text_type + +from decimal import Decimal + +from warnings import warn + +from .. import relativedelta +from .. import tz + +__all__ = ["parse", "parserinfo", "ParserError"] + + +# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth +# making public and/or figuring out if there is something we can +# take off their plate. 
+class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if six.PY2: + # In Python 2, we can't duck type properly because unicode has + # a 'decode' function, and we'd be double-decoding + if isinstance(instream, (bytes, bytearray)): + instream = instream.decode() + else: + if getattr(instream, 'decode', None) is not None: + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + elif getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. 
+ if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. + token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. + if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' 
+ else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. 
+ + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. + """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), # TODO: "Tues" + ("Wed", "Wednesday"), + ("Thu", "Thursday"), # TODO: "Thurs" + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), # TODO: "Febr" + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z", "z"] + PERTAIN = ["of"] + TZOFFSET = {} + # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", + # "Anno Domini", "Year of Our Lord"] + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + try: + return 
self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + """ + Converts two-digit years to year within [-50, 49] + range of self._year (current local time) + """ + + # Function contract is that the year is always positive + assert year >= 0 + + if year < 100 and not century_specified: + # assume current century to start + year += self._century + + if year >= self._year + 50: # if too far in future + year -= 100 + elif year < self._year - 50: # if too far in past + year += 100 + + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if ((res.tzoffset == 0 and not res.tzname) or + (res.tzname == 'Z' or res.tzname == 'z')): + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.dstridx = None + self.mstridx = None + self.ystridx = None + + @property + def has_year(self): + return self.ystridx is not None + + @property + def has_month(self): + return self.mstridx is not None + + @property + def has_day(self): + return self.dstridx is not None + + def could_be_day(self, value): + if self.has_day: + return False + elif 
not self.has_month: + return 1 <= value <= 31 + elif not self.has_year: + # Be permissive, assume leap year + month = self[self.mstridx] + return 1 <= value <= monthrange(2000, month)[1] + else: + month = self[self.mstridx] + year = self[self.ystridx] + return 1 <= value <= monthrange(year, month)[1] + + def append(self, val, label=None): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + elif val > 100: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + + super(self.__class__, self).append(int(val)) + + if label == 'M': + if self.has_month: + raise ValueError('Month is already set') + self.mstridx = len(self) - 1 + elif label == 'D': + if self.has_day: + raise ValueError('Day is already set') + self.dstridx = len(self) - 1 + elif label == 'Y': + if self.has_year: + raise ValueError('Year is already set') + self.ystridx = len(self) - 1 + + def _resolve_from_stridxs(self, strids): + """ + Try to resolve the identities of year/month/day elements using + ystridx, mstridx, and dstridx, if enough of these are specified. 
+ """ + if len(self) == 3 and len(strids) == 2: + # we can back out the remaining stridx value + missing = [x for x in range(3) if x not in strids.values()] + key = [x for x in ['y', 'm', 'd'] if x not in strids] + assert len(missing) == len(key) == 1 + key = key[0] + val = missing[0] + strids[key] = val + + assert len(self) == len(strids) # otherwise this should not be called + out = {key: self[strids[key]] for key in strids} + return (out.get('y'), out.get('m'), out.get('d')) + + def resolve_ymd(self, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + strids = (('y', self.ystridx), + ('m', self.mstridx), + ('d', self.dstridx)) + + strids = {key: val for key, val in strids if val is not None} + if (len(self) == len(strids) > 0 or + (len(self) == 3 and len(strids) == 2)): + return self._resolve_from_stridxs(strids) + + mstridx = self.mstridx + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): + # One member, or two members with a month string + if mstridx is not None: + month = self[mstridx] + # since mstridx is 0 or 1, self[mstridx-1] always + # looks up the other element + other = self[mstridx - 1] + else: + other = self[0] + + if len_ymd > 1 or mstridx is None: + if other > 31: + year = other + else: + day = other + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + if self[1] > 31: + # Apr-2003-25 + month, year, day = self + else: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day = self + else: + # 01-Jan-01 + # Give precedence to day-first, since + # two-digit years is usually hand-written. 
+ day, month, year = self + + elif mstridx == 2: + # WTF!? + if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if (self[0] > 31 or + self.ystridx == 0 or + (yearfirst and self[1] <= 12 and self[2] <= 31)): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. 
doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param \\*\\*kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
+ """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ParserError("Unknown string format: %s", timestr) + + if len(res) == 0: + raise ParserError("String does not contain a date: %s", timestr) + + try: + ret = self._build_naive(res, default) + except ValueError as e: + six.raise_from(ParserError(e.args[0] + ": %s", timestr), e) + + if not ignoretz: + ret = self._build_tzaware(ret, res, tzinfos) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm","any_unused_tokens"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. + + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". 
+ + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + skipped_idxs = [] + + # year/month/day list + ymd = _ymd() + + len_l = len(l) + i = 0 + try: + while i < len_l: + + # Check if it's a number + value_repr = l[i] + try: + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Numeric token + i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) + + # Check weekday + elif info.weekday(l[i]) is not None: + value = info.weekday(l[i]) + res.weekday = value + + # Check month name + elif info.month(l[i]) is not None: + value = info.month(l[i]) + ymd.append(value, 'M') + + if i + 1 < len_l: + if l[i + 1] in ('-', '/'): + # Jan-01[-99] + sep = l[i + 1] + ymd.append(l[i + 2]) + + if i + 3 < len_l and l[i + 3] == sep: + # Jan-01-99 + ymd.append(l[i + 4]) + i += 2 + + i += 2 + + elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and + info.pertain(l[i + 2])): + # Jan of 01 + # In this case, 01 is clearly year + if l[i + 4].isdigit(): + # Convert it here to become unambiguous + value = int(l[i + 4]) + year = str(info.convertyear(value)) + ymd.append(year, 'Y') + else: + # Wrong guess + pass + # TODO: not hit in tests + i += 4 + + # Check am/pm + elif info.ampm(l[i]) is not None: + value = info.ampm(l[i]) + val_is_ampm = 
self._ampm_valid(res.hour, res.ampm, fuzzy) + + if val_is_ampm: + res.hour = self._adjust_ampm(res.hour, value) + res.ampm = value + + elif fuzzy: + skipped_idxs.append(i) + + # Check for a timezone name + elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i + 1 < len_l and l[i + 1] in ('+', '-'): + l[i + 1] = ('+', '-')[l[i + 1] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + # Check for a numbered timezone + elif res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + len_li = len(l[i + 1]) + + # TODO: check that l[i + 1] is integer? + if len_li == 4: + # -0300 + hour_offset = int(l[i + 1][:2]) + min_offset = int(l[i + 1][2:]) + elif i + 2 < len_l and l[i + 2] == ':': + # -03:00 + hour_offset = int(l[i + 1]) + min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
+ i += 2 + elif len_li <= 2: + # -[0]3 + hour_offset = int(l[i + 1][:2]) + min_offset = 0 + else: + raise ValueError(timestr) + + res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) + + # Look for a timezone name between parenthesis + if (i + 5 < len_l and + info.jump(l[i + 2]) and l[i + 3] == '(' and + l[i + 5] == ')' and + 3 <= len(l[i + 4]) and + self._could_be_tzname(res.hour, res.tzname, + None, l[i + 4])): + # -0300 (BRST) + res.tzname = l[i + 4] + i += 4 + + i += 1 + + # Check jumps + elif not (info.jump(l[i]) or fuzzy): + raise ValueError(timestr) + + else: + skipped_idxs.append(i) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) + + res.century_specified = ymd.century_specified + res.year = year + res.month = month + res.day = day + + except (IndexError, ValueError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + skipped_tokens = self._recombine_skipped(l, skipped_idxs) + return res, tuple(skipped_tokens) + else: + return res, None + + def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): + # Token is a number + value_repr = tokens[idx] + try: + value = self._to_decimal(value_repr) + except Exception as e: + six.raise_from(ValueError('Unknown numeric token'), e) + + len_li = len(value_repr) + + len_l = len(tokens) + + if (len(ymd) == 3 and len_li in (2, 4) and + res.hour is None and + (idx + 1 >= len_l or + (tokens[idx + 1] != ':' and + info.hms(tokens[idx + 1]) is None))): + # 19990101T23[59] + s = tokens[idx] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = tokens[idx] + + if not ymd and '.' not in tokens[idx]: + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + + # TODO: Check if res attributes already set. 
+ res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = self._parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = tokens[idx] + ymd.append(s[:4], 'Y') + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) + (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) + if hms is not None: + # TODO: checking that hour/minute/second are not + # already set? + self._assign_hms(res, value_repr, hms) + + elif idx + 2 < len_l and tokens[idx + 1] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? + (res.minute, res.second) = self._parse_min_sec(value) + + if idx + 4 < len_l and tokens[idx + 3] == ':': + res.second, res.microsecond = self._parsems(tokens[idx + 4]) + + idx += 2 + + idx += 2 + + elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): + sep = tokens[idx + 1] + ymd.append(value_repr) + + if idx + 2 < len_l and not info.jump(tokens[idx + 2]): + if tokens[idx + 2].isdigit(): + # 01-01[-01] + ymd.append(tokens[idx + 2]) + else: + # 01-Jan[-01] + value = info.month(tokens[idx + 2]) + + if value is not None: + ymd.append(value, 'M') + else: + raise ValueError() + + if idx + 3 < len_l and tokens[idx + 3] == sep: + # We have three members + value = info.month(tokens[idx + 4]) + + if value is not None: + ymd.append(value, 'M') + else: + ymd.append(tokens[idx + 4]) + idx += 2 + + idx += 1 + idx += 1 + + elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): + if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: + # 12 am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) + idx += 1 + else: + # Year, month or day + 
ymd.append(value) + idx += 1 + + elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): + # 12am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) + idx += 1 + + elif ymd.could_be_day(value): + ymd.append(value) + + elif not fuzzy: + raise ValueError() + + return idx + + def _find_hms_idx(self, idx, tokens, info, allow_jump): + len_l = len(tokens) + + if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: + # There is an "h", "m", or "s" label following this token. We take + # assign the upcoming label to the current token. + # e.g. the "12" in 12h" + hms_idx = idx + 1 + + elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and + info.hms(tokens[idx+2]) is not None): + # There is a space and then an "h", "m", or "s" label. + # e.g. the "12" in "12 h" + hms_idx = idx + 2 + + elif idx > 0 and info.hms(tokens[idx-1]) is not None: + # There is a "h", "m", or "s" preceding this token. Since neither + # of the previous cases was hit, there is no label following this + # token, so we use the previous label. + # e.g. the "04" in "12h04" + hms_idx = idx-1 + + elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and + info.hms(tokens[idx-2]) is not None): + # If we are looking at the final token, we allow for a + # backward-looking check to skip over a space. + # TODO: Are we sure this is the right condition here? 
+ hms_idx = idx - 2 + + else: + hms_idx = None + + return hms_idx + + def _assign_hms(self, res, value_repr, hms): + # See GH issue #427, fixing float rounding + value = self._to_decimal(value_repr) + + if hms == 0: + # Hour + res.hour = int(value) + if value % 1: + res.minute = int(60*(value % 1)) + + elif hms == 1: + (res.minute, res.second) = self._parse_min_sec(value) + + elif hms == 2: + (res.second, res.microsecond) = self._parsems(value_repr) + + def _could_be_tzname(self, hour, tzname, tzoffset, token): + return (hour is not None and + tzname is None and + tzoffset is None and + len(token) <= 5 and + (all(x in string.ascii_uppercase for x in token) + or token in self.info.UTCZONE)) + + def _ampm_valid(self, hour, ampm, fuzzy): + """ + For fuzzy parsing, 'a' or 'am' (both valid English words) + may erroneously trigger the AM/PM flag. Deal with that + here. + """ + val_is_ampm = True + + # If there's already an AM/PM flag, this one isn't one. + if fuzzy and ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with AM or PM flag.') + elif not 0 <= hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for 12-hour clock.') + + return val_is_ampm + + def _adjust_ampm(self, hour, ampm): + if hour < 12 and ampm == 1: + hour += 12 + elif hour == 12 and ampm == 0: + hour = 0 + return hour + + def _parse_min_sec(self, value): + # TODO: Every usage of this function sets res.second to the return + # value. Are there any cases where second will be returned as None and + # we *don't* want to set res.second = None? 
+ minute = int(value) + second = None + + sec_remainder = value % 1 + if sec_remainder: + second = int(60 * sec_remainder) + return (minute, second) + + def _parse_hms(self, idx, tokens, info, hms_idx): + # TODO: Is this going to admit a lot of false-positives for when we + # just happen to have digits and "h", "m" or "s" characters in non-date + # text? I guess hex hashes won't have that problem, but there's plenty + # of random junk out there. + if hms_idx is None: + hms = None + new_idx = idx + elif hms_idx > idx: + hms = info.hms(tokens[hms_idx]) + new_idx = hms_idx + else: + # Looking backwards, increment one. + hms = info.hms(tokens[hms_idx]) + 1 + new_idx = idx + + return (new_idx, hms) + + # ------------------------------------------------------------------ + # Handling for individual tokens. These are kept as methods instead + # of functions for the sake of customizability via subclassing. + + def _parsems(self, value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + def _to_decimal(self, val): + try: + decimal_value = Decimal(val) + # See GH 662, edge case, infinite value should not be converted + # via `_to_decimal` + if not decimal_value.is_finite(): + raise ValueError("Converted decimal value is infinite or NaN") + except Exception as e: + msg = "Could not convert %s to decimal" % val + six.raise_from(ValueError(msg), e) + else: + return decimal_value + + # ------------------------------------------------------------------ + # Post-Parsing construction of datetime output. These are kept as + # methods instead of functions for the sake of customizability via + # subclassing. 
+ + def _build_tzinfo(self, tzinfos, tzname, tzoffset): + if callable(tzinfos): + tzdata = tzinfos(tzname, tzoffset) + else: + tzdata = tzinfos.get(tzname) + # handle case where tzinfo is paased an options that returns None + # eg tzinfos = {'BRST' : None} + if isinstance(tzdata, datetime.tzinfo) or tzdata is None: + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(tzname, tzdata) + else: + raise TypeError("Offset must be tzinfo subclass, tz string, " + "or int offset.") + return tzinfo + + def _build_tzaware(self, naive, res, tzinfos): + if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): + tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) + aware = naive.replace(tzinfo=tzinfo) + aware = self._assign_tzname(aware, res.tzname) + + elif res.tzname and res.tzname in time.tzname: + aware = naive.replace(tzinfo=tz.tzlocal()) + + # Handle ambiguous local datetime + aware = self._assign_tzname(aware, res.tzname) + + # This is mostly relevant for winter GMT zones parsed in the UK + if (aware.tzname() != res.tzname and + res.tzname in self.info.UTCZONE): + aware = aware.replace(tzinfo=tz.UTC) + + elif res.tzoffset == 0: + aware = naive.replace(tzinfo=tz.UTC) + + elif res.tzoffset: + aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + + elif not res.tzname and not res.tzoffset: + # i.e. no timezone information was found. + aware = naive + + elif res.tzname: + # tz-like string was parsed but we don't know what to do + # with it + warnings.warn("tzname {tzname} identified but not understood. " + "Pass `tzinfos` argument in order to correctly " + "return a timezone-aware datetime. 
In a future " + "version, this will raise an " + "exception.".format(tzname=res.tzname), + category=UnknownTimezoneWarning) + aware = naive + + return aware + + def _build_naive(self, res, default): + repl = {} + for attr in ("year", "month", "day", "hour", + "minute", "second", "microsecond"): + value = getattr(res, attr) + if value is not None: + repl[attr] = value + + if 'day' not in repl: + # If the default day exceeds the last day of the month, fall back + # to the end of the month. + cyear = default.year if res.year is None else res.year + cmonth = default.month if res.month is None else res.month + cday = default.day if res.day is None else res.day + + if cday > monthrange(cyear, cmonth)[1]: + repl['day'] = monthrange(cyear, cmonth)[1] + + naive = default.replace(**repl) + + if res.weekday is not None and not res.day: + naive = naive + relativedelta.relativedelta(weekday=res.weekday) + + return naive + + def _assign_tzname(self, dt, tzname): + if dt.tzname() != tzname: + new_dt = tz.enfold(dt, fold=1) + if new_dt.tzname() == tzname: + return new_dt + + return dt + + def _recombine_skipped(self, tokens, skipped_idxs): + """ + >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] + >>> skipped_idxs = [0, 1, 2, 5] + >>> _recombine_skipped(tokens, skipped_idxs) + ["foo bar", "baz"] + """ + skipped_tokens = [] + for i, idx in enumerate(sorted(skipped_idxs)): + if i > 0 and idx - 1 == skipped_idxs[i - 1]: + skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] + else: + skipped_tokens.append(tokens[idx]) + + return skipped_tokens + + +DEFAULTPARSER = parser() + + +def parse(timestr, parserinfo=None, **kwargs): + """ + + Parse a string in one of the supported formats, using the + ``parserinfo`` parameters. + + :param timestr: + A string containing a date/time stamp. + + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. + If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. 
+ + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. 
If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ValueError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] + used_idxs = list() + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + + for ii in range(j): + used_idxs.append(ii) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. 
+ signal = (1, -1)[l[i] == '+'] + used_idxs.append(i) + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) * signal) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i]) * 3600 + + int(l[i + 2]) * 60) * signal) + used_idxs.append(i) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2]) * 3600 * signal) + else: + return None + used_idxs.append(i) + i += 1 + if res.dstabbr: + break + else: + break + + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789+-"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + used_idxs.append(i) + i += 2 + if l[i] == '-': + value = int(l[i + 1]) * -1 + used_idxs.append(i) + i += 1 + else: + value = int(l[i]) + used_idxs.append(i) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i]) - 1) % 7 + else: + x.day = int(l[i]) + used_idxs.append(i) + i += 2 + x.time = int(l[i]) + used_idxs.append(i) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + used_idxs.append(i) + i += 1 + else: + signal = 1 + used_idxs.append(i) + res.dstoffset = (res.stdoffset + int(l[i]) * signal) + + # This was a made-up format that is not in normal use + warn(('Parsed time zone "%s"' % tzstr) + + 'is in a non-standard dateutil-specific format, which ' + + 'is now deprecated; support for parsing this format ' + + 'will be removed in future versions. 
It is recommended ' + + 'that you switch to a standard format like the GNU ' + + 'TZ variable format.', tz.DeprecatedTzFormatWarning) + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + used_idxs.append(i) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + used_idxs.append(i) + i += 1 + x.month = int(l[i]) + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.weekday = (int(l[i]) - 1) % 7 + else: + # year day (zero based) + x.yday = int(l[i]) + 1 + + used_idxs.append(i) + i += 1 + + if i < len_l and l[i] == '/': + used_idxs.append(i) + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 + used_idxs.append(i) + i += 2 + if i + 1 < len_l and l[i + 1] == ':': + used_idxs.append(i) + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2]) * 3600) + else: + return None + used_idxs.append(i) + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + unused_idxs = set(range(len_l)).difference(used_idxs) + res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return DEFAULTTZPARSER.parse(tzstr) + + +class ParserError(ValueError): + """Error class for representing failure to parse a datetime string.""" + def __str__(self): + try: + return self.args[0] % self.args[1:] + except 
(TypeError, IndexError): + return super(ParserError, self).__str__() + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, str(self)) + + +class UnknownTimezoneWarning(RuntimeWarning): + """Raised when the parser finds a timezone it cannot parse into a tzinfo""" +# vim:ts=4:sw=4:et diff --git a/pipenv/vendor/dateutil/parser/isoparser.py b/pipenv/vendor/dateutil/parser/isoparser.py new file mode 100644 index 0000000000..48f86a3355 --- /dev/null +++ b/pipenv/vendor/dateutil/parser/isoparser.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +""" +This module offers a parser for ISO-8601 strings + +It is intended to support all valid date, time and datetime formats per the +ISO-8601 specification. + +..versionadded:: 2.7.0 +""" +from datetime import datetime, timedelta, time, date +import calendar +from dateutil import tz + +from functools import wraps + +import re +import six + +__all__ = ["isoparse", "isoparser"] + + +def _takes_ascii(f): + @wraps(f) + def func(self, str_in, *args, **kwargs): + # If it's a stream, read the whole thing + str_in = getattr(str_in, 'read', lambda: str_in)() + + # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII + if isinstance(str_in, six.text_type): + # ASCII is the same in UTF-8 + try: + str_in = str_in.encode('ascii') + except UnicodeEncodeError as e: + msg = 'ISO-8601 strings should contain only ASCII characters' + six.raise_from(ValueError(msg), e) + + return f(self, str_in, *args, **kwargs) + + return func + + +class isoparser(object): + def __init__(self, sep=None): + """ + :param sep: + A single character that separates date and time portions. If + ``None``, the parser will accept any single character. + For strict ISO-8601 adherence, pass ``'T'``. 
+ """ + if sep is not None: + if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): + raise ValueError('Separator must be a single, non-numeric ' + + 'ASCII character') + + sep = sep.encode('ascii') + + self._sep = sep + + @_takes_ascii + def isoparse(self, dt_str): + """ + Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. + + An ISO-8601 datetime string consists of a date portion, followed + optionally by a time portion - the date and time portions are separated + by a single character separator, which is ``T`` in the official + standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be + combined with a time portion. + + Supported date formats are: + + Common: + + - ``YYYY`` + - ``YYYY-MM`` or ``YYYYMM`` + - ``YYYY-MM-DD`` or ``YYYYMMDD`` + + Uncommon: + + - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) + - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day + + The ISO week and day numbering follows the same logic as + :func:`datetime.date.isocalendar`. + + Supported time formats are: + + - ``hh`` + - ``hh:mm`` or ``hhmm`` + - ``hh:mm:ss`` or ``hhmmss`` + - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) + + Midnight is a special case for `hh`, as the standard supports both + 00:00 and 24:00 as a representation. The decimal separator can be + either a dot or a comma. + + + .. caution:: + + Support for fractional components other than seconds is part of the + ISO-8601 standard, but is not currently implemented in this parser. + + Supported time zone offset formats are: + + - `Z` (UTC) + - `±HH:MM` + - `±HHMM` + - `±HH` + + Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, + with the exception of UTC, which will be represented as + :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such + as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. 
+ + :param dt_str: + A string or stream containing only an ISO-8601 datetime string + + :return: + Returns a :class:`datetime.datetime` representing the string. + Unspecified components default to their lowest value. + + .. warning:: + + As of version 2.7.0, the strictness of the parser should not be + considered a stable part of the contract. Any valid ISO-8601 string + that parses correctly with the default settings will continue to + parse correctly in future versions, but invalid strings that + currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not + guaranteed to continue failing in future versions if they encode + a valid date. + + .. versionadded:: 2.7.0 + """ + components, pos = self._parse_isodate(dt_str) + + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + """ + Parse the date portion of an ISO string. + + :param datestr: + The string portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.date` object + """ + components, pos = self._parse_isodate(datestr) + if pos < len(datestr): + raise ValueError('String contains unknown ISO ' + + 'components: {}'.format(datestr)) + return date(*components) + + @_takes_ascii + def parse_isotime(self, timestr): + """ + Parse the time portion of an ISO string. + + :param timestr: + The time portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.time` object + """ + components = self._parse_isotime(timestr) + if components[0] == 24: + components[0] = 0 + return time(*components) + + @_takes_ascii + def parse_tzstr(self, tzstr, zero_as_utc=True): + """ + Parse a valid ISO time zone string. 
+ + See :func:`isoparser.isoparse` for details on supported formats. + + :param tzstr: + A string representing an ISO time zone offset + + :param zero_as_utc: + Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones + + :return: + Returns :class:`dateutil.tz.tzoffset` for offsets and + :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is + specified) offsets equivalent to UTC. + """ + return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + + # Constants + _DATE_SEP = b'-' + _TIME_SEP = b':' + _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') + + def _parse_isodate(self, dt_str): + try: + return self._parse_isodate_common(dt_str) + except ValueError: + return self._parse_isodate_uncommon(dt_str) + + def _parse_isodate_common(self, dt_str): + len_str = len(dt_str) + components = [1, 1, 1] + + if len_str < 4: + raise ValueError('ISO string too short') + + # Year + components[0] = int(dt_str[0:4]) + pos = 4 + if pos >= len_str: + return components, pos + + has_sep = dt_str[pos:pos + 1] == self._DATE_SEP + if has_sep: + pos += 1 + + # Month + if len_str - pos < 2: + raise ValueError('Invalid common month') + + components[1] = int(dt_str[pos:pos + 2]) + pos += 2 + + if pos >= len_str: + if has_sep: + return components, pos + else: + raise ValueError('Invalid ISO format') + + if has_sep: + if dt_str[pos:pos + 1] != self._DATE_SEP: + raise ValueError('Invalid separator in ISO string') + pos += 1 + + # Day + if len_str - pos < 2: + raise ValueError('Invalid common day') + components[2] = int(dt_str[pos:pos + 2]) + return components, pos + 2 + + def _parse_isodate_uncommon(self, dt_str): + if len(dt_str) < 4: + raise ValueError('ISO string too short') + + # All ISO formats start with the year + year = int(dt_str[0:4]) + + has_sep = dt_str[4:5] == self._DATE_SEP + + pos = 4 + has_sep # Skip '-' if it's there + if dt_str[pos:pos + 1] == b'W': + # YYYY-?Www-?D? 
+ pos += 1 + weekno = int(dt_str[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dt_str) > pos: + if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: + raise ValueError('Inconsistent use of dash separator') + + pos += has_sep + + dayno = int(dt_str[pos:pos + 1]) + pos += 1 + + base_date = self._calculate_weekdate(year, weekno, dayno) + else: + # YYYYDDD or YYYY-DDD + if len(dt_str) - pos < 3: + raise ValueError('Invalid ordinal day') + + ordinal_day = int(dt_str[pos:pos + 3]) + pos += 3 + + if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): + raise ValueError('Invalid ordinal day' + + ' {} for year {}'.format(ordinal_day, year)) + + base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) + + components = [base_date.year, base_date.month, base_date.day] + return components, pos + + def _calculate_weekdate(self, year, week, day): + """ + Calculate the day of corresponding to the ISO year-week-day calendar. + + This function is effectively the inverse of + :func:`datetime.date.isocalendar`. 
+ + :param year: + The year in the ISO calendar + + :param week: + The week in the ISO calendar - range is [1, 53] + + :param day: + The day in the ISO calendar - range is [1 (MON), 7 (SUN)] + + :return: + Returns a :class:`datetime.date` + """ + if not 0 < week < 54: + raise ValueError('Invalid week: {}'.format(week)) + + if not 0 < day < 8: # Range is 1-7 + raise ValueError('Invalid weekday: {}'.format(day)) + + # Get week 1 for the specific year: + jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it + week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) + + # Now add the specific number of weeks and days to get what we want + week_offset = (week - 1) * 7 + (day - 1) + return week_1 + timedelta(days=week_offset) + + def _parse_isotime(self, timestr): + len_str = len(timestr) + components = [0, 0, 0, 0, None] + pos = 0 + comp = -1 + + if len(timestr) < 2: + raise ValueError('ISO time too short') + + has_sep = len_str >= 3 and timestr[2:3] == self._TIME_SEP + + while pos < len_str and comp < 5: + comp += 1 + + if timestr[pos:pos + 1] in b'-+Zz': + # Detect time zone boundary + components[-1] = self._parse_tzstr(timestr[pos:]) + pos = len_str + break + + if comp < 3: + # Hour, minute, second + components[comp] = int(timestr[pos:pos + 2]) + pos += 2 + if (has_sep and pos < len_str and + timestr[pos:pos + 1] == self._TIME_SEP): + pos += 1 + + if comp == 3: + # Fraction of a second + frac = self._FRACTION_REGEX.match(timestr[pos:]) + if not frac: + continue + + us_str = frac.group(1)[:6] # Truncate to microseconds + components[comp] = int(us_str) * 10**(6 - len(us_str)) + pos += len(frac.group()) + + if pos < len_str: + raise ValueError('Unused components in ISO string') + + if components[0] == 24: + # Standard supports 00:00 and 24:00 as representations of midnight + if any(component != 0 for component in components[1:4]): + raise ValueError('Hour may only be 24 at 24:00:00.000') + + return components + + def _parse_tzstr(self, tzstr, 
zero_as_utc=True): + if tzstr == b'Z' or tzstr == b'z': + return tz.UTC + + if len(tzstr) not in {3, 5, 6}: + raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') + + if tzstr[0:1] == b'-': + mult = -1 + elif tzstr[0:1] == b'+': + mult = 1 + else: + raise ValueError('Time zone offset requires sign') + + hours = int(tzstr[1:3]) + if len(tzstr) == 3: + minutes = 0 + else: + minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):]) + + if zero_as_utc and hours == 0 and minutes == 0: + return tz.UTC + else: + if minutes > 59: + raise ValueError('Invalid minutes in time zone offset') + + if hours > 23: + raise ValueError('Invalid hours in time zone offset') + + return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) + + +DEFAULT_ISOPARSER = isoparser() +isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/pipenv/vendor/dateutil/relativedelta.py b/pipenv/vendor/dateutil/relativedelta.py new file mode 100644 index 0000000000..a9e85f7e6c --- /dev/null +++ b/pipenv/vendor/dateutil/relativedelta.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is designed to be applied to an existing datetime and + can replace specific components of that datetime, or represents an interval + of time. + + It is based on the specification of the excellent work done by M.-A. Lemburg + in his + `mx.DateTime `_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. 
The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding arithmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc) available in the + relativedelta module. These instances may receive a parameter N, + specifying the Nth weekday, which could be positive or negative + (like MO(+1) or MO(-2)). Not specifying it is the same as specifying + +1. You can also use an integer, where 0=MO. This argument is always + relative e.g. if the calculated date is already Monday, using MO(1) + or MO(-1) won't change the day. To effectively make it absolute, use + it in combination with the day argument (e.g. day=1, MO(1) for first + Monday of the month). + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + There are relative and absolute forms of the keyword + arguments. The plural is relative, and the singular is + absolute. 
For each argument in the order below, the absolute form + is applied first (by setting each attribute to that value) and + then the relative form (by adding the value to the attribute). + + The order of attributes considered when this relativedelta is + added to a datetime is: + + 1. Year + 2. Month + 3. Day + 4. Hours + 5. Minutes + 6. Seconds + 7. Microseconds + + Finally, weekday is applied, using the rule described above. + + For example + + >>> from datetime import datetime + >>> from dateutil.relativedelta import relativedelta, MO + >>> dt = datetime(2018, 4, 9, 13, 37, 0) + >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) + >>> dt + delta + datetime.datetime(2018, 4, 2, 14, 37) + + First, the day is set to 1 (the first of the month), then 25 hours + are added, to get to the 2nd day and 14th hour, finally the + weekday is applied, but since the 2nd is already a Monday there is + no effect. + + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + if dt1 and dt2: + # datetime is a subclass of date. 
So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + # Relative information + self.years = int(years) + self.months = int(months) + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + 
self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. + warn("Non-integer value passed as absolute information. " + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or 
self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return int(self.days / 7.0) + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=+1, hours=+14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. + """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + 
self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += 
self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __abs__(self): + return self.__class__(years=abs(self.years), + months=abs(self.months), + days=abs(self.days), + hours=abs(self.hours), + minutes=abs(self.minutes), + seconds=abs(self.seconds), + microseconds=abs(self.microseconds), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + 
hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + self.months == 
other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __hash__(self): + return hash(( + self.weekday, + self.years, + self.months, + self.days, + self.hours, + self.minutes, + self.seconds, + self.microseconds, + self.leapdays, + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + )) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/pipenv/vendor/dateutil/rrule.py b/pipenv/vendor/dateutil/rrule.py new file mode 100644 index 0000000000..6bf0ea9c64 --- /dev/null +++ b/pipenv/vendor/dateutil/rrule.py @@ -0,0 +1,1735 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC `_, +including support 
for caching of results. +""" +import itertools +import datetime +import calendar +import re +import sys + +try: + from math import gcd +except ImportError: + from fractions import gcd + +from six import advance_iterator, integer_types +from six.moves import _thread, range +import heapq + +from ._common import weekday as weekdaybase + +# For warning about deprecation of until and count +from warnings import warn + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. +M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. 
+ """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. + """ + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = 
advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penalty. + def count(self): + """ Returns the number of recurrences in this set. It will have go + trough the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. + + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. 
+ + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. 
+ + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + If given, this determines how many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param until: + If given, this must be a datetime instance specifying the upper-bound + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. 
note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. 
It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. 
+ """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + if until and until.tzinfo: + dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) + else: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if self._dtstart and self._until: + if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): + # According to RFC5545 Section 3.3.10: + # https://tools.ietf.org/html/rfc5545#section-3.3.10 + # + # > If the "DTSTART" property is specified as a date with UTC + # > time or a date with local time and time zone reference, + # > then the UNTIL rule part MUST be specified as a date with + # > UTC time. + raise ValueError( + 'RRULE UNTIL values must be specified in UTC when DTSTART ' + 'is timezone-aware' + ) + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 5545" + " and has been deprecated in dateutil. 
Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + 
self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. 
+ if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = () + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = () + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = {dtstart.hour} + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = {dtstart.minute} + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + 
else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC5545, except for the + dateutil-specific extension BYEASTER. + """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC5545-compliant strings, so we should modify that. 
+ original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append('RRULE:' + ';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: 
ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res 
>= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + 
filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. 
+ + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). 
+ + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. + + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. 
+ rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. 
If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. + if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. 
+ dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. 
""" + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + + + +class _rrulestr(object): + """ Parses a string representation of a recurrence rule or set of + recurrence rules. + + :param s: + Required, a string defining one or more recurrence rules. + + :param dtstart: + If given, used as the default recurrence start if not specified in the + rule string. + + :param cache: + If set ``True`` caching of results will be enabled, improving + performance of multiple queries considerably. + + :param unfold: + If set ``True`` indicates that a rule string is split over more + than one line and should be joined before processing. + + :param forceset: + If set ``True`` forces a :class:`dateutil.rrule.rruleset` to + be returned. + + :param compatible: + If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime.datetime` object is returned. + + :param tzids: + If given, a callable or mapping used to retrieve a + :class:`datetime.tzinfo` from a string representation. + Defaults to :func:`dateutil.tz.gettz`. 
+ + :param tzinfos: + Additional time zone names / aliases which may be present in a string + representation. See :func:`dateutil.parser.parse` for more + information. + + :return: + Returns a :class:`dateutil.rrule.rruleset` or + :class:`dateutil.rrule.rrule` + """ + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. 
+ splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_date_value(self, date_value, parms, rule_tzids, + ignoretz, tzids, tzinfos): + global parser + if not parser: + from dateutil import parser + + datevals = [] + value_found = False + TZID = None + + for parm in parms: + if parm.startswith("TZID="): + try: + tzkey = rule_tzids[parm.split('TZID=')[-1]] + except KeyError: + continue + if tzids is None: + from . import tz + tzlookup = tz.gettz + elif callable(tzids): + tzlookup = tzids + else: + tzlookup = getattr(tzids, 'get', None) + if tzlookup is None: + msg = ('tzids must be a callable, mapping, or None, ' + 'not %s' % tzids) + raise ValueError(msg) + + TZID = tzlookup(tzkey) + continue + + # RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found + # only once. 
+ if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}: + raise ValueError("unsupported parm: " + parm) + else: + if value_found: + msg = ("Duplicate value parameter found in: " + parm) + raise ValueError(msg) + value_found = True + + for datestr in date_value.split(','): + date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos) + if TZID is not None: + if date.tzinfo is None: + date = date.replace(tzinfo=TZID) + else: + raise ValueError('DTSTART/EXDATE specifies multiple timezone') + datevals.append(date) + + return datevals + + def _parse_rfc(self, s, + dtstart=None, + cache=False, + unfold=False, + forceset=False, + compatible=False, + ignoretz=False, + tzids=None, + tzinfos=None): + global parser + if compatible: + forceset = True + unfold = True + + TZID_NAMES = dict(map( + lambda x: (x.upper(), x), + re.findall('TZID=(?P[^:]+):', s) + )) + s = s.upper() + if not s.strip(): + raise ValueError("empty string") + if unfold: + lines = s.splitlines() + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + else: + lines = s.split() + if (not forceset and len(lines) == 1 and (s.find(':') == -1 or + s.startswith('RRULE:'))): + return self._parse_rfc_rrule(lines[0], cache=cache, + dtstart=dtstart, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + rrulevals = [] + rdatevals = [] + exrulevals = [] + exdatevals = [] + for line in lines: + if not line: + continue + if line.find(':') == -1: + name = "RRULE" + value = line + else: + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0] + parms = parms[1:] + if name == "RRULE": + for parm in parms: + raise ValueError("unsupported RRULE parm: "+parm) + rrulevals.append(value) + elif name == "RDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + 
rdatevals.append(value) + elif name == "EXRULE": + for parm in parms: + raise ValueError("unsupported EXRULE parm: "+parm) + exrulevals.append(value) + elif name == "EXDATE": + exdatevals.extend( + self._parse_date_value(value, parms, + TZID_NAMES, ignoretz, + tzids, tzinfos) + ) + elif name == "DTSTART": + dtvals = self._parse_date_value(value, parms, TZID_NAMES, + ignoretz, tzids, tzinfos) + if len(dtvals) != 1: + raise ValueError("Multiple DTSTART values specified:" + + value) + dtstart = dtvals[0] + else: + raise ValueError("unsupported property: "+name) + if (forceset or len(rrulevals) > 1 or rdatevals + or exrulevals or exdatevals): + if not parser and (rdatevals or exdatevals): + from dateutil import parser + rset = rruleset(cache=cache) + for value in rrulevals: + rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in rdatevals: + for datestr in value.split(','): + rset.rdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exrulevals: + rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + rset.exdate(value) + if compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/pipenv/vendor/dateutil/tz/__init__.py b/pipenv/vendor/dateutil/tz/__init__.py new file mode 100644 index 0000000000..af1352c472 --- /dev/null +++ b/pipenv/vendor/dateutil/tz/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from .tz import * +from .tz import __doc__ + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", + "enfold", "datetime_ambiguous", "datetime_exists", + "resolve_imaginary", "UTC", 
"DeprecatedTzFormatWarning"] + + +class DeprecatedTzFormatWarning(Warning): + """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/pipenv/vendor/dateutil/tz/_common.py b/pipenv/vendor/dateutil/tz/_common.py new file mode 100644 index 0000000000..e6ac118315 --- /dev/null +++ b/pipenv/vendor/dateutil/tz/_common.py @@ -0,0 +1,419 @@ +from six import PY2 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. It used to return bytes, but was changed + to unicode strings + """ + if PY2: + @wraps(namefunc) + def adjust_encoding(*args, **kwargs): + name = namefunc(*args, **kwargs) + if name is not None: + name = name.encode() + + return name + + return adjust_encoding + else: + return namefunc + + +# The following is adapted from Alexander Belopolsky's tz library +# https://github.com/abalkin/tz +if hasattr(datetime, 'fold'): + # This is the pre-python 3.6 fold situation + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + return dt.replace(fold=fold) + +else: + class _DatetimeWithFold(datetime): + """ + This is a class designed to provide a PEP 495-compliant interface for + Python versions before 3.6. 
It is used only for dates in a fold, so + the ``fold`` attribute is fixed at ``1``. + + .. versionadded:: 2.6.0 + """ + __slots__ = () + + def replace(self, *args, **kwargs): + """ + Return a datetime with the same attributes, except for those + attributes given new values by whichever keyword arguments are + specified. Note that tzinfo=None can be specified to create a naive + datetime from an aware datetime with no conversion of date and time + data. + + This is reimplemented in ``_DatetimeWithFold`` because pypy3 will + return a ``datetime.datetime`` even if ``fold`` is unchanged. + """ + argnames = ( + 'year', 'month', 'day', 'hour', 'minute', 'second', + 'microsecond', 'tzinfo' + ) + + for arg, argname in zip(args, argnames): + if argname in kwargs: + raise TypeError('Duplicate argument: {}'.format(argname)) + + kwargs[argname] = arg + + for argname in argnames: + if argname not in kwargs: + kwargs[argname] = getattr(self, argname) + + dt_class = self.__class__ if kwargs.get('fold', 1) else datetime + + return dt_class(**kwargs) + + @property + def fold(self): + return 1 + + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. 
versionadded:: 2.6.0 + """ + if getattr(dt, 'fold', 0) == fold: + return dt + + args = dt.timetuple()[:6] + args += (dt.microsecond, dt.tzinfo) + + if fold: + return _DatetimeWithFold(*args) + else: + return datetime(*args) + + +def _validate_fromutc_inputs(f): + """ + The CPython version of ``fromutc`` checks that the input is a ``datetime`` + object and that ``self`` is attached as its ``tzinfo``. + """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. 
+ """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). 
+ + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. 
versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. 
versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ diff --git a/pipenv/vendor/dateutil/tz/_factories.py b/pipenv/vendor/dateutil/tz/_factories.py new file mode 100644 index 0000000000..f8a65891a0 --- /dev/null +++ b/pipenv/vendor/dateutil/tz/_factories.py @@ -0,0 +1,80 @@ +from datetime import timedelta +import weakref +from collections import OrderedDict + +from six.moves import _thread + + +class _TzSingleton(type): + def __init__(cls, *args, **kwargs): + cls.__instance = None + super(_TzSingleton, cls).__init__(*args, **kwargs) + + def __call__(cls): + if cls.__instance is None: + cls.__instance = super(_TzSingleton, cls).__call__() + return cls.__instance + + +class _TzFactory(type): + def instance(cls, *args, **kwargs): + """Alternate constructor that returns a fresh instance""" + return type.__call__(cls, *args, **kwargs) + + +class _TzOffsetFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + 
cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls._cache_lock = _thread.allocate_lock() + + def __call__(cls, name, offset): + if isinstance(offset, timedelta): + key = (name, offset.total_seconds()) + else: + key = (name, offset) + + instance = cls.__instances.get(key, None) + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(name, offset)) + + # This lock may not be necessary in Python 3. See GH issue #901 + with cls._cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + + +class _TzStrFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls.__cache_lock = _thread.allocate_lock() + + def __call__(cls, s, posix_offset=False): + key = (s, posix_offset) + instance = cls.__instances.get(key, None) + + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(s, posix_offset)) + + # This lock may not be necessary in Python 3. See GH issue #901 + with cls.__cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + diff --git a/pipenv/vendor/dateutil/tz/tz.py b/pipenv/vendor/dateutil/tz/tz.py new file mode 100644 index 0000000000..af81e88e11 --- /dev/null +++ b/pipenv/vendor/dateutil/tz/tz.py @@ -0,0 +1,1849 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:class:`datetime.tzinfo` type. 
There are classes to handle tzfile format +files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, +etc), TZ environment string (in all known formats), given ranges (with help +from relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect +import weakref +from collections import OrderedDict + +import six +from six import string_types +from six.moves import _thread +from ._common import tzname_in_python2, _tzinfo +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +from ._factories import _TzSingleton, _TzOffsetFactory +from ._factories import _TzStrFactory +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +# For warning about rounding tzinfo +from warnings import warn + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCHORDINAL = EPOCH.toordinal() + + +@six.add_metaclass(_TzSingleton) +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + + **Examples:** + + .. doctest:: + + >>> from datetime import * + >>> from dateutil.tz import * + + >>> datetime.now() + datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) + + >>> datetime.now(tzutc()) + datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) + + >>> datetime.now(tzutc()).tzname() + 'UTC' + + .. versionchanged:: 2.7.0 + ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will + always return the same object. + + .. doctest:: + + >>> from dateutil.tz import tzutc, UTC + >>> tzutc() is tzutc() + True + >>> tzutc() is UTC + True + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. 
+ + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. + """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +#: Convenience constant providing a :class:`tzutc()` instance +#: +#: .. versionadded:: 2.7.0 +UTC = tzutc() + + +@six.add_metaclass(_TzOffsetFactory) +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object). + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = offset.total_seconds() + except (TypeError, AttributeError): + pass + + self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. 
+ + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(self._offset.total_seconds())) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. + """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + self._tznames = tuple(time.tzname) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._tznames[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. 
It is unstable when deciding if + # the hour near to a change is DST or not. + # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if isinstance(other, tzlocal): + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + elif isinstance(other, tzutc): + return (not self._hasdst and + self._tznames[0] in {'UTC', 'GMT'} and + self._std_offset == ZERO) + elif isinstance(other, tzoffset): + return (not self._hasdst and + self._tznames[0] == other._name and + self._std_offset == other._offset) + else: + return NotImplemented + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + 
return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass that allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. If omitted + and ``fileobj`` is a file stream, this parameter will be set either to + ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. + + See `Sources for Time Zone and Daylight Saving Time Data + `_ for more information. + Time zone files can be compiled from the `IANA Time Zone database files + `_ with the `zic time zone compiler + `_ + + .. 
note:: + + Only construct a ``tzfile`` directly if you have a specific timezone + file on disk that you want to read into a Python ``tzinfo`` object. + If you want to get a ``tzfile`` representing a specific IANA zone, + (e.g. ``'America/New_York'``), you should call + :func:`dateutil.tz.gettz` with the zone identifier. + + + **Examples:** + + Using the US Eastern time zone as an example, we can see that a ``tzfile`` + provides time zone information for the standard Daylight Saving offsets: + + .. testsetup:: tzfile + + from dateutil.tz import gettz + from datetime import datetime + + .. doctest:: tzfile + + >>> NYC = gettz('America/New_York') + >>> NYC + tzfile('/usr/share/zoneinfo/America/New_York') + + >>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST + 2016-01-03 00:00:00-05:00 + + >>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT + 2016-07-07 00:00:00-04:00 + + + The ``tzfile`` structure contains a fully history of the time zone, + so historical dates will also have the right offsets. For example, before + the adoption of the UTC standards, New York used local solar mean time: + + .. doctest:: tzfile + + >>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT + 1901-04-12 00:00:00-04:56 + + And during World War II, New York was on "Eastern War Time", which was a + state of permanent daylight saving time: + + .. 
doctest:: tzfile + + >>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT + 1944-02-07 00:00:00-04:00 + + """ + + def __init__(self, fileobj, filename=None): + super(tzfile, self).__init__() + + file_opened_here = False + if isinstance(fileobj, string_types): + self._filename = fileobj + fileobj = open(fileobj, 'rb') + file_opened_here = True + elif filename is not None: + self._filename = filename + elif hasattr(fileobj, "name"): + self._filename = fileobj.name + else: + self._filename = repr(fileobj) + + if fileobj is not None: + if not file_opened_here: + fileobj = _nullcontext(fileobj) + + with fileobj as file_stream: + tzobj = self._read_tzfile(file_stream) + + self._set_tzdata(tzobj) + + def _set_tzdata(self, tzobj): + """ Set the time zone data of this object from a _tzfile object """ + # Copy the relevant attributes over as private attributes + for attr in _tzfile.attrs: + setattr(self, '_' + attr, getattr(tzobj, attr)) + + def _read_tzfile(self, fileobj): + out = _tzfile() + + # From tzfile(5): + # + # The time zone information files used by tzset(3) + # begin with the magic characters "TZif" to identify + # them as time zone information files, followed by + # sixteen bytes reserved for future use, followed by + # six four-byte values of type long, written in a + # ``standard'' byte order (the high-order byte + # of the value is written first). + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") + + fileobj.read(16) + + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, + + # The number of standard/wall indicators stored in the file. + ttisstdcnt, + + # The number of leap seconds for which data is + # stored in the file. + leapcnt, + + # The number of "transition times" for which data + # is stored in the file. + timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). 
+ typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. + + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. 
+ # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. + + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + gmtoff = _get_supported_offset(gmtoff) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. 
+ out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. + lastdst = None + lastoffset = None + lastdstoffset = None + lastbaseoffset = None + out.trans_list = [] + + for i, tti in enumerate(out.trans_idx): + offset = tti.offset + dstoffset = 0 + + if lastdst is not None: + if tti.isdst: + if not lastdst: + dstoffset = offset - lastoffset + + if not dstoffset and lastdstoffset: + dstoffset = lastdstoffset + + tti.dstoffset = datetime.timedelta(seconds=dstoffset) + lastdstoffset = dstoffset + + # If a time zone changes its base offset during a DST transition, + # then you need to adjust by the previous base offset to get the + # transition time in local time. Otherwise you use the current + # base offset. Ideally, I would have some mathematical proof of + # why this is true, but I haven't really thought about it enough. 
+ baseoffset = offset - dstoffset + adjustment = baseoffset + if (lastbaseoffset is not None and baseoffset != lastbaseoffset + and tti.isdst != lastdst): + # The base DST has changed + adjustment = lastbaseoffset + + lastdst = tti.isdst + lastoffset = offset + lastbaseoffset = baseoffset + + out.trans_list.append(out.trans_list_utc[i] + adjustment) + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. + trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. 
+ """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. 
+ idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. + return tti.dstoffset + + @tzname_in_python2 + def tzname(self, dt): + if not self._ttinfo_std or dt is None: + return None + return self._find_ttinfo(dt).abbr + + def __eq__(self, other): + if not isinstance(other, tzfile): + return NotImplemented + return (self._trans_list == other._trans_list and + self._trans_idx == other._trans_idx and + self._ttinfo_list == other._ttinfo_list) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) + + def __reduce__(self): + return self.__reduce_ex__(None) + + def __reduce_ex__(self, protocol): + return (self.__class__, (None, self._filename), self.__dict__) + + +class tzrange(tzrangebase): + """ + The ``tzrange`` object is a time zone specified by a set of offsets and + abbreviations, equivalent to the way the ``TZ`` variable can be specified + in POSIX-like systems, but using Python delta objects to specify DST + start, end and offsets. + + :param stdabbr: + The abbreviation for standard time (e.g. ``'EST'``). + + :param stdoffset: + An integer or :class:`datetime.timedelta` object or equivalent + specifying the base offset from UTC. + + If unspecified, +00:00 is used. + + :param dstabbr: + The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). + + If specified, with no other DST information, DST is assumed to occur + and the default behavior or ``dstoffset``, ``start`` and ``end`` is + used. 
If unspecified and no other DST information is specified, it + is assumed that this zone has no DST. + + If this is unspecified and other DST information is *is* specified, + DST occurs in the zone but the time zone abbreviation is left + unchanged. + + :param dstoffset: + A an integer or :class:`datetime.timedelta` object or equivalent + specifying the UTC offset during DST. If unspecified and any other DST + information is specified, it is assumed to be the STD offset +1 hour. + + :param start: + A :class:`relativedelta.relativedelta` object or equivalent specifying + the time and time of year that daylight savings time starts. To + specify, for example, that DST starts at 2AM on the 2nd Sunday in + March, pass: + + ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` + + If unspecified and any other DST information is specified, the default + value is 2 AM on the first Sunday in April. + + :param end: + A :class:`relativedelta.relativedelta` object or equivalent + representing the time and time of year that daylight savings time + ends, with the same specification method as in ``start``. One note is + that this should point to the first time in the *standard* zone, so if + a transition occurs at 2AM in the DST zone and the clocks are set back + 1 hour to 1AM, set the ``hours`` parameter to +1. + + + **Examples:** + + .. testsetup:: tzrange + + from dateutil.tz import tzrange, tzstr + + .. doctest:: tzrange + + >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") + True + + >>> from dateutil.relativedelta import * + >>> range1 = tzrange("EST", -18000, "EDT") + >>> range2 = tzrange("EST", -18000, "EDT", -14400, + ... relativedelta(hours=+2, month=4, day=1, + ... weekday=SU(+1)), + ... relativedelta(hours=+1, month=10, day=31, + ... 
weekday=SU(-1))) + >>> tzstr('EST5EDT') == range1 == range2 + True + + """ + def __init__(self, stdabbr, stdoffset=None, + dstabbr=None, dstoffset=None, + start=None, end=None): + + global relativedelta + from dateutil import relativedelta + + self._std_abbr = stdabbr + self._dst_abbr = dstabbr + + try: + stdoffset = stdoffset.total_seconds() + except (TypeError, AttributeError): + pass + + try: + dstoffset = dstoffset.total_seconds() + except (TypeError, AttributeError): + pass + + if stdoffset is not None: + self._std_offset = datetime.timedelta(seconds=stdoffset) + else: + self._std_offset = ZERO + + if dstoffset is not None: + self._dst_offset = datetime.timedelta(seconds=dstoffset) + elif dstabbr and stdoffset is not None: + self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) + else: + self._dst_offset = ZERO + + if dstabbr and start is None: + self._start_delta = relativedelta.relativedelta( + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + else: + self._start_delta = start + + if dstabbr and end is None: + self._end_delta = relativedelta.relativedelta( + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + else: + self._end_delta = end + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = bool(self._start_delta) + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. 
+ """ + if not self.hasdst: + return None + + base_year = datetime.datetime(year, 1, 1) + + start = base_year + self._start_delta + end = base_year + self._end_delta + + return (start, end) + + def __eq__(self, other): + if not isinstance(other, tzrange): + return NotImplemented + + return (self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr and + self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._start_delta == other._start_delta and + self._end_delta == other._end_delta) + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +@six.add_metaclass(_TzStrFactory) +class tzstr(tzrange): + """ + ``tzstr`` objects are time zone objects specified by a time-zone string as + it would be passed to a ``TZ`` variable on POSIX-style systems (see + the `GNU C Library: TZ Variable`_ for more details). + + There is one notable exception, which is that POSIX-style time zones use an + inverted offset format, so normally ``GMT+3`` would be parsed as an offset + 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an + offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX + behavior, pass a ``True`` value to ``posix_offset``. + + The :class:`tzrange` object provides the same functionality, but is + specified using :class:`relativedelta.relativedelta` objects. rather than + strings. + + :param s: + A time zone string in ``TZ`` variable format. This can be a + :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: + :class:`unicode`) or a stream emitting unicode characters + (e.g. :class:`StringIO`). + + :param posix_offset: + Optional. If set to ``True``, interpret strings such as ``GMT+3`` or + ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the + POSIX standard. + + .. 
caution:: + + Prior to version 2.7.0, this function also supported time zones + in the format: + + * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` + * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` + + This format is non-standard and has been deprecated; this function + will raise a :class:`DeprecatedTZFormatWarning` until + support is removed in a future version. + + .. _`GNU C Library: TZ Variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + """ + def __init__(self, s, posix_offset=False): + global parser + from dateutil.parser import _parser as parser + + self._s = s + + res = parser._parsetz(s) + if res is None or res.any_unused_tokens: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC") and not posix_offset: + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. + tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + self.hasdst = bool(self._start_delta) + + def _delta(self, x, isend=0): + from dateutil import relativedelta + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. 
+ if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. + delta = self._dst_offset - self._std_offset + kwargs["seconds"] -= delta.seconds + delta.days * 86400 + return relativedelta.relativedelta(**kwargs) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +class _tzicalvtzcomp(object): + def __init__(self, tzoffsetfrom, tzoffsetto, isdst, + tzname=None, rrule=None): + self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) + self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) + self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom + self.isdst = isdst + self.tzname = tzname + self.rrule = rrule + + +class _tzicalvtz(_tzinfo): + def __init__(self, tzid, comps=[]): + super(_tzicalvtz, self).__init__() + + self._tzid = tzid + self._comps = comps + self._cachedate = [] + self._cachecomp = [] + self._cache_lock = _thread.allocate_lock() + + def _find_comp(self, dt): + if len(self._comps) == 1: + return self._comps[0] + + dt = dt.replace(tzinfo=None) + + try: + with self._cache_lock: + return self._cachecomp[self._cachedate.index( + (dt, self._fold(dt)))] + except ValueError: + pass + + lastcompdt = None + lastcomp = None + + for comp in self._comps: + compdt = self._find_compdt(comp, dt) + + if compdt and (not lastcompdt or lastcompdt < compdt): + lastcompdt = compdt + lastcomp = comp + + if not lastcomp: + # RFC says nothing about what to do when a given + # time is before the first onset date. We'll look for the + # first standard component, or the first component, if + # none is found. 
+            for comp in self._comps:
+                if not comp.isdst:
+                    lastcomp = comp
+                    break
+            else:
+                lastcomp = comp[0]
+
+        with self._cache_lock:
+            self._cachedate.insert(0, (dt, self._fold(dt)))
+            self._cachecomp.insert(0, lastcomp)
+
+            if len(self._cachedate) > 10:
+                self._cachedate.pop()
+                self._cachecomp.pop()
+
+        return lastcomp
+
+    def _find_compdt(self, comp, dt):
+        if comp.tzoffsetdiff < ZERO and self._fold(dt):
+            dt -= comp.tzoffsetdiff
+
+        compdt = comp.rrule.before(dt, inc=True)
+
+        return compdt
+
+    def utcoffset(self, dt):
+        if dt is None:
+            return None
+
+        return self._find_comp(dt).tzoffsetto
+
+    def dst(self, dt):
+        comp = self._find_comp(dt)
+        if comp.isdst:
+            return comp.tzoffsetdiff
+        else:
+            return ZERO
+
+    @tzname_in_python2
+    def tzname(self, dt):
+        return self._find_comp(dt).tzname
+
+    def __repr__(self):
+        return "<tzicalvtz %s>" % repr(self._tzid)
+
+    __reduce__ = object.__reduce__
+
+
+class tzical(object):
+    """
+    This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure
+    as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects.
+
+    :param `fileobj`:
+        A file or stream in iCalendar format, which should be UTF-8 encoded
+        with CRLF endings.
+
+    .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545
+    """
+    def __init__(self, fileobj):
+        global rrule
+        from dateutil import rrule
+
+        if isinstance(fileobj, string_types):
+            self._s = fileobj
+            # ical should be encoded in UTF-8 with CRLF
+            fileobj = open(fileobj, 'r')
+        else:
+            self._s = getattr(fileobj, 'name', repr(fileobj))
+            fileobj = _nullcontext(fileobj)
+
+        self._vtz = {}
+
+        with fileobj as fobj:
+            self._parse_rfc(fobj.read())
+
+    def keys(self):
+        """
+        Retrieves the available time zones as a list.
+        """
+        return list(self._vtz.keys())
+
+    def get(self, tzid=None):
+        """
+        Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``.
+
+        :param tzid:
+            If there is exactly one time zone available, omitting ``tzid``
+            or passing :py:const:`None` value returns it.
Otherwise a valid + key (which can be retrieved from :func:`keys`) is required. + + :raises ValueError: + Raised if ``tzid`` is not specified but there are either more + or fewer than 1 zone defined. + + :returns: + Returns either a :py:class:`datetime.tzinfo` object representing + the relevant time zone or :py:const:`None` if the ``tzid`` was + not found. + """ + if tzid is None: + if len(self._vtz) == 0: + raise ValueError("no timezones defined") + elif len(self._vtz) > 1: + raise ValueError("more than one timezone available") + tzid = next(iter(self._vtz)) + + return self._vtz.get(tzid) + + def _parse_offset(self, s): + s = s.strip() + if not s: + raise ValueError("empty offset") + if s[0] in ('+', '-'): + signal = (-1, +1)[s[0] == '+'] + s = s[1:] + else: + signal = +1 + if len(s) == 4: + return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal + elif len(s) == 6: + return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal + else: + raise ValueError("invalid offset: " + s) + + def _parse_rfc(self, s): + lines = s.splitlines() + if not lines: + raise ValueError("empty string") + + # Unfold + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + + tzid = None + comps = [] + invtz = False + comptype = None + for line in lines: + if not line: + continue + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0].upper() + parms = parms[1:] + if invtz: + if name == "BEGIN": + if value in ("STANDARD", "DAYLIGHT"): + # Process component + pass + else: + raise ValueError("unknown component: "+value) + comptype = value + founddtstart = False + tzoffsetfrom = None + tzoffsetto = None + rrulelines = [] + tzname = None + elif name == "END": + if value == "VTIMEZONE": + if comptype: + raise ValueError("component not closed: "+comptype) + if not tzid: + raise 
ValueError("mandatory TZID not found")
+                        if not comps:
+                            raise ValueError(
+                                "at least one component is needed")
+                        # Process vtimezone
+                        self._vtz[tzid] = _tzicalvtz(tzid, comps)
+                        invtz = False
+                    elif value == comptype:
+                        if not founddtstart:
+                            raise ValueError("mandatory DTSTART not found")
+                        if tzoffsetfrom is None:
+                            raise ValueError(
+                                "mandatory TZOFFSETFROM not found")
+                        if tzoffsetto is None:
+                            raise ValueError(
+                                "mandatory TZOFFSETTO not found")
+                        # Process component
+                        rr = None
+                        if rrulelines:
+                            rr = rrule.rrulestr("\n".join(rrulelines),
+                                                compatible=True,
+                                                ignoretz=True,
+                                                cache=True)
+                        comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto,
+                                              (comptype == "DAYLIGHT"),
+                                              tzname, rr)
+                        comps.append(comp)
+                        comptype = None
+                    else:
+                        raise ValueError("invalid component end: "+value)
+                elif comptype:
+                    if name == "DTSTART":
+                        # DTSTART in VTIMEZONE takes a subset of valid RRULE
+                        # values under RFC 5545.
+                        for parm in parms:
+                            if parm != 'VALUE=DATE-TIME':
+                                msg = ('Unsupported DTSTART param in ' +
+                                       'VTIMEZONE: ' + parm)
+                                raise ValueError(msg)
+                        rrulelines.append(line)
+                        founddtstart = True
+                    elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"):
+                        rrulelines.append(line)
+                    elif name == "TZOFFSETFROM":
+                        if parms:
+                            raise ValueError(
+                                "unsupported %s parm: %s " % (name, parms[0]))
+                        tzoffsetfrom = self._parse_offset(value)
+                    elif name == "TZOFFSETTO":
+                        if parms:
+                            raise ValueError(
+                                "unsupported TZOFFSETTO parm: "+parms[0])
+                        tzoffsetto = self._parse_offset(value)
+                    elif name == "TZNAME":
+                        if parms:
+                            raise ValueError(
+                                "unsupported TZNAME parm: "+parms[0])
+                        tzname = value
+                    elif name == "COMMENT":
+                        pass
+                    else:
+                        raise ValueError("unsupported property: "+name)
+                else:
+                    if name == "TZID":
+                        if parms:
+                            raise ValueError(
+                                "unsupported TZID parm: "+parms[0])
+                        tzid = value
+                    elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"):
+                        pass
+                    else:
+                        raise ValueError("unsupported property: "+name)
+            elif name == "BEGIN" and value == "VTIMEZONE":
+                tzid = None
+
comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def __get_gettz(): + tzlocal_classes = (tzlocal,) + if tzwinlocal is not None: + tzlocal_classes += (tzwinlocal,) + + class GettzFunc(object): + """ + Retrieve a time zone object from a string representation + + This function is intended to retrieve the :py:class:`tzinfo` subclass + that best represents the time zone that would be used if a POSIX + `TZ variable`_ were set to the same value. + + If no argument or an empty string is passed to ``gettz``, local time + is returned: + + .. code-block:: python3 + + >>> gettz() + tzfile('/etc/localtime') + + This function is also the preferred way to map IANA tz database keys + to :class:`tzfile` objects: + + .. code-block:: python3 + + >>> gettz('Pacific/Kiritimati') + tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') + + On Windows, the standard is extended to include the Windows-specific + zone names provided by the operating system: + + .. code-block:: python3 + + >>> gettz('Egypt Standard Time') + tzwin('Egypt Standard Time') + + Passing a GNU ``TZ`` style string time zone specification returns a + :class:`tzstr` object: + + .. code-block:: python3 + + >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + + :param name: + A time zone name (IANA, or, on Windows, Windows keys), location of + a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone + specifier. An empty string, no argument or ``None`` is interpreted + as local time. + + :return: + Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` + subclasses. + + .. 
versionchanged:: 2.7.0 + + After version 2.7.0, any two calls to ``gettz`` using the same + input strings will return the same object: + + .. code-block:: python3 + + >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') + True + + In addition to improving performance, this ensures that + `"same zone" semantics`_ are used for datetimes in the same zone. + + + .. _`TZ variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + + .. _`"same zone" semantics`: + https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html + """ + def __init__(self): + + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache_size = 8 + self.__strong_cache = OrderedDict() + self._cache_lock = _thread.allocate_lock() + + def __call__(self, name=None): + with self._cache_lock: + rv = self.__instances.get(name, None) + + if rv is None: + rv = self.nocache(name=name) + if not (name is None + or isinstance(rv, tzlocal_classes) + or rv is None): + # tzlocal is slightly more complicated than the other + # time zone providers because it depends on environment + # at construction time, so don't cache that. + # + # We also cannot store weak references to None, so we + # will also not store that. 
+ self.__instances[name] = rv + else: + # No need for strong caching, return immediately + return rv + + self.__strong_cache[name] = self.__strong_cache.pop(name, rv) + + if len(self.__strong_cache) > self.__strong_cache_size: + self.__strong_cache.popitem(last=False) + + return rv + + def set_cache_size(self, size): + with self._cache_lock: + self.__strong_cache_size = size + while len(self.__strong_cache) > size: + self.__strong_cache.popitem(last=False) + + def cache_clear(self): + with self._cache_lock: + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache.clear() + + @staticmethod + def nocache(name=None): + """A non-cached version of gettz""" + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name == ":": + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + try: + if name.startswith(":"): + name = name[1:] + except TypeError as e: + if isinstance(name, bytes): + new_msg = "gettz argument should be str, not bytes" + six.raise_from(TypeError(new_msg), e) + else: + raise + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except (WindowsError, UnicodeEncodeError): + # UnicodeEncodeError is for Python 2.7 compat + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = 
get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name is not a tzstr unless it has at least + # one offset. For short values of "name", an + # explicit for loop seems to be the fastest way + # To determine if a string contains a digit + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = UTC + elif name in time.tzname: + tz = tzlocal() + return tz + + return GettzFunc() + + +gettz = __get_gettz() +del __get_gettz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" exists in + ``tz``. + + .. versionadded:: 2.7.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + tz = dt.tzinfo + + dt = dt.replace(tzinfo=None) + + # This is essentially a test of whether or not the datetime can survive + # a round trip to UTC. + dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz) + dt_rt = dt_rt.replace(tzinfo=None) + + return dt == dt_rt + + +def datetime_ambiguous(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + is ambiguous (i.e if there are two times differentiated only by their DST + status). + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. 
+ + :return: + Returns a boolean value whether or not the "wall time" is ambiguous in + ``tz``. + + .. versionadded:: 2.6.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + + tz = dt.tzinfo + + # If a time zone defines its own "is_ambiguous" function, we'll use that. + is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) + if is_ambiguous_fn is not None: + try: + return tz.is_ambiguous(dt) + except Exception: + pass + + # If it doesn't come out and tell us it's ambiguous, we'll just check if + # the fold attribute has any effect on this particular date and time. + dt = dt.replace(tzinfo=tz) + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dst = wall_0.dst() == wall_1.dst() + + return not (same_offset and same_dst) + + +def resolve_imaginary(dt): + """ + Given a datetime that may be imaginary, return an existing datetime. + + This function assumes that an imaginary datetime represents what the + wall time would be in a zone had the offset transition not occurred, so + it will always fall forward by the transition's change in offset. + + .. doctest:: + + >>> from dateutil import tz + >>> from datetime import datetime + >>> NYC = tz.gettz('America/New_York') + >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) + 2017-03-12 03:30:00-04:00 + + >>> KIR = tz.gettz('Pacific/Kiritimati') + >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) + 1995-01-02 12:30:00+14:00 + + As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, + existing datetime, so a round-trip to and from UTC is sufficient to get + an extant datetime, however, this generally "falls back" to an earlier time + rather than falling forward to the STD side (though no guarantees are made + about this behavior). + + :param dt: + A :class:`datetime.datetime` which may or may not exist. 
+ + :return: + Returns an existing :class:`datetime.datetime`. If ``dt`` was not + imaginary, the datetime returned is guaranteed to be the same object + passed to the function. + + .. versionadded:: 2.7.0 + """ + if dt.tzinfo is not None and not datetime_exists(dt): + + curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() + old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() + + dt += curr_offset - old_offset + + return dt + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in + seconds since January 1, 1970, ignoring the time zone. + """ + return (dt.replace(tzinfo=None) - EPOCH).total_seconds() + + +if sys.version_info >= (3, 6): + def _get_supported_offset(second_offset): + return second_offset +else: + def _get_supported_offset(second_offset): + # For python pre-3.6, round to full-minutes if that's not the case. + # Python's datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 + # for some information. + old_offset = second_offset + calculated_offset = 60 * ((second_offset + 30) // 60) + return calculated_offset + + +try: + # Python 3.7 feature + from contextlib import nullcontext as _nullcontext +except ImportError: + class _nullcontext(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/pipenv/vendor/dateutil/tz/win.py b/pipenv/vendor/dateutil/tz/win.py new file mode 100644 index 0000000000..cde07ba792 --- /dev/null +++ b/pipenv/vendor/dateutil/tz/win.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +""" +This module provides an interface to the native time zone data on Windows, +including :py:class:`datetime.tzinfo` implementations. 
+ +Attempting to import this module on a non-Windows platform will raise an +:py:obj:`ImportError`. +""" +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing ``tzres.dll``, which contains timezone name related + resources. + + .. versionadded:: 2.5.0 + """ + p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char + + def __init__(self, tzres_loc='tzres.dll'): + # Load the user32 DLL so we can load strings from tzres + user32 = ctypes.WinDLL('user32') + + # Specify the LoadStringW function + user32.LoadStringW.argtypes = (wintypes.HINSTANCE, + wintypes.UINT, + wintypes.LPWSTR, + ctypes.c_int) + + self.LoadStringW = user32.LoadStringW + self._tzres = ctypes.WinDLL(tzres_loc) + self.tzres_loc = tzres_loc + + def load_name(self, offset): + """ + Load a timezone name from a DLL offset (integer). 
+ + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.load_name(112)) + 'Eastern Standard Time' + + :param offset: + A positive integer value referring to a string from the tzres dll. + + .. note:: + + Offsets found in the registry are generally of the form + ``@tzres.dll,-114``. The offset in this case is 114, not -114. + + """ + resource = self.p_wchar() + lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) + nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) + return resource[:nchar] + + def name_from_string(self, tzname_str): + """ + Parse strings as returned from the Windows registry into the time zone + name as defined in the registry. + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.name_from_string('@tzres.dll,-251')) + 'Dateline Daylight Time' + >>> print(tzr.name_from_string('Eastern Standard Time')) + 'Eastern Standard Time' + + :param tzname_str: + A timezone name string as returned from a Windows registry key. + + :return: + Returns the localized timezone string from tzres.dll if the string + is of the form `@tzres.dll,-offset`, else returns the input string. + """ + if not tzname_str.startswith('@'): + return tzname_str + + name_splt = tzname_str.split(',-') + try: + offset = int(name_splt[1]) + except: + raise ValueError("Malformed timezone string.") + + return self.load_name(offset) + + +class tzwinbase(tzrangebase): + """tzinfo class based on win32's timezones available in the registry.""" + def __init__(self): + raise NotImplementedError('tzwinbase is an abstract base class') + + def __eq__(self, other): + # Compare on all relevant dimensions, including name. 
+ if not isinstance(other, tzwinbase): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._stddayofweek == other._stddayofweek and + self._dstdayofweek == other._dstdayofweek and + self._stdweeknumber == other._stdweeknumber and + self._dstweeknumber == other._dstweeknumber and + self._stdhour == other._stdhour and + self._dsthour == other._dsthour and + self._stdminute == other._stdminute and + self._dstminute == other._dstminute and + self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr) + + @staticmethod + def list(): + """Return a list of all time zones known to the system.""" + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZKEYNAME) as tzkey: + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + return result + + def display(self): + """ + Return the display name of the time zone. + """ + return self._display + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. 
+ """ + + if not self.hasdst: + return None + + dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, + self._dsthour, self._dstminute, + self._dstweeknumber) + + dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, + self._stdhour, self._stdminute, + self._stdweeknumber) + + # Ambiguous dates default to the STD side + dstoff -= self._dst_base_offset + + return dston, dstoff + + def _get_hasdst(self): + return self._dstmonth != 0 + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +class tzwin(tzwinbase): + """ + Time zone object created from the zone info in the Windows registry + + These are similar to :py:class:`dateutil.tz.tzrange` objects in that + the time zone data is provided in the format of a single offset rule + for either 0 or 2 time zone transitions per year. + + :param: name + The name of a Windows time zone key, e.g. "Eastern Standard Time". + The full list of keys can be retrieved with :func:`tzwin.list`. + """ + + def __init__(self, name): + self._name = name + + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + keydict = valuestodict(tzkey) + + self._std_abbr = keydict["Std"] + self._dst_abbr = keydict["Dlt"] + + self._display = keydict["Display"] + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=3l16h", keydict["TZI"]) + stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + dstoffset = stdoffset-tup[2] # + DaylightBias * -1 + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) 
= tup[4:9] + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[12:17] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwin(%s)" % repr(self._name) + + def __reduce__(self): + return (self.__class__, (self._name,)) + + +class tzwinlocal(tzwinbase): + """ + Class representing the local time zone information in the Windows registry + + While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time` + module) to retrieve time zone information, ``tzwinlocal`` retrieves the + rules directly from the Windows registry and creates an object like + :class:`dateutil.tz.tzwin`. + + Because Windows does not have an equivalent of :func:`time.tzset`, on + Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the + time zone settings *at the time that the process was started*, meaning + changes to the machine's time zone settings during the run of a program + on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`. + Because ``tzwinlocal`` reads the registry directly, it is unaffected by + this issue. 
+ """ + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. 
+ return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/pipenv/vendor/dateutil/tzwin.py b/pipenv/vendor/dateutil/tzwin.py new file mode 100644 index 0000000000..cebc673e40 --- /dev/null +++ b/pipenv/vendor/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/pipenv/vendor/dateutil/utils.py b/pipenv/vendor/dateutil/utils.py new file mode 100644 index 0000000000..44d9c99455 --- /dev/null +++ b/pipenv/vendor/dateutil/utils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +This module offers general convenience and utility functions for dealing with +datetimes. + +.. 
versionadded:: 2.7.0 +""" +from __future__ import unicode_literals + +from datetime import datetime, time + + +def today(tzinfo=None): + """ + Returns a :py:class:`datetime` representing the current day at midnight + + :param tzinfo: + The time zone to attach (also used to determine the current day). + + :return: + A :py:class:`datetime.datetime` object representing the current day + at midnight. + """ + + dt = datetime.now(tzinfo) + return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) + + +def default_tzinfo(dt, tzinfo): + """ + Sets the ``tzinfo`` parameter on naive datetimes only + + This is useful for example when you are provided a datetime that may have + either an implicit or explicit time zone, such as when parsing a time zone + string. + + .. doctest:: + + >>> from dateutil.tz import tzoffset + >>> from dateutil.parser import parse + >>> from dateutil.utils import default_tzinfo + >>> dflt_tz = tzoffset("EST", -18000) + >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) + 2014-01-01 12:30:00+00:00 + >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) + 2014-01-01 12:30:00-05:00 + + :param dt: + The datetime on which to replace the time zone + + :param tzinfo: + The :py:class:`datetime.tzinfo` subclass instance to assign to + ``dt`` if (and only if) it is naive. + + :return: + Returns an aware :py:class:`datetime.datetime`. + """ + if dt.tzinfo is not None: + return dt + else: + return dt.replace(tzinfo=tzinfo) + + +def within_delta(dt1, dt2, delta): + """ + Useful for comparing two datetimes that may have a negligible difference + to be considered equal. 
+ """ + delta = abs(delta) + difference = dt1 - dt2 + return -delta <= difference <= delta diff --git a/pipenv/vendor/dateutil/zoneinfo/__init__.py b/pipenv/vendor/dateutil/zoneinfo/__init__.py new file mode 100644 index 0000000000..34f11ad66c --- /dev/null +++ b/pipenv/vendor/dateutil/zoneinfo/__init__.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +import warnings +import json + +from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO + +from dateutil.tz import tzfile as _tzfile + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +ZONEFILENAME = "dateutil-zoneinfo.tar.gz" +METADATA_FN = 'METADATA' + + +class tzfile(_tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + + +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None + + +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with TarFile.open(fileobj=zonefile_stream) as tf: + self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) + for zf in tf.getmembers() + if zf.isfile() and zf.name != METADATA_FN} + # deal with links: They'll point to their parent object. Less + # waste of memory + links = {zl.name: self.zones[zl.linkname] + for zl in tf.getmembers() if + zl.islnk() or zl.issym()} + self.zones.update(links) + try: + metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) + metadata_str = metadata_json.read().decode('UTF-8') + self.metadata = json.loads(metadata_str) + except KeyError: + # no metadata in tar file + self.metadata = None + else: + self.zones = {} + self.metadata = None + + def get(self, name, default=None): + """ + Wrapper for :func:`ZoneInfoFile.zones.get`. This is a convenience method + for retrieving zones from the zone dictionary. + + :param name: + The name of the zone to retrieve. 
(Generally IANA zone names) + + :param default: + The value to return in the event of a missing key. + + .. versionadded:: 2.6.0 + + """ + return self.zones.get(name, default) + + +# The current API has gettz as a module function, although in fact it taps into +# a stateful class. So as a workaround for now, without changing the API, we +# will create a new "global" class instance the first time a user requests a +# timezone. Ugly, but adheres to the api. +# +# TODO: Remove after deprecation period. +_CLASS_ZONE_INSTANCE = [] + + +def get_zonefile_instance(new_instance=False): + """ + This is a convenience function which provides a :class:`ZoneInfoFile` + instance using the data provided by the ``dateutil`` package. By default, it + caches a single instance of the ZoneInfoFile object and returns that. + + :param new_instance: + If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and + used as the cached instance for the next call. Otherwise, new instances + are created only as necessary. + + :return: + Returns a :class:`ZoneInfoFile` object. + + .. versionadded:: 2.6 + """ + if new_instance: + zif = None + else: + zif = getattr(get_zonefile_instance, '_cached_instance', None) + + if zif is None: + zif = ZoneInfoFile(getzoneinfofile_stream()) + + get_zonefile_instance._cached_instance = zif + + return zif + + +def gettz(name): + """ + This retrieves a time zone from the local zoneinfo tarball that is packaged + with dateutil. + + :param name: + An IANA-style time zone name, as found in the zoneinfo file. + + :return: + Returns a :class:`dateutil.tz.tzfile` time zone object. + + .. warning:: + It is generally inadvisable to use this function, and it is only + provided for API compatibility with earlier versions. This is *not* + equivalent to ``dateutil.tz.gettz()``, which selects an appropriate + time zone based on the inputs, favoring system zoneinfo. 
This is ONLY + for accessing the dateutil-specific zoneinfo (which may be out of + date compared to the system zoneinfo). + + .. deprecated:: 2.6 + If you need to use a specific zoneinfofile over the system zoneinfo, + instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call + :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. + + Use :func:`get_zonefile_instance` to retrieve an instance of the + dateutil-provided zoneinfo. + """ + warnings.warn("zoneinfo.gettz() will be removed in future versions, " + "to use the dateutil-provided zoneinfo files, instantiate a " + "ZoneInfoFile object and use ZoneInfoFile.zones.get() " + "instead. See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].zones.get(name) + + +def gettz_db_metadata(): + """ Get the zonefile metadata + + See `zonefile_metadata`_ + + :returns: + A dictionary with the database metadata + + .. deprecated:: 2.6 + See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, + query the attribute ``zoneinfo.ZoneInfoFile.metadata``. + """ + warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " + "versions, to use the dateutil-provided zoneinfo files, " + "ZoneInfoFile object and query the 'metadata' attribute " + "instead. 
See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/pipenv/vendor/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/pipenv/vendor/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000000..89e83517b5 Binary files /dev/null and b/pipenv/vendor/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/pipenv/vendor/dateutil/zoneinfo/rebuild.py b/pipenv/vendor/dateutil/zoneinfo/rebuild.py new file mode 100644 index 0000000000..78f0d1a0ce --- /dev/null +++ b/pipenv/vendor/dateutil/zoneinfo/rebuild.py @@ -0,0 +1,53 @@ +import logging +import os +import tempfile +import shutil +import json +from subprocess import check_call +from tarfile import TarFile + +from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME + + +def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): + """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* + + filename is the timezone tarball from ``ftp.iana.org/tz``. 
+ + """ + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + try: + with TarFile.open(filename) as tf: + for name in zonegroups: + tf.extract(name, tmpdir) + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + try: + check_call(["zic", "-d", zonedir] + filepaths) + except OSError as e: + _print_on_nosuchfile(e) + raise + # write metadata file + with open(os.path.join(zonedir, METADATA_FN), 'w') as f: + json.dump(metadata, f, indent=4, sort_keys=True) + target = os.path.join(moduledir, ZONEFILENAME) + with TarFile.open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + finally: + shutil.rmtree(tmpdir) + + +def _print_on_nosuchfile(e): + """Print helpful troubleshooting message + + e is an exception raised by subprocess.check_call() + + """ + if e.errno == 2: + logging.error( + "Could not find zic. Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") diff --git a/pipenv/vendor/importlib_resources/_compat.py b/pipenv/vendor/importlib_resources/_compat.py index 48ca6afdab..242a8487a3 100644 --- a/pipenv/vendor/importlib_resources/_compat.py +++ b/pipenv/vendor/importlib_resources/_compat.py @@ -47,6 +47,19 @@ class ABC(object): # type: ignore from zipp import Path as ZipPath # type: ignore +try: + from typing import runtime_checkable # type: ignore +except ImportError: + def runtime_checkable(cls): # type: ignore + return cls + + +try: + from typing import Protocol # type: ignore +except ImportError: + Protocol = ABC # type: ignore + + class PackageSpec(object): def __init__(self, **kwargs): vars(self).update(kwargs) diff --git a/pipenv/vendor/importlib_resources/abc.py b/pipenv/vendor/importlib_resources/abc.py index 28596a4a58..1f2c25a74e 100644 --- a/pipenv/vendor/importlib_resources/abc.py +++ b/pipenv/vendor/importlib_resources/abc.py @@ -2,7 
+2,7 @@ import abc -from ._compat import ABC, FileNotFoundError +from ._compat import ABC, FileNotFoundError, runtime_checkable, Protocol # Use mypy's comment syntax for Python 2 compatibility try: @@ -57,7 +57,8 @@ def contents(self): raise FileNotFoundError -class Traversable(ABC): +@runtime_checkable +class Traversable(Protocol): """ An object with a subset of pathlib.Path methods suitable for traversing directories and opening files. @@ -115,6 +116,13 @@ def open(self, mode='r', *args, **kwargs): accepted by io.TextIOWrapper. """ + @abc.abstractproperty + def name(self): + # type: () -> str + """ + The base name of this object without any parent references. + """ + class TraversableResources(ResourceReader): @abc.abstractmethod diff --git a/pipenv/vendor/importlib_resources/tests/test_files.py b/pipenv/vendor/importlib_resources/tests/test_files.py new file mode 100644 index 0000000000..5a9cf9c754 --- /dev/null +++ b/pipenv/vendor/importlib_resources/tests/test_files.py @@ -0,0 +1,39 @@ +import typing +import unittest + +import importlib_resources as resources +from importlib_resources.abc import Traversable +from . import data01 +from . 
import util + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text() + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/pipenv/vendor/jinja2/__init__.py b/pipenv/vendor/jinja2/__init__.py index 7f4a1c55a8..1229ba4275 100644 --- a/pipenv/vendor/jinja2/__init__.py +++ b/pipenv/vendor/jinja2/__init__.py @@ -41,4 +41,4 @@ from .utils import is_undefined from .utils import select_autoescape -__version__ = "2.11.1" +__version__ = "2.11.2" diff --git a/pipenv/vendor/jinja2/asyncfilters.py b/pipenv/vendor/jinja2/asyncfilters.py index d29f6c62d2..3d98dbcc00 100644 --- a/pipenv/vendor/jinja2/asyncfilters.py +++ b/pipenv/vendor/jinja2/asyncfilters.py @@ -26,17 +26,16 @@ async def async_select_or_reject(args, kwargs, modfunc, lookup_attr): def dualfilter(normal_filter, async_filter): wrap_evalctx = False - if getattr(normal_filter, "environmentfilter", False): + if getattr(normal_filter, "environmentfilter", False) is True: def is_async(args): return args[0].is_async wrap_evalctx = False else: - if not getattr(normal_filter, "evalcontextfilter", False) and not getattr( - normal_filter, "contextfilter", False - ): - wrap_evalctx = True + has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True + has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True + 
wrap_evalctx = not has_evalctxfilter and not has_ctxfilter def is_async(args): return args[0].environment.is_async diff --git a/pipenv/vendor/jinja2/compiler.py b/pipenv/vendor/jinja2/compiler.py index f450ec6e31..63297b42c3 100644 --- a/pipenv/vendor/jinja2/compiler.py +++ b/pipenv/vendor/jinja2/compiler.py @@ -1307,13 +1307,13 @@ def _make_finalize(self): def finalize(value): return default(env_finalize(value)) - if getattr(env_finalize, "contextfunction", False): + if getattr(env_finalize, "contextfunction", False) is True: src += "context, " finalize = None # noqa: F811 - elif getattr(env_finalize, "evalcontextfunction", False): + elif getattr(env_finalize, "evalcontextfunction", False) is True: src += "context.eval_ctx, " finalize = None - elif getattr(env_finalize, "environmentfunction", False): + elif getattr(env_finalize, "environmentfunction", False) is True: src += "environment, " def finalize(value): @@ -1689,11 +1689,11 @@ def visit_Filter(self, node, frame): func = self.environment.filters.get(node.name) if func is None: self.fail("no filter named %r" % node.name, node.lineno) - if getattr(func, "contextfilter", False): + if getattr(func, "contextfilter", False) is True: self.write("context, ") - elif getattr(func, "evalcontextfilter", False): + elif getattr(func, "evalcontextfilter", False) is True: self.write("context.eval_ctx, ") - elif getattr(func, "environmentfilter", False): + elif getattr(func, "environmentfilter", False) is True: self.write("environment, ") # if the filter node is None we are inside a filter block diff --git a/pipenv/vendor/jinja2/debug.py b/pipenv/vendor/jinja2/debug.py index d2c5a06bf6..5d8aec31d0 100644 --- a/pipenv/vendor/jinja2/debug.py +++ b/pipenv/vendor/jinja2/debug.py @@ -245,10 +245,7 @@ def controller(op): class _CTraceback(ctypes.Structure): _fields_ = [ # Extra PyObject slots when compiled with Py_TRACE_REFS. 
- ( - "PyObject_HEAD", - ctypes.c_byte * (32 if hasattr(sys, "getobjects") else 16), - ), + ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()), # Only care about tb_next as an object, not a traceback. ("tb_next", ctypes.py_object), ] diff --git a/pipenv/vendor/jinja2/environment.py b/pipenv/vendor/jinja2/environment.py index bf44b9deb4..8430390eea 100644 --- a/pipenv/vendor/jinja2/environment.py +++ b/pipenv/vendor/jinja2/environment.py @@ -492,20 +492,20 @@ def call_filter( if func is None: fail_for_missing_callable("no filter named %r", name) args = [value] + list(args or ()) - if getattr(func, "contextfilter", False): + if getattr(func, "contextfilter", False) is True: if context is None: raise TemplateRuntimeError( "Attempted to invoke context filter without context" ) args.insert(0, context) - elif getattr(func, "evalcontextfilter", False): + elif getattr(func, "evalcontextfilter", False) is True: if eval_ctx is None: if context is not None: eval_ctx = context.eval_ctx else: eval_ctx = EvalContext(self) args.insert(0, eval_ctx) - elif getattr(func, "environmentfilter", False): + elif getattr(func, "environmentfilter", False) is True: args.insert(0, self) return func(*args, **(kwargs or {})) diff --git a/pipenv/vendor/jinja2/filters.py b/pipenv/vendor/jinja2/filters.py index 1af7ac88a7..9741567351 100644 --- a/pipenv/vendor/jinja2/filters.py +++ b/pipenv/vendor/jinja2/filters.py @@ -761,7 +761,7 @@ def do_wordwrap( def do_wordcount(s): """Count the words in that string.""" - return len(_word_re.findall(s)) + return len(_word_re.findall(soft_unicode(s))) def do_int(value, default=0, base=10): diff --git a/pipenv/vendor/jinja2/lexer.py b/pipenv/vendor/jinja2/lexer.py index a2b44e926b..552356a12d 100644 --- a/pipenv/vendor/jinja2/lexer.py +++ b/pipenv/vendor/jinja2/lexer.py @@ -681,6 +681,8 @@ def tokeniter(self, source, name, filename=None, state=None): source_length = len(source) balancing_stack = [] lstrip_unless_re = self.lstrip_unless_re + 
newlines_stripped = 0 + line_starting = True while 1: # tokenizer loop @@ -717,7 +719,9 @@ def tokeniter(self, source, name, filename=None, state=None): if strip_sign == "-": # Strip all whitespace between the text and the tag. - groups = (text.rstrip(),) + groups[1:] + stripped = text.rstrip() + newlines_stripped = text[len(stripped) :].count("\n") + groups = (stripped,) + groups[1:] elif ( # Not marked for preserving whitespace. strip_sign != "+" @@ -728,11 +732,11 @@ def tokeniter(self, source, name, filename=None, state=None): ): # The start of text between the last newline and the tag. l_pos = text.rfind("\n") + 1 - - # If there's only whitespace between the newline and the - # tag, strip it. - if not lstrip_unless_re.search(text, l_pos): - groups = (text[:l_pos],) + groups[1:] + if l_pos > 0 or line_starting: + # If there's only whitespace between the newline and the + # tag, strip it. + if not lstrip_unless_re.search(text, l_pos): + groups = (text[:l_pos],) + groups[1:] for idx, token in enumerate(tokens): # failure group @@ -758,7 +762,8 @@ def tokeniter(self, source, name, filename=None, state=None): data = groups[idx] if data or token not in ignore_if_empty: yield lineno, token, data - lineno += data.count("\n") + lineno += data.count("\n") + newlines_stripped + newlines_stripped = 0 # strings as token just are yielded as it. else: @@ -790,6 +795,8 @@ def tokeniter(self, source, name, filename=None, state=None): yield lineno, tokens, data lineno += data.count("\n") + line_starting = m.group()[-1:] == "\n" + # fetch new position into new variable so that we can check # if there is a internal parsing error which would result # in an infinite loop diff --git a/pipenv/vendor/jinja2/loaders.py b/pipenv/vendor/jinja2/loaders.py index ce5537a03c..457c4b59a7 100644 --- a/pipenv/vendor/jinja2/loaders.py +++ b/pipenv/vendor/jinja2/loaders.py @@ -3,11 +3,9 @@ sources. 
""" import os -import pkgutil import sys import weakref from hashlib import sha1 -from importlib import import_module from os import path from types import ModuleType @@ -217,141 +215,75 @@ def list_templates(self): class PackageLoader(BaseLoader): - """Load templates from a directory in a Python package. + """Load templates from python eggs or packages. It is constructed with + the name of the python package and the path to the templates in that + package:: - :param package_name: Import name of the package that contains the - template directory. - :param package_path: Directory within the imported package that - contains the templates. - :param encoding: Encoding of template files. + loader = PackageLoader('mypackage', 'views') - The following example looks up templates in the ``pages`` directory - within the ``project.ui`` package. + If the package path is not given, ``'templates'`` is assumed. - .. code-block:: python - - loader = PackageLoader("project.ui", "pages") - - Only packages installed as directories (standard pip behavior) or - zip/egg files (less common) are supported. The Python API for - introspecting data in packages is too limited to support other - installation methods the way this loader requires. - - There is limited support for :pep:`420` namespace packages. The - template directory is assumed to only be in one namespace - contributor. Zip files contributing to a namespace are not - supported. - - .. versionchanged:: 2.11.0 - No longer uses ``setuptools`` as a dependency. - - .. versionchanged:: 2.11.0 - Limited PEP 420 namespace package support. + Per default the template encoding is ``'utf-8'`` which can be changed + by setting the `encoding` parameter to something else. Due to the nature + of eggs it's only possible to reload templates if the package was loaded + from the file system and not a zip file. 
""" def __init__(self, package_name, package_path="templates", encoding="utf-8"): - if package_path == os.path.curdir: - package_path = "" - elif package_path[:2] == os.path.curdir + os.path.sep: - package_path = package_path[2:] + from pkg_resources import DefaultProvider + from pkg_resources import get_provider + from pkg_resources import ResourceManager - package_path = os.path.normpath(package_path).rstrip(os.path.sep) - self.package_path = package_path - self.package_name = package_name + provider = get_provider(package_name) self.encoding = encoding - - # Make sure the package exists. This also makes namespace - # packages work, otherwise get_loader returns None. - import_module(package_name) - self._loader = loader = pkgutil.get_loader(package_name) - - # Zip loader's archive attribute points at the zip. - self._archive = getattr(loader, "archive", None) - self._template_root = None - - if hasattr(loader, "get_filename"): - # A standard directory package, or a zip package. - self._template_root = os.path.join( - os.path.dirname(loader.get_filename(package_name)), package_path - ) - elif hasattr(loader, "_path"): - # A namespace package, limited support. Find the first - # contributor with the template directory. - for root in loader._path: - root = os.path.join(root, package_path) - - if os.path.isdir(root): - self._template_root = root - break - - if self._template_root is None: - raise ValueError( - "The %r package was not installed in a way that" - " PackageLoader understands." % package_name - ) + self.manager = ResourceManager() + self.filesystem_bound = isinstance(provider, DefaultProvider) + self.provider = provider + self.package_path = package_path def get_source(self, environment, template): - p = os.path.join(self._template_root, *split_template_path(template)) + pieces = split_template_path(template) + p = "/".join((self.package_path,) + tuple(pieces)) - if self._archive is None: - # Package is a directory. 
- if not os.path.isfile(p): - raise TemplateNotFound(template) + if not self.provider.has_resource(p): + raise TemplateNotFound(template) - with open(p, "rb") as f: - source = f.read() + filename = uptodate = None - mtime = os.path.getmtime(p) + if self.filesystem_bound: + filename = self.provider.get_resource_filename(self.manager, p) + mtime = path.getmtime(filename) - def up_to_date(): - return os.path.isfile(p) and os.path.getmtime(p) == mtime + def uptodate(): + try: + return path.getmtime(filename) == mtime + except OSError: + return False - else: - # Package is a zip file. - try: - source = self._loader.get_data(p) - except OSError: - raise TemplateNotFound(template) + source = self.provider.get_resource_string(self.manager, p) + return source.decode(self.encoding), filename, uptodate - # Could use the zip's mtime for all template mtimes, but - # would need to safely reload the module if it's out of - # date, so just report it as always current. - up_to_date = None + def list_templates(self): + path = self.package_path - return source.decode(self.encoding), p, up_to_date + if path[:2] == "./": + path = path[2:] + elif path == ".": + path = "" - def list_templates(self): + offset = len(path) results = [] - if self._archive is None: - # Package is a directory. - offset = len(self._template_root) - - for dirpath, _, filenames in os.walk(self._template_root): - dirpath = dirpath[offset:].lstrip(os.path.sep) - results.extend( - os.path.join(dirpath, name).replace(os.path.sep, "/") - for name in filenames - ) - else: - if not hasattr(self._loader, "_files"): - raise TypeError( - "This zip import does not have the required" - " metadata to list templates." - ) - - # Package is a zip file. 
- prefix = ( - self._template_root[len(self._archive) :].lstrip(os.path.sep) - + os.path.sep - ) - offset = len(prefix) + def _walk(path): + for filename in self.provider.resource_listdir(path): + fullname = path + "/" + filename - for name in self._loader._files.keys(): - # Find names under the templates directory that aren't directories. - if name.startswith(prefix) and name[-1] != os.path.sep: - results.append(name[offset:].replace(os.path.sep, "/")) + if self.provider.resource_isdir(fullname): + _walk(fullname) + else: + results.append(fullname[offset:].lstrip("/")) + _walk(path) results.sort() return results diff --git a/pipenv/vendor/jinja2/nativetypes.py b/pipenv/vendor/jinja2/nativetypes.py index 9866c962dc..a9ead4e2bb 100644 --- a/pipenv/vendor/jinja2/nativetypes.py +++ b/pipenv/vendor/jinja2/nativetypes.py @@ -1,4 +1,3 @@ -import types from ast import literal_eval from itertools import chain from itertools import islice @@ -11,7 +10,7 @@ from .environment import Template -def native_concat(nodes, preserve_quotes=True): +def native_concat(nodes): """Return a native Python type from the list of compiled nodes. If the result is a single node, its value is returned. Otherwise, the nodes are concatenated as strings. If the result can be parsed with @@ -19,9 +18,6 @@ def native_concat(nodes, preserve_quotes=True): the string is returned. :param nodes: Iterable of nodes to concatenate. - :param preserve_quotes: Whether to re-wrap literal strings with - quotes, to preserve quotes around expressions for later parsing. - Should be ``False`` in :meth:`NativeEnvironment.render`. 
""" head = list(islice(nodes, 2)) @@ -31,29 +27,17 @@ def native_concat(nodes, preserve_quotes=True): if len(head) == 1: raw = head[0] else: - if isinstance(nodes, types.GeneratorType): - nodes = chain(head, nodes) - raw = u"".join([text_type(v) for v in nodes]) + raw = u"".join([text_type(v) for v in chain(head, nodes)]) try: - literal = literal_eval(raw) + return literal_eval(raw) except (ValueError, SyntaxError, MemoryError): return raw - # If literal_eval returned a string, re-wrap with the original - # quote character to avoid dropping quotes between expression nodes. - # Without this, "'{{ a }}', '{{ b }}'" results in "a, b", but should - # be ('a', 'b'). - if preserve_quotes and isinstance(literal, str): - return "{quote}{}{quote}".format(literal, quote=raw[0]) - - return literal - class NativeCodeGenerator(CodeGenerator): """A code generator which renders Python types by not adding - ``to_string()`` around output nodes, and using :func:`native_concat` - to convert complex strings back to Python types if possible. + ``to_string()`` around output nodes. """ @staticmethod @@ -61,7 +45,7 @@ def _default_finalize(value): return value def _output_const_repr(self, group): - return repr(native_concat(group)) + return repr(u"".join([text_type(v) for v in group])) def _output_child_to_const(self, node, frame, finalize): const = node.as_const(frame.eval_ctx) @@ -100,10 +84,9 @@ def render(self, *args, **kwargs): Otherwise, the string is returned. """ vars = dict(*args, **kwargs) + try: - return native_concat( - self.root_render_func(self.new_context(vars)), preserve_quotes=False - ) + return native_concat(self.root_render_func(self.new_context(vars))) except Exception: return self.environment.handle_exception() diff --git a/pipenv/vendor/jinja2/nodes.py b/pipenv/vendor/jinja2/nodes.py index 9f3edc05f9..95bd614a14 100644 --- a/pipenv/vendor/jinja2/nodes.py +++ b/pipenv/vendor/jinja2/nodes.py @@ -671,7 +671,7 @@ def as_const(self, eval_ctx=None): # python 3. 
because of that, do not rename filter_ to filter! filter_ = self.environment.filters.get(self.name) - if filter_ is None or getattr(filter_, "contextfilter", False): + if filter_ is None or getattr(filter_, "contextfilter", False) is True: raise Impossible() # We cannot constant handle async filters, so we need to make sure @@ -684,9 +684,9 @@ def as_const(self, eval_ctx=None): args, kwargs = args_as_const(self, eval_ctx) args.insert(0, self.node.as_const(eval_ctx)) - if getattr(filter_, "evalcontextfilter", False): + if getattr(filter_, "evalcontextfilter", False) is True: args.insert(0, eval_ctx) - elif getattr(filter_, "environmentfilter", False): + elif getattr(filter_, "environmentfilter", False) is True: args.insert(0, self.environment) try: diff --git a/pipenv/vendor/jinja2/runtime.py b/pipenv/vendor/jinja2/runtime.py index 527d4b5e4b..3ad7968624 100644 --- a/pipenv/vendor/jinja2/runtime.py +++ b/pipenv/vendor/jinja2/runtime.py @@ -280,11 +280,11 @@ def call(__self, __obj, *args, **kwargs): # noqa: B902 break if callable(__obj): - if getattr(__obj, "contextfunction", 0): + if getattr(__obj, "contextfunction", False) is True: args = (__self,) + args - elif getattr(__obj, "evalcontextfunction", 0): + elif getattr(__obj, "evalcontextfunction", False) is True: args = (__self.eval_ctx,) + args - elif getattr(__obj, "environmentfunction", 0): + elif getattr(__obj, "environmentfunction", False) is True: args = (__self.environment,) + args try: return __obj(*args, **kwargs) diff --git a/pipenv/vendor/jinja2/utils.py b/pipenv/vendor/jinja2/utils.py index e3285e8edb..b422ba9686 100644 --- a/pipenv/vendor/jinja2/utils.py +++ b/pipenv/vendor/jinja2/utils.py @@ -165,11 +165,15 @@ def object_type_repr(obj): return "None" elif obj is Ellipsis: return "Ellipsis" + + cls = type(obj) + # __builtin__ in 2.x, builtins in 3.x - if obj.__class__.__module__ in ("__builtin__", "builtins"): - name = obj.__class__.__name__ + if cls.__module__ in ("__builtin__", "builtins"): + name = 
cls.__name__ else: - name = obj.__class__.__module__ + "." + obj.__class__.__name__ + name = cls.__module__ + "." + cls.__name__ + return "%s object" % name @@ -693,7 +697,8 @@ def __init__(*args, **kwargs): # noqa: B902 self.__attrs = dict(*args, **kwargs) def __getattribute__(self, name): - if name == "_Namespace__attrs": + # __class__ is needed for the awaitable check in async mode + if name in {"_Namespace__attrs", "__class__"}: return object.__getattribute__(self, name) try: return self.__attrs[name] diff --git a/pipenv/vendor/passa/models/projects.py b/pipenv/vendor/passa/models/projects.py index f6e037d651..c7807c0557 100644 --- a/pipenv/vendor/passa/models/projects.py +++ b/pipenv/vendor/passa/models/projects.py @@ -6,7 +6,7 @@ import io import os -import attr +from pipenv.vendor import attr import packaging.markers import packaging.utils import plette diff --git a/pipenv/vendor/pythonfinder/models/mixins.py b/pipenv/vendor/pythonfinder/models/mixins.py index 763271153b..aeba0443f3 100644 --- a/pipenv/vendor/pythonfinder/models/mixins.py +++ b/pipenv/vendor/pythonfinder/models/mixins.py @@ -5,7 +5,7 @@ import operator from collections import defaultdict -import attr +from pipenv.vendor import attr import six from ..compat import fs_str diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index b855a05da0..a8070c912f 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -7,7 +7,7 @@ from collections import defaultdict from itertools import chain -import attr +from pipenv.vendor import attr import six from cached_property import cached_property from ..compat import Path, fs_str diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 619e77612d..ff249be2b0 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -7,7 +7,7 @@ import sys from collections import 
defaultdict -import attr +from pipenv.vendor import attr import six from packaging.version import Version diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py index a0e69b034e..39353cdb4e 100644 --- a/pipenv/vendor/pythonfinder/models/windows.py +++ b/pipenv/vendor/pythonfinder/models/windows.py @@ -4,7 +4,7 @@ import operator from collections import defaultdict -import attr +from pipenv.vendor import attr from ..environment import MYPY_RUNNING from ..exceptions import InvalidPythonVersion diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 8150545cb9..ef48e6281b 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -10,7 +10,7 @@ from fnmatch import fnmatch from threading import Timer -import attr +from pipenv.vendor import attr import six from packaging.version import LegacyVersion, Version diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index b8270bb799..8fc573d1a6 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -10,7 +10,7 @@ from .models.pipfile import Pipfile from .models.requirements import Requirement -__version__ = "1.5.7" +__version__ = "1.5.9" logger = logging.getLogger(__name__) diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py index 78c78ace2b..1a610ce792 100644 --- a/pipenv/vendor/requirementslib/models/dependencies.py +++ b/pipenv/vendor/requirementslib/models/dependencies.py @@ -6,7 +6,7 @@ import functools import os -import attr +from pipenv.vendor import attr import packaging.markers import packaging.version import pip_shims.shims @@ -57,6 +57,7 @@ Command, ) from packaging.requirements import Requirement as PackagingRequirement + from packaging.markers import Marker TRequirement = TypeVar("TRequirement") RequirementType = TypeVar( @@ -71,9 
+72,14 @@ WHEEL_DOWNLOAD_DIR = fs_str(os.path.join(CACHE_DIR, "wheels")) DEPENDENCY_CACHE = DependencyCache() -WHEEL_CACHE = pip_shims.shims.WheelCache( - CACHE_DIR, pip_shims.shims.FormatControl(set(), set()) -) + + +@contextlib.contextmanager +def _get_wheel_cache(): + with pip_shims.shims.global_tempdir_manager(): + yield pip_shims.shims.WheelCache( + CACHE_DIR, pip_shims.shims.FormatControl(set(), set()) + ) def _get_filtered_versions(ireq, versions, prereleases): @@ -351,6 +357,7 @@ def get_dependencies(ireq, sources=None, parent=None): def get_dependencies_from_wheel_cache(ireq): + # type: (pip_shims.shims.InstallRequirement) -> Optional[Set[pip_shims.shims.InstallRequirement]] """Retrieves dependencies for the given install requirement from the wheel cache. :param ireq: A single InstallRequirement @@ -361,13 +368,14 @@ def get_dependencies_from_wheel_cache(ireq): if ireq.editable or not is_pinned_requirement(ireq): return - matches = WHEEL_CACHE.get(ireq.link, name_from_req(ireq.req)) - if matches: - matches = set(matches) - if not DEPENDENCY_CACHE.get(ireq): - DEPENDENCY_CACHE[ireq] = [format_requirement(m) for m in matches] - return matches - return + with _get_wheel_cache() as wheel_cache: + matches = wheel_cache.get(ireq.link, name_from_req(ireq.req)) + if matches: + matches = set(matches) + if not DEPENDENCY_CACHE.get(ireq): + DEPENDENCY_CACHE[ireq] = [format_requirement(m) for m in matches] + return matches + return None def _marker_contains_extra(ireq): @@ -477,12 +485,12 @@ def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache """ session, finder = get_finder(sources=sources, pip_options=pip_options) - if not wheel_cache: - wheel_cache = WHEEL_CACHE dep.is_direct = True requirements = None setup_requires = {} - with temp_environ(): + with temp_environ(), ExitStack() as stack: + if not wheel_cache: + wheel_cache = stack.enter_context(_get_wheel_cache()) os.environ["PIP_EXISTS_ACTION"] = "i" if dep.editable and not 
dep.prepared and not dep.req: setup_info = SetupInfo.from_ireq(dep) @@ -570,10 +578,6 @@ def start_resolver(finder=None, session=None, wheel_cache=None): if not session: session = pip_command._build_session(pip_options) - if not wheel_cache: - wheel_cache = WHEEL_CACHE - _ensure_dir(fs_str(os.path.join(wheel_cache.cache_dir, "wheels"))) - download_dir = PKGS_DOWNLOAD_DIR _ensure_dir(download_dir) @@ -582,6 +586,9 @@ def start_resolver(finder=None, session=None, wheel_cache=None): try: with ExitStack() as ctx: ctx.enter_context(pip_shims.shims.global_tempdir_manager()) + if not wheel_cache: + wheel_cache = ctx.enter_context(_get_wheel_cache()) + _ensure_dir(fs_str(os.path.join(wheel_cache.cache_dir, "wheels"))) preparer = ctx.enter_context( pip_shims.shims.make_preparer( options=pip_options, diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 3eabc5043f..841fc74c22 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -5,7 +5,7 @@ import itertools import os -import attr +from pipenv.vendor import attr import plette.lockfiles import six from vistir.compat import FileNotFoundError, JSONDecodeError, Path diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index 84637642a6..b07e444cfc 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -3,7 +3,7 @@ import operator import re -import attr +from pipenv.vendor import attr import distlib.markers import packaging.version import six @@ -25,7 +25,7 @@ STRING_TYPE = Union[str, bytes, Text] -MAX_VERSIONS = {2: 7, 3: 11, 4: 0} +MAX_VERSIONS = {1: 7, 2: 7, 3: 11, 4: 0} DEPRECATED_VERSIONS = ["3.0", "3.1", "3.2", "3.3"] @@ -557,7 +557,7 @@ def _split_specifierset_str(specset_str, prefix="=="): else: values = [v.strip() for v in specset_str.split(",")] if prefix == "!=" and any(v 
in values for v in DEPRECATED_VERSIONS): - values = DEPRECATED_VERSIONS[:] + values += DEPRECATED_VERSIONS[:] for value in sorted(values): specifiers.add(Specifier("{0}{1}".format(prefix, value))) return specifiers diff --git a/pipenv/vendor/requirementslib/models/metadata.py b/pipenv/vendor/requirementslib/models/metadata.py index 912b1b77b2..671a311b1b 100644 --- a/pipenv/vendor/requirementslib/models/metadata.py +++ b/pipenv/vendor/requirementslib/models/metadata.py @@ -9,7 +9,7 @@ import zipfile from collections import defaultdict -import attr +from pipenv.vendor import attr import dateutil.parser import distlib.metadata import distlib.wheel @@ -443,8 +443,7 @@ class ParsedTag(object): def parse_tag(tag): # type: (Tag) -> ParsedTag - """ - Parse a :class:`~packaging.tags.Tag` instance + """Parse a :class:`~packaging.tags.Tag` instance. :param :class:`~packaging.tags.Tag` tag: A tag to parse :return: A parsed tag with combined markers, supported platform and python version @@ -520,6 +519,8 @@ class ReleaseUrl(object): name = attr.ib(type=str, default=None) #: The available comments of the given upload comment_text = attr.ib(type=str, default="") + #: Whether the url has been yanked from the server + yanked = attr.ib(type=bool, default=False) #: The number of downloads (deprecated) downloads = attr.ib(type=int, default=-1) #: The filename of the current upload @@ -716,8 +717,8 @@ def latest_timestamp(self): def find_package_type(self, type_): # type: (str) -> Optional[ReleaseUrl] - """ - Given a package type (e.g. sdist, bdist_wheel), find the matching release + """Given a package type (e.g. sdist, bdist_wheel), find the matching + release. :param str type_: A package type from :const:`~PACKAGE_TYPES` :return: The package from this collection matching that type, if available @@ -956,8 +957,7 @@ def to_dependency(self): def create_dependencies(self, force=False): # type: (bool) -> "PackageInfo" - """ - Create values for **self.dependencies**. 
+ """Create values for **self.dependencies**. :param bool force: Sets **self.dependencies** to an empty tuple if it would be None, defaults to False. diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index 9c0aea4ea3..9bda73d49a 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -7,7 +7,7 @@ import os import sys -import attr +from pipenv.vendor import attr import plette.models.base import plette.pipfiles import tomlkit diff --git a/pipenv/vendor/requirementslib/models/project.py b/pipenv/vendor/requirementslib/models/project.py index 7c1b0e8100..4c73823c23 100644 --- a/pipenv/vendor/requirementslib/models/project.py +++ b/pipenv/vendor/requirementslib/models/project.py @@ -6,7 +6,7 @@ import io import os -import attr +from pipenv.vendor import attr import packaging.markers import packaging.utils import plette diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 0537ca08db..3ce8d8f510 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -10,7 +10,7 @@ from distutils.sysconfig import get_python_lib from functools import partial -import attr +from pipenv.vendor import attr import pip_shims import six import vistir @@ -137,9 +137,6 @@ SPECIFIERS_BY_LENGTH = sorted(list(Specifier._operators.keys()), key=len, reverse=True) -run = partial(vistir.misc.run, combine_stderr=False, return_object=True, nospin=True) - - class Line(object): def __init__(self, line, extras=None): # type: (AnyStr, Optional[Union[List[S], Set[S], Tuple[S, ...]]]) -> None @@ -164,8 +161,7 @@ def __init__(self, line, extras=None): self.parsed_marker = None # type: Optional[Marker] self.preferred_scheme = None # type: Optional[STRING_TYPE] self._requirement = None # type: Optional[PackagingRequirement] - self.is_direct_url = False 
# type: bool - self._parsed_url = None # type: Optional[urllib_parse.ParseResult] + self._parsed_url = None # type: Optional[URI] self._setup_cfg = None # type: Optional[STRING_TYPE] self._setup_py = None # type: Optional[STRING_TYPE] self._pyproject_toml = None # type: Optional[STRING_TYPE] @@ -301,7 +297,7 @@ def line_with_prefix(self): def line_for_ireq(self): # type: () -> STRING_TYPE line = "" # type: STRING_TYPE - if self.is_file or self.is_url and not self.is_vcs: + if self.is_file or self.is_remote_url and not self.is_vcs: scheme = self.preferred_scheme if self.preferred_scheme is not None else "uri" local_line = next( iter( @@ -340,7 +336,7 @@ def line_for_ireq(self): if self.editable: if not line: if self.is_path or self.is_file: - if not self.path: + if not self.path and self.url is not None: line = pip_shims.shims.url_to_path(self.url) else: line = self.path @@ -437,7 +433,7 @@ def specifiers(self): # note: we need versions for direct dependencies at the very least if ( self.is_file - or self.is_url + or self.is_remote_url or self.is_path or (self.is_vcs and not self.editable) ): @@ -485,7 +481,7 @@ def requirement(self): self.parse_requirement() if self._requirement is None and self._name is not None: self._requirement = init_requirement(canonicalize_name(self.name)) - if self.is_file or self.is_url and self._requirement is not None: + if self.is_file or self.is_remote_url and self._requirement is not None: self._requirement.url = self.url if ( self._requirement @@ -549,8 +545,8 @@ def pyproject_backend(self): def parse_hashes(self): # type: () -> "Line" - """ - Parse hashes from *self.line* and set them on the current object. + """Parse hashes from *self.line* and set them on the current object. 
+ :returns: Self :rtype: `:class:~Line` """ @@ -567,17 +563,22 @@ def parse_extras(self): :rtype: :class:`~Line` """ extras = None - if "@" in self.line or self.is_vcs or self.is_url: - line = "{0}".format(self.line) - uri = URI.parse(line) - name = uri.name - if name: - self._name = name - if uri.host and uri.path and uri.scheme: - self.line = uri.to_string( - escape_password=False, direct=False, strip_ssh=uri.is_implicit_ssh - ) - else: + line = "{0}".format(self.line) + if any([self.is_vcs, self.is_url, "@" in line]): + try: + if self.parsed_url.name: + self._name = self.parsed_url.name + if ( + self.parsed_url.host + and self.parsed_url.path + and self.parsed_url.scheme + ): + self.line = self.parsed_url.to_string( + escape_password=False, + direct=False, + strip_ssh=self.parsed_url.is_implicit_ssh, + ) + except ValueError: self.line, extras = pip_shims.shims._strip_extras(self.line) else: self.line, extras = pip_shims.shims._strip_extras(self.line) @@ -595,37 +596,10 @@ def parse_extras(self): def get_url(self): # type: () -> STRING_TYPE - """Sets ``self.name`` if given a **PEP-508** style URL""" - line = self.line - try: - parsed = URI.parse(line) - line = parsed.to_string(escape_password=False, direct=False, strip_ref=True) - except ValueError: - pass - else: - self._parsed_url = parsed - return line - if self.vcs is not None and self.line.startswith("{0}+".format(self.vcs)): - _, _, _parseable = self.line.partition("+") - parsed = urllib_parse.urlparse(add_ssh_scheme_to_git_uri(_parseable)) - line, _ = split_ref_from_uri(line) - else: - parsed = urllib_parse.urlparse(add_ssh_scheme_to_git_uri(line)) - if "@" in self.line and parsed.scheme == "": - name, _, url = self.line.partition("@") - if self._name is None: - url = url.strip() - self._name = name.strip() - if is_valid_url(url): - self.is_direct_url = True - line = url.strip() - parsed = urllib_parse.urlparse(line) - url_path = parsed.path - if "@" in url_path: - url_path, _, _ = url_path.rpartition("@") 
- parsed = parsed._replace(path=url_path) - self._parsed_url = parsed - return line + """Sets ``self.name`` if given a **PEP-508** style URL.""" + return self.parsed_url.to_string( + escape_password=False, direct=False, strip_ref=True + ) @property def name(self): @@ -655,20 +629,16 @@ def name(self, name): @property def url(self): # type: () -> Optional[STRING_TYPE] - if self.uri is not None: - url = add_ssh_scheme_to_git_uri(self.uri) - else: - url = getattr(self.link, "url_without_fragment", None) - if url is not None: - url = add_ssh_scheme_to_git_uri(unquote(url)) - if url is not None and self._parsed_url is None: - if self.vcs is not None: - _, _, _parseable = url.partition("+") - self._parsed_url = urllib_parse.urlparse(_parseable) - if self.is_vcs: - # strip the ref from the url - url, _ = split_ref_from_uri(url) - return url + try: + return self.parsed_url.to_string( + escape_password=False, + strip_ref=True, + strip_name=True, + strip_subdir=True, + strip_ssh=False, + ) + except ValueError: + return None @property def link(self): @@ -704,21 +674,36 @@ def is_vcs(self): # type: () -> bool # Installable local files and installable non-vcs urls are handled # as files, generally speaking - if is_vcs(self.line) or is_vcs(self.get_url()): - return True + try: + if is_vcs(self.line) or is_vcs(self.get_url()): + return True + except ValueError: + return False return False @property def is_url(self): # type: () -> bool - url = self.get_url() + try: + url = self.get_url() + except ValueError: + return False if is_valid_url(url) or is_file_url(url): return True return False + @property + def is_remote_url(self): + # type: () -> bool + return self.is_url and self.parsed_url.host is not None + @property def is_path(self): # type: () -> bool + try: + line_url = self.get_url() + except ValueError: + line_url = None if ( self.path and ( @@ -730,7 +715,7 @@ def is_path(self): ): return True elif (os.path.exists(self.line) and is_installable_file(self.line)) or ( - 
os.path.exists(self.get_url()) and is_installable_file(self.get_url()) + line_url and os.path.exists(line_url) and is_installable_file(line_url) ): return True return False @@ -738,22 +723,32 @@ def is_path(self): @property def is_file_url(self): # type: () -> bool - url = self.get_url() - parsed_url_scheme = self._parsed_url.scheme if self._parsed_url else "" - if url and is_file_url(self.get_url()) or parsed_url_scheme == "file": + try: + url = self.get_url() + except ValueError: + return False + try: + parsed_url_scheme = self.parsed_url.scheme + except ValueError: + return False + if url and is_file_url(url) or parsed_url_scheme == "file": return True return False @property def is_file(self): # type: () -> bool + try: + url = self.get_url() + except ValueError: + return False if ( self.is_path - or (is_file_url(self.get_url()) and is_installable_file(self.get_url())) + or (is_file_url(url) and is_installable_file(url)) or ( self._parsed_url - and self._parsed_url.scheme == "file" - and is_installable_file(urllib_parse.urlunparse(self._parsed_url)) + and self._parsed_url.is_file_url + and is_installable_file(self._parsed_url.url_without_fragment_or_ref) ) ): return True @@ -762,7 +757,13 @@ def is_file(self): @property def is_named(self): # type: () -> bool - return not (self.is_file_url or self.is_url or self.is_file or self.is_vcs) + return not ( + self.is_file_url + or self.is_url + or self.is_file + or self.is_vcs + or self.is_direct_url + ) @property def ref(self): @@ -781,7 +782,11 @@ def ireq(self): @property def is_installable(self): # type: () -> bool - possible_paths = (self.line, self.get_url(), self.path, self.base_path) + try: + url = self.get_url() + except ValueError: + url = None + possible_paths = (self.line, url, self.path, self.base_path) return any(is_installable_file(p) for p in possible_paths if p is not None) @property @@ -794,7 +799,7 @@ def get_setup_info(self): # type: () -> SetupInfo setup_info = None with 
pip_shims.shims.global_tempdir_manager(): - setup_info = SetupInfo.from_ireq(self.ireq) + setup_info = SetupInfo.from_ireq(self.ireq, subdir=self.subdirectory) if not setup_info.name: setup_info.get_info() return setup_info @@ -850,6 +855,21 @@ def vcsrepo(self): self._vcsrepo = self._get_vcsrepo() return self._vcsrepo + @property + def parsed_url(self): + # type: () -> URI + if self._parsed_url is None: + self._parsed_url = URI.parse(self.line) + return self._parsed_url + + @property + def is_direct_url(self): + # type: () -> bool + try: + return self.is_url and self.parsed_url.is_direct_url + except ValueError: + return self.is_url and bool(DIRECT_URL_RE.match(self.line)) + @cached_property def metadata(self): # type: () -> Dict[Any, Any] @@ -886,8 +906,8 @@ def vcsrepo(self, repo): ireq = self.ireq wheel_kwargs = self.wheel_kwargs.copy() wheel_kwargs["src_dir"] = repo.checkout_directory - ireq.ensure_has_source_dir(wheel_kwargs["src_dir"]) with pip_shims.shims.global_tempdir_manager(), temp_path(): + ireq.ensure_has_source_dir(wheel_kwargs["src_dir"]) sys.path = [repo.checkout_directory, "", ".", get_python_lib(plat_specific=0)] setupinfo = SetupInfo.create( repo.checkout_directory, @@ -907,7 +927,7 @@ def get_ireq(self): ireq = pip_shims.shims.install_req_from_line(line) if self.is_named: ireq = pip_shims.shims.install_req_from_line(self.line) - if self.is_file or self.is_url: + if self.is_file or self.is_remote_url: ireq.link = self.link if self.extras and not ireq.extras: ireq.extras = set(self.extras) @@ -1001,10 +1021,11 @@ def parse_name(self): # type: () -> "Line" if self._name is None: name = None - if self.link is not None: + if self.link is not None and self.line_is_installable: name = self._parse_name_from_link() if name is None and ( - (self.is_url or self.is_artifact or self.is_vcs) and self._parsed_url + (self.is_remote_url or self.is_artifact or self.is_vcs) + and self._parsed_url ): if self._parsed_url.fragment: _, _, name = 
self._parsed_url.fragment.partition("egg=") @@ -1013,7 +1034,7 @@ def parse_name(self): name, _, _ = name.partition("&") if name is None and self.is_named: name = self._parse_name_from_line() - elif name is None and self.is_file or self.is_url or self.is_path: + elif name is None and self.is_file or self.is_remote_url or self.is_path: if self.is_local: name = self._parse_name_from_path() if name is not None: @@ -1052,10 +1073,10 @@ def _parse_requirement_from_vcs(self): # else: # req.link = self.link if self.ref and self._requirement is not None: + self._requirement.revision = self.ref if self._vcsrepo is not None: - self._requirement.revision = self._vcsrepo.get_commit_hash() - else: - self._requirement.revision = self.ref + with pip_shims.shims.global_tempdir_manager(): + self._requirement.revision = self._vcsrepo.get_commit_hash() return self._requirement def parse_requirement(self): @@ -1107,53 +1128,64 @@ def parse_requirement(self): def parse_link(self): # type: () -> "Line" parsed_url = None # type: Optional[URI] - if not is_valid_url(self.line) and ( - self.line.startswith("./") - or (os.path.exists(self.line) or os.path.isabs(self.line)) + if ( + not is_valid_url(self.line) + and is_installable_file(os.path.abspath(self.line)) + and ( + self.line.startswith("./") + or (os.path.exists(self.line) or os.path.isabs(self.line)) + ) ): url = pip_shims.shims.path_to_url(os.path.abspath(self.line)) - parsed_url = URI.parse(url) - elif is_valid_url(self.line) or is_vcs(self.line) or is_file_url(self.line): - parsed_url = URI.parse(self.line) - if parsed_url is not None: - line = parsed_url.to_string( - escape_password=False, direct=False, strip_ref=True, strip_ssh=False - ) - if parsed_url.is_vcs: - self.vcs, _ = parsed_url.scheme.split("+") - if parsed_url.is_file_url: - self.is_local = True - parsed_link = parsed_url.as_link - self._ref = parsed_url.ref - self.uri = parsed_url.bare_url - if parsed_url.name: - self._name = parsed_url.name - if parsed_url.extras: - 
self.extras = tuple(sorted(set(parsed_url.extras))) + self._parsed_url = parsed_url = URI.parse(url) + elif any( + [ + is_valid_url(self.line), + is_vcs(self.line), + is_file_url(self.line), + self.is_direct_url, + ] + ): + parsed_url = self.parsed_url + if parsed_url is None or ( + parsed_url.is_file_url and not parsed_url.is_installable + ): + return None + if parsed_url.is_vcs: + self.vcs, _ = parsed_url.scheme.split("+") + if parsed_url.is_file_url: + self.is_local = True + parsed_link = parsed_url.as_link + self._ref = parsed_url.ref + self.uri = parsed_url.bare_url + if parsed_url.name: + self._name = parsed_url.name + if parsed_url.extras: + self.extras = tuple(sorted(set(parsed_url.extras))) + self._link = parsed_link + vcs, prefer, relpath, path, uri, link = FileRequirement.get_link_from_line( + self.line + ) + ref = None + if link is not None and "@" in unquote(link.path) and uri is not None: + uri, _, ref = unquote(uri).rpartition("@") + if relpath is not None and "@" in relpath: + relpath, _, ref = relpath.rpartition("@") + if path is not None and "@" in path: + path, _ = split_ref_from_uri(path) + link_url = link.url_without_fragment + if "@" in link_url: + link_url, _ = split_ref_from_uri(link_url) + self.preferred_scheme = prefer + self.relpath = relpath + self.path = path + # self.uri = uri + if prefer in ("path", "relpath") or uri.startswith("file"): + self.is_local = True + if parsed_url.is_vcs or parsed_url.is_direct_url and parsed_link: self._link = parsed_link - vcs, prefer, relpath, path, uri, link = FileRequirement.get_link_from_line( - self.line - ) - ref = None - if link is not None and "@" in unquote(link.path) and uri is not None: - uri, _, ref = unquote(uri).rpartition("@") - if relpath is not None and "@" in relpath: - relpath, _, ref = relpath.rpartition("@") - if path is not None and "@" in path: - path, _ = split_ref_from_uri(path) - link_url = link.url_without_fragment - if "@" in link_url: - link_url, _ = 
split_ref_from_uri(link_url) - self.preferred_scheme = prefer - self.relpath = relpath - self.path = path - # self.uri = uri - if prefer in ("path", "relpath") or uri.startswith("file"): - self.is_local = True - if parsed_url.is_vcs or parsed_url.is_direct_url and parsed_link: - self._link = parsed_link - else: - self._link = link + else: + self._link = link return self def parse_markers(self): @@ -1206,26 +1238,50 @@ def requirement_info(self): @property def line_is_installable(self): # type: () -> bool - """ - This is a safeguard against decoy requirements when a user installs a package - whose name coincides with the name of a folder in the cwd, e.g. install *alembic* - when there is a folder called *alembic* in the working directory. - - In this case we first need to check that the given requirement is a valid - URL, VCS requirement, or installable filesystem path before deciding to treat it - as a file requirement over a named requirement. + """This is a safeguard against decoy requirements when a user installs + a package whose name coincides with the name of a folder in the cwd, + e.g. install *alembic* when there is a folder called *alembic* in the + working directory. + + In this case we first need to check that the given requirement + is a valid URL, VCS requirement, or installable filesystem path + before deciding to treat it as a file requirement over a named + requirement. 
""" line = self.line + direct_url_match = DIRECT_URL_RE.match(line) + if direct_url_match: + match_dict = direct_url_match.groupdict() + auth = "" + username = match_dict.get("username", None) + password = match_dict.get("password", None) + port = match_dict.get("port", None) + path = match_dict.get("path", None) + ref = match_dict.get("ref", None) + if username is not None: + auth = "{0}".format(username) + if password: + auth = "{0}:{1}".format(auth, password) if auth else password + line = match_dict.get("host", "") + if auth: + line = "{auth}@{line}".format(auth=auth, line=line) + if port: + line = "{line}:{port}".format(line=line, port=port) + if path: + line = "{line}{pathsep}{path}".format( + line=line, pathsep=match_dict["pathsep"], path=path + ) + if ref: + line = "{line}@{ref}".format(line=line, ref=ref) + line = "{scheme}{line}".format(scheme=match_dict["scheme"], line=line) if is_file_url(line): link = create_link(line) line = link.url_without_fragment line, _ = split_ref_from_uri(line) if ( is_vcs(line) - or ( - is_valid_url(line) - and (not is_file_url(line) or is_installable_file(line)) - ) + or (not is_file_url(line) and is_valid_url(line)) + or (is_file_url(line) and is_installable_file(line)) or is_installable_file(line) ): return True @@ -1253,6 +1309,8 @@ def parse(self): raise RequirementError( "Supplied requirement is not installable: {0!r}".format(self.line) ) + elif self.is_named and self._name is None: + self.parse_name() self.parse_link() # self.parse_requirement() # self.parse_ireq() @@ -1385,6 +1443,7 @@ class FileRequirement(object): pyproject_backend = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] #: PyProject Path pyproject_path = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] + subdirectory = attr.ib(default=None) # type: Optional[STRING_TYPE] #: Setup metadata e.g. 
dependencies _setup_info = attr.ib(default=None, cmp=True) # type: Optional[SetupInfo] _has_hashed_name = attr.ib(default=False, cmp=True) # type: bool @@ -1551,8 +1610,6 @@ def __attrs_post_init__(self): @property def setup_info(self): # type: () -> Optional[SetupInfo] - from .setup_info import SetupInfo - if self._setup_info is None and self.parsed_line: if self.parsed_line and self._parsed_line and self.parsed_line.setup_info: if ( @@ -1566,7 +1623,9 @@ def setup_info(self): self.parsed_line.ireq and not self.parsed_line.is_wheel ): with pip_shims.shims.global_tempdir_manager(): - self._setup_info = SetupInfo.from_ireq(self.parsed_line.ireq) + self._setup_info = SetupInfo.from_ireq( + self.parsed_line.ireq, subdir=self.subdirectory + ) else: if self.link and not self.link.is_wheel: self._setup_info = Line(self.line_part).setup_info @@ -1889,7 +1948,6 @@ class VCSRequirement(FileRequirement): #: vcs reference name (branch / commit / tag) ref = attr.ib(default=None) # type: Optional[STRING_TYPE] #: Subdirectory to use for installation if applicable - subdirectory = attr.ib(default=None) # type: Optional[STRING_TYPE] _repo = attr.ib(default=None) # type: Optional[VCSRepository] _base_line = attr.ib(default=None) # type: Optional[STRING_TYPE] name = attr.ib() # type: STRING_TYPE @@ -1960,20 +2018,18 @@ def setup_info(self): with pip_shims.shims.global_tempdir_manager(): self._parsed_line._setup_info.get_info() return self._parsed_line.setup_info + subdir = self.subdirectory or self.parsed_line.subdirectory if self._repo: - from .setup_info import SetupInfo - with pip_shims.shims.global_tempdir_manager(): self._setup_info = SetupInfo.from_ireq( - Line(self._repo.checkout_directory).ireq + Line(self._repo.checkout_directory).ireq, subdir=subdir ) self._setup_info.get_info() return self._setup_info ireq = self.parsed_line.ireq - from .setup_info import SetupInfo with pip_shims.shims.global_tempdir_manager(): - self._setup_info = SetupInfo.from_ireq(ireq) + 
self._setup_info = SetupInfo.from_ireq(ireq, subdir=subdir) return self._setup_info @setup_info.setter @@ -2003,7 +2059,7 @@ def get_requirement(self): ) req = init_requirement(canonicalize_name(self.name)) req.editable = self.editable - if not getattr(req, "url"): + if not getattr(req, "url", None): if url is not None: url = add_ssh_scheme_to_git_uri(url) elif self.uri is not None: @@ -2114,21 +2170,18 @@ def get_vcs_repo(self, src_dir=None, checkout_dir=None): def get_commit_hash(self): # type: () -> STRING_TYPE - hash_ = None - hash_ = self.repo.get_commit_hash() + with pip_shims.shims.global_tempdir_manager(): + hash_ = self.repo.get_commit_hash() return hash_ def update_repo(self, src_dir=None, ref=None): # type: (Optional[STRING_TYPE], Optional[STRING_TYPE]) -> STRING_TYPE if ref: self.ref = ref - else: - if self.ref: - ref = self.ref repo_hash = None - if not self.is_local and ref is not None: - self.repo.checkout_ref(ref) - repo_hash = self.repo.get_commit_hash() + if not self.is_local and self.ref is not None: + self.repo.checkout_ref(self.ref) + repo_hash = self.get_commit_hash() if self.req: self.req.revision = repo_hash return repo_hash @@ -2144,7 +2197,8 @@ def locked_vcs_repo(self, src_dir=None): self.req = self.parsed_line.requirement else: self.req = self.get_requirement() - revision = self.req.revision = vcsrepo.get_commit_hash() + with pip_shims.shims.global_tempdir_manager(): + revision = self.req.revision = vcsrepo.get_commit_hash() # Remove potential ref in the end of uri after ref is parsed if self.link and "@" in self.link.show_url and self.uri and "@" in self.uri: @@ -2237,7 +2291,7 @@ def from_line(cls, line, editable=None, extras=None, parsed_line=None): @property def line_part(self): # type: () -> STRING_TYPE - """requirements.txt compatible line part sans-extras""" + """requirements.txt compatible line part sans-extras.""" base = "" # type: STRING_TYPE if self.is_local: base_link = self.link @@ -2620,7 +2674,8 @@ def from_line(cls, 
line): None ) # type: Optional[Union[VCSRequirement, FileRequirement, NamedRequirement]] if ( - (parsed_line.is_file and parsed_line.is_installable) or parsed_line.is_url + (parsed_line.is_file and parsed_line.is_installable) + or parsed_line.is_remote_url ) and not parsed_line.is_vcs: r = file_req_from_parsed_line(parsed_line) elif parsed_line.is_vcs: @@ -2956,7 +3011,6 @@ def run_requires(self, sources=None, finder=None): elif self.line_instance and self.line_instance.setup_info is not None: info_dict = self.line_instance.setup_info.as_dict() else: - from .setup_info import SetupInfo if not finder: from .dependencies import get_finder @@ -3095,3 +3149,8 @@ def named_req_from_parsed_line(parsed_line): parsed_line=parsed_line, ) return NamedRequirement.from_line(parsed_line.line) + + +if __name__ == "__main__": + line = Line("vistir@ git+https://github.com/sarugaku/vistir.git@master") + print(line) diff --git a/pipenv/vendor/requirementslib/models/resolvers.py b/pipenv/vendor/requirementslib/models/resolvers.py index 43590523d1..4554b29907 100644 --- a/pipenv/vendor/requirementslib/models/resolvers.py +++ b/pipenv/vendor/requirementslib/models/resolvers.py @@ -1,7 +1,7 @@ # -*- coding=utf-8 -*- from contextlib import contextmanager -import attr +from pipenv.vendor import attr import six from pip_shims.shims import Wheel diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 610eb68b39..9c97a3944e 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -12,7 +12,7 @@ import sys from functools import partial -import attr +from pipenv.vendor import attr import chardet import packaging.specifiers import packaging.utils @@ -23,9 +23,10 @@ from appdirs import user_cache_dir from distlib.wheel import Wheel from packaging.markers import Marker +from pip_shims.utils import call_function_with_correct_args from six.moves import configparser 
from six.moves.urllib.parse import unquote, urlparse, urlunparse -from vistir.compat import FileNotFoundError, Iterable, Mapping, Path, lru_cache +from vistir.compat import FileNotFoundError, Iterable, Mapping, Path, finalize, lru_cache from vistir.contextmanagers import cd, temp_path from vistir.misc import run from vistir.path import create_tracked_tempdir, ensure_mkdir_p, mkdir_p, rmtree @@ -695,6 +696,7 @@ def __init__(self): self.assignments = {} self.binOps = [] self.binOps_map = {} + self.recurse = True super(Analyzer, self).__init__() def generic_visit(self, node): @@ -709,6 +711,15 @@ def generic_visit(self, node): self.assignments.update(ast_unparse(node, initial_mapping=True)) super(Analyzer, self).generic_visit(node) + @contextlib.contextmanager + def no_recurse(self): + original_recurse_val = self.recurse + try: + self.recurse = False + yield + finally: + self.recurse = original_recurse_val + def visit_BinOp(self, node): node = ast_unparse(node, initial_mapping=True) self.binOps.append(node) @@ -727,6 +738,202 @@ def match_assignment_name(self, match): iter(k for k in self.assignments if getattr(k, "id", "") == match.id), None ) + def generic_unparse(self, item): + if any(isinstance(item, k) for k in AST_BINOP_MAP.keys()): + return AST_BINOP_MAP[type(item)] + elif any(isinstance(item, k) for k in AST_COMPARATORS.keys()): + return AST_COMPARATORS[type(item)] + return item + + def unparse(self, item): + unparser = getattr( + self, "unparse_{0}".format(item.__class__.__name__), self.generic_unparse + ) + return unparser(item) + + def unparse_Dict(self, item): + # unparsed = dict(zip(unparse(item.keys), unparse(item.values))) + return dict( + (self.unparse(k), self.unparse(v)) for k, v in zip(item.keys, item.values) + ) + + def unparse_List(self, item): + return [self.unparse(el) for el in item.elts] + + def unparse_Tuple(self, item): + return tuple([self.unparse(el) for el in item.elts]) + + def unparse_Str(self, item): + return item.s + + def 
unparse_Subscript(self, item): + unparsed = self.unparse(item.value) + if isinstance(item.slice, ast.Index): + try: + unparsed = unparsed[self.unparse(item.slice.value)] + except KeyError: + # not everything can be looked up before runtime + unparsed = item + return unparsed + + def unparse_Num(self, item): + return item.n + + def unparse_BinOp(self, item): + if item in self.binOps_map: + unparsed = self.binOps_map[item] + else: + right_item = self.unparse(item.right) + left_item = self.unparse(item.left) + op = getattr(item, "op", None) + op_func = self.unparse(op) if op is not None else op + try: + unparsed = op_func(left_item, right_item) + except Exception: + unparsed = (left_item, op_func, right_item) + return unparsed + + def unparse_Name(self, item): + unparsed = item.id + if not self.recurse: + return unparsed + if item in self.assignments and self.recurse: + items = self.unparse(self.assignments[item]) + unparsed = items.get(item.id, item.id) + else: + assignment = self.match_assignment_name(item) + if assignment is not None: + items = self.unparse(self.assignments[assignment]) + unparsed = items.get(item.id, item.id) + return unparsed + + def unparse_NameConstant(self, item): + return item.value + + def unparse_Constant(self, item): + return item.value + + def unparse_Ellipsis(self, item): + return item.value + + def unparse_Attribute(self, item): + attr_name = getattr(item, "value", None) + attr_attr = getattr(item, "attr", None) + name = None + name = self.unparse(attr_name) if attr_name is not None else attr_attr + if attr_name and not self.recurse: + name = attr_name + elif name and attr_attr: + if isinstance(name, six.string_types): + unparsed = ".".join([item for item in (name, attr_attr) if item]) + else: + unparsed = item + elif attr_attr and not name: + unparsed = attr_attr + else: + unparsed = name if not unparsed else unparsed + return unparsed + + def unparse_Compare(self, item): + if isinstance(item.left, ast.Attribute) or 
isinstance(item.left, ast.Str): + import importlib + + left = unparse(item.left) + if "." in left: + name, _, val = left.rpartition(".") + left = getattr(importlib.import_module(name), val, left) + comparators = [] + for comparator in item.comparators: + right = self.unparse(comparator) + if isinstance(comparator, ast.Attribute) and "." in right: + name, _, val = right.rpartition(".") + right = getattr(importlib.import_module(name), val, right) + comparators.append(right) + unparsed = (left, self.unparse(item.ops), comparators) + else: + unparsed = item + return unparsed + + def unparse_IfExp(self, item): + ops, truth_vals = [], [] + if isinstance(item.test, ast.Compare): + left, ops, right = self.unparse(item.test) + else: + result = self.unparse(item.test) + if isinstance(result, dict): + k, v = result.popitem() + if not v: + truth_vals = [False] + for i, op in enumerate(ops): + if i == 0: + truth_vals.append(op(left, right[i])) + else: + truth_vals.append(op(right[i - 1], right[i])) + if all(truth_vals): + unparsed = self.unparse(item.body) + else: + unparsed = self.unparse(item.orelse) + return unparsed + + def unparse_Call(self, item): + unparsed = {} + if isinstance(item.func, (ast.Name, ast.Attribute)): + func_name = self.unparse(item.func) + else: + try: + func_name = self.unparse(item.func) + except Exception: + func_name = None + if not func_name: + return {} + if isinstance(func_name, dict): + unparsed.update(func_name) + func_name = next(iter(func_name.keys())) + else: + unparsed[func_name] = {} + for key in ("kwargs", "keywords"): + val = getattr(item, key, []) + if val is None: + continue + for keyword in self.unparse(val): + unparsed[func_name].update(self.unparse(keyword)) + return unparsed + + def unparse_keyword(self, item): + return {self.unparse(item.arg): self.unparse(item.value)} + + def unparse_Assign(self, item): + # XXX: DO NOT UNPARSE THIS + # XXX: If we unparse this it becomes impossible to map it back + # XXX: To the original node in the 
AST so we can find the + # XXX: Original reference + with self.no_recurse(): + target = self.unparse(next(iter(item.targets))) + val = self.unparse(item.value) + if isinstance(target, (tuple, set, list)): + unparsed = dict(zip(target, val)) + else: + unparsed = {target: val} + return unparsed + + def unparse_Mapping(self, item): + unparsed = {} + for k, v in item.items(): + try: + unparsed[self.unparse(k)] = self.unparse(v) + except TypeError: + unparsed[k] = self.unparse(v) + return unparsed + + def unparse_list(self, item): + return type(item)([self.unparse(el) for el in item]) + + def unparse_tuple(self, item): + return self.unparse_list(item) + + def unparse_str(self, item): + return item + def parse_function_names(self, should_retry=True, function_map=None): if function_map is None: function_map = {} @@ -759,6 +966,17 @@ def parse_functions(self): ) return self.resolved_function_names + def parse_setup_function(self): + setup = {} # type: Dict[Any, Any] + self.unmap_binops() + function_names = self.parse_functions() + if "setup" in function_names: + setup = self.unparse(function_names["setup"]) + keys = list(setup.keys()) + if len(keys) == 1 and keys[0] is None: + _, setup = setup.popitem() + return setup + def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # noqa:C901 # type: (Any, bool, Optional[Analyzer], bool) -> Union[List[Any], Dict[Any, Any], Tuple[Any, ...], STRING_TYPE] @@ -895,15 +1113,21 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no func_name = unparse(item.func) except Exception: func_name = None + if func_name and not isinstance(func_name, dict): + unparsed[func_name] = {} if isinstance(func_name, dict): unparsed.update(func_name) func_name = next(iter(func_name.keys())) - for keyword in getattr(item, "keywords", []): - unparsed[func_name].update(unparse(keyword)) - elif func_name: - unparsed[func_name] = {} - for keyword in getattr(item, "keywords", []): - 
unparsed[func_name].update(unparse(keyword)) + if func_name: + for key in ("kwargs", "keywords"): + val = getattr(item, key, []) + if val is None: + continue + if isinstance(val, ast.Name): + unparsed[func_name] = val + else: + for keyword in unparse(val): + unparsed[func_name].update(unparse(keyword)) elif isinstance(item, ast.keyword): unparsed = {unparse(item.arg): unparse(item.value)} elif isinstance(item, ast.Assign): @@ -978,6 +1202,9 @@ def ast_parse_setup_py(path): function_names = ast_analyzer.parse_functions() if "setup" in function_names: setup = ast_unparse(function_names["setup"], analyzer=ast_analyzer) + keys = list(setup.keys()) + if len(keys) == 1 and keys[0] is None: + _, setup = setup.popitem() return setup @@ -1111,29 +1338,34 @@ def as_dict(self): return {self.name: tuple([r.requirement for r in self.requirements])} -@attr.s(slots=True, cmp=True, hash=True) +@attr.s(slots=True, eq=True, hash=True) class SetupInfo(object): - name = attr.ib(default=None, cmp=True) # type: STRING_TYPE - base_dir = attr.ib(default=None, cmp=True, hash=False) # type: STRING_TYPE - _version = attr.ib(default=None, cmp=True) # type: STRING_TYPE + name = attr.ib(default=None, eq=True) # type: STRING_TYPE + base_dir = attr.ib(default=None, eq=True, hash=False) # type: STRING_TYPE + _version = attr.ib(default=None, eq=True) # type: STRING_TYPE _requirements = attr.ib( - type=frozenset, factory=frozenset, cmp=True, hash=True + type=frozenset, factory=frozenset, eq=True, hash=True ) # type: Optional[frozenset] - build_requires = attr.ib(default=None, cmp=True) # type: Optional[Tuple] - build_backend = attr.ib(cmp=True) # type: STRING_TYPE - setup_requires = attr.ib(default=None, cmp=True) # type: Optional[Tuple] + build_requires = attr.ib(default=None, eq=True) # type: Optional[Tuple] + build_backend = attr.ib(eq=True) # type: STRING_TYPE + setup_requires = attr.ib(default=None, eq=True) # type: Optional[Tuple] python_requires = attr.ib( - default=None, cmp=True + 
default=None, eq=True ) # type: Optional[packaging.specifiers.SpecifierSet] - _extras_requirements = attr.ib(default=None, cmp=True) # type: Optional[Tuple] - setup_cfg = attr.ib(type=Path, default=None, cmp=True, hash=False) - setup_py = attr.ib(type=Path, default=None, cmp=True, hash=False) - pyproject = attr.ib(type=Path, default=None, cmp=True, hash=False) + _extras_requirements = attr.ib(default=None, eq=True) # type: Optional[Tuple] + setup_cfg = attr.ib(type=Path, default=None, eq=True, hash=False) + setup_py = attr.ib(type=Path, default=None, eq=True, hash=False) + pyproject = attr.ib(type=Path, default=None, eq=True, hash=False) ireq = attr.ib( - default=None, cmp=True, hash=False + default=None, eq=True, hash=False ) # type: Optional[InstallRequirement] - extra_kwargs = attr.ib(default=attr.Factory(dict), type=dict, cmp=False, hash=False) + extra_kwargs = attr.ib(default=attr.Factory(dict), type=dict, eq=False, hash=False) metadata = attr.ib(default=None) # type: Optional[Tuple[STRING_TYPE]] + stack = attr.ib(default=None, eq=False) # type: Optional[ExitStack] + _finalizer = attr.ib(default=None, eq=False) # type: Any + + def __attrs_post_init__(self): + self._finalizer = finalize(self, self.stack.close) @build_backend.default def get_build_backend(self): @@ -1399,8 +1631,8 @@ def reload(self): # type: () -> Dict[S, Any] """Wipe existing distribution info metadata for rebuilding. - Erases metadata from **self.egg_base** and unsets **self.requirements** - and **self.extras**. + Erases metadata from **self.egg_base** and unsets + **self.requirements** and **self.extras**. """ for metadata_dir in os.listdir(self.egg_base): shutil.rmtree(metadata_dir, ignore_errors=True) @@ -1422,7 +1654,8 @@ def get_metadata_from_wheel(self, wheel_path): def get_egg_metadata(self, metadata_dir=None, metadata_type=None): # type: (Optional[AnyStr], Optional[AnyStr]) -> Dict[Any, Any] - """Given a metadata directory, return the corresponding metadata dictionary. 
+ """Given a metadata directory, return the corresponding metadata + dictionary. :param Optional[str] metadata_dir: Root metadata path, default: `os.getcwd()` :param Optional[str] metadata_type: Type of metadata to search for, default None @@ -1586,10 +1819,13 @@ def from_ireq(cls, ireq, subdir=None, finder=None, session=None): return None if ireq.link.is_wheel: return None - if not finder: - from .dependencies import get_finder - - session, finder = get_finder() + stack = ExitStack() + if not session: + cmd = pip_shims.shims.InstallCommand() + options, _ = cmd.parser.parse_args([]) + session = cmd._build_session(options) + finder = cmd._build_package_finder(options, session) + tempdir_manager = stack.enter_context(pip_shims.shims.global_tempdir_manager()) vcs, uri = split_vcs_method_from_uri(unquote(ireq.link.url_without_fragment)) parsed = urlparse(uri) if "file" in parsed.scheme: @@ -1599,7 +1835,9 @@ def from_ireq(cls, ireq, subdir=None, finder=None, session=None): parsed = parsed._replace(path=url_path) uri = urlunparse(parsed) path = None + is_file = False if ireq.link.scheme == "file" or uri.startswith("file://"): + is_file = True if "file:/" in uri and "file:///" not in uri: uri = uri.replace("file:/", "file:///") path = pip_shims.shims.url_to_path(uri) @@ -1608,7 +1846,11 @@ def from_ireq(cls, ireq, subdir=None, finder=None, session=None): ireq.link, "is_vcs", getattr(ireq.link, "is_artifact", False) ) is_vcs = True if vcs else is_artifact_or_vcs - if not (ireq.editable and pip_shims.shims.is_file_url(ireq.link) and is_vcs): + if is_file and not is_vcs and path is not None and os.path.isdir(path): + target = os.path.join(kwargs["src_dir"], os.path.basename(path)) + shutil.copytree(path, target) + ireq.source_dir = target + if not (ireq.editable and is_file and is_vcs): if ireq.is_wheel: only_download = True download_dir = kwargs["wheel_download_dir"] @@ -1624,27 +1866,33 @@ def from_ireq(cls, ireq, subdir=None, finder=None, session=None): 
build_location_func = getattr(ireq, "build_location", None) if build_location_func is None: build_location_func = getattr(ireq, "ensure_build_location", None) - build_location_func(kwargs["build_dir"]) - ireq.ensure_has_source_dir(kwargs["src_dir"]) - src_dir = ireq.source_dir - with pip_shims.shims.global_tempdir_manager(): + if not ireq.source_dir: + build_kwargs = {"build_dir": kwargs["build_dir"], "autodelete": False} + call_function_with_correct_args(build_location_func, **build_kwargs) + ireq.ensure_has_source_dir(kwargs["src_dir"]) + src_dir = ireq.source_dir pip_shims.shims.shim_unpack( - link=ireq.link, - location=kwargs["src_dir"], download_dir=download_dir, + ireq=ireq, only_download=only_download, session=session, hashes=ireq.hashes(False), - progress_bar="off", ) created = cls.create( - kwargs["src_dir"], subdirectory=subdir, ireq=ireq, kwargs=kwargs + ireq.source_dir, subdirectory=subdir, ireq=ireq, kwargs=kwargs, stack=stack ) return created @classmethod - def create(cls, base_dir, subdirectory=None, ireq=None, kwargs=None): - # type: (AnyStr, Optional[AnyStr], Optional[InstallRequirement], Optional[Dict[AnyStr, AnyStr]]) -> Optional[SetupInfo] + def create( + cls, + base_dir, # type: str + subdirectory=None, # type: Optional[str] + ireq=None, # type: Optional[InstallRequirement] + kwargs=None, # type: Optional[Dict[str, str]] + stack=None, # type: Optional[ExitStack] + ): + # type: (...) 
-> Optional[SetupInfo] if not base_dir or base_dir is None: return None @@ -1661,6 +1909,9 @@ def create(cls, base_dir, subdirectory=None, ireq=None, kwargs=None): creation_kwargs["pyproject"] = pyproject creation_kwargs["setup_py"] = setup_py creation_kwargs["setup_cfg"] = setup_cfg + if stack is None: + stack = ExitStack() + creation_kwargs["stack"] = stack if ireq: creation_kwargs["ireq"] = ireq created = cls(**creation_kwargs) diff --git a/pipenv/vendor/requirementslib/models/url.py b/pipenv/vendor/requirementslib/models/url.py index 200eba698a..b0c98de8e5 100644 --- a/pipenv/vendor/requirementslib/models/url.py +++ b/pipenv/vendor/requirementslib/models/url.py @@ -1,7 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function -import attr +from pipenv.vendor import attr import pip_shims.shims from orderedmultidict import omdict from six.moves.urllib.parse import quote_plus, unquote_plus @@ -10,6 +10,7 @@ from urllib3.util.url import Url from ..environment import MYPY_RUNNING +from ..utils import is_installable_file from .utils import extras_to_string, parse_extras if MYPY_RUNNING: @@ -24,8 +25,7 @@ def _get_parsed_url(url): # type: (S) -> Url - """ - This is a stand-in function for `urllib3.util.parse_url` + """This is a stand-in function for `urllib3.util.parse_url` The orignal function doesn't handle special characters very well, this simply splits out the authentication section, creates the parsed url, then puts the authentication @@ -49,8 +49,7 @@ def _get_parsed_url(url): def remove_password_from_url(url): # type: (S) -> S - """ - Given a url, remove the password and insert 4 dashes + """Given a url, remove the password and insert 4 dashes. 
:param url: The url to replace the authentication in :type url: S @@ -108,12 +107,18 @@ def _parse_query(self): query_dict = omdict() queries = query.split("&") query_items = [] + subdirectory = self.subdirectory if self.subdirectory else None for q in queries: key, _, val = q.partition("=") val = unquote_plus(val.replace("+", " ")) - query_items.append((key, val)) + if key == "subdirectory" and not subdirectory: + subdirectory = val + else: + query_items.append((key, val)) query_dict.load(query_items) - return attr.evolve(self, query_dict=query_dict, query=query) + return attr.evolve( + self, query_dict=query_dict, subdirectory=subdirectory, query=query + ) def _parse_fragment(self): # type: () -> URI @@ -187,7 +192,10 @@ def parse_subdirectory(url_part): subdir = None if "&subdirectory" in url_part: url_part, _, subdir = url_part.rpartition("&") - subdir = "&{0}".format(subdir.strip()) + if "#egg=" not in url_part: + subdir = "#{0}".format(subdir.strip()) + else: + subdir = "&{0}".format(subdir.strip()) return url_part.strip(), subdir @classmethod @@ -255,8 +263,8 @@ def to_string( strip_subdir=False, # type: bool ): # type: (...) -> str - """ - Converts the current URI to a string, unquoting or escaping the password as needed + """Converts the current URI to a string, unquoting or escaping the + password as needed. 
:param escape_password: Whether to replace password with ``----``, default True :param escape_password: bool, optional @@ -295,9 +303,11 @@ def to_string( query = "" if self.query: query = "{query}?{self.query}".format(query=query, self=self) + subdir_prefix = "#" if not direct: if self.name and not strip_name: fragment = "#egg={self.name_with_extras}".format(self=self) + subdir_prefix = "&" elif not strip_name and ( self.extras and self.scheme and self.scheme.startswith("file") ): @@ -308,8 +318,8 @@ def to_string( fragment = "" query = "{query}{fragment}".format(query=query, fragment=fragment) if self.subdirectory and not strip_subdir: - query = "{query}&subdirectory={self.subdirectory}".format( - query=query, self=self + query = "{query}{subdir_prefix}subdirectory={self.subdirectory}".format( + query=query, subdir_prefix=subdir_prefix, self=self ) host_port_path = self.get_host_port_path(strip_ref=strip_ref) url = "{self.scheme}://{auth}{host_port_path}{query}".format( @@ -441,6 +451,11 @@ def uri_escape(self): # type: () -> str return self.to_string(escape_password=False, unquote=False) + @property + def is_installable(self): + # type: () -> bool + return self.is_file_url and is_installable_file(self.bare_url) + @property def is_vcs(self): # type: () -> bool @@ -477,7 +492,6 @@ def update_url_name_and_fragment(name_with_extras, ref, parsed_dict): if fragment_extras: parsed_extras = parsed_extras + tuple(parse_extras(fragment_extras)) name_with_extras = "{0}{1}".format(name, extras_to_string(parsed_extras)) - parsed_dict["fragment"] = "egg={0}".format(name_with_extras) elif ( parsed_dict.get("path") is not None and "&subdirectory" in parsed_dict["path"] ): diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py index 2cd62249fc..273305db0b 100644 --- a/pipenv/vendor/requirementslib/models/vcs.py +++ b/pipenv/vendor/requirementslib/models/vcs.py @@ -5,7 +5,7 @@ import os import sys -import attr +from pipenv.vendor 
import attr import pip_shims import six @@ -106,7 +106,8 @@ def update(self, ref): def get_commit_hash(self, ref=None): # type: (Optional[str]) -> str - return self.repo_backend.get_revision(self.checkout_directory) + with pip_shims.shims.global_tempdir_manager(): + return self.repo_backend.get_revision(self.checkout_directory) @classmethod def monkeypatch_pip(cls): diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index d76f82e974..b9db5d1686 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -121,7 +121,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): # type: (S) -> S - """Cleans VCS uris from pipenv.patched.notpip format""" + """Cleans VCS uris from pip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: diff --git a/pipenv/vendor/toml/LICENSE b/pipenv/vendor/toml/LICENSE.txt similarity index 94% rename from pipenv/vendor/toml/LICENSE rename to pipenv/vendor/toml/LICENSE.txt index 08e981ffac..5010e3075e 100644 --- a/pipenv/vendor/toml/LICENSE +++ b/pipenv/vendor/toml/LICENSE.txt @@ -1,11 +1,12 @@ The MIT License -Copyright 2013-2018 William Pearson +Copyright 2013-2019 William Pearson Copyright 2015-2016 Julien Enselme Copyright 2016 Google Inc. 
Copyright 2017 Samuel Vasko Copyright 2017 Nate Prewitt Copyright 2017 Jack Evans +Copyright 2019 Filippo Broggini Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/pipenv/vendor/toml/__init__.py b/pipenv/vendor/toml/__init__.py index e436b271b0..7e13a0c36f 100644 --- a/pipenv/vendor/toml/__init__.py +++ b/pipenv/vendor/toml/__init__.py @@ -6,16 +6,20 @@ from toml import encoder from toml import decoder -__version__ = "0.10.0" +__version__ = "0.10.1" _spec_ = "0.5.0" load = decoder.load loads = decoder.loads TomlDecoder = decoder.TomlDecoder TomlDecodeError = decoder.TomlDecodeError +TomlPreserveCommentDecoder = decoder.TomlPreserveCommentDecoder dump = encoder.dump dumps = encoder.dumps TomlEncoder = encoder.TomlEncoder TomlArraySeparatorEncoder = encoder.TomlArraySeparatorEncoder TomlPreserveInlineDictEncoder = encoder.TomlPreserveInlineDictEncoder +TomlNumpyEncoder = encoder.TomlNumpyEncoder +TomlPreserveCommentEncoder = encoder.TomlPreserveCommentEncoder +TomlPathlibEncoder = encoder.TomlPathlibEncoder diff --git a/pipenv/vendor/toml/common.py b/pipenv/vendor/toml/common.py new file mode 100644 index 0000000000..a5d673dac5 --- /dev/null +++ b/pipenv/vendor/toml/common.py @@ -0,0 +1,6 @@ +# content after the \ +escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] +# What it should be replaced by +escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] +# Used for substitution +escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) diff --git a/pipenv/vendor/toml/decoder.py b/pipenv/vendor/toml/decoder.py index 4d468dd488..3ec5b43afe 100644 --- a/pipenv/vendor/toml/decoder.py +++ b/pipenv/vendor/toml/decoder.py @@ -24,7 +24,7 @@ def _detect_pathlib_path(p): def _ispath(p): - if isinstance(p, basestring): + if isinstance(p, (bytes, basestring)): return True return _detect_pathlib_path(p) @@ -44,7 +44,7 @@ def _getpath(p): FNFError = IOError 
-TIME_RE = re.compile("([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?") +TIME_RE = re.compile(r"([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?") class TomlDecodeError(ValueError): @@ -66,6 +66,27 @@ def __init__(self, msg, doc, pos): _number_with_underscores = re.compile('([0-9])(_([0-9]))*') +class CommentValue(object): + def __init__(self, val, comment, beginline, _dict): + self.val = val + separator = "\n" if beginline else " " + self.comment = separator + comment + self._dict = _dict + + def __getitem__(self, key): + return self.val[key] + + def __setitem__(self, key, value): + self.val[key] = value + + def dump(self, dump_value_func): + retstr = dump_value_func(self.val) + if isinstance(self.val, self._dict): + return self.comment + "\n" + unicode(retstr) + else: + return unicode(retstr) + self.comment + + def _strictly_valid_num(n): n = n.strip() if not n: @@ -96,6 +117,7 @@ def load(f, _dict=dict, decoder=None): f: Path to the file to open, array of files to read into single dict or a file descriptor _dict: (optional) Specifies the class of the returned toml dictionary + decoder: The decoder to use Returns: Parsed toml file represented as a dictionary @@ -120,9 +142,9 @@ def load(f, _dict=dict, decoder=None): "existing file.") raise FNFError(error_msg) if decoder is None: - decoder = TomlDecoder() + decoder = TomlDecoder(_dict) d = decoder.get_empty_table() - for l in f: + for l in f: # noqa: E741 if op.exists(l): d.update(load(l, _dict, decoder)) else: @@ -177,19 +199,30 @@ def loads(s, _dict=dict, decoder=None): keygroup = False dottedkey = False keyname = 0 + key = '' + prev_key = '' + line_no = 1 + for i, item in enumerate(sl): if item == '\r' and sl[i + 1] == '\n': sl[i] = ' ' continue if keyname: + key += item if item == '\n': raise TomlDecodeError("Key name found without value." 
" Reached end of line.", original, i) if openstring: if item == openstrchar: - keyname = 2 - openstring = False - openstrchar = "" + oddbackslash = False + k = 1 + while i >= k and sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + if not oddbackslash: + keyname = 2 + openstring = False + openstrchar = "" continue elif keyname == 1: if item.isspace(): @@ -220,6 +253,8 @@ def loads(s, _dict=dict, decoder=None): continue if item == '=': keyname = 0 + prev_key = key[:-1].rstrip() + key = '' dottedkey = False else: raise TomlDecodeError("Found invalid character in key name: '" + @@ -272,12 +307,16 @@ def loads(s, _dict=dict, decoder=None): if item == '#' and (not openstring and not keygroup and not arrayoftables): j = i + comment = "" try: while sl[j] != '\n': + comment += s[j] sl[j] = ' ' j += 1 except IndexError: break + if not openarr: + decoder.preserve_comment(line_no, prev_key, comment, beginline) if item == '[' and (not openstring and not keygroup and not arrayoftables): if beginline: @@ -308,12 +347,20 @@ def loads(s, _dict=dict, decoder=None): sl[i] = ' ' else: beginline = True + line_no += 1 elif beginline and sl[i] != ' ' and sl[i] != '\t': beginline = False if not keygroup and not arrayoftables: if sl[i] == '=': raise TomlDecodeError("Found empty keyname. ", original, i) keyname = 1 + key += item + if keyname: + raise TomlDecodeError("Key name found without value." + " Reached end of file.", original, len(s)) + if openstring: # reached EOF and have an unterminated string + raise TomlDecodeError("Unterminated string found." 
+ " Reached end of file.", original, len(s)) s = ''.join(sl) s = s.split('\n') multikey = None @@ -323,6 +370,9 @@ def loads(s, _dict=dict, decoder=None): for idx, line in enumerate(s): if idx > 0: pos += len(s[idx - 1]) + 1 + + decoder.embed_comments(idx, currentlevel) + if not multilinestr or multibackslash or '\n' not in multilinestr: line = line.strip() if line == "" and (not multikey or multibackslash): @@ -333,9 +383,14 @@ def loads(s, _dict=dict, decoder=None): else: multilinestr += line multibackslash = False - if len(line) > 2 and (line[-1] == multilinestr[0] and - line[-2] == multilinestr[0] and - line[-3] == multilinestr[0]): + closed = False + if multilinestr[0] == '[': + closed = line[-1] == ']' + elif len(line) > 2: + closed = (line[-1] == multilinestr[0] and + line[-2] == multilinestr[0] and + line[-3] == multilinestr[0]) + if closed: try: value, vtype = decoder.load_value(multilinestr) except ValueError as err: @@ -663,7 +718,8 @@ def load_line(self, line, currentlevel, multikey, multibackslash): while len(pair[-1]) and (pair[-1][0] != ' ' and pair[-1][0] != '\t' and pair[-1][0] != "'" and pair[-1][0] != '"' and pair[-1][0] != '[' and pair[-1][0] != '{' and - pair[-1] != 'true' and pair[-1] != 'false'): + pair[-1].strip() != 'true' and + pair[-1].strip() != 'false'): try: float(pair[-1]) break @@ -671,6 +727,8 @@ def load_line(self, line, currentlevel, multikey, multibackslash): pass if _load_date(pair[-1]) is not None: break + if TIME_RE.match(pair[-1]): + break i += 1 prev_val = pair[-1] pair = line.split('=', i) @@ -704,16 +762,10 @@ def load_line(self, line, currentlevel, multikey, multibackslash): pair[0] = levels[-1].strip() elif (pair[0][0] == '"' or pair[0][0] == "'") and \ (pair[0][-1] == pair[0][0]): - pair[0] = pair[0][1:-1] - if len(pair[1]) > 2 and ((pair[1][0] == '"' or pair[1][0] == "'") and - pair[1][1] == pair[1][0] and - pair[1][2] == pair[1][0] and - not (len(pair[1]) > 5 and - pair[1][-1] == pair[1][0] and - pair[1][-2] == 
pair[1][0] and - pair[1][-3] == pair[1][0])): - k = len(pair[1]) - 1 - while k > -1 and pair[1][k] == '\\': + pair[0] = _unescape(pair[0][1:-1]) + k, koffset = self._load_line_multiline_str(pair[1]) + if k > -1: + while k > -1 and pair[1][k + koffset] == '\\': multibackslash = not multibackslash k -= 1 if multibackslash: @@ -734,6 +786,26 @@ def load_line(self, line, currentlevel, multikey, multibackslash): else: currentlevel[pair[0]] = value + def _load_line_multiline_str(self, p): + poffset = 0 + if len(p) < 3: + return -1, poffset + if p[0] == '[' and (p.strip()[-1] != ']' and + self._load_array_isstrarray(p)): + newp = p[1:].strip().split(',') + while len(newp) > 1 and newp[-1][0] != '"' and newp[-1][0] != "'": + newp = newp[:-2] + [newp[-2] + ',' + newp[-1]] + newp = newp[-1] + poffset = len(p) - len(newp) + p = newp + if p[0] != '"' and p[0] != "'": + return -1, poffset + if p[1] != p[0] or p[2] != p[0]: + return -1, poffset + if len(p) > 5 and p[-1] == p[0] and p[-2] == p[0] and p[-3] == p[0]: + return -1, poffset + return len(p) - 1, poffset + def load_value(self, v, strictly_valid=True): if not v: raise ValueError("Empty value is invalid") @@ -769,7 +841,8 @@ def load_value(self, v, strictly_valid=True): pass if not oddbackslash: if closed: - raise ValueError("Stuff after closed string. WTF?") + raise ValueError("Found tokens after a closed " + + "string. 
Invalid TOML.") else: if not triplequote or triplequotecount > 1: closed = True @@ -857,15 +930,18 @@ def bounded_string(self, s): break return not backslash + def _load_array_isstrarray(self, a): + a = a[1:-1].strip() + if a != '' and (a[0] == '"' or a[0] == "'"): + return True + return False + def load_array(self, a): atype = None retval = [] a = a.strip() if '[' not in a[1:-1] or "" != a[1:-1].split('[')[0].strip(): - strarray = False - tmpa = a[1:-1].strip() - if tmpa != '' and (tmpa[0] == '"' or tmpa[0] == "'"): - strarray = True + strarray = self._load_array_isstrarray(a) if not a[1:-1].strip().startswith('{'): a = a[1:-1].split(',') else: @@ -874,6 +950,7 @@ def load_array(self, a): new_a = [] start_group_index = 1 end_group_index = 2 + open_bracket_count = 1 if a[start_group_index] == '{' else 0 in_str = False while end_group_index < len(a[1:]): if a[end_group_index] == '"' or a[end_group_index] == "'": @@ -884,9 +961,15 @@ def load_array(self, a): in_str = not in_str backslash_index -= 1 in_str = not in_str + if not in_str and a[end_group_index] == '{': + open_bracket_count += 1 if in_str or a[end_group_index] != '}': end_group_index += 1 continue + elif a[end_group_index] == '}' and open_bracket_count > 1: + open_bracket_count -= 1 + end_group_index += 1 + continue # Increase end_group_index by 1 to get the closing bracket end_group_index += 1 @@ -943,3 +1026,27 @@ def load_array(self, a): atype = ntype retval.append(nval) return retval + + def preserve_comment(self, line_no, key, comment, beginline): + pass + + def embed_comments(self, idx, currentlevel): + pass + + +class TomlPreserveCommentDecoder(TomlDecoder): + + def __init__(self, _dict=dict): + self.saved_comments = {} + super(TomlPreserveCommentDecoder, self).__init__(_dict) + + def preserve_comment(self, line_no, key, comment, beginline): + self.saved_comments[line_no] = (key, comment, beginline) + + def embed_comments(self, idx, currentlevel): + if idx not in self.saved_comments: + return + + 
key, comment, beginline = self.saved_comments[idx] + currentlevel[key] = CommentValue(currentlevel[key], comment, beginline, + self._dict) diff --git a/pipenv/vendor/toml/encoder.py b/pipenv/vendor/toml/encoder.py index 79bfd37b3d..d9e557ed95 100644 --- a/pipenv/vendor/toml/encoder.py +++ b/pipenv/vendor/toml/encoder.py @@ -1,6 +1,7 @@ import datetime import re import sys +from decimal import Decimal from toml.decoder import InlineTableDict @@ -8,12 +9,13 @@ unicode = str -def dump(o, f): +def dump(o, f, encoder=None): """Writes out dict as toml to a file Args: o: Object to dump into toml f: File descriptor where the toml should be stored + encoder: The ``TomlEncoder`` to use for constructing the output string Returns: String containing the toml corresponding to dictionary @@ -24,7 +26,7 @@ def dump(o, f): if not f.write: raise TypeError("You can only dump an object to a file descriptor") - d = dumps(o) + d = dumps(o, encoder=encoder) f.write(d) return d @@ -34,11 +36,22 @@ def dumps(o, encoder=None): Args: o: Object to dump into toml - - preserve: Boolean parameter. If true, preserve inline tables. + encoder: The ``TomlEncoder`` to use for constructing the output string Returns: String containing the toml corresponding to dict + + Examples: + ```python + >>> import toml + >>> output = { + ... 'a': "I'm a string", + ... 'b': ["I'm", "a", "list"], + ... 'c': 2400 + ... 
} + >>> toml.dumps(output) + 'a = "I\'m a string"\nb = [ "I\'m", "a", "list",]\nc = 2400\n' + ``` """ retval = "" @@ -46,7 +59,13 @@ def dumps(o, encoder=None): encoder = TomlEncoder(o.__class__) addtoretval, sections = encoder.dump_sections(o, "") retval += addtoretval + outer_objs = [id(o)] while sections: + section_ids = [id(section) for section in sections] + for outer_obj in outer_objs: + if outer_obj in section_ids: + raise ValueError("Circular reference detected") + outer_objs += section_ids newsections = encoder.get_empty_table() for section in sections: addtoretval, addtosections = encoder.dump_sections( @@ -96,7 +115,7 @@ def _dump_str(v): def _dump_float(v): - return "{0:.16}".format(v).replace("e+0", "e+").replace("e-0", "e-") + return "{}".format(v).replace("e+0", "e+").replace("e-0", "e-") def _dump_time(v): @@ -119,6 +138,7 @@ def __init__(self, _dict=dict, preserve=False): bool: lambda v: unicode(v).lower(), int: lambda v: v, float: _dump_float, + Decimal: _dump_float, datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), datetime.time: _dump_time, datetime.date: lambda v: v.isoformat() @@ -169,10 +189,7 @@ def dump_sections(self, o, sup): section = unicode(section) qsection = section if not re.match(r'^[A-Za-z0-9_-]+$', section): - if '"' in section: - qsection = "'" + section + "'" - else: - qsection = '"' + section + '"' + qsection = _dump_str(section) if not isinstance(o[section], dict): arrayoftables = False if isinstance(o[section], list): @@ -248,3 +265,40 @@ def dump_list(self, v): t = s retval += "]" return retval + + +class TomlNumpyEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + import numpy as np + super(TomlNumpyEncoder, self).__init__(_dict, preserve) + self.dump_funcs[np.float16] = _dump_float + self.dump_funcs[np.float32] = _dump_float + self.dump_funcs[np.float64] = _dump_float + self.dump_funcs[np.int16] = self._dump_int + self.dump_funcs[np.int32] = self._dump_int + 
self.dump_funcs[np.int64] = self._dump_int + + def _dump_int(self, v): + return "{}".format(int(v)) + + +class TomlPreserveCommentEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + from toml.decoder import CommentValue + super(TomlPreserveCommentEncoder, self).__init__(_dict, preserve) + self.dump_funcs[CommentValue] = lambda v: v.dump(self.dump_value) + + +class TomlPathlibEncoder(TomlEncoder): + + def _dump_pathlib_path(self, v): + return _dump_str(str(v)) + + def dump_value(self, v): + if (3, 4) <= sys.version_info: + import pathlib + if isinstance(v, pathlib.PurePath): + v = str(v) + return super(TomlPathlibEncoder, self).dump_value(v) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 457c127cbf..3e08152026 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -1,7 +1,7 @@ -appdirs==1.4.3 +appdirs==1.4.4 backports.shutil_get_terminal_size==1.0.0 backports.weakref==1.0.post1 -click==7.1.1 +click==7.1.2 click-completion==0.5.2 click-didyoumean==0.0.3 colorama==0.4.3 @@ -11,7 +11,7 @@ delegator.py==0.1.1 python-dotenv==0.10.3 first==2.0.1 iso8601==0.1.12 -jinja2==2.11.1 +jinja2==2.11.2 markupsafe==1.1.1 parse==1.15.0 pathlib2==2.3.5 @@ -26,7 +26,7 @@ requests==2.23.0 idna==2.9 urllib3==1.25.9 certifi==2020.4.5.1 -requirementslib==1.5.7 +requirementslib==1.5.9 attrs==19.3.0 distlib==0.3.0 packaging==20.3 @@ -36,23 +36,24 @@ requirementslib==1.5.7 shellingham==1.3.2 six==1.14.0 semver==2.9.0 -toml==0.10.0 +toml==0.10.1 cached-property==1.5.1 -vistir==0.5.0 +vistir==0.5.2 pip-shims==0.5.2 contextlib2==0.6.0.post1 funcsigs==1.0.2 -enum34==1.1.6 +enum34==1.1.10 # yaspin==0.15.0 yaspin==0.14.3 cerberus==1.3.2 resolvelib==0.3.0 -backports.functools_lru_cache==1.5 +backports.functools_lru_cache==1.6.1 pep517==0.8.2 zipp==0.6.0 importlib_metadata==1.6.0 - importlib-resources==1.4.0 + importlib-resources==1.5.0 more-itertools==5.0.0 git+https://github.com/sarugaku/passa.git@master#egg=passa 
orderedmultidict==1.0.1 dparse==0.5.0 +python-dateutil==2.8.1 diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index fe78c8d5bb..1227629a2f 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -36,7 +36,7 @@ from .path import create_tracked_tempdir, create_tracked_tempfile, mkdir_p, rmtree from .spin import create_spinner -__version__ = "0.5.0" +__version__ = "0.5.2" __all__ = [ diff --git a/pipenv/vendor/vistir/_winconsole.py b/pipenv/vendor/vistir/_winconsole.py index a8be4772ae..24faec598d 100644 --- a/pipenv/vendor/vistir/_winconsole.py +++ b/pipenv/vendor/vistir/_winconsole.py @@ -60,7 +60,7 @@ py_object, windll, ) -from ctypes.wintypes import LPCWSTR, LPWSTR +from ctypes.wintypes import HANDLE, LPCWSTR, LPWSTR from itertools import count import msvcrt @@ -83,19 +83,18 @@ c_ssize_p = POINTER(c_ssize_t) - +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) GetConsoleCursorInfo = kernel32.GetConsoleCursorInfo +GetStdHandle = kernel32.GetStdHandle +LocalFree = WINFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)(("LocalFree", windll.kernel32)) +ReadConsoleW = kernel32.ReadConsoleW SetConsoleCursorInfo = kernel32.SetConsoleCursorInfo -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) - +WriteConsoleW = kernel32.WriteConsoleW # XXX: Added for cursor hiding on windows STDOUT_HANDLE_ID = ctypes.c_ulong(-11) @@ -354,7 +353,11 @@ def _hash_py_argv(): def _get_windows_argv(): argc = c_int(0) argv_unicode = CommandLineToArgvW(GetCommandLineW(), 
byref(argc)) - argv = [argv_unicode[i] for i in range(0, argc.value)] + try: + argv = [argv_unicode[i] for i in range(0, argc.value)] + finally: + LocalFree(argv_unicode) + del argv_unicode if not hasattr(sys, "frozen"): argv = argv[1:] diff --git a/pipenv/vendor/vistir/backports/surrogateescape.py b/pipenv/vendor/vistir/backports/surrogateescape.py index 0532be08bf..c506f7b3f6 100644 --- a/pipenv/vendor/vistir/backports/surrogateescape.py +++ b/pipenv/vendor/vistir/backports/surrogateescape.py @@ -35,7 +35,7 @@ def b(data): _unichr = chr bytes_chr = lambda code: bytes((code,)) else: - _unichr = unichr + _unichr = unichr # type: ignore bytes_chr = chr diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index ee96f761d0..a21df8f3c0 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -29,11 +29,23 @@ "TemporaryDirectory", "NamedTemporaryFile", "to_native_string", - "Iterable", + "samefile", "Mapping", - "Sequence", - "Set", + "Hashable", + "MutableMapping", + "Container", + "Iterator", + "KeysView", "ItemsView", + "MappingView", + "Iterable", + "Set", + "Sequence", + "Sized", + "ValuesView", + "MutableSet", + "MutableSequence", + "Callable", "fs_encode", "fs_decode", "_fs_encode_errors", @@ -45,23 +57,79 @@ else: # pragma: no cover from pipenv.vendor.pathlib2 import Path -if six.PY3: # pragma: no cover +if sys.version_info >= (3, 4): # pragma: no cover # Only Python 3.4+ is supported from functools import lru_cache, partialmethod from tempfile import NamedTemporaryFile from shutil import get_terminal_size from weakref import finalize + from collections.abc import ( + Mapping, + Hashable, + MutableMapping, + Container, + Iterator, + KeysView, + ItemsView, + MappingView, + Iterable, + Set, + Sequence, + Sized, + ValuesView, + MutableSet, + MutableSequence, + Callable, + ) + from os.path import samefile + else: # pragma: no cover # Only Python 2.7 is supported from pipenv.vendor.backports.functools_lru_cache 
import lru_cache - from .backports.functools import partialmethod # type: ignore from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size + from .backports.functools import partialmethod # type: ignore from .backports.surrogateescape import register_surrogateescape + from collections import ( + Mapping, + Hashable, + MutableMapping, + Container, + Iterator, + KeysView, + ItemsView, + MappingView, + Iterable, + Set, + Sequence, + Sized, + ValuesView, + MutableSet, + MutableSequence, + Callable, + ) register_surrogateescape() NamedTemporaryFile = _NamedTemporaryFile from pipenv.vendor.backports.weakref import finalize # type: ignore + try: + from os.path import samefile + except ImportError: + + def samestat(s1, s2): + """Test whether two stat buffers reference the same file.""" + return s1.st_ino == s2.st_ino and s1.st_dev == s2.st_dev + + def samefile(f1, f2): + """Test whether two pathnames reference the same actual file or + directory This is determined by the device number and i-node number + and raises an exception if an os.stat() call on either pathname + fails.""" + s1 = os.stat(f1) + s2 = os.stat(f2) + return samestat(s1, s2) + + try: # Introduced Python 3.5 from json import JSONDecodeError @@ -76,7 +144,7 @@ class ResourceWarning(Warning): pass class FileNotFoundError(IOError): - """No such file or directory""" + """No such file or directory.""" def __init__(self, *args, **kwargs): self.errno = errno.ENOENT @@ -95,7 +163,7 @@ def __init__(self, *args, **kwargs): super(TimeoutError, self).__init__(*args, **kwargs) class IsADirectoryError(OSError): - """The command does not work on directories""" + """The command does not work on directories.""" def __init__(self, *args, **kwargs): self.errno = errno.EISDIR @@ -118,24 +186,6 @@ def __init__(self, *args, **kwargs): ) from io import StringIO -six.add_move( - six.MovedAttribute("Iterable", "collections", "collections.abc") -) # type: ignore -six.add_move( - six.MovedAttribute("Mapping", 
"collections", "collections.abc") -) # type: ignore -six.add_move( - six.MovedAttribute("Sequence", "collections", "collections.abc") -) # type: ignore -six.add_move(six.MovedAttribute("Set", "collections", "collections.abc")) # type: ignore -six.add_move( - six.MovedAttribute("ItemsView", "collections", "collections.abc") -) # type: ignore - -# fmt: off -from six.moves import ItemsView, Iterable, Mapping, Sequence, Set # type: ignore # noqa # isort:skip -# fmt: on - if not sys.warnoptions: warnings.simplefilter("default", ResourceWarning) @@ -213,7 +263,7 @@ def cleanup(self): def is_bytes(string): - """Check if a string is a bytes instance + """Check if a string is a bytes instance. :param Union[str, bytes] string: A string that may be string or bytes like :return: Whether the provided string is a bytes type or not @@ -227,7 +277,7 @@ def is_bytes(string): def fs_str(string): - """Encodes a string into the proper filesystem encoding + """Encodes a string into the proper filesystem encoding. Borrowed from pip-tools """ @@ -239,8 +289,7 @@ def fs_str(string): def _get_path(path): - """ - Fetch the string value from a path-like object + """Fetch the string value from a path-like object. Returns **None** if there is no string value. """ @@ -324,8 +373,7 @@ def _chunks(b, indexes): def fs_encode(path): - """ - Encode a filesystem path to the proper filesystem encoding + """Encode a filesystem path to the proper filesystem encoding. :param Union[str, bytes] path: A string-like path :returns: A bytes-encoded filesystem path representation @@ -349,8 +397,7 @@ def fs_encode(path): def fs_decode(path): - """ - Decode a filesystem path using the proper filesystem encoding + """Decode a filesystem path using the proper filesystem encoding. 
:param path: The filesystem path to decode from bytes or string :return: The filesystem path, decoded with the determined encoding @@ -376,17 +423,15 @@ def fs_decode(path): if sys.version_info[0] < 3: # pragma: no cover - _fs_encode_errors = "surrogateescape" + _fs_encode_errors = "surrogatepass" if sys.platform == "win32" else "surrogateescape" _fs_decode_errors = "surrogateescape" _fs_encoding = "utf-8" else: # pragma: no cover _fs_encoding = "utf-8" + _fs_decode_errors = "surrogateescape" if sys.platform.startswith("win"): _fs_error_fn = None - if sys.version_info[:2] > (3, 4): - alt_strategy = "surrogatepass" - else: - alt_strategy = "surrogateescape" + _fs_encode_errors = "surrogatepass" else: if sys.version_info >= (3, 3): _fs_encoding = sys.getfilesystemencoding() @@ -394,8 +439,8 @@ def fs_decode(path): _fs_encoding = sys.getdefaultencoding() alt_strategy = "surrogateescape" _fs_error_fn = getattr(sys, "getfilesystemencodeerrors", None) - _fs_encode_errors = _fs_error_fn() if _fs_error_fn else alt_strategy - _fs_decode_errors = _fs_error_fn() if _fs_error_fn else alt_strategy + _fs_encode_errors = _fs_error_fn() if _fs_error_fn else alt_strategy + _fs_decode_errors = _fs_error_fn() if _fs_error_fn else _fs_decode_errors _byte = chr if sys.version_info < (3,) else lambda i: bytes([i]) diff --git a/pipenv/vendor/vistir/contextmanagers.py b/pipenv/vendor/vistir/contextmanagers.py index 66fde577cd..80421a9c33 100644 --- a/pipenv/vendor/vistir/contextmanagers.py +++ b/pipenv/vendor/vistir/contextmanagers.py @@ -9,9 +9,34 @@ import six -from .compat import NamedTemporaryFile, Path +from .compat import IS_TYPE_CHECKING, NamedTemporaryFile, Path from .path import is_file_url, is_valid_url, path_to_url, url_to_path +if IS_TYPE_CHECKING: + from typing import ( + Any, + Bytes, + Callable, + ContextManager, + Dict, + IO, + Iterator, + Optional, + Union, + Text, + Tuple, + TypeVar, + ) + from types import ModuleType + from requests import Session + from 
six.moves.http_client import HTTPResponse as Urllib_HTTPResponse + from urllib3.response import HTTPResponse as Urllib3_HTTPResponse + from .spin import VistirSpinner, DummySpinner + + TSpinner = Union[VistirSpinner, DummySpinner] + _T = TypeVar("_T") + + __all__ = [ "temp_environ", "temp_path", @@ -29,6 +54,7 @@ # See https://github.com/berdario/pew/blob/master/pew/_utils.py#L82 @contextmanager def temp_environ(): + # type: () -> Iterator[None] """Allow the ability to set os.environ temporarily""" environ = dict(os.environ) try: @@ -40,17 +66,30 @@ def temp_environ(): @contextmanager def temp_path(): + # type: () -> Iterator[None] """A context manager which allows the ability to set sys.path temporarily >>> path_from_virtualenv = load_path("/path/to/venv/bin/python") >>> print(sys.path) - ['/home/user/.pyenv/versions/3.7.0/bin', '/home/user/.pyenv/versions/3.7.0/lib/python37.zip', '/home/user/.pyenv/versions/3.7.0/lib/python3.7', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/lib-dynload', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/site-packages'] + [ + '/home/user/.pyenv/versions/3.7.0/bin', + '/home/user/.pyenv/versions/3.7.0/lib/python37.zip', + '/home/user/.pyenv/versions/3.7.0/lib/python3.7', + '/home/user/.pyenv/versions/3.7.0/lib/python3.7/lib-dynload', + '/home/user/.pyenv/versions/3.7.0/lib/python3.7/site-packages' + ] >>> with temp_path(): sys.path = path_from_virtualenv # Running in the context of the path above run(["pip", "install", "stuff"]) >>> print(sys.path) - ['/home/user/.pyenv/versions/3.7.0/bin', '/home/user/.pyenv/versions/3.7.0/lib/python37.zip', '/home/user/.pyenv/versions/3.7.0/lib/python3.7', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/lib-dynload', '/home/user/.pyenv/versions/3.7.0/lib/python3.7/site-packages'] + [ + '/home/user/.pyenv/versions/3.7.0/bin', + '/home/user/.pyenv/versions/3.7.0/lib/python37.zip', + '/home/user/.pyenv/versions/3.7.0/lib/python3.7', + '/home/user/.pyenv/versions/3.7.0/lib/python3.7/lib-dynload', + 
'/home/user/.pyenv/versions/3.7.0/lib/python3.7/site-packages' + ] """ path = [p for p in sys.path] @@ -62,6 +101,7 @@ def temp_path(): @contextmanager def cd(path): + # type: () -> Iterator[None] """Context manager to temporarily change working directories :param str path: The directory to move into @@ -88,6 +128,7 @@ def cd(path): @contextmanager def dummy_spinner(spin_type, text, **kwargs): + # type: (str, str, Any) class FakeClass(object): def __init__(self, text=""): self.text = text @@ -110,12 +151,13 @@ def write(self, text): @contextmanager def spinner( - spinner_name=None, - start_text=None, - handler_map=None, - nospin=False, - write_to_stdout=True, + spinner_name=None, # type: Optional[str] + start_text=None, # type: Optional[str] + handler_map=None, # type: Optional[Dict[str, Callable]] + nospin=False, # type: bool + write_to_stdout=True, # type: bool ): + # type: (...) -> ContextManager[TSpinner] """Get a spinner object or a dummy spinner to wrap a context. :param str spinner_name: A spinner type e.g. "dots" or "bouncingBar" (default: {"bouncingBar"}) @@ -165,6 +207,7 @@ def spinner( @contextmanager def atomic_open_for_write(target, binary=False, newline=None, encoding=None): + # type: (str, bool, Optional[str], Optional[str]) -> None """Atomically open `target` for writing. This is based on Lektor's `atomic_open()` utility, but simplified a lot @@ -173,8 +216,10 @@ def atomic_open_for_write(target, binary=False, newline=None, encoding=None): :param str target: Target filename to write :param bool binary: Whether to open in binary mode, default False - :param str newline: The newline character to use when writing, determined from system if not supplied - :param str encoding: The encoding to use when writing, defaults to system encoding + :param Optional[str] newline: The newline character to use when writing, determined + from system if not supplied. + :param Optional[str] encoding: The encoding to use when writing, defaults to system + encoding. 
How this works: @@ -234,7 +279,10 @@ def atomic_open_for_write(target, binary=False, newline=None, encoding=None): delete=False, ) # set permissions to 0644 - os.chmod(f.name, stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) + try: + os.chmod(f.name, stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) + except OSError: + pass try: yield f except BaseException: @@ -254,13 +302,19 @@ def atomic_open_for_write(target, binary=False, newline=None, encoding=None): @contextmanager -def open_file(link, session=None, stream=True): +def open_file( + link, # type: Union[_T, str] + session=None, # type: Optional[Session] + stream=True, # type: bool +): + # type: (...) -> ContextManager[Union[IO[bytes], Urllib3_HTTPResponse, Urllib_HTTPResponse]] """ Open local or remote file for reading. - :type link: pip._internal.index.Link or str - :type session: requests.Session - :param bool stream: Try to stream if remote, default True + :param pip._internal.index.Link link: A link object from resolving dependencies with + pip, or else a URL. + :param Optional[Session] session: A :class:`~requests.Session` instance + :param bool stream: Whether to stream the content if remote, default True :raises ValueError: If link points to a local directory. 
:return: a context manager to the opened file-like object """ @@ -286,7 +340,7 @@ def open_file(link, session=None, stream=True): headers = {"Accept-Encoding": "identity"} if not session: try: - from requests import Session + from requests import Session # noqa except ImportError: session = None else: @@ -302,7 +356,7 @@ def open_file(link, session=None, stream=True): yield result finally: if raw: - conn = getattr(raw, "_connection") + conn = raw._connection if conn is not None: conn.close() result.close() @@ -310,6 +364,7 @@ def open_file(link, session=None, stream=True): @contextmanager def replaced_stream(stream_name): + # type: (str) -> Iterator[IO[Text]] """ Context manager to temporarily swap out *stream_name* with a stream wrapper. @@ -336,6 +391,7 @@ def replaced_stream(stream_name): @contextmanager def replaced_streams(): + # type: () -> Iterator[Tuple[IO[Text], IO[Text]]] """ Context manager to replace both ``sys.stdout`` and ``sys.stderr`` using ``replaced_stream`` diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 54e0d2a062..460e142bb7 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -1,19 +1,23 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals +import atexit import io +import itertools import json import locale import logging import os import subprocess import sys +import threading from collections import OrderedDict from functools import partial from itertools import islice, tee from weakref import WeakKeyDictionary import six +from six.moves.queue import Empty, Queue from .cmdparse import Script from .compat import ( @@ -21,6 +25,8 @@ Path, StringIO, TimeoutError, + _fs_decode_errors, + _fs_encode_errors, fs_str, is_bytes, partialmethod, @@ -58,7 +64,7 @@ class WindowsError(OSError): if MYPY_RUNNING: - from typing import Any, Dict, List, Optional, Union + from typing import Any, Dict, Generator, IO, List, Optional, Text, Tuple, Union from 
.spin import VistirSpinner @@ -66,8 +72,7 @@ def _get_logger(name=None, level="ERROR"): # type: (Optional[str], str) -> logging.Logger if not name: name = __name__ - if isinstance(level, six.string_types): - level = getattr(logging, level.upper()) + level = getattr(logging, level.upper()) logger = logging.getLogger(name) logger.setLevel(level) formatter = logging.Formatter( @@ -83,8 +88,9 @@ def shell_escape(cmd): # type: (Union[str, List[str]]) -> str """Escape strings for use in :func:`~subprocess.Popen` and :func:`run`. - This is a passthrough method for instantiating a :class:`~vistir.cmdparse.Script` - object which can be used to escape commands to output as a single string. + This is a passthrough method for instantiating a + :class:`~vistir.cmdparse.Script` object which can be used to escape + commands to output as a single string. """ cmd = Script.parse(cmd) return cmd.cmdify() @@ -92,14 +98,25 @@ def shell_escape(cmd): def unnest(elem): # type: (Iterable) -> Any - """Flatten an arbitrarily nested iterable + """Flatten an arbitrarily nested iterable. 
:param elem: An iterable to flatten :type elem: :class:`~collections.Iterable` - >>> nested_iterable = (1234, (3456, 4398345, (234234)), (2396, (23895750, 9283798, 29384, (289375983275, 293759, 2347, (2098, 7987, 27599))))) + >>> nested_iterable = ( + 1234, (3456, 4398345, (234234)), ( + 2396, ( + 23895750, 9283798, 29384, ( + 289375983275, 293759, 2347, ( + 2098, 7987, 27599 + ) + ) + ) + ) + ) >>> list(vistir.misc.unnest(nested_iterable)) - [1234, 3456, 4398345, 234234, 2396, 23895750, 9283798, 29384, 289375983275, 293759, 2347, 2098, 7987, 27599] + [1234, 3456, 4398345, 234234, 2396, 23895750, 9283798, 29384, 289375983275, 293759, + 2347, 2098, 7987, 27599] """ if isinstance(elem, Iterable) and not isinstance(elem, six.string_types): @@ -127,14 +144,19 @@ def _is_iterable(elem): def dedup(iterable): # type: (Iterable) -> Iterable - """Deduplicate an iterable object like iter(set(iterable)) but - order-reserved. - """ + """Deduplicate an iterable object like iter(set(iterable)) but order- + preserved.""" return iter(OrderedDict.fromkeys(iterable)) -def _spawn_subprocess(script, env=None, block=True, cwd=None, combine_stderr=True): - # type: (Union[str, List[str]], Optional[Dict[str, str], bool, Optional[str], bool]) -> subprocess.Popen +def _spawn_subprocess( + script, # type: Union[str, List[str]] + env=None, # type: Optional[Dict[str, str]] + block=True, # type: bool + cwd=None, # type: Optional[Union[str, Path]] + combine_stderr=True, # type: bool +): + # type: (...) 
-> subprocess.Popen from distutils.spawn import find_executable if not env: @@ -147,6 +169,10 @@ def _spawn_subprocess(script, env=None, block=True, cwd=None, combine_stderr=Tru "stderr": subprocess.PIPE if not combine_stderr else subprocess.STDOUT, "shell": False, } + if sys.version_info[:2] > (3, 5): + options.update({"universal_newlines": True, "encoding": "utf-8"}) + elif os.name != "nt": + options["universal_newlines"] = True if not block: options["stdin"] = subprocess.PIPE if cwd: @@ -170,79 +196,298 @@ def _spawn_subprocess(script, env=None, block=True, cwd=None, combine_stderr=Tru return subprocess.Popen(script.cmdify(), **options) -def _read_streams(stream_dict): - results = {} - for outstream in stream_dict.keys(): - stream = stream_dict[outstream] - if not stream: - results[outstream] = None - continue - line = to_text(stream.readline()) - if not line: - results[outstream] = None - continue - line = to_text("{0}".format(line.rstrip())) - results[outstream] = line - return results - - -def get_stream_results(cmd_instance, verbose, maxlen, spinner=None, stdout_allowed=False): - stream_results = {"stdout": [], "stderr": []} - streams = {"stderr": cmd_instance.stderr, "stdout": cmd_instance.stdout} - while True: - stream_contents = _read_streams(streams) - stdout_line = stream_contents["stdout"] - stderr_line = stream_contents["stderr"] - if not (stdout_line or stderr_line): - break - last_changed = 0 - display_line = "" - for stream_name in stream_contents.keys(): - if stream_contents[stream_name] and stream_name in stream_results: - line = stream_contents[stream_name] - stream_results[stream_name].append(line) - display_line = ( - fs_str("{0}".format(line)) - if stream_name == "stderr" - else display_line - ) - if display_line and last_changed > 10: - last_changed = 0 - display_line = "" - elif display_line: - last_changed += 1 - if len(display_line) > maxlen: - display_line = "{0}...".format(display_line[:maxlen]) +class SubprocessStreamWrapper(object): + 
def __init__( + self, + display_stderr_maxlen=200, # type: int + display_line_for_loops=20, # type: int + subprocess=None, # type: subprocess.Popen + spinner=None, # type: Optional[VistirSpinner] + verbose=False, # type: bool + stdout_allowed=False, # type: bool + ): + # type: (...) -> None + stdout_encoding = None + stderr_encoding = None + preferred_encoding = getpreferredencoding() + if subprocess is not None: + stdout_encoding = self.get_subprocess_encoding(subprocess, "stdout") + stderr_encoding = self.get_subprocess_encoding(subprocess, "stderr") + self.stdout_encoding = stdout_encoding or preferred_encoding + self.stderr_encoding = stderr_encoding or preferred_encoding + self.stdout_lines = [] + self.text_stdout_lines = [] + self.stderr_lines = [] + self.text_stderr_lines = [] + self.display_line = "" + self.display_line_loops_displayed = 0 + self.display_line_shown_for_loops = display_line_for_loops + self.display_line_max_len = display_stderr_maxlen + self.spinner = spinner + self.stdout_allowed = stdout_allowed + self.verbose = verbose + self._iterated_stdout = None + self._iterated_stderr = None + self._subprocess = subprocess + self._queues = { + "streams": Queue(), + "lines": Queue(), + } + self._threads = { + stream_name: threading.Thread( + target=self.enqueue_stream, + args=(self._subprocess, stream_name, self._queues["streams"]), + ) + for stream_name in ("stdout", "stderr") + } + self._threads["watcher"] = threading.Thread( + target=self.process_output_lines, + args=(self._queues["streams"], self._queues["lines"]), + ) + self.start_threads() + + def enqueue_stream(self, proc, stream_name, queue): + # type: (subprocess.Popen, str, Queue) -> None + if not getattr(proc, stream_name, None): + queue.put(("stderr", None)) + else: + for line in iter(getattr(proc, stream_name).readline, ""): + queue.put((stream_name, line)) + getattr(proc, stream_name).close() + + @property + def stderr(self): + return self._subprocess.stderr + + @property + def 
stdout(self): + return self._subprocess.stdout + + @classmethod + def get_subprocess_encoding(cls, cmd_instance, stream_name): + # type: (subprocess.Popen, str) -> Optional[str] + stream = getattr(cmd_instance, stream_name, None) + if stream is not None: + return get_output_encoding(getattr(stream, "encoding", None)) + return None + + @property + def stdout_iter(self): + if self._iterated_stdout is None and self.stdout: + self._iterated_stdout = iter(self.stdout.readline, "") + return self._iterated_stdout + + @property + def stderr_iter(self): + if self._iterated_stderr is None and self.stderr: + self._iterated_stderr = iter(self.stderr.readline, "") + return self._iterated_stderr + + def _decode_line(self, line, encoding): + # type: (Union[str, bytes], str) -> str + if isinstance(line, six.binary_type): + line = to_text( + line.decode(encoding, errors=_fs_decode_errors).encode( + "utf-8", errors=_fs_encode_errors + ), + errors="backslashreplace", + ) + else: + line = to_text(line, encoding=encoding, errors=_fs_encode_errors) + return line + + def start_threads(self): + for thread in self._threads.values(): + thread.daemon = True + thread.start() + + @property + def subprocess(self): + return self._subprocess + + @property + def out(self): + # type: () -> str + return getattr(self.subprocess, "out", "") + + @out.setter + def out(self, value): + # type: (str) -> None + self._subprocess.out = value + + @property + def err(self): + # type: () -> str + return getattr(self.subprocess, "err", "") + + @err.setter + def err(self, value): + # type: (str) -> None + self._subprocess.err = value + + def poll(self): + # type: () -> Optional[int] + return self.subprocess.poll() + + def wait(self, timeout=None): + # type: (self, Optional[int]) -> Optional[int] + kwargs = {} + if sys.version_info[0] >= 3: + kwargs = {"timeout": timeout} + result = self._subprocess.wait(**kwargs) + self.gather_output() + return result + + @property + def returncode(self): + # type: () -> 
Optional[int] + return self.subprocess.returncode + + @property + def text_stdout(self): + return os.linesep.join(self.text_stdout_lines) + + @property + def text_stderr(self): + return os.linesep.join(self.text_stderr_lines) + + @property + def stderr_closed(self): + # type: () -> bool + return self.stderr is None or (self.stderr is not None and self.stderr.closed) + + @property + def stdout_closed(self): + # type: () -> bool + return self.stdout is None or (self.stdout is not None and self.stdout.closed) + + @property + def running(self): + # type: () -> bool + return any(t.is_alive() for t in self._threads.values()) or not all( + [self.stderr_closed, self.stdout_closed, self.subprocess_finished] + ) + + @property + def subprocess_finished(self): + if self._subprocess is None: + return False + return ( + self._subprocess.poll() is not None or self._subprocess.returncode is not None + ) + + def update_display_line(self, new_line): + # type: () -> None + if self.display_line: + if new_line != self.display_line: + self.display_line_loops_displayed = 0 + new_line = fs_str("{}".format(new_line)) + if len(new_line) > self.display_line_max_len: + new_line = "{}...".format(new_line[: self.display_line_max_len]) + self.display_line = new_line + elif self.display_line_loops_displayed >= self.display_line_shown_for_loops: + self.display_line = "" + self.display_line_loops_displayed = 0 + else: + self.display_line_loops_displayed += 1 + return None + + @classmethod + def check_line_content(cls, line): + # type: (Optional[str]) -> bool + return line is not None and line != "" + + def get_line(self, queue): + # type: (Queue) -> Tuple[Optional[str], ...] 
+ stream, result = None, None + try: + stream, result = queue.get_nowait() + except Empty: + result = None + return stream, result + + def process_output_lines(self, recv_queue, line_queue): + # type: (Queue, Queue) -> None + stream, line = self.get_line(recv_queue) + while self.poll() is None or line is not None: + if self.check_line_content(line): + line = to_text("{}".format(line).rstrip()) + line_queue.put((stream, line)) + stream, line = self.get_line(recv_queue) + + def gather_output(self, spinner=None, stdout_allowed=False, verbose=False): + # type: (Optional[VistirSpinner], bool, bool) -> None + if not getattr(self._subprocess, "out", None): + self._subprocess.out = "" + if not getattr(self._subprocess, "err", None): + self._subprocess.err = "" + if not self._queues["streams"].empty(): + self.process_output_lines(self._queues["streams"], self._queues["lines"]) + while not self._queues["lines"].empty(): + try: + stream_name, line = self._queues["lines"].get() + except Empty: + if not self._threads["watcher"].is_active(): + break + pass + if stream_name == "stdout": + text_line = self._decode_line(line, self.stdout_encoding) + self.text_stdout_lines.append(text_line) + self.out += "{}\n".format(text_line) if verbose: - use_stderr = not stdout_allowed or stream_name != "stdout" - if spinner: - target = spinner.stderr if use_stderr else spinner.stdout - spinner.hide_and_write(display_line, target=target) - else: - target = sys.stderr if use_stderr else sys.stdout - target.write(display_line) - target.flush() - if spinner: - spinner.text = to_native_string( - "{0} {1}".format(spinner.text, display_line) + _write_subprocess_result( + line, "stdout", spinner=spinner, stdout_allowed=stdout_allowed ) - continue - return stream_results + else: + text_err = self._decode_line(line, self.stderr_encoding) + self.text_stderr_lines.append(text_err) + self.update_display_line(line) + self.err += "{}\n".format(text_err) + _write_subprocess_result( + line, "stderr", 
spinner=spinner, stdout_allowed=stdout_allowed + ) + if spinner: + spinner.text = to_native_string( + "{} {}".format(spinner.text, self.display_line) + ) + self.out = self.out.strip() + self.err = self.err.strip() + + +def _write_subprocess_result(result, stream_name, spinner=None, stdout_allowed=False): + # type: (str, str, Optional[VistirSpinner], bool) -> None + if not stdout_allowed and stream_name == "stdout": + stream_name = "stderr" + if spinner: + spinner.hide_and_write(result, target=getattr(spinner, stream_name)) + else: + target_stream = getattr(sys, stream_name) + target_stream.write(result) + target_stream.flush() + return None + + +def attach_stream_reader( + cmd_instance, verbose, maxlen, spinner=None, stdout_allowed=False +): + streams = SubprocessStreamWrapper( + subprocess=cmd_instance, + display_stderr_maxlen=maxlen, + spinner=spinner, + verbose=verbose, + stdout_allowed=stdout_allowed, + ) + streams.gather_output(spinner=spinner, verbose=verbose, stdout_allowed=stdout_allowed) + return streams def _handle_nonblocking_subprocess(c, spinner=None): - # type: (subprocess.Popen, VistirSpinner) -> subprocess.Popen - try: + while c.running: c.wait() - finally: - if c.stdout: - c.stdout.close() - if c.stderr: - c.stderr.close() if spinner: - if c.returncode > 0: + if c.returncode != 0: spinner.fail(to_native_string("Failed...cleaning up...")) - if not os.name == "nt": + elif c.returncode == 0 and not os.name == "nt": spinner.ok(to_native_string("✔ Complete")) else: spinner.ok(to_native_string("Complete")) @@ -284,7 +529,7 @@ def _create_subprocess( spinner_orig_text = spinner.text if not spinner_orig_text and start_text is not None: spinner_orig_text = start_text - stream_results = get_stream_results( + c = attach_stream_reader( c, verbose=verbose, maxlen=display_limit, @@ -292,10 +537,6 @@ def _create_subprocess( stdout_allowed=write_to_stdout, ) _handle_nonblocking_subprocess(c, spinner) - output = stream_results["stdout"] - err = 
stream_results["stderr"] - c.out = "\n".join(output) if output else "" - c.err = "\n".join(err) if err else "" else: try: c.out, c.err = c.communicate() @@ -303,10 +544,6 @@ def _create_subprocess( c.terminate() c.out, c.err = c.communicate() raise - if not block: - c.wait() - c.out = to_text("{0}".format(c.out)) if c.out else fs_str("") - c.err = to_text("{0}".format(c.err)) if c.err else fs_str("") if not return_object: return c.out.strip(), c.err.strip() return c @@ -330,14 +567,19 @@ def run( :param list cmd: A list representing the command you want to run. :param dict env: Additional environment settings to pass through to the subprocess. :param bool return_object: When True, returns the whole subprocess instance - :param bool block: When False, returns a potentially still-running :class:`subprocess.Popen` instance + :param bool block: When False, returns a potentially still-running + :class:`subprocess.Popen` instance :param str cwd: Current working directory contect to use for spawning the subprocess. :param bool verbose: Whether to print stdout in real time when non-blocking. :param bool nospin: Whether to disable the cli spinner. - :param str spinner_name: The name of the spinner to use if enabled, defaults to bouncingBar - :param bool combine_stderr: Optionally merge stdout and stderr in the subprocess, false if nonblocking. - :param int dispay_limit: The max width of output lines to display when using a spinner. - :param bool write_to_stdout: Whether to write to stdout when using a spinner, default True. + :param str spinner_name: The name of the spinner to use if enabled, defaults to + bouncingBar + :param bool combine_stderr: Optionally merge stdout and stderr in the subprocess, + false if nonblocking. + :param int dispay_limit: The max width of output lines to display when using a + spinner. + :param bool write_to_stdout: Whether to write to stdout when using a spinner, + defaults to True. 
:returns: A 2-tuple of (output, error) or a :class:`subprocess.Popen` object. .. Warning:: Merging standard out and standarad error in a nonblocking subprocess @@ -346,11 +588,13 @@ def run( """ _env = os.environ.copy() + _env["PYTHONIOENCODING"] = str("utf-8") + _env["PYTHONUTF8"] = str("1") if env: _env.update(env) if six.PY2: - fs_encode = partial(to_bytes, encoding=locale_encoding) - _env = {fs_encode(k): fs_encode(v) for k, v in _env.items()} + _fs_encode = partial(to_bytes, encoding=locale_encoding) + _env = {_fs_encode(k): _fs_encode(v) for k, v in _env.items()} else: _env = {k: fs_str(v) for k, v in _env.items()} if not spinner_name: @@ -386,14 +630,21 @@ def run( def load_path(python): - """Load the :mod:`sys.path` from the given python executable's environment as json + """Load the :mod:`sys.path` from the given python executable's environment + as json. :param str python: Path to a valid python executable - :return: A python representation of the `sys.path` value of the given python executable. + :return: A python representation of the `sys.path` value of the given python + executable. 
:rtype: list >>> load_path("/home/user/.virtualenvs/requirementslib-5MhGuG3C/bin/python") - ['', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python37.zip', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7/lib-dynload', '/home/user/.pyenv/versions/3.7.0/lib/python3.7', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7/site-packages', '/home/user/git/requirementslib/src'] + ['', '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python37.zip', + '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7', + '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7/lib-dynload', + '/home/user/.pyenv/versions/3.7.0/lib/python3.7', + '/home/user/.virtualenvs/requirementslib-5MhGuG3C/lib/python3.7/site-packages', + '/home/user/git/requirementslib/src'] """ python = Path(python).as_posix() @@ -407,7 +658,7 @@ def load_path(python): def partialclass(cls, *args, **kwargs): - """Returns a partially instantiated class + """Returns a partially instantiated class. :return: A partial class instance :rtype: cls @@ -417,7 +668,15 @@ def partialclass(cls, *args, **kwargs): >>> source(name="pypi") >>> source.__dict__ - mappingproxy({'__module__': '__main__', '__dict__': , '__weakref__': , '__doc__': None, '__init__': functools.partialmethod(, , url='https://pypi.org/simple')}) + mappingproxy({ + '__module__': '__main__', + '__dict__': , + '__weakref__': , + '__doc__': None, + '__init__': functools.partialmethod( + , , url='https://pypi.org/simple' + ) + }) >>> new_source = source(name="pypi") >>> new_source <__main__.Source object at 0x7f23af189b38> @@ -526,8 +785,8 @@ def to_text(string, encoding="utf-8", errors=None): def divide(n, iterable): - """ - split an iterable into n groups, per https://more-itertools.readthedocs.io/en/latest/api.html#grouping + """split an iterable into n groups, per https://more- + itertools.readthedocs.io/en/latest/api.html#grouping. 
:param int n: Number of unique groups :param iter iterable: An iterable to split up @@ -578,11 +837,11 @@ def chunked(n, iterable): def getpreferredencoding(): - """Determine the proper output encoding for terminal rendering""" + """Determine the proper output encoding for terminal rendering.""" # Borrowed from Invoke # (see https://github.com/pyinvoke/invoke/blob/93af29d/invoke/runners.py#L881) - _encoding = locale.getpreferredencoding(False) + _encoding = sys.getdefaultencoding() or locale.getpreferredencoding(False) if six.PY2 and not sys.platform == "win32": _default_encoding = locale.getdefaultlocale()[1] if _default_encoding is not None: @@ -594,8 +853,7 @@ def getpreferredencoding(): def get_output_encoding(source_encoding): - """ - Given a source encoding, determine the preferred output encoding. + """Given a source encoding, determine the preferred output encoding. :param str source_encoding: The encoding of the source material. :returns: The output encoding to decode to. @@ -630,11 +888,13 @@ def _encode(output, encoding=None, errors=None, translation_map=None): def decode_for_output(output, target_stream=None, translation_map=None): - """Given a string, decode it for output to a terminal + """Given a string, decode it for output to a terminal. :param str output: A string to print to a terminal - :param target_stream: A stream to write to, we will encode to target this stream if possible. - :param dict translation_map: A mapping of unicode character ordinals to replacement strings. + :param target_stream: A stream to write to, we will encode to target this stream if + possible. + :param dict translation_map: A mapping of unicode character ordinals to replacement + strings. 
:return: A re-encoded string using the preferred encoding :rtype: str """ @@ -657,8 +917,7 @@ def decode_for_output(output, target_stream=None, translation_map=None): def get_canonical_encoding_name(name): # type: (str) -> str - """ - Given an encoding name, get the canonical name from a codec lookup. + """Given an encoding name, get the canonical name from a codec lookup. :param str name: The name of the codec to lookup :return: The canonical version of the codec name @@ -696,8 +955,8 @@ def _get_binary_buffer(stream): def get_wrapped_stream(stream, encoding=None, errors="replace"): - """ - Given a stream, wrap it in a `StreamWrapper` instance and return the wrapped stream. + """Given a stream, wrap it in a `StreamWrapper` instance and return the + wrapped stream. :param stream: A stream instance to wrap :param str encoding: The encoding to use for the stream @@ -712,7 +971,7 @@ def get_wrapped_stream(stream, encoding=None, errors="replace"): if stream is not None and encoding is None: encoding = "utf-8" if not encoding: - encoding = get_output_encoding(stream) + encoding = get_output_encoding(getattr(stream, "encoding", None)) else: encoding = get_canonical_encoding_name(encoding) return StreamWrapper(stream, encoding, errors, line_buffering=True) @@ -720,10 +979,8 @@ def get_wrapped_stream(stream, encoding=None, errors="replace"): class StreamWrapper(io.TextIOWrapper): - """ - This wrapper class will wrap a provided stream and supply an interface - for compatibility. - """ + """This wrapper class will wrap a provided stream and supply an interface + for compatibility.""" def __init__(self, stream, encoding, errors, line_buffering=True, **kwargs): self._stream = stream = _StreamProvider(stream) @@ -907,7 +1164,7 @@ def lookup(): def get_text_stream(stream="stdout", encoding=None): - """Retrieve a unicode stream wrapper around **sys.stdout** or **sys.stderr**. + """Retrieve a utf-8 stream wrapper around **sys.stdout** or **sys.stderr**. 
:param str stream: The name of the stream to wrap from the :mod:`sys` module. :param str encoding: An optional encoding to use. @@ -959,7 +1216,8 @@ def get_text_stdin(): def replace_with_text_stream(stream_name): - """Given a stream name, replace the target stream with a text-converted equivalent + """Given a stream name, replace the target stream with a text-converted + equivalent. :param str stream_name: The name of a target stream, such as **stdout** or **stderr** :return: None @@ -984,7 +1242,8 @@ def _can_use_color(stream=None, color=None): def echo(text, fg=None, bg=None, style=None, file=None, err=False, color=None): - """Write the given text to the provided stream or **sys.stdout** by default. + """Write the given text to the provided stream or **sys.stdout** by + default. Provides optional foreground and background colors from the ansi defaults: **grey**, **red**, **green**, **yellow**, **blue**, **magenta**, **cyan** @@ -1002,7 +1261,7 @@ def echo(text, fg=None, bg=None, style=None, file=None, err=False, color=None): """ if file and not hasattr(file, "write"): - raise TypeError("Expected a writable stream, received {0!r}".format(file)) + raise TypeError("Expected a writable stream, received {!r}".format(file)) if not file: if err: file = _text_stderr() diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 25d29eb9ca..73ef1cb6e4 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -8,7 +8,9 @@ import posixpath import shutil import stat +import sys import time +import unicodedata import warnings import six @@ -39,7 +41,27 @@ if IS_TYPE_CHECKING: - from typing import Optional, Callable, Text, ByteString, AnyStr + from types import TracebackType + from typing import ( + Any, + AnyStr, + ByteString, + Callable, + Generator, + Iterator, + List, + Optional, + Text, + Tuple, + Type, + Union, + ) + + if six.PY3: + TPath = os.PathLike + else: + TPath = Union[str, bytes] + TFunc = Callable[..., Any] __all__ = [ 
"check_for_unc_path", @@ -72,16 +94,18 @@ def unicode_path(path): + # type: (TPath) -> Text # Paths are supposed to be represented as unicode here - if six.PY2 and not isinstance(path, six.text_type): + if six.PY2 and isinstance(path, six.binary_type): return path.decode(_fs_encoding) return path def native_path(path): - if six.PY2 and not isinstance(path, bytes): + # type: (TPath) -> str + if six.PY2 and isinstance(path, six.text_type): return path.encode(_fs_encoding) - return path + return str(path) # once again thank you django... @@ -91,20 +115,18 @@ def native_path(path): else: def abspathu(path): - """ - Version of os.path.abspath that uses the unicode representation - of the current working directory, thus avoiding a UnicodeDecodeError - in join when the cwd has non-ASCII characters. - """ + # type: (TPath) -> Text + """Version of os.path.abspath that uses the unicode representation of + the current working directory, thus avoiding a UnicodeDecodeError in + join when the cwd has non-ASCII characters.""" if not os.path.isabs(path): path = os.path.join(os.getcwdu(), path) return os.path.normpath(path) def normalize_path(path): - # type: (AnyStr) -> AnyStr - """ - Return a case-normalized absolute variable-expanded path. + # type: (TPath) -> Text + """Return a case-normalized absolute variable-expanded path. :param str path: The non-normalized path :return: A normalized, expanded, case-normalized path @@ -121,9 +143,8 @@ def normalize_path(path): def is_in_path(path, parent): - # type: (AnyStr, AnyStr) -> bool - """ - Determine if the provided full path is in the given parent root. + # type: (TPath, TPath) -> bool + """Determine if the provided full path is in the given parent root. :param str path: The full path to check the location of. 
:param str parent: The parent path to check for membership in @@ -131,11 +152,11 @@ def is_in_path(path, parent): :rtype: bool """ - return normalize_path(str(path)).startswith(normalize_path(str(parent))) + return normalize_path(path).startswith(normalize_path(parent)) def normalize_drive(path): - # type: (str) -> Text + # type: (TPath) -> Text """Normalize drive in path so they stay consistent. This currently only affects local drives on Windows, which can be @@ -144,8 +165,10 @@ def normalize_drive(path): """ from .misc import to_text - if os.name != "nt" or not isinstance(path, six.string_types): - return path + if os.name != "nt" or not ( + isinstance(path, six.string_types) or getattr(path, "__fspath__", None) + ): + return path # type: ignore drive, tail = os.path.splitdrive(path) # Only match (lower cased) local drives (e.g. 'c:'), not UNC mounts. @@ -156,7 +179,7 @@ def normalize_drive(path): def path_to_url(path): - # type: (str) -> Text + # type: (TPath) -> Text """Convert the supplied local path to a file uri. 
:param str path: A string pointing to or representing a local path @@ -169,7 +192,7 @@ def path_to_url(path): from .misc import to_bytes if not path: - return path + return path # type: ignore normalized_path = Path(normalize_drive(os.path.abspath(path))).as_posix() if os.name == "nt" and normalized_path[1] == ":": drive, _, path = normalized_path.partition(":") @@ -177,18 +200,17 @@ def path_to_url(path): # XXX: actually part of a surrogate pair, but were just incidentally # XXX: passed in as a piece of a filename quoted_path = quote(fs_encode(path)) - return fs_decode("file:///{0}:{1}".format(drive, quoted_path)) + return fs_decode("file:///{}:{}".format(drive, quoted_path)) # XXX: This is also here to help deal with incidental dangling surrogates # XXX: on linux, by making sure they are preserved during encoding so that # XXX: we can urlencode the backslash correctly bytes_path = to_bytes(normalized_path, errors="backslashreplace") - return fs_decode("file://{0}".format(quote(bytes_path))) + return fs_decode("file://{}".format(quote(bytes_path))) def url_to_path(url): - # type: (str) -> ByteString - """ - Convert a valid file url to a local filesystem path + # type: (str) -> str + """Convert a valid file url to a local filesystem path. 
Follows logic taken from pip's equivalent function """ @@ -204,37 +226,41 @@ def url_to_path(url): def is_valid_url(url): - """Checks if a given string is an url""" + # type: (Union[str, bytes]) -> bool + """Checks if a given string is an url.""" from .misc import to_text if not url: - return url + return url # type: ignore pieces = urllib_parse.urlparse(to_text(url)) return all([pieces.scheme, pieces.netloc]) def is_file_url(url): - """Returns true if the given url is a file url""" + # type: (Any) -> bool + """Returns true if the given url is a file url.""" from .misc import to_text if not url: return False if not isinstance(url, six.string_types): try: - url = getattr(url, "url") + url = url.url except AttributeError: - raise ValueError("Cannot parse url from unknown type: {0!r}".format(url)) + raise ValueError("Cannot parse url from unknown type: {!r}".format(url)) url = to_text(url, encoding="utf-8") return urllib_parse.urlparse(url.lower()).scheme == "file" def is_readonly_path(fn): + # type: (TPath) -> bool """Check if a provided path exists and is readonly. - Permissions check is `bool(path.stat & stat.S_IREAD)` or `not os.access(path, os.W_OK)` + Permissions check is `bool(path.stat & stat.S_IREAD)` or `not + os.access(path, os.W_OK)` """ - fn = fs_encode(fn) + fn = fs_decode(fs_encode(fn)) if os.path.exists(fn): file_stat = os.stat(fn).st_mode return not bool(file_stat & stat.S_IWRITE) or not os.access(fn, os.W_OK) @@ -242,47 +268,35 @@ def is_readonly_path(fn): def mkdir_p(newdir, mode=0o777): - """Recursively creates the target directory and all of its parents if they do not - already exist. Fails silently if they do. + # type: (TPath, int) -> None + """Recursively creates the target directory and all of its parents if they + do not already exist. Fails silently if they do. 
:param str newdir: The directory path to ensure :raises: OSError if a file is encountered along the way """ - # http://code.activestate.com/recipes/82465-a-friendly-mkdir/ - - newdir = fs_encode(newdir) + newdir = fs_decode(fs_encode(newdir)) if os.path.exists(newdir): if not os.path.isdir(newdir): raise OSError( - "a file with the same name as the desired dir, '{0}', already exists.".format( + "a file with the same name as the desired dir, '{}', already exists.".format( fs_decode(newdir) ) ) - else: - head, tail = os.path.split(newdir) - # Make sure the tail doesn't point to the asame place as the head - curdir = fs_encode(".") - tail_and_head_match = ( - os.path.relpath(tail, start=os.path.basename(head)) == curdir - ) - if tail and not tail_and_head_match and not os.path.isdir(newdir): - target = os.path.join(head, tail) - if os.path.exists(target) and os.path.isfile(target): - raise OSError( - "A file with the same name as the desired dir, '{0}', already exists.".format( - fs_decode(newdir) - ) - ) - os.makedirs(os.path.join(head, tail), mode) + return None + os.makedirs(newdir, mode) def ensure_mkdir_p(mode=0o777): - """Decorator to ensure `mkdir_p` is called to the function's return value. - """ + # type: (int) -> Callable[Callable[..., Any], Callable[..., Any]] + """Decorator to ensure `mkdir_p` is called to the function's return + value.""" def decorator(f): + # type: (Callable[..., Any]) -> Callable[..., Any] @functools.wraps(f) def decorated(*args, **kwargs): + # type: () -> str path = f(*args, **kwargs) mkdir_p(path, mode=mode) return path @@ -296,6 +310,7 @@ def decorated(*args, **kwargs): def create_tracked_tempdir(*args, **kwargs): + # type: (Any, Any) -> str """Create a tracked temporary directory. This uses `TemporaryDirectory`, but does not remove the directory when @@ -313,6 +328,7 @@ def create_tracked_tempdir(*args, **kwargs): def create_tracked_tempfile(*args, **kwargs): + # type: (Any, Any) -> str """Create a tracked temporary file. 
This uses the `NamedTemporaryFile` construct, but does not remove the file @@ -326,6 +342,7 @@ def create_tracked_tempfile(*args, **kwargs): def _find_icacls_exe(): + # type: () -> Optional[Text] if os.name == "nt": paths = [ os.path.expandvars(r"%windir%\{0}").format(subdir) @@ -343,15 +360,14 @@ def _find_icacls_exe(): def set_write_bit(fn): # type: (str) -> None - """ - Set read-write permissions for the current user on the target path. Fail silently - if the path doesn't exist. + """Set read-write permissions for the current user on the target path. Fail + silently if the path doesn't exist. :param str fn: The target filename or path :return: None """ - fn = fs_encode(fn) + fn = fs_decode(fs_encode(fn)) if not os.path.exists(fn): return file_stat = os.stat(fn).st_mode @@ -367,9 +383,9 @@ def set_write_bit(fn): c = run( [ icacls_exe, - "''{0}''".format(fn), + "''{}''".format(fn), "/grant", - "{0}:WD".format(user_sid), + "{}:WD".format(user_sid), "/T", "/C", "/Q", @@ -396,8 +412,7 @@ def set_write_bit(fn): def rmtree(directory, ignore_errors=False, onerror=None): # type: (str, bool, Optional[Callable]) -> None - """ - Stand-in for :func:`~shutil.rmtree` with additional error-handling. + """Stand-in for :func:`~shutil.rmtree` with additional error-handling. This version of `rmtree` handles read-only paths, especially in the case of index files written by certain source control systems. 
@@ -411,20 +426,20 @@ def rmtree(directory, ignore_errors=False, onerror=None): Setting `ignore_errors=True` may cause this to silently fail to delete the path """ - directory = fs_encode(directory) + directory = fs_decode(fs_encode(directory)) if onerror is None: onerror = handle_remove_readonly try: shutil.rmtree(directory, ignore_errors=ignore_errors, onerror=onerror) - except (IOError, OSError, FileNotFoundError, PermissionError) as exc: + except (IOError, OSError, FileNotFoundError, PermissionError) as exc: # noqa:B014 # Ignore removal failures where the file doesn't exist if exc.errno != errno.ENOENT: raise def _wait_for_files(path): # pragma: no cover - """ - Retry with backoff up to 1 second to delete files from a directory. + # type: (Union[str, TPath]) -> Optional[List[TPath]] + """Retry with backoff up to 1 second to delete files from a directory. :param str path: The path to crawl to delete files from :return: A list of remaining paths or None @@ -446,7 +461,7 @@ def _wait_for_files(path): # pragma: no cover except FileNotFoundError as e: if e.errno == errno.ENOENT: return - except (OSError, IOError, PermissionError): + except (OSError, IOError, PermissionError): # noqa:B014 time.sleep(timeout) timeout *= 2 remaining.append(path) @@ -456,6 +471,7 @@ def _wait_for_files(path): # pragma: no cover def handle_remove_readonly(func, path, exc): + # type: (Callable[..., str], TPath, Tuple[Type[OSError], OSError, TracebackType]) -> None """Error handler for shutil.rmtree. 
Windows source repo folders are read-only by default, so this error handler @@ -480,7 +496,7 @@ def handle_remove_readonly(func, path, exc): set_write_bit(path) try: func(path) - except ( + except ( # noqa:B014 OSError, IOError, FileNotFoundError, @@ -503,7 +519,7 @@ def handle_remove_readonly(func, path, exc): remaining = _wait_for_files(path) try: func(path) - except (OSError, IOError, FileNotFoundError, PermissionError) as e: + except (OSError, IOError, FileNotFoundError, PermissionError) as e: # noqa:B014 if e.errno in PERM_ERRORS: if e.errno != errno.ENOENT: # File still exists warnings.warn(default_warning_message.format(path), ResourceWarning) @@ -513,10 +529,12 @@ def handle_remove_readonly(func, path, exc): def walk_up(bottom): + # type: (Union[TPath, str]) -> Generator[Tuple[str, List[str], List[str]], None, None] """Mimic os.walk, but walk 'up' instead of down the directory tree. + From: https://gist.github.com/zdavkeos/1098474 """ - bottom = os.path.realpath(bottom) + bottom = os.path.realpath(str(bottom)) # Get files in current dir. try: names = os.listdir(bottom) @@ -541,7 +559,8 @@ def walk_up(bottom): def check_for_unc_path(path): - """ Checks to see if a pathlib `Path` object is a unc path or not""" + # type: (Path) -> bool + """Checks to see if a pathlib `Path` object is a unc path or not.""" if ( os.name == "nt" and len(path.drive) > 2 @@ -554,6 +573,7 @@ def check_for_unc_path(path): def get_converted_relative_path(path, relative_to=None): + # type: (TPath, Optional[TPath]) -> str """Convert `path` to be relative. Given a vague relative path, return the path relative to the given @@ -609,11 +629,11 @@ def get_converted_relative_path(path, relative_to=None): def safe_expandvars(value): - """Call os.path.expandvars if value is a string, otherwise do nothing. 
- """ + # type: (TPath) -> str + """Call os.path.expandvars if value is a string, otherwise do nothing.""" if isinstance(value, six.string_types): return os.path.expandvars(value) - return value + return value # type: ignore class _TrackedTempfileWrapper(_TemporaryFileWrapper, object): diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index 64b615de9e..43821cb178 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -12,11 +12,31 @@ import colorama import six -from .compat import to_native_string +from .compat import IS_TYPE_CHECKING, to_native_string from .cursor import hide_cursor, show_cursor from .misc import decode_for_output, to_text from .termcolors import COLOR_MAP, COLORS, DISABLE_COLORS, colored +if IS_TYPE_CHECKING: + from typing import ( + Any, + Callable, + ContextManager, + Dict, + IO, + Optional, + Text, + Type, + TypeVar, + Union, + ) + + TSignalMap = Dict[ + Type[signal.SIGINT], + Callable[..., int, str, Union["DummySpinner", "VistirSpinner"]], + ] + _T = TypeVar("_T", covariant=True) + try: import yaspin except ImportError: # pragma: no cover @@ -66,6 +86,7 @@ def handler(signum, frame, spinner): class DummySpinner(object): def __init__(self, text="", **kwargs): + # type: (str, Any) -> None if DISABLE_COLORS: colorama.init() self.text = to_native_string(decode_output(text)) if text else "" @@ -108,6 +129,7 @@ def _close_output_buffer(self): pass def fail(self, exitcode=1, text="FAIL"): + # type: (int, str) -> None if text is not None and text != "None": if self.write_to_stdout: self.write(text) @@ -116,6 +138,7 @@ def fail(self, exitcode=1, text="FAIL"): self._close_output_buffer() def ok(self, text="OK"): + # type: (str) -> int if text is not None and text != "None": if self.write_to_stdout: self.write(text) @@ -125,6 +148,7 @@ def ok(self, text="OK"): return 0 def hide_and_write(self, text, target=None): + # type: (str, Optional[str]) -> None if not target: target = self.stdout if text is None or 
isinstance(text, six.string_types) and text == "None": @@ -136,6 +160,7 @@ def hide_and_write(self, text, target=None): self._show_cursor(target=target) def write(self, text=None): + # type: (Optional[str]) -> None if not self.write_to_stdout: return self.write_err(text) if text is None or isinstance(text, six.string_types) and text == "None": @@ -151,6 +176,7 @@ def write(self, text=None): stdout.write(CLEAR_LINE) def write_err(self, text=None): + # type: (Optional[str]) -> None if text is None or isinstance(text, six.string_types) and text == "None": pass text = to_text(text) @@ -168,10 +194,12 @@ def write_err(self, text=None): @staticmethod def _hide_cursor(target=None): + # type: (Optional[IO]) -> None pass @staticmethod def _show_cursor(target=None): + # type: (Optional[IO]) -> None pass @@ -183,6 +211,7 @@ class VistirSpinner(SpinBase): "A spinner class for handling spinners on windows and posix." def __init__(self, *args, **kwargs): + # type: (Any, Any) """ Get a spinner object or a dummy spinner to wrap a context. 
@@ -196,7 +225,7 @@ def __init__(self, *args, **kwargs): self.handler = handler colorama.init() - sigmap = {} + sigmap = {} # type: TSignalMap if handler: sigmap.update({signal.SIGINT: handler, signal.SIGTERM: handler}) handler_map = kwargs.pop("handler_map", {}) @@ -218,11 +247,15 @@ def __init__(self, *args, **kwargs): self.out_buff = StringIO() self.write_to_stdout = write_to_stdout self.is_dummy = bool(yaspin is None) + self._stop_spin = None # type: Optional[threading.Event] + self._hide_spin = None # type: Optional[threading.Event] + self._spin_thread = None # type: Optional[threading.Thread] super(VistirSpinner, self).__init__(*args, **kwargs) if DISABLE_COLORS: colorama.deinit() def ok(self, text=u"OK", err=False): + # type: (str, bool) -> None """Set Ok (success) finalizer to a spinner.""" # Do not display spin text for ok state self._text = None @@ -232,6 +265,7 @@ def ok(self, text=u"OK", err=False): self._freeze(_text, err=err) def fail(self, text=u"FAIL", err=False): + # type: (str, bool) -> None """Set fail finalizer to a spinner.""" # Do not display spin text for fail state self._text = None @@ -241,6 +275,7 @@ def fail(self, text=u"FAIL", err=False): self._freeze(_text, err=err) def hide_and_write(self, text, target=None): + # type: (str, Optional[str]) -> None if not target: target = self.stdout if text is None or isinstance(text, six.string_types) and text == u"None": @@ -252,6 +287,7 @@ def hide_and_write(self, text, target=None): self._show_cursor(target=target) def write(self, text): # pragma: no cover + # type: (str) -> None if not self.write_to_stdout: return self.write_err(text) stdout = self.stdout @@ -266,6 +302,7 @@ def write(self, text): # pragma: no cover self.out_buff.write(text) def write_err(self, text): # pragma: no cover + # type: (str) -> None """Write error text in the terminal without breaking the spinner.""" stderr = self.stderr if self.stderr.closed: @@ -279,6 +316,7 @@ def write_err(self, text): # pragma: no cover 
self.out_buff.write(decode_output(text, target_stream=self.out_buff)) def start(self): + # type: () -> None if self._sigmap: self._register_signal_handlers() @@ -292,6 +330,7 @@ def start(self): self._spin_thread.start() def stop(self): + # type: () -> None if self._dfl_sigmap: # Reset registered signal handlers to default ones self._reset_signal_handlers() @@ -314,6 +353,7 @@ def stop(self): self.out_buff.close() def _freeze(self, final_text, err=False): + # type: (str, bool) -> None """Stop spinner, compose last frame and 'freeze' it.""" if not final_text: final_text = "" @@ -330,12 +370,14 @@ def _freeze(self, final_text, err=False): target.write(self._last_frame) def _compose_color_func(self): + # type: () -> Callable[..., str] fn = functools.partial( colored, color=self._color, on_color=self._on_color, attrs=list(self._attrs) ) return fn def _compose_out(self, frame, mode=None): + # type: (str, Optional[str]) -> Text # Ensure Unicode input frame = to_text(frame) @@ -355,6 +397,7 @@ def _compose_out(self, frame, mode=None): return out def _spin(self): + # type: () -> None target = self.stdout if self.write_to_stdout else self.stderr clear_fn = self._clear_line if self.write_to_stdout else self._clear_err while not self._stop_spin.is_set(): @@ -379,6 +422,7 @@ def _spin(self): target.write("\b") def _register_signal_handlers(self): + # type: () -> None # SIGKILL cannot be caught or ignored, and the receiving # process cannot perform any clean-up upon receiving this # signal. 
@@ -411,31 +455,37 @@ def _register_signal_handlers(self): signal.signal(sig, sig_handler) def _reset_signal_handlers(self): + # type: () -> None for sig, sig_handler in self._dfl_sigmap.items(): signal.signal(sig, sig_handler) @staticmethod def _hide_cursor(target=None): + # type: (Optional[IO]) -> None if not target: target = sys.stdout hide_cursor(stream=target) @staticmethod def _show_cursor(target=None): + # type: (Optional[IO]) -> None if not target: target = sys.stdout show_cursor(stream=target) @staticmethod def _clear_err(): + # type: () -> None sys.stderr.write(CLEAR_LINE) @staticmethod def _clear_line(): + # type: () -> None sys.stdout.write(CLEAR_LINE) def create_spinner(*args, **kwargs): + # type: (Any, Any) -> Union[DummySpinner, VistirSpinner] nospin = kwargs.pop("nospin", False) use_yaspin = kwargs.pop("use_yaspin", not nospin) if nospin or not use_yaspin: diff --git a/tasks/release.py b/tasks/release.py index 20fd5a44e3..a5dbc1bf9b 100644 --- a/tasks/release.py +++ b/tasks/release.py @@ -3,6 +3,7 @@ import os import pathlib import re +import sys import invoke @@ -71,10 +72,29 @@ def _render_log(): return rendered -@invoke.task -def release(ctx, dry_run=False): +release_help = { + "manual": "Build the man pages.", + "dry_run": "No-op, simulate what would happen if run for real.", + "local": "Build package locally and upload to PyPI.", + "pre": "Build a pre-release version, must be paired with a tag.", + "tag": "A release tag, e.g. 
'a', 'b', 'rc', 'post'.", + "month_offset": "How many months to offset the release date by.", +} + +@invoke.task(help=release_help) +def release(ctx, manual=False, local=False, dry_run=False, pre=False, tag=None, month_offset="0"): + trunc_month = False + if pre: + trunc_month = True drop_dist_dirs(ctx) - bump_version(ctx, dry_run=dry_run) + bump_version( + ctx, + dry_run=dry_run, + pre=pre, + tag=tag, + month_offset=month_offset, + trunc_month=trunc_month + ) version = find_version(ctx) tag_content = _render_log() if dry_run: @@ -84,36 +104,48 @@ def release(ctx, dry_run=False): log("would update: pipenv/pipenv.1") log(f'Would commit with message: "Release v{version}"') else: - ctx.run("towncrier") - ctx.run( - "git add CHANGELOG.rst news/ {0}".format(get_version_file(ctx).as_posix()) - ) - ctx.run("git rm CHANGELOG.draft.rst") + if pre: + log("generating towncrier draft...") + ctx.run("towncrier --draft > CHANGELOG.draft.rst") + ctx.run("git add {0}".format(get_version_file(ctx).as_posix())) + else: + ctx.run("towncrier") + ctx.run( + "git add CHANGELOG.rst news/ {0}".format(get_version_file(ctx).as_posix()) + ) + log("removing changelog draft if present") + draft_changelog = pathlib.Path("CHANGELOG.draft.rst") + if draft_changelog.exists(): + draft_changelog.unlink() + log("generating man files...") generate_manual(ctx) ctx.run("git add pipenv/pipenv.1") ctx.run(f'git commit -m "Release v{version}"') tag_content = tag_content.replace('"', '\\"') - if dry_run: + if dry_run or pre: log(f"Generated tag content: {tag_content}") - markdown = ctx.run( - "pandoc CHANGELOG.draft.rst -f rst -t markdown", hide=True - ).stdout.strip() - content = clean_mdchangelog(ctx, markdown) - log(f"would generate markdown: {content}") + draft_rstfile = "CHANGELOG.draft.rst" + markdown_path = pathlib.Path(draft_rstfile).with_suffix(".md") + generate_markdown(ctx, source_rstfile=draft_rstfile) + content = clean_mdchangelog(ctx, markdown_path.as_posix()) + log(f"would generate markdown: 
{markdown_path.read_text()}") + if pre and not dry_run: + ctx.run(f'git tag -a v{version} -m "Version v{version}\n\n{tag_content}"') else: generate_markdown(ctx) clean_mdchangelog(ctx) ctx.run(f'git tag -a v{version} -m "Version v{version}\n\n{tag_content}"') - build_dists(ctx) - if dry_run: + if local: + build_dists(ctx) dist_pattern = f'{PACKAGE_NAME.replace("-", "[-_]")}-*' artifacts = list(ROOT.joinpath("dist").glob(dist_pattern)) - filename_display = "\n".join(f" {a}" for a in artifacts) - log(f"Would upload dists: {filename_display}") - else: - upload_dists(ctx) - bump_version(ctx, dev=True) + if dry_run: + filename_display = "\n".join(f" {a}" for a in artifacts) + log(f"Would upload dists: {filename_display}") + else: + upload_dists(ctx) + bump_version(ctx, dev=True) def drop_dist_dirs(ctx): @@ -159,15 +191,21 @@ def upload_dists(ctx, repo="pypi"): @invoke.task -def generate_markdown(ctx): +def generate_markdown(ctx, source_rstfile=None): log("Generating markdown from changelog...") - ctx.run("pandoc CHANGELOG.rst -f rst -t markdown -o CHANGELOG.md") + if source_rstfile is None: + source_rstfile = "CHANGELOG.rst" + source_file = pathlib.Path(source_rstfile) + dest_file = source_file.with_suffix(".md") + ctx.run( + f"pandoc {source_file.as_posix()} -f rst -t markdown -o {dest_file.as_posix()}" + ) @invoke.task def generate_manual(ctx, commit=False): log("Generating manual from reStructuredText source...") - ctx.run("make man -C docs") + ctx.run("make man") ctx.run("cp docs/_build/man/pipenv.1 pipenv/") if commit: log("Commiting...") @@ -202,10 +240,13 @@ def generate_changelog(ctx, commit=False, draft=False): @invoke.task -def clean_mdchangelog(ctx, content=None): +def clean_mdchangelog(ctx, filename=None, content=None): changelog = None if not content: - changelog = _get_git_root(ctx) / "CHANGELOG.md" + if filename is not None: + changelog = pathlib.Path(filename) + else: + changelog = _get_git_root(ctx) / "CHANGELOG.md" content = changelog.read_text() 
content = re.sub( r"([^\n]+)\n?\s+\[[\\]+(#\d+)\]\(https://github\.com/pypa/[\w\-]+/issues/\d+\)", @@ -237,7 +278,7 @@ def add_one_day(dt): def date_offset(dt, month_offset=0, day_offset=0, truncate=False): new_month = (dt.month + month_offset) % 12 - year_offset = month_offset // 12 + year_offset = new_month // 12 replace_args = { "month": dt.month + month_offset, "year": dt.year + year_offset, @@ -285,10 +326,22 @@ def bump_version(ctx, dry_run=False, dev=False, pre=False, tag=None, commit=Fals if not tag: print('Using "pre" requires a corresponding tag.') return - if new_version.pre_tag and new_version.pre_tag != tag: - log("Swapping prerelease tag: {0} for {1}".format(new_version.pre_tag, tag)) - new_version = new_version.replace(pre_tag=tag, pre=0) - new_version = new_version.bump_pre(tag=tag) + tag_version = re.match( + r"(?Palpha|a|beta|b|c|preview|pre|rc)(?P[0-9]+)?", tag + ) + tag_dict = tag_version.groupdict() + tag = tag_dict.get("tag", tag) + tag_version = int(tag_dict["version"]) if tag_dict["version"] is not None else 0 + if new_version.dev is not None: + new_version = new_version.replace(dev=None) + if new_version.pre_tag: + if new_version.pre_tag != tag: + log("Swapping prerelease tag: {0} for {1}".format(new_version.pre_tag, tag)) + new_version = new_version.replace(pre_tag=tag, pre=tag_version) + else: + new_version = new_version.replace(pre_tag=tag, pre=tag_version) + if tag_version == 0: + new_version = new_version.bump_pre(tag=tag) else: new_version = new_version.replace(pre=None, dev=None) log("Updating version to %s" % new_version.normalize()) diff --git a/tasks/vendoring/patches/patched/crayons.patch b/tasks/vendoring/patches/patched/crayons.patch index d7fa3d40d5..2760ca81c1 100644 --- a/tasks/vendoring/patches/patched/crayons.patch +++ b/tasks/vendoring/patches/patched/crayons.patch @@ -8,8 +8,9 @@ index 455d3e90..de735daf 100644 -PY3 = sys.version_info[0] >= 3 - -+import shellingham - import colorama +-import colorama ++from pipenv.vendor 
import shellingham ++from pipenv.vendor import colorama +PY3 = sys.version_info[0] >= 3 + diff --git a/tasks/vendoring/patches/patched/pip20.patch b/tasks/vendoring/patches/patched/pip20.patch index 9fa79b0fda..c3dcd2a184 100644 --- a/tasks/vendoring/patches/patched/pip20.patch +++ b/tasks/vendoring/patches/patched/pip20.patch @@ -114,7 +114,7 @@ index 02a187c8..f917e645 100644 modifying_pip=modifying_pip ) diff --git a/pipenv/patched/pip/_internal/index/package_finder.py b/pipenv/patched/pip/_internal/index/package_finder.py -index a74d78db..11128f4d 100644 +index a74d78db..7c9dc1be 100644 --- a/pipenv/patched/pip/_internal/index/package_finder.py +++ b/pipenv/patched/pip/_internal/index/package_finder.py @@ -121,6 +121,7 @@ class LinkEvaluator(object): @@ -201,7 +201,7 @@ index a74d78db..11128f4d 100644 if self._prefer_binary: binary_preference = 1 - pri = -(wheel.support_index_min(valid_tags)) -+ tags = self.valid_tags if not ignore_compatibility else None ++ tags = valid_tags + try: + pri = -(wheel.support_index_min(tags=tags)) + except TypeError: @@ -589,3 +589,17 @@ index 65e41bc7..9eabf28e 100644 class AdjacentTempDirectory(TempDirectory): +diff --git a/pipenv/patched/pip/__main__.py b/pipenv/patched/pip/__main__.py +index 56f669fa..3c216189 100644 +--- a/pipenv/patched/pip/__main__.py ++++ b/pipenv/patched/pip/__main__.py +@@ -11,7 +11,9 @@ if __package__ == '': + # Resulting path is the name of the wheel itself + # Add that to sys.path so we can import pip + path = os.path.dirname(os.path.dirname(__file__)) ++ pipenv = os.path.dirname(os.path.dirname(path)) + sys.path.insert(0, path) ++ sys.path.insert(0, pipenv) + + from pip._internal.cli.main import main as _main # isort:skip # noqa + diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index ebd94723a5..1f0cca36d8 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -510,7 +510,7 @@ 
index ef5ba4e..b96acf6 100644 + map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates) + if h is not None + } -+ return result ++ return result - # Iterate over the chosen context manager - with context_manager as bar: @@ -671,7 +671,7 @@ index 430b4bb..015ff7a 100644 from . import click from .exceptions import IncompatibleRequirements diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py -index 7733447..aa93ec8 100644 +index 7733447..e6f232f 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py @@ -1,14 +1,19 @@ @@ -745,7 +745,7 @@ index 7733447..aa93ec8 100644 +def clean_requires_python(candidates): + """Get a cleaned list of all the candidates with valid specifiers in the `requires_python` attributes.""" + all_candidates = [] -+ py_version = parse_version(os.environ.get('PIP_PYTHON_VERSION', '.'.join(map(str, sys.version_info[:3])))) ++ py_version = parse_version(os.environ.get('PIPENV_REQUESTED_PYTHON_VERSION', '.'.join(map(str, sys.version_info[:3])))) + for c in candidates: + if getattr(c, "requires_python", None): + # Old specifications had people setting this to single digits @@ -796,13 +796,13 @@ index 7733447..aa93ec8 100644 + Formats a packaging.requirements.Requirement with a lowercase name. + + This is simply a copy of -+ https://github.com/pypa/pipenv/patched/packaging/blob/pipenv/patched/16.8/packaging/requirements.py#L109-L124 ++ https://github.com/pypa/pipenv/patched/pipenv/patched/packaging/blob/pipenv/patched/pipenv/patched/16.8/packaging/requirements.py#L109-L124 + modified to lowercase the dependency name. + + Previously, we were invoking the original Requirement.__str__ method and + lowercasing the entire result, which would lowercase the name, *and* other, + important stuff that should not be lowercased (such as the marker). See -+ this issue for more information: https://github.com/pypa/pipenv/patched/pipenv/issues/2113. 
++ this issue for more information: https://github.com/pypa/pipenv/patched/pipenv/patched/pipenv/issues/2113. + """ + parts = [requirement.name.lower()] + @@ -822,7 +822,12 @@ index 7733447..aa93ec8 100644 def is_url_requirement(ireq): -@@ -80,10 +184,10 @@ def format_requirement(ireq, marker=None, hashes=None): +@@ -77,13 +181,15 @@ def format_requirement(ireq, marker=None, hashes=None): + """ + if ireq.editable: + line = "-e {}".format(ireq.link.url) ++ elif ireq.link and ireq.link.is_vcs: ++ line = str(ireq.req) elif is_url_requirement(ireq): line = ireq.link.url else: diff --git a/tasks/vendoring/patches/vendor/update-attrs-import-path.patch b/tasks/vendoring/patches/vendor/update-attrs-import-path.patch new file mode 100644 index 0000000000..cf2968971f --- /dev/null +++ b/tasks/vendoring/patches/vendor/update-attrs-import-path.patch @@ -0,0 +1,221 @@ +diff --git a/pipenv/vendor/passa/models/projects.py b/pipenv/vendor/passa/models/projects.py +index f6e037d6..c7807c05 100644 +--- a/pipenv/vendor/passa/models/projects.py ++++ b/pipenv/vendor/passa/models/projects.py +@@ -6,7 +6,7 @@ import collections + import io + import os + +-import attr ++from pipenv.vendor import attr + import packaging.markers + import packaging.utils + import plette +diff --git a/pipenv/vendor/pythonfinder/models/mixins.py b/pipenv/vendor/pythonfinder/models/mixins.py +index 76327115..aeba0443 100644 +--- a/pipenv/vendor/pythonfinder/models/mixins.py ++++ b/pipenv/vendor/pythonfinder/models/mixins.py +@@ -5,7 +5,7 @@ import abc + import operator + from collections import defaultdict + +-import attr ++from pipenv.vendor import attr + import six + + from ..compat import fs_str +diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py +index b855a05d..a8070c91 100644 +--- a/pipenv/vendor/pythonfinder/models/path.py ++++ b/pipenv/vendor/pythonfinder/models/path.py +@@ -7,7 +7,7 @@ import sys + from collections import defaultdict + from itertools 
import chain + +-import attr ++from pipenv.vendor import attr + import six + from cached_property import cached_property + from ..compat import Path, fs_str +diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py +index 619e7761..ff249be2 100644 +--- a/pipenv/vendor/pythonfinder/models/python.py ++++ b/pipenv/vendor/pythonfinder/models/python.py +@@ -7,7 +7,7 @@ import platform + import sys + from collections import defaultdict + +-import attr ++from pipenv.vendor import attr + import six + from packaging.version import Version + +diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py +index a0e69b03..39353cdb 100644 +--- a/pipenv/vendor/pythonfinder/models/windows.py ++++ b/pipenv/vendor/pythonfinder/models/windows.py +@@ -4,7 +4,7 @@ from __future__ import absolute_import, print_function + import operator + from collections import defaultdict + +-import attr ++from pipenv.vendor import attr + + from ..environment import MYPY_RUNNING + from ..exceptions import InvalidPythonVersion +diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py +index 8150545c..ef48e628 100644 +--- a/pipenv/vendor/pythonfinder/utils.py ++++ b/pipenv/vendor/pythonfinder/utils.py +@@ -10,7 +10,7 @@ from collections import OrderedDict + from fnmatch import fnmatch + from threading import Timer + +-import attr ++from pipenv.vendor import attr + import six + from packaging.version import LegacyVersion, Version + +diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py +index 2608479a..1a610ce7 100644 +--- a/pipenv/vendor/requirementslib/models/dependencies.py ++++ b/pipenv/vendor/requirementslib/models/dependencies.py +@@ -6,7 +6,7 @@ import copy + import functools + import os + +-import attr ++from pipenv.vendor import attr + import packaging.markers + import packaging.version + import pip_shims.shims 
+diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py +index 3eabc504..841fc74c 100644 +--- a/pipenv/vendor/requirementslib/models/lockfile.py ++++ b/pipenv/vendor/requirementslib/models/lockfile.py +@@ -5,7 +5,7 @@ import copy + import itertools + import os + +-import attr ++from pipenv.vendor import attr + import plette.lockfiles + import six + from vistir.compat import FileNotFoundError, JSONDecodeError, Path +diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py +index 94410a20..b07e444c 100644 +--- a/pipenv/vendor/requirementslib/models/markers.py ++++ b/pipenv/vendor/requirementslib/models/markers.py +@@ -3,7 +3,7 @@ import itertools + import operator + import re + +-import attr ++from pipenv.vendor import attr + import distlib.markers + import packaging.version + import six +diff --git a/pipenv/vendor/requirementslib/models/metadata.py b/pipenv/vendor/requirementslib/models/metadata.py +index b45b1f02..671a311b 100644 +--- a/pipenv/vendor/requirementslib/models/metadata.py ++++ b/pipenv/vendor/requirementslib/models/metadata.py +@@ -9,7 +9,7 @@ import os + import zipfile + from collections import defaultdict + +-import attr ++from pipenv.vendor import attr + import dateutil.parser + import distlib.metadata + import distlib.wheel +diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py +index 9c0aea4e..9bda73d4 100644 +--- a/pipenv/vendor/requirementslib/models/pipfile.py ++++ b/pipenv/vendor/requirementslib/models/pipfile.py +@@ -7,7 +7,7 @@ import itertools + import os + import sys + +-import attr ++from pipenv.vendor import attr + import plette.models.base + import plette.pipfiles + import tomlkit +diff --git a/pipenv/vendor/requirementslib/models/project.py b/pipenv/vendor/requirementslib/models/project.py +index 7c1b0e81..4c73823c 100644 +--- 
a/pipenv/vendor/requirementslib/models/project.py ++++ b/pipenv/vendor/requirementslib/models/project.py +@@ -6,7 +6,7 @@ import collections + import io + import os + +-import attr ++from pipenv.vendor import attr + import packaging.markers + import packaging.utils + import plette +diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py +index a0045f45..3ce8d8f5 100644 +--- a/pipenv/vendor/requirementslib/models/requirements.py ++++ b/pipenv/vendor/requirementslib/models/requirements.py +@@ -10,7 +10,7 @@ from contextlib import contextmanager + from distutils.sysconfig import get_python_lib + from functools import partial + +-import attr ++from pipenv.vendor import attr + import pip_shims + import six + import vistir +diff --git a/pipenv/vendor/requirementslib/models/resolvers.py b/pipenv/vendor/requirementslib/models/resolvers.py +index 43590523..4554b299 100644 +--- a/pipenv/vendor/requirementslib/models/resolvers.py ++++ b/pipenv/vendor/requirementslib/models/resolvers.py +@@ -1,7 +1,7 @@ + # -*- coding=utf-8 -*- + from contextlib import contextmanager + +-import attr ++from pipenv.vendor import attr + import six + from pip_shims.shims import Wheel + +diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py +index f0d40f29..9c97a394 100644 +--- a/pipenv/vendor/requirementslib/models/setup_info.py ++++ b/pipenv/vendor/requirementslib/models/setup_info.py +@@ -12,7 +12,7 @@ import shutil + import sys + from functools import partial + +-import attr ++from pipenv.vendor import attr + import chardet + import packaging.specifiers + import packaging.utils +diff --git a/pipenv/vendor/requirementslib/models/url.py b/pipenv/vendor/requirementslib/models/url.py +index 3d5743e6..b0c98de8 100644 +--- a/pipenv/vendor/requirementslib/models/url.py ++++ b/pipenv/vendor/requirementslib/models/url.py +@@ -1,7 +1,7 @@ + # -*- coding=utf-8 -*- + from 
__future__ import absolute_import, print_function + +-import attr ++from pipenv.vendor import attr + import pip_shims.shims + from orderedmultidict import omdict + from six.moves.urllib.parse import quote_plus, unquote_plus +diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py +index 0f96a331..273305db 100644 +--- a/pipenv/vendor/requirementslib/models/vcs.py ++++ b/pipenv/vendor/requirementslib/models/vcs.py +@@ -5,7 +5,7 @@ import importlib + import os + import sys + +-import attr ++from pipenv.vendor import attr + import pip_shims + import six + diff --git a/tasks/vendoring/patches/vendor/vistir-imports.patch b/tasks/vendoring/patches/vendor/vistir-imports.patch index 725e8a56d9..dc48bf3ab8 100644 --- a/tasks/vendoring/patches/vendor/vistir-imports.patch +++ b/tasks/vendoring/patches/vendor/vistir-imports.patch @@ -15,30 +15,32 @@ diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index b5904bc7..a44aafbe 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py -@@ -43,7 +43,7 @@ __all__ = [ +@@ -55,7 +55,7 @@ __all__ = [ if sys.version_info >= (3, 5): # pragma: no cover from pathlib import Path else: # pragma: no cover - from pathlib2 import Path + from pipenv.vendor.pathlib2 import Path - if six.PY3: # pragma: no cover + if sys.version_info >= (3, 4): # pragma: no cover # Only Python 3.4+ is supported -@@ -53,14 +53,14 @@ if six.PY3: # pragma: no cover - from weakref import finalize +@@ -85,8 +85,8 @@ if sys.version_info >= (3, 4): # pragma: no cover + else: # pragma: no cover # Only Python 2.7 is supported - from backports.functools_lru_cache import lru_cache -+ from pipenv.vendor.backports.functools_lru_cache import lru_cache - from .backports.functools import partialmethod # type: ignore - from backports.shutil_get_terminal_size import get_terminal_size ++ from pipenv.vendor.backports.functools_lru_cache import lru_cache + from 
pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size + from .backports.functools import partialmethod # type: ignore from .backports.surrogateescape import register_surrogateescape + from collections import ( +@@ -110,7 +110,7 @@ else: # pragma: no cover register_surrogateescape() NamedTemporaryFile = _NamedTemporaryFile - from backports.weakref import finalize # type: ignore + from pipenv.vendor.backports.weakref import finalize # type: ignore - try: - # Introduced Python 3.5 + try: + from os.path import samefile diff --git a/tests/fixtures/cython-import-package/pyproject.toml b/tests/fixtures/cython-import-package/pyproject.toml new file mode 100644 index 0000000000..661b63c5bf --- /dev/null +++ b/tests/fixtures/cython-import-package/pyproject.toml @@ -0,0 +1,52 @@ +[build-system] +requires = ["setuptools >= 40.6.0", "setuptools-scm", "cython"] +build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 90 +target_version = ['py27', 'py35', 'py36', 'py37', 'py38'] +include = '\.pyi?$' +exclude = ''' +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.pyre_configuration + | \.venv + | _build + | buck-out + | build + | dist +) +''' + +[tool.towncrier] +package = 'cython-import-package' +package_dir = 'src' +filename = 'CHANGELOG.rst' +directory = 'news/' +title_format = '{version} ({project_date})' +issue_format = '`#{issue} `_' +template = 'tasks/CHANGELOG.rst.jinja2' + + [[tool.towncrier.type]] + directory = 'feature' + name = 'Features' + showcontent = true + + [[tool.towncrier.type]] + directory = 'bugfix' + name = 'Bug Fixes' + showcontent = true + + [[tool.towncrier.type]] + directory = 'trivial' + name = 'Trivial Changes' + showcontent = false + + [[tool.towncrier.type]] + directory = 'removal' + name = 'Removals and Deprecations' + showcontent = true diff --git a/tests/fixtures/cython-import-package/setup.cfg b/tests/fixtures/cython-import-package/setup.cfg new file mode 100644 index 0000000000..a43ee22b9b --- 
/dev/null +++ b/tests/fixtures/cython-import-package/setup.cfg @@ -0,0 +1,58 @@ +[metadata] +name = cython_import_package +package_name = cython-import-package +description = A fake python package. +url = https://github.com/sarugaku/cython_import_package +author = Dan Ryan +author_email = dan@danryan.co +long_description = file: README.rst +license = ISC License +keywords = fake package test +classifier = + Development Status :: 1 - Planning + License :: OSI Approved :: ISC License (ISCL) + Operating System :: OS Independent + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.6 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.4 + Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Topic :: Software Development :: Libraries :: Python Modules + +[options.extras_require] +tests = + pytest + pytest-xdist + pytest-cov + pytest-timeout + readme-renderer[md] + twine +dev = + black;python_version>="3.6" + flake8 + flake8-bugbear;python_version>="3.5" + invoke + isort + mypy;python_version>="3.5" + parver + pre-commit + rope + wheel + +[options] +zip_safe = true +python_requires = >=2.6,!=3.0,!=3.1,!=3.2,!=3.3 +install_requires = + attrs + vistir + +[bdist_wheel] +universal = 1 + +[egg_info] +tag_build = +tag_date = 0 + diff --git a/tests/fixtures/cython-import-package/setup.py b/tests/fixtures/cython-import-package/setup.py new file mode 100644 index 0000000000..78eeeeda17 --- /dev/null +++ b/tests/fixtures/cython-import-package/setup.py @@ -0,0 +1,43 @@ +import ast +import os + +from setuptools import setup, find_packages +from setuptools.command.test import test as TestCommand + +# ORDER MATTERS +# Import this after setuptools or it will fail +from Cython.Build import cythonize # noqa: I100 +import Cython.Distutils + + + +ROOT = os.path.dirname(__file__) + +PACKAGE_NAME = 'cython_import_package' + +VERSION = None + 
+with open(os.path.join(ROOT, 'src', PACKAGE_NAME.replace("-", "_"), '__init__.py')) as f: + for line in f: + if line.startswith('__version__ = '): + VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) + break +if VERSION is None: + raise EnvironmentError('failed to read version') + + +# Put everything in setup.cfg, except those that don't actually work? +setup( + # These really don't work. + package_dir={'': 'src'}, + packages=find_packages('src'), + + # I don't know how to specify an empty key in setup.cfg. + package_data={ + '': ['LICENSE*', 'README*'], + }, + setup_requires=["setuptools_scm", "cython"], + + # I need this to be dynamic. + version=VERSION, +) diff --git a/tests/fixtures/cython-import-package/src/cython_import_package/__init__.py b/tests/fixtures/cython-import-package/src/cython_import_package/__init__.py new file mode 100644 index 0000000000..f102a9cadf --- /dev/null +++ b/tests/fixtures/cython-import-package/src/cython_import_package/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.1" diff --git a/tests/fixtures/legacy-backend-package/pyproject.toml b/tests/fixtures/legacy-backend-package/pyproject.toml new file mode 100644 index 0000000000..e646fb3c25 --- /dev/null +++ b/tests/fixtures/legacy-backend-package/pyproject.toml @@ -0,0 +1,51 @@ +[build-system] +requires = ["setuptools>=30.3.0", "wheel", "setuptools_scm>=3.3.1"] + +[tool.black] +line-length = 90 +target_version = ['py27', 'py35', 'py36', 'py37', 'py38'] +include = '\.pyi?$' +exclude = ''' +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.pyre_configuration + | \.venv + | _build + | buck-out + | build + | dist +) +''' + +[tool.towncrier] +package = 'legacy-backend-package' +package_dir = 'src' +filename = 'CHANGELOG.rst' +directory = 'news/' +title_format = '{version} ({project_date})' +issue_format = '`#{issue} `_' +template = 'tasks/CHANGELOG.rst.jinja2' + + [[tool.towncrier.type]] + directory = 'feature' + name = 'Features' + showcontent = true + + 
[[tool.towncrier.type]] + directory = 'bugfix' + name = 'Bug Fixes' + showcontent = true + + [[tool.towncrier.type]] + directory = 'trivial' + name = 'Trivial Changes' + showcontent = false + + [[tool.towncrier.type]] + directory = 'removal' + name = 'Removals and Deprecations' + showcontent = true diff --git a/tests/fixtures/legacy-backend-package/setup.cfg b/tests/fixtures/legacy-backend-package/setup.cfg new file mode 100644 index 0000000000..1e5f1ed85d --- /dev/null +++ b/tests/fixtures/legacy-backend-package/setup.cfg @@ -0,0 +1,127 @@ +[metadata] +name = legacy_backend_package +package_name = legacy-backend-package +description = A fake python package. +url = https://github.com/sarugaku/legacy_backend_package +author = Dan Ryan +author_email = dan@danryan.co +long_description = file: README.rst +license = ISC License +keywords = fake package test +classifier = + Development Status :: 1 - Planning + License :: OSI Approved :: ISC License (ISCL) + Operating System :: OS Independent + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.6 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.4 + Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Topic :: Software Development :: Libraries :: Python Modules + +[options.extras_require] +tests = + pytest + pytest-xdist + pytest-cov + pytest-timeout + readme-renderer[md] + twine +dev = + black;python_version>="3.6" + flake8 + flake8-bugbear;python_version>="3.5" + invoke + isort + mypy;python_version>="3.5" + parver + pre-commit + rope + wheel + +[options] +zip_safe = true +python_requires = >=2.6,!=3.0,!=3.1,!=3.2,!=3.3 +setup_requires = + setuptools_scm>=3.3.1 +install_requires = + attrs + vistir + +[bdist_wheel] +universal = 1 + +[egg_info] +tag_build = +tag_date = 0 + + +[tool:pytest] +strict = true +plugins = cov flake8 +addopts = -ra +testpaths = tests/ 
+norecursedirs = .* build dist news tasks docs +flake8-ignore = + docs/source/* ALL + tests/*.py ALL + setup.py ALL +filterwarnings = + ignore::DeprecationWarning + ignore::PendingDeprecationWarning + +[isort] +atomic = true +not_skip = __init__.py +line_length = 90 +indent = ' ' +multi_line_output = 3 +known_third_party = invoke,parver,pytest,setuptools,towncrier +known_first_party = + legacy_backend_package + tests +combine_as_imports=True +include_trailing_comma = True +force_grid_wrap=0 + +[flake8] +max-line-length = 90 +select = C,E,F,W,B,B950 +ignore = + # The default ignore list: + D203,F401,E123,E203,W503,E501,E402 + #E121,E123,E126,E226,E24,E704, + # Our additions: + # E123: closing bracket does not match indentation of opening bracket’s line + # E203: whitespace before ‘:’ + # E129: visually indented line with same indent as next logical line + # E222: multiple spaces after operator + # E231: missing whitespace after ',' + # D203: 1 blank line required before class docstring + # E402: module level import not at top of file + # E501: line too long (using B950 from flake8-bugbear) + # F401: Module imported but unused + # W503: line break before binary operator (not a pep8 issue, should be ignored) +exclude = + .tox, + .git, + __pycache__, + docs/source/*, + build, + dist, + tests/*, + *.pyc, + *.egg-info, + .cache, + .eggs, + setup.py, +max-complexity=13 + +[mypy] +ignore_missing_imports=true +follow_imports=skip +html_report=mypyhtml +python_version=2.7 diff --git a/tests/fixtures/legacy-backend-package/setup.py b/tests/fixtures/legacy-backend-package/setup.py new file mode 100644 index 0000000000..e41a3e36a6 --- /dev/null +++ b/tests/fixtures/legacy-backend-package/setup.py @@ -0,0 +1,35 @@ +import ast +import os + +from setuptools import setup, find_packages +from setuptools.command.test import test as TestCommand + +ROOT = os.path.dirname(__file__) + +PACKAGE_NAME = 'legacy_backend_package' + +VERSION = None + +with open(os.path.join(ROOT, 'src', 
PACKAGE_NAME.replace("-", "_"), '__init__.py')) as f: + for line in f: + if line.startswith('__version__ = '): + VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) + break +if VERSION is None: + raise EnvironmentError('failed to read version') + + +# Put everything in setup.cfg, except those that don't actually work? +setup( + # These really don't work. + package_dir={'': 'src'}, + packages=find_packages('src'), + + # I don't know how to specify an empty key in setup.cfg. + package_data={ + '': ['LICENSE*', 'README*'], + }, + + # I need this to be dynamic. + version=VERSION, +) diff --git a/tests/fixtures/legacy-backend-package/src/legacy_backend_package/__init__.py b/tests/fixtures/legacy-backend-package/src/legacy_backend_package/__init__.py new file mode 100644 index 0000000000..f102a9cadf --- /dev/null +++ b/tests/fixtures/legacy-backend-package/src/legacy_backend_package/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.1" diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index ef91f5031f..4940fb57f4 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -133,6 +133,8 @@ def pytest_runtest_setup(item): sys.version_info[:2] == (3, 6) ): pytest.skip('test is skipped on python 3.6') + if item.get_closest_marker('skip_windows') is not None and (os.name == 'nt'): + pytest.skip('test does not run on windows') @pytest.fixture diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index 9d3247fed8..6eacf5cf98 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -280,14 +280,13 @@ def test_bad_packages(PipenvInstance): @pytest.mark.extras @pytest.mark.install @pytest.mark.requirements -@pytest.mark.skip(reason="Not mocking this.") -def test_requirements_to_pipfile(PipenvInstance): +def test_requirements_to_pipfile(PipenvInstance, pypi): with PipenvInstance(pipfile=False, chdir=True) as p: # Write a requirements file with 
open("requirements.txt", "w") as f: - f.write("requests[socks]==2.18.1\n") + f.write("-i {}\nrequests[socks]==2.19.1\n".format(pypi.url)) c = p.pipenv("install") assert c.return_code == 0 @@ -433,7 +432,7 @@ def test_system_and_deploy_work(PipenvInstance): Path(p.pipfile_path).write_text( u""" [packages] -requests +requests = "*" """.strip() ) c = p.pipenv("install --system") diff --git a/tests/integration/test_install_twists.py b/tests/integration/test_install_twists.py index 907264467b..2ee5b5d24d 100644 --- a/tests/integration/test_install_twists.py +++ b/tests/integration/test_install_twists.py @@ -97,8 +97,6 @@ def helper_dependency_links_install_test(pipenv_instance, deplink): c = pipenv_instance.pipenv("install -v -e .") assert c.return_code == 0 assert "test-private-dependency" in pipenv_instance.lockfile["default"] - assert "version" in pipenv_instance.lockfile["default"]["test-private-dependency"] - assert "0.1" in pipenv_instance.lockfile["default"]["test-private-dependency"]["version"] def test_https_dependency_links_install(self, PipenvInstance): """Ensure dependency_links are parsed and installed (needed for private repo dependencies). diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index 8772df54ff..2300ec8240 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -13,7 +13,7 @@ @pytest.mark.vcs @pytest.mark.install @pytest.mark.needs_internet -def test_basic_vcs_install(PipenvInstance): # ! This is failing +def test_basic_vcs_install(PipenvInstance): with PipenvInstance(chdir=True) as p: c = p.pipenv("install git+https://github.com/benjaminp/six.git@1.11.0#egg=six") assert c.return_code == 0 @@ -25,7 +25,6 @@ def test_basic_vcs_install(PipenvInstance): # ! 
This is failing assert p.lockfile["default"]["six"] == { "git": "https://github.com/benjaminp/six.git", "ref": "15e31431af97e5e64b80af0a3f598d382bcdd49a", - "version": "==1.11.0" } assert "gitdb2" in p.lockfile["default"] @@ -43,7 +42,6 @@ def test_git_vcs_install(PipenvInstance): assert p.lockfile["default"]["six"] == { "git": "git://github.com/benjaminp/six.git", "ref": "15e31431af97e5e64b80af0a3f598d382bcdd49a", - "version": "==1.11.0" } diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 4f202beaab..672fbedf3e 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -2,6 +2,7 @@ import json import os +import shutil import sys import pytest @@ -10,6 +11,7 @@ from vistir.compat import Path from vistir.misc import to_text from pipenv.utils import temp_environ +import delegator @pytest.mark.lock @@ -384,6 +386,57 @@ def test_private_index_mirror_lock_requirements(PipenvInstance_NoPyPI): assert '--extra-index-url {}'.format(mirror_url) not in c.out.strip() +@pytest.mark.lock +@pytest.mark.install +@pytest.mark.skip_windows +@pytest.mark.needs_internet +def test_outdated_setuptools_with_pep517_legacy_build_meta_is_updated(PipenvInstance): + """ + This test ensures we are using build isolation and a pep517 backend + because the package in question includes ``pyproject.toml`` but lacks + a ``build-backend`` declaration. In this case, ``pip`` defaults to using + ``setuptools.build_meta:__legacy__`` as a builder, but without ``pep517`` + enabled and with ``setuptools==40.2.0`` installed, this build backend was + not yet available. ``setuptools<40.8`` will not be aware of this backend. + + If pip is able to build in isolation with a pep517 backend, this will not + matter and the test will still pass as pip will by default install a more + recent version of ``setuptools``. 
+ """ + with PipenvInstance(chdir=True) as p: + c = p.pipenv('run pip install "setuptools<=40.2"') + assert c.return_code == 0 + c = p.pipenv("run python -c 'import setuptools; print(setuptools.__version__)'") + assert c.return_code == 0 + assert c.out.strip() == "40.2.0" + c = p.pipenv("install legacy-backend-package") + assert c.return_code == 0 + assert "vistir" in p.lockfile["default"] + + +@pytest.mark.lock +@pytest.mark.install +@pytest.mark.skip_windows +@pytest.mark.needs_internet +def test_outdated_setuptools_with_pep517_cython_import_in_setuppy(PipenvInstance): + """ + This test ensures we are using build isolation and a pep517 backend + because the package in question declares 'cython' as a build dependency + in ``pyproject.toml``, then imports it in ``setup.py``. The pep517 + backend will have to install it first, so this will only pass if the + resolver is buliding with a proper backend. + """ + with PipenvInstance(chdir=True) as p: + c = p.pipenv('run pip install "setuptools<=40.2"') + assert c.return_code == 0 + c = p.pipenv("run python -c 'import setuptools; print(setuptools.__version__)'") + assert c.return_code == 0 + assert c.out.strip() == "40.2.0" + c = p.pipenv("install cython-import-package") + assert c.return_code == 0 + assert "vistir" in p.lockfile["default"] + + @pytest.mark.index @pytest.mark.install def test_lock_updated_source(PipenvInstance): @@ -494,6 +547,9 @@ def test_lock_editable_vcs_with_extras_without_install(PipenvInstance): assert "socks" in p.lockfile["default"]["requests"]["extras"] c = p.pipenv('install') assert c.return_code == 0 + assert "requests" in p.lockfile["default"] + # For backward compatibility we want to make sure not to include the 'version' key + assert "version" not in p.lockfile["default"]["requests"] @pytest.mark.vcs @@ -658,4 +714,37 @@ def test_lock_after_update_source_name(PipenvInstance): c = p.pipenv("lock --clear") assert c.return_code == 0 assert "index" in p.lockfile["default"]["six"] - assert 
p.lockfile["default"]["six"]["index"] == "custom", Path(p.lockfile_path).read_text() # p.lockfile["default"]["six"] + assert p.lockfile["default"]["six"]["index"] == "custom", Path(p.lockfile_path).read_text() + + +@pytest.mark.lock +def test_lock_nested_direct_url(PipenvInstance): + """ + The dependency 'test_package' has a declared dependency on + a PEP508 style VCS URL. This ensures that we capture the dependency + here along with its own dependencies. + """ + with PipenvInstance(chdir=True) as p: + c = p.pipenv("install test_package") + assert c.return_code == 0 + assert "vistir" in p.lockfile["default"] + assert "colorama" in p.lockfile["default"] + assert "six" in p.lockfile["default"] + + +@pytest.mark.lock +@pytest.mark.needs_internet +def test_lock_nested_vcs_direct_url(PipenvInstance): + with PipenvInstance(chdir=True) as p: + p._pipfile.add("pep508_package", { + "git": "https://github.com/techalchemy/test-project.git", + "editable": True, "ref": "master", + "subdirectory": "parent_folder/pep508-package" + }) + c = p.pipenv("install") + assert c.return_code == 0 + assert "git" in p.lockfile["default"]["pep508-package"] + assert "sibling-package" in p.lockfile["default"] + assert "git" in p.lockfile["default"]["sibling-package"] + assert "subdirectory" in p.lockfile["default"]["sibling-package"] + assert "version" not in p.lockfile["default"]["sibling-package"] diff --git a/tests/pypi b/tests/pypi index 6faddf97c2..1881ecb454 160000 --- a/tests/pypi +++ b/tests/pypi @@ -1 +1 @@ -Subproject commit 6faddf97c2a0220870da0a1409a196667b06c9cc +Subproject commit 1881ecb45431952d2e18e2be3416a8835e53778a diff --git a/tests/unit/test_environments.py b/tests/unit/test_environments.py new file mode 100644 index 0000000000..6baf84c810 --- /dev/null +++ b/tests/unit/test_environments.py @@ -0,0 +1,68 @@ +import itertools +import pytest +import os +from pipenv import environments +from pipenv.utils import temp_environ + + +@pytest.mark.environments 
+@pytest.mark.parametrize( + "arg, prefix, use_negation", + list(itertools.product(("ENABLE_SOMETHING",), ("FAKEPREFIX", None), (True, False))), +) +def test_get_from_env(arg, prefix, use_negation): + negated_arg = "NO_{0}".format(arg) + positive_var = arg + negative_var = negated_arg + if prefix: + negative_var = "{0}_{1}".format(prefix, negative_var) + positive_var = "{0}_{1}".format(prefix, positive_var) + # set the positive first + for var_to_set, opposite_var in ((arg, negated_arg), (negated_arg, arg)): + os.environ.pop(var_to_set, None) + os.environ.pop(opposite_var, None) + with temp_environ(): + is_positive = var_to_set == arg + is_negative = not is_positive + envvar = positive_var if is_positive else negative_var + os.environ[envvar] = "true" + main_expected_value = True if is_positive else None + if use_negation and not is_positive: + main_expected_value = False + # use negation means if the normal variable isnt set we will check + # for the negated version + negative_expected_value = ( + True if is_negative else None + ) + if is_positive: + assert ( + environments.get_from_env( + var_to_set, prefix, check_for_negation=use_negation + ) + is main_expected_value + ) + assert ( + environments.get_from_env( + opposite_var, prefix, check_for_negation=use_negation + ) + is negative_expected_value + ) + else: + # var_to_set = negative version i.e. NO_xxxx + # opposite_var = positive_version i.e. 
XXXX + + # get NO_BLAH -- expecting this to be True + assert ( + environments.get_from_env( + var_to_set, prefix, check_for_negation=use_negation + ) + is negative_expected_value + ) + # get BLAH -- expecting False if checking for negation + # but otherwise should be None + assert ( + environments.get_from_env( + opposite_var, prefix, check_for_negation=use_negation + ) + is main_expected_value + ) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 4b9cd75ce6..24df26a64a 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -78,6 +78,7 @@ def mock_unpack(link, source_dir, download_dir, only_download=False, session=Non @pytest.mark.utils @pytest.mark.parametrize("deps, expected", DEP_PIP_PAIRS) +@pytest.mark.needs_internet def test_convert_deps_to_pip(monkeypatch, deps, expected): with monkeypatch.context() as m: import pip_shims @@ -223,6 +224,7 @@ def test_is_valid_url(self): assert pipenv.utils.is_valid_url(not_url) is False @pytest.mark.utils + @pytest.mark.needs_internet def test_download_file(self): url = "https://github.com/pypa/pipenv/blob/master/README.md" output = "test_download.md"