diff --git a/.azure-pipelines/jobs/test-windows.yml b/.azure-pipelines/jobs/test-windows.yml index 2d1166f4e37..6053b0eb005 100644 --- a/.azure-pipelines/jobs/test-windows.yml +++ b/.azure-pipelines/jobs/test-windows.yml @@ -12,19 +12,13 @@ jobs: "2.7-x86": python.version: '2.7' python.architecture: x86 - "2.7": + "2.7": # because Python 2! python.version: '2.7' python.architecture: x64 - "3.5": + "3.5": # lowest Py3 version python.version: '3.5' python.architecture: x64 - "3.6": - python.version: '3.6' - python.architecture: x64 - "3.7": - python.version: '3.7' - python.architecture: x64 - "3.8": + "3.8": # current python.version: '3.8' python.architecture: x64 maxParallel: 6 @@ -44,6 +38,12 @@ jobs: vmImage: ${{ parameters.vmImage }} strategy: matrix: + "3.6": + python.version: '3.6' + python.architecture: x64 + "3.7": + python.version: '3.7' + python.architecture: x64 # This is for Windows, so test x86 builds "3.5-x86": python.version: '3.5' diff --git a/.azure-pipelines/jobs/test.yml b/.azure-pipelines/jobs/test.yml index f7dac5143f7..274e075a69b 100644 --- a/.azure-pipelines/jobs/test.yml +++ b/.azure-pipelines/jobs/test.yml @@ -12,8 +12,8 @@ jobs: "2.7": python.version: '2.7' python.architecture: x64 - "3.6": - python.version: '3.6' + "3.8": + python.version: '3.8' python.architecture: x64 maxParallel: 2 @@ -32,12 +32,12 @@ jobs: "3.5": python.version: '3.5' python.architecture: x64 + "3.6": + python.version: '3.6' + python.architecture: x64 "3.7": python.version: '3.7' python.architecture: x64 - "3.8": - python.version: '3.8' - python.architecture: x64 maxParallel: 4 steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 89430962cab..0a7847c132c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: exclude: .patch - repo: https://gitlab.com/pycqa/flake8 - rev: 3.7.9 + rev: 3.8.1 hooks: - id: flake8 exclude: tests/data @@ -29,7 +29,7 @@ repos: files: \.py$ - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.760 + rev: v0.770 hooks: - id: mypy exclude: docs|tests diff --git a/.travis.yml b/.travis.yml index 02c71a243fb..8d84f958161 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,13 +19,6 @@ jobs: env: TOXENV=docs - env: TOXENV=lint - env: TOXENV=vendoring - # Latest CPython - - env: GROUP=1 - python: 2.7 - - env: GROUP=2 - python: 2.7 - - env: GROUP=1 - - env: GROUP=2 # Complete checking for ensuring compatibility # PyPy @@ -38,19 +31,6 @@ jobs: python: pypy2.7-7.1.1 - env: GROUP=2 python: pypy2.7-7.1.1 - # Other Supported CPython - - env: GROUP=1 - python: 3.7 - - env: GROUP=2 - python: 3.7 - - env: GROUP=1 - python: 3.6 - - env: GROUP=2 - python: 3.6 - - env: GROUP=1 - python: 3.5 - - env: GROUP=2 - python: 3.5 # Test experimental stuff that are not part of the standard pip usage. # Helpful for developers working on them to see how they're doing. 
@@ -61,6 +41,9 @@ jobs: - env: - GROUP=2 - NEW_RESOLVER=1 + - env: + - GROUP=3 + - NEW_RESOLVER=1 fast_finish: true allow_failures: diff --git a/AUTHORS.txt b/AUTHORS.txt index 04c42fc2156..dff91f93092 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -380,6 +380,7 @@ Nick Coghlan Nick Stenning Nick Timkovich Nicolas Bock +Nicole Harris Nikhil Benesch Nikolay Korolev Nitesh Sharma @@ -419,6 +420,7 @@ Peter Lisák Peter Waller petr-tik Phaneendra Chiruvella +Phil Elson Phil Freo Phil Pennock Phil Whelan diff --git a/NEWS.rst b/NEWS.rst index 78d8d52252b..46d322a4216 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,29 @@ +20.2b1 (2020-05-21) +=================== + +Deprecations and Removals +------------------------- + +- Drop parallelization from ``pip list --outdated``. (`#8167 `_) + +Bug Fixes +--------- + +- Correctly treat wheels contenting non-ASCII file contents so they can be + installed on Windows. (`#5712 `_) +- Revert building of local directories in place, restoring the pre-20.1 + behaviour of copying to a temporary directory. (`#7555 `_) +- Prompt the user for password if the keyring backend doesn't return one (`#7998 `_) +- Fix metadata permission issues when umask has the executable bit set. (`#8164 `_) +- Avoid unnecessary message about the wheel package not being installed + when a wheel would not have been built. Additionally, clarify the message. (`#8178 `_) + +Improved Documentation +---------------------- + +- Add GitHub issue template for reporting when the dependency resolver fails (`#8207 `_) + + .. NOTE: You should *NOT* be adding new change log entries to this file, this file is managed by towncrier. You *may* edit previous change logs to fix problems like typo corrections or such. @@ -7,6 +33,24 @@ .. towncrier release notes start +20.1.1 (2020-05-19) +=================== + +Deprecations and Removals +------------------------- + +- Revert building of local directories in place, restoring the pre-20.1 + behaviour of copying to a temporary directory. (`#7555 `_) +- Drop parallelization from ``pip list --outdated``. (`#8167 `_) + +Bug Fixes +--------- + +- Fix metadata permission issues when umask has the executable bit set. (`#8164 `_) +- Avoid unnecessary message about the wheel package not being installed + when a wheel would not have been built. Additionally, clarify the message. (`#8178 `_) + + 20.1 (2020-04-28) ================= diff --git a/docs/html/development/architecture/anatomy.rst b/docs/html/development/architecture/anatomy.rst index 769d2b4796f..4b117bafe42 100644 --- a/docs/html/development/architecture/anatomy.rst +++ b/docs/html/development/architecture/anatomy.rst @@ -25,7 +25,6 @@ The ``README``, license, ``pyproject.toml``, ``setup.py``, and so on are in the * ``setup.cfg`` * ``setup.py`` * ``tox.ini`` -- ``pip`` uses Tox, an automation tool, configured by this `tox.ini`_ file. ``tox.ini`` describes a few environments ``pip`` uses during development for simplifying how tests are run (complicated situation there). Example: ``tox -e -py36``. We can run tests for different versions of Python by changing “36” to “27” or similar. 
-* ``.appveyor.yml`` * ``.coveragerc`` * ``.gitattributes`` * ``.gitignore`` diff --git a/docs/html/development/ci.rst b/docs/html/development/ci.rst index 8b6412307a9..7f214a3b84c 100644 --- a/docs/html/development/ci.rst +++ b/docs/html/development/ci.rst @@ -66,9 +66,9 @@ Services pip test suite and checks are distributed on three different platforms that provides free executors for open source packages: - - `Travis CI`_ (Used for Linux) - - `Azure DevOps CI`_ (Linux, MacOS & Windows tests) - - `GitHub Actions`_ (Linux, MacOS & Windows tests) + - `GitHub Actions`_ (Used for code quality and development tasks) + - `Azure DevOps CI`_ (Used for tests) + - `Travis CI`_ (Used for PyPy tests) .. _`Travis CI`: https://travis-ci.org/ .. _`Azure DevOps CI`: https://azure.microsoft.com/en-us/services/devops/ @@ -81,13 +81,13 @@ Current run tests Developer tasks --------------- -======== =============== ================ ================== ============ - OS docs lint vendoring packages -======== =============== ================ ================== ============ -Linux Travis, Github Travis, Github Travis, Github Azure -Windows Azure -MacOS Azure -======== =============== ================ ================== ============ +======== =============== ================ ================== ============= + OS docs lint vendoring packaging +======== =============== ================ ================== ============= +Linux Travis, Github Travis, Github Travis, Github Azure +Windows Github Github Github Azure +MacOS Github Github Github Azure +======== =============== ================ ================== ============= Actual testing -------------- @@ -113,9 +113,9 @@ Actual testing | | +-------+---------------+-----------------+ | | | CP3.5 | Azure | Azure | | | +-------+---------------+-----------------+ -| | | CP3.6 | Azure | Azure | +| | | CP3.6 | Azure | | | | +-------+---------------+-----------------+ -| | x64 | CP3.7 | Azure | Azure | +| | x64 | CP3.7 | Azure | | | | +-------+---------------+-----------------+ | | | CP3.8 | Azure | Azure | | | +-------+---------------+-----------------+ @@ -137,15 +137,15 @@ Actual testing | | +-------+---------------+-----------------+ | | | PyPy3 | | | | Linux +----------+-------+---------------+-----------------+ -| | | CP2.7 | Travis,Azure | Travis,Azure | +| | | CP2.7 | Azure | Azure | | | +-------+---------------+-----------------+ -| | | CP3.5 | Travis,Azure | Travis,Azure | +| | | CP3.5 | Azure | Azure | | | +-------+---------------+-----------------+ -| | | CP3.6 | Travis,Azure | Travis,Azure | +| | | CP3.6 | Azure | Azure | | | +-------+---------------+-----------------+ -| | x64 | CP3.7 | Travis,Azure | Travis,Azure | +| | x64 | CP3.7 | Azure | Azure | | | +-------+---------------+-----------------+ -| | | CP3.8 | Travis | Travis | +| | | CP3.8 | Azure | Azure | | | +-------+---------------+-----------------+ | | | PyPy | Travis | Travis | | | +-------+---------------+-----------------+ @@ -173,7 +173,7 @@ Actual testing | | +-------+---------------+-----------------+ | | x64 | CP3.7 | Azure | Azure | | | +-------+---------------+-----------------+ -| | | CP3.8 | | | +| | | CP3.8 | Azure | Azure | | | +-------+---------------+-----------------+ | | | PyPy | | | | | +-------+---------------+-----------------+ diff --git a/docs/html/development/contributing.rst b/docs/html/development/contributing.rst index 100d894ad56..15690dae456 100644 --- a/docs/html/development/contributing.rst +++ b/docs/html/development/contributing.rst @@ -39,8 +39,11 @@ separately, as a 
"formatting cleanup" PR, if needed. Automated Testing ================= -All pull requests and merges to 'master' branch are tested using `Travis CI`_ -and `Appveyor CI`_ based on our `.travis.yml`_ and `.appveyor.yml`_ files. +All pull requests and merges to 'master' branch are tested using `Travis CI`_, +`Azure Pipelines`_ and `GitHub Actions`_ based on our `.travis.yml`_, +`.azure-pipelines`_ and `.github/workflows`_ files. More details about pip's +Continuous Integration can be found in the `CI Documentation`_ + You can find the status and results to the CI runs for your PR on GitHub's web UI for the pull request. You can also find links to the CI services' pages for @@ -260,8 +263,11 @@ will initiate a vote among the existing maintainers. .. _`Studies have shown`: https://www.kessler.de/prd/smartbear/BestPracticesForPeerCodeReview.pdf .. _`resolve merge conflicts`: https://help.github.com/articles/resolving-a-merge-conflict-using-the-command-line .. _`Travis CI`: https://travis-ci.org/ -.. _`Appveyor CI`: https://www.appveyor.com/ +.. _`Azure Pipelines`: https://azure.microsoft.com/en-in/services/devops/pipelines/ +.. _`GitHub Actions`: https://github.com/features/actions .. _`.travis.yml`: https://github.com/pypa/pip/blob/master/.travis.yml -.. _`.appveyor.yml`: https://github.com/pypa/pip/blob/master/.appveyor.yml +.. _`.azure-pipelines`: https://github.com/pypa/pip/blob/master/.azure-pipelines +.. _`.github/workflows`: https://github.com/pypa/pip/blob/master/.github/workflows +.. _`CI Documentation`: https://pip.pypa.io/en/latest/development/ci/ .. _`towncrier`: https://pypi.org/project/towncrier/ .. _`Testing the next-gen pip dependency resolver`: https://pradyunsg.me/blog/2020/03/27/pip-resolver-testing/ diff --git a/news/86222709-663e-40a1-af2e-f20afab42122.trivial b/news/3C29002F-4AB2-4093-B321-994F7882F944.trivial similarity index 100% rename from news/86222709-663e-40a1-af2e-f20afab42122.trivial rename to news/3C29002F-4AB2-4093-B321-994F7882F944.trivial diff --git a/news/9CD0A87D-0ACD-418E-8C02-4560A99FEB71.trivial b/news/579B649E-EE91-4EA2-9860-4D13F792959F.trivial similarity index 100% rename from news/9CD0A87D-0ACD-418E-8C02-4560A99FEB71.trivial rename to news/579B649E-EE91-4EA2-9860-4D13F792959F.trivial diff --git a/news/BF3EC962-957A-4DB8-A849-2E7179F875A9.trivial b/news/598F8551-DB46-4A12-987E-094EF18DAF7C.trivial similarity index 100% rename from news/BF3EC962-957A-4DB8-A849-2E7179F875A9.trivial rename to news/598F8551-DB46-4A12-987E-094EF18DAF7C.trivial diff --git a/news/6754.feature b/news/6754.feature new file mode 100644 index 00000000000..561643dbd28 --- /dev/null +++ b/news/6754.feature @@ -0,0 +1 @@ +Warn if index pages have unexpected content-type diff --git a/news/C7A26013-0E79-4DBB-B0E3-2DA5C5587CDC.trivial b/news/749E6F3D-CAEB-4AEA-A53F-E623365ACB82.trivial similarity index 100% rename from news/C7A26013-0E79-4DBB-B0E3-2DA5C5587CDC.trivial rename to news/749E6F3D-CAEB-4AEA-A53F-E623365ACB82.trivial diff --git a/news/7555.bugfix b/news/7555.bugfix deleted file mode 100644 index f762236e235..00000000000 --- a/news/7555.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Revert building of local directories in place, restoring the pre-20.1 -behaviour of copying to a temporary directory. diff --git a/news/7625.bugfix b/news/7625.bugfix new file mode 100644 index 00000000000..3a675f8d2b0 --- /dev/null +++ b/news/7625.bugfix @@ -0,0 +1 @@ +Fix normalizing path on Windows when installing package on another logical disk. 
diff --git a/news/7693.feature b/news/7693.feature new file mode 100644 index 00000000000..4e458559110 --- /dev/null +++ b/news/7693.feature @@ -0,0 +1 @@ +Allow specifying ``--prefer-binary`` option in a requirements file diff --git a/news/770AC380-E84F-44C7-A20C-CD31A829EDA5.trivial b/news/770AC380-E84F-44C7-A20C-CD31A829EDA5.trivial deleted file mode 100644 index 5e5d3c4f01d..00000000000 --- a/news/770AC380-E84F-44C7-A20C-CD31A829EDA5.trivial +++ /dev/null @@ -1 +0,0 @@ -Remove "type: ignore" comments from cli subpackage diff --git a/news/7968.bugfix b/news/7968.bugfix new file mode 100644 index 00000000000..36b282fc821 --- /dev/null +++ b/news/7968.bugfix @@ -0,0 +1 @@ +The VCS commands run by pip as subprocesses don't merge stdout and stderr anymore, improving the output parsing by subsequent commands. diff --git a/news/8072.doc b/news/8072.doc new file mode 100644 index 00000000000..71eb46f292d --- /dev/null +++ b/news/8072.doc @@ -0,0 +1 @@ +Fix pip config docstring so that the subcommands render correctly in the docs diff --git a/news/8164.bugfix b/news/8164.bugfix deleted file mode 100644 index 1707d28401a..00000000000 --- a/news/8164.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix metadata permission issues when umask has the executable bit set. diff --git a/news/8167.removal b/news/8167.removal deleted file mode 100644 index d719377eb26..00000000000 --- a/news/8167.removal +++ /dev/null @@ -1 +0,0 @@ -Drop parallelization from ``pip list --outdated``. diff --git a/news/8178.bugfix b/news/8178.bugfix deleted file mode 100644 index 6960053eda2..00000000000 --- a/news/8178.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Avoid unnecessary message about the wheel package not being installed -when a wheel would not have been built. Additionally, clarify the message. diff --git a/news/8207.doc b/news/8207.doc deleted file mode 100644 index a9cf944c62a..00000000000 --- a/news/8207.doc +++ /dev/null @@ -1 +0,0 @@ -Add GitHub issue template for reporting when the dependency resolver fails diff --git a/news/ac1c4196-d21d-4e39-9d39-118e39c837ab.trivial b/news/93898036-99ac-4e02-88c7-429280fe3e27.trivial similarity index 100% rename from news/ac1c4196-d21d-4e39-9d39-118e39c837ab.trivial rename to news/93898036-99ac-4e02-88c7-429280fe3e27.trivial diff --git a/news/e20153d5-ae85-4b80-80f7-1c46e7b566dc.trivial b/news/EBB1CF12-70ED-405F-90C0-BEA7CF25DCE4.trivial similarity index 100% rename from news/e20153d5-ae85-4b80-80f7-1c46e7b566dc.trivial rename to news/EBB1CF12-70ED-405F-90C0-BEA7CF25DCE4.trivial diff --git a/news/FCD7E4ED-BA3E-4018-B43E-D445DA8E542B.trivial b/news/FCD7E4ED-BA3E-4018-B43E-D445DA8E542B.trivial new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/FDE77CF6-D22C-45A1-840F-AA913FF90F93.trivial b/news/FDE77CF6-D22C-45A1-840F-AA913FF90F93.trivial new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/d9f9c55b-f959-456f-a849-ee976ef227de.trivial b/news/d9f9c55b-f959-456f-a849-ee976ef227de.trivial new file mode 100644 index 00000000000..ece7751fc27 --- /dev/null +++ b/news/d9f9c55b-f959-456f-a849-ee976ef227de.trivial @@ -0,0 +1,2 @@ +Refactor the commands by removing the ``__init__`` method and defining and explicit +``add_options`` method for adding command options. diff --git a/noxfile.py b/noxfile.py index f2a959ca601..1746bb69915 100644 --- a/noxfile.py +++ b/noxfile.py @@ -274,11 +274,12 @@ def upload_release(session): f"Remove dist/ and run 'nox -s build-release -- {version}'" ) # Sanity check: Make sure the files are correctly named. 
+ distfile_names = map(os.path.basename, distribution_files) expected_distribution_files = [ - f"dist/pip-{version}-py2.py3-none-any.whl", - f"dist/pip-{version}.tar.gz", + f"pip-{version}-py2.py3-none-any.whl", + f"pip-{version}.tar.gz", ] - if sorted(distribution_files) != sorted(expected_distribution_files): + if sorted(distfile_names) != sorted(expected_distribution_files): session.error( f"Distribution files do not seem to be for {version} release." ) diff --git a/setup.cfg b/setup.cfg index 617e8b5673f..f0bd7a8d9bd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,6 +38,19 @@ ignore_errors = True [tool:pytest] addopts = --ignore src/pip/_vendor --ignore tests/tests_cache -r aR +markers = + network: tests that need network + incompatible_with_test_venv + incompatible_with_venv + no_auto_tempdir_manager + unit: unit tests + integration: integration tests + bzr: VCS: Bazaar + svn: VCS: Subversion + mercurial: VCS: Mercurial + git: VCS: git + yaml: yaml based tests + fails_on_new_resolver: Does not yet work on the new resolver [bdist_wheel] universal = 1 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index dc41d31b63e..90ce10888ef 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -4,7 +4,7 @@ from typing import List, Optional -__version__ = "20.2.dev0" +__version__ = "20.2.dev1" def main(args=None): diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index b8f005f5ca9..089a523b725 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -193,6 +193,8 @@ def install_requirements( args.extend(['--trusted-host', host]) if finder.allow_all_prereleases: args.append('--pre') + if finder.prefer_binary: + args.append('--prefer-binary') args.append('--') args.extend(requirements) with open_spinner(message) as spinner: diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 1fa5ba0bd4a..c52ffa2f267 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -28,6 +28,7 @@ CommandError, InstallationError, PreviousBuildDirError, + SubProcessError, UninstallationError, ) from pip._internal.utils.deprecation import deprecated @@ -88,6 +89,12 @@ def __init__(self, name, summary, isolated=False): ) self.parser.add_option_group(gen_opts) + self.add_options() + + def add_options(self): + # type: () -> None + pass + def handle_pip_version_check(self, options): # type: (Values) -> None """ @@ -195,7 +202,8 @@ def _main(self, args): logger.debug('Exception information:', exc_info=True) return PREVIOUS_BUILD_DIR_ERROR - except (InstallationError, UninstallationError, BadCommand) as exc: + except (InstallationError, UninstallationError, BadCommand, + SubProcessError) as exc: logger.critical(str(exc)) logger.debug('Exception information:', exc_info=True) diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index ca6d4379be3..209614ff6d4 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -24,13 +24,13 @@ class CacheCommand(Command): Subcommands: - dir: Show the cache directory. - info: Show information about the cache. - list: List filenames of packages stored in the cache. - remove: Remove one or more package from the cache. - purge: Remove all items from the cache. + - dir: Show the cache directory. + - info: Show information about the cache. + - list: List filenames of packages stored in the cache. + - remove: Remove one or more package from the cache. 
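The base_command.py hunk above is the heart of this refactor: Command.__init__ now ends by calling a no-op add_options() hook, so each command adds its options by overriding that hook instead of overriding __init__. A minimal sketch of the pattern, using optparse directly and illustrative class names rather than pip's real base class:

    # Minimal sketch of the add_options() hook pattern; the classes below are
    # simplified stand-ins, not pip's actual Command implementation.
    from optparse import OptionParser

    class Command(object):
        def __init__(self, name, summary):
            # type: (str, str) -> None
            self.name = name
            self.summary = summary
            self.parser = OptionParser(prog=name, description=summary)
            self.cmd_opts = self.parser  # pip uses an OptionGroup; a parser is enough here
            self.add_options()           # subclasses hook in here

        def add_options(self):
            # type: () -> None
            pass                         # default: no command-specific options

    class ExampleHashCommand(Command):
        def add_options(self):
            # type: () -> None
            self.cmd_opts.add_option(
                '-a', '--algorithm', dest='algorithm', default='sha256',
                help='The hash algorithm to use')

    cmd = ExampleHashCommand('hash', 'Compute hashes of local files.')
    options, args = cmd.parser.parse_args(['--algorithm', 'sha512', 'archive.tar.gz'])
    print(options.algorithm, args)   # -> sha512 ['archive.tar.gz']

The later hunks in completion.py, configuration.py, debug.py, download.py and the remaining command modules are mechanical conversions to this hook.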
+ - purge: Remove all items from the cache. - can be a glob expression or a package name. + ```` can be a glob expression or a package name. """ ignore_require_venv = True diff --git a/src/pip/_internal/commands/completion.py b/src/pip/_internal/commands/completion.py index 70d33243fcd..9b99f51f006 100644 --- a/src/pip/_internal/commands/completion.py +++ b/src/pip/_internal/commands/completion.py @@ -9,7 +9,7 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any, List + from typing import List from optparse import Values BASE_COMPLETION = """ @@ -56,32 +56,28 @@ class CompletionCommand(Command): ignore_require_venv = True - def __init__(self, *args, **kw): - # type: (*Any, **Any) -> None - super(CompletionCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( '--bash', '-b', action='store_const', const='bash', dest='shell', help='Emit completion code for bash') - cmd_opts.add_option( + self.cmd_opts.add_option( '--zsh', '-z', action='store_const', const='zsh', dest='shell', help='Emit completion code for zsh') - cmd_opts.add_option( + self.cmd_opts.add_option( '--fish', '-f', action='store_const', const='fish', dest='shell', help='Emit completion code for fish') - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): # type: (Values, List[str]) -> int diff --git a/src/pip/_internal/commands/configuration.py b/src/pip/_internal/commands/configuration.py index b801be6a03c..a8d3aaddb74 100644 --- a/src/pip/_internal/commands/configuration.py +++ b/src/pip/_internal/commands/configuration.py @@ -19,15 +19,16 @@ class ConfigurationCommand(Command): - """Manage local and global configuration. + """ + Manage local and global configuration. 
Subcommands: - list: List the active configuration (or from the file specified) - edit: Edit the configuration file in an editor - get: Get the value associated with name - set: Set the name=value - unset: Unset the value associated with name + - list: List the active configuration (or from the file specified) + - edit: Edit the configuration file in an editor + - get: Get the value associated with name + - set: Set the name=value + - unset: Unset the value associated with name If none of --user, --global and --site are passed, a virtual environment configuration file is used if one is active and the file @@ -45,11 +46,14 @@ class ConfigurationCommand(Command): %prog [] unset name """ - def __init__(self, *args, **kwargs): - super(ConfigurationCommand, self).__init__(*args, **kwargs) + def __init__(self, name, summary, isolated=False): + super(ConfigurationCommand, self).__init__( + name, summary, isolated=isolated + ) self.configuration = None + def add_options(self): self.cmd_opts.add_option( '--editor', dest='editor', diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 8e243011f97..d8e2484c1b4 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -20,7 +20,7 @@ if MYPY_CHECK_RUNNING: from types import ModuleType - from typing import Any, List, Optional, Dict + from typing import List, Optional, Dict from optparse import Values logger = logging.getLogger(__name__) @@ -193,13 +193,10 @@ class DebugCommand(Command): %prog """ ignore_require_venv = True - def __init__(self, *args, **kw): - # type: (*Any, **Any) -> None - super(DebugCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - cmdoptions.add_target_python_options(cmd_opts) - self.parser.insert_option_group(0, cmd_opts) + def add_options(self): + # type: () -> None + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) self.parser.config.load() def run(self, options, args): diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index c829550633e..46e8371261e 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - from __future__ import absolute_import import logging @@ -9,9 +6,15 @@ from pip._internal.cli import cmdoptions from pip._internal.cli.cmdoptions import make_target_python from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS from pip._internal.req.req_tracker import get_requirement_tracker from pip._internal.utils.misc import ensure_dir, normalize_path, write_output from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List logger = logging.getLogger(__name__) @@ -36,28 +39,25 @@ class DownloadCommand(RequirementCommand): %prog [options] ... 
%prog [options] ...""" - def __init__(self, *args, **kw): - super(DownloadCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.global_options()) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.pre()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + + self.cmd_opts.add_option( '-d', '--dest', '--destination-dir', '--destination-directory', dest='download_dir', metavar='dir', @@ -65,7 +65,7 @@ def __init__(self, *args, **kw): help=("Download packages into ."), ) - cmdoptions.add_target_python_options(cmd_opts) + cmdoptions.add_target_python_options(self.cmd_opts) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -73,10 +73,12 @@ def __init__(self, *args, **kw): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) @with_cleanup def run(self, options, args): + # type: (Values, List[str]) -> int + options.ignore_installed = True # editable doesn't really make sense for `pip download`, but the bowels # of the RequirementSet code require that property. @@ -132,11 +134,10 @@ def run(self, options, args): reqs, check_supported_wheels=True ) - downloaded = ' '.join([ - req.name for req in requirement_set.requirements.values() - if req.successfully_downloaded - ]) + downloaded = ' '.join([req.name # type: ignore + for req in requirement_set.requirements.values() + if req.successfully_downloaded]) if downloaded: write_output('Successfully downloaded %s', downloaded) - return requirement_set + return SUCCESS diff --git a/src/pip/_internal/commands/freeze.py b/src/pip/_internal/commands/freeze.py index 13171772e5c..2071fbabd61 100644 --- a/src/pip/_internal/commands/freeze.py +++ b/src/pip/_internal/commands/freeze.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. 
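Besides the option handling, the download.py and freeze.py hunks give run() a typed signature and an explicit integer return value (SUCCESS) instead of returning the requirement set. A minimal sketch of that convention, assuming the usual 0-for-success codes (pip keeps its own constants in cli/status_codes.py):

    # Sketch of returning an explicit integer status from run(); the constant
    # values here are assumptions, not copied from pip's status_codes module.
    SUCCESS = 0
    ERROR = 1

    def run(options, args):
        # type: (object, list) -> int
        if not args:
            print('ERROR: nothing to do without at least one requirement')
            return ERROR
        # ... resolve and download the requirements ...
        return SUCCESS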
-# mypy: disallow-untyped-defs=False - from __future__ import absolute_import import sys @@ -8,12 +5,18 @@ from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS from pip._internal.models.format_control import FormatControl from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.typing import MYPY_CHECK_RUNNING DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List + class FreezeCommand(Command): """ @@ -26,9 +29,8 @@ class FreezeCommand(Command): %prog [options]""" log_streams = ("ext://sys.stderr", "ext://sys.stderr") - def __init__(self, *args, **kw): - super(FreezeCommand, self).__init__(*args, **kw) - + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-r', '--requirement', dest='requirements', @@ -75,6 +77,7 @@ def __init__(self, *args, **kw): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int format_control = FormatControl(set(), set()) wheel_cache = WheelCache(options.cache_dir, format_control) skip = set(stdlib_pkgs) @@ -97,3 +100,4 @@ def run(self, options, args): for line in freeze(**freeze_kwargs): sys.stdout.write(line + '\n') + return SUCCESS diff --git a/src/pip/_internal/commands/hash.py b/src/pip/_internal/commands/hash.py index aab4a3dc2fe..37831c39522 100644 --- a/src/pip/_internal/commands/hash.py +++ b/src/pip/_internal/commands/hash.py @@ -12,7 +12,7 @@ if MYPY_CHECK_RUNNING: from optparse import Values - from typing import Any, List + from typing import List logger = logging.getLogger(__name__) @@ -28,9 +28,8 @@ class HashCommand(Command): usage = '%prog [options] ...' ignore_require_venv = True - def __init__(self, *args, **kw): - # type: (*Any, **Any) -> None - super(HashCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-a', '--algorithm', dest='algorithm', diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index c9b9ea4a8c5..56dd707bdd9 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -87,18 +87,15 @@ class InstallCommand(RequirementCommand): %prog [options] [-e] ... %prog [options] ...""" - def __init__(self, *args, **kw): - super(InstallCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.pre()) - - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) + + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( '-t', '--target', dest='target_dir', metavar='dir', @@ -108,9 +105,9 @@ def __init__(self, *args, **kw): '. Use --upgrade to replace existing packages in ' 'with new versions.' 
) - cmdoptions.add_target_python_options(cmd_opts) + cmdoptions.add_target_python_options(self.cmd_opts) - cmd_opts.add_option( + self.cmd_opts.add_option( '--user', dest='use_user_site', action='store_true', @@ -118,19 +115,19 @@ def __init__(self, *args, **kw): "platform. Typically ~/.local/, or %APPDATA%\\Python on " "Windows. (See the Python documentation for site.USER_BASE " "for full details.)") - cmd_opts.add_option( + self.cmd_opts.add_option( '--no-user', dest='use_user_site', action='store_false', help=SUPPRESS_HELP) - cmd_opts.add_option( + self.cmd_opts.add_option( '--root', dest='root_path', metavar='dir', default=None, help="Install everything relative to this alternate root " "directory.") - cmd_opts.add_option( + self.cmd_opts.add_option( '--prefix', dest='prefix_path', metavar='dir', @@ -138,11 +135,11 @@ def __init__(self, *args, **kw): help="Installation prefix where lib, bin and other top-level " "folders are placed") - cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option( + self.cmd_opts.add_option( '-U', '--upgrade', dest='upgrade', action='store_true', @@ -151,7 +148,7 @@ def __init__(self, *args, **kw): 'upgrade-strategy used.' ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--upgrade-strategy', dest='upgrade_strategy', default='only-if-needed', @@ -165,14 +162,14 @@ def __init__(self, *args, **kw): 'satisfy the requirements of the upgraded package(s).' ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--force-reinstall', dest='force_reinstall', action='store_true', help='Reinstall all packages even if they are already ' 'up-to-date.') - cmd_opts.add_option( + self.cmd_opts.add_option( '-I', '--ignore-installed', dest='ignore_installed', action='store_true', @@ -182,15 +179,15 @@ def __init__(self, *args, **kw): 'with a different package manager!' 
) - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) - cmd_opts.add_option(cmdoptions.install_options()) - cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.install_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) - cmd_opts.add_option( + self.cmd_opts.add_option( "--compile", action="store_true", dest="compile", @@ -198,21 +195,21 @@ def __init__(self, *args, **kw): help="Compile Python source files to bytecode", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-compile", action="store_false", dest="compile", help="Do not compile Python source files to bytecode", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-warn-script-location", action="store_false", dest="warn_script_location", default=True, help="Do not warn when installing scripts outside PATH", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-warn-conflicts", action="store_false", dest="warn_about_conflicts", @@ -220,11 +217,11 @@ def __init__(self, *args, **kw): help="Do not warn about broken dependencies", ) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -232,7 +229,7 @@ def __init__(self, *args, **kw): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) @with_cleanup def run(self, options, args): diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index 052f63890ec..df9e1b38eda 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -23,7 +23,7 @@ if MYPY_CHECK_RUNNING: from optparse import Values - from typing import Any, List, Set, Tuple, Iterator + from typing import List, Set, Tuple, Iterator from pip._internal.network.session import PipSession from pip._vendor.pkg_resources import Distribution @@ -41,28 +41,24 @@ class ListCommand(IndexGroupCommand): usage = """ %prog [options]""" - def __init__(self, *args, **kw): - # type: (*Any, **Any) -> None - super(ListCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( '-o', '--outdated', action='store_true', default=False, help='List outdated packages') - cmd_opts.add_option( + self.cmd_opts.add_option( '-u', '--uptodate', action='store_true', default=False, help='List uptodate packages') - cmd_opts.add_option( + self.cmd_opts.add_option( '-e', '--editable', action='store_true', default=False, help='List editable projects.') - cmd_opts.add_option( + self.cmd_opts.add_option( '-l', '--local', action='store_true', 
default=False, @@ -75,8 +71,8 @@ def __init__(self, *args, **kw): action='store_true', default=False, help='Only output packages installed in user-site.') - cmd_opts.add_option(cmdoptions.list_path()) - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( '--pre', action='store_true', default=False, @@ -84,7 +80,7 @@ def __init__(self, *args, **kw): "pip only finds stable versions."), ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--format', action='store', dest='list_format', @@ -94,7 +90,7 @@ def __init__(self, *args, **kw): "or json", ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--not-required', action='store_true', dest='not_required', @@ -102,13 +98,13 @@ def __init__(self, *args, **kw): "installed packages.", ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--exclude-editable', action='store_false', dest='include_editable', help='Exclude editable package from output.', ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--include-editable', action='store_true', dest='include_editable', @@ -120,7 +116,7 @@ def __init__(self, *args, **kw): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) def _build_package_finder(self, options, session): # type: (Values, PipSession) -> PackageFinder diff --git a/src/pip/_internal/commands/search.py b/src/pip/_internal/commands/search.py index e5f286ea5bf..3e75254812a 100644 --- a/src/pip/_internal/commands/search.py +++ b/src/pip/_internal/commands/search.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - from __future__ import absolute_import import logging @@ -23,6 +20,16 @@ from pip._internal.utils.compat import get_terminal_size from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Dict, Optional + from typing_extensions import TypedDict + TransformedHit = TypedDict( + 'TransformedHit', + {'name': str, 'summary': str, 'versions': List[str]}, + ) logger = logging.getLogger(__name__) @@ -34,8 +41,8 @@ class SearchCommand(Command, SessionCommandMixin): %prog [options] """ ignore_require_venv = True - def __init__(self, *args, **kw): - super(SearchCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-i', '--index', dest='index', @@ -46,6 +53,7 @@ def __init__(self, *args, **kw): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int if not args: raise CommandError('Missing required argument (search query).') query = args @@ -62,6 +70,7 @@ def run(self, options, args): return NO_MATCHES_FOUND def search(self, query, options): + # type: (List[str], Values) -> List[Dict[str, str]] index_url = options.index session = self.get_default_session(options) @@ -73,12 +82,13 @@ def search(self, query, options): def transform_hits(hits): + # type: (List[Dict[str, str]]) -> List[TransformedHit] """ The list from pypi is really a list of versions. We want a list of packages with the list of versions stored inline. This converts the list from pypi into one we can use. 
""" - packages = OrderedDict() + packages = OrderedDict() # type: OrderedDict[str, TransformedHit] for hit in hits: name = hit['name'] summary = hit['summary'] @@ -101,6 +111,7 @@ def transform_hits(hits): def print_results(hits, name_column_width=None, terminal_width=None): + # type: (List[TransformedHit], Optional[int], Optional[int]) -> None if not hits: return if name_column_width is None: @@ -118,8 +129,9 @@ def print_results(hits, name_column_width=None, terminal_width=None): target_width = terminal_width - name_column_width - 5 if target_width > 10: # wrap and indent summary to fit terminal - summary = textwrap.wrap(summary, target_width) - summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + summary_lines = textwrap.wrap(summary, target_width) + summary = ('\n' + ' ' * (name_column_width + 3)).join( + summary_lines) line = '{name_latest:{name_column_width}} - {summary}'.format( name_latest='{name} ({latest})'.format(**locals()), @@ -143,4 +155,5 @@ def print_results(hits, name_column_width=None, terminal_width=None): def highest_version(versions): + # type: (List[str]) -> str return max(versions, key=parse_version) diff --git a/src/pip/_internal/commands/show.py b/src/pip/_internal/commands/show.py index eace2e5cb50..4090ea319a1 100644 --- a/src/pip/_internal/commands/show.py +++ b/src/pip/_internal/commands/show.py @@ -31,9 +31,8 @@ class ShowCommand(Command): %prog [options] ...""" ignore_require_venv = True - def __init__(self, *args, **kw): - # type: (*Any, **Any) -> None - super(ShowCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-f', '--files', dest='files', @@ -111,7 +110,7 @@ def get_requiring_packages(package_name): # RECORDs should be part of .dist-info metadatas if dist.has_metadata('RECORD'): lines = dist.get_metadata_lines('RECORD') - paths = [l.split(',')[0] for l in lines] + paths = [line.split(',')[0] for line in lines] paths = [os.path.join(dist.location, p) for p in paths] file_list = [os.path.relpath(p, dist.location) for p in paths] diff --git a/src/pip/_internal/commands/uninstall.py b/src/pip/_internal/commands/uninstall.py index 0542e78c79c..3371fe47ff1 100644 --- a/src/pip/_internal/commands/uninstall.py +++ b/src/pip/_internal/commands/uninstall.py @@ -16,7 +16,7 @@ if MYPY_CHECK_RUNNING: from optparse import Values - from typing import Any, List + from typing import List class UninstallCommand(Command, SessionCommandMixin): @@ -34,9 +34,8 @@ class UninstallCommand(Command, SessionCommandMixin): %prog [options] ... %prog [options] -r ...""" - def __init__(self, *args, **kw): - # type: (*Any, **Any) -> None - super(UninstallCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-r', '--requirement', dest='requirements', diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index f028d681f7b..0f718566bd0 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -19,7 +19,7 @@ if MYPY_CHECK_RUNNING: from optparse import Values - from typing import Any, List + from typing import List logger = logging.getLogger(__name__) @@ -47,13 +47,10 @@ class WheelCommand(RequirementCommand): %prog [options] [-e] ... 
%prog [options] ...""" - def __init__(self, *args, **kw): - # type: (*Any, **Any) -> None - super(WheelCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None - cmd_opts = self.cmd_opts - - cmd_opts.add_option( + self.cmd_opts.add_option( '-w', '--wheel-dir', dest='wheel_dir', metavar='dir', @@ -61,29 +58,29 @@ def __init__(self, *args, **kw): help=("Build wheels into , where the default is the " "current working directory."), ) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option( '--build-option', dest='build_options', metavar='options', action='append', help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", ) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.progress_bar()) - - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + self.cmd_opts.add_option( '--global-option', dest='global_options', action='append', @@ -91,7 +88,7 @@ def __init__(self, *args, **kw): help="Extra global options to be supplied to the setup.py " "call before the 'bdist_wheel' command.") - cmd_opts.add_option( + self.cmd_opts.add_option( '--pre', action='store_true', default=False, @@ -99,7 +96,7 @@ def __init__(self, *args, **kw): "pip only finds stable versions."), ) - cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -107,7 +104,7 @@ def __init__(self, *args, **kw): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) @with_cleanup def run(self, options, args): diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 8ac85485e17..e0d7f095d48 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -84,6 +84,11 @@ class CommandError(PipError): """Raised when there is an error in command-line arguments""" +class SubProcessError(PipError): + """Raised when there is an error raised while executing a + command in subprocess""" + + class PreviousBuildDirError(PipError): """Raised when there's a previous conflicting build directory""" diff --git a/src/pip/_internal/index/collector.py 
b/src/pip/_internal/index/collector.py index e2c800c2cde..7908ab996e6 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -455,8 +455,9 @@ def _get_html_page(link, session=None): 'be checked by HEAD.', link, ) except _NotHTML as exc: - logger.debug( - 'Skipping page %s because the %s request got Content-Type: %s', + logger.warning( + 'Skipping page %s because the %s request got Content-Type: %s.' + 'The only supported Content-Type is text/html', link, exc.request_desc, exc.content_type, ) except HTTPError as exc: diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py index 441992b92b3..731e4981d72 100644 --- a/src/pip/_internal/index/package_finder.py +++ b/src/pip/_internal/index/package_finder.py @@ -693,6 +693,15 @@ def set_allow_all_prereleases(self): # type: () -> None self._candidate_prefs.allow_all_prereleases = True + @property + def prefer_binary(self): + # type: () -> bool + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self): + # type: () -> None + self._candidate_prefs.prefer_binary = True + def make_link_evaluator(self, project_name): # type: (str) -> LinkEvaluator canonical_name = canonicalize_name(project_name) diff --git a/src/pip/_internal/models/format_control.py b/src/pip/_internal/models/format_control.py index 2e13727ca00..c39b84a84b5 100644 --- a/src/pip/_internal/models/format_control.py +++ b/src/pip/_internal/models/format_control.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - from pip._vendor.packaging.utils import canonicalize_name from pip._internal.exceptions import CommandError @@ -42,7 +39,7 @@ def __repr__(self): @staticmethod def handle_mutual_excludes(value, target, other): - # type: (str, Optional[Set[str]], Optional[Set[str]]) -> None + # type: (str, Set[str], Set[str]) -> None if value.startswith('-'): raise CommandError( "--no-binary / --only-binary option requires 1 argument." diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 94da3d46aaa..ca729fcdf5e 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -4,9 +4,6 @@ providing credentials in the context of network requests. """ -# The following comment should be removed at some point in the future. 
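Two of the hunks in this stretch work together: package_finder.py grows a prefer_binary property and set_prefer_binary() setter on the finder, and the earlier build_env.py hunk forwards that preference as --prefer-binary when pip installs build dependencies into the isolated build environment. A minimal sketch of mirroring a finder preference onto a child pip command line, with simplified stand-in classes:

    # Sketch of forwarding finder preferences to a child `pip install` command
    # line; classes and most flags below are illustrative, not pip's real objects.
    class CandidatePreferences(object):
        def __init__(self):
            self.prefer_binary = False

    class PackageFinder(object):
        def __init__(self):
            self._candidate_prefs = CandidatePreferences()

        @property
        def prefer_binary(self):
            # type: () -> bool
            return self._candidate_prefs.prefer_binary

        def set_prefer_binary(self):
            # type: () -> None
            self._candidate_prefs.prefer_binary = True

    def build_install_args(finder, requirements):
        args = ['install', '--ignore-installed', '--no-deps']  # placeholder flags
        if finder.prefer_binary:
            args.append('--prefer-binary')
        args.append('--')
        args.extend(requirements)
        return args

    finder = PackageFinder()
    finder.set_prefer_binary()
    print(build_install_args(finder, ['setuptools>=40.8.0', 'wheel']))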
-# mypy: disallow-untyped-defs=False - import logging from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth @@ -23,11 +20,12 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from optparse import Values - from typing import Dict, Optional, Tuple + from typing import Dict, Optional, Tuple, List, Any from pip._internal.vcs.versioncontrol import AuthInfo + from pip._vendor.requests.models import Response, Request + Credentials = Tuple[str, str, str] logger = logging.getLogger(__name__) @@ -44,6 +42,7 @@ def get_keyring_auth(url, username): + # type: (str, str) -> Optional[AuthInfo] """Return the tuple auth for a given url from keyring.""" if not url or not keyring: return None @@ -70,12 +69,13 @@ def get_keyring_auth(url, username): logger.warning( "Keyring is skipped due to an exception: %s", str(exc), ) + return None class MultiDomainBasicAuth(AuthBase): def __init__(self, prompting=True, index_urls=None): - # type: (bool, Optional[Values]) -> None + # type: (bool, Optional[List[str]]) -> None self.prompting = prompting self.index_urls = index_urls self.passwords = {} # type: Dict[str, AuthInfo] @@ -87,6 +87,7 @@ def __init__(self, prompting=True, index_urls=None): self._credentials_to_save = None # type: Optional[Credentials] def _get_index_url(self, url): + # type: (str) -> Optional[str] """Return the original index URL matching the requested URL. Cached or dynamically generated credentials may work against @@ -106,9 +107,11 @@ def _get_index_url(self, url): prefix = remove_auth_from_url(u).rstrip("/") + "/" if url.startswith(prefix): return u + return None def _get_new_credentials(self, original_url, allow_netrc=True, allow_keyring=True): + # type: (str, bool, bool) -> AuthInfo """Find and return credentials for the specified URL.""" # Split the credentials and netloc from the url. url, netloc, url_user_password = split_auth_netloc_from_url( @@ -158,6 +161,7 @@ def _get_new_credentials(self, original_url, allow_netrc=True, return username, password def _get_url_and_credentials(self, original_url): + # type: (str) -> Tuple[str, Optional[str], Optional[str]] """Return the credentials to use for the provided URL. 
If allowed, netrc and keyring may be used to obtain the @@ -198,6 +202,7 @@ def _get_url_and_credentials(self, original_url): return url, username, password def __call__(self, req): + # type: (Request) -> Request # Get credentials for this request url, username, password = self._get_url_and_credentials(req.url) @@ -215,22 +220,25 @@ def __call__(self, req): # Factored out to allow for easy patching in tests def _prompt_for_password(self, netloc): + # type: (str) -> Tuple[Optional[str], Optional[str], bool] username = ask_input("User for {}: ".format(netloc)) if not username: - return None, None + return None, None, False auth = get_keyring_auth(netloc, username) - if auth: + if auth and auth[0] is not None and auth[1] is not None: return auth[0], auth[1], False password = ask_password("Password: ") return username, password, True # Factored out to allow for easy patching in tests def _should_save_password_to_keyring(self): + # type: () -> bool if not keyring: return False return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" def handle_401(self, resp, **kwargs): + # type: (Response, **Any) -> Response # We only care about 401 responses, anything else we want to just # pass through the actual response if resp.status_code != 401: @@ -276,6 +284,7 @@ def handle_401(self, resp, **kwargs): return new_resp def warn_on_401(self, resp, **kwargs): + # type: (Response, **Any) -> None """Response callback to warn about incorrect credentials.""" if resp.status_code == 401: logger.warning( @@ -283,6 +292,7 @@ def warn_on_401(self, resp, **kwargs): ) def save_credentials(self, resp, **kwargs): + # type: (Response, **Any) -> None """Response callback to save credentials on success.""" assert keyring is not None, "should never reach here without keyring" if not keyring: diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index c9386e17360..a0d55b5e992 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -1,9 +1,6 @@ """HTTP cache implementation. """ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - import os from contextlib import contextmanager @@ -16,7 +13,7 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional + from typing import Optional, Iterator def is_from_cache(response): @@ -26,6 +23,7 @@ def is_from_cache(response): @contextmanager def suppressed_cache_errors(): + # type: () -> Iterator[None] """If we can't access the cache then we can just skip caching and process requests as if caching wasn't enabled. """ diff --git a/src/pip/_internal/network/xmlrpc.py b/src/pip/_internal/network/xmlrpc.py index 121edd93056..beab4fcfa7a 100644 --- a/src/pip/_internal/network/xmlrpc.py +++ b/src/pip/_internal/network/xmlrpc.py @@ -1,9 +1,6 @@ """xmlrpclib.Transport implementation """ -# The following comment should be removed at some point in the future. 
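The auth.py hunk changes _prompt_for_password to always return a 3-tuple (username, password, save) and to fall back to prompting when the keyring backend does not return a usable password, matching the 20.2b1 changelog entry "Prompt the user for password if the keyring backend doesn't return one". A minimal sketch of that control flow, with the prompt and keyring helpers passed in as stand-ins:

    # Sketch of the keyring-then-prompt flow; ask_input/ask_password/keyring_lookup
    # are stand-ins for pip's real helpers and keyring integration.
    def prompt_for_password(netloc, ask_input, ask_password, keyring_lookup):
        username = ask_input('User for {}: '.format(netloc))
        if not username:
            return None, None, False          # nothing entered, nothing to save
        auth = keyring_lookup(netloc, username)
        if auth and auth[0] is not None and auth[1] is not None:
            return auth[0], auth[1], False    # keyring had a full credential: don't re-save
        password = ask_password('Password: ')
        return username, password, True       # prompted: offer to save to keyring later

    # Example with a keyring that knows the user but has no password stored:
    creds = prompt_for_password(
        'pypi.example.com',
        ask_input=lambda msg: 'alice',
        ask_password=lambda msg: 's3cret',
        keyring_lookup=lambda netloc, user: ('alice', None),
    )
    print(creds)   # -> ('alice', 's3cret', True)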
-# mypy: disallow-untyped-defs=False - import logging from pip._vendor import requests @@ -12,6 +9,12 @@ from pip._vendor.six.moves import xmlrpc_client # type: ignore from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict + from pip._internal.network.session import PipSession + logger = logging.getLogger(__name__) @@ -21,12 +24,14 @@ class PipXmlrpcTransport(xmlrpc_client.Transport): """ def __init__(self, index_url, session, use_datetime=False): + # type: (str, PipSession, bool) -> None xmlrpc_client.Transport.__init__(self, use_datetime) index_parts = urllib_parse.urlparse(index_url) self._scheme = index_parts.scheme self._session = session def request(self, host, handler, request_body, verbose=False): + # type: (str, str, Dict[str, str], bool) -> None parts = (self._scheme, host, handler, None, None, None) url = urllib_parse.urlunparse(parts) try: diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py index 2fb86b866db..36877ca5e76 100644 --- a/src/pip/_internal/operations/install/wheel.py +++ b/src/pip/_internal/operations/install/wheel.py @@ -1,9 +1,6 @@ """Support for installing and building the "wheel" binary package format. """ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - from __future__ import absolute_import import collections @@ -24,7 +21,14 @@ from pip._vendor import pkg_resources from pip._vendor.distlib.scripts import ScriptMaker from pip._vendor.distlib.util import get_export_entry -from pip._vendor.six import StringIO +from pip._vendor.six import ( + PY2, + StringIO, + ensure_str, + ensure_text, + itervalues, + text_type, +) from pip._internal.exceptions import InstallationError from pip._internal.locations import get_major_minor_version @@ -36,29 +40,42 @@ from pip._internal.utils.unpacking import current_umask, unpack_file from pip._internal.utils.wheel import parse_wheel -if MYPY_CHECK_RUNNING: +# Use the custom cast function at runtime to make cast work, +# and import typing.cast when performing pre-commit and type +# checks +if not MYPY_CHECK_RUNNING: + from pip._internal.utils.typing import cast +else: from email.message import Message from typing import ( - Dict, List, Optional, Sequence, Tuple, Any, - Iterable, Iterator, Callable, Set, + Any, + Callable, + Dict, + IO, + Iterable, + Iterator, + List, + NewType, + Optional, + Sequence, + Set, + Tuple, + Union, + cast, ) from pip._internal.models.scheme import Scheme from pip._internal.utils.filesystem import NamedTemporaryFileResult - InstalledCSVRow = Tuple[str, ...] + RecordPath = NewType('RecordPath', text_type) + InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] logger = logging.getLogger(__name__) -def normpath(src, p): - # type: (str, str) -> str - return os.path.relpath(src, p).replace(os.path.sep, '/') - - def rehash(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[str, str] + # type: (text_type, int) -> Tuple[str, str] """Return (encoded_digest, length) for path using hashlib.sha256()""" h, length = hash_file(path, blocksize) digest = 'sha256=' + urlsafe_b64encode( @@ -73,14 +90,14 @@ def csv_io_kwargs(mode): """Return keyword arguments to properly open a CSV file in the given mode. 
""" - if sys.version_info.major < 3: + if PY2: return {'mode': '{}b'.format(mode)} else: - return {'mode': mode, 'newline': ''} + return {'mode': mode, 'newline': '', 'encoding': 'utf-8'} def fix_script(path): - # type: (str) -> Optional[bool] + # type: (text_type) -> Optional[bool] """Replace #!python with #!/path/to/python Return True if file was changed. """ @@ -211,9 +228,12 @@ def message_about_scripts_not_on_PATH(scripts): return "\n".join(msg_lines) -def sorted_outrows(outrows): - # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow] - """Return the given rows of a RECORD file in sorted order. +def _normalized_outrows(outrows): + # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]] + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. Each row is a 3-tuple (path, hash, size) and corresponds to a record of a RECORD file (see PEP 376 and PEP 427 for details). For the rows @@ -228,13 +248,39 @@ def sorted_outrows(outrows): # coerce each element to a string to avoid a TypeError in this case. # For additional background, see-- # https://github.com/pypa/pip/issues/5868 - return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) + return sorted( + (ensure_str(record_path, encoding='utf-8'), hash_, str(size)) + for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path): + # type: (RecordPath) -> text_type + return record_path + + +def _fs_to_record_path(path, relative_to=None): + # type: (text_type, Optional[text_type]) -> RecordPath + if relative_to is not None: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if os.path.splitdrive(path)[0].lower() == \ + os.path.splitdrive(relative_to)[0].lower(): + path = os.path.relpath(path, relative_to) + path = path.replace(os.path.sep, '/') + return cast('RecordPath', path) + + +def _parse_record_path(record_column): + # type: (str) -> RecordPath + p = ensure_text(record_column, encoding='utf-8') + return cast('RecordPath', p) def get_csv_rows_for_installed( old_csv_rows, # type: Iterable[List[str]] - installed, # type: Dict[str, str] - changed, # type: Set[str] + installed, # type: Dict[RecordPath, RecordPath] + changed, # type: Set[RecordPath] generated, # type: List[str] lib_dir, # type: str ): @@ -249,21 +295,20 @@ def get_csv_rows_for_installed( logger.warning( 'RECORD line has more than three elements: {}'.format(row) ) - # Make a copy because we are mutating the row. 
- row = list(row) - old_path = row[0] - new_path = installed.pop(old_path, old_path) - row[0] = new_path - if new_path in changed: - digest, length = rehash(new_path) - row[1] = digest - row[2] = length - installed_rows.append(tuple(row)) + old_record_path = _parse_record_path(row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path)) + else: + digest = row[1] if len(row) > 1 else '' + length = row[2] if len(row) > 2 else '' + installed_rows.append((new_record_path, digest, length)) for f in generated: + path = _fs_to_record_path(f, lib_dir) digest, length = rehash(f) - installed_rows.append((normpath(f, lib_dir), digest, str(length))) - for f in installed: - installed_rows.append((installed[f], '', '')) + installed_rows.append((path, digest, length)) + for installed_record_path in itervalues(installed): + installed_rows.append((installed_record_path, '', '')) return installed_rows @@ -332,8 +377,8 @@ def install_unpacked_wheel( # installed = files copied from the wheel to the destination # changed = files changed while installing (scripts #! line typically) # generated = files newly generated during the install (script wrappers) - installed = {} # type: Dict[str, str] - changed = set() + installed = {} # type: Dict[RecordPath, RecordPath] + changed = set() # type: Set[RecordPath] generated = [] # type: List[str] # Compile all of the pyc files that we're going to be installing @@ -345,20 +390,20 @@ def install_unpacked_wheel( logger.debug(stdout.getvalue()) def record_installed(srcfile, destfile, modified=False): - # type: (str, str, bool) -> None + # type: (text_type, text_type, bool) -> None """Map archive RECORD paths to installation RECORD paths.""" - oldpath = normpath(srcfile, wheeldir) - newpath = normpath(destfile, lib_dir) + oldpath = _fs_to_record_path(srcfile, wheeldir) + newpath = _fs_to_record_path(destfile, lib_dir) installed[oldpath] = newpath if modified: - changed.add(destfile) + changed.add(_fs_to_record_path(destfile)) def clobber( - source, # type: str - dest, # type: str + source, # type: text_type + dest, # type: text_type is_base, # type: bool - fixer=None, # type: Optional[Callable[[str], Any]] - filter=None # type: Optional[Callable[[str], bool]] + fixer=None, # type: Optional[Callable[[text_type], Any]] + filter=None # type: Optional[Callable[[text_type], bool]] ): # type: (...) 
-> None ensure_dir(dest) # common for the 'include' path @@ -417,7 +462,11 @@ def clobber( changed = fixer(destfile) record_installed(srcfile, destfile, changed) - clobber(source, lib_dir, True) + clobber( + ensure_text(source, encoding=sys.getfilesystemencoding()), + ensure_text(lib_dir, encoding=sys.getfilesystemencoding()), + True, + ) dest_info_dir = os.path.join(lib_dir, info_dir) @@ -426,7 +475,7 @@ def clobber( console, gui = get_entrypoints(ep_file) def is_entrypoint_wrapper(name): - # type: (str) -> bool + # type: (text_type) -> bool # EP, EP.exe and EP-script.py are scripts generated for # entry point EP by setuptools if name.lower().endswith('.exe'): @@ -450,7 +499,13 @@ def is_entrypoint_wrapper(name): filter = is_entrypoint_wrapper source = os.path.join(wheeldir, datadir, subdir) dest = getattr(scheme, subdir) - clobber(source, dest, False, fixer=fixer, filter=filter) + clobber( + ensure_text(source, encoding=sys.getfilesystemencoding()), + ensure_text(dest, encoding=sys.getfilesystemencoding()), + False, + fixer=fixer, + filter=filter, + ) maker = PipScriptMaker(None, scheme.scripts) @@ -600,8 +655,11 @@ def _generate_file(path, **kwargs): generated=generated, lib_dir=lib_dir) with _generate_file(record_path, **csv_io_kwargs('w')) as record_file: - writer = csv.writer(record_file) - writer.writerows(sorted_outrows(rows)) # sort to simplify testing + # The type mypy infers for record_file is different for Python 3 + # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly + # cast to typing.IO[str] as a workaround. + writer = csv.writer(cast('IO[str]', record_file)) + writer.writerows(_normalized_outrows(rows)) def install_wheel( diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index c9f1fe71396..7ca3370110f 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -8,9 +8,6 @@ InstallRequirement. """ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - import logging import os import re @@ -78,7 +75,7 @@ def convert_extras(extras): def parse_editable(editable_req): - # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]] + # type: (str) -> Tuple[Optional[str], str, Set[str]] """Parses an editable requirement into: - a requirement name - an URL @@ -120,7 +117,7 @@ def parse_editable(editable_req): Requirement("placeholder" + extras.lower()).extras, ) else: - return package_name, url_no_extras, None + return package_name, url_no_extras, set() for version_control in vcs: if url.lower().startswith('{}:'.format(version_control)): @@ -149,7 +146,7 @@ def parse_editable(editable_req): "Could not detect requirement name for '{}', please specify one " "with #egg=your_package_name".format(editable_req) ) - return package_name, url, None + return package_name, url, set() def deduce_helpful_msg(req): @@ -264,7 +261,7 @@ def _looks_like_path(name): def _get_url_from_path(path, name): - # type: (str, str) -> str + # type: (str, str) -> Optional[str] """ First, it checks whether a provided path is an installable directory (e.g. it has a setup.py). If it is, returns the path. 
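Aside: the RECORD writing change above boils down to opening the file with the mode/newline/encoding keywords from csv_io_kwargs() and feeding normalized three-tuples to csv.writer. A small self-contained sketch of that flow on Python 3 (the file name and rows here are made up for illustration):

import csv

def write_record(path, rows):
    # type: (str, list) -> None
    """Write (path, hash, size) rows to a RECORD-style CSV file."""
    # newline='' lets the csv module control line endings itself;
    # RECORD files are written as UTF-8 text.
    with open(path, mode='w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerows(sorted(rows))

write_record('RECORD', [
    ('pkg/__init__.py', 'sha256=abc123', '42'),
    ('pkg-1.0.dist-info/METADATA', 'sha256=def456', '101'),
])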
diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index 63cab76f6f2..cde0b08d6dd 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -60,6 +60,7 @@ cmdoptions.find_links, cmdoptions.no_binary, cmdoptions.only_binary, + cmdoptions.prefer_binary, cmdoptions.require_hashes, cmdoptions.pre, cmdoptions.trusted_host, @@ -260,6 +261,9 @@ def handle_option_line( if opts.pre: finder.set_allow_all_prereleases() + if opts.prefer_binary: + finder.set_prefer_binary() + if session: for host in opts.trusted_hosts or []: source = 'line {} of {}'.format(lineno, filename) diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py index f168ce17abd..d64bb78a327 100644 --- a/src/pip/_internal/req/req_set.py +++ b/src/pip/_internal/req/req_set.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - from __future__ import absolute_import import logging @@ -122,7 +119,8 @@ def add_requirement( return [install_req], None try: - existing_req = self.get_requirement(install_req.name) + existing_req = self.get_requirement( + install_req.name) # type: Optional[InstallRequirement] except KeyError: existing_req = None diff --git a/src/pip/_internal/req/req_tracker.py b/src/pip/_internal/req/req_tracker.py index 14adeab29b5..13fb24563fe 100644 --- a/src/pip/_internal/req/req_tracker.py +++ b/src/pip/_internal/req/req_tracker.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - from __future__ import absolute_import import contextlib @@ -98,6 +95,7 @@ def add(self, req): """Add an InstallRequirement to build tracking. """ + assert req.link # Get the file to write information about this requirement. entry_path = self._entry_path(req.link) @@ -130,6 +128,7 @@ def remove(self, req): """Remove an InstallRequirement from build tracking. """ + assert req.link # Delete the created file and the corresponding entries. 
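Aside: the two assert req.link lines added to the requirement tracker exist for mypy. With strict Optional checking now enabled for these modules, an attribute typed Optional must be narrowed before its own attributes are accessed. A toy illustration of the pattern (the classes below are stand-ins, not pip's):

from typing import Optional

class Link(object):
    def __init__(self, url):
        # type: (str) -> None
        self.url = url

class Requirement(object):
    def __init__(self, link=None):
        # type: (Optional[Link]) -> None
        self.link = link

def entry_path(req):
    # type: (Requirement) -> str
    # Without the assert, mypy reports that None has no attribute "url"
    # under strict Optional checking; the assert narrows Optional[Link] to Link.
    assert req.link is not None
    return req.link.url.replace('/', '_')

print(entry_path(Requirement(Link('https://example.com/pkg-1.0.tar.gz'))))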
os.unlink(self._entry_path(req.link)) self._entries.remove(req) diff --git a/src/pip/_internal/resolution/resolvelib/base.py b/src/pip/_internal/resolution/resolvelib/base.py index eacdf8ecc8e..17513d336e7 100644 --- a/src/pip/_internal/resolution/resolvelib/base.py +++ b/src/pip/_internal/resolution/resolvelib/base.py @@ -3,7 +3,7 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional, Sequence, Set + from typing import Iterable, Optional, Sequence, Set from pip._internal.req.req_install import InstallRequirement from pip._vendor.packaging.specifiers import SpecifierSet @@ -49,8 +49,8 @@ def is_installed(self): # type: () -> bool raise NotImplementedError("Override in subclass") - def get_dependencies(self): - # type: () -> Sequence[Requirement] + def iter_dependencies(self): + # type: () -> Iterable[Requirement] raise NotImplementedError("Override in subclass") def get_install_requirement(self): diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 418da5d4d0d..da11c4fe785 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -17,7 +17,7 @@ from .base import Candidate, format_name if MYPY_CHECK_RUNNING: - from typing import Any, Optional, Sequence, Set, Tuple, Union + from typing import Any, Iterable, Optional, Set, Tuple, Union from pip._vendor.packaging.version import _BaseVersion from pip._vendor.pkg_resources import Distribution @@ -212,18 +212,15 @@ def _get_requires_python_specifier(self): return None return spec - def get_dependencies(self): - # type: () -> Sequence[Requirement] - deps = [ - self._factory.make_requirement_from_spec(str(r), self._ireq) - for r in self.dist.requires() - ] + def iter_dependencies(self): + # type: () -> Iterable[Requirement] + for r in self.dist.requires(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) python_dep = self._factory.make_requires_python_requirement( self._get_requires_python_specifier(), ) if python_dep: - deps.append(python_dep) - return deps + yield python_dep def get_install_requirement(self): # type: () -> Optional[InstallRequirement] @@ -326,12 +323,10 @@ def version(self): # type: () -> _BaseVersion return self.dist.parsed_version - def get_dependencies(self): - # type: () -> Sequence[Requirement] - return [ - self._factory.make_requirement_from_spec(str(r), self._ireq) - for r in self.dist.requires() - ] + def iter_dependencies(self): + # type: () -> Iterable[Requirement] + for r in self.dist.requires(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) def get_install_requirement(self): # type: () -> Optional[InstallRequirement] @@ -352,8 +347,8 @@ class ExtrasCandidate(Candidate): to treat it as a separate node in the dependency graph. 2. When we're getting the candidate's dependencies, a) We specify that we want the extra dependencies as well. - b) We add a dependency on the base candidate (matching the name and - version). See below for why this is needed. + b) We add a dependency on the base candidate. + See below for why this is needed. 3. We return None for the underlying InstallRequirement, as the base candidate will provide it, and we don't want to end up with duplicates. 
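Aside: the rename from get_dependencies() to iter_dependencies() in these candidate classes is more than cosmetic. Returning a list forces every requirement object to be built up front, while a generator builds them only as the resolver asks. A simplified sketch of the two shapes, with strings standing in for pip's requirement objects:

from typing import Iterable, List

def make_requirement(spec):
    # type: (str) -> str
    print('constructing requirement for', spec)
    return spec

def get_dependencies(specs):
    # type: (List[str]) -> List[str]
    # Eager: every requirement is constructed immediately.
    return [make_requirement(s) for s in specs]

def iter_dependencies(specs):
    # type: (List[str]) -> Iterable[str]
    # Lazy: requirements are constructed only as the caller iterates.
    for s in specs:
        yield make_requirement(s)

specs = ['requests>=2.0', 'idna', 'urllib3']
eager = get_dependencies(specs)   # prints three "constructing ..." lines now
lazy = iter_dependencies(specs)   # prints nothing yet
first = next(lazy)                # constructs only the first requirement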
@@ -406,8 +401,8 @@ def is_installed(self): # type: () -> _BaseVersion return self.base.is_installed - def get_dependencies(self): - # type: () -> Sequence[Requirement] + def iter_dependencies(self): + # type: () -> Iterable[Requirement] factory = self.base._factory # The user may have specified extras that the candidate doesn't @@ -422,17 +417,16 @@ def get_dependencies(self): extra ) - deps = [ - factory.make_requirement_from_spec(str(r), self.base._ireq) - for r in self.base.dist.requires(valid_extras) - ] - # Add a dependency on the exact base. + # Add a dependency on the exact base # (See note 2b in the class docstring) - # FIXME: This does not work if the base candidate is specified by - # link, e.g. "pip install .[dev]" will fail. - spec = "{}=={}".format(self.base.name, self.base.version) - deps.append(factory.make_requirement_from_spec(spec, self.base._ireq)) - return deps + yield factory.make_requirement_from_candidate(self.base) + + for r in self.base.dist.requires(valid_extras): + requirement = factory.make_requirement_from_spec_matching_extras( + str(r), self.base._ireq, valid_extras, + ) + if requirement: + yield requirement def get_install_requirement(self): # type: () -> Optional[InstallRequirement] @@ -468,9 +462,9 @@ def version(self): # type: () -> _BaseVersion return self._version - def get_dependencies(self): - # type: () -> Sequence[Requirement] - return [] + def iter_dependencies(self): + # type: () -> Iterable[Requirement] + return () def get_install_requirement(self): # type: () -> Optional[InstallRequirement] diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index ce93707b701..046119cfe45 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -1,3 +1,7 @@ +import collections +import logging + +from pip._vendor import six from pip._vendor.packaging.utils import canonicalize_name from pip._internal.exceptions import ( @@ -26,7 +30,7 @@ ) if MYPY_CHECK_RUNNING: - from typing import Dict, Iterator, Optional, Set, Tuple, TypeVar + from typing import Dict, Iterable, Iterator, Optional, Set, Tuple, TypeVar from pip._vendor.packaging.specifiers import SpecifierSet from pip._vendor.packaging.version import _BaseVersion @@ -44,6 +48,10 @@ C = TypeVar("C") Cache = Dict[Link, C] + VersionCandidates = Dict[_BaseVersion, Candidate] + + +logger = logging.getLogger(__name__) class Factory(object): @@ -127,11 +135,12 @@ def iter_found_candidates(self, ireq, extras): # requirement needs to return only one candidate per version, so we # implement that logic here so that requirements using this helper # don't all have to do the same thing later. - seen_versions = set() # type: Set[_BaseVersion] + candidates = collections.OrderedDict() # type: VersionCandidates # Yield the installed version, if it matches, unless the user # specified `--force-reinstall`, when we want the version from # the index instead. 
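Aside: the switch below from a seen_versions set to an OrderedDict keeps one candidate per version while still yielding them in the order they were found (installed version first, then the index). A standalone sketch of that dedupe-by-key pattern, using plain tuples rather than pip's Candidate objects and without pip's exact precedence rules:

import collections

def dedupe_by_version(found):
    # type: (list) -> list
    """Keep the first candidate seen for each version, preserving order."""
    candidates = collections.OrderedDict()
    for version, origin in found:
        if version in candidates:
            continue  # e.g. an index copy of the already-installed version
        candidates[version] = (version, origin)
    return list(candidates.values())

found = [('1.2', 'installed'), ('1.3', 'index'), ('1.2', 'index')]
print(dedupe_by_version(found))
# -> [('1.2', 'installed'), ('1.3', 'index')]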
+ installed_version = None if not self._force_reinstall and name in self._installed_dists: installed_dist = self._installed_dists[name] installed_version = installed_dist.parsed_version @@ -139,12 +148,12 @@ def iter_found_candidates(self, ireq, extras): installed_version, prereleases=True ): - seen_versions.add(installed_version) - yield self._make_candidate_from_dist( + candidate = self._make_candidate_from_dist( dist=installed_dist, extras=extras, parent=ireq, ) + candidates[installed_version] = candidate found = self.finder.find_best_candidate( project_name=ireq.req.name, @@ -152,15 +161,18 @@ def iter_found_candidates(self, ireq, extras): hashes=ireq.hashes(trust_internet=False), ) for ican in found.iter_applicable(): - if ican.version not in seen_versions: - seen_versions.add(ican.version) - yield self._make_candidate_from_link( - link=ican.link, - extras=extras, - parent=ireq, - name=name, - version=ican.version, - ) + if ican.version == installed_version: + continue + candidate = self._make_candidate_from_link( + link=ican.link, + extras=extras, + parent=ireq, + name=name, + version=ican.version, + ) + candidates[ican.version] = candidate + + return six.itervalues(candidates) def make_requirement_from_install_req(self, ireq): # type: (InstallRequirement) -> Requirement @@ -168,17 +180,37 @@ def make_requirement_from_install_req(self, ireq): # TODO: Get name and version from ireq, if possible? # Specifically, this might be needed in "name @ URL" # syntax - need to check where that syntax is handled. - cand = self._make_candidate_from_link( + candidate = self._make_candidate_from_link( ireq.link, extras=set(ireq.extras), parent=ireq, ) - return ExplicitRequirement(cand) + return self.make_requirement_from_candidate(candidate) return SpecifierRequirement(ireq, factory=self) + def make_requirement_from_candidate(self, candidate): + # type: (Candidate) -> ExplicitRequirement + return ExplicitRequirement(candidate) + def make_requirement_from_spec(self, specifier, comes_from): # type: (str, InstallRequirement) -> Requirement ireq = self._make_install_req_from_spec(specifier, comes_from) return self.make_requirement_from_install_req(ireq) + def make_requirement_from_spec_matching_extras( + self, + specifier, # type: str + comes_from, # type: InstallRequirement + requested_extras=(), # type: Iterable[str] + ): + # type: (...) 
-> Optional[Requirement] + ireq = self._make_install_req_from_spec(specifier, comes_from) + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, ireq.markers, + ) + return None + return self.make_requirement_from_install_req(ireq) + def make_requires_python_requirement(self, specifier): # type: (Optional[SpecifierSet]) -> Optional[Requirement] if self._ignore_requires_python or specifier is None: diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index 4e8c5ae309b..e4c516948c4 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -132,4 +132,4 @@ def get_dependencies(self, candidate): # type: (Candidate) -> Sequence[Requirement] if self._ignore_dependencies: return [] - return candidate.get_dependencies() + return list(candidate.iter_dependencies()) diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index 208f7a300e1..f21e37a4a63 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -55,7 +55,7 @@ def __init__(self, ireq, factory): assert ireq.link is None, "This is a link, not a specifier" self._ireq = ireq self._factory = factory - self.extras = ireq.req.extras + self.extras = set(ireq.extras) def __str__(self): # type: () -> str diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index d05a277b000..9eab87b3a72 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -6,7 +6,7 @@ from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible from pip._vendor.resolvelib import Resolver as RLResolver -from pip._internal.exceptions import InstallationError +from pip._internal.exceptions import DistributionNotFound, InstallationError from pip._internal.req.req_set import RequirementSet from pip._internal.resolution.base import BaseResolver from pip._internal.resolution.resolvelib.provider import PipProvider @@ -105,6 +105,8 @@ def resolve(self, root_reqs, check_supported_wheels): user_requested = set() # type: Set[str] requirements = [] for req in root_reqs: + if not req.match_markers(): + continue if req.constraint: # Ensure we only accept valid constraints reject_invalid_constraint_types(req) @@ -132,7 +134,10 @@ def resolve(self, root_reqs, check_supported_wheels): resolver = RLResolver(provider, reporter) try: - self._result = resolver.resolve(requirements) + try_to_avoid_resolution_too_deep = 2000000 + self._result = resolver.resolve( + requirements, max_rounds=try_to_avoid_resolution_too_deep, + ) except ResolutionImpossible as e: error = self.factory.get_installation_error(e) @@ -150,11 +155,10 @@ def resolve(self, root_reqs, check_supported_wheels): parent.name )) ) - raise InstallationError( + raise DistributionNotFound( "No matching distribution found for " + ", ".join([r.name for r, _ in e.causes]) ) - raise six.raise_from(error, e) req_set = RequirementSet(check_supported_wheels=check_supported_wheels) diff --git a/src/pip/_internal/utils/encoding.py b/src/pip/_internal/utils/encoding.py index ab4d4b98e3e..5b83d61bb13 100644 --- a/src/pip/_internal/utils/encoding.py +++ b/src/pip/_internal/utils/encoding.py @@ -1,6 +1,3 @@ -# The following comment should be removed at 
some point in the future. -# mypy: strict-optional=False - import codecs import locale import re @@ -35,7 +32,9 @@ def auto_decode(data): # Lets check the first two lines as in PEP263 for line in data.split(b'\n')[:2]: if line[0:1] == b'#' and ENCODING_RE.search(line): - encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') + result = ENCODING_RE.search(line) + assert result is not None + encoding = result.groups()[0].decode('ascii') return data.decode(encoding) return data.decode( locale.getpreferredencoding(False) or sys.getdefaultencoding(), diff --git a/src/pip/_internal/utils/filesystem.py b/src/pip/_internal/utils/filesystem.py index 437a7fd1482..c706a038c2a 100644 --- a/src/pip/_internal/utils/filesystem.py +++ b/src/pip/_internal/utils/filesystem.py @@ -170,6 +170,8 @@ def _test_writable_dir_win(path): # This could be because there's a directory with the same name. # But it's highly unlikely there's a directory called that, # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. return False raise else: diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 09031825afa..658a30b86ac 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -131,7 +131,7 @@ def get_prog(): # Retry every half second for up to 3 seconds @retry(stop_max_delay=3000, wait_fixed=500) def rmtree(dir, ignore_errors=False): - # type: (str, bool) -> None + # type: (Text, bool) -> None shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) @@ -541,7 +541,7 @@ class FakeFile(object): """Wrap a list of lines in an object with readline() to make ConfigParser happy.""" def __init__(self, lines): - self._gen = (l for l in lines) + self._gen = iter(lines) def readline(self): try: @@ -876,7 +876,7 @@ def is_console_interactive(): def hash_file(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[Any, int] + # type: (Text, int) -> Tuple[Any, int] """Return (hash, length) for path using hashlib.sha256() """ diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index 201ba6d9811..54c3140110c 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -8,6 +8,7 @@ from contextlib import contextmanager from pip._vendor.contextlib2 import ExitStack +from pip._vendor.six import ensure_text from pip._internal.utils.misc import enum, rmtree from pip._internal.utils.typing import MYPY_CHECK_RUNNING @@ -193,7 +194,9 @@ def cleanup(self): """ self._deleted = True if os.path.exists(self._path): - rmtree(self._path) + # Make sure to pass unicode on Python 2 to make the contents also + # use unicode, ensuring non-ASCII names and can be represented. + rmtree(ensure_text(self._path)) class AdjacentTempDirectory(TempDirectory): diff --git a/src/pip/_internal/utils/unpacking.py b/src/pip/_internal/utils/unpacking.py index 7252dc217bf..fe71d26e355 100644 --- a/src/pip/_internal/utils/unpacking.py +++ b/src/pip/_internal/utils/unpacking.py @@ -1,10 +1,6 @@ """Utilities related archives. """ -# The following comment should be removed at some point in the future. 
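Aside: the auto_decode change above only restructures the code so mypy can see the regex match is not None, but the behaviour it preserves, honouring a PEP 263 coding cookie in the first two lines, is worth a quick illustration. A rough sketch with a simplified fallback (pip falls back to the locale's preferred encoding rather than UTF-8):

import re

ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')

def auto_decode(data):
    # type: (bytes) -> str
    """Decode bytes, preferring an explicit PEP 263 coding cookie."""
    for line in data.split(b'\n')[:2]:
        if line[0:1] == b'#':
            match = ENCODING_RE.search(line)
            if match is not None:  # explicit check instead of chaining .groups()
                return data.decode(match.groups()[0].decode('ascii'))
    return data.decode('utf-8')  # simplified fallback for this sketch

print(auto_decode(b'# -*- coding: latin-1 -*-\nname = "caf\xe9"\n'))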
-# mypy: strict-optional=False -# mypy: disallow-untyped-defs=False - from __future__ import absolute_import import logging @@ -48,6 +44,7 @@ def current_umask(): + # type: () -> int """Get the current umask which involves having to set it temporarily.""" mask = os.umask(0) os.umask(mask) @@ -219,6 +216,7 @@ def untar_file(filename, location): ) continue ensure_dir(os.path.dirname(path)) + assert fp is not None with open(path, 'wb') as destfp: shutil.copyfileobj(fp, destfp) fp.close() diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index 347c06f9dc7..94408c52fa9 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -54,8 +54,7 @@ def export(self, location, url): url, rev_options = self.get_url_rev_options(url) self.run_command( - make_command('export', location, url, rev_options.to_args()), - show_stdout=False, + make_command('export', location, url, rev_options.to_args()) ) def fetch_new(self, dest, url, rev_options): @@ -92,7 +91,7 @@ def get_url_rev_and_auth(cls, url): @classmethod def get_remote_url(cls, location): - urls = cls.run_command(['info'], show_stdout=False, cwd=location) + urls = cls.run_command(['info'], cwd=location) for line in urls.splitlines(): line = line.strip() for x in ('checkout of branch: ', @@ -107,7 +106,7 @@ def get_remote_url(cls, location): @classmethod def get_revision(cls, location): revision = cls.run_command( - ['revno'], show_stdout=False, cwd=location, + ['revno'], cwd=location, ) return revision.splitlines()[-1] diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index e173ec894ca..a9c7fb66e33 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -11,7 +11,7 @@ from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib import request as urllib_request -from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.exceptions import BadCommand, SubProcessError from pip._internal.utils.misc import display_path, hide_url from pip._internal.utils.subprocess import make_command from pip._internal.utils.temp_dir import TempDirectory @@ -78,7 +78,7 @@ def is_immutable_rev_checkout(self, url, dest): def get_git_version(self): VERSION_PFX = 'git version ' - version = self.run_command(['version'], show_stdout=False) + version = self.run_command(['version']) if version.startswith(VERSION_PFX): version = version[len(VERSION_PFX):].split()[0] else: @@ -101,7 +101,7 @@ def get_current_branch(cls, location): # and to suppress the message to stderr. args = ['symbolic-ref', '-q', 'HEAD'] output = cls.run_command( - args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + args, extra_ok_returncodes=(1, ), cwd=location, ) ref = output.strip() @@ -120,7 +120,7 @@ def export(self, location, url): self.unpack(temp_dir.path, url=url) self.run_command( ['checkout-index', '-a', '-f', '--prefix', location], - show_stdout=False, cwd=temp_dir.path + cwd=temp_dir.path ) @classmethod @@ -134,8 +134,13 @@ def get_revision_sha(cls, dest, rev): rev: the revision name. """ # Pass rev to pre-filter the list. - output = cls.run_command(['show-ref', rev], cwd=dest, - show_stdout=False, on_returncode='ignore') + + output = '' + try: + output = cls.run_command(['show-ref', rev], cwd=dest) + except SubProcessError: + pass + refs = {} for line in output.strip().splitlines(): try: @@ -286,7 +291,7 @@ def get_remote_url(cls, location): # exits with return code 1 if there are no matching lines. 
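Aside: get_revision_sha below now treats a failing git show-ref as "no matching refs" by catching the new SubProcessError instead of passing on_returncode='ignore'. The same try/except-and-default pattern expressed with the standard library, as a rough stand-in for pip's own subprocess helpers:

import subprocess

def show_ref(rev, cwd):
    # type: (str, str) -> str
    """Return `git show-ref <rev>` output, or '' when nothing matches."""
    output = ''
    try:
        output = subprocess.check_output(
            ['git', 'show-ref', rev], cwd=cwd, universal_newlines=True)
    except subprocess.CalledProcessError:
        # git exits non-zero when no refs match; treat that as empty output.
        pass
    return output

# e.g. refs = show_ref('v1.0', '/path/to/repo')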
stdout = cls.run_command( ['config', '--get-regexp', r'remote\..*\.url'], - extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + extra_ok_returncodes=(1, ), cwd=location, ) remotes = stdout.splitlines() try: @@ -306,7 +311,7 @@ def get_revision(cls, location, rev=None): if rev is None: rev = 'HEAD' current_rev = cls.run_command( - ['rev-parse', rev], show_stdout=False, cwd=location, + ['rev-parse', rev], cwd=location, ) return current_rev.strip() @@ -319,7 +324,7 @@ def get_subdirectory(cls, location): # find the repo root git_dir = cls.run_command( ['rev-parse', '--git-dir'], - show_stdout=False, cwd=location).strip() + cwd=location).strip() if not os.path.isabs(git_dir): git_dir = os.path.join(location, git_dir) repo_root = os.path.abspath(os.path.join(git_dir, '..')) @@ -378,15 +383,13 @@ def get_repository_root(cls, location): r = cls.run_command( ['rev-parse', '--show-toplevel'], cwd=location, - show_stdout=False, - on_returncode='raise', log_failed_cmd=False, ) except BadCommand: logger.debug("could not determine if %s is under git control " "because git is not available", location) return None - except InstallationError: + except SubProcessError: return None return os.path.normpath(r.rstrip('\r\n')) diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index 75e903cc8a6..69763feaea4 100644 --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -8,7 +8,7 @@ from pip._vendor.six.moves import configparser -from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.exceptions import BadCommand, SubProcessError from pip._internal.utils.misc import display_path from pip._internal.utils.subprocess import make_command from pip._internal.utils.temp_dir import TempDirectory @@ -47,7 +47,7 @@ def export(self, location, url): self.unpack(temp_dir.path, url=url) self.run_command( - ['archive', location], show_stdout=False, cwd=temp_dir.path + ['archive', location], cwd=temp_dir.path ) def fetch_new(self, dest, url, rev_options): @@ -92,7 +92,7 @@ def update(self, dest, url, rev_options): def get_remote_url(cls, location): url = cls.run_command( ['showconfig', 'paths.default'], - show_stdout=False, cwd=location).strip() + cwd=location).strip() if cls._is_local_repository(url): url = path_to_url(url) return url.strip() @@ -103,8 +103,7 @@ def get_revision(cls, location): Return the repository-local changeset revision number, as an integer. 
""" current_revision = cls.run_command( - ['parents', '--template={rev}'], - show_stdout=False, cwd=location).strip() + ['parents', '--template={rev}'], cwd=location).strip() return current_revision @classmethod @@ -115,7 +114,7 @@ def get_requirement_revision(cls, location): """ current_rev_hash = cls.run_command( ['parents', '--template={node}'], - show_stdout=False, cwd=location).strip() + cwd=location).strip() return current_rev_hash @classmethod @@ -131,7 +130,7 @@ def get_subdirectory(cls, location): """ # find the repo root repo_root = cls.run_command( - ['root'], show_stdout=False, cwd=location).strip() + ['root'], cwd=location).strip() if not os.path.isabs(repo_root): repo_root = os.path.abspath(os.path.join(location, repo_root)) return find_path_to_setup_from_repo_root(location, repo_root) @@ -145,15 +144,13 @@ def get_repository_root(cls, location): r = cls.run_command( ['root'], cwd=location, - show_stdout=False, - on_returncode='raise', log_failed_cmd=False, ) except BadCommand: logger.debug("could not determine if %s is under hg control " "because hg is not available", location) return None - except InstallationError: + except SubProcessError: return None return os.path.normpath(r.rstrip('\r\n')) diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 0ec65974492..4324a5d9f82 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -132,7 +132,7 @@ def get_remote_url(cls, location): @classmethod def _get_svn_url_rev(cls, location): - from pip._internal.exceptions import InstallationError + from pip._internal.exceptions import SubProcessError entries_path = os.path.join(location, cls.dirname, 'entries') if os.path.exists(entries_path): @@ -165,13 +165,12 @@ def _get_svn_url_rev(cls, location): # are only potentially needed for remote server requests. 
xml = cls.run_command( ['info', '--xml', location], - show_stdout=False, ) url = _svn_info_xml_url_re.search(xml).group(1) revs = [ int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) ] - except InstallationError: + except SubProcessError: url, revs = None, [] if revs: @@ -215,7 +214,8 @@ def call_vcs_version(self): # svn, version 1.7.14 (r1542130) # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu version_prefix = 'svn, version ' - version = self.run_command(['--version'], show_stdout=False) + version = self.run_command(['--version']) + if not version.startswith(version_prefix): return () @@ -297,7 +297,7 @@ def export(self, location, url): 'export', self.get_remote_call_options(), rev_options.to_args(), url, location, ) - self.run_command(cmd_args, show_stdout=False) + self.run_command(cmd_args) def fetch_new(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None diff --git a/src/pip/_internal/vcs/versioncontrol.py b/src/pip/_internal/vcs/versioncontrol.py index 71b4650a252..96f830f9918 100644 --- a/src/pip/_internal/vcs/versioncontrol.py +++ b/src/pip/_internal/vcs/versioncontrol.py @@ -6,13 +6,19 @@ import logging import os import shutil +import subprocess import sys from pip._vendor import pkg_resources from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._internal.exceptions import BadCommand, InstallationError -from pip._internal.utils.compat import samefile +from pip._internal.exceptions import ( + BadCommand, + InstallationError, + SubProcessError, +) +from pip._internal.utils.compat import console_to_str, samefile +from pip._internal.utils.logging import subprocess_logger from pip._internal.utils.misc import ( ask_path_exists, backup_dir, @@ -21,16 +27,20 @@ hide_value, rmtree, ) -from pip._internal.utils.subprocess import call_subprocess, make_command +from pip._internal.utils.subprocess import ( + format_command_args, + make_command, + make_subprocess_output_error, + reveal_command_args, +) from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.utils.urls import get_url_scheme if MYPY_CHECK_RUNNING: from typing import ( - Any, Dict, Iterable, Iterator, List, Mapping, Optional, Text, Tuple, - Type, Union + Dict, Iterable, Iterator, List, Optional, Text, Tuple, + Type, Union, Mapping, Any ) - from pip._internal.cli.spinners import SpinnerInterface from pip._internal.utils.misc import HiddenText from pip._internal.utils.subprocess import CommandArgs @@ -71,6 +81,92 @@ def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): return req +def call_subprocess( + cmd, # type: Union[List[str], CommandArgs] + cwd=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + log_failed_cmd=True # type: Optional[bool] +): + # type: (...) -> Text + """ + Args: + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + log_failed_cmd: if false, failed commands are not logged, + only raised. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + + # log the subprocess output at DEBUG level. + log_subprocess = subprocess_logger.debug + + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + # Whether the subprocess will be visible in the console. + showing_subprocess = True + + command_desc = format_command_args(cmd) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. 
+ reveal_command_args(cmd), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd + ) + if proc.stdin: + proc.stdin.close() + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + while True: + # The "line" value is a unicode string in Python 2. + line = None + if proc.stdout: + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + + # Show the line immediately. + log_subprocess(line) + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + + proc_had_error = ( + proc.returncode and proc.returncode not in extra_ok_returncodes + ) + if proc_had_error: + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + exc_msg = ( + 'Command errored out with exit status {}: {} ' + 'Check the logs for full command output.' + ).format(proc.returncode, command_desc) + raise SubProcessError(exc_msg) + return ''.join(all_output) + + def find_path_to_setup_from_repo_root(location, repo_root): # type: (str, str) -> Optional[str] """ @@ -659,13 +755,9 @@ def get_revision(cls, location): def run_command( cls, cmd, # type: Union[List[str], CommandArgs] - show_stdout=True, # type: bool cwd=None, # type: Optional[str] - on_returncode='raise', # type: str - extra_ok_returncodes=None, # type: Optional[Iterable[int]] - command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] - spinner=None, # type: Optional[SpinnerInterface] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] log_failed_cmd=True # type: bool ): # type: (...) -> Text @@ -676,13 +768,9 @@ def run_command( """ cmd = make_command(cls.name, *cmd) try: - return call_subprocess(cmd, show_stdout, cwd, - on_returncode=on_returncode, - extra_ok_returncodes=extra_ok_returncodes, - command_desc=command_desc, + return call_subprocess(cmd, cwd, extra_environ=extra_environ, - unset_environ=cls.unset_environ, - spinner=spinner, + extra_ok_returncodes=extra_ok_returncodes, log_failed_cmd=log_failed_cmd) except OSError as e: # errno.ENOENT = no such file or directory diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index fcaeeb6c3f4..b5e8bf33924 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -36,7 +36,8 @@ def _contains_egg_info( - s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): + s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', + re.IGNORECASE)): # type: (str, Pattern[str]) -> bool """Determine whether the string looks like an egg_info. @@ -118,11 +119,8 @@ def _should_cache( wheel cache, assuming the wheel cache is available, and _should_build() has determined a wheel needs to be built. 
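Aside: the call_subprocess copy added to versioncontrol.py follows a simple contract: capture stdout line by line, log each line, and raise once the process exits with a status that is not whitelisted. A compact standalone version of that contract, without pip's logging and HiddenText machinery (stderr is merged into stdout here for simplicity):

import subprocess

class SubProcessError(Exception):
    """Raised when a command exits with an unexpected status."""

def call_subprocess(cmd, cwd=None, extra_ok_returncodes=()):
    proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, universal_newlines=True)
    lines = []
    for line in proc.stdout:  # stream and collect output as it arrives
        lines.append(line)
    proc.wait()
    if proc.returncode and proc.returncode not in extra_ok_returncodes:
        raise SubProcessError(
            'Command errored out with exit status {}: {}'.format(
                proc.returncode, ' '.join(cmd)))
    return ''.join(lines)

# e.g. print(call_subprocess(['git', 'version']))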
""" - if not should_build_for_install_command( - req, check_binary_allowed=_always_true - ): - # never cache if pip install would not have built - # (editable mode, etc) + if req.editable or not req.source_dir: + # never cache editable requirements return False if req.link and req.link.is_vcs: diff --git a/tests/conftest.py b/tests/conftest.py index bf8cd7975c7..c5f369cb8d4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -41,6 +41,12 @@ def pytest_addoption(parser): default=False, help="use new resolver in tests", ) + parser.addoption( + "--new-resolver-runtests", + action="store_true", + default=False, + help="run the skipped tests for the new resolver", + ) parser.addoption( "--use-venv", action="store_true", @@ -59,6 +65,12 @@ def pytest_collection_modifyitems(config, items): "CI" in os.environ): item.add_marker(pytest.mark.flaky(reruns=3)) + if (item.get_closest_marker('fails_on_new_resolver') and + config.getoption("--new-resolver") and + not config.getoption("--new-resolver-runtests")): + item.add_marker(pytest.mark.skip( + 'This test does not work with the new resolver')) + if six.PY3: if (item.get_closest_marker('incompatible_with_test_venv') and config.getoption("--use-venv")): diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index 4e2390aa79d..e30b2c07987 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ -170,7 +170,7 @@ def test_cache_list_name_and_version_match(script): assert not list_matches_wheel('zzz-7.8.9', result) -@pytest.mark.usefixture("populate_wheel_cache") +@pytest.mark.usefixtures("populate_wheel_cache") def test_cache_remove_no_arguments(script): """Running `pip cache remove` with no arguments should cause an error.""" script.pip('cache', 'remove', expect_error=True) diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 05b97ab3aec..83e2aae265f 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -669,6 +669,7 @@ def test_download_exit_status_code_when_blank_requirements_file(script): script.pip('download', '-r', 'blank.txt') +@pytest.mark.fails_on_new_resolver def test_download_prefer_binary_when_tarball_higher_than_wheel(script, data): fake_wheel(data, 'source-0.8-py2.py3-none-any.whl') result = script.pip( @@ -688,6 +689,30 @@ def test_download_prefer_binary_when_tarball_higher_than_wheel(script, data): ) +def test_prefer_binary_tarball_higher_than_wheel_req_file(script, data): + fake_wheel(data, 'source-0.8-py2.py3-none-any.whl') + script.scratch_path.joinpath("test-req.txt").write_text(textwrap.dedent(""" + --prefer-binary + source + """)) + result = script.pip( + 'download', + '-r', script.scratch_path / 'test-req.txt', + '--no-index', + '-f', data.packages, + '-d', '.' 
+ ) + + assert ( + Path('scratch') / 'source-0.8-py2.py3-none-any.whl' + in result.files_created + ) + assert ( + Path('scratch') / 'source-1.0.tar.gz' + not in result.files_created + ) + + def test_download_prefer_binary_when_wheel_doesnt_satisfy_req(script, data): fake_wheel(data, 'source-0.8-py2.py3-none-any.whl') script.scratch_path.joinpath("test-req.txt").write_text(textwrap.dedent(""" @@ -712,6 +737,30 @@ def test_download_prefer_binary_when_wheel_doesnt_satisfy_req(script, data): ) +def test_prefer_binary_when_wheel_doesnt_satisfy_req_req_file(script, data): + fake_wheel(data, 'source-0.8-py2.py3-none-any.whl') + script.scratch_path.joinpath("test-req.txt").write_text(textwrap.dedent(""" + --prefer-binary + source>0.9 + """)) + + result = script.pip( + 'download', + '--no-index', + '-f', data.packages, + '-d', '.', + '-r', script.scratch_path / 'test-req.txt' + ) + assert ( + Path('scratch') / 'source-1.0.tar.gz' + in result.files_created + ) + assert ( + Path('scratch') / 'source-0.8-py2.py3-none-any.whl' + not in result.files_created + ) + + def test_download_prefer_binary_when_only_tarball_exists(script, data): result = script.pip( 'download', @@ -726,6 +775,24 @@ def test_download_prefer_binary_when_only_tarball_exists(script, data): ) +def test_prefer_binary_when_only_tarball_exists_req_file(script, data): + script.scratch_path.joinpath("test-req.txt").write_text(textwrap.dedent(""" + --prefer-binary + source + """)) + result = script.pip( + 'download', + '--no-index', + '-f', data.packages, + '-d', '.', + '-r', script.scratch_path / 'test-req.txt' + ) + assert ( + Path('scratch') / 'source-1.0.tar.gz' + in result.files_created + ) + + @pytest.fixture(scope="session") def shared_script(tmpdir_factory, script_factory): tmpdir = Path(str(tmpdir_factory.mktemp("download_shared_script"))) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 6e19ef50699..4decbd2d81f 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -148,6 +148,7 @@ def test_pep518_with_user_pip(script, pip_src, data, common_wheels): ) +@pytest.mark.fails_on_new_resolver def test_pep518_with_extra_and_markers(script, data, common_wheels): script.pip( 'wheel', '--no-index', @@ -532,6 +533,7 @@ def assert_re_match(pattern, text): @pytest.mark.network +@pytest.mark.fails_on_new_resolver def test_hashed_install_failure_later_flag(script, tmpdir): with requirements_file( "blessings==1.0\n" @@ -937,6 +939,7 @@ def test_install_nonlocal_compatible_wheel(script, data): assert result.returncode == ERROR +@pytest.mark.fails_on_new_resolver def test_install_nonlocal_compatible_wheel_path(script, data): target_dir = script.scratch_path / 'target' @@ -1491,14 +1494,21 @@ def test_double_install(script): assert msg not in result.stderr -def test_double_install_fail(script): +def test_double_install_fail(script, use_new_resolver): """ Test double install failing with two different version requirements """ - result = script.pip('install', 'pip==*', 'pip==7.1.2', expect_error=True) - msg = ("Double requirement given: pip==7.1.2 (already in pip==*, " - "name='pip')") - assert msg in result.stderr + result = script.pip( + 'install', + 'pip==7.*', + 'pip==7.1.2', + # The new resolver is perfectly capable of handling this + expect_error=(not use_new_resolver) + ) + if not use_new_resolver: + msg = ("Double requirement given: pip==7.1.2 (already in pip==7.*, " + "name='pip')") + assert msg in result.stderr def _get_expected_error_text(): @@ -1746,6 +1756,7 @@ def 
test_user_config_accepted(script): ] ) @pytest.mark.parametrize("use_module", [True, False]) +@pytest.mark.fails_on_new_resolver def test_install_pip_does_not_modify_pip_when_satisfied( script, install_args, expected_message, use_module): """ @@ -1757,6 +1768,7 @@ def test_install_pip_does_not_modify_pip_when_satisfied( assert expected_message in result.stdout, str(result) +@pytest.mark.fails_on_new_resolver def test_ignore_yanked_file(script, data): """ Test ignore a "yanked" file. @@ -1794,6 +1806,7 @@ def test_valid_index_url_argument(script, shared_data): assert 'Successfully installed Dinner' in result.stdout, str(result) +@pytest.mark.fails_on_new_resolver def test_install_yanked_file_and_print_warning(script, data): """ Test install a "yanked" file and print a warning. @@ -1871,28 +1884,3 @@ def test_install_skip_work_dir_pkg(script, data): assert 'Requirement already satisfied: simple' not in result.stdout assert 'Successfully installed simple' in result.stdout - - -def test_install_include_work_dir_pkg(script, data): - """ - Test that install of a package in working directory - should fail on the second attempt after an install - if working directory is added in PYTHONPATH - """ - - # Create a test package, install it and then uninstall it - pkg_path = create_test_package_with_setup( - script, name='simple', version='1.0') - script.pip('install', '-e', '.', - expect_stderr=True, cwd=pkg_path) - script.pip('uninstall', 'simple', '-y') - - script.environ.update({'PYTHONPATH': pkg_path}) - - # Running the install command again from the working directory - # will be a no-op, as the package is found to be installed, - # when the package directory is in PYTHONPATH - result = script.pip('install', '--find-links', - data.find_links, 'simple', - expect_stderr=True, cwd=pkg_path) - assert 'Requirement already satisfied: simple' in result.stdout diff --git a/tests/functional/test_install_cleanup.py b/tests/functional/test_install_cleanup.py index 131caf681e3..ece2161cfb2 100644 --- a/tests/functional/test_install_cleanup.py +++ b/tests/functional/test_install_cleanup.py @@ -4,24 +4,6 @@ import pytest from pip._internal.cli.status_codes import PREVIOUS_BUILD_DIR_ERROR -from tests.lib import need_mercurial, windows_workaround_7667 -from tests.lib.local_repos import local_checkout - - -def test_cleanup_after_install(script, data): - """ - Test clean up after installing a package. - """ - script.pip( - 'install', '--no-index', - '--find-links={}'.format(data.find_links), - 'simple' - ) - build = script.venv_path / "build" - src = script.venv_path / "src" - assert not exists(build), "build/ dir still exists: {}".format(build) - assert not exists(src), "unexpected src/ dir exists: {}" .format(src) - script.assert_no_temp() @pytest.mark.network @@ -37,89 +19,6 @@ def test_no_clean_option_blocks_cleaning_after_install(script, data): assert exists(build) -@pytest.mark.network -@need_mercurial -@windows_workaround_7667 -def test_cleanup_after_install_editable_from_hg(script, tmpdir): - """ - Test clean up after cloning from Mercurial. 
- - """ - requirement = '{}#egg=ScriptTest'.format( - local_checkout('hg+https://bitbucket.org/ianb/scripttest', tmpdir) - ) - script.pip('install', '-e', requirement) - build = script.venv_path / 'build' - src = script.venv_path / 'src' - assert not exists(build), "build/ dir still exists: {}".format(build) - assert exists(src), "expected src/ dir doesn't exist: {}".format(src) - script.assert_no_temp() - - -def test_cleanup_after_install_from_local_directory(script, data): - """ - Test clean up after installing from a local directory. - """ - to_install = data.packages.joinpath("FSPkg") - script.pip('install', to_install) - build = script.venv_path / 'build' - src = script.venv_path / 'src' - assert not exists(build), "unexpected build/ dir exists: {}".format(build) - assert not exists(src), "unexpected src/ dir exist: {}".format(src) - script.assert_no_temp() - - -def test_cleanup_req_satisfied_no_name(script, data): - """ - Test cleanup when req is already satisfied, and req has no 'name' - """ - # this test confirms Issue #420 is fixed - # reqs with no 'name' that were already satisfied were leaving behind tmp - # build dirs - # 2 examples of reqs that would do this - # 1) https://bitbucket.org/ianb/initools/get/tip.zip - # 2) parent-0.1.tar.gz - dist = data.packages.joinpath("parent-0.1.tar.gz") - - script.pip('install', dist) - script.pip('install', dist) - - build = script.venv_path / 'build' - assert not exists(build), \ - "unexpected build/ dir exists: {build}".format(**locals()) - script.assert_no_temp() - - -def test_cleanup_after_install_exception(script, data): - """ - Test clean up after a 'setup.py install' exception. - """ - # broken==0.2broken fails during install; see packages readme file - result = script.pip( - 'install', '-f', data.find_links, '--no-index', 'broken==0.2broken', - expect_error=True, - ) - build = script.venv_path / 'build' - assert not exists(build), \ - "build/ dir still exists: {result.stdout}".format(**locals()) - script.assert_no_temp() - - -def test_cleanup_after_egg_info_exception(script, data): - """ - Test clean up after a 'setup.py egg_info' exception. 
- """ - # brokenegginfo fails during egg_info; see packages readme file - result = script.pip( - 'install', '-f', data.find_links, '--no-index', 'brokenegginfo==0.1', - expect_error=True, - ) - build = script.venv_path / 'build' - assert not exists(build), \ - "build/ dir still exists: {result.stdout}".format(**locals()) - script.assert_no_temp() - - @pytest.mark.network def test_cleanup_prevented_upon_build_dir_exception(script, data): """ diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 088016a9f1b..6cd283f077f 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -15,10 +15,9 @@ def test_options_from_env_vars(script): script.environ['PIP_NO_INDEX'] = '1' result = script.pip('install', '-vvv', 'INITools', expect_error=True) assert "Ignoring indexes:" in result.stdout, str(result) - assert ( - "DistributionNotFound: No matching distribution found for INITools" - in result.stdout - ) + msg = "DistributionNotFound: No matching distribution found for INITools" + # Case insensitive as the new resolver canonicalises the project name + assert msg.lower() in result.stdout.lower(), str(result) def test_command_line_options_override_env_vars(script, virtualenv): @@ -59,10 +58,9 @@ def test_env_vars_override_config_file(script, virtualenv): no-index = 1 """)) result = script.pip('install', '-vvv', 'INITools', expect_error=True) - assert ( - "DistributionNotFound: No matching distribution found for INITools" - in result.stdout - ) + msg = "DistributionNotFound: No matching distribution found for INITools" + # Case insensitive as the new resolver canonicalises the project name + assert msg.lower() in result.stdout.lower(), str(result) script.environ['PIP_NO_INDEX'] = '0' virtualenv.clear() result = script.pip('install', '-vvv', 'INITools') @@ -186,10 +184,9 @@ def test_options_from_venv_config(script, virtualenv): f.write(conf) result = script.pip('install', '-vvv', 'INITools', expect_error=True) assert "Ignoring indexes:" in result.stdout, str(result) - assert ( - "DistributionNotFound: No matching distribution found for INITools" - in result.stdout - ) + msg = "DistributionNotFound: No matching distribution found for INITools" + # Case insensitive as the new resolver canonicalises the project name + assert msg.lower() in result.stdout.lower(), str(result) def test_install_no_binary_via_config_disables_cached_wheels( diff --git a/tests/functional/test_install_direct_url.py b/tests/functional/test_install_direct_url.py index ec1e927ebf8..4afd3925e6f 100644 --- a/tests/functional/test_install_direct_url.py +++ b/tests/functional/test_install_direct_url.py @@ -1,5 +1,7 @@ import re +import pytest + from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl from tests.lib import _create_test_package, path_to_url @@ -30,6 +32,7 @@ def test_install_vcs_editable_no_direct_url(script, with_wheel): assert not _get_created_direct_url(result, "testpkg") +@pytest.mark.fails_on_new_resolver def test_install_vcs_non_editable_direct_url(script, with_wheel): pkg_path = _create_test_package(script, name="testpkg") url = path_to_url(pkg_path) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index 3c0359a73f1..dfde7d1b676 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -136,6 +136,7 @@ def test_install_special_extra(script): pytest.param('[extra2]', '1.0', marks=pytest.mark.xfail), 
pytest.param('[extra1,extra2]', '1.0', marks=pytest.mark.xfail), ]) +@pytest.mark.fails_on_new_resolver def test_install_extra_merging(script, data, extra_to_install, simple_version): # Check that extra specifications in the extras section are honoured. pkga_path = script.scratch_path / 'pkga' diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 0c00060a23d..d39263b98ea 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -189,6 +189,7 @@ def test_respect_order_in_requirements_file(script, data): ) +@pytest.mark.fails_on_new_resolver def test_install_local_editable_with_extras(script, data): to_install = data.packages.joinpath("LocalExtras") res = script.pip_install_local( @@ -336,6 +337,7 @@ def test_constraints_local_install_causes_error(script, data): assert 'Could not satisfy constraints for' in result.stderr +@pytest.mark.fails_on_new_resolver def test_constraints_constrain_to_local_editable(script, data): to_install = data.src.joinpath("singlemodule") script.scratch_path.joinpath("constraints.txt").write_text( @@ -347,6 +349,7 @@ def test_constraints_constrain_to_local_editable(script, data): assert 'Running setup.py develop for singlemodule' in result.stdout +@pytest.mark.fails_on_new_resolver def test_constraints_constrain_to_local(script, data): to_install = data.src.joinpath("singlemodule") script.scratch_path.joinpath("constraints.txt").write_text( @@ -358,6 +361,7 @@ def test_constraints_constrain_to_local(script, data): assert 'Running setup.py install for singlemodule' in result.stdout +@pytest.mark.fails_on_new_resolver def test_constrained_to_url_install_same_url(script, data): to_install = data.src.joinpath("singlemodule") constraints = path_to_url(to_install) + "#egg=singlemodule" @@ -403,6 +407,7 @@ def test_double_install_spurious_hash_mismatch( assert 'Successfully installed simple-1.0' in str(result) +@pytest.mark.fails_on_new_resolver def test_install_with_extras_from_constraints(script, data): to_install = data.packages.joinpath("LocalExtras") script.scratch_path.joinpath("constraints.txt").write_text( @@ -413,6 +418,7 @@ def test_install_with_extras_from_constraints(script, data): assert script.site_packages / 'simple' in result.files_created +@pytest.mark.fails_on_new_resolver def test_install_with_extras_from_install(script, data): to_install = data.packages.joinpath("LocalExtras") script.scratch_path.joinpath("constraints.txt").write_text( @@ -420,9 +426,10 @@ def test_install_with_extras_from_install(script, data): ) result = script.pip_install_local( '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]') - assert script.site_packages / 'singlemodule.py'in result.files_created + assert script.site_packages / 'singlemodule.py' in result.files_created +@pytest.mark.fails_on_new_resolver def test_install_with_extras_joined(script, data): to_install = data.packages.joinpath("LocalExtras") script.scratch_path.joinpath("constraints.txt").write_text( @@ -432,9 +439,10 @@ def test_install_with_extras_joined(script, data): '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]' ) assert script.site_packages / 'simple' in result.files_created - assert script.site_packages / 'singlemodule.py'in result.files_created + assert script.site_packages / 'singlemodule.py' in result.files_created +@pytest.mark.fails_on_new_resolver def test_install_with_extras_editable_joined(script, data): to_install = data.packages.joinpath("LocalExtras") 
script.scratch_path.joinpath("constraints.txt").write_text( @@ -443,7 +451,7 @@ def test_install_with_extras_editable_joined(script, data): result = script.pip_install_local( '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]') assert script.site_packages / 'simple' in result.files_created - assert script.site_packages / 'singlemodule.py'in result.files_created + assert script.site_packages / 'singlemodule.py' in result.files_created def test_install_distribution_full_union(script, data): @@ -465,6 +473,7 @@ def test_install_distribution_duplicate_extras(script, data): assert expected in result.stderr +@pytest.mark.fails_on_new_resolver def test_install_distribution_union_with_constraints(script, data): to_install = data.packages.joinpath("LocalExtras") script.scratch_path.joinpath("constraints.txt").write_text( @@ -475,6 +484,7 @@ def test_install_distribution_union_with_constraints(script, data): assert script.site_packages / 'singlemodule.py' in result.files_created +@pytest.mark.fails_on_new_resolver def test_install_distribution_union_with_versions(script, data): to_install_001 = data.packages.joinpath("LocalExtras") to_install_002 = data.packages.joinpath("LocalExtras-0.0.2") @@ -497,6 +507,7 @@ def test_install_distribution_union_conflicting_extras(script, data): assert "Conflict" in result.stderr +@pytest.mark.fails_on_new_resolver def test_install_unsupported_wheel_link_with_marker(script): script.scratch_path.joinpath("with-marker.txt").write_text( textwrap.dedent("""\ @@ -515,6 +526,7 @@ def test_install_unsupported_wheel_link_with_marker(script): assert len(result.files_created) == 0 +@pytest.mark.fails_on_new_resolver def test_install_unsupported_wheel_file(script, data): # Trying to install a local wheel with an incompatible version/type # should fail. diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py index f5445a0b3e6..604d2afa812 100644 --- a/tests/functional/test_install_upgrade.py +++ b/tests/functional/test_install_upgrade.py @@ -36,6 +36,7 @@ def test_invalid_upgrade_strategy_causes_error(script): assert "invalid choice" in result.stderr +@pytest.mark.fails_on_new_resolver def test_only_if_needed_does_not_upgrade_deps_when_satisfied(script): """ It doesn't upgrade a dependency if it already satisfies the requirements. 
@@ -181,6 +182,7 @@ def test_upgrade_if_requested(script):
     )


+@pytest.mark.fails_on_new_resolver
 def test_upgrade_with_newest_already_installed(script, data):
     """
     If the newest version of a package is already installed, the package should
@@ -249,6 +251,7 @@ def test_uninstall_before_upgrade_from_url(script):


 @pytest.mark.network
+@pytest.mark.fails_on_new_resolver
 def test_upgrade_to_same_version_from_url(script):
     """
     When installing from a URL the same version that is already installed, no
diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py
index 09dbdf4912a..c885bf4b6ef 100644
--- a/tests/functional/test_install_user.py
+++ b/tests/functional/test_install_user.py
@@ -126,6 +126,7 @@ def test_install_user_conflict_in_usersite(self, script):

     @pytest.mark.network
     @pytest.mark.incompatible_with_test_venv
+    @pytest.mark.fails_on_new_resolver
     def test_install_user_conflict_in_globalsite(self, virtualenv, script):
         """
         Test user install with conflict in global site ignores site and
@@ -158,6 +159,7 @@ def test_install_user_conflict_in_globalsite(self, virtualenv, script):

     @pytest.mark.network
     @pytest.mark.incompatible_with_test_venv
+    @pytest.mark.fails_on_new_resolver
     def test_upgrade_user_conflict_in_globalsite(self, virtualenv, script):
         """
         Test user install/upgrade with conflict in global site ignores site and
@@ -189,6 +191,7 @@ def test_upgrade_user_conflict_in_globalsite(self, virtualenv, script):

     @pytest.mark.network
     @pytest.mark.incompatible_with_test_venv
+    @pytest.mark.fails_on_new_resolver
     def test_install_user_conflict_in_globalsite_and_usersite(
             self, virtualenv, script):
         """
diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py
index 6c6f5a0c7d7..97b792c0135 100644
--- a/tests/functional/test_install_vcs_git.py
+++ b/tests/functional/test_install_vcs_git.py
@@ -495,6 +495,7 @@ def test_install_git_branch_not_cached(script, with_wheel):
     ), result.stdout


+@pytest.mark.fails_on_new_resolver
 def test_install_git_sha_cached(script, with_wheel):
     """
     Installing git urls with a sha revision does cause wheel caching.
diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py
index 6f7b77597ed..a9293438c11 100644
--- a/tests/functional/test_install_wheel.py
+++ b/tests/functional/test_install_wheel.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 import distutils
 import glob
 import os
@@ -125,6 +127,36 @@ def test_basic_install_from_wheel_file(script, data):
         result.stdout)

+
+# Installation seems to work, but scripttest fails to check.
+# I really don't care now since we're desupporting it soon anyway.
+@skip_if_python2
+def test_basic_install_from_unicode_wheel(script, data):
+    """
+    Test installing from a wheel that contains non-ASCII file names
+    """
+    make_wheel(
+        'unicode_package',
+        '1.0',
+        extra_files={
+            'வணக்கம்/__init__.py': b'',
+            'வணக்கம்/નમસ્તે.py': b'',
+        },
+    ).save_to_dir(script.scratch_path)
+
+    result = script.pip(
+        'install', 'unicode_package==1.0', '--no-index',
+        '--find-links', script.scratch_path,
+    )
+    dist_info_folder = script.site_packages / 'unicode_package-1.0.dist-info'
+    assert dist_info_folder in result.files_created, str(result)
+
+    file1 = script.site_packages.joinpath('வணக்கம்', '__init__.py')
+    assert file1 in result.files_created, str(result)
+
+    file2 = script.site_packages.joinpath('வணக்கம்', 'નમસ્તે.py')
+    assert file2 in result.files_created, str(result)
+
+
 def test_install_from_wheel_with_headers(script, data):
     """
     Test installing from a wheel file with headers
@@ -587,3 +619,16 @@ def test_wheel_install_fails_with_badly_encoded_metadata(script):
     assert "Error decoding metadata for" in result.stderr
     assert "simple-0.1.0-py2.py3-none-any.whl" in result.stderr
     assert "METADATA" in result.stderr
+
+
+@pytest.mark.parametrize(
+    'package_name',
+    ['simple-package', 'simple_package'],
+)
+def test_correct_package_name_while_creating_wheel_bug(script, package_name):
+    """Check that the wheel file name is correctly normalized while
+    creating a .whl file from the given package name
+    """
+    package = create_basic_wheel_for_package(script, package_name, '1.0')
+    wheel_name = os.path.basename(package)
+    assert wheel_name == 'simple_package-1.0-py2.py3-none-any.whl'
diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py
index dace95968bd..693ede21a78 100644
--- a/tests/functional/test_new_resolver.py
+++ b/tests/functional/test_new_resolver.py
@@ -189,7 +189,40 @@ def test_new_resolver_ignore_dependencies(script):
     assert_not_installed(script, "dep")


-def test_new_resolver_installs_extras(script):
+@pytest.mark.parametrize(
+    "root_dep",
+    [
+        "base[add]",
+        "base[add] >= 0.1.0",
+        # Non-standard syntax. To deprecate, see pypa/pip#8288.
+        "base >= 0.1.0[add]",
+    ],
+)
+def test_new_resolver_installs_extras(tmpdir, script, root_dep):
+    req_file = tmpdir.joinpath("requirements.txt")
+    req_file.write_text(root_dep)
+
+    create_basic_wheel_for_package(
+        script,
+        "base",
+        "0.1.0",
+        extras={"add": ["dep"]},
+    )
+    create_basic_wheel_for_package(
+        script,
+        "dep",
+        "0.1.0",
+    )
+    script.pip(
+        "install", "--unstable-feature=resolver",
+        "--no-cache-dir", "--no-index",
+        "--find-links", script.scratch_path,
+        "-r", req_file,
+    )
+    assert_installed(script, base="0.1.0", dep="0.1.0")
+
+
+def test_new_resolver_installs_extras_warn_missing(script):
     create_basic_wheel_for_package(
         script,
         "base",
@@ -531,6 +564,29 @@ def test_new_resolver_handles_prerelease(
     assert_installed(script, pkg=expected_version)


+@pytest.mark.parametrize(
+    "pkg_deps, root_deps",
+    [
+        # This tests the marker is picked up from a transitive dependency.
+        (["dep; os_name == 'nonexist_os'"], ["pkg"]),
+        # This tests the marker is picked up from a root dependency.
+ ([], ["pkg", "dep; os_name == 'nonexist_os'"]), + ] +) +def test_new_reolver_skips_marker(script, pkg_deps, root_deps): + create_basic_wheel_for_package(script, "pkg", "1.0", depends=pkg_deps) + create_basic_wheel_for_package(script, "dep", "1.0") + + script.pip( + "install", "--unstable-feature=resolver", + "--no-cache-dir", "--no-index", + "--find-links", script.scratch_path, + *root_deps + ) + assert_installed(script, pkg="1.0") + assert_not_installed(script, "dep") + + @pytest.mark.parametrize( "constraints", [ @@ -769,9 +825,7 @@ def _wheel_from_index(script, name, version, requires, extras): @pytest.mark.parametrize( "pkg_builder", [ - pytest.param( - _local_with_setup, marks=pytest.mark.xfail(strict=True), - ), + _local_with_setup, _direct_wheel, _wheel_from_index, ], diff --git a/tests/functional/test_uninstall_user.py b/tests/functional/test_uninstall_user.py index df635ccf8f8..a367796fdfd 100644 --- a/tests/functional/test_uninstall_user.py +++ b/tests/functional/test_uninstall_user.py @@ -22,6 +22,7 @@ def test_uninstall_from_usersite(self, script): result2 = script.pip('uninstall', '-y', 'INITools') assert_all_changes(result1, result2, [script.venv / 'build', 'cache']) + @pytest.mark.fails_on_new_resolver def test_uninstall_from_usersite_with_dist_in_global_site( self, virtualenv, script): """ diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index 545c50ac9a8..6bf0486819a 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -62,6 +62,7 @@ def test_pip_wheel_success(script, data): assert "Successfully built simple" in result.stdout, result.stdout +@pytest.mark.fails_on_new_resolver def test_pip_wheel_build_cache(script, data): """ Test 'pip wheel' builds and caches. diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index f51cce1e23a..d08e1f3613f 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -353,6 +353,27 @@ def assert_installed(self, pkg_name, editable=True, with_files=[], .format(**locals()) ) + def did_create(self, path, message=None): + assert str(path) in self.files_created, _one_or_both(message, self) + + def did_not_create(self, path, message=None): + assert str(path) not in self.files_created, _one_or_both(message, self) + + def did_update(self, path, message=None): + assert str(path) in self.files_updated, _one_or_both(message, self) + + def did_not_update(self, path, message=None): + assert str(path) not in self.files_updated, _one_or_both(message, self) + + +def _one_or_both(a, b): + """Returns f"{a}\n{b}" if a is truthy, else returns str(b). 
+ """ + if not a: + return str(b) + + return "{a}\n{b}".format(a=a, b=b) + def make_check_stderr_message(stderr, line, reason): """ @@ -1002,6 +1023,9 @@ def create_basic_wheel_for_package( if extra_files is None: extra_files = {} + # Fix wheel distribution name by replacing runs of non-alphanumeric + # characters with an underscore _ as per PEP 491 + name = re.sub(r"[^\w\d.]+", "_", name, re.UNICODE) archive_name = "{}-{}-py2.py3-none-any.whl".format(name, version) archive_path = script.scratch_path / archive_name diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index cfc2af1c07a..0387813ad0a 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -476,6 +476,36 @@ def test_get_html_page_invalid_scheme(caplog, url, vcs_scheme): ] +@pytest.mark.parametrize( + "content_type", + [ + "application/xhtml+xml", + "application/json", + ], +) +def test_get_html_page_invalid_content_type(caplog, content_type): + """`_get_html_page()` should warn if an invalid content-type is given. + Only text/html is allowed. + """ + caplog.set_level(logging.DEBUG) + url = 'https://pypi.org/simple/pip' + link = Link(url) + + session = mock.Mock(PipSession) + session.get.return_value = mock.Mock(**{ + "request.method": "GET", + "headers": {"Content-Type": content_type}, + }) + + assert _get_html_page(link, session=session) is None + assert ('pip._internal.index.collector', + logging.WARNING, + 'Skipping page {} because the GET request got Content-Type: {}.' + 'The only supported Content-Type is text/html'.format( + url, content_type)) \ + in caplog.record_tuples + + def make_fake_html_response(url): """ Create a fake requests.Response object. diff --git a/tests/unit/test_format_control.py b/tests/unit/test_format_control.py index 0b0e2bde221..0e152798184 100644 --- a/tests/unit/test_format_control.py +++ b/tests/unit/test_format_control.py @@ -9,6 +9,8 @@ class SimpleCommand(Command): def __init__(self): super(SimpleCommand, self).__init__('fake', 'fake summary') + + def add_options(self): self.cmd_opts.add_option(cmdoptions.no_binary()) self.cmd_opts.add_option(cmdoptions.only_binary()) diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 1fadd1db348..08320cfa143 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -123,6 +123,26 @@ def ask_input(prompt): assert actual == ("user", "user!netloc", False) +def test_keyring_get_password_after_prompt_when_none(monkeypatch): + keyring = KeyringModuleV1() + monkeypatch.setattr('pip._internal.network.auth.keyring', keyring) + auth = MultiDomainBasicAuth() + + def ask_input(prompt): + assert prompt == "User for unknown.com: " + return "user" + + def ask_password(prompt): + assert prompt == "Password: " + return "fake_password" + + monkeypatch.setattr('pip._internal.network.auth.ask_input', ask_input) + monkeypatch.setattr( + 'pip._internal.network.auth.ask_password', ask_password) + actual = auth._prompt_for_password("unknown.com") + assert actual == ("user", "fake_password", True) + + def test_keyring_get_password_username_in_index(monkeypatch): keyring = KeyringModuleV1() monkeypatch.setattr('pip._internal.network.auth.keyring', keyring) diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index 73733d46235..2da0b62dbfc 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -583,10 +583,10 @@ def test_parse_editable_local( exists_mock.return_value = isdir_mock.return_value = True # mocks needed to support path operations on windows tests 
     abspath_mock.return_value = "/some/path"
-    assert parse_editable('.') == (None, 'file:///some/path', None)
+    assert parse_editable('.') == (None, 'file:///some/path', set())
     abspath_mock.return_value = "/some/path/foo"
     assert parse_editable('foo') == (
-        None, 'file:///some/path/foo', None,
+        None, 'file:///some/path/foo', set(),
     )
@@ -594,7 +594,7 @@ def test_parse_editable_explicit_vcs():
     assert parse_editable('svn+https://foo#egg=foo') == (
         'foo',
         'svn+https://foo#egg=foo',
-        None,
+        set(),
     )
@@ -602,7 +602,7 @@ def test_parse_editable_vcs_extras():
     assert parse_editable('svn+https://foo#egg=foo[extras]') == (
         'foo[extras]',
         'svn+https://foo#egg=foo[extras]',
-        None,
+        set(),
     )
diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py
index b64d4cef312..2834b18f087 100644
--- a/tests/unit/test_wheel.py
+++ b/tests/unit/test_wheel.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 """Tests for wheel binary packages and .dist-info."""
 import csv
 import logging
@@ -114,8 +116,8 @@ def test_raise_for_invalid_entrypoint_fail(entrypoint):
 @pytest.mark.parametrize("outrows, expected", [
     ([
-        ('', '', 'a'),
-        ('', '', ''),
+        (u'', '', 'a'),
+        (u'', '', ''),
     ], [
         ('', '', ''),
         ('', '', 'a'),
@@ -123,15 +125,23 @@ def test_raise_for_invalid_entrypoint_fail(entrypoint):
     ([
         # Include an int to check avoiding the following error:
         # > TypeError: '<' not supported between instances of 'str' and 'int'
-        ('', '', 1),
+        (u'', '', 1),
+        (u'', '', ''),
+    ], [
         ('', '', ''),
+        ('', '', '1'),
+    ]),
+    ([
+        # Test the normalization correctly encode everything for csv.writer().
+        (u'😉', '', 1),
+        (u'', '', ''),
     ], [
         ('', '', ''),
-        ('', '', 1),
+        ('😉', '', '1'),
     ]),
 ])
-def test_sorted_outrows(outrows, expected):
-    actual = wheel.sorted_outrows(outrows)
+def test_normalized_outrows(outrows, expected):
+    actual = wheel._normalized_outrows(outrows)
     assert actual == expected
@@ -141,7 +151,7 @@ def call_get_csv_rows_for_installed(tmpdir, text):
     # Test that an installed file appearing in RECORD has its filename
     # updated in the new RECORD file.
-    installed = {'a': 'z'}
+    installed = {u'a': 'z'}
     changed = set()
     generated = []
     lib_dir = '/lib/dir'
@@ -180,9 +190,9 @@ def test_get_csv_rows_for_installed__long_lines(tmpdir, caplog):
     outrows = call_get_csv_rows_for_installed(tmpdir, text)
     expected = [
-        ('z', 'b', 'c', 'd'),
+        ('z', 'b', 'c'),
         ('e', 'f', 'g'),
-        ('h', 'i', 'j', 'k'),
+        ('h', 'i', 'j'),
     ]
     assert outrows == expected
diff --git a/tools/requirements/tests.txt b/tools/requirements/tests.txt
index 00a306558b8..0c84f20aa47 100644
--- a/tools/requirements/tests.txt
+++ b/tools/requirements/tests.txt
@@ -4,13 +4,13 @@ enum34; python_version < '3.4'
 freezegun
 mock
 pretend
-pytest==3.8.2
+# pytest 5.x only supports python 3.5+
+pytest<5.0.0
 pytest-cov
-# Prevent installing 7.0 which has install_requires "pytest >= 3.10".
-pytest-rerunfailures<7.0
+# Prevent installing 9.0 which has install_requires "pytest >= 5.0".
+pytest-rerunfailures<9.0
 pytest-timeout
-# Prevent installing 1.28.0 which has install_requires "pytest >= 4.4.0".
-pytest-xdist<1.28.0
+pytest-xdist
 pyyaml
 setuptools>=39.2.0  # Needed for `setuptools.wheel.Wheel` support.
 scripttest
diff --git a/tools/travis/run.sh b/tools/travis/run.sh
index 90e7d570860..a531cbb56fd 100755
--- a/tools/travis/run.sh
+++ b/tools/travis/run.sh
@@ -55,6 +55,10 @@ elif [[ "$GROUP" == "2" ]]; then
     # Separate Job for running integration tests for 'pip install'
     tox -- -m integration -n auto --duration=5 -k "test_install" \
         --use-venv $RESOLVER_SWITCH
+elif [[ "$GROUP" == "3" ]]; then
+    # Separate Job for tests that fail with the new resolver
+    tox -- -m fails_on_new_resolver -n auto --duration=5 \
+        --use-venv $RESOLVER_SWITCH --new-resolver-runtests
 else
     # Non-Testing Jobs should run once
     tox