-rw-r--r--  .github/workflows/label-merge-conflicts.yml | 19
-rw-r--r--  .pre-commit-config.yaml | 14
-rw-r--r--  docs/html/getting-started.md | 2
-rw-r--r--  docs/html/reference/build-system/pyproject-toml.md | 6
-rw-r--r--  docs/html/reference/build-system/setup-py.md | 9
-rw-r--r--  docs/html/user_guide.rst | 8
-rw-r--r--  docs/pip_sphinxext.py | 1
-rw-r--r--  docs/requirements.txt | 2
-rw-r--r--  news/10476.feature.rst | 1
-rw-r--r--  news/11325.feature.rst | 1
-rw-r--r--  news/11453.removal.rst | 2
-rw-r--r--  news/11719.bugfix.rst | 1
-rw-r--r--  news/11889.bugfix.rst | 4
-rw-r--r--  news/11908.feature.rst | 1
-rw-r--r--  news/11935.feature.rst | 1
-rw-r--r--  news/11936.bugfix.rst | 1
-rw-r--r--  news/11938.bugfix.rst | 3
-rw-r--r--  news/11941.feature.rst | 4
-rw-r--r--  news/11946.bugfix.rst | 2
-rw-r--r--  news/11948.bugfix.rst | 3
-rw-r--r--  news/11954.doc.rst | 1
-rw-r--r--  news/5037.feature.rst | 1
-rw-r--r--  news/8368.removal.rst | 2
-rw-r--r--  news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst | 0
-rw-r--r--  news/msgpack.vendor.rst | 1
-rw-r--r--  news/platformdirs.vendor.rst | 1
-rw-r--r--  news/pygments.vendor.rst | 1
-rw-r--r--  news/rich.vendor.rst | 1
-rw-r--r--  news/setuptools.vendor.rst | 2
-rw-r--r--  news/tenacity.vendor.rst | 1
-rw-r--r--  news/typing_extensions.vendor.rst | 1
-rw-r--r--  news/urllib3.vendor.rst | 1
-rw-r--r--  noxfile.py | 1
-rw-r--r--  src/pip/_internal/cache.py | 41
-rw-r--r--  src/pip/_internal/cli/base_command.py | 9
-rw-r--r--  src/pip/_internal/cli/cmdoptions.py | 11
-rw-r--r--  src/pip/_internal/cli/req_command.py | 4
-rw-r--r--  src/pip/_internal/commands/cache.py | 1
-rw-r--r--  src/pip/_internal/commands/check.py | 1
-rw-r--r--  src/pip/_internal/commands/download.py | 1
-rw-r--r--  src/pip/_internal/commands/install.py | 39
-rw-r--r--  src/pip/_internal/commands/wheel.py | 22
-rw-r--r--  src/pip/_internal/exceptions.py | 14
-rw-r--r--  src/pip/_internal/index/sources.py | 1
-rw-r--r--  src/pip/_internal/models/direct_url.py | 31
-rw-r--r--  src/pip/_internal/models/installation_report.py | 2
-rw-r--r--  src/pip/_internal/models/link.py | 35
-rw-r--r--  src/pip/_internal/models/search_scope.py | 1
-rw-r--r--  src/pip/_internal/network/session.py | 1
-rw-r--r--  src/pip/_internal/operations/freeze.py | 7
-rw-r--r--  src/pip/_internal/operations/install/legacy.py | 118
-rw-r--r--  src/pip/_internal/operations/install/wheel.py | 8
-rw-r--r--  src/pip/_internal/operations/prepare.py | 64
-rw-r--r--  src/pip/_internal/req/constructors.py | 6
-rw-r--r--  src/pip/_internal/req/req_file.py | 11
-rw-r--r--  src/pip/_internal/req/req_install.py | 115
-rw-r--r--  src/pip/_internal/req/req_uninstall.py | 24
-rw-r--r--  src/pip/_internal/resolution/legacy/resolver.py | 4
-rw-r--r--  src/pip/_internal/resolution/resolvelib/candidates.py | 13
-rw-r--r--  src/pip/_internal/resolution/resolvelib/factory.py | 3
-rw-r--r--  src/pip/_internal/resolution/resolvelib/requirements.py | 1
-rw-r--r--  src/pip/_internal/resolution/resolvelib/resolver.py | 4
-rw-r--r--  src/pip/_internal/utils/deprecation.py | 41
-rw-r--r--  src/pip/_internal/utils/hashes.py | 7
-rw-r--r--  src/pip/_internal/utils/setuptools_build.py | 45
-rw-r--r--  src/pip/_vendor/README.rst | 24
-rw-r--r--  src/pip/_vendor/msgpack/__init__.py | 4
-rw-r--r--  src/pip/_vendor/msgpack/ext.py | 2
-rw-r--r--  src/pip/_vendor/msgpack/fallback.py | 2
-rw-r--r--  src/pip/_vendor/pkg_resources/__init__.py | 599
-rw-r--r--  src/pip/_vendor/platformdirs/__init__.py | 223
-rw-r--r--  src/pip/_vendor/platformdirs/__main__.py | 1
-rw-r--r--  src/pip/_vendor/platformdirs/android.py | 10
-rw-r--r--  src/pip/_vendor/platformdirs/api.py | 25
-rw-r--r--  src/pip/_vendor/platformdirs/macos.py | 20
-rw-r--r--  src/pip/_vendor/platformdirs/unix.py | 15
-rw-r--r--  src/pip/_vendor/platformdirs/version.py | 4
-rw-r--r--  src/pip/_vendor/platformdirs/windows.py | 15
-rw-r--r--  src/pip/_vendor/pygments/__init__.py | 2
-rw-r--r--  src/pip/_vendor/pygments/formatters/__init__.py | 1
-rw-r--r--  src/pip/_vendor/pygments/formatters/html.py | 6
-rw-r--r--  src/pip/_vendor/pygments/formatters/irc.py | 35
-rw-r--r--  src/pip/_vendor/pygments/lexer.py | 7
-rw-r--r--  src/pip/_vendor/pygments/lexers/__init__.py | 1
-rw-r--r--  src/pip/_vendor/pygments/lexers/_mapping.py | 24
-rw-r--r--  src/pip/_vendor/pygments/lexers/python.py | 64
-rw-r--r--  src/pip/_vendor/pygments/sphinxext.py | 62
-rw-r--r--  src/pip/_vendor/rich/_export_format.py | 4
-rw-r--r--  src/pip/_vendor/rich/_fileno.py | 24
-rw-r--r--  src/pip/_vendor/rich/_null_file.py | 14
-rw-r--r--  src/pip/_vendor/rich/align.py | 2
-rw-r--r--  src/pip/_vendor/rich/ansi.py | 3
-rw-r--r--  src/pip/_vendor/rich/cells.py | 2
-rw-r--r--  src/pip/_vendor/rich/color.py | 16
-rw-r--r--  src/pip/_vendor/rich/console.py | 59
-rw-r--r--  src/pip/_vendor/rich/default_styles.py | 10
-rw-r--r--  src/pip/_vendor/rich/file_proxy.py | 5
-rw-r--r--  src/pip/_vendor/rich/highlighter.py | 2
-rw-r--r--  src/pip/_vendor/rich/json.py | 4
-rw-r--r--  src/pip/_vendor/rich/live.py | 2
-rw-r--r--  src/pip/_vendor/rich/pretty.py | 103
-rw-r--r--  src/pip/_vendor/rich/progress.py | 21
-rw-r--r--  src/pip/_vendor/rich/repr.py | 2
-rw-r--r--  src/pip/_vendor/rich/rule.py | 6
-rw-r--r--  src/pip/_vendor/rich/segment.py | 8
-rw-r--r--  src/pip/_vendor/rich/spinner.py | 23
-rw-r--r--  src/pip/_vendor/rich/style.py | 23
-rw-r--r--  src/pip/_vendor/rich/syntax.py | 11
-rw-r--r--  src/pip/_vendor/rich/text.py | 8
-rw-r--r--  src/pip/_vendor/rich/theme.py | 7
-rw-r--r--  src/pip/_vendor/rich/traceback.py | 139
-rw-r--r--  src/pip/_vendor/tenacity/__init__.py | 219
-rw-r--r--  src/pip/_vendor/tenacity/_asyncio.py | 38
-rw-r--r--  src/pip/_vendor/tenacity/_utils.py | 8
-rw-r--r--  src/pip/_vendor/tenacity/after.py | 7
-rw-r--r--  src/pip/_vendor/tenacity/before.py | 7
-rw-r--r--  src/pip/_vendor/tenacity/before_sleep.py | 17
-rw-r--r--  src/pip/_vendor/tenacity/retry.py | 38
-rw-r--r--  src/pip/_vendor/tenacity/stop.py | 11
-rw-r--r--  src/pip/_vendor/tenacity/tornadoweb.py | 6
-rw-r--r--  src/pip/_vendor/tenacity/wait.py | 44
-rw-r--r--  src/pip/_vendor/typing_extensions.py | 119
-rw-r--r--  src/pip/_vendor/urllib3/_version.py | 2
-rw-r--r--  src/pip/_vendor/urllib3/connection.py | 5
-rw-r--r--  src/pip/_vendor/urllib3/connectionpool.py | 2
-rw-r--r--  src/pip/_vendor/urllib3/util/timeout.py | 9
-rw-r--r--  src/pip/_vendor/urllib3/util/url.py | 4
-rw-r--r--  src/pip/_vendor/vendor.txt | 16
-rw-r--r--  tests/conftest.py | 1
-rw-r--r--  tests/functional/test_build_env.py | 2
-rw-r--r--  tests/functional/test_cache.py | 6
-rw-r--r--  tests/functional/test_config_settings.py | 196
-rw-r--r--  tests/functional/test_install.py | 227
-rw-r--r--  tests/functional/test_install_config.py | 16
-rw-r--r--  tests/functional/test_install_report.py | 125
-rw-r--r--  tests/functional/test_install_reqs.py | 113
-rw-r--r--  tests/functional/test_install_vcs_git.py | 2
-rw-r--r--  tests/functional/test_show.py | 28
-rw-r--r--  tests/unit/test_base_command.py | 1
-rw-r--r--  tests/unit/test_cache.py | 11
-rw-r--r--  tests/unit/test_collector.py | 35
-rw-r--r--  tests/unit/test_direct_url.py | 30
-rw-r--r--  tests/unit/test_link.py | 2
-rw-r--r--  tests/unit/test_network_cache.py | 1
-rw-r--r--  tests/unit/test_options.py | 1
-rw-r--r--  tests/unit/test_req.py | 11
-rw-r--r--  tests/unit/test_req_file.py | 28
-rw-r--r--  tests/unit/test_req_uninstall.py | 32
-rw-r--r--  tests/unit/test_utils.py | 9
-rw-r--r--  tests/unit/test_wheel.py | 7
-rw-r--r--  tools/vendoring/patches/pkg_resources.patch | 22
-rw-r--r--  tools/vendoring/patches/pygments.patch | 22
-rw-r--r--  tools/vendoring/patches/tenacity.patch | 8
153 files changed, 2524 insertions(+), 1392 deletions(-)
diff --git a/.github/workflows/label-merge-conflicts.yml b/.github/workflows/label-merge-conflicts.yml
deleted file mode 100644
index 1de897ca1..000000000
--- a/.github/workflows/label-merge-conflicts.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-name: Autolabel merge conflicts
-
-permissions:
- issues: write
- pull-requests: write
-
-on:
- push:
- branches: [main]
-
-jobs:
- label-merge-conflicts:
- if: github.repository_owner == 'pypa'
- runs-on: ubuntu-latest
- steps:
- - uses: pradyunsg/auto-label-merge-conflicts@v3
- with:
- CONFLICT_LABEL_NAME: "needs rebase or merge"
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7a37d18c5..2fc455b9d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@ exclude: 'src/pip/_vendor/'
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.3.0
+ rev: v4.4.0
hooks:
- id: check-builtin-literals
- id: check-added-large-files
@@ -17,18 +17,18 @@ repos:
exclude: .patch
- repo: https://github.com/psf/black
- rev: 22.6.0
+ rev: 23.1.0
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8
- rev: 4.0.1
+ rev: 6.0.0
hooks:
- id: flake8
additional_dependencies: [
- 'flake8-bugbear==22.10.27',
- 'flake8-logging-format==0.9.0',
- 'flake8-implicit-str-concat==0.3.0',
+ 'flake8-bugbear',
+ 'flake8-logging-format',
+ 'flake8-implicit-str-concat',
]
exclude: tests/data
@@ -56,7 +56,7 @@ repos:
]
- repo: https://github.com/pre-commit/pygrep-hooks
- rev: v1.9.0
+ rev: v1.10.0
hooks:
- id: python-no-log-warn
- id: python-no-eval
diff --git a/docs/html/getting-started.md b/docs/html/getting-started.md
index 0967b0eb9..2b3f0bc93 100644
--- a/docs/html/getting-started.md
+++ b/docs/html/getting-started.md
@@ -98,5 +98,5 @@ Successfully uninstalled sampleproject
## Next Steps
It is recommended to learn about what virtual environments are and how to use
-them. This is covered in the ["Installing Packages"](pypug:tutorials/installing-packages)
+them. This is covered in the {doc}`Installing Packages <pypug:tutorials/installing-packages>`
tutorial on packaging.python.org.
diff --git a/docs/html/reference/build-system/pyproject-toml.md b/docs/html/reference/build-system/pyproject-toml.md
index d2ec0323e..a42a3b8c4 100644
--- a/docs/html/reference/build-system/pyproject-toml.md
+++ b/docs/html/reference/build-system/pyproject-toml.md
@@ -116,6 +116,12 @@ multiple times, in order to specify multiple settings).
The supplied configuration settings are passed to every backend hook call.
+Configuration settings provided via `--config-settings` command line options (or the
+equivalent environment variables or configuration file entries) are passed to the build
+of requirements explicitly provided as pip command line arguments. They are not passed
+to the build of dependencies, or to the build of requirements provided in requirement
+files.
+
## Build output
It is the responsibility of the build backend to ensure that the output is
diff --git a/docs/html/reference/build-system/setup-py.md b/docs/html/reference/build-system/setup-py.md
index 53917b8a4..0103a3a6a 100644
--- a/docs/html/reference/build-system/setup-py.md
+++ b/docs/html/reference/build-system/setup-py.md
@@ -24,8 +24,6 @@ The overall process for building a package is:
- Generate the package's metadata.
- Generate a wheel for the package.
- - If this fails and we're trying to install the package, attempt a direct
- installation.
The wheel can then be used to perform an installation, if necessary.
@@ -58,13 +56,6 @@ If this wheel generation fails, pip runs `setup.py clean` to clean up any build
artifacts that may have been generated. After that, pip will attempt a direct
installation.
-### Direct Installation
-
-When all else fails, pip will invoke `setup.py install` to install a package
-using setuptools' mechanisms to perform the installation. This is currently the
-last-resort fallback for projects that cannot be built into wheels, and may not
-be supported in the future.
-
### Editable Installation
For installing packages in "editable" mode
diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst
index 966b200f4..9a6f2901c 100644
--- a/docs/html/user_guide.rst
+++ b/docs/html/user_guide.rst
@@ -127,6 +127,10 @@ Logically, a Requirements file is just a list of :ref:`pip install` arguments
placed in a file. Note that you should not rely on the items in the file being
installed by pip in any particular order.
+Besides local files, requirements files can also be served via a URL, e.g.
+http://example.com/requirements.txt, so that they can be stored and served in
+a centralized place.
+
In practice, there are 4 common uses of Requirements files:
1. Requirements files are used to hold the result from :ref:`pip freeze` for the
@@ -248,6 +252,10 @@ undocumented and unsupported quirks from the previous implementation,
and stripped constraints files down to being purely a way to specify
global (version) limits for packages.
+Like requirements files, constraints files can also be served via a URL,
+e.g. http://example.com/constraints.txt, so that your organization can store and
+serve them in a centralized place.
+
.. _`Installing from Wheels`:
diff --git a/docs/pip_sphinxext.py b/docs/pip_sphinxext.py
index f398b7d09..2e5597022 100644
--- a/docs/pip_sphinxext.py
+++ b/docs/pip_sphinxext.py
@@ -254,7 +254,6 @@ class PipCLIDirective(rst.Directive):
lines = []
# Create a tab for each OS
for os, variant in os_variants.items():
-
# Unpack the values
prompt = variant["prompt"]
highlighter = variant["highlighter"]
diff --git a/docs/requirements.txt b/docs/requirements.txt
index fa3a7390c..ef72c8fb7 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,4 +1,4 @@
-sphinx ~= 4.2, != 4.4.0
+sphinx ~= 6.0
towncrier
furo
myst_parser
diff --git a/news/10476.feature.rst b/news/10476.feature.rst
new file mode 100644
index 000000000..7c2757771
--- /dev/null
+++ b/news/10476.feature.rst
@@ -0,0 +1 @@
+Specify the egg-link location in the assertion message when it does not match the installed location, to provide a better error message for debugging.
diff --git a/news/11325.feature.rst b/news/11325.feature.rst
new file mode 100644
index 000000000..282310816
--- /dev/null
+++ b/news/11325.feature.rst
@@ -0,0 +1 @@
+Support a per-requirement ``--config-settings`` option in requirements files.
diff --git a/news/11453.removal.rst b/news/11453.removal.rst
new file mode 100644
index 000000000..91ebfda04
--- /dev/null
+++ b/news/11453.removal.rst
@@ -0,0 +1,2 @@
+``--no-binary`` no longer disables the cache of locally built wheels. It only
+means "don't download wheels".
diff --git a/news/11719.bugfix.rst b/news/11719.bugfix.rst
new file mode 100644
index 000000000..c2ae8bc1d
--- /dev/null
+++ b/news/11719.bugfix.rst
@@ -0,0 +1 @@
+Normalize paths before checking if installed scripts are on PATH.
diff --git a/news/11889.bugfix.rst b/news/11889.bugfix.rst
new file mode 100644
index 000000000..e760fd1fb
--- /dev/null
+++ b/news/11889.bugfix.rst
@@ -0,0 +1,4 @@
+The ``uninstall`` and ``install --force-reinstall`` commands no longer call
+``normalize_path()`` repeatedly on the same paths. Instead, the results are
+cached for the duration of an uninstall operation, improving performance,
+particularly on Windows.
diff --git a/news/11908.feature.rst b/news/11908.feature.rst
new file mode 100644
index 000000000..2b9ec18d9
--- /dev/null
+++ b/news/11908.feature.rst
@@ -0,0 +1 @@
+Reduce the number of resolver rounds, since backjumping makes the resolver more efficient in finding solutions. This also makes pathological cases fail more quickly.
diff --git a/news/11935.feature.rst b/news/11935.feature.rst
new file mode 100644
index 000000000..b170ca1d8
--- /dev/null
+++ b/news/11935.feature.rst
@@ -0,0 +1 @@
+Warn if ``--hash`` is used on a line without a requirement in a requirements file.
diff --git a/news/11936.bugfix.rst b/news/11936.bugfix.rst
new file mode 100644
index 000000000..4ae3ad69a
--- /dev/null
+++ b/news/11936.bugfix.rst
@@ -0,0 +1 @@
+Fix and improve the parsing of hashes embedded in URL fragments.
diff --git a/news/11938.bugfix.rst b/news/11938.bugfix.rst
new file mode 100644
index 000000000..b299f8e4f
--- /dev/null
+++ b/news/11938.bugfix.rst
@@ -0,0 +1,3 @@
+When package A depends on package B provided as a direct URL dependency including a hash
+embedded in the link, the ``--require-hashes`` option did not warn when user supplied hashes
+were missing for package B.
diff --git a/news/11941.feature.rst b/news/11941.feature.rst
new file mode 100644
index 000000000..404f2cb2d
--- /dev/null
+++ b/news/11941.feature.rst
@@ -0,0 +1,4 @@
+Stop propagating CLI ``--config-settings`` to the build dependencies. They already did
+not propagate to requirements provided in requirement files. To pass the same config
+settings to several requirements, users should provide the requirements as CLI
+arguments.
diff --git a/news/11946.bugfix.rst b/news/11946.bugfix.rst
new file mode 100644
index 000000000..74437dc82
--- /dev/null
+++ b/news/11946.bugfix.rst
@@ -0,0 +1,2 @@
+Correctly report ``requested_extras`` in the installation report when extras are
+specified for a local directory installation.
diff --git a/news/11948.bugfix.rst b/news/11948.bugfix.rst
new file mode 100644
index 000000000..74af91381
--- /dev/null
+++ b/news/11948.bugfix.rst
@@ -0,0 +1,3 @@
+When installing an archive from a direct URL or local file, populate
+``download_info.info.hashes`` in the installation report, in addition to the legacy
+``download_info.info.hash`` key.
diff --git a/news/11954.doc.rst b/news/11954.doc.rst
new file mode 100644
index 000000000..946b4057f
--- /dev/null
+++ b/news/11954.doc.rst
@@ -0,0 +1 @@
+Make it clear that a requirements/constraints file can be a URL.
diff --git a/news/5037.feature.rst b/news/5037.feature.rst
new file mode 100644
index 000000000..fe4637b6c
--- /dev/null
+++ b/news/5037.feature.rst
@@ -0,0 +1 @@
+Support wheel cache when using ``--require-hashes``.
diff --git a/news/8368.removal.rst b/news/8368.removal.rst
new file mode 100644
index 000000000..44ee33aa7
--- /dev/null
+++ b/news/8368.removal.rst
@@ -0,0 +1,2 @@
+Remove ``setup.py install`` fallback when building a wheel failed for projects without
+``pyproject.toml``.
diff --git a/news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst b/news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst
diff --git a/news/msgpack.vendor.rst b/news/msgpack.vendor.rst
new file mode 100644
index 000000000..9193b7ce5
--- /dev/null
+++ b/news/msgpack.vendor.rst
@@ -0,0 +1 @@
+Upgrade msgpack to 1.0.5
diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst
new file mode 100644
index 000000000..939253e14
--- /dev/null
+++ b/news/platformdirs.vendor.rst
@@ -0,0 +1 @@
+Upgrade platformdirs to 3.2.0
diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst
new file mode 100644
index 000000000..a6c8edafc
--- /dev/null
+++ b/news/pygments.vendor.rst
@@ -0,0 +1 @@
+Upgrade pygments to 2.14.0
diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst
new file mode 100644
index 000000000..0bedd3bb4
--- /dev/null
+++ b/news/rich.vendor.rst
@@ -0,0 +1 @@
+Upgrade rich to 13.3.3
diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst
index f86cecbca..9cf3f49e2 100644
--- a/news/setuptools.vendor.rst
+++ b/news/setuptools.vendor.rst
@@ -1 +1 @@
-Update pkg_resources (via setuptools) to 65.6.3
+Upgrade setuptools to 67.6.1
diff --git a/news/tenacity.vendor.rst b/news/tenacity.vendor.rst
new file mode 100644
index 000000000..493d38d01
--- /dev/null
+++ b/news/tenacity.vendor.rst
@@ -0,0 +1 @@
+Upgrade tenacity to 8.2.2
diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst
new file mode 100644
index 000000000..e71aeb663
--- /dev/null
+++ b/news/typing_extensions.vendor.rst
@@ -0,0 +1 @@
+Upgrade typing_extensions to 4.5.0
diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst
new file mode 100644
index 000000000..09e82a8f2
--- /dev/null
+++ b/news/urllib3.vendor.rst
@@ -0,0 +1 @@
+Upgrade urllib3 to 1.26.15
diff --git a/noxfile.py b/noxfile.py
index 5c4683b7d..565a50399 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -133,6 +133,7 @@ def docs(session: nox.Session) -> None:
# fmt: off
return [
"sphinx-build",
+ "--keep-going",
"-W",
"-c", "docs/html", # see note above
"-d", "docs/build/doctrees/" + kind,
diff --git a/src/pip/_internal/cache.py b/src/pip/_internal/cache.py
index c53b7f023..05f0a9acb 100644
--- a/src/pip/_internal/cache.py
+++ b/src/pip/_internal/cache.py
@@ -6,14 +6,13 @@ import json
import logging
import os
from pathlib import Path
-from typing import Any, Dict, List, Optional, Set
+from typing import Any, Dict, List, Optional
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.direct_url import DirectUrl
-from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
@@ -33,25 +32,13 @@ def _hash_dict(d: Dict[str, str]) -> str:
class Cache:
"""An abstract class - provides cache directories for data from links
-
:param cache_dir: The root of the cache.
- :param format_control: An object of FormatControl class to limit
- binaries being read from the cache.
- :param allowed_formats: which formats of files the cache should store.
- ('binary' and 'source' are the only allowed values)
"""
- def __init__(
- self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str]
- ) -> None:
+ def __init__(self, cache_dir: str) -> None:
super().__init__()
assert not cache_dir or os.path.isabs(cache_dir)
self.cache_dir = cache_dir or None
- self.format_control = format_control
- self.allowed_formats = allowed_formats
-
- _valid_formats = {"source", "binary"}
- assert self.allowed_formats.union(_valid_formats) == _valid_formats
def _get_cache_path_parts(self, link: Link) -> List[str]:
"""Get parts of part that must be os.path.joined with cache_dir"""
@@ -91,10 +78,6 @@ class Cache:
if can_not_cache:
return []
- formats = self.format_control.get_allowed_formats(canonical_package_name)
- if not self.allowed_formats.intersection(formats):
- return []
-
candidates = []
path = self.get_path_for_link(link)
if os.path.isdir(path):
@@ -121,8 +104,8 @@ class Cache:
class SimpleWheelCache(Cache):
"""A cache of wheels for future installs."""
- def __init__(self, cache_dir: str, format_control: FormatControl) -> None:
- super().__init__(cache_dir, format_control, {"binary"})
+ def __init__(self, cache_dir: str) -> None:
+ super().__init__(cache_dir)
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached wheels for link
@@ -191,13 +174,13 @@ class SimpleWheelCache(Cache):
class EphemWheelCache(SimpleWheelCache):
"""A SimpleWheelCache that creates it's own temporary cache directory"""
- def __init__(self, format_control: FormatControl) -> None:
+ def __init__(self) -> None:
self._temp_dir = TempDirectory(
kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
globally_managed=True,
)
- super().__init__(self._temp_dir.path, format_control)
+ super().__init__(self._temp_dir.path)
class CacheEntry:
@@ -221,14 +204,10 @@ class WheelCache(Cache):
when a certain link is not found in the simple wheel cache first.
"""
- def __init__(
- self, cache_dir: str, format_control: Optional[FormatControl] = None
- ) -> None:
- if format_control is None:
- format_control = FormatControl()
- super().__init__(cache_dir, format_control, {"binary"})
- self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
- self._ephem_cache = EphemWheelCache(format_control)
+ def __init__(self, cache_dir: str) -> None:
+ super().__init__(cache_dir)
+ self._wheel_cache = SimpleWheelCache(cache_dir)
+ self._ephem_cache = EphemWheelCache()
def get_path_for_link(self, link: Link) -> str:
return self._wheel_cache.get_path_for_link(link)
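
The `Cache` hierarchy thus loses all of its `FormatControl` plumbing, since `--no-binary` no longer influences the wheel cache (see news/11453.removal.rst). A minimal sketch of the simplified API, with a hypothetical cache directory and URL; `global_tempdir_manager` is used here because `WheelCache` creates a globally managed ephemeral directory alongside the persistent one:

```python
from pip._internal.cache import WheelCache
from pip._internal.models.link import Link
from pip._internal.utils.temp_dir import global_tempdir_manager

with global_tempdir_manager():
    # The constructor now takes only the cache directory -- no FormatControl.
    wheel_cache = WheelCache("/tmp/pip-wheel-cache")  # hypothetical path
    link = Link("https://example.com/sampleproject-1.0.tar.gz")  # hypothetical
    # Deterministic subdirectory of the cache root, derived from the link.
    print(wheel_cache.get_path_for_link(link))
```
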
diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py
index 5bd7e67e6..637fba18c 100644
--- a/src/pip/_internal/cli/base_command.py
+++ b/src/pip/_internal/cli/base_command.py
@@ -122,6 +122,15 @@ class Command(CommandContextMixIn):
user_log_file=options.log,
)
+ always_enabled_features = set(options.features_enabled) & set(
+ cmdoptions.ALWAYS_ENABLED_FEATURES
+ )
+ if always_enabled_features:
+ logger.warning(
+ "The following features are always enabled: %s. ",
+ ", ".join(sorted(always_enabled_features)),
+ )
+
# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py
index c27ba1c6a..02ba60827 100644
--- a/src/pip/_internal/cli/cmdoptions.py
+++ b/src/pip/_internal/cli/cmdoptions.py
@@ -257,7 +257,7 @@ keyring_provider: Callable[..., Option] = partial(
"--keyring-provider",
dest="keyring_provider",
choices=["auto", "disabled", "import", "subprocess"],
- default="disabled",
+ default="auto",
help=(
"Enable the credential lookup via the keyring library if user input is allowed."
" Specify which mechanism to use [disabled, import, subprocess]."
@@ -994,6 +994,11 @@ no_python_version_warning: Callable[..., Option] = partial(
)
+# Features that are now always on. A warning is printed if they are used.
+ALWAYS_ENABLED_FEATURES = [
+ "no-binary-enable-wheel-cache", # always on since 23.1
+]
+
use_new_feature: Callable[..., Option] = partial(
Option,
"--use-feature",
@@ -1004,8 +1009,8 @@ use_new_feature: Callable[..., Option] = partial(
choices=[
"fast-deps",
"truststore",
- "no-binary-enable-wheel-cache",
- ],
+ ]
+ + ALWAYS_ENABLED_FEATURES,
help="Enable new functionality, that may be backward incompatible.",
)
diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py
index bb3340319..c2f4e38be 100644
--- a/src/pip/_internal/cli/req_command.py
+++ b/src/pip/_internal/cli/req_command.py
@@ -344,7 +344,6 @@ class RequirementCommand(IndexGroupCommand):
install_req_from_req_string,
isolated=options.isolated_mode,
use_pep517=use_pep517,
- config_settings=getattr(options, "config_settings", None),
)
resolver_variant = cls.determine_resolver_variant(options)
# The long import name and duplicated invocation is needed to convince
@@ -439,6 +438,9 @@ class RequirementCommand(IndexGroupCommand):
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
+ config_settings=parsed_req.options.get("config_settings")
+ if parsed_req.options
+ else None,
)
requirements.append(req_to_add)
diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py
index c5f03302d..e96d2b492 100644
--- a/src/pip/_internal/commands/cache.py
+++ b/src/pip/_internal/commands/cache.py
@@ -37,7 +37,6 @@ class CacheCommand(Command):
"""
def add_options(self) -> None:
-
self.cmd_opts.add_option(
"--format",
action="store",
diff --git a/src/pip/_internal/commands/check.py b/src/pip/_internal/commands/check.py
index 3864220b2..584df9f55 100644
--- a/src/pip/_internal/commands/check.py
+++ b/src/pip/_internal/commands/check.py
@@ -20,7 +20,6 @@ class CheckCommand(Command):
%prog [options]"""
def run(self, options: Values, args: List[str]) -> int:
-
package_set, parsing_probs = create_package_set_from_installed()
missing, conflicting = check_package_set(package_set)
diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py
index 90388d118..36e947c8c 100644
--- a/src/pip/_internal/commands/download.py
+++ b/src/pip/_internal/commands/download.py
@@ -76,7 +76,6 @@ class DownloadCommand(RequirementCommand):
@with_cleanup
def run(self, options: Values, args: List[str]) -> int:
-
options.ignore_installed = True
# editable doesn't really make sense for `pip download`, but the bowels
# of the RequirementSet code require that property.
diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py
index 4783f807f..3c15ed415 100644
--- a/src/pip/_internal/commands/install.py
+++ b/src/pip/_internal/commands/install.py
@@ -30,10 +30,6 @@ from pip._internal.req.req_install import (
check_legacy_setup_py_options,
)
from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.deprecation import (
- LegacyInstallReasonFailedBdistWheel,
- deprecated,
-)
from pip._internal.utils.filesystem import test_writable_dir
from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import (
@@ -346,24 +342,7 @@ class InstallCommand(RequirementCommand):
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(options, reqs)
- if "no-binary-enable-wheel-cache" in options.features_enabled:
- # TODO: remove format_control from WheelCache when the deprecation cycle
- # is over
- wheel_cache = WheelCache(options.cache_dir)
- else:
- if options.format_control.no_binary:
- deprecated(
- reason=(
- "--no-binary currently disables reading from "
- "the cache of locally built wheels. In the future "
- "--no-binary will not influence the wheel cache."
- ),
- replacement="to use the --no-cache-dir option",
- feature_flag="no-binary-enable-wheel-cache",
- issue=11453,
- gone_in="23.1",
- )
- wheel_cache = WheelCache(options.cache_dir, options.format_control)
+ wheel_cache = WheelCache(options.cache_dir)
# Only when installing is it permitted to use PEP 660.
# In other circumstances (pip wheel, pip download) we generate
@@ -443,26 +422,14 @@ class InstallCommand(RequirementCommand):
global_options=global_options,
)
- # If we're using PEP 517, we cannot do a legacy setup.py install
- # so we fail here.
- pep517_build_failure_names: List[str] = [
- r.name for r in build_failures if r.use_pep517 # type: ignore
- ]
- if pep517_build_failure_names:
+ if build_failures:
raise InstallationError(
"Could not build wheels for {}, which is required to "
"install pyproject.toml-based projects".format(
- ", ".join(pep517_build_failure_names)
+ ", ".join(r.name for r in build_failures) # type: ignore
)
)
- # For now, we just warn about failures building legacy
- # requirements, as we'll fall through to a setup.py install for
- # those.
- for r in build_failures:
- if not r.use_pep517:
- r.legacy_install_reason = LegacyInstallReasonFailedBdistWheel
-
to_install = resolver.get_installation_order(requirement_set)
# Check for conflicts in the package set we're installing.
diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py
index a8483559c..c6a588ff0 100644
--- a/src/pip/_internal/commands/wheel.py
+++ b/src/pip/_internal/commands/wheel.py
@@ -14,7 +14,6 @@ from pip._internal.req.req_install import (
InstallRequirement,
check_legacy_setup_py_options,
)
-from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel_builder import build, should_build_for_wheel_command
@@ -43,7 +42,6 @@ class WheelCommand(RequirementCommand):
%prog [options] <archive url/path> ..."""
def add_options(self) -> None:
-
self.cmd_opts.add_option(
"-w",
"--wheel-dir",
@@ -107,7 +105,6 @@ class WheelCommand(RequirementCommand):
session = self.get_default_session(options)
finder = self._build_package_finder(options, session)
- wheel_cache = WheelCache(options.cache_dir, options.format_control)
options.wheel_dir = normalize_path(options.wheel_dir)
ensure_dir(options.wheel_dir)
@@ -123,24 +120,7 @@ class WheelCommand(RequirementCommand):
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(options, reqs)
- if "no-binary-enable-wheel-cache" in options.features_enabled:
- # TODO: remove format_control from WheelCache when the deprecation cycle
- # is over
- wheel_cache = WheelCache(options.cache_dir)
- else:
- if options.format_control.no_binary:
- deprecated(
- reason=(
- "--no-binary currently disables reading from "
- "the cache of locally built wheels. In the future "
- "--no-binary will not influence the wheel cache."
- ),
- replacement="to use the --no-cache-dir option",
- feature_flag="no-binary-enable-wheel-cache",
- issue=11453,
- gone_in="23.1",
- )
- wheel_cache = WheelCache(options.cache_dir, options.format_control)
+ wheel_cache = WheelCache(options.cache_dir)
preparer = self.make_requirement_preparer(
temp_build_dir=directory,
diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py
index d4527295d..7d92ba699 100644
--- a/src/pip/_internal/exceptions.py
+++ b/src/pip/_internal/exceptions.py
@@ -361,20 +361,6 @@ class MetadataInconsistent(InstallationError):
)
-class LegacyInstallFailure(DiagnosticPipError):
- """Error occurred while executing `setup.py install`"""
-
- reference = "legacy-install-failure"
-
- def __init__(self, package_details: str) -> None:
- super().__init__(
- message="Encountered error while trying to install package.",
- context=package_details,
- hint_stmt="See above for output from the failure.",
- note_stmt="This is an issue with the package mentioned above, not pip.",
- )
-
-
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
"""A subprocess call failed."""
diff --git a/src/pip/_internal/index/sources.py b/src/pip/_internal/index/sources.py
index eec3f12f7..cd9cb8d40 100644
--- a/src/pip/_internal/index/sources.py
+++ b/src/pip/_internal/index/sources.py
@@ -171,7 +171,6 @@ def build_source(
expand_dir: bool,
cache_link_parsing: bool,
) -> Tuple[Optional[str], Optional[LinkSource]]:
-
path: Optional[str] = None
url: Optional[str] = None
if os.path.exists(location): # Is a local path.
diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py
index c3de70a74..e219d7384 100644
--- a/src/pip/_internal/models/direct_url.py
+++ b/src/pip/_internal/models/direct_url.py
@@ -105,22 +105,31 @@ class ArchiveInfo:
hash: Optional[str] = None,
hashes: Optional[Dict[str, str]] = None,
) -> None:
- if hash is not None:
+ # set hashes before hash, since the hash setter will further populate hashes
+ self.hashes = hashes
+ self.hash = hash
+
+ @property
+ def hash(self) -> Optional[str]:
+ return self._hash
+
+ @hash.setter
+ def hash(self, value: Optional[str]) -> None:
+ if value is not None:
# Auto-populate the hashes key to upgrade to the new format automatically.
- # We don't back-populate the legacy hash key.
+ # We don't back-populate the legacy hash key from hashes.
try:
- hash_name, hash_value = hash.split("=", 1)
+ hash_name, hash_value = value.split("=", 1)
except ValueError:
raise DirectUrlValidationError(
- f"invalid archive_info.hash format: {hash!r}"
+ f"invalid archive_info.hash format: {value!r}"
)
- if hashes is None:
- hashes = {hash_name: hash_value}
- elif hash_name not in hash:
- hashes = hashes.copy()
- hashes[hash_name] = hash_value
- self.hash = hash
- self.hashes = hashes
+ if self.hashes is None:
+ self.hashes = {hash_name: hash_value}
+ elif hash_name not in self.hashes:
+ self.hashes = self.hashes.copy()
+ self.hashes[hash_name] = hash_value
+ self._hash = value
@classmethod
def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
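
The property-based rewrite keeps the two fields consistent without duplicating the parsing logic in `__init__`. A minimal sketch of the new setter behavior (digests are hypothetical): assigning the legacy `hash` field auto-populates the modern `hashes` dict, while a pre-existing entry for the same algorithm is left untouched:

```python
from pip._internal.models.direct_url import ArchiveInfo

# Legacy single-hash input auto-populates the modern ``hashes`` dict.
info = ArchiveInfo(hash="sha256=0123abcd")  # hypothetical digest
assert info.hash == "sha256=0123abcd"
assert info.hashes == {"sha256": "0123abcd"}

# An entry already present for the same algorithm is not overwritten.
info = ArchiveInfo(hash="sha256=0123abcd", hashes={"sha256": "ffffffff"})
assert info.hashes == {"sha256": "ffffffff"}
```
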
diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py
index b54afb109..fef3757f2 100644
--- a/src/pip/_internal/models/installation_report.py
+++ b/src/pip/_internal/models/installation_report.py
@@ -14,7 +14,7 @@ class InstallationReport:
def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
assert ireq.download_info, f"No download_info for {ireq}"
res = {
- # PEP 610 json for the download URL. download_info.archive_info.hash may
+ # PEP 610 json for the download URL. download_info.archive_info.hashes may
# be absent when the requirement was installed from the wheel cache
# and the cache entry was populated by an older pip version that did not
# record origin.json.
diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py
index a1e4d5a08..e741c3283 100644
--- a/src/pip/_internal/models/link.py
+++ b/src/pip/_internal/models/link.py
@@ -55,25 +55,37 @@ class LinkHash:
name: str
value: str
- _hash_re = re.compile(
+ _hash_url_fragment_re = re.compile(
# NB: we do not validate that the second group (.*) is a valid hex
# digest. Instead, we simply keep that string in this class, and then check it
# against Hashes when hash-checking is needed. This is easier to debug than
# proactively discarding an invalid hex digest, as we handle incorrect hashes
# and malformed hashes in the same place.
- r"({choices})=(.*)".format(
+ r"[#&]({choices})=([^&]*)".format(
choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
),
)
def __post_init__(self) -> None:
- assert self._hash_re.match(f"{self.name}={self.value}")
+ assert self.name in _SUPPORTED_HASHES
+
+ @classmethod
+ def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]:
+ """Parse a PEP 658 data-dist-info-metadata hash."""
+ if dist_info_metadata == "true":
+ return None
+ name, sep, value = dist_info_metadata.partition("=")
+ if not sep:
+ return None
+ if name not in _SUPPORTED_HASHES:
+ return None
+ return cls(name=name, value=value)
@classmethod
@functools.lru_cache(maxsize=None)
- def split_hash_name_and_value(cls, url: str) -> Optional["LinkHash"]:
+ def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
"""Search a string for a checksum algorithm name and encoded output value."""
- match = cls._hash_re.search(url)
+ match = cls._hash_url_fragment_re.search(url)
if match is None:
return None
name, value = match.groups()
@@ -217,7 +229,7 @@ class Link(KeyBasedCompareMixin):
# trying to set a new value.
self._url = url
- link_hash = LinkHash.split_hash_name_and_value(url)
+ link_hash = LinkHash.find_hash_url_fragment(url)
hashes_from_link = {} if link_hash is None else link_hash.as_dict()
if hashes is None:
self._hashes = hashes_from_link
@@ -402,15 +414,10 @@ class Link(KeyBasedCompareMixin):
if self.dist_info_metadata is None:
return None
metadata_url = f"{self.url_without_fragment}.metadata"
- # If data-dist-info-metadata="true" is set, then the metadata file exists,
- # but there is no information about its checksum or anything else.
- if self.dist_info_metadata != "true":
- link_hash = LinkHash.split_hash_name_and_value(self.dist_info_metadata)
- else:
- link_hash = None
- if link_hash is None:
+ metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata)
+ if metadata_link_hash is None:
return Link(metadata_url)
- return Link(metadata_url, hashes=link_hash.as_dict())
+ return Link(metadata_url, hashes=metadata_link_hash.as_dict())
def as_hashes(self) -> Hashes:
return Hashes({k: [v] for k, v in self._hashes.items()})
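
The split into `find_hash_url_fragment` and `parse_pep658_hash` separates the two distinct inputs the old regex conflated. A minimal sketch exercising both entry points (URL and digests are hypothetical):

```python
from pip._internal.models.link import LinkHash

# Hash embedded as a URL fragment: the new regex anchors on '#' or '&'.
frag = LinkHash.find_hash_url_fragment(
    "https://example.com/pkg-1.0.tar.gz#sha256=0123abcd"  # hypothetical
)
assert frag is not None
assert (frag.name, frag.value) == ("sha256", "0123abcd")

# PEP 658 attribute: "true" means metadata exists but carries no checksum.
assert LinkHash.parse_pep658_hash("true") is None
pep658 = LinkHash.parse_pep658_hash("sha256=0123abcd")
assert pep658 is not None and pep658.as_dict() == {"sha256": "0123abcd"}
```
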
diff --git a/src/pip/_internal/models/search_scope.py b/src/pip/_internal/models/search_scope.py
index a64af7389..fe61e8116 100644
--- a/src/pip/_internal/models/search_scope.py
+++ b/src/pip/_internal/models/search_scope.py
@@ -79,7 +79,6 @@ class SearchScope:
redacted_index_urls = []
if self.index_urls and self.index_urls != [PyPI.simple_url]:
for url in self.index_urls:
-
redacted_index_url = redact_auth_from_url(url)
# Parse the URL
diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py
index e512ac784..6c40ade15 100644
--- a/src/pip/_internal/network/session.py
+++ b/src/pip/_internal/network/session.py
@@ -316,7 +316,6 @@ class InsecureCacheControlAdapter(CacheControlAdapter):
class PipSession(requests.Session):
-
timeout: Optional[int] = None
def __init__(
diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py
index 930d4c600..354456845 100644
--- a/src/pip/_internal/operations/freeze.py
+++ b/src/pip/_internal/operations/freeze.py
@@ -145,9 +145,10 @@ def freeze(
def _format_as_name_version(dist: BaseDistribution) -> str:
- if isinstance(dist.version, Version):
- return f"{dist.raw_name}=={dist.version}"
- return f"{dist.raw_name}==={dist.version}"
+ dist_version = dist.version
+ if isinstance(dist_version, Version):
+ return f"{dist.raw_name}=={dist_version}"
+ return f"{dist.raw_name}==={dist_version}"
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
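
The `==` vs `===` distinction exists because only PEP 440-compliant versions may be pinned with `==`; anything else needs the arbitrary-equality operator so the pin round-trips exactly. A sketch of the dispatch, using a hypothetical `format_pin` helper in place of `_format_as_name_version`:

```python
from pip._vendor.packaging.version import Version

def format_pin(raw_name: str, version: object) -> str:
    # PEP 440 versions pin with "=="; non-standard version strings need the
    # arbitrary-equality operator "===".
    op = "==" if isinstance(version, Version) else "==="
    return f"{raw_name}{op}{version}"

print(format_pin("sampleproject", Version("1.0")))  # sampleproject==1.0
print(format_pin("oddball", "2004-alpha"))          # oddball===2004-alpha
```
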
diff --git a/src/pip/_internal/operations/install/legacy.py b/src/pip/_internal/operations/install/legacy.py
deleted file mode 100644
index 0b108d0ca..000000000
--- a/src/pip/_internal/operations/install/legacy.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""Legacy installation process, i.e. `setup.py install`.
-"""
-
-import logging
-import os
-from typing import List, Optional, Sequence
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.exceptions import InstallationError, LegacyInstallFailure
-from pip._internal.locations.base import change_root
-from pip._internal.models.scheme import Scheme
-from pip._internal.utils.misc import ensure_dir
-from pip._internal.utils.setuptools_build import make_setuptools_install_args
-from pip._internal.utils.subprocess import runner_with_spinner_message
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-def write_installed_files_from_setuptools_record(
- record_lines: List[str],
- root: Optional[str],
- req_description: str,
-) -> None:
- def prepend_root(path: str) -> str:
- if root is None or not os.path.isabs(path):
- return path
- else:
- return change_root(root, path)
-
- for line in record_lines:
- directory = os.path.dirname(line)
- if directory.endswith(".egg-info"):
- egg_info_dir = prepend_root(directory)
- break
- else:
- message = (
- "{} did not indicate that it installed an "
- ".egg-info directory. Only setup.py projects "
- "generating .egg-info directories are supported."
- ).format(req_description)
- raise InstallationError(message)
-
- new_lines = []
- for line in record_lines:
- filename = line.strip()
- if os.path.isdir(filename):
- filename += os.path.sep
- new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir))
- new_lines.sort()
- ensure_dir(egg_info_dir)
- inst_files_path = os.path.join(egg_info_dir, "installed-files.txt")
- with open(inst_files_path, "w") as f:
- f.write("\n".join(new_lines) + "\n")
-
-
-def install(
- global_options: Sequence[str],
- root: Optional[str],
- home: Optional[str],
- prefix: Optional[str],
- use_user_site: bool,
- pycompile: bool,
- scheme: Scheme,
- setup_py_path: str,
- isolated: bool,
- req_name: str,
- build_env: BuildEnvironment,
- unpacked_source_directory: str,
- req_description: str,
-) -> bool:
-
- header_dir = scheme.headers
-
- with TempDirectory(kind="record") as temp_dir:
- try:
- record_filename = os.path.join(temp_dir.path, "install-record.txt")
- install_args = make_setuptools_install_args(
- setup_py_path,
- global_options=global_options,
- record_filename=record_filename,
- root=root,
- prefix=prefix,
- header_dir=header_dir,
- home=home,
- use_user_site=use_user_site,
- no_user_config=isolated,
- pycompile=pycompile,
- )
-
- runner = runner_with_spinner_message(
- f"Running setup.py install for {req_name}"
- )
- with build_env:
- runner(
- cmd=install_args,
- cwd=unpacked_source_directory,
- )
-
- if not os.path.exists(record_filename):
- logger.debug("Record file %s not found", record_filename)
- # Signal to the caller that we didn't install the new package
- return False
-
- except Exception as e:
- # Signal to the caller that we didn't install the new package
- raise LegacyInstallFailure(package_details=req_name) from e
-
- # At this point, we have successfully installed the requirement.
-
- # We intentionally do not use any encoding to read the file because
- # setuptools writes the file using distutils.file_util.write_file,
- # which does not specify an encoding.
- with open(record_filename) as f:
- record_lines = f.read().splitlines()
-
- write_installed_files_from_setuptools_record(record_lines, root, req_description)
- return True
diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py
index c79941398..a8cd1330f 100644
--- a/src/pip/_internal/operations/install/wheel.py
+++ b/src/pip/_internal/operations/install/wheel.py
@@ -143,16 +143,18 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
# We don't want to warn for directories that are on PATH.
not_warn_dirs = [
- os.path.normcase(i).rstrip(os.sep)
+ os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
for i in os.environ.get("PATH", "").split(os.pathsep)
]
# If an executable sits with sys.executable, we don't warn for it.
# This covers the case of venv invocations without activating the venv.
- not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
+ not_warn_dirs.append(
+ os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
+ )
warn_for: Dict[str, Set[str]] = {
parent_dir: scripts
for parent_dir, scripts in grouped_by_dir.items()
- if os.path.normcase(parent_dir) not in not_warn_dirs
+ if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
}
if not warn_for:
return None
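
The fix is to canonicalize both sides of the comparison before matching script directories against `PATH`. A standalone sketch of the idiom (helper names here are hypothetical, not pip's):

```python
import os

def _canonical(path: str) -> str:
    # normpath collapses artifacts like "dir/./bin" and trailing separators;
    # normcase folds case and separators on Windows. Without both, directories
    # that are really equal can fail to compare equal.
    return os.path.normcase(os.path.normpath(path)).rstrip(os.sep)

def is_on_path(script_dir: str) -> bool:
    entries = {
        _canonical(entry) for entry in os.environ.get("PATH", "").split(os.pathsep)
    }
    return _canonical(script_dir) in entries

print(is_on_path("/usr/local/bin/"))  # a trailing separator no longer causes a miss
```
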
diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py
index 343a01bef..227331523 100644
--- a/src/pip/_internal/operations/prepare.py
+++ b/src/pip/_internal/operations/prepare.py
@@ -179,7 +179,10 @@ def unpack_url(
def _check_download_dir(
- link: Link, download_dir: str, hashes: Optional[Hashes]
+ link: Link,
+ download_dir: str,
+ hashes: Optional[Hashes],
+ warn_on_hash_mismatch: bool = True,
) -> Optional[str]:
"""Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
@@ -195,10 +198,11 @@ def _check_download_dir(
try:
hashes.check_against_path(download_path)
except HashMismatch:
- logger.warning(
- "Previously-downloaded file %s has bad hash. Re-downloading.",
- download_path,
- )
+ if warn_on_hash_mismatch:
+ logger.warning(
+ "Previously-downloaded file %s has bad hash. Re-downloading.",
+ download_path,
+ )
os.unlink(download_path)
return None
return download_path
@@ -263,7 +267,7 @@ class RequirementPreparer:
def _log_preparing_link(self, req: InstallRequirement) -> None:
"""Provide context for the requirement being prepared."""
- if req.link.is_file and not req.original_link_is_in_wheel_cache:
+ if req.link.is_file and not req.is_wheel_from_cache:
message = "Processing %s"
information = str(display_path(req.link.file_path))
else:
@@ -284,7 +288,7 @@ class RequirementPreparer:
self._previous_requirement_header = (message, information)
logger.info(message, information)
- if req.original_link_is_in_wheel_cache:
+ if req.is_wheel_from_cache:
with indent_log():
logger.info("Using cached %s", req.link.filename)
@@ -485,7 +489,18 @@ class RequirementPreparer:
file_path = None
if self.download_dir is not None and req.link.is_wheel:
hashes = self._get_linked_req_hashes(req)
- file_path = _check_download_dir(req.link, self.download_dir, hashes)
+ file_path = _check_download_dir(
+ req.link,
+ self.download_dir,
+ hashes,
+ # When a locally built wheel has been found in cache, we don't warn
+ # about re-downloading when the already downloaded wheel hash does
+ # not match. This is because the hash must be checked against the
+                # original link, not the cached link. In that case the already
+ # downloaded file will be removed and re-fetched from cache (which
+ # implies a hash check against the cache entry's origin.json).
+ warn_on_hash_mismatch=not req.is_wheel_from_cache,
+ )
if file_path is not None:
# The file is already available, so mark it as downloaded
@@ -536,9 +551,35 @@ class RequirementPreparer:
assert req.link
link = req.link
- self._ensure_link_req_src_dir(req, parallel_builds)
hashes = self._get_linked_req_hashes(req)
+ if hashes and req.is_wheel_from_cache:
+ assert req.download_info is not None
+ assert link.is_wheel
+ assert link.is_file
+ # We need to verify hashes, and we have found the requirement in the cache
+ # of locally built wheels.
+ if (
+ isinstance(req.download_info.info, ArchiveInfo)
+ and req.download_info.info.hashes
+ and hashes.has_one_of(req.download_info.info.hashes)
+ ):
+ # At this point we know the requirement was built from a hashable source
+ # artifact, and we verified that the cache entry's hash of the original
+ # artifact matches one of the hashes we expect. We don't verify hashes
+ # against the cached wheel, because the wheel is not the original.
+ hashes = None
+ else:
+ logger.warning(
+ "The hashes of the source archive found in cache entry "
+ "don't match, ignoring cached built wheel "
+ "and re-downloading source."
+ )
+ req.link = req.cached_wheel_source_link
+ link = req.link
+
+ self._ensure_link_req_src_dir(req, parallel_builds)
+
if link.is_existing_dir():
local_file = None
elif link.url not in self._downloaded:
@@ -571,12 +612,15 @@ class RequirementPreparer:
# Make sure we have a hash in download_info. If we got it as part of the
# URL, it will have been verified and we can rely on it. Otherwise we
# compute it from the downloaded file.
+ # FIXME: https://github.com/pypa/pip/issues/11943
if (
isinstance(req.download_info.info, ArchiveInfo)
- and not req.download_info.info.hash
+ and not req.download_info.info.hashes
and local_file
):
hash = hash_file(local_file.path)[0].hexdigest()
+ # We populate info.hash for backward compatibility.
+ # This will automatically populate info.hashes.
req.download_info.info.hash = f"sha256={hash}"
# For use in later processing,
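
The cache-verification branch above hinges on `Hashes.has_one_of`, which compares the user's expected hashes against the origin hashes recorded when the cached wheel was built. A minimal sketch, assuming `has_one_of` behaves as used in that branch (digest values are hypothetical):

```python
from pip._internal.utils.hashes import Hashes

# Hashes the user demanded for the source artifact (e.g. via --hash options).
expected = Hashes({"sha256": ["0123abcd"]})  # hypothetical digest

# Hashes recorded in the cache entry's origin.json at build time.
origin_hashes = {"sha256": "0123abcd"}

# True when any recorded origin hash satisfies an expected one, in which case
# the cached wheel can be used without hashing the wheel file itself.
print(expected.has_one_of(origin_hashes))  # True
```
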
diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py
index 37dbd32e7..c5ca2d85d 100644
--- a/src/pip/_internal/req/constructors.py
+++ b/src/pip/_internal/req/constructors.py
@@ -211,7 +211,6 @@ def install_req_from_editable(
permit_editable_wheels: bool = False,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
-
parts = parse_req_from_editable(editable_req)
return InstallRequirement(
@@ -417,7 +416,6 @@ def install_req_from_req_string(
isolated: bool = False,
use_pep517: Optional[bool] = None,
user_supplied: bool = False,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
try:
req = get_requirement(req_string)
@@ -447,7 +445,6 @@ def install_req_from_req_string(
isolated=isolated,
use_pep517=use_pep517,
user_supplied=user_supplied,
- config_settings=config_settings,
)
@@ -456,6 +453,7 @@ def install_req_from_parsed_requirement(
isolated: bool = False,
use_pep517: Optional[bool] = None,
user_supplied: bool = False,
+ config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
if parsed_req.is_editable:
req = install_req_from_editable(
@@ -465,6 +463,7 @@ def install_req_from_parsed_requirement(
constraint=parsed_req.constraint,
isolated=isolated,
user_supplied=user_supplied,
+ config_settings=config_settings,
)
else:
@@ -484,6 +483,7 @@ def install_req_from_parsed_requirement(
constraint=parsed_req.constraint,
line_source=parsed_req.line_source,
user_supplied=user_supplied,
+ config_settings=config_settings,
)
return req
diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py
index f8f07b0cd..f717c1ccc 100644
--- a/src/pip/_internal/req/req_file.py
+++ b/src/pip/_internal/req/req_file.py
@@ -2,6 +2,7 @@
Requirements file parsing
"""
+import logging
import optparse
import os
import re
@@ -71,11 +72,14 @@ SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
cmdoptions.global_options,
cmdoptions.hash,
+ cmdoptions.config_settings,
]
# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+logger = logging.getLogger(__name__)
+
class ParsedRequirement:
def __init__(
@@ -165,7 +169,6 @@ def handle_requirement_line(
line: ParsedLine,
options: Optional[optparse.Values] = None,
) -> ParsedRequirement:
-
# preserve for the nested code path
line_comes_from = "{} {} (line {})".format(
"-c" if line.constraint else "-r",
@@ -210,6 +213,12 @@ def handle_option_line(
options: Optional[optparse.Values] = None,
session: Optional[PipSession] = None,
) -> None:
+ if opts.hashes:
+ logger.warning(
+ "%s line %s has --hash but no requirement, and will be ignored.",
+ filename,
+ lineno,
+ )
if options:
# percolate options upward
diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py
index 2c628d36e..d01b24a91 100644
--- a/src/pip/_internal/req/req_install.py
+++ b/src/pip/_internal/req/req_install.py
@@ -20,7 +20,7 @@ from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
-from pip._internal.exceptions import InstallationError, LegacyInstallFailure
+from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.metadata import (
BaseDistribution,
@@ -39,15 +39,10 @@ from pip._internal.operations.build.metadata_legacy import (
from pip._internal.operations.install.editable_legacy import (
install_editable as install_editable_legacy,
)
-from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
-from pip._internal.utils.deprecation import LegacyInstallReason, deprecated
-from pip._internal.utils.direct_url_helpers import (
- direct_url_for_editable,
- direct_url_from_link,
-)
+from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
ConfiguredBuildBackendHookCaller,
@@ -97,7 +92,6 @@ class InstallRequirement:
self.constraint = constraint
self.editable = editable
self.permit_editable_wheels = permit_editable_wheels
- self.legacy_install_reason: Optional[LegacyInstallReason] = None
# source_dir is the local directory where the linked requirement is
# located, or unpacked. In case unpacking is needed, creating and
@@ -114,7 +108,11 @@ class InstallRequirement:
# PEP 508 URL requirement
link = Link(req.url)
self.link = self.original_link = link
- self.original_link_is_in_wheel_cache = False
+
+ # When this InstallRequirement is a wheel obtained from the cache of locally
+ # built wheels, this is the source link corresponding to the cache entry, which
+ # was used to download and build the cached wheel.
+ self.cached_wheel_source_link: Optional[Link] = None
# Information about the location of the artifact that was downloaded. This
# property is guaranteed to be set in resolver results.
@@ -293,7 +291,12 @@ class InstallRequirement:
"""
good_hashes = self.hash_options.copy()
- link = self.link if trust_internet else self.original_link
+ if trust_internet:
+ link = self.link
+ elif self.original_link and self.user_supplied:
+ link = self.original_link
+ else:
+ link = None
if link and link.hash:
good_hashes.setdefault(link.hash_name, []).append(link.hash)
return Hashes(good_hashes)
@@ -438,6 +441,12 @@ class InstallRequirement:
return False
return self.link.is_wheel
+ @property
+ def is_wheel_from_cache(self) -> bool:
+ # When True, it means that this InstallRequirement is a local wheel file in the
+ # cache of locally built wheels.
+ return self.cached_wheel_source_link is not None
+
# Things valid for sdists
@property
def unpacked_source_directory(self) -> str:
@@ -770,10 +779,9 @@ class InstallRequirement:
prefix=prefix,
)
- global_options = global_options if global_options is not None else []
if self.editable and not self.is_wheel:
install_editable_legacy(
- global_options=global_options,
+ global_options=global_options if global_options is not None else [],
prefix=prefix,
home=home,
use_user_site=use_user_site,
@@ -786,80 +794,23 @@ class InstallRequirement:
self.install_succeeded = True
return
- if self.is_wheel:
- assert self.local_file_path
- direct_url = None
- # TODO this can be refactored to direct_url = self.download_info
- if self.editable:
- direct_url = direct_url_for_editable(self.unpacked_source_directory)
- elif self.original_link:
- direct_url = direct_url_from_link(
- self.original_link,
- self.source_dir,
- self.original_link_is_in_wheel_cache,
- )
- install_wheel(
- self.name,
- self.local_file_path,
- scheme=scheme,
- req_description=str(self.req),
- pycompile=pycompile,
- warn_script_location=warn_script_location,
- direct_url=direct_url,
- requested=self.user_supplied,
- )
- self.install_succeeded = True
- return
-
- # TODO: Why don't we do this for editable installs?
-
- # Extend the list of global options passed on to
- # the setup.py call with the ones from the requirements file.
- # Options specified in requirements file override those
- # specified on the command line, since the last option given
- # to setup.py is the one that is used.
- global_options = list(global_options) + self.global_options
-
- try:
- if (
- self.legacy_install_reason is not None
- and self.legacy_install_reason.emit_before_install
- ):
- self.legacy_install_reason.emit_deprecation(self.name)
- success = install_legacy(
- global_options=global_options,
- root=root,
- home=home,
- prefix=prefix,
- use_user_site=use_user_site,
- pycompile=pycompile,
- scheme=scheme,
- setup_py_path=self.setup_py_path,
- isolated=self.isolated,
- req_name=self.name,
- build_env=self.build_env,
- unpacked_source_directory=self.unpacked_source_directory,
- req_description=str(self.req),
- )
- except LegacyInstallFailure as exc:
- self.install_succeeded = False
- raise exc
- except Exception:
- self.install_succeeded = True
- raise
-
- self.install_succeeded = success
+ assert self.is_wheel
+ assert self.local_file_path
- if (
- success
- and self.legacy_install_reason is not None
- and self.legacy_install_reason.emit_after_success
- ):
- self.legacy_install_reason.emit_deprecation(self.name)
+ install_wheel(
+ self.name,
+ self.local_file_path,
+ scheme=scheme,
+ req_description=str(self.req),
+ pycompile=pycompile,
+ warn_script_location=warn_script_location,
+ direct_url=self.download_info if self.original_link else None,
+ requested=self.user_supplied,
+ )
+ self.install_succeeded = True
def check_invalid_constraint_type(req: InstallRequirement) -> str:
-
# Check for unsupported forms
problem = ""
if not req.name:
diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py
index 15b67385c..ad5178e76 100644
--- a/src/pip/_internal/req/req_uninstall.py
+++ b/src/pip/_internal/req/req_uninstall.py
@@ -11,8 +11,9 @@ from pip._internal.metadata import BaseDistribution
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.egg_link import egg_link_path_from_location
from pip._internal.utils.logging import getLogger, indent_log
-from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree
+from pip._internal.utils.misc import ask, normalize_path, renames, rmtree
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
+from pip._internal.utils.virtualenv import running_under_virtualenv
logger = getLogger(__name__)
@@ -312,6 +313,10 @@ class UninstallPathSet:
self._pth: Dict[str, UninstallPthEntries] = {}
self._dist = dist
self._moved_paths = StashedUninstallPathSet()
+ # Create local cache of normalize_path results. Creating an UninstallPathSet
+ # can result in hundreds/thousands of redundant calls to normalize_path with
+ # the same args, which hurts performance.
+ self._normalize_path_cached = functools.lru_cache()(normalize_path)
def _permitted(self, path: str) -> bool:
"""
@@ -319,14 +324,17 @@ class UninstallPathSet:
remove/modify, False otherwise.
"""
- return is_local(path)
+ # aka is_local, but caching normalized sys.prefix
+ if not running_under_virtualenv():
+ return True
+ return path.startswith(self._normalize_path_cached(sys.prefix))
def add(self, path: str) -> None:
head, tail = os.path.split(path)
# we normalize the head to resolve parent directory symlinks, but not
# the tail, since we only want to uninstall symlinks, not their targets
- path = os.path.join(normalize_path(head), os.path.normcase(tail))
+ path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail))
if not os.path.exists(path):
return
@@ -341,7 +349,7 @@ class UninstallPathSet:
self.add(cache_from_source(path))
def add_pth(self, pth_file: str, entry: str) -> None:
- pth_file = normalize_path(pth_file)
+ pth_file = self._normalize_path_cached(pth_file)
if self._permitted(pth_file):
if pth_file not in self._pth:
self._pth[pth_file] = UninstallPthEntries(pth_file)
@@ -531,12 +539,14 @@ class UninstallPathSet:
# above, so this only covers the setuptools-style editable.
with open(develop_egg_link) as fh:
link_pointer = os.path.normcase(fh.readline().strip())
- normalized_link_pointer = normalize_path(link_pointer)
+ normalized_link_pointer = paths_to_remove._normalize_path_cached(
+ link_pointer
+ )
assert os.path.samefile(
normalized_link_pointer, normalized_dist_location
), (
- f"Egg-link {link_pointer} does not match installed location of "
- f"{dist.raw_name} (at {dist_location})"
+ f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
+ f"installed location of {dist.raw_name} (at {dist_location})"
)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(
diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py
index fb49d4169..b17b7e453 100644
--- a/src/pip/_internal/resolution/legacy/resolver.py
+++ b/src/pip/_internal/resolution/legacy/resolver.py
@@ -431,12 +431,12 @@ class Resolver(BaseResolver):
if cache_entry is not None:
logger.debug("Using cached wheel link: %s", cache_entry.link)
if req.link is req.original_link and cache_entry.persistent:
- req.original_link_is_in_wheel_cache = True
+ req.cached_wheel_source_link = req.link
if cache_entry.origin is not None:
req.download_info = cache_entry.origin
else:
# Legacy cache entry that does not have origin.json.
- # download_info may miss the archive_info.hash field.
+ # download_info may miss the archive_info.hashes field.
req.download_info = direct_url_from_link(
req.link, link_is_in_wheel_cache=cache_entry.persistent
)
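For context: both ``origin.json`` cache entries and ``download_info`` carry a PEP 610 direct-URL document, and the legacy fallback above can only reconstruct the URL, not the hashes. A representative document, with illustrative placeholder values:

# PEP 610-style data as recorded for an archive download (values are
# placeholders, not from any real package)
download_info = {
    "url": "https://files.example.com/pkg-1.0.tar.gz",
    "archive_info": {
        # legacy cache entries predate this field, hence the comment above
        "hashes": {"sha256": "<hex digest>"},
    },
}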
diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py
index fe83a6123..31020e27a 100644
--- a/src/pip/_internal/resolution/resolvelib/candidates.py
+++ b/src/pip/_internal/resolution/resolvelib/candidates.py
@@ -71,6 +71,7 @@ def make_install_req_from_link(
)
ireq.original_link = template.original_link
ireq.link = link
+ ireq.extras = template.extras
return ireq
@@ -78,7 +79,7 @@ def make_install_req_from_editable(
link: Link, template: InstallRequirement
) -> InstallRequirement:
assert template.editable, "template not editable"
- return install_req_from_editable(
+ ireq = install_req_from_editable(
link.url,
user_supplied=template.user_supplied,
comes_from=template.comes_from,
@@ -90,6 +91,8 @@ def make_install_req_from_editable(
hash_options=template.hash_options,
config_settings=template.config_settings,
)
+ ireq.extras = template.extras
+ return ireq
def _make_install_req_from_dist(
@@ -256,7 +259,7 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
version: Optional[CandidateVersion] = None,
) -> None:
source_link = link
- cache_entry = factory.get_wheel_cache_entry(link, name)
+ cache_entry = factory.get_wheel_cache_entry(source_link, name)
if cache_entry is not None:
logger.debug("Using cached wheel link: %s", cache_entry.link)
link = cache_entry.link
@@ -274,13 +277,15 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
)
if cache_entry is not None:
+ assert ireq.link.is_wheel
+ assert ireq.link.is_file
if cache_entry.persistent and template.link is template.original_link:
- ireq.original_link_is_in_wheel_cache = True
+ ireq.cached_wheel_source_link = source_link
if cache_entry.origin is not None:
ireq.download_info = cache_entry.origin
else:
# Legacy cache entry that does not have origin.json.
- # download_info may miss the archive_info.hash field.
+ # download_info may miss the archive_info.hashes field.
ireq.download_info = direct_url_from_link(
source_link, link_is_in_wheel_cache=cache_entry.persistent
)
diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py
index a4c24b52a..0331297b8 100644
--- a/src/pip/_internal/resolution/resolvelib/factory.py
+++ b/src/pip/_internal/resolution/resolvelib/factory.py
@@ -535,7 +535,7 @@ class Factory:
hash mismatches. Furthermore, cached wheels at present have
nondeterministic contents due to file modification times.
"""
- if self._wheel_cache is None or self.preparer.require_hashes:
+ if self._wheel_cache is None:
return None
return self._wheel_cache.get_cache_entry(
link=link,
@@ -632,7 +632,6 @@ class Factory:
e: "ResolutionImpossible[Requirement, Candidate]",
constraints: Dict[str, Constraint],
) -> InstallationError:
-
assert e.causes, "Installation error reported with no cause"
# If one of the things we can't solve is "we need Python X.Y",
diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py
index f561f1f1e..06addc0dd 100644
--- a/src/pip/_internal/resolution/resolvelib/requirements.py
+++ b/src/pip/_internal/resolution/resolvelib/requirements.py
@@ -64,7 +64,6 @@ class SpecifierRequirement(Requirement):
return format_name(self.project_name, self._extras)
def format_for_error(self) -> str:
-
# Convert comma-separated specifiers into "A, B, ..., F and G"
# This makes the specifier a bit more "human readable", without
# risking a change in meaning. (Hopefully! Not all edge cases have
diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py
index a605d6c25..47bbfecce 100644
--- a/src/pip/_internal/resolution/resolvelib/resolver.py
+++ b/src/pip/_internal/resolution/resolvelib/resolver.py
@@ -88,9 +88,9 @@ class Resolver(BaseResolver):
)
try:
- try_to_avoid_resolution_too_deep = 2000000
+ limit_how_complex_resolution_can_be = 200000
result = self._result = resolver.resolve(
- collected.requirements, max_rounds=try_to_avoid_resolution_too_deep
+ collected.requirements, max_rounds=limit_how_complex_resolution_can_be
)
except ResolutionImpossible as e:
diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py
index db6daf718..72bd6f25a 100644
--- a/src/pip/_internal/utils/deprecation.py
+++ b/src/pip/_internal/utils/deprecation.py
@@ -118,44 +118,3 @@ def deprecated(
raise PipDeprecationWarning(message)
warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
-
-
-class LegacyInstallReason:
- def __init__(
- self,
- reason: str,
- replacement: Optional[str] = None,
- gone_in: Optional[str] = None,
- feature_flag: Optional[str] = None,
- issue: Optional[int] = None,
- emit_after_success: bool = False,
- emit_before_install: bool = False,
- ):
- self._reason = reason
- self._replacement = replacement
- self._gone_in = gone_in
- self._feature_flag = feature_flag
- self._issue = issue
- self.emit_after_success = emit_after_success
- self.emit_before_install = emit_before_install
-
- def emit_deprecation(self, name: str) -> None:
- deprecated(
- reason=self._reason.format(name=name),
- replacement=self._replacement,
- gone_in=self._gone_in,
- feature_flag=self._feature_flag,
- issue=self._issue,
- )
-
-
-LegacyInstallReasonFailedBdistWheel = LegacyInstallReason(
- reason=(
- "{name} was installed using the legacy 'setup.py install' "
- "method, because a wheel could not be built for it."
- ),
- replacement="to fix the wheel build issue reported above",
- gone_in="23.1",
- issue=8368,
- emit_after_success=True,
-)
diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py
index 76727306a..843cffc6b 100644
--- a/src/pip/_internal/utils/hashes.py
+++ b/src/pip/_internal/utils/hashes.py
@@ -105,6 +105,13 @@ class Hashes:
with open(path, "rb") as file:
return self.check_against_file(file)
+ def has_one_of(self, hashes: Dict[str, str]) -> bool:
+ """Return whether any of the given hashes are allowed."""
+ for hash_name, hex_digest in hashes.items():
+ if self.is_hash_allowed(hash_name, hex_digest):
+ return True
+ return False
+
def __bool__(self) -> bool:
"""Return whether I know any known-good hashes."""
return bool(self._allowed)
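A short usage sketch of the new ``has_one_of`` helper, assuming pip's internal module layout; the digest is computed inline so the example is self-checking:

import hashlib
from pip._internal.utils.hashes import Hashes  # internal API, may move

digest = hashlib.sha256(b"example").hexdigest()
allowed = Hashes({"sha256": [digest]})

# one matching (name, digest) pair suffices ...
assert allowed.has_one_of({"sha256": digest, "md5": "0" * 32})
# ... and no match at all returns False
assert not allowed.has_one_of({"md5": "0" * 32})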
diff --git a/src/pip/_internal/utils/setuptools_build.py b/src/pip/_internal/utils/setuptools_build.py
index 0662915cb..96d1b2460 100644
--- a/src/pip/_internal/utils/setuptools_build.py
+++ b/src/pip/_internal/utils/setuptools_build.py
@@ -144,48 +144,3 @@ def make_setuptools_egg_info_args(
args += ["--egg-base", egg_info_dir]
return args
-
-
-def make_setuptools_install_args(
- setup_py_path: str,
- *,
- global_options: Sequence[str],
- record_filename: str,
- root: Optional[str],
- prefix: Optional[str],
- header_dir: Optional[str],
- home: Optional[str],
- use_user_site: bool,
- no_user_config: bool,
- pycompile: bool,
-) -> List[str]:
- assert not (use_user_site and prefix)
- assert not (use_user_site and root)
-
- args = make_setuptools_shim_args(
- setup_py_path,
- global_options=global_options,
- no_user_config=no_user_config,
- unbuffered_output=True,
- )
- args += ["install", "--record", record_filename]
- args += ["--single-version-externally-managed"]
-
- if root is not None:
- args += ["--root", root]
- if prefix is not None:
- args += ["--prefix", prefix]
- if home is not None:
- args += ["--home", home]
- if use_user_site:
- args += ["--user", "--prefix="]
-
- if pycompile:
- args += ["--compile"]
- else:
- args += ["--no-compile"]
-
- if header_dir:
- args += ["--install-headers", header_dir]
-
- return args
diff --git a/src/pip/_vendor/README.rst b/src/pip/_vendor/README.rst
index 077f1abf7..a21314543 100644
--- a/src/pip/_vendor/README.rst
+++ b/src/pip/_vendor/README.rst
@@ -118,6 +118,30 @@ Vendoring is automated via the `vendoring <https://pypi.org/project/vendoring/>`
``pip/_vendor/vendor.txt`` and the different patches in
``tools/vendoring/patches``.
Launch it via ``vendoring sync . -v`` (requires ``vendoring>=0.2.2``).
+Tool configuration is done via ``pyproject.toml``.
+
+
+Managing Local Patches
+======================
+
+The ``vendoring`` tool automatically applies our local patches, but when a
+library is updated, the patches sometimes no longer apply cleanly. In that
+case, the update will fail. To resolve this, take the following steps:
+
+1. Revert any incomplete changes in the revendoring branch, to ensure you have
+ a clean starting point.
+2. Run the revendoring of the library with a problem again: ``nox -s vendoring
+ -- --upgrade <library_name>``.
+3. This will fail again, but you will have the original source in your working
+ directory. Review the existing patch against the source, and modify the patch
+ to reflect the new version of the source. If you ``git add`` the changes the
+ vendoring made, you can modify the source to reflect the patch file and then
+ generate a new patch with ``git diff``.
+4. Now, revert everything *except* the patch file changes. Leave the modified
+ patch file unstaged but saved in the working tree.
+5. Re-run the vendoring. This time, it should pick up the changed patch file
+ and apply it cleanly. The patch file changes will be committed along with the
+ revendoring, so the new commit should be ready to test and publish as a PR.
Debundling
diff --git a/src/pip/_vendor/msgpack/__init__.py b/src/pip/_vendor/msgpack/__init__.py
index 507102189..1300b8660 100644
--- a/src/pip/_vendor/msgpack/__init__.py
+++ b/src/pip/_vendor/msgpack/__init__.py
@@ -6,8 +6,8 @@ import os
import sys
-version = (1, 0, 4)
-__version__ = "1.0.4"
+version = (1, 0, 5)
+__version__ = "1.0.5"
if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2:
diff --git a/src/pip/_vendor/msgpack/ext.py b/src/pip/_vendor/msgpack/ext.py
index 25544c555..23e0d6b41 100644
--- a/src/pip/_vendor/msgpack/ext.py
+++ b/src/pip/_vendor/msgpack/ext.py
@@ -56,7 +56,7 @@ class Timestamp(object):
Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns.
"""
if not isinstance(seconds, int_types):
- raise TypeError("seconds must be an interger")
+ raise TypeError("seconds must be an integer")
if not isinstance(nanoseconds, int_types):
raise TypeError("nanoseconds must be an integer")
if not (0 <= nanoseconds < 10**9):
diff --git a/src/pip/_vendor/msgpack/fallback.py b/src/pip/_vendor/msgpack/fallback.py
index f560c7b55..e8cebc1be 100644
--- a/src/pip/_vendor/msgpack/fallback.py
+++ b/src/pip/_vendor/msgpack/fallback.py
@@ -814,7 +814,7 @@ class Packer(object):
self._pack_raw_header(n)
return self._buffer.write(obj)
if check(obj, memoryview):
- n = len(obj) * obj.itemsize
+ n = obj.nbytes
if n >= 2**32:
raise ValueError("Memoryview is too large")
self._pack_bin_header(n)
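The switch to ``obj.nbytes`` matters for multi-dimensional views, where ``len(obj)`` only counts the first dimension. A quick demonstration:

m = memoryview(bytes(12)).cast("B", (3, 4))
assert len(m) * m.itemsize == 3  # undercounts: len() is the first dimension
assert m.nbytes == 12            # the actual buffer size being packed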
diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py
index 0ec74f8a6..a85aca10f 100644
--- a/src/pip/_vendor/pkg_resources/__init__.py
+++ b/src/pip/_vendor/pkg_resources/__init__.py
@@ -12,6 +12,12 @@ The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
+
+This module is deprecated. Users are directed to
+`importlib.resources <https://docs.python.org/3/library/importlib.resources.html>`_
+and
+`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_
+instead.
"""
import sys
@@ -34,7 +40,6 @@ import email.parser
import errno
import tempfile
import textwrap
-import itertools
import inspect
import ntpath
import posixpath
@@ -54,8 +59,10 @@ except NameError:
# capture these to bypass sandboxing
from os import utime
+
try:
from os import mkdir, rename, unlink
+
WRITE_SUPPORT = True
except ImportError:
# no write support, probably under GAE
@@ -66,6 +73,7 @@ from os.path import isdir, split
try:
import importlib.machinery as importlib_machinery
+
# access attribute to force import under delayed import mechanisms.
importlib_machinery.__name__
except ImportError:
@@ -79,6 +87,7 @@ from pip._internal.utils._jaraco_text import (
from pip._vendor import platformdirs
from pip._vendor import packaging
+
__import__('pip._vendor.packaging.version')
__import__('pip._vendor.packaging.specifiers')
__import__('pip._vendor.packaging.requirements')
@@ -109,6 +118,12 @@ _namespace_handlers = None
_namespace_packages = None
+warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning)
+
+
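Downstream code that must keep importing ``pkg_resources`` quietly can filter on this exact message; a minimal sketch:

import warnings

with warnings.catch_warnings():
    # the message argument is a regex matched against the warning text
    warnings.filterwarnings("ignore", message="pkg_resources is deprecated")
    import pkg_resources  # the first import emits the warning; it is swallowed here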
+_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
+
+
class PEP440Warning(RuntimeWarning):
"""
Used when there is an issue with a version or specifier not complying with
@@ -116,16 +131,7 @@ class PEP440Warning(RuntimeWarning):
"""
-def parse_version(v):
- try:
- return packaging.version.Version(v)
- except packaging.version.InvalidVersion:
- warnings.warn(
- f"{v} is an invalid version and will not be supported in "
- "a future release",
- PkgResourcesDeprecationWarning,
- )
- return packaging.version.LegacyVersion(v)
+parse_version = packaging.version.Version
_state_vars = {}
@@ -197,51 +203,87 @@ def get_supported_platform():
__all__ = [
# Basic resource access and distribution/entry point discovery
- 'require', 'run_script', 'get_provider', 'get_distribution',
- 'load_entry_point', 'get_entry_map', 'get_entry_info',
+ 'require',
+ 'run_script',
+ 'get_provider',
+ 'get_distribution',
+ 'load_entry_point',
+ 'get_entry_map',
+ 'get_entry_info',
'iter_entry_points',
- 'resource_string', 'resource_stream', 'resource_filename',
- 'resource_listdir', 'resource_exists', 'resource_isdir',
-
+ 'resource_string',
+ 'resource_stream',
+ 'resource_filename',
+ 'resource_listdir',
+ 'resource_exists',
+ 'resource_isdir',
# Environmental control
- 'declare_namespace', 'working_set', 'add_activation_listener',
- 'find_distributions', 'set_extraction_path', 'cleanup_resources',
+ 'declare_namespace',
+ 'working_set',
+ 'add_activation_listener',
+ 'find_distributions',
+ 'set_extraction_path',
+ 'cleanup_resources',
'get_default_cache',
-
# Primary implementation classes
- 'Environment', 'WorkingSet', 'ResourceManager',
- 'Distribution', 'Requirement', 'EntryPoint',
-
+ 'Environment',
+ 'WorkingSet',
+ 'ResourceManager',
+ 'Distribution',
+ 'Requirement',
+ 'EntryPoint',
# Exceptions
- 'ResolutionError', 'VersionConflict', 'DistributionNotFound',
- 'UnknownExtra', 'ExtractionError',
-
+ 'ResolutionError',
+ 'VersionConflict',
+ 'DistributionNotFound',
+ 'UnknownExtra',
+ 'ExtractionError',
# Warnings
'PEP440Warning',
-
# Parsing functions and string utilities
- 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
- 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
- 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
-
+ 'parse_requirements',
+ 'parse_version',
+ 'safe_name',
+ 'safe_version',
+ 'get_platform',
+ 'compatible_platforms',
+ 'yield_lines',
+ 'split_sections',
+ 'safe_extra',
+ 'to_filename',
+ 'invalid_marker',
+ 'evaluate_marker',
# filesystem utilities
- 'ensure_directory', 'normalize_path',
-
+ 'ensure_directory',
+ 'normalize_path',
# Distribution "precedence" constants
- 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
-
+ 'EGG_DIST',
+ 'BINARY_DIST',
+ 'SOURCE_DIST',
+ 'CHECKOUT_DIST',
+ 'DEVELOP_DIST',
# "Provider" interfaces, implementations, and registration/lookup APIs
- 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
- 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
- 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
- 'register_finder', 'register_namespace_handler', 'register_loader_type',
- 'fixup_namespace_packages', 'get_importer',
-
+ 'IMetadataProvider',
+ 'IResourceProvider',
+ 'FileMetadata',
+ 'PathMetadata',
+ 'EggMetadata',
+ 'EmptyProvider',
+ 'empty_provider',
+ 'NullProvider',
+ 'EggProvider',
+ 'DefaultProvider',
+ 'ZipProvider',
+ 'register_finder',
+ 'register_namespace_handler',
+ 'register_loader_type',
+ 'fixup_namespace_packages',
+ 'get_importer',
# Warnings
'PkgResourcesDeprecationWarning',
-
# Deprecated/backward compatibility only
- 'run_main', 'AvailableDistributions',
+ 'run_main',
+ 'AvailableDistributions',
]
@@ -300,8 +342,10 @@ class ContextualVersionConflict(VersionConflict):
class DistributionNotFound(ResolutionError):
"""A requested distribution was not found"""
- _template = ("The '{self.req}' distribution was not found "
- "and is required by {self.requirers_str}")
+ _template = (
+ "The '{self.req}' distribution was not found "
+ "and is required by {self.requirers_str}"
+ )
@property
def req(self):
@@ -395,7 +439,8 @@ def get_build_platform():
version = _macos_vers()
machine = os.uname()[4].replace(" ", "_")
return "macosx-%d.%d-%s" % (
- int(version[0]), int(version[1]),
+ int(version[0]),
+ int(version[1]),
_macos_arch(machine),
)
except ValueError:
@@ -436,15 +481,18 @@ def compatible_platforms(provided, required):
if provDarwin:
dversion = int(provDarwin.group(1))
macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
- if dversion == 7 and macosversion >= "10.3" or \
- dversion == 8 and macosversion >= "10.4":
+ if (
+ dversion == 7
+ and macosversion >= "10.3"
+ or dversion == 8
+ and macosversion >= "10.4"
+ ):
return True
# egg isn't macOS or legacy darwin
return False
# are they the same major version and machine type?
- if provMac.group(1) != reqMac.group(1) or \
- provMac.group(3) != reqMac.group(3):
+ if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
return False
# is the required OS major update >= the provided one?
@@ -506,8 +554,8 @@ class IMetadataProvider:
def get_metadata_lines(name):
"""Yield named metadata resource as list of non-blank non-comment lines
- Leading and trailing whitespace is stripped from each line, and lines
- with ``#`` as the first non-blank character are omitted."""
+ Leading and trailing whitespace is stripped from each line, and lines
+ with ``#`` as the first non-blank character are omitted."""
def metadata_isdir(name):
"""Is the named metadata a directory? (like ``os.path.isdir()``)"""
@@ -720,9 +768,14 @@ class WorkingSet:
keys2.append(dist.key)
self._added_new(dist)
- # FIXME: 'WorkingSet.resolve' is too complex (11)
- def resolve(self, requirements, env=None, installer=None, # noqa: C901
- replace_conflicting=False, extras=None):
+ def resolve(
+ self,
+ requirements,
+ env=None,
+ installer=None,
+ replace_conflicting=False,
+ extras=None,
+ ):
"""List all distributions needed to (recursively) meet `requirements`
`requirements` must be a sequence of ``Requirement`` objects. `env`,
@@ -771,33 +824,9 @@ class WorkingSet:
if not req_extras.markers_pass(req, extras):
continue
- dist = best.get(req.key)
- if dist is None:
- # Find the best distribution and add it to the map
- dist = self.by_key.get(req.key)
- if dist is None or (dist not in req and replace_conflicting):
- ws = self
- if env is None:
- if dist is None:
- env = Environment(self.entries)
- else:
- # Use an empty environment and workingset to avoid
- # any further conflicts with the conflicting
- # distribution
- env = Environment([])
- ws = WorkingSet([])
- dist = best[req.key] = env.best_match(
- req, ws, installer,
- replace_conflicting=replace_conflicting
- )
- if dist is None:
- requirers = required_by.get(req, None)
- raise DistributionNotFound(req, requirers)
- to_activate.append(dist)
- if dist not in req:
- # Oops, the "best" so far conflicts with a dependency
- dependent_req = required_by[req]
- raise VersionConflict(dist, req).with_context(dependent_req)
+ dist = self._resolve_dist(
+ req, best, replace_conflicting, env, installer, required_by, to_activate
+ )
# push the new requirements onto the stack
new_requirements = dist.requires(req.extras)[::-1]
@@ -813,8 +842,38 @@ class WorkingSet:
# return list of distros to activate
return to_activate
- def find_plugins(
- self, plugin_env, full_env=None, installer=None, fallback=True):
+ def _resolve_dist(
+ self, req, best, replace_conflicting, env, installer, required_by, to_activate
+ ):
+ dist = best.get(req.key)
+ if dist is None:
+ # Find the best distribution and add it to the map
+ dist = self.by_key.get(req.key)
+ if dist is None or (dist not in req and replace_conflicting):
+ ws = self
+ if env is None:
+ if dist is None:
+ env = Environment(self.entries)
+ else:
+ # Use an empty environment and workingset to avoid
+ # any further conflicts with the conflicting
+ # distribution
+ env = Environment([])
+ ws = WorkingSet([])
+ dist = best[req.key] = env.best_match(
+ req, ws, installer, replace_conflicting=replace_conflicting
+ )
+ if dist is None:
+ requirers = required_by.get(req, None)
+ raise DistributionNotFound(req, requirers)
+ to_activate.append(dist)
+ if dist not in req:
+ # Oops, the "best" so far conflicts with a dependency
+ dependent_req = required_by[req]
+ raise VersionConflict(dist, req).with_context(dependent_req)
+ return dist
+
+ def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
"""Find all activatable distributions in `plugin_env`
Example usage::
@@ -867,9 +926,7 @@ class WorkingSet:
list(map(shadow_set.add, self))
for project_name in plugin_projects:
-
for dist in plugin_env[project_name]:
-
req = [dist.as_requirement()]
try:
@@ -933,8 +990,11 @@ class WorkingSet:
def __getstate__(self):
return (
- self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
- self.normalized_to_canonical_keys.copy(), self.callbacks[:]
+ self.entries[:],
+ self.entry_keys.copy(),
+ self.by_key.copy(),
+ self.normalized_to_canonical_keys.copy(),
+ self.callbacks[:],
)
def __setstate__(self, e_k_b_n_c):
@@ -970,8 +1030,8 @@ class Environment:
"""Searchable snapshot of distributions on a search path"""
def __init__(
- self, search_path=None, platform=get_supported_platform(),
- python=PY_MAJOR):
+ self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
+ ):
"""Snapshot distributions available on a search path
Any distributions found on `search_path` are added to the environment.
@@ -1038,16 +1098,14 @@ class Environment:
return self._distmap.get(distribution_key, [])
def add(self, dist):
- """Add `dist` if we ``can_add()`` it and it has not already been added
- """
+ """Add `dist` if we ``can_add()`` it and it has not already been added"""
if self.can_add(dist) and dist.has_version():
dists = self._distmap.setdefault(dist.key, [])
if dist not in dists:
dists.append(dist)
dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
- def best_match(
- self, req, working_set, installer=None, replace_conflicting=False):
+ def best_match(self, req, working_set, installer=None, replace_conflicting=False):
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
@@ -1134,6 +1192,7 @@ class ExtractionError(RuntimeError):
class ResourceManager:
"""Manage resource extraction and packages"""
+
extraction_path = None
def __init__(self):
@@ -1145,9 +1204,7 @@ class ResourceManager:
def resource_isdir(self, package_or_requirement, resource_name):
"""Is the named resource an existing directory?"""
- return get_provider(package_or_requirement).resource_isdir(
- resource_name
- )
+ return get_provider(package_or_requirement).resource_isdir(resource_name)
def resource_filename(self, package_or_requirement, resource_name):
"""Return a true filesystem path for specified resource"""
@@ -1169,9 +1226,7 @@ class ResourceManager:
def resource_listdir(self, package_or_requirement, resource_name):
"""List the contents of the named resource directory"""
- return get_provider(package_or_requirement).resource_listdir(
- resource_name
- )
+ return get_provider(package_or_requirement).resource_listdir(resource_name)
def extraction_error(self):
"""Give an error message for problems extracting file(s)"""
@@ -1179,7 +1234,8 @@ class ResourceManager:
old_exc = sys.exc_info()[1]
cache_path = self.extraction_path or get_default_cache()
- tmpl = textwrap.dedent("""
+ tmpl = textwrap.dedent(
+ """
Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s)
@@ -1194,7 +1250,8 @@ class ResourceManager:
Perhaps your account does not have write access to this directory?
You can change the cache directory by setting the PYTHON_EGG_CACHE
environment variable to point to an accessible directory.
- """).lstrip()
+ """
+ ).lstrip()
err = ExtractionError(tmpl.format(**locals()))
err.manager = self
err.cache_path = cache_path
@@ -1293,9 +1350,7 @@ class ResourceManager:
``cleanup_resources()``.)
"""
if self.cached_files:
- raise ValueError(
- "Can't change extraction path, files already extracted"
- )
+ raise ValueError("Can't change extraction path, files already extracted")
self.extraction_path = path
@@ -1319,9 +1374,8 @@ def get_default_cache():
or a platform-relevant user cache dir for an app
named "Python-Eggs".
"""
- return (
- os.environ.get('PYTHON_EGG_CACHE')
- or platformdirs.user_cache_dir(appname='Python-Eggs')
+ return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir(
+ appname='Python-Eggs'
)
@@ -1345,6 +1399,38 @@ def safe_version(version):
return re.sub('[^A-Za-z0-9.]+', '-', version)
+def _forgiving_version(version):
+ """Fallback when ``safe_version`` is not safe enough
+ >>> parse_version(_forgiving_version('0.23ubuntu1'))
+ <Version('0.23.dev0+sanitized.ubuntu1')>
+ >>> parse_version(_forgiving_version('0.23-'))
+ <Version('0.23.dev0+sanitized')>
+ >>> parse_version(_forgiving_version('0.-_'))
+ <Version('0.dev0+sanitized')>
+ >>> parse_version(_forgiving_version('42.+?1'))
+ <Version('42.dev0+sanitized.1')>
+ >>> parse_version(_forgiving_version('hello world'))
+ <Version('0.dev0+sanitized.hello.world')>
+ """
+ version = version.replace(' ', '.')
+ match = _PEP440_FALLBACK.search(version)
+ if match:
+ safe = match["safe"]
+ rest = version[len(safe):]
+ else:
+ safe = "0"
+ rest = version
+ local = f"sanitized.{_safe_segment(rest)}".strip(".")
+ return f"{safe}.dev0+{local}"
+
+
+def _safe_segment(segment):
+ """Convert an arbitrary string into a safe segment"""
+ segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
+ segment = re.sub('-[^A-Za-z0-9]+', '-', segment)
+ return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
+
+
def safe_extra(extra):
"""Convert an arbitrary string to a standard 'extra' name
@@ -1458,8 +1544,9 @@ class NullProvider:
script = 'scripts/' + script_name
if not self.has_metadata(script):
raise ResolutionError(
- "Script {script!r} not found in metadata at {self.egg_info!r}"
- .format(**locals()),
+ "Script {script!r} not found in metadata at {self.egg_info!r}".format(
+ **locals()
+ ),
)
script_text = self.get_metadata(script).replace('\r\n', '\n')
script_text = script_text.replace('\r', '\n')
@@ -1472,8 +1559,12 @@ class NullProvider:
exec(code, namespace, namespace)
else:
from linecache import cache
+
cache[script_filename] = (
- len(script_text), 0, script_text.split('\n'), script_filename
+ len(script_text),
+ 0,
+ script_text.split('\n'),
+ script_filename,
)
script_code = compile(script_text, script_filename, 'exec')
exec(script_code, namespace, namespace)
@@ -1553,9 +1644,9 @@ is not allowed.
AttributeError: ...
"""
invalid = (
- os.path.pardir in path.split(posixpath.sep) or
- posixpath.isabs(path) or
- ntpath.isabs(path)
+ os.path.pardir in path.split(posixpath.sep)
+ or posixpath.isabs(path)
+ or ntpath.isabs(path)
)
if not invalid:
return
@@ -1637,7 +1728,10 @@ class DefaultProvider(EggProvider):
@classmethod
def _register(cls):
- loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
+ loader_names = (
+ 'SourceFileLoader',
+ 'SourcelessFileLoader',
+ )
for name in loader_names:
loader_cls = getattr(importlib_machinery, name, type(None))
register_loader_type(loader_cls, cls)
@@ -1697,6 +1791,7 @@ class MemoizedZipManifests(ZipManifests):
"""
Memoized zipfile manifests.
"""
+
manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
def load(self, path):
@@ -1730,20 +1825,16 @@ class ZipProvider(EggProvider):
if fspath == self.loader.archive:
return ''
if fspath.startswith(self.zip_pre):
- return fspath[len(self.zip_pre):]
- raise AssertionError(
- "%s is not a subpath of %s" % (fspath, self.zip_pre)
- )
+ return fspath[len(self.zip_pre) :]
+ raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))
def _parts(self, zip_path):
# Convert a zipfile subpath into an egg-relative path part list.
# pseudo-fs path
fspath = self.zip_pre + zip_path
if fspath.startswith(self.egg_root + os.sep):
- return fspath[len(self.egg_root) + 1:].split(os.sep)
- raise AssertionError(
- "%s is not a subpath of %s" % (fspath, self.egg_root)
- )
+ return fspath[len(self.egg_root) + 1 :].split(os.sep)
+ raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))
@property
def zipinfo(self):
@@ -1773,25 +1864,20 @@ class ZipProvider(EggProvider):
# FIXME: 'ZipProvider._extract_resource' is too complex (12)
def _extract_resource(self, manager, zip_path): # noqa: C901
-
if zip_path in self._index():
for name in self._index()[zip_path]:
- last = self._extract_resource(
- manager, os.path.join(zip_path, name)
- )
+ last = self._extract_resource(manager, os.path.join(zip_path, name))
# return the extracted directory name
return os.path.dirname(last)
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
if not WRITE_SUPPORT:
- raise IOError('"os.rename" and "os.unlink" are not supported '
- 'on this platform')
- try:
-
- real_path = manager.get_cache_path(
- self.egg_name, self._parts(zip_path)
+ raise IOError(
+ '"os.rename" and "os.unlink" are not supported ' 'on this platform'
)
+ try:
+ real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
if self._is_current(real_path, zip_path):
return real_path
@@ -2027,70 +2113,21 @@ def find_nothing(importer, path_item, only=False):
register_finder(object, find_nothing)
-def _by_version_descending(names):
- """
- Given a list of filenames, return them in descending order
- by version number.
-
- >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
- >>> _by_version_descending(names)
- ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo']
- >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
- >>> _by_version_descending(names)
- ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
- >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
- >>> _by_version_descending(names)
- ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
- """
- def try_parse(name):
- """
- Attempt to parse as a version or return a null version.
- """
- try:
- return packaging.version.Version(name)
- except Exception:
- return packaging.version.Version('0')
-
- def _by_version(name):
- """
- Parse each component of the filename
- """
- name, ext = os.path.splitext(name)
- parts = itertools.chain(name.split('-'), [ext])
- return [try_parse(part) for part in parts]
-
- return sorted(names, key=_by_version, reverse=True)
-
-
def find_on_path(importer, path_item, only=False):
"""Yield distributions accessible on a sys.path directory"""
path_item = _normalize_cached(path_item)
if _is_unpacked_egg(path_item):
yield Distribution.from_filename(
- path_item, metadata=PathMetadata(
- path_item, os.path.join(path_item, 'EGG-INFO')
- )
+ path_item,
+ metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')),
)
return
- entries = (
- os.path.join(path_item, child)
- for child in safe_listdir(path_item)
- )
-
- # for performance, before sorting by version,
- # screen entries for only those that will yield
- # distributions
- filtered = (
- entry
- for entry in entries
- if dist_factory(path_item, entry, only)
- )
+ entries = (os.path.join(path_item, child) for child in safe_listdir(path_item))
# scan for .egg and .egg-info in directory
- path_item_entries = _by_version_descending(filtered)
- for entry in path_item_entries:
+ for entry in sorted(entries):
fullpath = os.path.join(path_item, entry)
factory = dist_factory(path_item, entry, only)
for dist in factory(fullpath):
@@ -2101,19 +2138,18 @@ def dist_factory(path_item, entry, only):
"""Return a dist_factory for the given entry."""
lower = entry.lower()
is_egg_info = lower.endswith('.egg-info')
- is_dist_info = (
- lower.endswith('.dist-info') and
- os.path.isdir(os.path.join(path_item, entry))
+ is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
+ os.path.join(path_item, entry)
)
is_meta = is_egg_info or is_dist_info
return (
distributions_from_metadata
- if is_meta else
- find_distributions
- if not only and _is_egg_path(entry) else
- resolve_egg_link
- if not only and lower.endswith('.egg-link') else
- NoDists()
+ if is_meta
+ else find_distributions
+ if not only and _is_egg_path(entry)
+ else resolve_egg_link
+ if not only and lower.endswith('.egg-link')
+ else NoDists()
)
@@ -2125,6 +2161,7 @@ class NoDists:
>>> list(NoDists()('anything'))
[]
"""
+
def __bool__(self):
return False
@@ -2159,7 +2196,10 @@ def distributions_from_metadata(path):
metadata = FileMetadata(path)
entry = os.path.basename(path)
yield Distribution.from_location(
- root, entry, metadata, precedence=DEVELOP_DIST,
+ root,
+ entry,
+ metadata,
+ precedence=DEVELOP_DIST,
)
@@ -2181,17 +2221,16 @@ def resolve_egg_link(path):
"""
referenced_paths = non_empty_lines(path)
resolved_paths = (
- os.path.join(os.path.dirname(path), ref)
- for ref in referenced_paths
+ os.path.join(os.path.dirname(path), ref) for ref in referenced_paths
)
dist_groups = map(find_distributions, resolved_paths)
return next(dist_groups, ())
-register_finder(pkgutil.ImpImporter, find_on_path)
+if hasattr(pkgutil, 'ImpImporter'):
+ register_finder(pkgutil.ImpImporter, find_on_path)
-if hasattr(importlib_machinery, 'FileFinder'):
- register_finder(importlib_machinery.FileFinder, find_on_path)
+register_finder(importlib_machinery.FileFinder, find_on_path)
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
@@ -2289,6 +2328,15 @@ def _rebuild_mod_path(orig_path, package_name, module):
def declare_namespace(packageName):
"""Declare that package 'packageName' is a namespace package"""
+ msg = (
+ f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
+ "Implementing implicit namespace packages (as specified in PEP 420) "
+ "is preferred to `pkg_resources.declare_namespace`. "
+ "See https://setuptools.pypa.io/en/latest/references/"
+ "keywords.html#keyword-namespace-packages"
+ )
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
+
_imp.acquire_lock()
try:
if packageName in _namespace_packages:
@@ -2345,11 +2393,11 @@ def file_ns_handler(importer, path_item, packageName, module):
return subpath
-register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
-register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+if hasattr(pkgutil, 'ImpImporter'):
+ register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
-if hasattr(importlib_machinery, 'FileFinder'):
- register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
@@ -2361,8 +2409,7 @@ register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
- return os.path.normcase(os.path.realpath(os.path.normpath(
- _cygwin_patch(filename))))
+ return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
def _cygwin_patch(filename): # pragma: nocover
@@ -2393,9 +2440,9 @@ def _is_egg_path(path):
def _is_zip_egg(path):
return (
- path.lower().endswith('.egg') and
- os.path.isfile(path) and
- zipfile.is_zipfile(path)
+ path.lower().endswith('.egg')
+ and os.path.isfile(path)
+ and zipfile.is_zipfile(path)
)
@@ -2403,9 +2450,8 @@ def _is_unpacked_egg(path):
"""
Determine if given path appears to be an unpacked egg.
"""
- return (
- path.lower().endswith('.egg') and
- os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
+ return path.lower().endswith('.egg') and os.path.isfile(
+ os.path.join(path, 'EGG-INFO', 'PKG-INFO')
)
@@ -2569,8 +2615,10 @@ def _version_from_file(lines):
Given an iterable of lines from a Metadata file, return
the value of the Version field, if present, or None otherwise.
"""
+
def is_version_line(line):
return line.lower().startswith('version:')
+
version_lines = filter(is_version_line, lines)
line = next(iter(version_lines), '')
_, _, value = line.partition(':')
@@ -2579,12 +2627,19 @@ def _version_from_file(lines):
class Distribution:
"""Wrap an actual or potential sys.path entry w/metadata"""
+
PKG_INFO = 'PKG-INFO'
def __init__(
- self, location=None, metadata=None, project_name=None,
- version=None, py_version=PY_MAJOR, platform=None,
- precedence=EGG_DIST):
+ self,
+ location=None,
+ metadata=None,
+ project_name=None,
+ version=None,
+ py_version=PY_MAJOR,
+ platform=None,
+ precedence=EGG_DIST,
+ ):
self.project_name = safe_name(project_name or 'Unknown')
if version is not None:
self._version = safe_version(version)
@@ -2607,8 +2662,13 @@ class Distribution:
'name', 'ver', 'pyver', 'plat'
)
return cls(
- location, metadata, project_name=project_name, version=version,
- py_version=py_version, platform=platform, **kw
+ location,
+ metadata,
+ project_name=project_name,
+ version=version,
+ py_version=py_version,
+ platform=platform,
+ **kw,
)._reload_version()
def _reload_version(self):
@@ -2617,7 +2677,7 @@ class Distribution:
@property
def hashcmp(self):
return (
- self.parsed_version,
+ self._forgiving_parsed_version,
self.precedence,
self.key,
self.location,
@@ -2664,35 +2724,42 @@ class Distribution:
@property
def parsed_version(self):
if not hasattr(self, "_parsed_version"):
- self._parsed_version = parse_version(self.version)
+ try:
+ self._parsed_version = parse_version(self.version)
+ except packaging.version.InvalidVersion as ex:
+ info = f"(package: {self.project_name})"
+ if hasattr(ex, "add_note"):
+ ex.add_note(info) # PEP 678
+ raise
+ raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None
return self._parsed_version
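The ``hasattr(ex, "add_note")`` guard is the portable PEP 678 idiom: attach context on Python 3.11+, and fall back to re-raising with the note folded into the message on older versions. A standalone sketch of the same pattern:

try:
    int("not-a-number")
except ValueError as ex:
    info = "(while parsing user input)"
    if hasattr(ex, "add_note"):  # PEP 678, Python 3.11+
        ex.add_note(info)        # the note prints after the traceback message
        raise
    raise ValueError(f"{ex} {info}") from None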
- def _warn_legacy_version(self):
- LV = packaging.version.LegacyVersion
- is_legacy = isinstance(self._parsed_version, LV)
- if not is_legacy:
- return
+ @property
+ def _forgiving_parsed_version(self):
+ try:
+ return self.parsed_version
+ except packaging.version.InvalidVersion as ex:
+ self._parsed_version = parse_version(_forgiving_version(self.version))
- # While an empty version is technically a legacy version and
- # is not a valid PEP 440 version, it's also unlikely to
- # actually come from someone and instead it is more likely that
- # it comes from setuptools attempting to parse a filename and
- # including it in the list. So for that we'll gate this warning
- # on if the version is anything at all or not.
- if not self.version:
- return
+ notes = "\n".join(getattr(ex, "__notes__", [])) # PEP 678
+ msg = f"""!!\n\n
+ *************************************************************************
+ {str(ex)}\n{notes}
+
+ This is a long overdue deprecation.
+ For the time being, `pkg_resources` will use `{self._parsed_version}`
+ as a replacement to avoid breaking existing environments,
+ but no future compatibility is guaranteed.
- tmpl = textwrap.dedent("""
- '{project_name} ({version})' is being parsed as a legacy,
- non PEP 440,
- version. You may find odd behavior and sort order.
- In particular it will be sorted as less than 0.0. It
- is recommended to migrate to PEP 440 compatible
- versions.
- """).strip().replace('\n', ' ')
+ If you maintain package {self.project_name} you should implement
+ the relevant changes to make the project compliant with PEP 440 immediately.
+ *************************************************************************
+ \n\n!!
+ """
+ warnings.warn(msg, DeprecationWarning)
- warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
+ return self._parsed_version
@property
def version(self):
@@ -2702,9 +2769,9 @@ class Distribution:
version = self._get_version()
if version is None:
path = self._get_metadata_path_for_display(self.PKG_INFO)
- msg = (
- "Missing 'Version:' header and/or {} file at path: {}"
- ).format(self.PKG_INFO, path)
+ msg = ("Missing 'Version:' header and/or {} file at path: {}").format(
+ self.PKG_INFO, path
+ )
raise ValueError(msg, self) from e
return version
@@ -2733,8 +2800,7 @@ class Distribution:
reqs = dm.pop(extra)
new_extra, _, marker = extra.partition(':')
fails_marker = marker and (
- invalid_marker(marker)
- or not evaluate_marker(marker)
+ invalid_marker(marker) or not evaluate_marker(marker)
)
if fails_marker:
reqs = []
@@ -2806,8 +2872,9 @@ class Distribution:
def egg_name(self):
"""Return what this distribution's standard .egg filename should be"""
filename = "%s-%s-py%s" % (
- to_filename(self.project_name), to_filename(self.version),
- self.py_version or PY_MAJOR
+ to_filename(self.project_name),
+ to_filename(self.version),
+ self.py_version or PY_MAJOR,
)
if self.platform:
@@ -2837,17 +2904,13 @@ class Distribution:
def __dir__(self):
return list(
set(super(Distribution, self).__dir__())
- | set(
- attr for attr in self._provider.__dir__()
- if not attr.startswith('_')
- )
+ | set(attr for attr in self._provider.__dir__() if not attr.startswith('_'))
)
@classmethod
def from_filename(cls, filename, metadata=None, **kw):
return cls.from_location(
- _normalize_cached(filename), os.path.basename(filename), metadata,
- **kw
+ _normalize_cached(filename), os.path.basename(filename), metadata, **kw
)
def as_requirement(self):
@@ -2959,14 +3022,18 @@ class Distribution:
nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
loc = normalize_path(self.location)
for modname in self._get_metadata('top_level.txt'):
- if (modname not in sys.modules or modname in nsp
- or modname in _namespace_packages):
+ if (
+ modname not in sys.modules
+ or modname in nsp
+ or modname in _namespace_packages
+ ):
continue
if modname in ('pkg_resources', 'setuptools', 'site'):
continue
fn = getattr(sys.modules[modname], '__file__', None)
- if fn and (normalize_path(fn).startswith(loc) or
- fn.startswith(self.location)):
+ if fn and (
+ normalize_path(fn).startswith(loc) or fn.startswith(self.location)
+ ):
continue
issue_warning(
"Module %s was already imported from %s, but %s is being added"
@@ -3018,6 +3085,7 @@ class DistInfoDistribution(Distribution):
Wrap an actual or potential sys.path entry
w/metadata, .dist-info style.
"""
+
PKG_INFO = 'METADATA'
EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
@@ -3103,8 +3171,7 @@ class Requirement(packaging.requirements.Requirement):
self.unsafe_name = self.name
project_name = safe_name(self.name)
self.project_name, self.key = project_name, project_name.lower()
- self.specs = [
- (spec.operator, spec.version) for spec in self.specifier]
+ self.specs = [(spec.operator, spec.version) for spec in self.specifier]
self.extras = tuple(map(safe_extra, self.extras))
self.hashCmp = (
self.key,
@@ -3116,10 +3183,7 @@ class Requirement(packaging.requirements.Requirement):
self.__hash = hash(self.hashCmp)
def __eq__(self, other):
- return (
- isinstance(other, Requirement) and
- self.hashCmp == other.hashCmp
- )
+ return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
def __ne__(self, other):
return not self == other
@@ -3144,7 +3208,7 @@ class Requirement(packaging.requirements.Requirement):
@staticmethod
def parse(s):
- req, = parse_requirements(s)
+ (req,) = parse_requirements(s)
return req
@@ -3282,10 +3346,7 @@ def _initialize_master_working_set():
# ensure that all distributions added to the working set in the future
# (e.g. by calling ``require()``) will get activated as well,
# with higher priority (replace=True).
- tuple(
- dist.activate(replace=False)
- for dist in working_set
- )
+ tuple(dist.activate(replace=False) for dist in working_set)
add_activation_listener(
lambda dist: dist.activate(replace=True),
existing=False,
diff --git a/src/pip/_vendor/platformdirs/__init__.py b/src/pip/_vendor/platformdirs/__init__.py
index 82d907163..c46a145cd 100644
--- a/src/pip/_vendor/platformdirs/__init__.py
+++ b/src/pip/_vendor/platformdirs/__init__.py
@@ -27,7 +27,6 @@ def _set_platform_dir_class() -> type[PlatformDirsABC]:
from pip._vendor.platformdirs.unix import Unix as Result
if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
-
if os.getenv("SHELL") or os.getenv("PREFIX"):
return Result
@@ -50,15 +49,23 @@ def user_data_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
- :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
+ :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data directory tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ roaming=roaming,
+ ensure_exists=ensure_exists,
+ ).user_data_dir
def site_data_dir(
@@ -66,15 +73,23 @@ def site_data_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data directory shared by users
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ multipath=multipath,
+ ensure_exists=ensure_exists,
+ ).site_data_dir
def user_config_dir(
@@ -82,15 +97,23 @@ def user_config_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
- :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
+ :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config directory tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ roaming=roaming,
+ ensure_exists=ensure_exists,
+ ).user_config_dir
def site_config_dir(
@@ -98,15 +121,23 @@ def site_config_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config directory shared by the users
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ multipath=multipath,
+ ensure_exists=ensure_exists,
+ ).site_config_dir
def user_cache_dir(
@@ -114,15 +145,47 @@ def user_cache_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+ :returns: cache directory tied to the user
+ """
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).user_cache_dir
+
+
+def site_cache_dir(
+ appname: str | None = None,
+ appauthor: str | None | Literal[False] = None,
+ version: str | None = None,
+ opinion: bool = True,
+ ensure_exists: bool = False,
+) -> str:
+ """
+ :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+ :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+ :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+ :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: cache directory tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).site_cache_dir
def user_state_dir(
@@ -130,15 +193,23 @@ def user_state_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
- :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
+ :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: state directory tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ roaming=roaming,
+ ensure_exists=ensure_exists,
+ ).user_state_dir
def user_log_dir(
@@ -146,15 +217,23 @@ def user_log_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: log directory tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).user_log_dir
def user_documents_dir() -> str:
@@ -169,15 +248,23 @@ def user_runtime_dir(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True,
+ ensure_exists: bool = False,
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: runtime directory tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_dir
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).user_runtime_dir
def user_data_path(
@@ -185,15 +272,23 @@ def user_data_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
- :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
+ :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data path tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ roaming=roaming,
+ ensure_exists=ensure_exists,
+ ).user_data_path
def site_data_path(
@@ -201,15 +296,23 @@ def site_data_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data path shared by users
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ multipath=multipath,
+ ensure_exists=ensure_exists,
+ ).site_data_path
def user_config_path(
@@ -217,15 +320,23 @@ def user_config_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
- :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
+ :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config path tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ roaming=roaming,
+ ensure_exists=ensure_exists,
+ ).user_config_path
def site_config_path(
@@ -233,15 +344,47 @@ def site_config_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config path shared by the users
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ multipath=multipath,
+ ensure_exists=ensure_exists,
+ ).site_config_path
+
+
+def site_cache_path(
+ appname: str | None = None,
+ appauthor: str | None | Literal[False] = None,
+ version: str | None = None,
+ opinion: bool = True,
+ ensure_exists: bool = False,
+) -> Path:
+ """
+ :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+ :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+ :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+ :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+ :returns: cache path shared by users
+ """
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).site_cache_path
def user_cache_path(
@@ -249,15 +392,23 @@ def user_cache_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: cache path tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).user_cache_path
def user_state_path(
@@ -265,15 +416,23 @@ def user_state_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
- :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
+ :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: state path tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ roaming=roaming,
+ ensure_exists=ensure_exists,
+ ).user_state_path
def user_log_path(
@@ -281,15 +440,23 @@ def user_log_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: log path tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).user_log_path
def user_documents_path() -> Path:
@@ -304,15 +471,23 @@ def user_runtime_path(
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True,
+ ensure_exists: bool = False,
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+ :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: runtime path tied to the user
"""
- return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_path
+ return PlatformDirs(
+ appname=appname,
+ appauthor=appauthor,
+ version=version,
+ opinion=opinion,
+ ensure_exists=ensure_exists,
+ ).user_runtime_path
__all__ = [
@@ -330,6 +505,7 @@ __all__ = [
"user_runtime_dir",
"site_data_dir",
"site_config_dir",
+ "site_cache_dir",
"user_data_path",
"user_config_path",
"user_cache_path",
@@ -339,4 +515,5 @@ __all__ = [
"user_runtime_path",
"site_data_path",
"site_config_path",
+ "site_cache_path",
]
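The new module-level helpers mirror the existing ones, so opting into directory creation is a single keyword argument. A minimal sketch of how the vendored module might be exercised (the appname/appauthor values below are hypothetical, for illustration only):

    from pip._vendor.platformdirs import site_cache_dir, user_cache_dir

    # Hypothetical application metadata.
    cache = user_cache_dir("myapp", "myorg", ensure_exists=True)  # directory created on access
    shared = site_cache_dir("myapp", "myorg")                     # path only; nothing is created
    print(cache, shared)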
diff --git a/src/pip/_vendor/platformdirs/__main__.py b/src/pip/_vendor/platformdirs/__main__.py
index 9c54bfb43..7171f1311 100644
--- a/src/pip/_vendor/platformdirs/__main__.py
+++ b/src/pip/_vendor/platformdirs/__main__.py
@@ -12,6 +12,7 @@ PROPS = (
"user_runtime_dir",
"site_data_dir",
"site_config_dir",
+ "site_cache_dir",
)
diff --git a/src/pip/_vendor/platformdirs/android.py b/src/pip/_vendor/platformdirs/android.py
index eda809351..f6de7451b 100644
--- a/src/pip/_vendor/platformdirs/android.py
+++ b/src/pip/_vendor/platformdirs/android.py
@@ -12,8 +12,9 @@ from .api import PlatformDirsABC
class Android(PlatformDirsABC):
"""
Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_. Makes use of the
- `appname <platformdirs.api.PlatformDirsABC.appname>` and
- `version <platformdirs.api.PlatformDirsABC.version>`.
+ `appname <platformdirs.api.PlatformDirsABC.appname>`,
+ `version <platformdirs.api.PlatformDirsABC.version>`, and
+ `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
"""
@property
@@ -44,6 +45,11 @@ class Android(PlatformDirsABC):
return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
@property
+ def site_cache_dir(self) -> str:
+ """:return: cache directory shared by users, same as `user_cache_dir`"""
+ return self.user_cache_dir
+
+ @property
def user_state_dir(self) -> str:
""":return: state directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
diff --git a/src/pip/_vendor/platformdirs/api.py b/src/pip/_vendor/platformdirs/api.py
index 6f6e2c2c6..f140e8b6d 100644
--- a/src/pip/_vendor/platformdirs/api.py
+++ b/src/pip/_vendor/platformdirs/api.py
@@ -22,6 +22,7 @@ class PlatformDirsABC(ABC):
roaming: bool = False,
multipath: bool = False,
opinion: bool = True,
+ ensure_exists: bool = False,
):
"""
Create a new platform directory.
@@ -32,6 +33,7 @@ class PlatformDirsABC(ABC):
:param roaming: See `roaming`.
:param multipath: See `multipath`.
:param opinion: See `opinion`.
+ :param ensure_exists: See `ensure_exists`.
"""
self.appname = appname #: The name of the application.
self.appauthor = appauthor
@@ -56,6 +58,11 @@ class PlatformDirsABC(ABC):
returned. By default, only the first item is returned.
"""
self.opinion = opinion #: A flag indicating whether to use opinionated values.
+ self.ensure_exists = ensure_exists
+ """
+ Optionally create the directory (and any missing parents) upon access if it does not exist.
+ By default, no directories are created.
+ """
def _append_app_name_and_version(self, *base: str) -> str:
params = list(base[1:])
@@ -63,7 +70,13 @@ class PlatformDirsABC(ABC):
params.append(self.appname)
if self.version:
params.append(self.version)
- return os.path.join(base[0], *params)
+ path = os.path.join(base[0], *params)
+ self._optionally_create_directory(path)
+ return path
+
+ def _optionally_create_directory(self, path: str) -> None:
+ if self.ensure_exists:
+ Path(path).mkdir(parents=True, exist_ok=True)
@property
@abstractmethod
@@ -92,6 +105,11 @@ class PlatformDirsABC(ABC):
@property
@abstractmethod
+ def site_cache_dir(self) -> str:
+ """:return: cache directory shared by users"""
+
+ @property
+ @abstractmethod
def user_state_dir(self) -> str:
""":return: state directory tied to the user"""
@@ -136,6 +154,11 @@ class PlatformDirsABC(ABC):
return Path(self.user_cache_dir)
@property
+ def site_cache_path(self) -> Path:
+ """:return: cache path shared by users"""
+ return Path(self.site_cache_dir)
+
+ @property
def user_state_path(self) -> Path:
""":return: state path tied to the user"""
return Path(self.user_state_dir)
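The opt-in creation is centralized in `_optionally_create_directory`, which every path accessor funnels through via `_append_app_name_and_version`. A stripped-down sketch of the same mechanism, independent of the vendored class hierarchy:

    from __future__ import annotations

    import os
    from pathlib import Path

    def append_app_name_and_version(base: str, appname: str, version: str | None,
                                    ensure_exists: bool) -> str:
        # Mirrors PlatformDirsABC: join base/appname[/version], then optionally mkdir.
        parts = [appname] + ([version] if version else [])
        path = os.path.join(base, *parts)
        if ensure_exists:
            Path(path).mkdir(parents=True, exist_ok=True)  # create missing parents, ignore existing
        return path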
diff --git a/src/pip/_vendor/platformdirs/macos.py b/src/pip/_vendor/platformdirs/macos.py
index a01337c77..ec9751129 100644
--- a/src/pip/_vendor/platformdirs/macos.py
+++ b/src/pip/_vendor/platformdirs/macos.py
@@ -9,14 +9,15 @@ class MacOS(PlatformDirsABC):
"""
Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
<https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
- Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>` and
- `version <platformdirs.api.PlatformDirsABC.version>`.
+ Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`,
+ `version <platformdirs.api.PlatformDirsABC.version>`, and
+ `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
"""
@property
def user_data_dir(self) -> str:
""":return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
- return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/"))
+ return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support"))
@property
def site_data_dir(self) -> str:
@@ -25,13 +26,13 @@ class MacOS(PlatformDirsABC):
@property
def user_config_dir(self) -> str:
- """:return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``"""
- return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/"))
+ """:return: config directory tied to the user, same as `user_data_dir`"""
+ return self.user_data_dir
@property
def site_config_dir(self) -> str:
- """:return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``"""
- return self._append_app_name_and_version("/Library/Preferences")
+ """:return: config directory shared by the users, same as `site_data_dir`"""
+ return self.site_data_dir
@property
def user_cache_dir(self) -> str:
@@ -39,6 +40,11 @@ class MacOS(PlatformDirsABC):
return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))
@property
+ def site_cache_dir(self) -> str:
+ """:return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``"""
+ return self._append_app_name_and_version("/Library/Caches")
+
+ @property
def user_state_dir(self) -> str:
""":return: state directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
diff --git a/src/pip/_vendor/platformdirs/unix.py b/src/pip/_vendor/platformdirs/unix.py
index 9aca5a030..17d355da9 100644
--- a/src/pip/_vendor/platformdirs/unix.py
+++ b/src/pip/_vendor/platformdirs/unix.py
@@ -24,7 +24,8 @@ class Unix(PlatformDirsABC):
`appname <platformdirs.api.PlatformDirsABC.appname>`,
`version <platformdirs.api.PlatformDirsABC.version>`,
`multipath <platformdirs.api.PlatformDirsABC.multipath>`,
- `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+ `opinion <platformdirs.api.PlatformDirsABC.opinion>`, and
+ `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
"""
@property
@@ -94,6 +95,13 @@ class Unix(PlatformDirsABC):
return self._append_app_name_and_version(path)
@property
+ def site_cache_dir(self) -> str:
+ """
+ :return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version``
+ """
+ return self._append_app_name_and_version("/var/tmp")
+
+ @property
def user_state_dir(self) -> str:
"""
:return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
@@ -148,6 +156,11 @@ class Unix(PlatformDirsABC):
""":return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
return self._first_item_as_path_if_multipath(self.site_config_dir)
+ @property
+ def site_cache_path(self) -> Path:
+ """:return: cache path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
+ return self._first_item_as_path_if_multipath(self.site_cache_dir)
+
def _first_item_as_path_if_multipath(self, directory: str) -> Path:
if self.multipath:
# If multipath is True, the first path is returned.
diff --git a/src/pip/_vendor/platformdirs/version.py b/src/pip/_vendor/platformdirs/version.py
index 9f6eb98e8..d906a2c99 100644
--- a/src/pip/_vendor/platformdirs/version.py
+++ b/src/pip/_vendor/platformdirs/version.py
@@ -1,4 +1,4 @@
# file generated by setuptools_scm
# don't change, don't track in version control
-__version__ = version = '2.6.2'
-__version_tuple__ = version_tuple = (2, 6, 2)
+__version__ = version = '3.2.0'
+__version_tuple__ = version_tuple = (3, 2, 0)
diff --git a/src/pip/_vendor/platformdirs/windows.py b/src/pip/_vendor/platformdirs/windows.py
index d5c27b341..e7573c3d6 100644
--- a/src/pip/_vendor/platformdirs/windows.py
+++ b/src/pip/_vendor/platformdirs/windows.py
@@ -17,7 +17,9 @@ class Windows(PlatformDirsABC):
`appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
`version <platformdirs.api.PlatformDirsABC.version>`,
`roaming <platformdirs.api.PlatformDirsABC.roaming>`,
- `opinion <platformdirs.api.PlatformDirsABC.opinion>`."""
+ `opinion <platformdirs.api.PlatformDirsABC.opinion>`, and
+ `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+ """
@property
def user_data_dir(self) -> str:
@@ -41,7 +43,9 @@ class Windows(PlatformDirsABC):
params.append(opinion_value)
if self.version:
params.append(self.version)
- return os.path.join(path, *params)
+ path = os.path.join(path, *params)
+ self._optionally_create_directory(path)
+ return path
@property
def site_data_dir(self) -> str:
@@ -69,6 +73,12 @@ class Windows(PlatformDirsABC):
return self._append_parts(path, opinion_value="Cache")
@property
+ def site_cache_dir(self) -> str:
+ """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
+ path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+ return self._append_parts(path, opinion_value="Cache")
+
+ @property
def user_state_dir(self) -> str:
""":return: state directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
@@ -81,6 +91,7 @@ class Windows(PlatformDirsABC):
path = self.user_data_dir
if self.opinion:
path = os.path.join(path, "Logs")
+ self._optionally_create_directory(path)
return path
@property
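Taken together, the new `site_cache_dir` property resolves to ``/var/tmp/$appname/$version`` on Unix, ``/Library/Caches/$appname/$version`` on macOS, and ``C:\ProgramData\$appauthor\$appname\Cache\$version`` on Windows, while Android simply reuses `user_cache_dir`. A quick way to inspect the result on the current host (metadata values are hypothetical):

    from pip._vendor.platformdirs import PlatformDirs

    # The resolved path depends on the host OS.
    dirs = PlatformDirs(appname="myapp", appauthor="myorg", version="1.0")
    print(dirs.site_cache_dir)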
diff --git a/src/pip/_vendor/pygments/__init__.py b/src/pip/_vendor/pygments/__init__.py
index 7185e5376..d9b0a8dea 100644
--- a/src/pip/_vendor/pygments/__init__.py
+++ b/src/pip/_vendor/pygments/__init__.py
@@ -26,7 +26,7 @@
"""
from io import StringIO, BytesIO
-__version__ = '2.13.0'
+__version__ = '2.14.0'
__docformat__ = 'restructuredtext'
__all__ = ['lex', 'format', 'highlight']
diff --git a/src/pip/_vendor/pygments/formatters/__init__.py b/src/pip/_vendor/pygments/formatters/__init__.py
index 43c4c89aa..7ecf7eee3 100644
--- a/src/pip/_vendor/pygments/formatters/__init__.py
+++ b/src/pip/_vendor/pygments/formatters/__init__.py
@@ -8,7 +8,6 @@
:license: BSD, see LICENSE for details.
"""
-import re
import sys
import types
from fnmatch import fnmatch
diff --git a/src/pip/_vendor/pygments/formatters/html.py b/src/pip/_vendor/pygments/formatters/html.py
index d5cda4c4b..f22b200c0 100644
--- a/src/pip/_vendor/pygments/formatters/html.py
+++ b/src/pip/_vendor/pygments/formatters/html.py
@@ -878,10 +878,12 @@ class HtmlFormatter(Formatter):
# for all but the last line
for part in parts[:-1]:
if line:
- if lspan != cspan:
+ # Also check for part being non-empty, so we avoid creating
+ # empty <span> tags
+ if lspan != cspan and part:
line.extend(((lspan and '</span>'), cspan, part,
(cspan and '</span>'), lsep))
- else: # both are the same
+ else: # both are the same, or the current part was empty
line.extend((part, (lspan and '</span>'), lsep))
yield 1, ''.join(line)
line = []
diff --git a/src/pip/_vendor/pygments/formatters/irc.py b/src/pip/_vendor/pygments/formatters/irc.py
index 3f6d52deb..53e19b83d 100644
--- a/src/pip/_vendor/pygments/formatters/irc.py
+++ b/src/pip/_vendor/pygments/formatters/irc.py
@@ -128,38 +128,12 @@ class IRCFormatter(Formatter):
self._lineno = 0
def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("\n%04d: " % self._lineno)
-
- def _format_unencoded_with_lineno(self, tokensource, outfile):
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- if value.endswith("\n"):
- self._write_lineno(outfile)
- value = value[:-1]
- color = self.colorscheme.get(ttype)
- while color is None:
- ttype = ttype.parent
- color = self.colorscheme.get(ttype)
- if color:
- color = color[self.darkbg]
- spl = value.split('\n')
- for line in spl[:-1]:
- self._write_lineno(outfile)
- if line:
- outfile.write(ircformat(color, line[:-1]))
- if spl[-1]:
- outfile.write(ircformat(color, spl[-1]))
- else:
- outfile.write(value)
-
- outfile.write("\n")
+ if self.linenos:
+ self._lineno += 1
+ outfile.write("%04d: " % self._lineno)
def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._format_unencoded_with_lineno(tokensource, outfile)
- return
+ self._write_lineno(outfile)
for ttype, value in tokensource:
color = self.colorscheme.get(ttype)
@@ -173,6 +147,7 @@ class IRCFormatter(Formatter):
if line:
outfile.write(ircformat(color, line))
outfile.write('\n')
+ self._write_lineno(outfile)
if spl[-1]:
outfile.write(ircformat(color, spl[-1]))
else:
diff --git a/src/pip/_vendor/pygments/lexer.py b/src/pip/_vendor/pygments/lexer.py
index ec7f4de32..74ab9b908 100644
--- a/src/pip/_vendor/pygments/lexer.py
+++ b/src/pip/_vendor/pygments/lexer.py
@@ -14,15 +14,16 @@ import time
from pip._vendor.pygments.filter import apply_filters, Filter
from pip._vendor.pygments.filters import get_filter_by_name
-from pip._vendor.pygments.token import Error, Text, Other, _TokenType
+from pip._vendor.pygments.token import Error, Text, Other, Whitespace, _TokenType
from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
make_analysator, Future, guess_decode
from pip._vendor.pygments.regexopt import regex_opt
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
- 'default', 'words']
+ 'default', 'words', 'line_re']
+line_re = re.compile('.*?\n')
_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
(b'\xff\xfe\0\0', 'utf-32'),
@@ -670,7 +671,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
# at EOL, reset state to "root"
statestack = ['root']
statetokens = tokendefs['root']
- yield pos, Text, '\n'
+ yield pos, Whitespace, '\n'
pos += 1
continue
yield pos, Error, text[pos]
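`line_re` now lives in `pygments.lexer` and is re-exported via `__all__`, so lexers such as the Python lexer below import it instead of redefining it. A tiny sketch of what the pattern matches:

    from pip._vendor.pygments.lexer import line_re

    # Non-greedy '.*?\n': one match per line, newline included.
    print(line_re.findall("a\nbb\n"))  # ['a\n', 'bb\n']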
diff --git a/src/pip/_vendor/pygments/lexers/__init__.py b/src/pip/_vendor/pygments/lexers/__init__.py
index ed69f24ed..e75a05791 100644
--- a/src/pip/_vendor/pygments/lexers/__init__.py
+++ b/src/pip/_vendor/pygments/lexers/__init__.py
@@ -8,7 +8,6 @@
:license: BSD, see LICENSE for details.
"""
-import re
import sys
import types
from fnmatch import fnmatch
diff --git a/src/pip/_vendor/pygments/lexers/_mapping.py b/src/pip/_vendor/pygments/lexers/_mapping.py
index 40dcaa3c7..1eaaf56e9 100644
--- a/src/pip/_vendor/pygments/lexers/_mapping.py
+++ b/src/pip/_vendor/pygments/lexers/_mapping.py
@@ -30,6 +30,7 @@ LEXERS = {
'AppleScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
'ArduinoLexer': ('pip._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
'ArrowLexer': ('pip._vendor.pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
+ 'ArturoLexer': ('pip._vendor.pygments.lexers.arturo', 'Arturo', ('arturo', 'art'), ('*.art',), ()),
'AscLexer': ('pip._vendor.pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')),
'AspectJLexer': ('pip._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
'AsymptoteLexer': ('pip._vendor.pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
@@ -152,13 +153,14 @@ LEXERS = {
'EvoqueXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
'ExeclineLexer': ('pip._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
'EzhilLexer': ('pip._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
- 'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
+ 'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi', '*.fsx'), ('text/x-fsharp',)),
'FStarLexer': ('pip._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)),
'FactorLexer': ('pip._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
'FancyLexer': ('pip._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
'FantomLexer': ('pip._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
'FelixLexer': ('pip._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
'FennelLexer': ('pip._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()),
+ 'FiftLexer': ('pip._vendor.pygments.lexers.fift', 'Fift', ('fift', 'fif'), ('*.fif',), ()),
'FishShellLexer': ('pip._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
'FlatlineLexer': ('pip._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
'FloScriptLexer': ('pip._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()),
@@ -167,7 +169,9 @@ LEXERS = {
'FortranLexer': ('pip._vendor.pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
'FoxProLexer': ('pip._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
'FreeFemLexer': ('pip._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)),
+ 'FuncLexer': ('pip._vendor.pygments.lexers.func', 'FunC', ('func', 'fc'), ('*.fc', '*.func'), ()),
'FutharkLexer': ('pip._vendor.pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)),
+ 'GAPConsoleLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP session', ('gap-console', 'gap-repl'), ('*.tst',), ()),
'GAPLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
'GDScriptLexer': ('pip._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
'GLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
@@ -196,7 +200,7 @@ LEXERS = {
'HaxeLexer': ('pip._vendor.pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
'HexdumpLexer': ('pip._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
'HsailLexer': ('pip._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
- 'HspecLexer': ('pip._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()),
+ 'HspecLexer': ('pip._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), ('*Spec.hs',), ()),
'HtmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), ('*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2'), ('text/html+django', 'text/html+jinja')),
'HtmlGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
'HtmlLexer': ('pip._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
@@ -236,6 +240,7 @@ LEXERS = {
'JsonBareObjectLexer': ('pip._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()),
'JsonLdLexer': ('pip._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')),
+ 'JsonnetLexer': ('pip._vendor.pygments.lexers.jsonnet', 'Jsonnet', ('jsonnet',), ('*.jsonnet', '*.libsonnet'), ()),
'JspLexer': ('pip._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
'JuliaConsoleLexer': ('pip._vendor.pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
'JuliaLexer': ('pip._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
@@ -270,8 +275,10 @@ LEXERS = {
'LogosLexer': ('pip._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
'LogtalkLexer': ('pip._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
'LuaLexer': ('pip._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
- 'MCFunctionLexer': ('pip._vendor.pygments.lexers.mcfunction', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)),
+ 'MCFunctionLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)),
+ 'MCSchemaLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCSchema', ('mcschema',), ('*.mcschema',), ('text/mcschema',)),
'MIMELexer': ('pip._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
+ 'MIPSLexer': ('pip._vendor.pygments.lexers.mips', 'MIPS', ('mips',), ('*.mips', '*.MIPS'), ()),
'MOOCodeLexer': ('pip._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
'MSDOSSessionLexer': ('pip._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
'Macaulay2Lexer': ('pip._vendor.pygments.lexers.macaulay2', 'Macaulay2', ('macaulay2',), ('*.m2',), ()),
@@ -316,7 +323,7 @@ LEXERS = {
'MyghtyXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
'NCLLexer': ('pip._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
'NSISLexer': ('pip._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
- 'NasmLexer': ('pip._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
+ 'NasmLexer': ('pip._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM', '*.nasm'), ('text/x-nasm',)),
'NasmObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
'NemerleLexer': ('pip._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
'NesCLexer': ('pip._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
@@ -350,6 +357,7 @@ LEXERS = {
'PegLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
'Perl6Lexer': ('pip._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
'PerlLexer': ('pip._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
+ 'PhixLexer': ('pip._vendor.pygments.lexers.phix', 'Phix', ('phix',), ('*.exw',), ('text/x-phix',)),
'PhpLexer': ('pip._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
'PigLexer': ('pip._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
'PikeLexer': ('pip._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
@@ -357,6 +365,7 @@ LEXERS = {
'PlPgsqlLexer': ('pip._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
'PointlessLexer': ('pip._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
'PonyLexer': ('pip._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
+ 'PortugolLexer': ('pip._vendor.pygments.lexers.pascal', 'Portugol', ('portugol',), ('*.alg', '*.portugol'), ()),
'PostScriptLexer': ('pip._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
'PostgresConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
'PostgresLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
@@ -376,7 +385,7 @@ LEXERS = {
'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
- 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
+ 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
'PythonUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()),
'QBasicLexer': ('pip._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
@@ -421,7 +430,7 @@ LEXERS = {
'SASLexer': ('pip._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
'SLexer': ('pip._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
'SMLLexer': ('pip._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
- 'SNBTLexer': ('pip._vendor.pygments.lexers.mcfunction', 'SNBT', ('snbt',), ('*.snbt',), ('text/snbt',)),
+ 'SNBTLexer': ('pip._vendor.pygments.lexers.minecraft', 'SNBT', ('snbt',), ('*.snbt',), ('text/snbt',)),
'SarlLexer': ('pip._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)),
'SassLexer': ('pip._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
'SaviLexer': ('pip._vendor.pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()),
@@ -485,6 +494,7 @@ LEXERS = {
'ThingsDBLexer': ('pip._vendor.pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
'ThriftLexer': ('pip._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
'TiddlyWiki5Lexer': ('pip._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
+ 'TlbLexer': ('pip._vendor.pygments.lexers.tlb', 'Tl-b', ('tlb',), ('*.tlb',), ()),
'TodotxtLexer': ('pip._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
'TransactSqlLexer': ('pip._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
'TreetopLexer': ('pip._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
@@ -519,6 +529,8 @@ LEXERS = {
'WatLexer': ('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
'WebIDLLexer': ('pip._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
'WhileyLexer': ('pip._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
+ 'WoWTocLexer': ('pip._vendor.pygments.lexers.wowtoc', 'World of Warcraft TOC', ('wowtoc',), ('*.toc',), ()),
+ 'WrenLexer': ('pip._vendor.pygments.lexers.wren', 'Wren', ('wren',), ('*.wren',), ()),
'X10Lexer': ('pip._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
'XMLUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'XML+UL4', ('xml+ul4',), ('*.xmlul4',), ()),
'XQueryLexer': ('pip._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
diff --git a/src/pip/_vendor/pygments/lexers/python.py b/src/pip/_vendor/pygments/lexers/python.py
index c24e3c86e..3341a3826 100644
--- a/src/pip/_vendor/pygments/lexers/python.py
+++ b/src/pip/_vendor/pygments/lexers/python.py
@@ -12,18 +12,16 @@ import re
import keyword
from pip._vendor.pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- default, words, combined, do_insertions, this
+ default, words, combined, do_insertions, this, line_re
from pip._vendor.pygments.util import get_bool_opt, shebang_matches
from pip._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other, Error
+ Number, Punctuation, Generic, Other, Error, Whitespace
from pip._vendor.pygments import unistring as uni
__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
'Python2Lexer', 'Python2TracebackLexer',
'CythonLexer', 'DgLexer', 'NumPyLexer']
-line_re = re.compile('.*?\n')
-
class PythonLexer(RegexLexer):
"""
@@ -42,6 +40,8 @@ class PythonLexer(RegexLexer):
filenames = [
'*.py',
'*.pyw',
+ # Type stubs
+ '*.pyi',
# Jython
'*.jy',
# Sage
@@ -100,11 +100,11 @@ class PythonLexer(RegexLexer):
tokens = {
'root': [
- (r'\n', Text),
+ (r'\n', Whitespace),
(r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
- bygroups(Text, String.Affix, String.Doc)),
+ bygroups(Whitespace, String.Affix, String.Doc)),
(r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
- bygroups(Text, String.Affix, String.Doc)),
+ bygroups(Whitespace, String.Affix, String.Doc)),
(r'\A#!.+$', Comment.Hashbang),
(r'#.*$', Comment.Single),
(r'\\\n', Text),
@@ -169,7 +169,7 @@ class PythonLexer(RegexLexer):
combined('bytesescape', 'dqs')),
("([bB])(')", bygroups(String.Affix, String.Single),
combined('bytesescape', 'sqs')),
-
+
(r'[^\S\n]+', Text),
include('numbers'),
(r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
@@ -192,13 +192,13 @@ class PythonLexer(RegexLexer):
(r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
r'(\![sraf])?' # conversion
r':', String.Interpol, '#pop'),
- (r'\s+', Text), # allow new lines
+ (r'\s+', Whitespace), # allow new lines
include('expr'),
],
'expr-inside-fstring-inner': [
(r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
(r'[])}]', Punctuation, '#pop'),
- (r'\s+', Text), # allow new lines
+ (r'\s+', Whitespace), # allow new lines
include('expr'),
],
'expr-keywords': [
@@ -229,7 +229,7 @@ class PythonLexer(RegexLexer):
],
'soft-keywords-inner': [
# optional `_` keyword
- (r'(\s+)([^\n_]*)(_\b)', bygroups(Text, using(this), Keyword)),
+ (r'(\s+)([^\n_]*)(_\b)', bygroups(Whitespace, using(this), Keyword)),
default('#pop')
],
'builtins': [
@@ -445,11 +445,11 @@ class Python2Lexer(RegexLexer):
tokens = {
'root': [
- (r'\n', Text),
+ (r'\n', Whitespace),
(r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
- bygroups(Text, String.Affix, String.Doc)),
+ bygroups(Whitespace, String.Affix, String.Doc)),
(r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
- bygroups(Text, String.Affix, String.Doc)),
+ bygroups(Whitespace, String.Affix, String.Doc)),
(r'[^\S\n]+', Text),
(r'\A#!.+$', Comment.Hashbang),
(r'#.*$', Comment.Single),
@@ -742,7 +742,7 @@ class PythonTracebackLexer(RegexLexer):
tokens = {
'root': [
- (r'\n', Text),
+ (r'\n', Whitespace),
(r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
(r'^During handling of the above exception, another '
r'exception occurred:\n\n', Generic.Traceback),
@@ -753,24 +753,24 @@ class PythonTracebackLexer(RegexLexer):
],
'intb': [
(r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
(r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
+ bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
(r'^( )(.+)(\n)',
- bygroups(Text, using(PythonLexer), Text), 'markers'),
+ bygroups(Whitespace, using(PythonLexer), Whitespace), 'markers'),
(r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
+ bygroups(Whitespace, Comment, Whitespace)), # for doctests...
(r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
(r'^([a-zA-Z_][\w.]*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
+ bygroups(Generic.Error, Whitespace), '#pop')
],
'markers': [
# Either `PEP 657 <https://www.python.org/dev/peps/pep-0657/>`
# error locations in Python 3.11+, or single-caret markers
# for syntax errors before that.
(r'^( {4,})([~^]+)(\n)',
- bygroups(Text, Punctuation.Marker, Text),
+ bygroups(Whitespace, Punctuation.Marker, Whitespace),
'#pop'),
default('#pop'),
],
@@ -808,17 +808,17 @@ class Python2TracebackLexer(RegexLexer):
],
'intb': [
(r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
(r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
+ bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
(r'^( )(.+)(\n)',
- bygroups(Text, using(Python2Lexer), Text), 'marker'),
+ bygroups(Text, using(Python2Lexer), Whitespace), 'marker'),
(r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
+ bygroups(Text, Comment, Whitespace)), # for doctests...
(r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
(r'^([a-zA-Z_]\w*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
+ bygroups(Generic.Error, Whitespace), '#pop')
],
'marker': [
# For syntax errors.
@@ -843,13 +843,13 @@ class CythonLexer(RegexLexer):
tokens = {
'root': [
- (r'\n', Text),
- (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+ (r'\n', Whitespace),
+ (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Whitespace, String.Doc)),
+ (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Whitespace, String.Doc)),
(r'[^\S\n]+', Text),
(r'#.*$', Comment),
(r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
+ (r'\\\n', Whitespace),
(r'\\', Text),
(r'(in|is|and|or|not)\b', Operator.Word),
(r'(<)([a-zA-Z0-9.?]+)(>)',
diff --git a/src/pip/_vendor/pygments/sphinxext.py b/src/pip/_vendor/pygments/sphinxext.py
index c41bd49dd..3537ecdb2 100644
--- a/src/pip/_vendor/pygments/sphinxext.py
+++ b/src/pip/_vendor/pygments/sphinxext.py
@@ -74,6 +74,8 @@ class PygmentsDoc(Directive):
out = self.document_formatters()
elif self.arguments[0] == 'filters':
out = self.document_filters()
+ elif self.arguments[0] == 'lexers_overview':
+ out = self.document_lexers_overview()
else:
raise Exception('invalid argument for "pygmentsdoc" directive')
node = nodes.compound()
@@ -83,6 +85,66 @@ class PygmentsDoc(Directive):
self.state.document.settings.record_dependencies.add(fn)
return node.children
+ def document_lexers_overview(self):
+ """Generate a tabular overview of all lexers.
+
+ The columns are the lexer name, the extensions handled by this lexer
+ (or "None"), the aliases and a link to the lexer class."""
+ from pip._vendor.pygments.lexers._mapping import LEXERS
+ from pip._vendor.pygments.lexers import find_lexer_class
+ out = []
+
+ table = []
+
+ def format_link(name, url):
+ if url:
+ return f'`{name} <{url}>`_'
+ return name
+
+ for classname, data in sorted(LEXERS.items(), key=lambda x: x[1][1].lower()):
+ lexer_cls = find_lexer_class(data[1])
+ extensions = lexer_cls.filenames + lexer_cls.alias_filenames
+
+ table.append({
+ 'name': format_link(data[1], lexer_cls.url),
+ 'extensions': ', '.join(extensions).replace('*', '\\*').replace('_', '\\_') or 'None',
+ 'aliases': ', '.join(data[2]),
+ 'class': f'{data[0]}.{classname}'
+ })
+
+ column_names = ['name', 'extensions', 'aliases', 'class']
+ column_lengths = [max([len(row[column]) for row in table if row[column]])
+ for column in column_names]
+
+ def write_row(*columns):
+ """Format a table row"""
+ out = []
+ for l, c in zip(column_lengths, columns):
+ if c:
+ out.append(c.ljust(l))
+ else:
+ out.append(' '*l)
+
+ return ' '.join(out)
+
+ def write_separator():
+ """Write a table separator row"""
+ sep = ['='*c for c in column_lengths]
+ return write_row(*sep)
+
+ out.append(write_separator())
+ out.append(write_row('Name', 'Extension(s)', 'Short name(s)', 'Lexer class'))
+ out.append(write_separator())
+ for row in table:
+ out.append(write_row(
+ row['name'],
+ row['extensions'],
+ row['aliases'],
+ f':class:`~{row["class"]}`'))
+ out.append(write_separator())
+
+ return '\n'.join(out)
+
def document_lexers(self):
from pip._vendor.pygments.lexers._mapping import LEXERS
out = []
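The overview is emitted as a reST "simple table", built by padding each cell to its column's maximum width. The same fixed-width technique, reduced to a self-contained sketch with made-up rows:

    rows = [("Python", "*.py, *.pyi", "python, py"),
            ("JSON", "*.json", "json")]
    header = ("Name", "Extension(s)", "Short name(s)")
    widths = [max(len(r[i]) for r in (header, *rows)) for i in range(len(header))]

    def write_row(*cols):
        # Pad every cell to its column width; reST simple tables align on spaces.
        return "  ".join(c.ljust(w) for w, c in zip(widths, cols))

    sep = write_row(*("=" * w for w in widths))
    print("\n".join([sep, write_row(*header), sep,
                     *(write_row(*r) for r in rows), sep]))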
diff --git a/src/pip/_vendor/rich/_export_format.py b/src/pip/_vendor/rich/_export_format.py
index b79c13069..094d2dc22 100644
--- a/src/pip/_vendor/rich/_export_format.py
+++ b/src/pip/_vendor/rich/_export_format.py
@@ -12,9 +12,7 @@ body {{
</head>
<html>
<body>
- <code>
- <pre style="font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace">{code}</pre>
- </code>
+ <pre style="font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace"><code>{code}</code></pre>
</body>
</html>
"""
diff --git a/src/pip/_vendor/rich/_fileno.py b/src/pip/_vendor/rich/_fileno.py
new file mode 100644
index 000000000..b17ee6511
--- /dev/null
+++ b/src/pip/_vendor/rich/_fileno.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from typing import IO, Callable
+
+
+def get_fileno(file_like: IO[str]) -> int | None:
+ """Get fileno() from a file, accounting for poorly implemented file-like objects.
+
+ Args:
+ file_like (IO): A file-like object.
+
+ Returns:
+ int | None: The result of fileno() if available, or None if the operation failed.
+ """
+ fileno: Callable[[], int] | None = getattr(file_like, "fileno", None)
+ if fileno is not None:
+ try:
+ return fileno()
+ except Exception:
+ # `fileno` is documented as potentially raising an OSError.
+ # Alas, judging by the issue tracker, there are so many poorly implemented
+ # file-like objects that `fileno()` can raise just about anything.
+ return None
+ return None
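`get_fileno` deliberately swallows any exception, since file-like objects in the wild raise more than the documented OSError. A small usage sketch:

    import io
    import sys

    from pip._vendor.rich._fileno import get_fileno

    print(get_fileno(sys.stdout))     # a real stream: an int such as 1
    print(get_fileno(io.StringIO()))  # StringIO raises UnsupportedOperation -> None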
diff --git a/src/pip/_vendor/rich/_null_file.py b/src/pip/_vendor/rich/_null_file.py
index 49038bfcb..b659673ef 100644
--- a/src/pip/_vendor/rich/_null_file.py
+++ b/src/pip/_vendor/rich/_null_file.py
@@ -3,20 +3,6 @@ from typing import IO, Iterable, Iterator, List, Optional, Type
class NullFile(IO[str]):
-
- # TODO: "mode", "name" and "closed" are only required for Python 3.6.
-
- @property
- def mode(self) -> str:
- return ""
-
- @property
- def name(self) -> str:
- return "NullFile"
-
- def closed(self) -> bool:
- return False
-
def close(self) -> None:
pass
diff --git a/src/pip/_vendor/rich/align.py b/src/pip/_vendor/rich/align.py
index d5abb5947..c310b66e7 100644
--- a/src/pip/_vendor/rich/align.py
+++ b/src/pip/_vendor/rich/align.py
@@ -303,7 +303,7 @@ if __name__ == "__main__": # pragma: no cover
),
width=60,
style="on dark_blue",
- title="Algin",
+ title="Align",
)
console.print(
diff --git a/src/pip/_vendor/rich/ansi.py b/src/pip/_vendor/rich/ansi.py
index 92ef51941..66365e653 100644
--- a/src/pip/_vendor/rich/ansi.py
+++ b/src/pip/_vendor/rich/ansi.py
@@ -43,6 +43,9 @@ def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]:
if start > position:
yield _AnsiToken(ansi_text[position:start])
if sgr:
+ if sgr == "(":
+ position = end + 1
+ continue
if sgr.endswith("m"):
yield _AnsiToken("", sgr[1:-1], osc)
else:
diff --git a/src/pip/_vendor/rich/cells.py b/src/pip/_vendor/rich/cells.py
index 139b949f7..9354f9e31 100644
--- a/src/pip/_vendor/rich/cells.py
+++ b/src/pip/_vendor/rich/cells.py
@@ -60,7 +60,7 @@ def _get_codepoint_cell_size(codepoint: int) -> int:
"""Get the cell size of a character.
Args:
- character (str): A single character.
+ codepoint (int): Codepoint of a character.
Returns:
int: Number of cells (0, 1 or 2) occupied by that character.
diff --git a/src/pip/_vendor/rich/color.py b/src/pip/_vendor/rich/color.py
index ef2e895d7..dfe455937 100644
--- a/src/pip/_vendor/rich/color.py
+++ b/src/pip/_vendor/rich/color.py
@@ -513,15 +513,14 @@ class Color(NamedTuple):
def downgrade(self, system: ColorSystem) -> "Color":
"""Downgrade a color system to a system with fewer colors."""
- if self.type in [ColorType.DEFAULT, system]:
+ if self.type in (ColorType.DEFAULT, system):
return self
# Convert to 8-bit color from truecolor color
if system == ColorSystem.EIGHT_BIT and self.system == ColorSystem.TRUECOLOR:
assert self.triplet is not None
- red, green, blue = self.triplet.normalized
- _h, l, s = rgb_to_hls(red, green, blue)
- # If saturation is under 10% assume it is grayscale
- if s < 0.1:
+ _h, l, s = rgb_to_hls(*self.triplet.normalized)
+ # If saturation is under 15% assume it is grayscale
+ if s < 0.15:
gray = round(l * 25.0)
if gray == 0:
color_number = 16
@@ -531,8 +530,13 @@ class Color(NamedTuple):
color_number = 231 + gray
return Color(self.name, ColorType.EIGHT_BIT, number=color_number)
+ red, green, blue = self.triplet
+ six_red = red / 95 if red < 95 else 1 + (red - 95) / 40
+ six_green = green / 95 if green < 95 else 1 + (green - 95) / 40
+ six_blue = blue / 95 if blue < 95 else 1 + (blue - 95) / 40
+
color_number = (
- 16 + 36 * round(red * 5.0) + 6 * round(green * 5.0) + round(blue * 5.0)
+ 16 + 36 * round(six_red) + 6 * round(six_green) + round(six_blue)
)
return Color(self.name, ColorType.EIGHT_BIT, number=color_number)
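The new cube mapping accounts for the fact that the xterm 6x6x6 color cube is not evenly spaced: its channel levels are 0, 95, 135, 175, 215, 255, so the first step is 95 wide and the rest are 40. A minimal sketch of that quantization on its own, following the formula above:

    def channel_to_cube_level(value: int) -> int:
        # xterm cube levels: 0, 95, 135, 175, 215, 255 (first gap 95, then 40s).
        six = value / 95 if value < 95 else 1 + (value - 95) / 40
        return round(six)

    def rgb_to_eight_bit(red: int, green: int, blue: int) -> int:
        # 16 base colors, then the 6*6*6 cube starting at index 16, as in Color.downgrade.
        return (16 + 36 * channel_to_cube_level(red)
                + 6 * channel_to_cube_level(green)
                + channel_to_cube_level(blue))

    assert rgb_to_eight_bit(255, 0, 0) == 196  # pure red maps to cube index 196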
diff --git a/src/pip/_vendor/rich/console.py b/src/pip/_vendor/rich/console.py
index f805f2dea..7c363dfdc 100644
--- a/src/pip/_vendor/rich/console.py
+++ b/src/pip/_vendor/rich/console.py
@@ -1,5 +1,4 @@
import inspect
-import io
import os
import platform
import sys
@@ -48,6 +47,7 @@ else:
from . import errors, themes
from ._emoji_replace import _emoji_replace
from ._export_format import CONSOLE_HTML_FORMAT, CONSOLE_SVG_FORMAT
+from ._fileno import get_fileno
from ._log_render import FormatTimeCallable, LogRender
from .align import Align, AlignMethod
from .color import ColorSystem, blend_rgb
@@ -711,11 +711,6 @@ class Console:
self._force_terminal = None
if force_terminal is not None:
self._force_terminal = force_terminal
- else:
- # If FORCE_COLOR env var has any value at all, we force terminal.
- force_color = self._environ.get("FORCE_COLOR")
- if force_color is not None:
- self._force_terminal = True
self._file = file
self.quiet = quiet
@@ -758,7 +753,7 @@ class Console:
self._is_alt_screen = False
def __repr__(self) -> str:
- return f"<console width={self.width} {str(self._color_system)}>"
+ return f"<console width={self.width} {self._color_system!s}>"
@property
def file(self) -> IO[str]:
@@ -949,6 +944,15 @@ class Console:
# Return False for Idle which claims to be a tty but can't handle ansi codes
return False
+ if self.is_jupyter:
+ # return False for Jupyter, which may have FORCE_COLOR set
+ return False
+
+ # If FORCE_COLOR env var has any value at all, we assume a terminal.
+ force_color = self._environ.get("FORCE_COLOR")
+ if force_color is not None:
+ self._force_terminal = True
+
isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None)
try:
return False if isatty is None else isatty()
@@ -1146,7 +1150,7 @@ class Console:
status: RenderableType,
*,
spinner: str = "dots",
- spinner_style: str = "status.spinner",
+ spinner_style: StyleType = "status.spinner",
speed: float = 1.0,
refresh_per_second: float = 12.5,
) -> "Status":
@@ -1523,7 +1527,7 @@ class Console:
if text:
sep_text = Text(sep, justify=justify, end=end)
append(sep_text.join(text))
- del text[:]
+ text.clear()
for renderable in objects:
renderable = rich_cast(renderable)
@@ -2006,12 +2010,11 @@ class Console:
if WINDOWS:
use_legacy_windows_render = False
if self.legacy_windows:
- try:
+ fileno = get_fileno(self.file)
+ if fileno is not None:
use_legacy_windows_render = (
- self.file.fileno() in _STD_STREAMS_OUTPUT
+ fileno in _STD_STREAMS_OUTPUT
)
- except (ValueError, io.UnsupportedOperation):
- pass
if use_legacy_windows_render:
from pip._vendor.rich._win32_console import LegacyWindowsTerm
@@ -2026,13 +2029,31 @@ class Console:
# Either a non-std stream on legacy Windows, or modern Windows.
text = self._render_buffer(self._buffer[:])
# https://bugs.python.org/issue37871
+ # https://github.com/python/cpython/issues/82052
+ # We need to avoid writing more than 32Kb in a single write, due to the above bug
write = self.file.write
- for line in text.splitlines(True):
- try:
- write(line)
- except UnicodeEncodeError as error:
- error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***"
- raise
+ # Worst-case scenario: every character is 4 bytes of UTF-8
+ MAX_WRITE = 32 * 1024 // 4
+ try:
+ if len(text) <= MAX_WRITE:
+ write(text)
+ else:
+ batch: List[str] = []
+ batch_append = batch.append
+ size = 0
+ for line in text.splitlines(True):
+ if size + len(line) > MAX_WRITE and batch:
+ write("".join(batch))
+ batch.clear()
+ size = 0
+ batch_append(line)
+ size += len(line)
+ if batch:
+ write("".join(batch))
+ batch.clear()
+ except UnicodeEncodeError as error:
+ error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***"
+ raise
else:
text = self._render_buffer(self._buffer[:])
try:
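The batching above works around a Windows console bug (CPython issue 82052) where a single write of more than 32KiB can fail; since a UTF-8 character is at most 4 bytes, the threshold is 32 * 1024 // 4 characters. The grouping logic, isolated as a sketch:

    from typing import List

    MAX_WRITE = 32 * 1024 // 4  # worst case: every character is 4 bytes of UTF-8

    def batched_writes(text: str) -> List[str]:
        # Group whole lines into chunks that stay at or under MAX_WRITE characters.
        batches: List[str] = []
        batch: List[str] = []
        size = 0
        for line in text.splitlines(True):  # keepends=True preserves newlines
            if size + len(line) > MAX_WRITE and batch:
                batches.append("".join(batch))
                batch.clear()
                size = 0
            batch.append(line)
            size += len(line)
        if batch:
            batches.append("".join(batch))
        return batches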
diff --git a/src/pip/_vendor/rich/default_styles.py b/src/pip/_vendor/rich/default_styles.py
index 46e9ea52c..dca37193a 100644
--- a/src/pip/_vendor/rich/default_styles.py
+++ b/src/pip/_vendor/rich/default_styles.py
@@ -138,10 +138,11 @@ DEFAULT_STYLES: Dict[str, Style] = {
"tree.line": Style(),
"markdown.paragraph": Style(),
"markdown.text": Style(),
- "markdown.emph": Style(italic=True),
+ "markdown.em": Style(italic=True),
+ "markdown.emph": Style(italic=True), # For commonmark backwards compatibility
"markdown.strong": Style(bold=True),
- "markdown.code": Style(bgcolor="black", color="bright_white"),
- "markdown.code_block": Style(dim=True, color="cyan", bgcolor="black"),
+ "markdown.code": Style(bold=True, color="cyan", bgcolor="black"),
+ "markdown.code_block": Style(color="cyan", bgcolor="black"),
"markdown.block_quote": Style(color="magenta"),
"markdown.list": Style(color="cyan"),
"markdown.item": Style(),
@@ -157,7 +158,8 @@ DEFAULT_STYLES: Dict[str, Style] = {
"markdown.h6": Style(italic=True),
"markdown.h7": Style(italic=True, dim=True),
"markdown.link": Style(color="bright_blue"),
- "markdown.link_url": Style(color="blue"),
+ "markdown.link_url": Style(color="blue", underline=True),
+ "markdown.s": Style(strike=True),
"iso8601.date": Style(color="blue"),
"iso8601.time": Style(color="magenta"),
"iso8601.timezone": Style(color="yellow"),
diff --git a/src/pip/_vendor/rich/file_proxy.py b/src/pip/_vendor/rich/file_proxy.py
index cc69f22f3..4b0b0da6c 100644
--- a/src/pip/_vendor/rich/file_proxy.py
+++ b/src/pip/_vendor/rich/file_proxy.py
@@ -34,7 +34,7 @@ class FileProxy(io.TextIOBase):
line, new_line, text = text.partition("\n")
if new_line:
lines.append("".join(buffer) + line)
- del buffer[:]
+ buffer.clear()
else:
buffer.append(line)
break
@@ -52,3 +52,6 @@ class FileProxy(io.TextIOBase):
if output:
self.__console.print(output)
del self.__buffer[:]
+
+ def fileno(self) -> int:
+ return self.__file.fileno()
diff --git a/src/pip/_vendor/rich/highlighter.py b/src/pip/_vendor/rich/highlighter.py
index 82293dffc..c2646794a 100644
--- a/src/pip/_vendor/rich/highlighter.py
+++ b/src/pip/_vendor/rich/highlighter.py
@@ -82,7 +82,7 @@ class ReprHighlighter(RegexHighlighter):
base_style = "repr."
highlights = [
- r"(?P<tag_start><)(?P<tag_name>[-\w.:|]*)(?P<tag_contents>[\w\W]*?)(?P<tag_end>>)",
+ r"(?P<tag_start><)(?P<tag_name>[-\w.:|]*)(?P<tag_contents>[\w\W]*)(?P<tag_end>>)",
r'(?P<attrib_name>[\w_]{1,50})=(?P<attrib_value>"?[\w_]+"?)?',
r"(?P<brace>[][{}()])",
_combine_regex(
diff --git a/src/pip/_vendor/rich/json.py b/src/pip/_vendor/rich/json.py
index 21b642ab8..ea94493f2 100644
--- a/src/pip/_vendor/rich/json.py
+++ b/src/pip/_vendor/rich/json.py
@@ -1,3 +1,4 @@
+from pathlib import Path
from json import loads, dumps
from typing import Any, Callable, Optional, Union
@@ -131,8 +132,7 @@ if __name__ == "__main__":
if args.path == "-":
json_data = sys.stdin.read()
else:
- with open(args.path, "rt") as json_file:
- json_data = json_file.read()
+ json_data = Path(args.path).read_text()
except Exception as error:
error_console.print(f"Unable to read {args.path!r}; {error}")
sys.exit(-1)
diff --git a/src/pip/_vendor/rich/live.py b/src/pip/_vendor/rich/live.py
index e635fe5c9..3ebbbc4cc 100644
--- a/src/pip/_vendor/rich/live.py
+++ b/src/pip/_vendor/rich/live.py
@@ -210,6 +210,8 @@ class Live(JupyterMixin, RenderHook):
renderable (RenderableType): New renderable to use.
refresh (bool, optional): Refresh the display. Defaults to False.
"""
+ if isinstance(renderable, str):
+ renderable = self.console.render_str(renderable)
with self._lock:
self._renderable = renderable
if refresh:
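
Usage sketch for the coercion above: update() now accepts a plain string and converts it with console.render_str(), so console markup is honored.

from pip._vendor.rich.live import Live

with Live("working...") as live:
    live.update("[bold green]done[/]", refresh=True)  # str coerced to a Text renderable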
diff --git a/src/pip/_vendor/rich/pretty.py b/src/pip/_vendor/rich/pretty.py
index 847b558c9..2bd9eb007 100644
--- a/src/pip/_vendor/rich/pretty.py
+++ b/src/pip/_vendor/rich/pretty.py
@@ -30,7 +30,7 @@ from pip._vendor.rich.repr import RichReprResult
try:
import attr as _attr_module
- _has_attrs = True
+ _has_attrs = hasattr(_attr_module, "ib")
except ImportError: # pragma: no cover
_has_attrs = False
@@ -55,13 +55,6 @@ if TYPE_CHECKING:
)
-JUPYTER_CLASSES_TO_NOT_RENDER = {
- # Matplotlib "Artists" manage their own rendering in a Jupyter notebook, and we should not try to render them too.
- # "Typically, all [Matplotlib] visible elements in a figure are subclasses of Artist."
- "matplotlib.artist.Artist",
-}
-
-
def _is_attr_object(obj: Any) -> bool:
"""Check if an object was created with attrs module."""
return _has_attrs and _attr_module.has(type(obj))
@@ -122,69 +115,40 @@ def _ipy_display_hook(
max_string: Optional[int] = None,
max_depth: Optional[int] = None,
expand_all: bool = False,
-) -> None:
+) -> Union[str, None]:
# needed here to prevent circular import:
- from ._inspect import is_object_one_of_types
from .console import ConsoleRenderable
# always skip rich generated jupyter renderables or None values
if _safe_isinstance(value, JupyterRenderable) or value is None:
- return
+ return None
console = console or get_console()
- if console.is_jupyter:
- # Delegate rendering to IPython if the object (and IPython) supports it
- # https://ipython.readthedocs.io/en/stable/config/integrating.html#rich-display
- ipython_repr_methods = [
- "_repr_html_",
- "_repr_markdown_",
- "_repr_json_",
- "_repr_latex_",
- "_repr_jpeg_",
- "_repr_png_",
- "_repr_svg_",
- "_repr_mimebundle_",
- ]
- for repr_method in ipython_repr_methods:
- method = getattr(value, repr_method, None)
- if inspect.ismethod(method):
- # Calling the method ourselves isn't ideal. The interface for the `_repr_*_` methods
- # specifies that if they return None, then they should not be rendered
- # by the notebook.
- try:
- repr_result = method()
- except Exception:
- continue # If the method raises, treat it as if it doesn't exist, try any others
- if repr_result is not None:
- return # Delegate rendering to IPython
-
- # When in a Jupyter notebook let's avoid the display of some specific classes,
- # as they result in the rendering of useless and noisy lines such as `<Figure size 432x288 with 1 Axes>`.
- # What does this do?
- # --> if the class has "matplotlib.artist.Artist" in its hierarchy for example, we don't render it.
- if is_object_one_of_types(value, JUPYTER_CLASSES_TO_NOT_RENDER):
- return
-
- # certain renderables should start on a new line
- if _safe_isinstance(value, ConsoleRenderable):
- console.line()
-
- console.print(
- value
- if _safe_isinstance(value, RichRenderable)
- else Pretty(
- value,
- overflow=overflow,
- indent_guides=indent_guides,
- max_length=max_length,
- max_string=max_string,
- max_depth=max_depth,
- expand_all=expand_all,
- margin=12,
- ),
- crop=crop,
- new_line_start=True,
- )
+
+ with console.capture() as capture:
+ # certain renderables should start on a new line
+ if _safe_isinstance(value, ConsoleRenderable):
+ console.line()
+ console.print(
+ value
+ if _safe_isinstance(value, RichRenderable)
+ else Pretty(
+ value,
+ overflow=overflow,
+ indent_guides=indent_guides,
+ max_length=max_length,
+ max_string=max_string,
+ max_depth=max_depth,
+ expand_all=expand_all,
+ margin=12,
+ ),
+ crop=crop,
+ new_line_start=True,
+ end="",
+ )
+ # strip trailing newline, not usually part of a text repr
+ # I'm not sure if this should be prevented at a lower level
+ return capture.get().rstrip("\n")
def _safe_isinstance(
@@ -247,7 +211,7 @@ def install(
)
builtins._ = value # type: ignore[attr-defined]
- try: # pragma: no cover
+ if "get_ipython" in globals():
ip = get_ipython() # type: ignore[name-defined]
from IPython.core.formatters import BaseFormatter
@@ -272,7 +236,7 @@ def install(
# replace plain text formatter with rich formatter
rich_formatter = RichFormatter()
ip.display_formatter.formatters["text/plain"] = rich_formatter
- except Exception:
+ else:
sys.displayhook = display_hook
@@ -371,6 +335,7 @@ class Pretty(JupyterMixin):
indent_size=self.indent_size,
max_length=self.max_length,
max_string=self.max_string,
+ max_depth=self.max_depth,
expand_all=self.expand_all,
)
text_width = (
@@ -433,7 +398,7 @@ class Node:
is_tuple: bool = False
is_namedtuple: bool = False
children: Optional[List["Node"]] = None
- key_separator = ": "
+ key_separator: str = ": "
separator: str = ", "
def iter_tokens(self) -> Iterable[str]:
@@ -642,7 +607,6 @@ def traverse(
return Node(value_repr="...")
obj_type = type(obj)
- py_version = (sys.version_info.major, sys.version_info.minor)
children: List[Node]
reached_max_depth = max_depth is not None and depth >= max_depth
@@ -780,7 +744,7 @@ def traverse(
is_dataclass(obj)
and not _safe_isinstance(obj, type)
and not fake_attributes
- and (_is_dataclass_repr(obj) or py_version == (3, 6))
+ and _is_dataclass_repr(obj)
):
push_visited(obj_id)
children = []
@@ -793,6 +757,7 @@ def traverse(
close_brace=")",
children=children,
last=root,
+ empty=f"{obj.__class__.__name__}()",
)
for last, field in loop_last(
diff --git a/src/pip/_vendor/rich/progress.py b/src/pip/_vendor/rich/progress.py
index e7d163c13..8b0a315f3 100644
--- a/src/pip/_vendor/rich/progress.py
+++ b/src/pip/_vendor/rich/progress.py
@@ -4,12 +4,12 @@ import typing
import warnings
from abc import ABC, abstractmethod
from collections import deque
-from collections.abc import Sized
from dataclasses import dataclass, field
from datetime import timedelta
from io import RawIOBase, UnsupportedOperation
from math import ceil
from mmap import mmap
+from operator import length_hint
from os import PathLike, stat
from threading import Event, RLock, Thread
from types import TracebackType
@@ -151,7 +151,7 @@ def track(
pulse_style=pulse_style,
),
TaskProgressColumn(show_speed=show_speed),
- TimeRemainingColumn(),
+ TimeRemainingColumn(elapsed_when_finished=True),
)
)
progress = Progress(
@@ -677,7 +677,7 @@ class TimeElapsedColumn(ProgressColumn):
"""Renders time elapsed."""
def render(self, task: "Task") -> Text:
- """Show time remaining."""
+ """Show time elapsed."""
elapsed = task.finished_time if task.finished else task.elapsed
if elapsed is None:
return Text("-:--:--", style="progress.elapsed")
@@ -1197,18 +1197,13 @@ class Progress(JupyterMixin):
Returns:
Iterable[ProgressType]: An iterable of values taken from the provided sequence.
"""
-
- task_total: Optional[float] = None
if total is None:
- if isinstance(sequence, Sized):
- task_total = float(len(sequence))
- else:
- task_total = total
+ total = float(length_hint(sequence)) or None
if task_id is None:
- task_id = self.add_task(description, total=task_total)
+ task_id = self.add_task(description, total=total)
else:
- self.update(task_id, total=task_total)
+ self.update(task_id, total=total)
if self.live.auto_refresh:
with _TrackThread(self, task_id, update_period) as track_thread:
@@ -1342,7 +1337,7 @@ class Progress(JupyterMixin):
RuntimeWarning,
)
buffering = -1
- elif _mode == "rt" or _mode == "r":
+ elif _mode in ("rt", "r"):
if buffering == 0:
raise ValueError("can't have unbuffered text I/O")
elif buffering == 1:
@@ -1363,7 +1358,7 @@ class Progress(JupyterMixin):
reader = _Reader(handle, self, task_id, close_handle=True)
# wrap the reader in a `TextIOWrapper` if text mode
- if mode == "r" or mode == "rt":
+ if mode in ("r", "rt"):
return io.TextIOWrapper(
reader,
encoding=encoding,
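
A small sketch of what the switch from Sized/len() to operator.length_hint buys in track() above: lazy iterables that advertise a length still get a total, while unknown lengths fall back to 0, which `float(...) or None` turns back into None (an indeterminate bar).

from operator import length_hint

assert length_hint([1, 2, 3]) == 3          # Sized: same as len()
assert length_hint(iter(range(10))) == 10   # lazy, but provides __length_hint__
assert length_hint(x for x in "ab") == 0    # generators give no hint
# float(0) or None -> None, so the task total stays indeterminate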
diff --git a/src/pip/_vendor/rich/repr.py b/src/pip/_vendor/rich/repr.py
index 72d1a7e30..f284bcafa 100644
--- a/src/pip/_vendor/rich/repr.py
+++ b/src/pip/_vendor/rich/repr.py
@@ -55,7 +55,7 @@ def auto(
if key is None:
append(repr(value))
else:
- if len(default) and default[0] == value:
+ if default and default[0] == value:
continue
append(f"{key}={value!r}")
else:
diff --git a/src/pip/_vendor/rich/rule.py b/src/pip/_vendor/rich/rule.py
index 0b78f7a4e..fd00ce6e4 100644
--- a/src/pip/_vendor/rich/rule.py
+++ b/src/pip/_vendor/rich/rule.py
@@ -51,13 +51,9 @@ class Rule(JupyterMixin):
) -> RenderResult:
width = options.max_width
- # Python3.6 doesn't have an isascii method on str
- isascii = getattr(str, "isascii", None) or (
- lambda s: all(ord(c) < 128 for c in s)
- )
characters = (
"-"
- if (options.ascii_only and not isascii(self.characters))
+ if (options.ascii_only and not self.characters.isascii())
else self.characters
)
diff --git a/src/pip/_vendor/rich/segment.py b/src/pip/_vendor/rich/segment.py
index 1ea5435ad..e12579846 100644
--- a/src/pip/_vendor/rich/segment.py
+++ b/src/pip/_vendor/rich/segment.py
@@ -119,7 +119,7 @@ class Segment(NamedTuple):
cell_size = get_character_cell_size
- pos = int((cut / cell_length) * len(text))
+ pos = int((cut / cell_length) * (len(text) - 1))
before = text[:pos]
cell_pos = cell_len(before)
@@ -303,7 +303,7 @@ class Segment(NamedTuple):
if include_new_lines:
cropped_line.append(new_line_segment)
yield cropped_line
- del line[:]
+ line.clear()
else:
append(segment)
if line:
@@ -365,7 +365,7 @@ class Segment(NamedTuple):
int: The length of the line.
"""
_cell_len = cell_len
- return sum(_cell_len(segment.text) for segment in line)
+ return sum(_cell_len(text) for text, style, control in line if not control)
@classmethod
def get_shape(cls, lines: List[List["Segment"]]) -> Tuple[int, int]:
@@ -727,7 +727,7 @@ console.print(text)"""
console.print(Syntax(code, "python", line_numbers=True))
console.print()
console.print(
- "When you call [b]print()[/b], Rich [i]renders[/i] the object in to the the following:\n"
+ "When you call [b]print()[/b], Rich [i]renders[/i] the object in to the following:\n"
)
fragments = list(console.render(text))
console.print(fragments)
diff --git a/src/pip/_vendor/rich/spinner.py b/src/pip/_vendor/rich/spinner.py
index 0879088e1..91ea630e1 100644
--- a/src/pip/_vendor/rich/spinner.py
+++ b/src/pip/_vendor/rich/spinner.py
@@ -11,6 +11,18 @@ if TYPE_CHECKING:
class Spinner:
+ """A spinner animation.
+
+ Args:
+ name (str): Name of spinner (run python -m rich.spinner).
+ text (RenderableType, optional): A renderable to display to the right of the spinner (str or Text typically). Defaults to "".
+ style (StyleType, optional): Style for spinner animation. Defaults to None.
+ speed (float, optional): Speed factor for animation. Defaults to 1.0.
+
+ Raises:
+ KeyError: If name isn't one of the supported spinner animations.
+ """
+
def __init__(
self,
name: str,
@@ -19,17 +31,6 @@ class Spinner:
style: Optional["StyleType"] = None,
speed: float = 1.0,
) -> None:
- """A spinner animation.
-
- Args:
- name (str): Name of spinner (run python -m rich.spinner).
- text (RenderableType, optional): A renderable to display at the right of the spinner (str or Text typically). Defaults to "".
- style (StyleType, optional): Style for spinner animation. Defaults to None.
- speed (float, optional): Speed factor for animation. Defaults to 1.0.
-
- Raises:
- KeyError: If name isn't one of the supported spinner animations.
- """
try:
spinner = SPINNERS[name]
except KeyError:
diff --git a/src/pip/_vendor/rich/style.py b/src/pip/_vendor/rich/style.py
index ad388aadb..313c88949 100644
--- a/src/pip/_vendor/rich/style.py
+++ b/src/pip/_vendor/rich/style.py
@@ -645,6 +645,29 @@ class Style:
style._meta = self._meta
return style
+ @lru_cache(maxsize=128)
+ def clear_meta_and_links(self) -> "Style":
+ """Get a copy of this style with link and meta information removed.
+
+ Returns:
+ Style: New style object.
+ """
+ if self._null:
+ return NULL_STYLE
+ style: Style = self.__new__(Style)
+ style._ansi = self._ansi
+ style._style_definition = self._style_definition
+ style._color = self._color
+ style._bgcolor = self._bgcolor
+ style._attributes = self._attributes
+ style._set_attributes = self._set_attributes
+ style._link = None
+ style._link_id = ""
+ style._hash = self._hash
+ style._null = False
+ style._meta = None
+ return style
+
def update_link(self, link: Optional[str] = None) -> "Style":
"""Get a copy with a different value for link.
diff --git a/src/pip/_vendor/rich/syntax.py b/src/pip/_vendor/rich/syntax.py
index 01bdd0439..25b226a3a 100644
--- a/src/pip/_vendor/rich/syntax.py
+++ b/src/pip/_vendor/rich/syntax.py
@@ -4,6 +4,7 @@ import re
import sys
import textwrap
from abc import ABC, abstractmethod
+from pathlib import Path
from typing import (
Any,
Dict,
@@ -338,8 +339,7 @@ class Syntax(JupyterMixin):
Returns:
[Syntax]: A Syntax object that may be printed to the console
"""
- with open(path, "rt", encoding=encoding) as code_file:
- code = code_file.read()
+ code = Path(path).read_text(encoding=encoding)
if not lexer:
lexer = cls.guess_lexer(path, code=code)
@@ -494,7 +494,10 @@ class Syntax(JupyterMixin):
# Skip over tokens until line start
while line_no < _line_start:
- _token_type, token = next(tokens)
+ try:
+ _token_type, token = next(tokens)
+ except StopIteration:
+ break
yield (token, None)
if token.endswith("\n"):
line_no += 1
@@ -671,6 +674,8 @@ class Syntax(JupyterMixin):
line_offset = max(0, start_line - 1)
lines: Union[List[Text], Lines] = text.split("\n", allow_blank=ends_on_nl)
if self.line_range:
+ if line_offset > len(lines):
+ return
lines = lines[line_offset:end_line]
if self.indent_guides and not options.ascii_only:
diff --git a/src/pip/_vendor/rich/text.py b/src/pip/_vendor/rich/text.py
index b14055aa7..998cb87da 100644
--- a/src/pip/_vendor/rich/text.py
+++ b/src/pip/_vendor/rich/text.py
@@ -53,11 +53,7 @@ class Span(NamedTuple):
"""Style associated with the span."""
def __repr__(self) -> str:
- return (
- f"Span({self.start}, {self.end}, {self.style!r})"
- if (isinstance(self.style, Style) and self.style._meta)
- else f"Span({self.start}, {self.end}, {repr(self.style)})"
- )
+ return f"Span({self.start}, {self.end}, {self.style!r})"
def __bool__(self) -> bool:
return self.end > self.start
@@ -1204,7 +1200,7 @@ class Text(JupyterMixin):
width (int): Maximum characters in a line.
Returns:
- Lines: List of lines.
+ Lines: Lines container.
"""
lines: Lines = Lines()
append = lines.append
diff --git a/src/pip/_vendor/rich/theme.py b/src/pip/_vendor/rich/theme.py
index bfb3c7f82..471dfb2f9 100644
--- a/src/pip/_vendor/rich/theme.py
+++ b/src/pip/_vendor/rich/theme.py
@@ -56,17 +56,20 @@ class Theme:
return theme
@classmethod
- def read(cls, path: str, inherit: bool = True) -> "Theme":
+ def read(
+ cls, path: str, inherit: bool = True, encoding: Optional[str] = None
+ ) -> "Theme":
"""Read a theme from a path.
Args:
path (str): Path to a config file readable by Python configparser module.
inherit (bool, optional): Inherit default styles. Defaults to True.
+ encoding (str, optional): Encoding of the config file. Defaults to None.
Returns:
Theme: A new theme instance.
"""
- with open(path, "rt") as config_file:
+ with open(path, "rt", encoding=encoding) as config_file:
return cls.from_file(config_file, source=path, inherit=inherit)
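
Usage sketch for the new parameter (the path is hypothetical): callers can now pin the config file encoding instead of relying on the locale default.

from pip._vendor.rich.theme import Theme

theme = Theme.read("my_theme.ini", encoding="utf-8")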
diff --git a/src/pip/_vendor/rich/traceback.py b/src/pip/_vendor/rich/traceback.py
index 1f481298f..c4ffe1f99 100644
--- a/src/pip/_vendor/rich/traceback.py
+++ b/src/pip/_vendor/rich/traceback.py
@@ -1,12 +1,24 @@
from __future__ import absolute_import
+import linecache
import os
import platform
import sys
from dataclasses import dataclass, field
from traceback import walk_tb
from types import ModuleType, TracebackType
-from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Type, Union
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ List,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+)
from pip._vendor.pygments.lexers import guess_lexer_for_filename
from pip._vendor.pygments.token import Comment, Keyword, Name, Number, Operator, String
@@ -41,6 +53,10 @@ def install(
theme: Optional[str] = None,
word_wrap: bool = False,
show_locals: bool = False,
+ locals_max_length: int = LOCALS_MAX_LENGTH,
+ locals_max_string: int = LOCALS_MAX_STRING,
+ locals_hide_dunder: bool = True,
+ locals_hide_sunder: Optional[bool] = None,
indent_guides: bool = True,
suppress: Iterable[Union[str, ModuleType]] = (),
max_frames: int = 100,
@@ -58,6 +74,11 @@ def install(
a theme appropriate for the platform.
word_wrap (bool, optional): Enable word wrapping of long lines. Defaults to False.
show_locals (bool, optional): Enable display of local variables. Defaults to False.
+ locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
+ Defaults to 10.
+ locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80.
+ locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True.
+ locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False.
indent_guides (bool, optional): Enable indent guides in code and locals. Defaults to True.
suppress (Sequence[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback.
@@ -65,7 +86,13 @@ def install(
Callable: The previous exception handler that was replaced.
"""
- traceback_console = Console(file=sys.stderr) if console is None else console
+ traceback_console = Console(stderr=True) if console is None else console
+
+ locals_hide_sunder = (
+ True
+ if (traceback_console.is_jupyter and locals_hide_sunder is None)
+ else locals_hide_sunder
+ )
def excepthook(
type_: Type[BaseException],
@@ -82,6 +109,10 @@ def install(
theme=theme,
word_wrap=word_wrap,
show_locals=show_locals,
+ locals_max_length=locals_max_length,
+ locals_max_string=locals_max_string,
+ locals_hide_dunder=locals_hide_dunder,
+ locals_hide_sunder=bool(locals_hide_sunder),
indent_guides=indent_guides,
suppress=suppress,
max_frames=max_frames,
@@ -192,6 +223,8 @@ class Traceback:
locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to 10.
locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80.
+ locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True.
+ locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False.
suppress (Sequence[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback.
max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100.
@@ -208,14 +241,17 @@ class Traceback:
def __init__(
self,
trace: Optional[Trace] = None,
+ *,
width: Optional[int] = 100,
extra_lines: int = 3,
theme: Optional[str] = None,
word_wrap: bool = False,
show_locals: bool = False,
- indent_guides: bool = True,
locals_max_length: int = LOCALS_MAX_LENGTH,
locals_max_string: int = LOCALS_MAX_STRING,
+ locals_hide_dunder: bool = True,
+ locals_hide_sunder: bool = False,
+ indent_guides: bool = True,
suppress: Iterable[Union[str, ModuleType]] = (),
max_frames: int = 100,
):
@@ -237,6 +273,8 @@ class Traceback:
self.indent_guides = indent_guides
self.locals_max_length = locals_max_length
self.locals_max_string = locals_max_string
+ self.locals_hide_dunder = locals_hide_dunder
+ self.locals_hide_sunder = locals_hide_sunder
self.suppress: Sequence[str] = []
for suppress_entity in suppress:
@@ -257,14 +295,17 @@ class Traceback:
exc_type: Type[Any],
exc_value: BaseException,
traceback: Optional[TracebackType],
+ *,
width: Optional[int] = 100,
extra_lines: int = 3,
theme: Optional[str] = None,
word_wrap: bool = False,
show_locals: bool = False,
- indent_guides: bool = True,
locals_max_length: int = LOCALS_MAX_LENGTH,
locals_max_string: int = LOCALS_MAX_STRING,
+ locals_hide_dunder: bool = True,
+ locals_hide_sunder: bool = False,
+ indent_guides: bool = True,
suppress: Iterable[Union[str, ModuleType]] = (),
max_frames: int = 100,
) -> "Traceback":
@@ -283,6 +324,8 @@ class Traceback:
locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to 10.
locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80.
+ locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True.
+ locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False.
suppress (Iterable[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback.
max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100.
@@ -290,8 +333,16 @@ class Traceback:
Traceback: A Traceback instance that may be printed.
"""
rich_traceback = cls.extract(
- exc_type, exc_value, traceback, show_locals=show_locals
+ exc_type,
+ exc_value,
+ traceback,
+ show_locals=show_locals,
+ locals_max_length=locals_max_length,
+ locals_max_string=locals_max_string,
+ locals_hide_dunder=locals_hide_dunder,
+ locals_hide_sunder=locals_hide_sunder,
)
+
return cls(
rich_traceback,
width=width,
@@ -302,6 +353,8 @@ class Traceback:
indent_guides=indent_guides,
locals_max_length=locals_max_length,
locals_max_string=locals_max_string,
+ locals_hide_dunder=locals_hide_dunder,
+ locals_hide_sunder=locals_hide_sunder,
suppress=suppress,
max_frames=max_frames,
)
@@ -312,9 +365,12 @@ class Traceback:
exc_type: Type[BaseException],
exc_value: BaseException,
traceback: Optional[TracebackType],
+ *,
show_locals: bool = False,
locals_max_length: int = LOCALS_MAX_LENGTH,
locals_max_string: int = LOCALS_MAX_STRING,
+ locals_hide_dunder: bool = True,
+ locals_hide_sunder: bool = False,
) -> Trace:
"""Extract traceback information.
@@ -326,6 +382,8 @@ class Traceback:
locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to 10.
locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80.
+ locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True.
+ locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False.
Returns:
Trace: A Trace instance which you can use to construct a `Traceback`.
@@ -362,6 +420,20 @@ class Traceback:
stacks.append(stack)
append = stack.frames.append
+ def get_locals(
+ iter_locals: Iterable[Tuple[str, object]]
+ ) -> Iterable[Tuple[str, object]]:
+ """Extract locals from an iterator of key pairs."""
+ if not (locals_hide_dunder or locals_hide_sunder):
+ yield from iter_locals
+ return
+ for key, value in iter_locals:
+ if locals_hide_dunder and key.startswith("__"):
+ continue
+ if locals_hide_sunder and key.startswith("_"):
+ continue
+ yield key, value
+
for frame_summary, line_no in walk_tb(traceback):
filename = frame_summary.f_code.co_filename
if filename and not filename.startswith("<"):
@@ -369,6 +441,7 @@ class Traceback:
filename = os.path.join(_IMPORT_CWD, filename)
if frame_summary.f_locals.get("_rich_traceback_omit", False):
continue
+
frame = Frame(
filename=filename or "?",
lineno=line_no,
@@ -379,7 +452,7 @@ class Traceback:
max_length=locals_max_length,
max_string=locals_max_string,
)
- for key, value in frame_summary.f_locals.items()
+ for key, value in get_locals(frame_summary.f_locals.items())
}
if show_locals
else None,
@@ -494,13 +567,14 @@ class Traceback:
highlighter = ReprHighlighter()
path_highlighter = PathHighlighter()
if syntax_error.filename != "<stdin>":
- text = Text.assemble(
- (f" {syntax_error.filename}", "pygments.string"),
- (":", "pygments.text"),
- (str(syntax_error.lineno), "pygments.number"),
- style="pygments.text",
- )
- yield path_highlighter(text)
+ if os.path.exists(syntax_error.filename):
+ text = Text.assemble(
+ (f" {syntax_error.filename}", "pygments.string"),
+ (":", "pygments.text"),
+ (str(syntax_error.lineno), "pygments.number"),
+ style="pygments.text",
+ )
+ yield path_highlighter(text)
syntax_error_text = highlighter(syntax_error.line.rstrip())
syntax_error_text.no_wrap = True
offset = min(syntax_error.offset - 1, len(syntax_error_text))
@@ -531,7 +605,6 @@ class Traceback:
def _render_stack(self, stack: Stack) -> RenderResult:
path_highlighter = PathHighlighter()
theme = self.theme
- code_cache: Dict[str, str] = {}
def read_code(filename: str) -> str:
"""Read files, and cache results on filename.
@@ -542,14 +615,7 @@ class Traceback:
Returns:
str: Contents of file
"""
- code = code_cache.get(filename)
- if code is None:
- with open(
- filename, "rt", encoding="utf-8", errors="replace"
- ) as code_file:
- code = code_file.read()
- code_cache[filename] = code
- return code
+ return "".join(linecache.getlines(filename))
def render_locals(frame: Frame) -> Iterable[ConsoleRenderable]:
if frame.locals:
@@ -588,14 +654,23 @@ class Traceback:
frame_filename = frame.filename
suppressed = any(frame_filename.startswith(path) for path in self.suppress)
- text = Text.assemble(
- path_highlighter(Text(frame.filename, style="pygments.string")),
- (":", "pygments.text"),
- (str(frame.lineno), "pygments.number"),
- " in ",
- (frame.name, "pygments.function"),
- style="pygments.text",
- )
+ if os.path.exists(frame.filename):
+ text = Text.assemble(
+ path_highlighter(Text(frame.filename, style="pygments.string")),
+ (":", "pygments.text"),
+ (str(frame.lineno), "pygments.number"),
+ " in ",
+ (frame.name, "pygments.function"),
+ style="pygments.text",
+ )
+ else:
+ text = Text.assemble(
+ "in ",
+ (frame.name, "pygments.function"),
+ (":", "pygments.text"),
+ (str(frame.lineno), "pygments.number"),
+ style="pygments.text",
+ )
if not frame.filename.startswith("<") and not first:
yield ""
yield text
@@ -605,6 +680,10 @@ class Traceback:
if not suppressed:
try:
code = read_code(frame.filename)
+ if not code:
+ # code may be an empty string if the file doesn't exist, OR
+ # if the traceback filename is generated dynamically
+ continue
lexer_name = self._guess_lexer(frame.filename, code)
syntax = Syntax(
code,
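
Usage sketch for the new locals-filtering knobs added throughout this file (values shown are the documented defaults):

from pip._vendor.rich import traceback

traceback.install(
    show_locals=True,
    locals_hide_dunder=True,   # hide __name__, __doc__, ...
    locals_hide_sunder=None,   # None -> hidden automatically under Jupyter
)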
diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__init__.py
index ab3be3bf6..4f1603ade 100644
--- a/src/pip/_vendor/tenacity/__init__.py
+++ b/src/pip/_vendor/tenacity/__init__.py
@@ -16,6 +16,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+
import functools
import sys
import threading
@@ -88,51 +89,13 @@ tornado = None # type: ignore
if t.TYPE_CHECKING:
import types
- from .wait import wait_base
- from .stop import stop_base
-
-
-WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable)
-_RetValT = t.TypeVar("_RetValT")
-
-
-@t.overload
-def retry(fn: WrappedFn) -> WrappedFn:
- pass
-
-
-@t.overload
-def retry(*dargs: t.Any, **dkw: t.Any) -> t.Callable[[WrappedFn], WrappedFn]: # noqa
- pass
-
-
-def retry(*dargs: t.Any, **dkw: t.Any) -> t.Union[WrappedFn, t.Callable[[WrappedFn], WrappedFn]]: # noqa
- """Wrap a function with a new `Retrying` object.
-
- :param dargs: positional arguments passed to Retrying object
- :param dkw: keyword arguments passed to the Retrying object
- """
- # support both @retry and @retry() as valid syntax
- if len(dargs) == 1 and callable(dargs[0]):
- return retry()(dargs[0])
- else:
-
- def wrap(f: WrappedFn) -> WrappedFn:
- if isinstance(f, retry_base):
- warnings.warn(
- f"Got retry_base instance ({f.__class__.__name__}) as callable argument, "
- f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)"
- )
- if iscoroutinefunction(f):
- r: "BaseRetrying" = AsyncRetrying(*dargs, **dkw)
- elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f):
- r = TornadoRetrying(*dargs, **dkw)
- else:
- r = Retrying(*dargs, **dkw)
+ from .retry import RetryBaseT
+ from .stop import StopBaseT
+ from .wait import WaitBaseT
- return r.wraps(f)
- return wrap
+WrappedFnReturnT = t.TypeVar("WrappedFnReturnT")
+WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Any])
class TryAgain(Exception):
@@ -216,7 +179,7 @@ class AttemptManager:
exc_value: t.Optional[BaseException],
traceback: t.Optional["types.TracebackType"],
) -> t.Optional[bool]:
- if isinstance(exc_value, BaseException):
+ if exc_type is not None and exc_value is not None:
self.retry_state.set_exception((exc_type, exc_value, traceback))
return True # Swallow exception.
else:
@@ -229,9 +192,9 @@ class BaseRetrying(ABC):
def __init__(
self,
sleep: t.Callable[[t.Union[int, float]], None] = sleep,
- stop: "stop_base" = stop_never,
- wait: "wait_base" = wait_none(),
- retry: retry_base = retry_if_exception_type(),
+ stop: "StopBaseT" = stop_never,
+ wait: "WaitBaseT" = wait_none(),
+ retry: "RetryBaseT" = retry_if_exception_type(),
before: t.Callable[["RetryCallState"], None] = before_nothing,
after: t.Callable[["RetryCallState"], None] = after_nothing,
before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
@@ -254,8 +217,8 @@ class BaseRetrying(ABC):
def copy(
self,
sleep: t.Union[t.Callable[[t.Union[int, float]], None], object] = _unset,
- stop: t.Union["stop_base", object] = _unset,
- wait: t.Union["wait_base", object] = _unset,
+ stop: t.Union["StopBaseT", object] = _unset,
+ wait: t.Union["WaitBaseT", object] = _unset,
retry: t.Union[retry_base, object] = _unset,
before: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
after: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
@@ -312,9 +275,9 @@ class BaseRetrying(ABC):
statistics from each thread).
"""
try:
- return self._local.statistics
+ return self._local.statistics # type: ignore[no-any-return]
except AttributeError:
- self._local.statistics = {}
+ self._local.statistics = t.cast(t.Dict[str, t.Any], {})
return self._local.statistics
def wraps(self, f: WrappedFn) -> WrappedFn:
@@ -330,10 +293,10 @@ class BaseRetrying(ABC):
def retry_with(*args: t.Any, **kwargs: t.Any) -> WrappedFn:
return self.copy(*args, **kwargs).wraps(f)
- wrapped_f.retry = self
- wrapped_f.retry_with = retry_with
+ wrapped_f.retry = self # type: ignore[attr-defined]
+ wrapped_f.retry_with = retry_with # type: ignore[attr-defined]
- return wrapped_f
+ return wrapped_f # type: ignore[return-value]
def begin(self) -> None:
self.statistics.clear()
@@ -348,15 +311,15 @@ class BaseRetrying(ABC):
self.before(retry_state)
return DoAttempt()
- is_explicit_retry = retry_state.outcome.failed and isinstance(retry_state.outcome.exception(), TryAgain)
- if not (is_explicit_retry or self.retry(retry_state=retry_state)):
+ is_explicit_retry = fut.failed and isinstance(fut.exception(), TryAgain)
+ if not (is_explicit_retry or self.retry(retry_state)):
return fut.result()
if self.after is not None:
self.after(retry_state)
self.statistics["delay_since_first_attempt"] = retry_state.seconds_since_start
- if self.stop(retry_state=retry_state):
+ if self.stop(retry_state):
if self.retry_error_callback:
return self.retry_error_callback(retry_state)
retry_exc = self.retry_error_cls(fut)
@@ -365,7 +328,7 @@ class BaseRetrying(ABC):
raise retry_exc from fut.exception()
if self.wait:
- sleep = self.wait(retry_state=retry_state)
+ sleep = self.wait(retry_state)
else:
sleep = 0.0
retry_state.next_action = RetryAction(sleep)
@@ -393,14 +356,24 @@ class BaseRetrying(ABC):
break
@abstractmethod
- def __call__(self, fn: t.Callable[..., _RetValT], *args: t.Any, **kwargs: t.Any) -> _RetValT:
+ def __call__(
+ self,
+ fn: t.Callable[..., WrappedFnReturnT],
+ *args: t.Any,
+ **kwargs: t.Any,
+ ) -> WrappedFnReturnT:
pass
class Retrying(BaseRetrying):
"""Retrying controller."""
- def __call__(self, fn: t.Callable[..., _RetValT], *args: t.Any, **kwargs: t.Any) -> _RetValT:
+ def __call__(
+ self,
+ fn: t.Callable[..., WrappedFnReturnT],
+ *args: t.Any,
+ **kwargs: t.Any,
+ ) -> WrappedFnReturnT:
self.begin()
retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
@@ -410,17 +383,23 @@ class Retrying(BaseRetrying):
try:
result = fn(*args, **kwargs)
except BaseException: # noqa: B902
- retry_state.set_exception(sys.exc_info())
+ retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type]
else:
retry_state.set_result(result)
elif isinstance(do, DoSleep):
retry_state.prepare_for_next_attempt()
self.sleep(do)
else:
- return do
+ return do # type: ignore[no-any-return]
+
+
+if sys.version_info >= (3, 9):  # futures.Future became subscriptable at runtime in 3.9
+ FutureGenericT = futures.Future[t.Any]
+else:
+ FutureGenericT = futures.Future
-class Future(futures.Future):
+class Future(FutureGenericT):
"""Encapsulates a (future or past) attempted call to a target function."""
def __init__(self, attempt_number: int) -> None:
@@ -493,13 +472,15 @@ class RetryCallState:
fut.set_result(val)
self.outcome, self.outcome_timestamp = fut, ts
- def set_exception(self, exc_info: t.Tuple[t.Type[BaseException], BaseException, "types.TracebackType"]) -> None:
+ def set_exception(
+ self, exc_info: t.Tuple[t.Type[BaseException], BaseException, "types.TracebackType | None"]
+ ) -> None:
ts = time.monotonic()
fut = Future(self.attempt_number)
fut.set_exception(exc_info[1])
self.outcome, self.outcome_timestamp = fut, ts
- def __repr__(self):
+ def __repr__(self) -> str:
if self.outcome is None:
result = "none yet"
elif self.outcome.failed:
@@ -513,7 +494,115 @@ class RetryCallState:
return f"<{clsname} {id(self)}: attempt #{self.attempt_number}; slept for {slept}; last result: {result}>"
+@t.overload
+def retry(func: WrappedFn) -> WrappedFn:
+ ...
+
+
+@t.overload
+def retry(
+ sleep: t.Callable[[t.Union[int, float]], None] = sleep,
+ stop: "StopBaseT" = stop_never,
+ wait: "WaitBaseT" = wait_none(),
+ retry: "RetryBaseT" = retry_if_exception_type(),
+ before: t.Callable[["RetryCallState"], None] = before_nothing,
+ after: t.Callable[["RetryCallState"], None] = after_nothing,
+ before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
+ reraise: bool = False,
+ retry_error_cls: t.Type["RetryError"] = RetryError,
+ retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None,
+) -> t.Callable[[WrappedFn], WrappedFn]:
+ ...
+
+
+def retry(*dargs: t.Any, **dkw: t.Any) -> t.Any:
+ """Wrap a function with a new `Retrying` object.
+
+ :param dargs: positional arguments passed to Retrying object
+ :param dkw: keyword arguments passed to the Retrying object
+ """
+ # support both @retry and @retry() as valid syntax
+ if len(dargs) == 1 and callable(dargs[0]):
+ return retry()(dargs[0])
+ else:
+
+ def wrap(f: WrappedFn) -> WrappedFn:
+ if isinstance(f, retry_base):
+ warnings.warn(
+ f"Got retry_base instance ({f.__class__.__name__}) as callable argument, "
+ f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)"
+ )
+ r: "BaseRetrying"
+ if iscoroutinefunction(f):
+ r = AsyncRetrying(*dargs, **dkw)
+ elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f):
+ r = TornadoRetrying(*dargs, **dkw)
+ else:
+ r = Retrying(*dargs, **dkw)
+
+ return r.wraps(f)
+
+ return wrap
+
+
from pip._vendor.tenacity._asyncio import AsyncRetrying # noqa:E402,I100
if tornado:
from pip._vendor.tenacity.tornadoweb import TornadoRetrying
+
+
+__all__ = [
+ "retry_base",
+ "retry_all",
+ "retry_always",
+ "retry_any",
+ "retry_if_exception",
+ "retry_if_exception_type",
+ "retry_if_exception_cause_type",
+ "retry_if_not_exception_type",
+ "retry_if_not_result",
+ "retry_if_result",
+ "retry_never",
+ "retry_unless_exception_type",
+ "retry_if_exception_message",
+ "retry_if_not_exception_message",
+ "sleep",
+ "sleep_using_event",
+ "stop_after_attempt",
+ "stop_after_delay",
+ "stop_all",
+ "stop_any",
+ "stop_never",
+ "stop_when_event_set",
+ "wait_chain",
+ "wait_combine",
+ "wait_exponential",
+ "wait_fixed",
+ "wait_incrementing",
+ "wait_none",
+ "wait_random",
+ "wait_random_exponential",
+ "wait_full_jitter",
+ "wait_exponential_jitter",
+ "before_log",
+ "before_nothing",
+ "after_log",
+ "after_nothing",
+ "before_sleep_log",
+ "before_sleep_nothing",
+ "retry",
+ "WrappedFn",
+ "TryAgain",
+ "NO_RESULT",
+ "DoAttempt",
+ "DoSleep",
+ "BaseAction",
+ "RetryAction",
+ "RetryError",
+ "AttemptManager",
+ "BaseRetrying",
+ "Retrying",
+ "Future",
+ "RetryCallState",
+ "AsyncRetrying",
+]
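
Usage sketch: both decorator spellings that the relocated @t.overload pair above describes.

from pip._vendor.tenacity import retry, stop_after_attempt, wait_fixed

@retry  # bare form, handled by the len(dargs) == 1 branch
def fetch_once():
    ...

@retry(stop=stop_after_attempt(3), wait=wait_fixed(0.5))  # configured form
def fetch_with_policy():
    ...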
diff --git a/src/pip/_vendor/tenacity/_asyncio.py b/src/pip/_vendor/tenacity/_asyncio.py
index 0f32b5f62..2e50cd7b4 100644
--- a/src/pip/_vendor/tenacity/_asyncio.py
+++ b/src/pip/_vendor/tenacity/_asyncio.py
@@ -17,7 +17,7 @@
import functools
import sys
-import typing
+import typing as t
from asyncio import sleep
from pip._vendor.tenacity import AttemptManager
@@ -26,21 +26,20 @@ from pip._vendor.tenacity import DoAttempt
from pip._vendor.tenacity import DoSleep
from pip._vendor.tenacity import RetryCallState
-WrappedFn = typing.TypeVar("WrappedFn", bound=typing.Callable)
-_RetValT = typing.TypeVar("_RetValT")
+WrappedFnReturnT = t.TypeVar("WrappedFnReturnT")
+WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Awaitable[t.Any]])
class AsyncRetrying(BaseRetrying):
- def __init__(self, sleep: typing.Callable[[float], typing.Awaitable] = sleep, **kwargs: typing.Any) -> None:
+ sleep: t.Callable[[float], t.Awaitable[t.Any]]
+
+ def __init__(self, sleep: t.Callable[[float], t.Awaitable[t.Any]] = sleep, **kwargs: t.Any) -> None:
super().__init__(**kwargs)
self.sleep = sleep
- async def __call__( # type: ignore # Change signature from supertype
- self,
- fn: typing.Callable[..., typing.Awaitable[_RetValT]],
- *args: typing.Any,
- **kwargs: typing.Any,
- ) -> _RetValT:
+ async def __call__( # type: ignore[override]
+ self, fn: WrappedFn, *args: t.Any, **kwargs: t.Any
+ ) -> WrappedFnReturnT:
self.begin()
retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
@@ -50,21 +49,24 @@ class AsyncRetrying(BaseRetrying):
try:
result = await fn(*args, **kwargs)
except BaseException: # noqa: B902
- retry_state.set_exception(sys.exc_info())
+ retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type]
else:
retry_state.set_result(result)
elif isinstance(do, DoSleep):
retry_state.prepare_for_next_attempt()
await self.sleep(do)
else:
- return do
+ return do # type: ignore[no-any-return]
+
+ def __iter__(self) -> t.Generator[AttemptManager, None, None]:
+ raise TypeError("AsyncRetrying object is not iterable")
def __aiter__(self) -> "AsyncRetrying":
self.begin()
self._retry_state = RetryCallState(self, fn=None, args=(), kwargs={})
return self
- async def __anext__(self) -> typing.Union[AttemptManager, typing.Any]:
+ async def __anext__(self) -> AttemptManager:
while True:
do = self.iter(retry_state=self._retry_state)
if do is None:
@@ -75,18 +77,18 @@ class AsyncRetrying(BaseRetrying):
self._retry_state.prepare_for_next_attempt()
await self.sleep(do)
else:
- return do
+ raise StopAsyncIteration
def wraps(self, fn: WrappedFn) -> WrappedFn:
fn = super().wraps(fn)
# Ensure wrapper is recognized as a coroutine function.
@functools.wraps(fn)
- async def async_wrapped(*args: typing.Any, **kwargs: typing.Any) -> typing.Any:
+ async def async_wrapped(*args: t.Any, **kwargs: t.Any) -> t.Any:
return await fn(*args, **kwargs)
# Preserve attributes
- async_wrapped.retry = fn.retry
- async_wrapped.retry_with = fn.retry_with
+ async_wrapped.retry = fn.retry # type: ignore[attr-defined]
+ async_wrapped.retry_with = fn.retry_with # type: ignore[attr-defined]
- return async_wrapped
+ return async_wrapped # type: ignore[return-value]
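
Usage sketch of the async-iteration protocol above; __anext__ now raising StopAsyncIteration is what lets the loop end cleanly once an attempt succeeds.

import asyncio
from pip._vendor.tenacity import AsyncRetrying, stop_after_attempt

async def main() -> None:
    async for attempt in AsyncRetrying(stop=stop_after_attempt(3)):
        with attempt:  # AttemptManager records success or failure
            ...        # do the flaky async work here

asyncio.run(main())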
diff --git a/src/pip/_vendor/tenacity/_utils.py b/src/pip/_vendor/tenacity/_utils.py
index d5c4c9de5..f14ff3209 100644
--- a/src/pip/_vendor/tenacity/_utils.py
+++ b/src/pip/_vendor/tenacity/_utils.py
@@ -16,6 +16,7 @@
import sys
import typing
+from datetime import timedelta
# sys.maxsize:
@@ -66,3 +67,10 @@ def get_callback_name(cb: typing.Callable[..., typing.Any]) -> str:
except AttributeError:
pass
return ".".join(segments)
+
+
+time_unit_type = typing.Union[int, float, timedelta]
+
+
+def to_seconds(time_unit: time_unit_type) -> float:
+ return float(time_unit.total_seconds() if isinstance(time_unit, timedelta) else time_unit)
diff --git a/src/pip/_vendor/tenacity/after.py b/src/pip/_vendor/tenacity/after.py
index c056700f9..574c9bcea 100644
--- a/src/pip/_vendor/tenacity/after.py
+++ b/src/pip/_vendor/tenacity/after.py
@@ -36,9 +36,14 @@ def after_log(
"""After call strategy that logs to some logger the finished attempt."""
def log_it(retry_state: "RetryCallState") -> None:
+ if retry_state.fn is None:
+ # NOTE(sileht): can't really happen, but we must please mypy
+ fn_name = "<unknown>"
+ else:
+ fn_name = _utils.get_callback_name(retry_state.fn)
logger.log(
log_level,
- f"Finished call to '{_utils.get_callback_name(retry_state.fn)}' "
+ f"Finished call to '{fn_name}' "
f"after {sec_format % retry_state.seconds_since_start}(s), "
f"this was the {_utils.to_ordinal(retry_state.attempt_number)} time calling it.",
)
diff --git a/src/pip/_vendor/tenacity/before.py b/src/pip/_vendor/tenacity/before.py
index a72c2c5f7..cfd7dc72e 100644
--- a/src/pip/_vendor/tenacity/before.py
+++ b/src/pip/_vendor/tenacity/before.py
@@ -32,9 +32,14 @@ def before_log(logger: "logging.Logger", log_level: int) -> typing.Callable[["Re
"""Before call strategy that logs to some logger the attempt."""
def log_it(retry_state: "RetryCallState") -> None:
+ if retry_state.fn is None:
+ # NOTE(sileht): can't really happen, but we must please mypy
+ fn_name = "<unknown>"
+ else:
+ fn_name = _utils.get_callback_name(retry_state.fn)
logger.log(
log_level,
- f"Starting call to '{_utils.get_callback_name(retry_state.fn)}', "
+ f"Starting call to '{fn_name}', "
f"this is the {_utils.to_ordinal(retry_state.attempt_number)} time calling it.",
)
diff --git a/src/pip/_vendor/tenacity/before_sleep.py b/src/pip/_vendor/tenacity/before_sleep.py
index b35564fba..8c6167fb3 100644
--- a/src/pip/_vendor/tenacity/before_sleep.py
+++ b/src/pip/_vendor/tenacity/before_sleep.py
@@ -36,6 +36,14 @@ def before_sleep_log(
"""Before call strategy that logs to some logger the attempt."""
def log_it(retry_state: "RetryCallState") -> None:
+ local_exc_info: BaseException | bool | None
+
+ if retry_state.outcome is None:
+ raise RuntimeError("log_it() called before outcome was set")
+
+ if retry_state.next_action is None:
+ raise RuntimeError("log_it() called before next_action was set")
+
if retry_state.outcome.failed:
ex = retry_state.outcome.exception()
verb, value = "raised", f"{ex.__class__.__name__}: {ex}"
@@ -48,10 +56,15 @@ def before_sleep_log(
verb, value = "returned", retry_state.outcome.result()
local_exc_info = False # exc_info does not apply when no exception
+ if retry_state.fn is None:
+ # NOTE(sileht): can't really happen, but we must please mypy
+ fn_name = "<unknown>"
+ else:
+ fn_name = _utils.get_callback_name(retry_state.fn)
+
logger.log(
log_level,
- f"Retrying {_utils.get_callback_name(retry_state.fn)} "
- f"in {retry_state.next_action.sleep} seconds as it {verb} {value}.",
+ f"Retrying {fn_name} " f"in {retry_state.next_action.sleep} seconds as it {verb} {value}.",
exc_info=local_exc_info,
)
diff --git a/src/pip/_vendor/tenacity/retry.py b/src/pip/_vendor/tenacity/retry.py
index 9ebeb62d5..38988739d 100644
--- a/src/pip/_vendor/tenacity/retry.py
+++ b/src/pip/_vendor/tenacity/retry.py
@@ -36,6 +36,9 @@ class retry_base(abc.ABC):
return retry_any(self, other)
+RetryBaseT = typing.Union[retry_base, typing.Callable[["RetryCallState"], bool]]
+
+
class _retry_never(retry_base):
"""Retry strategy that never rejects any result."""
@@ -63,8 +66,14 @@ class retry_if_exception(retry_base):
self.predicate = predicate
def __call__(self, retry_state: "RetryCallState") -> bool:
+ if retry_state.outcome is None:
+ raise RuntimeError("__call__() called before outcome was set")
+
if retry_state.outcome.failed:
- return self.predicate(retry_state.outcome.exception())
+ exception = retry_state.outcome.exception()
+ if exception is None:
+ raise RuntimeError("outcome failed but the exception is None")
+ return self.predicate(exception)
else:
return False
@@ -111,10 +120,17 @@ class retry_unless_exception_type(retry_if_exception):
super().__init__(lambda e: not isinstance(e, exception_types))
def __call__(self, retry_state: "RetryCallState") -> bool:
+ if retry_state.outcome is None:
+ raise RuntimeError("__call__() called before outcome was set")
+
# always retry if no exception was raised
if not retry_state.outcome.failed:
return True
- return self.predicate(retry_state.outcome.exception())
+
+ exception = retry_state.outcome.exception()
+ if exception is None:
+ raise RuntimeError("outcome failed but the exception is None")
+ return self.predicate(exception)
class retry_if_exception_cause_type(retry_base):
@@ -134,6 +150,9 @@ class retry_if_exception_cause_type(retry_base):
self.exception_cause_types = exception_types
def __call__(self, retry_state: "RetryCallState") -> bool:
+ if retry_state.outcome is None:
+ raise RuntimeError("__call__ called before outcome was set")
+
if retry_state.outcome.failed:
exc = retry_state.outcome.exception()
while exc is not None:
@@ -151,6 +170,9 @@ class retry_if_result(retry_base):
self.predicate = predicate
def __call__(self, retry_state: "RetryCallState") -> bool:
+ if retry_state.outcome is None:
+ raise RuntimeError("__call__() called before outcome was set")
+
if not retry_state.outcome.failed:
return self.predicate(retry_state.outcome.result())
else:
@@ -164,6 +186,9 @@ class retry_if_not_result(retry_base):
self.predicate = predicate
def __call__(self, retry_state: "RetryCallState") -> bool:
+ if retry_state.outcome is None:
+ raise RuntimeError("__call__() called before outcome was set")
+
if not retry_state.outcome.failed:
return not self.predicate(retry_state.outcome.result())
else:
@@ -215,9 +240,16 @@ class retry_if_not_exception_message(retry_if_exception_message):
self.predicate = lambda *args_, **kwargs_: not if_predicate(*args_, **kwargs_)
def __call__(self, retry_state: "RetryCallState") -> bool:
+ if retry_state.outcome is None:
+ raise RuntimeError("__call__() called before outcome was set")
+
if not retry_state.outcome.failed:
return True
- return self.predicate(retry_state.outcome.exception())
+
+ exception = retry_state.outcome.exception()
+ if exception is None:
+ raise RuntimeError("outcome failed but the exception is None")
+ return self.predicate(exception)
class retry_any(retry_base):
diff --git a/src/pip/_vendor/tenacity/stop.py b/src/pip/_vendor/tenacity/stop.py
index faaae9a8d..bb23effdf 100644
--- a/src/pip/_vendor/tenacity/stop.py
+++ b/src/pip/_vendor/tenacity/stop.py
@@ -16,6 +16,8 @@
import abc
import typing
+from pip._vendor.tenacity import _utils
+
if typing.TYPE_CHECKING:
import threading
@@ -36,6 +38,9 @@ class stop_base(abc.ABC):
return stop_any(self, other)
+StopBaseT = typing.Union[stop_base, typing.Callable[["RetryCallState"], bool]]
+
+
class stop_any(stop_base):
"""Stop if any of the stop condition is valid."""
@@ -89,8 +94,10 @@ class stop_after_attempt(stop_base):
class stop_after_delay(stop_base):
"""Stop when the time from the first attempt >= limit."""
- def __init__(self, max_delay: float) -> None:
- self.max_delay = max_delay
+ def __init__(self, max_delay: _utils.time_unit_type) -> None:
+ self.max_delay = _utils.to_seconds(max_delay)
def __call__(self, retry_state: "RetryCallState") -> bool:
+ if retry_state.seconds_since_start is None:
+ raise RuntimeError("__call__() called but seconds_since_start is not set")
return retry_state.seconds_since_start >= self.max_delay
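
A small sketch of what _utils.to_seconds() enables here: delay limits may now be given as a timedelta as well as raw seconds.

from datetime import timedelta
from pip._vendor.tenacity import retry, stop_after_delay

@retry(stop=stop_after_delay(timedelta(minutes=2)))  # same as stop_after_delay(120)
def poll():
    ...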
diff --git a/src/pip/_vendor/tenacity/tornadoweb.py b/src/pip/_vendor/tenacity/tornadoweb.py
index 8f7731af0..e19c30b18 100644
--- a/src/pip/_vendor/tenacity/tornadoweb.py
+++ b/src/pip/_vendor/tenacity/tornadoweb.py
@@ -33,8 +33,8 @@ class TornadoRetrying(BaseRetrying):
super().__init__(**kwargs)
self.sleep = sleep
- @gen.coroutine
- def __call__( # type: ignore # Change signature from supertype
+ @gen.coroutine # type: ignore[misc]
+ def __call__(
self,
fn: "typing.Callable[..., typing.Union[typing.Generator[typing.Any, typing.Any, _RetValT], Future[_RetValT]]]",
*args: typing.Any,
@@ -49,7 +49,7 @@ class TornadoRetrying(BaseRetrying):
try:
result = yield fn(*args, **kwargs)
except BaseException: # noqa: B902
- retry_state.set_exception(sys.exc_info())
+ retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type]
else:
retry_state.set_result(result)
elif isinstance(do, DoSleep):
diff --git a/src/pip/_vendor/tenacity/wait.py b/src/pip/_vendor/tenacity/wait.py
index 8fdfc8f9d..f9349c028 100644
--- a/src/pip/_vendor/tenacity/wait.py
+++ b/src/pip/_vendor/tenacity/wait.py
@@ -17,19 +17,12 @@
import abc
import random
import typing
-from datetime import timedelta
from pip._vendor.tenacity import _utils
if typing.TYPE_CHECKING:
from pip._vendor.tenacity import RetryCallState
-wait_unit_type = typing.Union[int, float, timedelta]
-
-
-def to_seconds(wait_unit: wait_unit_type) -> float:
- return float(wait_unit.total_seconds() if isinstance(wait_unit, timedelta) else wait_unit)
-
class wait_base(abc.ABC):
"""Abstract base class for wait strategies."""
@@ -43,16 +36,19 @@ class wait_base(abc.ABC):
def __radd__(self, other: "wait_base") -> typing.Union["wait_combine", "wait_base"]:
# make it possible to use multiple waits with the built-in sum function
- if other == 0:
+ if other == 0: # type: ignore[comparison-overlap]
return self
return self.__add__(other)
+WaitBaseT = typing.Union[wait_base, typing.Callable[["RetryCallState"], typing.Union[float, int]]]
+
+
class wait_fixed(wait_base):
"""Wait strategy that waits a fixed amount of time between each retry."""
- def __init__(self, wait: wait_unit_type) -> None:
- self.wait_fixed = to_seconds(wait)
+ def __init__(self, wait: _utils.time_unit_type) -> None:
+ self.wait_fixed = _utils.to_seconds(wait)
def __call__(self, retry_state: "RetryCallState") -> float:
return self.wait_fixed
@@ -68,9 +64,9 @@ class wait_none(wait_fixed):
class wait_random(wait_base):
"""Wait strategy that waits a random amount of time between min/max."""
- def __init__(self, min: wait_unit_type = 0, max: wait_unit_type = 1) -> None: # noqa
- self.wait_random_min = to_seconds(min)
- self.wait_random_max = to_seconds(max)
+ def __init__(self, min: _utils.time_unit_type = 0, max: _utils.time_unit_type = 1) -> None: # noqa
+ self.wait_random_min = _utils.to_seconds(min)
+ self.wait_random_max = _utils.to_seconds(max)
def __call__(self, retry_state: "RetryCallState") -> float:
return self.wait_random_min + (random.random() * (self.wait_random_max - self.wait_random_min))
@@ -120,13 +116,13 @@ class wait_incrementing(wait_base):
def __init__(
self,
- start: wait_unit_type = 0,
- increment: wait_unit_type = 100,
- max: wait_unit_type = _utils.MAX_WAIT, # noqa
+ start: _utils.time_unit_type = 0,
+ increment: _utils.time_unit_type = 100,
+ max: _utils.time_unit_type = _utils.MAX_WAIT, # noqa
) -> None:
- self.start = to_seconds(start)
- self.increment = to_seconds(increment)
- self.max = to_seconds(max)
+ self.start = _utils.to_seconds(start)
+ self.increment = _utils.to_seconds(increment)
+ self.max = _utils.to_seconds(max)
def __call__(self, retry_state: "RetryCallState") -> float:
result = self.start + (self.increment * (retry_state.attempt_number - 1))
@@ -149,13 +145,13 @@ class wait_exponential(wait_base):
def __init__(
self,
multiplier: typing.Union[int, float] = 1,
- max: wait_unit_type = _utils.MAX_WAIT, # noqa
+ max: _utils.time_unit_type = _utils.MAX_WAIT, # noqa
exp_base: typing.Union[int, float] = 2,
- min: wait_unit_type = 0, # noqa
+ min: _utils.time_unit_type = 0, # noqa
) -> None:
self.multiplier = multiplier
- self.min = to_seconds(min)
- self.max = to_seconds(max)
+ self.min = _utils.to_seconds(min)
+ self.max = _utils.to_seconds(max)
self.exp_base = exp_base
def __call__(self, retry_state: "RetryCallState") -> float:
@@ -206,7 +202,7 @@ class wait_exponential_jitter(wait_base):
This implements the strategy described here:
https://cloud.google.com/storage/docs/retry-strategy
- The wait time is min(initial * (2**n + random.uniform(0, jitter)), maximum)
+ The wait time is min(initial * 2**n + random.uniform(0, jitter), maximum)
where n is the retry count.
"""
diff --git a/src/pip/_vendor/typing_extensions.py b/src/pip/_vendor/typing_extensions.py
index 34199c2a9..9cbf5b87b 100644
--- a/src/pip/_vendor/typing_extensions.py
+++ b/src/pip/_vendor/typing_extensions.py
@@ -2,10 +2,12 @@ import abc
import collections
import collections.abc
import functools
+import inspect
import operator
import sys
import types as _types
import typing
+import warnings
__all__ = [
@@ -51,6 +53,7 @@ __all__ = [
'assert_type',
'clear_overloads',
'dataclass_transform',
+ 'deprecated',
'get_overloads',
'final',
'get_args',
@@ -728,6 +731,8 @@ else:
_typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
' /, *, total=True, **kwargs)')
+ _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
class _TypedDictMeta(type):
def __init__(cls, name, bases, ns, total=True):
super().__init__(name, bases, ns)
@@ -753,8 +758,10 @@ else:
annotations = {}
own_annotations = ns.get('__annotations__', {})
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ kwds = {"module": tp_dict.__module__} if _TAKES_MODULE else {}
own_annotations = {
- n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
+ n: typing._type_check(tp, msg, **kwds)
+ for n, tp in own_annotations.items()
}
required_keys = set()
optional_keys = set()
@@ -1157,7 +1164,7 @@ class _DefaultMixin:
if isinstance(default, (tuple, list)):
self.__default__ = tuple((typing._type_check(d, "Default must be a type")
for d in default))
- elif default:
+ elif default != _marker:
self.__default__ = typing._type_check(default, "Default must be a type")
else:
self.__default__ = None
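
A sketch of why the sentinel matters: under the old `elif default:`, an explicit default of None was indistinguishable from "no default"; comparing against _marker keeps the two cases apart.

from pip._vendor.typing_extensions import TypeVar

T = TypeVar("T", default=None)  # __default__ becomes type(None)
U = TypeVar("U")                # truly no default; __default__ stays None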
@@ -1171,7 +1178,7 @@ class TypeVar(typing.TypeVar, _DefaultMixin, _root=True):
def __init__(self, name, *constraints, bound=None,
covariant=False, contravariant=False,
- default=None, infer_variance=False):
+ default=_marker, infer_variance=False):
super().__init__(name, *constraints, bound=bound, covariant=covariant,
contravariant=contravariant)
_DefaultMixin.__init__(self, default)
@@ -1258,7 +1265,7 @@ if hasattr(typing, 'ParamSpec'):
__module__ = 'typing'
def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
- default=None):
+ default=_marker):
super().__init__(name, bound=bound, covariant=covariant,
contravariant=contravariant)
_DefaultMixin.__init__(self, default)
@@ -1334,7 +1341,7 @@ else:
return ParamSpecKwargs(self)
def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
- default=None):
+ default=_marker):
super().__init__([self])
self.__name__ = name
self.__covariant__ = bool(covariant)
@@ -1850,7 +1857,7 @@ if hasattr(typing, "TypeVarTuple"): # 3.11+
class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True):
"""Type variable tuple."""
- def __init__(self, name, *, default=None):
+ def __init__(self, name, *, default=_marker):
super().__init__(name)
_DefaultMixin.__init__(self, default)
@@ -1913,7 +1920,7 @@ else:
def __iter__(self):
yield self.__unpacked__
- def __init__(self, name, *, default=None):
+ def __init__(self, name, *, default=_marker):
self.__name__ = name
_DefaultMixin.__init__(self, default)
@@ -1993,7 +2000,8 @@ else:
raise AssertionError("Expected code to be unreachable")
-if hasattr(typing, 'dataclass_transform'):
+if sys.version_info >= (3, 12):
+ # dataclass_transform exists in 3.11 but lacks the frozen_default parameter
dataclass_transform = typing.dataclass_transform
else:
def dataclass_transform(
@@ -2001,6 +2009,7 @@ else:
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
+ frozen_default: bool = False,
field_specifiers: typing.Tuple[
typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
...
@@ -2057,6 +2066,8 @@ else:
assumed to be True or False if it is omitted by the caller.
- ``kw_only_default`` indicates whether the ``kw_only`` parameter is
assumed to be True or False if it is omitted by the caller.
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
- ``field_specifiers`` specifies a static list of supported classes
or functions that describe fields, similar to ``dataclasses.field()``.
@@ -2071,6 +2082,7 @@ else:
"eq_default": eq_default,
"order_default": order_default,
"kw_only_default": kw_only_default,
+ "frozen_default": frozen_default,
"field_specifiers": field_specifiers,
"kwargs": kwargs,
}
@@ -2102,12 +2114,103 @@ else:
This helps prevent bugs that may occur when a base class is changed
without an equivalent change to a child class.
+ There is no runtime checking of these properties. The decorator
+ sets the ``__override__`` attribute to ``True`` on the decorated object
+ to allow runtime introspection.
+
See PEP 698 for details.
"""
+ try:
+ __arg.__override__ = True
+ except (AttributeError, TypeError):
+ # Skip the attribute silently if it is not writable.
+ # AttributeError happens if the object has __slots__ or a
+ # read-only property, TypeError if it's a builtin class.
+ pass
return __arg
+if hasattr(typing, "deprecated"):
+ deprecated = typing.deprecated
+else:
+ _T = typing.TypeVar("_T")
+
+ def deprecated(
+ __msg: str,
+ *,
+ category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
+ stacklevel: int = 1,
+ ) -> typing.Callable[[_T], _T]:
+ """Indicate that a class, function or overload is deprecated.
+
+ Usage:
+
+ @deprecated("Use B instead")
+ class A:
+ pass
+
+ @deprecated("Use g instead")
+ def f():
+ pass
+
+ @overload
+ @deprecated("int support is deprecated")
+ def g(x: int) -> int: ...
+ @overload
+ def g(x: str) -> int: ...
+
+ When this decorator is applied to an object, the type checker
+ will generate a diagnostic on usage of the deprecated object.
+
+        A runtime warning of ``category`` is also emitted when the decorated
+        class is instantiated or the decorated callable is called, unless
+        ``category`` is ``None``. The decorator sets the ``__deprecated__``
+        attribute on the decorated object to the deprecation message
+        passed to the decorator. If applied to an overload, the decorator
+        must be after the ``@overload`` decorator for the attribute to
+        exist on the overload as returned by ``get_overloads()``.
+
+ See PEP 702 for details.
+
+ """
+ def decorator(__arg: _T) -> _T:
+ if category is None:
+ __arg.__deprecated__ = __msg
+ return __arg
+ elif isinstance(__arg, type):
+ original_new = __arg.__new__
+ has_init = __arg.__init__ is not object.__init__
+
+ @functools.wraps(original_new)
+ def __new__(cls, *args, **kwargs):
+ warnings.warn(__msg, category=category, stacklevel=stacklevel + 1)
+ # Mirrors a similar check in object.__new__.
+ if not has_init and (args or kwargs):
+ raise TypeError(f"{cls.__name__}() takes no arguments")
+ if original_new is not object.__new__:
+ return original_new(cls, *args, **kwargs)
+ else:
+ return original_new(cls)
+
+ __arg.__new__ = staticmethod(__new__)
+ __arg.__deprecated__ = __new__.__deprecated__ = __msg
+ return __arg
+ elif callable(__arg):
+ @functools.wraps(__arg)
+ def wrapper(*args, **kwargs):
+ warnings.warn(__msg, category=category, stacklevel=stacklevel + 1)
+ return __arg(*args, **kwargs)
+
+ __arg.__deprecated__ = wrapper.__deprecated__ = __msg
+ return wrapper
+ else:
+ raise TypeError(
+ "@deprecated decorator with non-None category must be applied to "
+ f"a class or callable, not {__arg!r}"
+ )
+
+ return decorator
+
+
# We have to do some monkey patching to deal with the dual nature of
# Unpack/TypeVarTuple:
# - We want Unpack to be a kind of TypeVar so it gets accepted in
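
The new deprecated decorator both marks objects for type checkers and, unless category=None, emits a runtime warning on use. A short sketch of its runtime behavior, assuming pip's vendored copy (plain typing_extensions outside of pip):

import warnings
from pip._vendor.typing_extensions import deprecated

@deprecated("Use NewThing instead")
class OldThing:
    pass

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    OldThing()  # instantiating the class triggers the wrapped __new__
assert OldThing.__deprecated__ == "Use NewThing instead"
assert issubclass(caught[0].category, DeprecationWarning)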
diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py
index 7c031661b..e12dd0e78 100644
--- a/src/pip/_vendor/urllib3/_version.py
+++ b/src/pip/_vendor/urllib3/_version.py
@@ -1,2 +1,2 @@
# This file is protected via CODEOWNERS
-__version__ = "1.26.14"
+__version__ = "1.26.15"
diff --git a/src/pip/_vendor/urllib3/connection.py b/src/pip/_vendor/urllib3/connection.py
index 10fb36c4e..54b96b191 100644
--- a/src/pip/_vendor/urllib3/connection.py
+++ b/src/pip/_vendor/urllib3/connection.py
@@ -229,6 +229,11 @@ class HTTPConnection(_HTTPConnection, object):
)
def request(self, method, url, body=None, headers=None):
+ # Update the inner socket's timeout value to send the request.
+ # This only triggers if the connection is re-used.
+ if getattr(self, "sock", None) is not None:
+ self.sock.settimeout(self.timeout)
+
if headers is None:
headers = {}
else:
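
The connection.py change re-applies the timeout to an already-open socket, so per-request timeouts take effect on re-used keep-alive connections. A hedged usage sketch (performs a real network call; plain urllib3 outside of pip):

from pip._vendor.urllib3 import HTTPConnectionPool

pool = HTTPConnectionPool("example.org", maxsize=1)
pool.urlopen("GET", "/", timeout=10.0)  # socket created with a 10s timeout
# Before this fix, the re-used socket silently kept the old timeout;
# now settimeout() runs again, so this request really gets 0.5s.
pool.urlopen("GET", "/", timeout=0.5)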
diff --git a/src/pip/_vendor/urllib3/connectionpool.py b/src/pip/_vendor/urllib3/connectionpool.py
index 708739279..c23d736b1 100644
--- a/src/pip/_vendor/urllib3/connectionpool.py
+++ b/src/pip/_vendor/urllib3/connectionpool.py
@@ -379,7 +379,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
- conn.timeout = timeout_obj.connect_timeout
+ conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)
# Trigger any extra validation we need to do.
try:
diff --git a/src/pip/_vendor/urllib3/util/timeout.py b/src/pip/_vendor/urllib3/util/timeout.py
index ff69593b0..78e18a627 100644
--- a/src/pip/_vendor/urllib3/util/timeout.py
+++ b/src/pip/_vendor/urllib3/util/timeout.py
@@ -2,9 +2,8 @@ from __future__ import absolute_import
import time
-# The default socket timeout, used by httplib to indicate that no timeout was
-# specified by the user
-from socket import _GLOBAL_DEFAULT_TIMEOUT
+# The default socket timeout, used by httplib to indicate that no timeout was specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout
from ..exceptions import TimeoutStateError
@@ -117,6 +116,10 @@ class Timeout(object):
__str__ = __repr__
@classmethod
+ def resolve_default_timeout(cls, timeout):
+ return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout
+
+ @classmethod
def _validate_timeout(cls, value, name):
"""Check that a timeout attribute is valid.
diff --git a/src/pip/_vendor/urllib3/util/url.py b/src/pip/_vendor/urllib3/util/url.py
index d6d0bbcea..a960b2f3c 100644
--- a/src/pip/_vendor/urllib3/util/url.py
+++ b/src/pip/_vendor/urllib3/util/url.py
@@ -50,7 +50,7 @@ _variations = [
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
-UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
+UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
@@ -303,7 +303,7 @@ def _normalize_host(host, scheme):
def _idna_encode(name):
- if name and any([ord(x) > 128 for x in name]):
+ if name and any(ord(x) >= 128 for x in name):
try:
from pip._vendor import idna
except ImportError:
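
The comparison fix matters exactly at the ASCII boundary: U+0080 is the first non-ASCII code point and ord("\x80") == 128, which the old strict > missed. A one-line check:

name = "ex\x80mple"  # hypothetical host label containing U+0080
assert any(ord(x) >= 128 for x in name)      # new test: IDNA encoding is attempted
assert not any(ord(x) > 128 for x in name)   # old test: this host slipped through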
diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt
index 5519479bf..3974df3f1 100644
--- a/src/pip/_vendor/vendor.txt
+++ b/src/pip/_vendor/vendor.txt
@@ -2,22 +2,22 @@ CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for t
colorama==0.4.6
distlib==0.3.6
distro==1.8.0
-msgpack==1.0.4
+msgpack==1.0.5
packaging==21.3
-platformdirs==2.6.2
+platformdirs==3.2.0
pyparsing==3.0.9
pyproject-hooks==1.0.0
requests==2.28.2
certifi==2022.12.7
chardet==5.1.0
idna==3.4
- urllib3==1.26.14
-rich==12.6.0
- pygments==2.13.0
- typing_extensions==4.4.0
+ urllib3==1.26.15
+rich==13.3.3
+ pygments==2.14.0
+ typing_extensions==4.5.0
resolvelib==1.0.1
-setuptools==65.6.3
+setuptools==67.6.1
six==1.16.0
-tenacity==8.1.0
+tenacity==8.2.2
tomli==2.0.1
webencodings==0.5.1
diff --git a/tests/conftest.py b/tests/conftest.py
index 13011f4fd..57dd7e68a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -432,7 +432,6 @@ def virtualenv_template(
wheel_install: Path,
coverage_install: Path,
) -> Iterator[VirtualEnvironment]:
-
venv_type: VirtualEnvironmentType
if request.config.getoption("--use-venv"):
venv_type = "venv"
diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py
index 93a6b930f..22a71cd32 100644
--- a/tests/functional/test_build_env.py
+++ b/tests/functional/test_build_env.py
@@ -106,7 +106,6 @@ def test_build_env_allow_only_one_install(script: PipTestEnvironment) -> None:
def test_build_env_requirements_check(script: PipTestEnvironment) -> None:
-
create_basic_wheel_for_package(script, "foo", "2.0")
create_basic_wheel_for_package(script, "bar", "1.0")
create_basic_wheel_for_package(script, "bar", "3.0")
@@ -206,7 +205,6 @@ def test_build_env_overlay_prefix_has_priority(script: PipTestEnvironment) -> No
@pytest.mark.usefixtures("enable_user_site")
def test_build_env_isolation(script: PipTestEnvironment) -> None:
-
# Create dummy `pkg` wheel.
pkg_whl = create_basic_wheel_for_package(script, "pkg", "1.0")
diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py
index 7d20f5e31..788abdd2b 100644
--- a/tests/functional/test_cache.py
+++ b/tests/functional/test_cache.py
@@ -107,7 +107,7 @@ def list_matches_wheel(wheel_name: str, result: TestPipResult) -> bool:
`- foo-1.2.3-py3-none-any.whl `."""
lines = result.stdout.splitlines()
expected = f" - {wheel_name}-py3-none-any.whl "
- return any(map(lambda l: l.startswith(expected), lines))
+ return any(map(lambda line: line.startswith(expected), lines))
def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool:
@@ -121,7 +121,9 @@ def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool:
expected = f"{wheel_name}-py3-none-any.whl"
return any(
map(
- lambda l: os.path.basename(l).startswith(expected) and os.path.exists(l),
+ lambda line: (
+ os.path.basename(line).startswith(expected) and os.path.exists(line)
+ ),
lines,
)
)
diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py
index b1e15c010..f3975de2a 100644
--- a/tests/functional/test_config_settings.py
+++ b/tests/functional/test_config_settings.py
@@ -1,9 +1,11 @@
import json
+import tarfile
from pathlib import Path
-from typing import Tuple
+from typing import List, Optional, Tuple
from zipfile import ZipFile
-from tests.lib import PipTestEnvironment
+from pip._internal.utils.urls import path_to_url
+from tests.lib import PipTestEnvironment, create_basic_sdist_for_package
PYPROJECT_TOML = """\
[build-system]
@@ -36,9 +38,10 @@ Summary: A dummy package
Author: None
Author-email: none@example.org
License: MIT
+{requires_dist}
"""
-def make_wheel(z, project, version, files):
+def make_wheel(z, project, version, requires_dist, files):
record = []
def add_file(name, data):
data = data.encode("utf-8")
@@ -48,7 +51,9 @@ def make_wheel(z, project, version, files):
record.append((name, f"sha256={hash}", len(data)))
distinfo = f"{project}-{version}.dist-info"
add_file(f"{distinfo}/WHEEL", WHEEL)
- add_file(f"{distinfo}/METADATA", METADATA.format(project=project, version=version))
+ add_file(f"{distinfo}/METADATA", METADATA.format(
+ project=project, version=version, requires_dist=requires_dist
+ ))
for name, data in files:
add_file(name, data)
record_name = f"{distinfo}/RECORD"
@@ -70,14 +75,14 @@ class Backend:
):
if config_settings is None:
config_settings = {}
- w = os.path.join(wheel_directory, "foo-1.0-py3-none-any.whl")
+ w = os.path.join(wheel_directory, "{{name}}-1.0-py3-none-any.whl")
with open(w, "wb") as f:
with ZipFile(f, "w") as z:
make_wheel(
- z, "foo", "1.0",
- [("config.json", json.dumps(config_settings))]
+ z, "{{name}}", "1.0", "{{requires_dist}}",
+ [("{{name}}-config.json", json.dumps(config_settings))]
)
- return "foo-1.0-py3-none-any.whl"
+ return "{{name}}-1.0-py3-none-any.whl"
build_editable = build_wheel
@@ -85,14 +90,20 @@ main = Backend()
'''
-def make_project(path: Path) -> Tuple[str, str, Path]:
- name = "foo"
+def make_project(
+ path: Path, name: str = "foo", dependencies: Optional[List[str]] = None
+) -> Tuple[str, str, Path]:
version = "1.0"
project_dir = path / name
backend = project_dir / "backend"
backend.mkdir(parents=True)
(project_dir / "pyproject.toml").write_text(PYPROJECT_TOML)
- (backend / "dummy_backend.py").write_text(BACKEND_SRC)
+ requires_dist = [f"Requires-Dist: {dep}" for dep in dependencies or []]
+ (backend / "dummy_backend.py").write_text(
+ BACKEND_SRC.replace("{{name}}", name).replace(
+ "{{requires_dist}}", "\n".join(requires_dist)
+ )
+ )
return name, version, project_dir
@@ -108,25 +119,158 @@ def test_backend_sees_config(script: PipTestEnvironment) -> None:
wheel_file_path = script.cwd / wheel_file_name
with open(wheel_file_path, "rb") as f:
with ZipFile(f) as z:
- output = z.read("config.json")
+ output = z.read(f"{name}-config.json")
+ assert json.loads(output) == {"FOO": "Hello"}
+
+
+def test_backend_sees_config_reqs(script: PipTestEnvironment) -> None:
+ name, version, project_dir = make_project(script.scratch_path)
+ script.scratch_path.joinpath("reqs.txt").write_text(
+ f"{project_dir} --config-settings FOO=Hello"
+ )
+ script.pip("wheel", "-r", "reqs.txt")
+ wheel_file_name = f"{name}-{version}-py3-none-any.whl"
+ wheel_file_path = script.cwd / wheel_file_name
+ with open(wheel_file_path, "rb") as f:
+ with ZipFile(f) as z:
+ output = z.read(f"{name}-config.json")
+ assert json.loads(output) == {"FOO": "Hello"}
+
+
+def test_backend_sees_config_via_constraint(script: PipTestEnvironment) -> None:
+ name, version, project_dir = make_project(script.scratch_path)
+ constraints_file = script.scratch_path / "constraints.txt"
+ constraints_file.write_text(f"{name} @ {path_to_url(str(project_dir))}")
+ script.pip(
+ "wheel",
+ "--config-settings",
+ "FOO=Hello",
+ "-c",
+ "constraints.txt",
+ name,
+ )
+ wheel_file_name = f"{name}-{version}-py3-none-any.whl"
+ wheel_file_path = script.cwd / wheel_file_name
+ with open(wheel_file_path, "rb") as f:
+ with ZipFile(f) as z:
+ output = z.read(f"{name}-config.json")
+ assert json.loads(output) == {"FOO": "Hello"}
+
+
+def test_backend_sees_config_via_sdist(script: PipTestEnvironment) -> None:
+ name, version, project_dir = make_project(script.scratch_path)
+ dists_dir = script.scratch_path / "dists"
+ dists_dir.mkdir()
+ with tarfile.open(dists_dir / f"{name}-{version}.tar.gz", "w:gz") as dist_tar:
+ dist_tar.add(project_dir, arcname=name)
+ script.pip(
+ "wheel",
+ "--config-settings",
+ "FOO=Hello",
+ "-f",
+ dists_dir,
+ name,
+ )
+ wheel_file_name = f"{name}-{version}-py3-none-any.whl"
+ wheel_file_path = script.cwd / wheel_file_name
+ with open(wheel_file_path, "rb") as f:
+ with ZipFile(f) as z:
+ output = z.read(f"{name}-config.json")
assert json.loads(output) == {"FOO": "Hello"}
+def test_req_file_does_not_see_config(script: PipTestEnvironment) -> None:
+    """Test that CLI config settings do not propagate to requirements
+    listed in requirement files."""
+ name, _, project_dir = make_project(script.scratch_path)
+ reqs_file = script.scratch_path / "reqs.txt"
+ reqs_file.write_text(f"{project_dir}")
+ script.pip(
+ "install",
+ "--config-settings",
+ "FOO=Hello",
+ "-r",
+ reqs_file,
+ )
+ config = script.site_packages_path / f"{name}-config.json"
+ with open(config, "rb") as f:
+ assert json.load(f) == {}
+
+
+def test_dep_does_not_see_config(script: PipTestEnvironment) -> None:
+ """Test that CLI config settings do not propagate to dependencies."""
+ _, _, bar_project_dir = make_project(script.scratch_path, name="bar")
+ _, _, foo_project_dir = make_project(
+ script.scratch_path,
+ name="foo",
+ dependencies=[f"bar @ {path_to_url(str(bar_project_dir))}"],
+ )
+ script.pip(
+ "install",
+ "--config-settings",
+ "FOO=Hello",
+ foo_project_dir,
+ )
+ foo_config = script.site_packages_path / "foo-config.json"
+ with open(foo_config, "rb") as f:
+ assert json.load(f) == {"FOO": "Hello"}
+ bar_config = script.site_packages_path / "bar-config.json"
+ with open(bar_config, "rb") as f:
+ assert json.load(f) == {}
+
+
+def test_dep_in_req_file_does_not_see_config(script: PipTestEnvironment) -> None:
+ """Test that CLI config settings do not propagate to dependencies found in
+ requirement files."""
+ _, _, bar_project_dir = make_project(script.scratch_path, name="bar")
+ _, _, foo_project_dir = make_project(
+ script.scratch_path,
+ name="foo",
+ dependencies=["bar"],
+ )
+ reqs_file = script.scratch_path / "reqs.txt"
+ reqs_file.write_text(f"bar @ {path_to_url(str(bar_project_dir))}")
+ script.pip(
+ "install",
+ "--config-settings",
+ "FOO=Hello",
+ "-r",
+ reqs_file,
+ foo_project_dir,
+ )
+ foo_config = script.site_packages_path / "foo-config.json"
+ with open(foo_config, "rb") as f:
+ assert json.load(f) == {"FOO": "Hello"}
+ bar_config = script.site_packages_path / "bar-config.json"
+ with open(bar_config, "rb") as f:
+ assert json.load(f) == {}
+
+
def test_install_sees_config(script: PipTestEnvironment) -> None:
- _, _, project_dir = make_project(script.scratch_path)
+ name, _, project_dir = make_project(script.scratch_path)
script.pip(
"install",
"--config-settings",
"FOO=Hello",
project_dir,
)
- config = script.site_packages_path / "config.json"
+ config = script.site_packages_path / f"{name}-config.json"
+ with open(config, "rb") as f:
+ assert json.load(f) == {"FOO": "Hello"}
+
+
+def test_install_sees_config_reqs(script: PipTestEnvironment) -> None:
+ name, _, project_dir = make_project(script.scratch_path)
+ script.scratch_path.joinpath("reqs.txt").write_text(
+ f"{project_dir} --config-settings FOO=Hello"
+ )
+ script.pip("install", "-r", "reqs.txt")
+ config = script.site_packages_path / f"{name}-config.json"
with open(config, "rb") as f:
assert json.load(f) == {"FOO": "Hello"}
def test_install_editable_sees_config(script: PipTestEnvironment) -> None:
- _, _, project_dir = make_project(script.scratch_path)
+ name, _, project_dir = make_project(script.scratch_path)
script.pip(
"install",
"--config-settings",
@@ -134,6 +278,26 @@ def test_install_editable_sees_config(script: PipTestEnvironment) -> None:
"--editable",
project_dir,
)
- config = script.site_packages_path / "config.json"
+ config = script.site_packages_path / f"{name}-config.json"
with open(config, "rb") as f:
assert json.load(f) == {"FOO": "Hello"}
+
+
+def test_install_config_reqs(script: PipTestEnvironment) -> None:
+ name, _, project_dir = make_project(script.scratch_path)
+ a_sdist = create_basic_sdist_for_package(
+ script,
+ "foo",
+ "1.0",
+ {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC},
+ )
+ script.scratch_path.joinpath("reqs.txt").write_text(
+ f'{project_dir} --config-settings "--build-option=--cffi" '
+ '--config-settings "--build-option=--avx2" '
+ "--config-settings FOO=BAR"
+ )
+ script.pip("install", "--no-index", "-f", str(a_sdist.parent), "-r", "reqs.txt")
+ script.assert_installed(foo="1.0")
+ config = script.site_packages_path / f"{name}-config.json"
+ with open(config, "rb") as f:
+ assert json.load(f) == {"--build-option": ["--cffi", "--avx2"], "FOO": "BAR"}
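
These tests pin down the scoping rule for --config-settings: a CLI-level setting applies only to requirements named on the command line, while a per-requirement setting in a requirements file applies only to that line, never to its dependencies. A hedged requirements-file illustration of the syntax exercised above (hypothetical package names):

pkga --config-settings FOO=Hello
pkgb --config-settings "--build-option=--cffi" --config-settings "--build-option=--avx2"
pkgc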
diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py
index 97ccfa911..637128274 100644
--- a/tests/functional/test_install.py
+++ b/tests/functional/test_install.py
@@ -1,3 +1,4 @@
+import hashlib
import os
import re
import ssl
@@ -13,6 +14,7 @@ import pytest
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.models.index import PyPI, TestPyPI
from pip._internal.utils.misc import rmtree
+from pip._internal.utils.urls import path_to_url
from tests.conftest import CertFactory
from tests.lib import (
PipTestEnvironment,
@@ -616,6 +618,159 @@ def test_hashed_install_failure(script: PipTestEnvironment, tmpdir: Path) -> Non
assert len(result.files_created) == 0
+def test_link_hash_pass_require_hashes(
+ script: PipTestEnvironment, shared_data: TestData
+) -> None:
+    """Test that a good hash in a user-provided direct URL is
+ considered valid for --require-hashes."""
+ url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz")))
+ url = (
+ f"{url}#sha256="
+ "393043e672415891885c9a2a0929b1af95fb866d6ca016b42d2e6ce53619b653"
+ )
+ script.pip_install_local("--no-deps", "--require-hashes", url)
+
+
+def test_bad_link_hash_install_failure(
+ script: PipTestEnvironment, shared_data: TestData
+) -> None:
+    """Test that a wrong hash in a direct URL stops installation."""
+ url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz")))
+ url = f"{url}#sha256=invalidhash"
+ result = script.pip_install_local("--no-deps", url, expect_error=True)
+ assert "THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr
+
+
+def test_bad_link_hash_good_user_hash_install_success(
+ script: PipTestEnvironment, shared_data: TestData, tmp_path: Path
+) -> None:
+    """Test that a wrong hash in a direct URL is ignored when a good --hash is given.
+
+    It is unclear whether this behaviour is intentional.
+ """
+ url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz")))
+ url = f"{url}#sha256=invalidhash"
+ digest = "393043e672415891885c9a2a0929b1af95fb866d6ca016b42d2e6ce53619b653"
+ with requirements_file(
+ f"simple @ {url} --hash sha256:{digest}", tmp_path
+ ) as reqs_file:
+ script.pip_install_local("--no-deps", "--require-hashes", "-r", reqs_file)
+
+
+def test_link_hash_in_dep_fails_require_hashes(
+ script: PipTestEnvironment, tmp_path: Path, shared_data: TestData
+) -> None:
+    """Test that a good hash in a direct URL dependency is not considered
+    for --require-hashes."""
+    # Create a project named pkga that depends on simple-1.0.tar.gz via a
+    # direct URL that includes a hash.
+ simple_url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz")))
+ simple_url_with_hash = (
+ f"{simple_url}#sha256="
+ "393043e672415891885c9a2a0929b1af95fb866d6ca016b42d2e6ce53619b653"
+ )
+ project_path = tmp_path / "pkga"
+ project_path.mkdir()
+ project_path.joinpath("pyproject.toml").write_text(
+ textwrap.dedent(
+ f"""\
+ [project]
+ name = "pkga"
+ version = "1.0"
+ dependencies = ["simple @ {simple_url_with_hash}"]
+ """
+ )
+ )
+ # Build a wheel for pkga and compute its hash.
+    wheelhouse = tmp_path / "wheelhouse"
+ wheelhouse.mkdir()
+ script.pip("wheel", "--no-deps", "-w", wheelhouse, project_path)
+ digest = hashlib.sha256(
+ wheelhouse.joinpath("pkga-1.0-py3-none-any.whl").read_bytes()
+ ).hexdigest()
+ # Install pkga from a requirements file with hash, using --require-hashes.
+ # This should fail because we have not provided a hash for the 'simple' dependency.
+ with requirements_file(f"pkga==1.0 --hash sha256:{digest}", tmp_path) as reqs_file:
+ result = script.pip(
+ "install",
+ "--no-build-isolation",
+ "--require-hashes",
+ "--no-index",
+ "-f",
+ wheelhouse,
+ "-r",
+ reqs_file,
+ expect_error=True,
+ )
+ assert "Hashes are required in --require-hashes mode" in result.stderr
+
+
+def test_bad_link_hash_in_dep_install_failure(
+ script: PipTestEnvironment, tmp_path: Path, shared_data: TestData
+) -> None:
+    """Test that a wrong hash in a direct URL dependency stops installation."""
+ url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz")))
+ url = f"{url}#sha256=invalidhash"
+ project_path = tmp_path / "pkga"
+ project_path.mkdir()
+ project_path.joinpath("pyproject.toml").write_text(
+ textwrap.dedent(
+ f"""\
+ [project]
+ name = "pkga"
+ version = "1.0"
+ dependencies = ["simple @ {url}"]
+ """
+ )
+ )
+ result = script.pip_install_local(
+ "--no-build-isolation", project_path, expect_error=True
+ )
+ assert "THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr, result.stderr
+
+
+def test_hashed_install_from_cache(
+ script: PipTestEnvironment, data: TestData, tmpdir: Path
+) -> None:
+ """
+ Test that installing from a cached built wheel works and that the hash is verified
+    against the hash of the original source archive stored in the cache entry.
+ """
+ with requirements_file(
+ "simple2==1.0 --hash=sha256:"
+ "9336af72ca661e6336eb87bc7de3e8844d853e3848c2b9bbd2e8bf01db88c2c7\n",
+ tmpdir,
+ ) as reqs_file:
+ result = script.pip_install_local(
+ "--use-pep517", "--no-build-isolation", "-r", reqs_file.resolve()
+ )
+ assert "Created wheel for simple2" in result.stdout
+ script.pip("uninstall", "simple2", "-y")
+ result = script.pip_install_local(
+ "--use-pep517", "--no-build-isolation", "-r", reqs_file.resolve()
+ )
+ assert "Using cached simple2" in result.stdout
+ # now try with an invalid hash
+ with requirements_file(
+ "simple2==1.0 --hash=sha256:invalid\n",
+ tmpdir,
+ ) as reqs_file:
+ script.pip("uninstall", "simple2", "-y")
+ result = script.pip_install_local(
+ "--use-pep517",
+ "--no-build-isolation",
+ "-r",
+ reqs_file.resolve(),
+ expect_error=True,
+ )
+ assert (
+ "WARNING: The hashes of the source archive found in cache entry "
+ "don't match, ignoring cached built wheel and re-downloading source."
+ ) in result.stderr
+ assert "Using cached simple2" in result.stdout
+ assert "ERROR: THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr
+
+
def assert_re_match(pattern: str, text: str) -> None:
assert re.search(pattern, text), f"Could not find {pattern!r} in {text!r}"
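
The new tests above cover hash pinning via the direct URL fragment itself. A hedged sketch of constructing such a pinned requirement (hypothetical local path; assumes the archive exists):

import hashlib
from pathlib import Path

archive = Path("/tmp/packages/simple-1.0.tar.gz")  # hypothetical sdist location
digest = hashlib.sha256(archive.read_bytes()).hexdigest()
requirement = f"simple @ {archive.as_uri()}#sha256={digest}"
# pip install --no-deps --require-hashes "<requirement>" then succeeds; a wrong
# fragment aborts with "THESE PACKAGES DO NOT MATCH THE HASHES".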
@@ -829,7 +984,10 @@ def test_install_global_option(script: PipTestEnvironment) -> None:
(In particular those that disable the actual install action)
"""
result = script.pip(
- "install", "--global-option=--version", "INITools==0.1", expect_stderr=True
+ "install",
+ "--global-option=--version",
+ "INITools==0.1",
+ expect_error=True, # build is going to fail because of --version
)
assert "INITools==0.1\n" in result.stdout
assert not result.files_created
@@ -1157,7 +1315,6 @@ def test_install_package_with_prefix(
rel_prefix_path = script.scratch / "prefix"
install_path = join(
sysconfig.get_path("purelib", vars={"base": rel_prefix_path}),
- # we still test for egg-info because no-binary implies setup.py install
"simple-1.0.dist-info",
)
result.did_create(install_path)
@@ -1498,15 +1655,12 @@ def test_install_subprocess_output_handling(
# This error is emitted 3 times:
# - by setup.py bdist_wheel
# - by setup.py clean
- # - by setup.py install which is used as fallback when setup.py bdist_wheel failed
- # Before, it failed only once because it attempted only setup.py install.
- # TODO update this when we remove the last setup.py install code path.
- assert 3 == result.stderr.count("I DIE, I DIE")
+ assert 2 == result.stderr.count("I DIE, I DIE")
result = script.pip(
*(args + ["--global-option=--fail", "--verbose"]), expect_error=True
)
- assert 3 == result.stderr.count("I DIE, I DIE")
+ assert 2 == result.stderr.count("I DIE, I DIE")
def test_install_log(script: PipTestEnvironment, data: TestData, tmpdir: Path) -> None:
@@ -1526,22 +1680,9 @@ def test_install_topological_sort(script: PipTestEnvironment, data: TestData) ->
assert order1 in res or order2 in res, res
-def test_install_wheel_broken(script: PipTestEnvironment) -> None:
- res = script.pip_install_local("wheelbroken", allow_stderr_error=True)
- assert "ERROR: Failed building wheel for wheelbroken" in res.stderr
- # Fallback to setup.py install (https://github.com/pypa/pip/issues/8368)
- assert "Successfully installed wheelbroken-0.1" in str(res), str(res)
-
-
def test_cleanup_after_failed_wheel(script: PipTestEnvironment) -> None:
- res = script.pip_install_local("wheelbrokenafter", allow_stderr_error=True)
+ res = script.pip_install_local("wheelbrokenafter", expect_error=True)
assert "ERROR: Failed building wheel for wheelbrokenafter" in res.stderr
- # One of the effects of not cleaning up is broken scripts:
- script_py = script.bin_path / "script.py"
- assert script_py.exists(), script_py
- with open(script_py) as f:
- shebang = f.readline().strip()
- assert shebang != "#!python", shebang
# OK, assert that we *said* we were cleaning up:
# /!\ if in need to change this, also change test_pep517_no_legacy_cleanup
assert "Running setup.py clean for wheelbrokenafter" in str(res), str(res)
@@ -1568,38 +1709,26 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No
"-f",
data.find_links,
to_install,
- allow_stderr_error=True, # error building wheelbroken
+ expect_error=True, # error building wheelbroken
)
- expected = (
- "Successfully installed requires-wheelbroken-upper-0"
- " upper-2.0 wheelbroken-0.1"
- )
- # Must have installed it all
- assert expected in str(res), str(res)
wheels: List[str] = []
for _, _, files in os.walk(wheels_cache):
wheels.extend(f for f in files if f.endswith(".whl"))
- # and built wheels for upper and wheelbroken
+ # Built wheel for upper
assert "Building wheel for upper" in str(res), str(res)
+ # Built wheel for wheelbroken, but failed
assert "Building wheel for wheelb" in str(res), str(res)
+ assert "Failed to build wheelbroken" in str(res), str(res)
# Wheels are built for local directories, but not cached.
assert "Building wheel for requir" in str(res), str(res)
- # wheelbroken has to run install
# into the cache
assert wheels != [], str(res)
- # and installed from the wheel
- assert "Running setup.py install for upper" not in str(res), str(res)
- # Wheels are built for local directories, but not cached.
- assert "Running setup.py install for requir" not in str(res), str(res)
- # wheelbroken has to run install
- assert "Running setup.py install for wheelb" in str(res), str(res)
- # We want to make sure pure python wheels do not have an implementation tag
assert wheels == [
"Upper-2.0-py{}-none-any.whl".format(sys.version_info[0]),
]
-def test_install_no_binary_disables_building_wheels(
+def test_install_no_binary_builds_wheels(
script: PipTestEnvironment, data: TestData
) -> None:
to_install = data.packages.joinpath("requires_wheelbroken_upper")
@@ -1610,22 +1739,14 @@ def test_install_no_binary_disables_building_wheels(
"-f",
data.find_links,
to_install,
- allow_stderr_error=True, # error building wheelbroken
- )
- expected = (
- "Successfully installed requires-wheelbroken-upper-0"
- " upper-2.0 wheelbroken-0.1"
+ expect_error=True, # error building wheelbroken
)
- # Must have installed it all
- assert expected in str(res), str(res)
- # and built wheels for wheelbroken only
+ # Wheels are built for all requirements
assert "Building wheel for wheelb" in str(res), str(res)
- # Wheels are built for local directories, but not cached across runs
assert "Building wheel for requir" in str(res), str(res)
- # Don't build wheel for upper which was blacklisted
assert "Building wheel for upper" in str(res), str(res)
- # And these two fell back to sdist based installed.
- assert "Running setup.py install for wheelb" in str(res), str(res)
+ # Wheelbroken failed to build
+ assert "Failed to build wheelbroken" in str(res), str(res)
@pytest.mark.network
@@ -1639,7 +1760,6 @@ def test_install_no_binary_builds_pep_517_wheel(
assert expected in str(res), str(res)
assert "Building wheel for pep517-setup" in str(res), str(res)
- assert "Running setup.py install for pep517-set" not in str(res), str(res)
@pytest.mark.network
@@ -1656,7 +1776,7 @@ def test_install_no_binary_uses_local_backend(
assert os.path.isfile(marker), "Local PEP 517 backend not used"
-def test_install_no_binary_disables_cached_wheels(
+def test_install_no_binary_uses_cached_wheels(
script: PipTestEnvironment, data: TestData
) -> None:
# Seed the cache
@@ -1673,7 +1793,7 @@ def test_install_no_binary_disables_cached_wheels(
)
assert "Successfully installed upper-2.0" in str(res), str(res)
# upper is built and not obtained from cache
- assert "Building wheel for upper" in str(res), str(res)
+ assert "Building wheel for upper" not in str(res), str(res)
def test_install_editable_with_wrong_egg_name(
@@ -2217,7 +2337,6 @@ def test_install_skip_work_dir_pkg(script: PipTestEnvironment, data: TestData) -
def test_install_verify_package_name_normalization(
script: PipTestEnvironment, package_name: str
) -> None:
-
"""
    Test that installing a package again, using a name which
    normalizes to the original package name, is a no-op
diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py
index 563b5604a..9f8a80677 100644
--- a/tests/functional/test_install_config.py
+++ b/tests/functional/test_install_config.py
@@ -370,7 +370,7 @@ def auth_needed(request: pytest.FixtureRequest) -> bool:
return request.param
-@pytest.fixture(params=("disabled", "import", "subprocess", "auto"))
+@pytest.fixture(params=(None, "disabled", "import", "subprocess", "auto"))
def keyring_provider(request: pytest.FixtureRequest) -> str:
return request.param
@@ -389,17 +389,20 @@ def flags(
keyring_provider_implementation: str,
) -> List[str]:
if (
- keyring_provider != "auto"
+ keyring_provider not in [None, "auto"]
and keyring_provider_implementation != keyring_provider
):
pytest.skip()
- flags = ["--keyring-provider", keyring_provider]
+ flags = []
+ if keyring_provider is not None:
+ flags.append("--keyring-provider")
+ flags.append(keyring_provider)
if not interactive:
flags.append("--no-input")
if auth_needed:
if keyring_provider_implementation == "disabled" or (
- not interactive and keyring_provider == "auto"
+ not interactive and keyring_provider in [None, "auto"]
):
request.applymarker(pytest.mark.xfail())
return flags
@@ -441,7 +444,10 @@ def test_prompt_for_keyring_if_needed(
virtualenv = virtualenv_factory(workspace.joinpath("venv"))
script = script_factory(workspace.joinpath("venv"), virtualenv, environ=environ)
- if keyring_provider != "auto" or keyring_provider_implementation != "subprocess":
+ if (
+ keyring_provider not in [None, "auto"]
+ or keyring_provider_implementation != "subprocess"
+ ):
script.pip(
"install",
"keyring",
diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py
index 83f5b5c2c..003b29d38 100644
--- a/tests/functional/test_install_report.py
+++ b/tests/functional/test_install_report.py
@@ -1,4 +1,5 @@
import json
+import textwrap
from pathlib import Path
from typing import Any, Dict
@@ -93,6 +94,39 @@ def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> Non
@pytest.mark.network
+def test_install_report_direct_archive(
+ script: PipTestEnvironment, tmp_path: Path, shared_data: TestData
+) -> None:
+ """Test report for direct URL archive."""
+ report_path = tmp_path / "report.json"
+ script.pip(
+ "install",
+ str(shared_data.root / "packages" / "simplewheel-1.0-py2.py3-none-any.whl"),
+ "--dry-run",
+ "--no-index",
+ "--report",
+ str(report_path),
+ )
+ report = json.loads(report_path.read_text())
+ assert "install" in report
+ assert len(report["install"]) == 1
+ simplewheel_report = _install_dict(report)["simplewheel"]
+ assert simplewheel_report["metadata"]["name"] == "simplewheel"
+ assert simplewheel_report["requested"] is True
+ assert simplewheel_report["is_direct"] is True
+ url = simplewheel_report["download_info"]["url"]
+ assert url.startswith("file://")
+ assert url.endswith("/packages/simplewheel-1.0-py2.py3-none-any.whl")
+ assert (
+ simplewheel_report["download_info"]["archive_info"]["hash"]
+ == "sha256=e63aa139caee941ec7f33f057a5b987708c2128238357cf905429846a2008718"
+ )
+ assert simplewheel_report["download_info"]["archive_info"]["hashes"] == {
+ "sha256": "e63aa139caee941ec7f33f057a5b987708c2128238357cf905429846a2008718"
+ }
+
+
+@pytest.mark.network
def test_install_report_vcs_and_wheel_cache(
script: PipTestEnvironment, tmp_path: Path
) -> None:
@@ -178,6 +212,97 @@ def test_install_report_vcs_editable(
assert pip_test_package_report["download_info"]["dir_info"]["editable"] is True
+@pytest.mark.network
+def test_install_report_local_path_with_extras(
+ script: PipTestEnvironment, tmp_path: Path, shared_data: TestData
+) -> None:
+    """Test report for a local path install with extras."""
+ project_path = tmp_path / "pkga"
+ project_path.mkdir()
+ project_path.joinpath("pyproject.toml").write_text(
+ textwrap.dedent(
+ """\
+ [project]
+ name = "pkga"
+ version = "1.0"
+
+ [project.optional-dependencies]
+ test = ["simple"]
+ """
+ )
+ )
+ report_path = tmp_path / "report.json"
+ script.pip(
+ "install",
+ "--dry-run",
+ "--no-build-isolation",
+ "--no-index",
+ "--find-links",
+ str(shared_data.root / "packages/"),
+ "--report",
+ str(report_path),
+ str(project_path) + "[test]",
+ )
+ report = json.loads(report_path.read_text())
+ assert len(report["install"]) == 2
+ pkga_report = report["install"][0]
+ assert pkga_report["metadata"]["name"] == "pkga"
+ assert pkga_report["is_direct"] is True
+ assert pkga_report["requested"] is True
+ assert pkga_report["requested_extras"] == ["test"]
+ simple_report = report["install"][1]
+ assert simple_report["metadata"]["name"] == "simple"
+ assert simple_report["is_direct"] is False
+ assert simple_report["requested"] is False
+ assert "requested_extras" not in simple_report
+
+
+@pytest.mark.network
+def test_install_report_editable_local_path_with_extras(
+ script: PipTestEnvironment, tmp_path: Path, shared_data: TestData
+) -> None:
+    """Test report for an editable local path install with extras."""
+ project_path = tmp_path / "pkga"
+ project_path.mkdir()
+ project_path.joinpath("pyproject.toml").write_text(
+ textwrap.dedent(
+ """\
+ [project]
+ name = "pkga"
+ version = "1.0"
+
+ [project.optional-dependencies]
+ test = ["simple"]
+ """
+ )
+ )
+ report_path = tmp_path / "report.json"
+ script.pip(
+ "install",
+ "--dry-run",
+ "--no-build-isolation",
+ "--no-index",
+ "--find-links",
+ str(shared_data.root / "packages/"),
+ "--report",
+ str(report_path),
+ "--editable",
+ str(project_path) + "[test]",
+ )
+ report = json.loads(report_path.read_text())
+ assert len(report["install"]) == 2
+ pkga_report = report["install"][0]
+ assert pkga_report["metadata"]["name"] == "pkga"
+ assert pkga_report["is_direct"] is True
+ assert pkga_report["requested"] is True
+ assert pkga_report["requested_extras"] == ["test"]
+ simple_report = report["install"][1]
+ assert simple_report["metadata"]["name"] == "simple"
+ assert simple_report["is_direct"] is False
+ assert simple_report["requested"] is False
+ assert "requested_extras" not in simple_report
+
+
def test_install_report_to_stdout(
script: PipTestEnvironment, shared_data: TestData
) -> None:
diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py
index 3ad953481..96cff0dc5 100644
--- a/tests/functional/test_install_reqs.py
+++ b/tests/functional/test_install_reqs.py
@@ -2,7 +2,7 @@ import json
import os
import textwrap
from pathlib import Path
-from typing import Any, Callable
+from typing import TYPE_CHECKING, Any
import pytest
@@ -18,6 +18,11 @@ from tests.lib import (
)
from tests.lib.local_repos import local_checkout
+if TYPE_CHECKING:
+ from typing import Protocol
+else:
+ Protocol = object
+
class ArgRecordingSdist:
def __init__(self, sdist_path: Path, args_path: Path) -> None:
@@ -28,33 +33,42 @@ class ArgRecordingSdist:
return json.loads(self._args_path.read_text())
+class ArgRecordingSdistMaker(Protocol):
+ def __call__(self, name: str, **kwargs: Any) -> ArgRecordingSdist:
+ ...
+
+
@pytest.fixture()
def arg_recording_sdist_maker(
script: PipTestEnvironment,
-) -> Callable[[str], ArgRecordingSdist]:
- arg_writing_setup_py = textwrap.dedent(
+) -> ArgRecordingSdistMaker:
+ arg_writing_setup_py_prelude = textwrap.dedent(
"""
import io
import json
import os
import sys
- from setuptools import setup
-
args_path = os.path.join(os.environ["OUTPUT_DIR"], "{name}.json")
with open(args_path, 'w') as f:
json.dump(sys.argv, f)
-
- setup(name={name!r}, version="0.1.0")
"""
)
output_dir = script.scratch_path.joinpath("args_recording_sdist_maker_output")
output_dir.mkdir(parents=True)
script.environ["OUTPUT_DIR"] = str(output_dir)
- def _arg_recording_sdist_maker(name: str) -> ArgRecordingSdist:
- extra_files = {"setup.py": arg_writing_setup_py.format(name=name)}
- sdist_path = create_basic_sdist_for_package(script, name, "0.1.0", extra_files)
+ def _arg_recording_sdist_maker(
+ name: str,
+ **kwargs: Any,
+ ) -> ArgRecordingSdist:
+ sdist_path = create_basic_sdist_for_package(
+ script,
+ name,
+ "0.1.0",
+ setup_py_prelude=arg_writing_setup_py_prelude.format(name=name),
+ **kwargs,
+ )
args_path = output_dir / f"{name}.json"
return ArgRecordingSdist(sdist_path, args_path)
@@ -325,7 +339,6 @@ def test_wheel_user_with_prefix_in_pydistutils_cfg(
"install", "--user", "--no-index", "-f", data.find_links, "requiresupper"
)
# Check that we are really installing a wheel
- assert "Running setup.py install for requiresupper" not in result.stdout
assert "installed requiresupper" in result.stdout
@@ -647,7 +660,7 @@ def test_install_distribution_union_with_constraints(
msg = "Unnamed requirements are not allowed as constraints"
assert msg in result.stderr
else:
- assert "Running setup.py install for LocalExtras" in result.stdout
+ assert "Building wheel for LocalExtras" in result.stdout
result.did_create(script.site_packages / "singlemodule.py")
@@ -728,3 +741,79 @@ def test_install_unsupported_wheel_file(
in result.stderr
)
assert len(result.files_created) == 0
+
+
+def test_config_settings_local_to_package(
+ script: PipTestEnvironment,
+ common_wheels: Path,
+ arg_recording_sdist_maker: ArgRecordingSdistMaker,
+) -> None:
+ pyproject_toml = textwrap.dedent(
+ """
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+ """
+ )
+ simple0_sdist = arg_recording_sdist_maker(
+ "simple0",
+ extra_files={"pyproject.toml": pyproject_toml},
+ depends=["foo"],
+ )
+ foo_sdist = arg_recording_sdist_maker(
+ "foo",
+ extra_files={"pyproject.toml": pyproject_toml},
+ )
+ simple1_sdist = arg_recording_sdist_maker(
+ "simple1",
+ extra_files={"pyproject.toml": pyproject_toml},
+ depends=["bar"],
+ )
+ bar_sdist = arg_recording_sdist_maker(
+ "bar",
+ extra_files={"pyproject.toml": pyproject_toml},
+ depends=["simple3"],
+ )
+ simple3_sdist = arg_recording_sdist_maker(
+ "simple3", extra_files={"pyproject.toml": pyproject_toml}
+ )
+ simple2_sdist = arg_recording_sdist_maker(
+ "simple2",
+ extra_files={"pyproject.toml": pyproject_toml},
+ )
+
+ reqs_file = script.scratch_path.joinpath("reqs.txt")
+ reqs_file.write_text(
+ textwrap.dedent(
+ """
+ simple0 --config-settings "--build-option=--verbose"
+ foo --config-settings "--build-option=--quiet"
+ simple1 --config-settings "--build-option=--verbose"
+ simple2
+ """
+ )
+ )
+
+ script.pip(
+ "install",
+ "--no-index",
+ "-f",
+ script.scratch_path,
+ "-f",
+ common_wheels,
+ "-r",
+ reqs_file,
+ )
+
+ simple0_args = simple0_sdist.args()
+ assert "--verbose" in simple0_args
+ foo_args = foo_sdist.args()
+ assert "--quiet" in foo_args
+ simple1_args = simple1_sdist.args()
+ assert "--verbose" in simple1_args
+ bar_args = bar_sdist.args()
+ assert "--verbose" not in bar_args
+ simple3_args = simple3_sdist.args()
+ assert "--verbose" not in simple3_args
+ simple2_args = simple2_sdist.args()
+ assert "--verbose" not in simple2_args
diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py
index d7e8c2602..971526c51 100644
--- a/tests/functional/test_install_vcs_git.py
+++ b/tests/functional/test_install_vcs_git.py
@@ -392,7 +392,7 @@ def test_git_with_non_editable_unpacking(
)
result = script.pip(
"install",
- "--global-option=--version",
+ "--global-option=--quiet",
local_url,
allow_stderr_warning=True,
)
diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py
index a7e9022a5..b8ec0510a 100644
--- a/tests/functional/test_show.py
+++ b/tests/functional/test_show.py
@@ -1,14 +1,17 @@
import os
import pathlib
import re
+import textwrap
from pip import __version__
from pip._internal.commands.show import search_packages_info
-from pip._internal.operations.install.legacy import (
- write_installed_files_from_setuptools_record,
-)
from pip._internal.utils.unpacking import untar_file
-from tests.lib import PipTestEnvironment, TestData, create_test_package_with_setup
+from tests.lib import (
+ PipTestEnvironment,
+ TestData,
+ create_test_package_with_setup,
+ pyversion,
+)
def test_basic_show(script: PipTestEnvironment) -> None:
@@ -77,10 +80,19 @@ def test_show_with_files_from_legacy(
str(setuptools_record),
cwd=source_dir,
)
- write_installed_files_from_setuptools_record(
- setuptools_record.read_text().splitlines(),
- root=None,
- req_description="simple==1.0",
+ # Emulate the installed-files.txt generation which previous pip version did
+ # after running setup.py install (write_installed_files_from_setuptools_record).
+ egg_info_dir = script.site_packages_path / f"simple-1.0-py{pyversion}.egg-info"
+ egg_info_dir.joinpath("installed-files.txt").write_text(
+ textwrap.dedent(
+ """\
+ ../simple/__init__.py
+ PKG-INFO
+ SOURCES.txt
+ dependency_links.txt
+ top_level.txt
+ """
+ )
)
result = script.pip("show", "--files", "simple")
diff --git a/tests/unit/test_base_command.py b/tests/unit/test_base_command.py
index 71a50fca6..daec5fc6c 100644
--- a/tests/unit/test_base_command.py
+++ b/tests/unit/test_base_command.py
@@ -22,7 +22,6 @@ def fixed_time() -> Iterator[None]:
class FakeCommand(Command):
-
_name = "fake"
def __init__(
diff --git a/tests/unit/test_cache.py b/tests/unit/test_cache.py
index f27daa266..d0fee69c3 100644
--- a/tests/unit/test_cache.py
+++ b/tests/unit/test_cache.py
@@ -4,13 +4,12 @@ from pathlib import Path
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._internal.cache import WheelCache, _hash_dict
-from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
from pip._internal.utils.misc import ensure_dir
def test_falsey_path_none() -> None:
- wc = WheelCache("", FormatControl())
+ wc = WheelCache("")
assert wc.cache_dir is None
@@ -18,7 +17,7 @@ def test_subdirectory_fragment() -> None:
"""
Test the subdirectory URL fragment is part of the cache key.
"""
- wc = WheelCache("/tmp/.foo/", FormatControl())
+ wc = WheelCache("/tmp/.foo/")
link1 = Link("git+https://g.c/o/r#subdirectory=d1")
link2 = Link("git+https://g.c/o/r#subdirectory=d2")
assert wc.get_path_for_link(link1) != wc.get_path_for_link(link2)
@@ -29,7 +28,7 @@ def test_wheel_name_filter(tmpdir: Path) -> None:
Test the wheel cache filters on wheel name when several wheels
for different package are stored under the same cache directory.
"""
- wc = WheelCache(os.fspath(tmpdir), FormatControl())
+ wc = WheelCache(os.fspath(tmpdir))
link = Link("https://g.c/package.tar.gz")
cache_path = wc.get_path_for_link(link)
ensure_dir(cache_path)
@@ -57,7 +56,7 @@ def test_link_to_cache(tmpdir: Path) -> None:
Test that Link.from_json() produces Links with consistent cache
locations
"""
- wc = WheelCache(os.fspath(tmpdir), FormatControl())
+ wc = WheelCache(os.fspath(tmpdir))
# Define our expectations for stable cache path.
i_name = interpreter_name()
i_version = interpreter_version()
@@ -95,7 +94,7 @@ def test_link_to_cache(tmpdir: Path) -> None:
def test_get_cache_entry(tmpdir: Path) -> None:
- wc = WheelCache(os.fspath(tmpdir), FormatControl())
+ wc = WheelCache(os.fspath(tmpdir))
persi_link = Link("https://g.c/o/r/persi")
persi_path = wc.get_path_for_link(persi_link)
ensure_dir(persi_path)
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
index 47307c00e..e855d78e1 100644
--- a/tests/unit/test_collector.py
+++ b/tests/unit/test_collector.py
@@ -1014,6 +1014,7 @@ def test_link_collector_create_find_links_expansion(
"""
Test "~" expansion in --find-links paths.
"""
+
# This is a mock version of expanduser() that expands "~" to the tmpdir.
def expand_path(path: str) -> str:
if path.startswith("~/"):
@@ -1051,6 +1052,21 @@ def test_link_collector_create_find_links_expansion(
LinkHash("sha256", "aa113592bbe"),
),
(
+ "https://pypi.org/pip-18.0.tar.gz#sha256=aa113592bbe&subdirectory=setup",
+ LinkHash("sha256", "aa113592bbe"),
+ ),
+ (
+ "https://pypi.org/pip-18.0.tar.gz#subdirectory=setup&sha256=aa113592bbe",
+ LinkHash("sha256", "aa113592bbe"),
+ ),
+ # "xsha256" is not a valid algorithm, so we discard it.
+ ("https://pypi.org/pip-18.0.tar.gz#xsha256=aa113592bbe", None),
+ # Empty hash.
+ (
+ "https://pypi.org/pip-18.0.tar.gz#sha256=",
+ LinkHash("sha256", ""),
+ ),
+ (
"https://pypi.org/pip-18.0.tar.gz#md5=aa113592bbe",
LinkHash("md5", "aa113592bbe"),
),
@@ -1060,4 +1076,21 @@ def test_link_collector_create_find_links_expansion(
],
)
def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None:
- assert LinkHash.split_hash_name_and_value(url) == result
+ assert LinkHash.find_hash_url_fragment(url) == result
+
+
+@pytest.mark.parametrize(
+ "dist_info_metadata, result",
+ [
+ ("sha256=aa113592bbe", LinkHash("sha256", "aa113592bbe")),
+ ("sha256=", LinkHash("sha256", "")),
+ ("sha500=aa113592bbe", None),
+ ("true", None),
+ ("", None),
+ ("aa113592bbe", None),
+ ],
+)
+def test_pep658_hash_parsing(
+ dist_info_metadata: str, result: Optional[LinkHash]
+) -> None:
+ assert LinkHash.parse_pep658_hash(dist_info_metadata) == result
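
The renamed and added parsers split responsibilities: find_hash_url_fragment scans a URL fragment for a supported algorithm, while parse_pep658_hash handles the data-dist-info-metadata attribute value. A small sketch against pip's internal API as exercised above:

from pip._internal.models.link import LinkHash

url = "https://pypi.org/pip-18.0.tar.gz#subdirectory=setup&sha256=aa113592bbe"
assert LinkHash.find_hash_url_fragment(url) == LinkHash("sha256", "aa113592bbe")
assert LinkHash.parse_pep658_hash("sha256=aa113592bbe") == LinkHash("sha256", "aa113592bbe")
assert LinkHash.parse_pep658_hash("true") is None  # bare "true" advertises metadata but carries no hash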
diff --git a/tests/unit/test_direct_url.py b/tests/unit/test_direct_url.py
index 3ca982b50..151e0a30f 100644
--- a/tests/unit/test_direct_url.py
+++ b/tests/unit/test_direct_url.py
@@ -140,3 +140,33 @@ def test_redact_url() -> None:
== "https://${PIP_TOKEN}@g.c/u/p.git"
)
assert _redact_git("ssh://git@g.c/u/p.git") == "ssh://git@g.c/u/p.git"
+
+
+def test_hash_to_hashes() -> None:
+ direct_url = DirectUrl(url="https://e.c/archive.tar.gz", info=ArchiveInfo())
+ assert isinstance(direct_url.info, ArchiveInfo)
+ direct_url.info.hash = "sha256=abcdef"
+ assert direct_url.info.hashes == {"sha256": "abcdef"}
+
+
+def test_hash_to_hashes_constructor() -> None:
+ direct_url = DirectUrl(
+ url="https://e.c/archive.tar.gz", info=ArchiveInfo(hash="sha256=abcdef")
+ )
+ assert isinstance(direct_url.info, ArchiveInfo)
+ assert direct_url.info.hashes == {"sha256": "abcdef"}
+ direct_url = DirectUrl(
+ url="https://e.c/archive.tar.gz",
+ info=ArchiveInfo(hash="sha256=abcdef", hashes={"sha512": "123456"}),
+ )
+ assert isinstance(direct_url.info, ArchiveInfo)
+ assert direct_url.info.hashes == {"sha256": "abcdef", "sha512": "123456"}
+ # In case of conflict between hash and hashes, hashes wins.
+ direct_url = DirectUrl(
+ url="https://e.c/archive.tar.gz",
+ info=ArchiveInfo(
+ hash="sha256=abcdef", hashes={"sha256": "012345", "sha512": "123456"}
+ ),
+ )
+ assert isinstance(direct_url.info, ArchiveInfo)
+ assert direct_url.info.hashes == {"sha256": "012345", "sha512": "123456"}
diff --git a/tests/unit/test_link.py b/tests/unit/test_link.py
index df4957d59..311be5888 100644
--- a/tests/unit/test_link.py
+++ b/tests/unit/test_link.py
@@ -108,7 +108,7 @@ class TestLink:
)
def test_invalid_egg_fragments(self, fragment: str) -> None:
url = f"git+https://example.com/package#egg={fragment}"
- with pytest.raises(Exception):
+ with pytest.raises(ValueError):
Link(url)
@pytest.mark.parametrize(
diff --git a/tests/unit/test_network_cache.py b/tests/unit/test_network_cache.py
index 8764b1343..a5519864f 100644
--- a/tests/unit/test_network_cache.py
+++ b/tests/unit/test_network_cache.py
@@ -24,7 +24,6 @@ class TestSafeFileCache:
"""
def test_cache_roundtrip(self, cache_tmpdir: Path) -> None:
-
cache = SafeFileCache(os.fspath(cache_tmpdir))
assert cache.get("test key") is None
cache.set("test key", b"a test string")
diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py
index 39396512a..9e3a0a5d6 100644
--- a/tests/unit/test_options.py
+++ b/tests/unit/test_options.py
@@ -454,7 +454,6 @@ class TestCountOptions(AddFakeCommandMixin):
class TestGeneralOptions(AddFakeCommandMixin):
-
# the reason to specifically test general options is due to the
# extra processing they receive, and the number of bugs we've had
diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py
index c46883dc2..c9742812b 100644
--- a/tests/unit/test_req.py
+++ b/tests/unit/test_req.py
@@ -25,7 +25,6 @@ from pip._internal.exceptions import (
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import select_backend
from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
-from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
from pip._internal.network.session import PipSession
from pip._internal.operations.build.build_tracker import get_build_tracker
@@ -403,7 +402,7 @@ class TestRequirementSet:
"""Test download_info hash is not set for an archive with legacy cache entry."""
url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri()
finder = make_test_finder()
- wheel_cache = WheelCache(str(tmp_path / "cache"), FormatControl())
+ wheel_cache = WheelCache(str(tmp_path / "cache"))
cache_entry_dir = wheel_cache.get_path_for_link(Link(url))
Path(cache_entry_dir).mkdir(parents=True)
wheel.make_wheel(name="simple", version="1.0").save_to_dir(cache_entry_dir)
@@ -412,7 +411,8 @@ class TestRequirementSet:
reqset = resolver.resolve([ireq], True)
assert len(reqset.all_requirements) == 1
req = reqset.all_requirements[0]
- assert req.original_link_is_in_wheel_cache
+ assert req.is_wheel_from_cache
+ assert req.cached_wheel_source_link
assert req.download_info
assert req.download_info.url == url
assert isinstance(req.download_info.info, ArchiveInfo)
@@ -426,7 +426,7 @@ class TestRequirementSet:
url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri()
hash = "sha256=ad977496000576e1b6c41f6449a9897087ce9da6db4f15b603fe8372af4bf3c6"
finder = make_test_finder()
- wheel_cache = WheelCache(str(tmp_path / "cache"), FormatControl())
+ wheel_cache = WheelCache(str(tmp_path / "cache"))
cache_entry_dir = wheel_cache.get_path_for_link(Link(url))
Path(cache_entry_dir).mkdir(parents=True)
Path(cache_entry_dir).joinpath("origin.json").write_text(
@@ -438,7 +438,8 @@ class TestRequirementSet:
reqset = resolver.resolve([ireq], True)
assert len(reqset.all_requirements) == 1
req = reqset.all_requirements[0]
- assert req.original_link_is_in_wheel_cache
+ assert req.is_wheel_from_cache
+ assert req.cached_wheel_source_link
assert req.download_info
assert req.download_info.url == url
assert isinstance(req.download_info.info, ArchiveInfo)
diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py
index 30cbcf71c..439c41563 100644
--- a/tests/unit/test_req_file.py
+++ b/tests/unit/test_req_file.py
@@ -1,7 +1,6 @@
import collections
import logging
import os
-import subprocess
import textwrap
from optparse import Values
from pathlib import Path
@@ -74,7 +73,13 @@ def parse_reqfile(
options=options,
constraint=constraint,
):
- yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
+ yield install_req_from_parsed_requirement(
+ parsed_req,
+ isolated=isolated,
+ config_settings=parsed_req.options.get("config_settings")
+ if parsed_req.options
+ else None,
+ )
def test_read_file_url(tmp_path: Path, session: PipSession) -> None:
@@ -344,10 +349,14 @@ class TestProcessLine:
assert reqs[0].constraint
def test_options_on_a_requirement_line(self, line_processor: LineProcessor) -> None:
- line = 'SomeProject --global-option="yo3" --global-option "yo4"'
+ line = (
+ 'SomeProject --global-option="yo3" --global-option "yo4" '
+ '--config-settings="yo3=yo4" --config-settings "yo1=yo2"'
+ )
filename = "filename"
req = line_processor(line, filename, 1)[0]
assert req.global_options == ["yo3", "yo4"]
+ assert req.config_settings == {"yo3": "yo4", "yo1": "yo2"}
def test_hash_options(self, line_processor: LineProcessor) -> None:
"""Test the --hash option: mostly its value storage.
@@ -613,7 +622,6 @@ class TestBreakOptionsArgs:
class TestOptionVariants:
-
# this suite is really just testing optparse, but added it anyway
def test_variant1(
@@ -881,14 +889,4 @@ class TestParseRequirements:
)
)
- req.source_dir = os.curdir
- with mock.patch.object(subprocess, "Popen") as popen:
- popen.return_value.stdout.readline.return_value = b""
- try:
- req.install([])
- except Exception:
- pass
-
- last_call = popen.call_args_list[-1]
- args = last_call[0][0]
- assert 0 < args.index(global_option) < args.index("install")
+ assert req.global_options == [global_option]
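
The updated assertion checks the parsed option directly instead of spying on a subprocess call, which is far less brittle. For the config-settings additions above, a sketch of how repeated key=value occurrences fold into a dict, mirroring the test's expectation (the real parsing lives in pip's requirements-file machinery; this helper is only illustrative):

    def fold_config_settings(values):
        # Each occurrence is "key=value"; distinct keys accumulate in one
        # dict, matching {"yo3": "yo4", "yo1": "yo2"} in the test above.
        settings = {}
        for item in values:
            key, _, value = item.partition("=")
            settings[key] = value
        return settings

    assert fold_config_settings(["yo3=yo4", "yo1=yo2"]) == {
        "yo3": "yo4",
        "yo1": "yo2",
    }
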
diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py
index 4d99acfd3..b4ae97350 100644
--- a/tests/unit/test_req_uninstall.py
+++ b/tests/unit/test_req_uninstall.py
@@ -21,7 +21,7 @@ from tests.lib import create_file
# Pretend all files are local, so UninstallPathSet accepts files in the tmpdir,
# outside the virtualenv
-def mock_is_local(path: str) -> bool:
+def mock_permitted(ups: UninstallPathSet, path: str) -> bool:
return True
@@ -129,7 +129,11 @@ def test_compressed_listing(tmpdir: Path) -> None:
class TestUninstallPathSet:
def test_add(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None:
- monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local)
+ monkeypatch.setattr(
+ pip._internal.req.req_uninstall.UninstallPathSet,
+ "_permitted",
+ mock_permitted,
+ )
# Fix case for windows tests
file_extant = os.path.normcase(os.path.join(tmpdir, "foo"))
file_nonexistent = os.path.normcase(os.path.join(tmpdir, "nonexistent"))
@@ -145,7 +149,11 @@ class TestUninstallPathSet:
assert ups._paths == {file_extant}
def test_add_pth(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
- monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local)
+ monkeypatch.setattr(
+ pip._internal.req.req_uninstall.UninstallPathSet,
+ "_permitted",
+ mock_permitted,
+ )
# Fix case for windows tests
tmpdir = os.path.normcase(tmp_path)
on_windows = sys.platform == "win32"
@@ -175,7 +183,11 @@ class TestUninstallPathSet:
@pytest.mark.skipif("sys.platform == 'win32'")
def test_add_symlink(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None:
- monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local)
+ monkeypatch.setattr(
+ pip._internal.req.req_uninstall.UninstallPathSet,
+ "_permitted",
+ mock_permitted,
+ )
f = os.path.join(tmpdir, "foo")
with open(f, "w"):
pass
@@ -187,7 +199,11 @@ class TestUninstallPathSet:
assert ups._paths == {foo_link}
def test_compact_shorter_path(self, monkeypatch: pytest.MonkeyPatch) -> None:
- monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local)
+ monkeypatch.setattr(
+ pip._internal.req.req_uninstall.UninstallPathSet,
+ "_permitted",
+ mock_permitted,
+ )
monkeypatch.setattr("os.path.exists", lambda p: True)
# This deals with nt/posix path differences
short_path = os.path.normcase(
@@ -202,7 +218,11 @@ class TestUninstallPathSet:
def test_detect_symlink_dirs(
self, monkeypatch: pytest.MonkeyPatch, tmpdir: Path
) -> None:
- monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local)
+ monkeypatch.setattr(
+ pip._internal.req.req_uninstall.UninstallPathSet,
+ "_permitted",
+ mock_permitted,
+ )
# construct 2 paths:
# tmpdir/dir/file
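
These tests switch from patching the module-level is_local helper to patching the _permitted method on UninstallPathSet itself, which is why mock_permitted now takes an explicit ups argument in the self position. A self-contained sketch of the pattern, using hypothetical names:

    import pytest

    class PathSet:
        def _permitted(self, path: str) -> bool:
            return path.startswith("/allowed")  # stand-in for the real check

        def add(self, path: str) -> bool:
            return self._permitted(path)

    # The replacement must accept self explicitly: setattr on the class
    # installs it as a plain function, which then serves as the method.
    def always_permitted(self: PathSet, path: str) -> bool:
        return True

    def test_add_any_path(monkeypatch: pytest.MonkeyPatch) -> None:
        monkeypatch.setattr(PathSet, "_permitted", always_permitted)
        assert PathSet().add("/anywhere")
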
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
index 1daaecbf4..450081cfd 100644
--- a/tests/unit/test_utils.py
+++ b/tests/unit/test_utils.py
@@ -53,7 +53,6 @@ class Tests_EgglinkPath:
"util.egg_link_path_from_location() tests"
def setup_method(self) -> None:
-
project = "foo"
self.mock_dist = Mock(project_name=project)
@@ -426,6 +425,14 @@ class TestHashes:
cache[Hashes({"sha256": ["ab", "cd"]})] = 42
assert cache[Hashes({"sha256": ["ab", "cd"]})] == 42
+ def test_has_one_of(self) -> None:
+ hashes = Hashes({"sha256": ["abcd", "efgh"], "sha384": ["ijkl"]})
+ assert hashes.has_one_of({"sha256": "abcd"})
+ assert hashes.has_one_of({"sha256": "efgh"})
+ assert not hashes.has_one_of({"sha256": "xyzt"})
+ empty_hashes = Hashes()
+ assert not empty_hashes.has_one_of({"sha256": "xyzt"})
+
class TestEncoding:
"""Tests for pip._internal.utils.encoding"""
diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py
index c5a8f3be4..6d6d1a3dc 100644
--- a/tests/unit/test_wheel.py
+++ b/tests/unit/test_wheel.py
@@ -518,7 +518,6 @@ class TestInstallUnpackedWheel:
class TestMessageAboutScriptsNotOnPATH:
-
tilde_warning_msg = (
"NOTE: The current PATH contains path(s) starting with `~`, "
"which may not be expanded by all applications."
@@ -589,6 +588,12 @@ class TestMessageAboutScriptsNotOnPATH:
)
assert retval is None
+ def test_PATH_check_path_normalization(self) -> None:
+ retval = self._template(
+ paths=["/a/./b/../b//c/", "/d/e/bin"], scripts=["/a/b/c/foo"]
+ )
+ assert retval is None
+
def test_single_script__single_dir_on_PATH(self) -> None:
retval = self._template(paths=["/a/b", "/c/d/bin"], scripts=["/a/b/foo"])
assert retval is None
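
The new normalization test feeds a PATH entry containing ".", "..", and doubled separators; the expected None return implies the checker normalizes paths before comparing them to script locations. The equivalence it relies on:

    import os

    # normpath collapses ".", "..", and repeated separators, so the messy
    # PATH entry from the test resolves to the script's directory.
    assert os.path.normpath("/a/./b/../b//c/") == os.path.normpath("/a/b/c")
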
diff --git a/tools/vendoring/patches/pkg_resources.patch b/tools/vendoring/patches/pkg_resources.patch
index 39bb2eac2..48ae95431 100644
--- a/tools/vendoring/patches/pkg_resources.patch
+++ b/tools/vendoring/patches/pkg_resources.patch
@@ -1,26 +1,4 @@
diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py
-index d59226af9..3b9565893 100644
---- a/src/pip/_vendor/pkg_resources/__init__.py
-+++ b/src/pip/_vendor/pkg_resources/__init__.py
-@@ -77,7 +77,7 @@
- join_continuation,
- )
-
--from pkg_resources.extern import appdirs
-+from pkg_resources.extern import platformdirs
- from pkg_resources.extern import packaging
- __import__('pkg_resources.extern.packaging.version')
- __import__('pkg_resources.extern.packaging.specifiers')
-@@ -1321,7 +1321,7 @@ def get_default_cache():
- """
- return (
- os.environ.get('PYTHON_EGG_CACHE')
-- or appdirs.user_cache_dir(appname='Python-Eggs')
-+ or platformdirs.user_cache_dir(appname='Python-Eggs')
- )
-
-
-diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py
index 3f2476a0c..8d5727d35 100644
--- a/src/pip/_vendor/pkg_resources/__init__.py
+++ b/src/pip/_vendor/pkg_resources/__init__.py
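
The dropped hunk rewrote pkg_resources' appdirs import to platformdirs; removing it from the patch suggests the newly vendored pkg_resources already ships with that switch made upstream. The call the hunk used to patch looks roughly like this (sketch; outside pip the import would be plain platformdirs):

    import os
    from pip._vendor import platformdirs

    # Fall back to a per-user cache directory when PYTHON_EGG_CACHE is
    # unset, as in the formerly patched get_default_cache().
    cache_dir = os.environ.get("PYTHON_EGG_CACHE") or platformdirs.user_cache_dir(
        appname="Python-Eggs"
    )
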
diff --git a/tools/vendoring/patches/pygments.patch b/tools/vendoring/patches/pygments.patch
index 3cabf9d6d..035c7dcae 100644
--- a/tools/vendoring/patches/pygments.patch
+++ b/tools/vendoring/patches/pygments.patch
@@ -35,3 +35,25 @@ index c6e2517df..76255b525 100644
+ sys.exit(main(sys.argv))
except KeyboardInterrupt:
sys.exit(1)
+diff --git a/src/pip/_vendor/pygments/sphinxext.py b/src/pip/_vendor/pygments/sphinxext.py
+index 3ea2e36e1..23c19504c 100644
+--- a/src/pip/_vendor/pygments/sphinxext.py
++++ b/src/pip/_vendor/pygments/sphinxext.py
+@@ -91,7 +91,7 @@ class PygmentsDoc(Directive):
+ The columns are the lexer name, the extensions handled by this lexer
+ (or "None"), the aliases and a link to the lexer class."""
+ from pygments.lexers._mapping import LEXERS
+- import pygments.lexers
++ from pygments.lexers import find_lexer_class
+ out = []
+
+ table = []
+@@ -102,7 +102,7 @@ class PygmentsDoc(Directive):
+ return name
+
+ for classname, data in sorted(LEXERS.items(), key=lambda x: x[1][1].lower()):
+- lexer_cls = pygments.lexers.find_lexer_class(data[1])
++ lexer_cls = find_lexer_class(data[1])
+ extensions = lexer_cls.filenames + lexer_cls.alias_filenames
+
+ table.append({
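
The sphinxext change swaps a module import for a from-import of find_lexer_class, presumably so the vendoring tool's import rewriting does not leave a dotted pygments.lexers reference that fails to resolve inside pip._vendor (the patch itself does not state the rationale). The rewritten call, in unvendored form:

    from pygments.lexers import find_lexer_class

    # Look up a lexer class by its human-readable name, as sphinxext does
    # for each LEXERS entry.
    lexer_cls = find_lexer_class("Python")
    print(lexer_cls.__name__)  # PythonLexer
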
diff --git a/tools/vendoring/patches/tenacity.patch b/tools/vendoring/patches/tenacity.patch
index 85b29c60c..c87b1c5b2 100644
--- a/tools/vendoring/patches/tenacity.patch
+++ b/tools/vendoring/patches/tenacity.patch
@@ -2,14 +2,14 @@ diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__i
index 88c28d2d6..086ad46e1 100644
--- a/src/pip/_vendor/tenacity/__init__.py
+++ b/src/pip/_vendor/tenacity/__init__.py
-@@ -76,10 +76,12 @@ from .after import after_nothing # noqa
+@@ -82,10 +82,12 @@ from .after import after_nothing # noqa
from .before_sleep import before_sleep_log # noqa
from .before_sleep import before_sleep_nothing # noqa
-try:
-- import tornado # type: ignore
+- import tornado
-except ImportError:
-- tornado = None # type: ignore
+- tornado = None
+# Replace a conditional import with a hard-coded None so that pip does
+# not attempt to use tornado even if it is present in the environment.
+# If tornado is non-None, tenacity will attempt to execute some code
@@ -22,7 +22,7 @@ index 88c28d2d6..086ad46e1 100644
--- a/src/pip/_vendor/tenacity/__init__.py
+++ b/src/pip/_vendor/tenacity/__init__.py
-@@ -190,7 +190,7 @@ class RetryError(Exception):
+@@ -153,7 +153,7 @@ class RetryError(Exception):
self.last_attempt = last_attempt
super().__init__(last_attempt)
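
The refreshed hunk offsets (76 to 82, 190 to 153) track a newer vendored tenacity, and the dropped type-ignore comments mirror the updated upstream source. The patched block's effect, in miniature: rather than importing tornado when available, the vendored copy pins the name to None so the tornado-only paths are never taken:

    # Shape of the patched code (sketch): a hard-coded None in place of a
    # conditional import, guaranteeing the tornado branches stay dead.
    tornado = None

    if tornado is not None:  # never true in pip's vendored tenacity
        raise AssertionError("unreachable")
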