Bug 1670784 - Upgrade vendored `pip-tools` r=firefox-build-system-reviewers,glandium

The previous version of `pip-tools` was incompatible with our vendored `virtualenv`.

Differential Revision: https://phabricator.services.mozilla.com/D93263
Ricky Stewart 2020-10-13 15:42:45 +00:00
Parent 21ab31bac4
Commit 5deaff02dc
31 changed files with 1221 additions and 736 deletions

115
third_party/python/pip-tools/.appveyor.yml vendored

@ -3,126 +3,28 @@ environment:
PYTHON: "C:\\Python36"
matrix:
- TOXENV: py27-pip8.1.1-coverage
PIP: 8.1.1
- TOXENV: py27-pip9.0.1
PIP: 9.0.1
- TOXENV: py27-pip9.0.3
PIP: 9.0.3
- TOXENV: py27-pip10.0.1
PIP: 10.0.1
- TOXENV: py27-pip18.0
PIP: 18.0
- TOXENV: py27-pip19.0.3
PIP: 19.0.3
- TOXENV: py27-pip19.1
PIP: 19.1
- TOXENV: py27-pip19.2.3
PIP: 19.2.3
- TOXENV: py27-pip19.3
PIP: 19.3
- TOXENV: py27-pip20.0
- TOXENV: py27-pip20.0-coverage
PIP: 20.0
- TOXENV: py27-pipmaster
PIP: master
- TOXENV: py27-piplatest-coverage
PIP: latest
- TOXENV: py35-pip8.1.1
PIP: 8.1.1
- TOXENV: py35-pip9.0.1
PIP: 9.0.1
- TOXENV: py35-pip9.0.3
PIP: 9.0.3
- TOXENV: py35-pip10.0.1
PIP: 10.0.1
- TOXENV: py35-pip18.0-coverage
PIP: 18.0
- TOXENV: py35-pip19.0.3
PIP: 19.0.3
- TOXENV: py35-pip19.1
PIP: 19.1
- TOXENV: py35-pip19.2.3
PIP: 19.2.3
- TOXENV: py35-pip19.3
PIP: 19.3
- TOXENV: py35-pip20.0
PIP: 20.0
- TOXENV: py35-pipmaster
PIP: master
- TOXENV: py35-piplatest
PIP: latest
- TOXENV: py36-pip8.1.1
PIP: 8.1.1
- TOXENV: py36-pip9.0.1
PIP: 9.0.1
- TOXENV: py36-pip9.0.3
PIP: 9.0.3
- TOXENV: py36-pip10.0.1
PIP: 10.0.1
- TOXENV: py36-pip18.0
PIP: 18.0
- TOXENV: py36-pip19.0.3-coverage
PIP: 19.0.3
- TOXENV: py36-pip19.1
PIP: 19.1
- TOXENV: py36-pip19.2.3
PIP: 19.2.3
- TOXENV: py36-pip19.3
PIP: 19.3
- TOXENV: py36-pip20.0
PIP: 20.0
- TOXENV: py36-pipmaster
PIP: master
- TOXENV: py36-piplatest
PIP: latest
- TOXENV: py37-pip8.1.1
PIP: 8.1.1
- TOXENV: py37-pip9.0.1
PIP: 9.0.1
- TOXENV: py37-pip9.0.3
PIP: 9.0.3
- TOXENV: py37-pip10.0.1
PIP: 10.0.1
- TOXENV: py37-pip18.0
PIP: 18.0
- TOXENV: py37-pip19.0.3
PIP: 19.0.3
- TOXENV: py37-pip19.1-coverage
PIP: 19.1
- TOXENV: py37-pip19.2.3
PIP: 19.2.3
- TOXENV: py37-pip19.3
PIP: 19.3
- TOXENV: py37-pip20.0
PIP: 20.0
- TOXENV: py37-pipmaster-coverage
PIP: master
- TOXENV: py37-piplatest-coverage
- TOXENV: py37-piplatest
PIP: latest
- TOXENV: py38-pip9.0.1
PIP: 9.0.1
- TOXENV: py38-pip9.0.3
PIP: 9.0.3
- TOXENV: py38-pip10.0.1
PIP: 10.0.1
- TOXENV: py38-pip18.0
PIP: 18.0
- TOXENV: py38-pip19.0.3
PIP: 19.0.3
- TOXENV: py38-pip19.1
PIP: 19.1
- TOXENV: py38-pip19.2.3-coverage
PIP: 19.2.3
- TOXENV: py38-pip19.3-coverage
PIP: 19.3
- TOXENV: py38-pip20.0
- TOXENV: py38-pip20.0-coverage
PIP: 20.0
- TOXENV: py38-pipmaster-coverage
PIP: master
- TOXENV: py38-piplatest-coverage
PIP: latest
@ -130,19 +32,10 @@ matrix:
fast_finish: true
allow_failures:
- PIP: master
exclude:
# platform.linux_distribution() is removed in Python 3.8 (bpo-28167).
- TOXENV: py38-pip8.1.1
install:
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
# Temporary workaround to prevent tests failing.
# See GH-983 for the details.
# TODO remove this after tox>3.14.0 being released
- pip install "virtualenv>=16.0.0"
- pip install tox
- python -m pip install -U tox virtualenv
build: false

66
third_party/python/pip-tools/.github/workflows/ci.yml vendored Normal file

@ -0,0 +1,66 @@
name: CI
on:
pull_request:
push:
branches:
- master
tags:
schedule:
# Run everyday at 03:53 UTC
- cron: 53 3 * * *
jobs:
test:
name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
runs-on: ${{ matrix.os }}-latest
strategy:
matrix:
os:
- Ubuntu
- Windows
- macOS
python-version:
- 3.8
- 2.7
- 3.5
- 3.6
- 3.7
pip-version:
- "latest"
- "20.2" # TODO: update to 20.1 after pip-20.2 being released
- "20.0"
include:
- os: Ubuntu
python-version: 3.9-dev
pip-version: latest
env:
PY_COLORS: 1
TOXENV: pip${{ matrix.pip-version }}-coverage
TOX_PARALLEL_NO_SPINNER: 1
steps:
- uses: actions/checkout@master
- name: Set up Python ${{ matrix.python-version }} from GitHub
if: "!endsWith(matrix.python-version, '-dev')"
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Set up Python ${{ matrix.python-version }} from deadsnakes
if: endsWith(matrix.python-version, '-dev')
uses: deadsnakes/action@v1.0.0
with:
python-version: ${{ matrix.python-version }}
- name: Log python version info (${{ matrix.python-version }})
run: python --version --version
- name: Install test dependencies
run: python -m pip install -U tox virtualenv
- name: Prepare test environment
run: tox --notest -p auto --parallel-live
- name: Test pip ${{ matrix.pip-version }}
run: tox
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1.0.6
with:
file: ./coverage.xml
name: ${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.pip-version }}

third_party/python/pip-tools/.github/workflows/cron.yml vendored

@ -1,28 +1,73 @@
name: cron
name: Cron
on:
schedule:
# Run every day at 00:00 UTC
- cron: 0 0 * * *
# Run every day at 00:00 UTC
- cron: 0 0 * * *
jobs:
test:
runs-on: ${{ matrix.os }}
master:
name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
runs-on: ${{ matrix.os }}-latest
strategy:
matrix:
python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
env:
- TOXENV: pipmaster
os: [ubuntu-latest, windows-latest]
os:
- Ubuntu
- Windows
- MacOS
python-version:
- 3.8
- 2.7
- 3.5
- 3.6
- 3.7
pip-version:
- master
env:
PY_COLORS: 1
TOXENV: pip${{ matrix.pip-version }}
TOX_PARALLEL_NO_SPINNER: 1
steps:
- uses: actions/checkout@master
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install test dependencies
run: python -m pip install -U tox virtualenv
- name: Prepare test environment
run: tox --notest -p auto --parallel-live
- name: Test pip ${{ matrix.pip-version }}
run: tox
pypy:
name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
runs-on: ${{ matrix.os }}-latest
strategy:
matrix:
os:
- Ubuntu
- MacOS
# TODO: fix test_realistic_complex_sub_dependencies test on Windows
# - Windows
python-version:
- pypy3
- pypy2
pip-version:
- latest
env:
PY_COLORS: 1
TOXENV: pip${{ matrix.pip-version }}
TOX_PARALLEL_NO_SPINNER: 1
steps:
- uses: actions/checkout@master
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install tox
run: pip install tox
- name: Test with tox ${{ matrix.env.TOXENV }}
- name: Prepare test environment
run: tox --notest -p auto --parallel-live
- name: Test pip ${{ matrix.pip-version }}
run: tox
env: ${{ matrix.env }}

43
third_party/python/pip-tools/.github/workflows/qa.yml vendored Normal file

@ -0,0 +1,43 @@
name: QA
on:
pull_request:
push:
branches:
- master
tags:
jobs:
qa:
name: ${{ matrix.toxenv }}
runs-on: ubuntu-latest
strategy:
matrix:
toxenv:
- checkqa
- readme
python-version:
- "3.x"
env:
PY_COLORS: 1
TOXENV: ${{ matrix.toxenv }}
TOX_PARALLEL_NO_SPINNER: 1
steps:
- uses: actions/checkout@master
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Prepare cache key
id: cache-key
run: echo "::set-output name=sha-256::$(python -VV | sha256sum | cut -d' ' -f1)"
- uses: actions/cache@v1
with:
path: ~/.cache/pre-commit
key: pre-commit|${{ steps.cache-key.outputs.sha-256 }}|${{ hashFiles('.pre-commit-config.yaml') }}
- name: Install tox
run: pip install tox
- name: Prepare test environment
run: tox --notest -p auto --parallel-live
- name: Test ${{ matrix.toxenv }}
run: tox

third_party/python/pip-tools/.pre-commit-config.yaml vendored

@ -1,21 +1,23 @@
repos:
- repo: https://github.com/psf/black
rev: 19.3b0
rev: 19.10b0
hooks:
- id: black
language_version: python3
- repo: https://github.com/pre-commit/mirrors-isort
rev: v4.3.16
rev: v4.3.21
hooks:
- id: isort
language_version: python3
- repo: https://gitlab.com/pycqa/flake8
rev: 3.7.7
rev: 3.8.1
hooks:
- id: flake8
language_version: python3
additional_dependencies:
- flake8-pytest-style
- repo: https://github.com/PyCQA/bandit
rev: 1.6.0
rev: 1.6.2
hooks:
- id: bandit
language_version: python3

7
third_party/python/pip-tools/.pre-commit-hooks.yaml vendored Normal file

@ -0,0 +1,7 @@
- id: pip-compile
name: pip-compile
description: Automatically compile requirements.
entry: pip-compile
language: python
files: ^requirements\.(in|txt)$
pass_filenames: false

40
third_party/python/pip-tools/.travis.yml vendored

@ -10,57 +10,23 @@ python:
env:
# NOTE: keep this in sync with envlist in tox.ini for tox-travis.
- PIP=8.1.1
- PIP=9.0.1
- PIP=9.0.3
- PIP=10.0.1
- PIP=18.0
- PIP=19.0.3
- PIP=19.1
- PIP=19.2.3
- PIP=19.3
- PIP=20.0
- PIP=latest
- PIP=master
- PIP=20.2 # TODO: update to 20.1 after pip-20.2 being released
- PIP=20.0
cache: false
install:
- travis_retry pip install tox-travis
- travis_retry python -m pip install -U tox-travis virtualenv
script:
- tox
stages:
- baseline
- test
- name: deploy
if: repo = jazzband/pip-tools AND tag IS present
jobs:
exclude:
- python: "pypy3.5-6.0"
env: PIP=8.1.1
# platform.linux_distribution() is removed in Python 3.8 (bpo-28167).
- python: "3.8"
env: PIP=8.1.1
# Baseline jobs (included there/below).
- env: PIP=latest
python: "3.8"
- env: PIP=latest
python: "3.7"
- env: PIP=latest
python: "2.7"
include:
# Baseline stage to abort early.
- stage: baseline
env: PIP=latest
python: "3.8"
- env: PIP=latest
python: "3.7"
- env: PIP=latest
python: "2.7"
# QA checks.
- env: TOXENV=checkqa
python: 3.7

112
third_party/python/pip-tools/CHANGELOG.md vendored

@ -1,3 +1,115 @@
# 5.3.1 (2020-07-31)
Bug Fixes:
- Fix `pip-20.2` compatibility issue that caused `pip-tools` to sometimes fail to
stabilize in a constant number of rounds
([1194](https://github.com/jazzband/pip-tools/pull/1194)). Thanks @vphilippon
# 5.3.0 (2020-07-26)
Features:
- Add `-h` alias for `--help` option to `pip-sync` and `pip-compile`
([1163](https://github.com/jazzband/pip-tools/pull/1163)). Thanks @jan25
- Add `pip>=20.2` support
([1168](https://github.com/jazzband/pip-tools/pull/1168)). Thanks @atugushev
- `pip-sync` now exits with code `1` on `--dry-run`
([1172](https://github.com/jazzband/pip-tools/pull/1172)). Thanks @francisbrito
- `pip-compile` now doesn't resolve constraints from `-c constraints.txt` that are not
(yet) requirements
([1175](https://github.com/jazzband/pip-tools/pull/1175)). Thanks @clslgrnc
- Add `--reuse-hashes/--no-reuse-hashes` options to `pip-compile`
([1177](https://github.com/jazzband/pip-tools/pull/1177)). Thanks @graingert
# 5.2.1 (2020-06-09)
Bug Fixes:
- Fix a bug where `pip-compile` would lose some dependencies when updating
a `requirements.txt`
([1159](https://github.com/jazzband/pip-tools/pull/1159)). Thanks @richafrank
# 5.2.0 (2020-05-27)
Features:
- Show basename of URLs when `pip-compile` generates hashes in a verbose mode
([1113](https://github.com/jazzband/pip-tools/pull/1113)). Thanks @atugushev
- Add `--emit-index-url/--no-emit-index-url` options to `pip-compile`
([1130](https://github.com/jazzband/pip-tools/pull/1130)). Thanks @atugushev
Bug Fixes:
- Fix a bug where `pip-compile` would ignore some package versions when
`PIP_PREFER_BINARY` is set
([1119](https://github.com/jazzband/pip-tools/pull/1119)). Thanks @atugushev
- Fix leaked URLs with credentials in the debug output of `pip-compile`.
([1146](https://github.com/jazzband/pip-tools/pull/1146)). Thanks @atugushev
- Fix a bug where URL requirements would have name collisions
([1149](https://github.com/jazzband/pip-tools/pull/1149)). Thanks @geokala
Deprecations:
- Deprecate `--index/--no-index` in favor of `--emit-index-url/--no-emit-index-url`
options in `pip-compile`
([1130](https://github.com/jazzband/pip-tools/pull/1130)). Thanks @atugushev
Other Changes:
- Switch to `setuptools` declarative syntax through `setup.cfg`
([1141](https://github.com/jazzband/pip-tools/pull/1141)). Thanks @jdufresne
# 5.1.2 (2020-05-05)
Bug Fixes:
- Fix grouping of editables and non-editables requirements
([1132](https://github.com/jazzband/pip-tools/pull/1132)). Thanks @richafrank
# 5.1.1 (2020-05-01)
Bug Fixes:
- Fix a bug where `pip-compile` would generate hashes for `*.egg` files
([#1122](https://github.com/jazzband/pip-tools/pull/1122)). Thanks @atugushev
# 5.1.0 (2020-04-27)
Features:
- Show progress bar when downloading packages in `pip-compile` verbose mode
([#949](https://github.com/jazzband/pip-tools/pull/949)). Thanks @atugushev
- `pip-compile` now gets hashes from `PyPI` JSON API (if available) which significantly
increases the speed of hashes generation
([#1109](https://github.com/jazzband/pip-tools/pull/1109)). Thanks @atugushev
# 5.0.0 (2020-04-16)
Backwards Incompatible Changes:
- `pip-tools` now requires `pip>=20.0` (previously `8.1.x` - `20.0.x`). Windows users,
make sure to use `python -m pip install pip-tools` to avoid issues with `pip`
self-update from now on
([#1055](https://github.com/jazzband/pip-tools/pull/1055)). Thanks @atugushev
- The `--build-isolation` option is now enabled by default for `pip-compile`
([#1060](https://github.com/jazzband/pip-tools/pull/1060)). Thanks @hramezani
Features:
- Exclude requirements with non-matching markers from `pip-sync`
([#927](https://github.com/jazzband/pip-tools/pull/927)). Thanks @AndydeCleyre
- Add `pre-commit` hook for `pip-compile`
([#976](https://github.com/jazzband/pip-tools/pull/976)). Thanks @atugushev
- `pip-compile` and `pip-sync` now pass anything provided to the new `--pip-args` option on to `pip`
([#1080](https://github.com/jazzband/pip-tools/pull/1080)). Thanks @AndydeCleyre
- `pip-compile` output headers are now more accurate when `--` is used to escape filenames
([#1080](https://github.com/jazzband/pip-tools/pull/1080)). Thanks @AndydeCleyre
- Add `pip>=20.1` support
([#1088](https://github.com/jazzband/pip-tools/pull/1088)). Thanks @atugushev
Bug Fixes:
- Fix a bug where editables that are both direct requirements and constraints wouldn't appear in `pip-compile` output
([#1093](https://github.com/jazzband/pip-tools/pull/1093)). Thanks @richafrank
- `pip-compile` now sorts format controls (`--no-binary/--only-binary`) to ensure consistent results
([#1098](https://github.com/jazzband/pip-tools/pull/1098)). Thanks @richafrank
Improved Documentation:
- Add cross-environment usage documentation to `README`
([#651](https://github.com/jazzband/pip-tools/pull/651)). Thanks @vphilippon
- Add versions compatibility table to `README`
([#1106](https://github.com/jazzband/pip-tools/pull/1106)). Thanks @atugushev
# 4.5.1 (2020-02-26)
Bug Fixes:

third_party/python/pip-tools/CONTRIBUTING.md vendored

@ -7,6 +7,7 @@ and follow the [guidelines](https://jazzband.co/about/guidelines).
## Project Contribution Guidelines
Here are a few additional or emphasized guidelines to follow when contributing to pip-tools:
- Install pip-tools in development mode and its test dependencies with `pip install -e .[testing]`.
- Run `tox -e checkqa` to verify your changes do not break the style conventions.
- Always provide tests for your changes.
- Give a clear one-line description in the PR (that the maintainers can add to [CHANGELOG](CHANGELOG.md) afterwards).

123
third_party/python/pip-tools/PKG-INFO vendored

@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: pip-tools
Version: 4.5.1
Version: 5.3.1
Summary: pip-tools keeps your pinned dependencies fresh.
Home-page: https://github.com/jazzband/pip-tools/
Author: Vincent Driessen
@ -48,7 +48,7 @@ Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-ap
.. code-block:: bash
$ source /path/to/venv/bin/activate
(venv)$ pip install pip-tools
(venv)$ python -m pip install pip-tools
**Note**: all of the remaining example commands assume you've activated your
project's virtual environment.
@ -96,8 +96,7 @@ Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-ap
sqlparse==0.3.0 # via django
``pip-compile`` will produce your ``requirements.txt``, with all the Django
dependencies (and all underlying dependencies) pinned. You should put
``requirements.txt`` under version control.
dependencies (and all underlying dependencies) pinned.
Without ``setup.py``
--------------------
@ -127,8 +126,7 @@ Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-ap
sqlparse==0.3.0 # via django
And it will produce your ``requirements.txt``, with all the Django dependencies
(and all underlying dependencies) pinned. You should put both
``requirements.in`` and ``requirements.txt`` under version control.
(and all underlying dependencies) pinned.
.. _it's easy to write one: https://packaging.python.org/guides/distributing-packages-using-setuptools/#configuring-your-project
@ -211,6 +209,16 @@ Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-ap
$ pip-compile --output-file=- > requirements.txt
$ pip-compile - --output-file=- < requirements.in > requirements.txt
Forwarding options to ``pip``
-----------------------------
Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
``--pip-args`` option, e.g.
.. code-block:: bash
$ pip-compile requirements.in --pip-args '--retries 10 --timeout 30'
Configuration
-------------
@ -309,6 +317,34 @@ Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-ap
$ pip-sync requirements.txt dev-requirements.txt
Version control integration
---------------------------
You might use ``pip-compile`` as a hook for the `pre-commit <https://github.com/pre-commit/pre-commit>`_.
See `pre-commit docs <https://pre-commit.com/>`_ for instructions.
Sample ``.pre-commit-config.yaml``:
.. code-block:: yaml
repos:
- repo: https://github.com/jazzband/pip-tools
rev: 5.0.0
hooks:
- id: pip-compile
You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
.. code-block:: yaml
repos:
- repo: https://github.com/jazzband/pip-tools
rev: 5.0.0
hooks:
- id: pip-compile
files: ^requirements/production\.(in|txt)$
args: [--index-url=https://example.com, requirements/production.in]
Example usage for ``pip-sync``
==============================
@ -350,14 +386,57 @@ Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-ap
Passing in empty arguments would cause it to default to ``requirements.txt``.
Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
``--pip-args`` option, e.g.
.. code-block:: bash
$ pip-sync requirements.txt --pip-args '--no-cache-dir --no-deps'
If you use multiple Python versions, you can run ``pip-sync`` as
``py -X.Y -m piptools sync ...`` on Windows and
``pythonX.Y -m piptools sync ...`` on other systems.
**Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``pip install --upgrade``
``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
to upgrade those packages.
Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
===============================================================================
Generally, yes. If you want a reproducible environment installation available from your source control,
then you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
Note that if you are deploying on multiple Python environments (read the section below),
then you must commit a separate output file for each Python environment.
We suggest using the ``{env}-requirements.txt`` format
(e.g. ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.).
Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
=======================================================================================
The dependencies of a package can change depending on the Python environment in which it
is installed. Here, we define a Python environment as the combination of Operating
System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy,
etc.). For an exact definition, refer to the possible combinations of `PEP 508
environment markers`_.
As the resulting ``requirements.txt`` can differ for each environment, users must
execute ``pip-compile`` **on each Python environment separately** to generate a
``requirements.txt`` valid for each said environment. The same ``requirements.in`` can
be used as the source file for all environments, using `PEP 508 environment markers`_ as
needed, the same way it would be done for regular ``pip`` cross-environment usage.
If the generated ``requirements.txt`` remains exactly the same for all Python
environments, then it can be used across Python environments safely. **But** users
should be careful, as any package update can introduce environment-dependent
dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
As a general rule, users should still execute ``pip-compile``
on each targeted Python environment to avoid issues.
.. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
Other useful tools
==================
@ -371,7 +450,30 @@ Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-ap
.. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
.. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
Platform: any
Deprecations
============
This section lists ``pip-tools`` features that are currently deprecated.
- ``--index/--no-index`` command-line options, use instead
``--emit-index-url/--no-emit-index-url`` (since 5.2.0).
Versions and compatibility
==========================
The table below summarizes the latest ``pip-tools`` versions with the required ``pip``
versions.
+-----------+-----------------+
| pip-tools | pip |
+===========+=================+
| 4.5.x | 8.1.3 - 20.0.x |
+-----------+-----------------+
| 5.x | 20.0.x - 20.1.x |
+-----------+-----------------+
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: System Administrators
@ -388,5 +490,6 @@ Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: System :: Systems Administration
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
Description-Content-Type: text/x-rst
Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
Provides-Extra: testing
Provides-Extra: coverage

114
third_party/python/pip-tools/README.rst vendored

@ -40,7 +40,7 @@ Similar to ``pip``, ``pip-tools`` must be installed in each of your project's
.. code-block:: bash
$ source /path/to/venv/bin/activate
(venv)$ pip install pip-tools
(venv)$ python -m pip install pip-tools
**Note**: all of the remaining example commands assume you've activated your
project's virtual environment.
@ -88,8 +88,7 @@ If you have a ``setup.py`` with ``install_requires=['django']``, then run
sqlparse==0.3.0 # via django
``pip-compile`` will produce your ``requirements.txt``, with all the Django
dependencies (and all underlying dependencies) pinned. You should put
``requirements.txt`` under version control.
dependencies (and all underlying dependencies) pinned.
Without ``setup.py``
--------------------
@ -119,8 +118,7 @@ Now, run ``pip-compile requirements.in``:
sqlparse==0.3.0 # via django
And it will produce your ``requirements.txt``, with all the Django dependencies
(and all underlying dependencies) pinned. You should put both
``requirements.in`` and ``requirements.txt`` under version control.
(and all underlying dependencies) pinned.
.. _it's easy to write one: https://packaging.python.org/guides/distributing-packages-using-setuptools/#configuring-your-project
@ -203,6 +201,16 @@ Or to output to standard output, use ``--output-file=-``:
$ pip-compile --output-file=- > requirements.txt
$ pip-compile - --output-file=- < requirements.in > requirements.txt
Forwarding options to ``pip``
-----------------------------
Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
``--pip-args`` option, e.g.
.. code-block:: bash
$ pip-compile requirements.in --pip-args '--retries 10 --timeout 30'
Configuration
-------------
@ -301,6 +309,34 @@ You can install requirements in development stage by:
$ pip-sync requirements.txt dev-requirements.txt
Version control integration
---------------------------
You might use ``pip-compile`` as a hook for the `pre-commit <https://github.com/pre-commit/pre-commit>`_.
See `pre-commit docs <https://pre-commit.com/>`_ for instructions.
Sample ``.pre-commit-config.yaml``:
.. code-block:: yaml
repos:
- repo: https://github.com/jazzband/pip-tools
rev: 5.0.0
hooks:
- id: pip-compile
You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
.. code-block:: yaml
repos:
- repo: https://github.com/jazzband/pip-tools
rev: 5.0.0
hooks:
- id: pip-compile
files: ^requirements/production\.(in|txt)$
args: [--index-url=https://example.com, requirements/production.in]
Example usage for ``pip-sync``
==============================
@ -342,14 +378,57 @@ line arguments, e.g.
Passing in empty arguments would cause it to default to ``requirements.txt``.
Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
``--pip-args`` option, e.g.
.. code-block:: bash
$ pip-sync requirements.txt --pip-args '--no-cache-dir --no-deps'
If you use multiple Python versions, you can run ``pip-sync`` as
``py -X.Y -m piptools sync ...`` on Windows and
``pythonX.Y -m piptools sync ...`` on other systems.
**Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``pip install --upgrade``
``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
to upgrade those packages.
Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
===============================================================================
Generally, yes. If you want a reproducible environment installation available from your source control,
then you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
Note that if you are deploying on multiple Python environments (read the section below),
then you must commit a separate output file for each Python environment.
We suggest using the ``{env}-requirements.txt`` format
(e.g. ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.).
Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
=======================================================================================
The dependencies of a package can change depending on the Python environment in which it
is installed. Here, we define a Python environment as the combination of Operating
System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy,
etc.). For an exact definition, refer to the possible combinations of `PEP 508
environment markers`_.
As the resulting ``requirements.txt`` can differ for each environment, users must
execute ``pip-compile`` **on each Python environment separately** to generate a
``requirements.txt`` valid for each said environment. The same ``requirements.in`` can
be used as the source file for all environments, using `PEP 508 environment markers`_ as
needed, the same way it would be done for regular ``pip`` cross-environment usage.
If the generated ``requirements.txt`` remains exactly the same for all Python
environments, then it can be used across Python environments safely. **But** users
should be careful, as any package update can introduce environment-dependent
dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
As a general rule, users should still execute ``pip-compile``
on each targeted Python environment to avoid issues.
.. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
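
A minimal sketch of how such an environment marker evaluates per environment, assuming the ``packaging`` library and an illustrative marker string:

from packaging.markers import Marker

# The kind of PEP 508 marker a requirements.in line might carry,
# e.g.  pywin32 ; sys_platform == "win32"
marker = Marker('sys_platform == "win32"')

# Evaluated against the *current* interpreter and platform, so running
# pip-compile on Windows keeps the requirement while a Linux run drops it.
print(marker.evaluate())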
Other useful tools
==================
@ -362,3 +441,26 @@ Other useful tools
.. _pipdeptree: https://github.com/naiquevin/pipdeptree
.. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
.. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
Deprecations
============
This section lists ``pip-tools`` features that are currently deprecated.
- ``--index/--no-index`` command-line options, use instead
``--emit-index-url/--no-emit-index-url`` (since 5.2.0).
Versions and compatibility
==========================
The table below summarizes the latest ``pip-tools`` versions with the required ``pip``
versions.
+-----------+-----------------+
| pip-tools | pip |
+===========+=================+
| 4.5.x | 8.1.3 - 20.0.x |
+-----------+-----------------+
| 5.x | 20.0.x - 20.1.x |
+-----------+-----------------+

View file

@ -1,5 +0,0 @@
-e .
mock
pytest!=5.1.2
pytest-rerunfailures
wheel

third_party/python/pip-tools/piptools/_compat/__init__.py vendored

@ -4,37 +4,7 @@ from __future__ import absolute_import, division, print_function, unicode_litera
import six
from .pip_compat import (
DEV_PKGS,
FAVORITE_HASH,
PIP_VERSION,
FormatControl,
InstallationCandidate,
InstallCommand,
InstallRequirement,
Link,
PackageFinder,
PyPI,
RequirementSet,
Resolver,
Wheel,
WheelCache,
cmdoptions,
get_installed_distributions,
get_requirement_tracker,
global_tempdir_manager,
install_req_from_editable,
install_req_from_line,
is_dir_url,
is_file_url,
is_vcs_url,
normalize_path,
parse_requirements,
path_to_url,
stdlib_pkgs,
url_to_path,
user_cache_dir,
)
from .pip_compat import BAR_TYPES, PIP_VERSION, parse_requirements
if six.PY2:
from .tempfile import TemporaryDirectory

third_party/python/pip-tools/piptools/_compat/pip_compat.py vendored

@ -1,118 +1,29 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import
import importlib
from contextlib import contextmanager
import pip
from pip._internal.req import parse_requirements as _parse_requirements
from pip._vendor.packaging.version import parse as parse_version
PIP_VERSION = tuple(map(int, parse_version(pip.__version__).base_version.split(".")))
try:
from pip._internal.req.req_tracker import RequirementTracker
except ImportError:
@contextmanager
def RequirementTracker():
yield
if PIP_VERSION[:2] <= (20, 0):
def install_req_from_parsed_requirement(req, **kwargs):
return req
# Introduced in pip 20.0
try:
from pip._internal.utils.temp_dir import global_tempdir_manager
except ImportError:
from pip._internal.utils.ui import BAR_TYPES
@contextmanager
def global_tempdir_manager():
yield
def do_import(module_path, subimport=None, old_path=None):
old_path = old_path or module_path
prefixes = ["pip._internal", "pip"]
paths = [module_path, old_path]
search_order = [
"{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None
]
package = subimport if subimport else None
for to_import in search_order:
if not subimport:
to_import, _, package = to_import.rpartition(".")
try:
imported = importlib.import_module(to_import)
except ImportError:
continue
else:
return getattr(imported, package)
InstallRequirement = do_import("req.req_install", "InstallRequirement")
InstallationCandidate = do_import(
"models.candidate", "InstallationCandidate", old_path="index"
)
parse_requirements = do_import("req.req_file", "parse_requirements")
RequirementSet = do_import("req.req_set", "RequirementSet")
user_cache_dir = do_import("utils.appdirs", "user_cache_dir")
FAVORITE_HASH = do_import("utils.hashes", "FAVORITE_HASH")
path_to_url = do_import("utils.urls", "path_to_url", old_path="download")
url_to_path = do_import("utils.urls", "url_to_path", old_path="download")
PackageFinder = do_import("index.package_finder", "PackageFinder", old_path="index")
FormatControl = do_import("models.format_control", "FormatControl", old_path="index")
InstallCommand = do_import("commands.install", "InstallCommand")
Wheel = do_import("models.wheel", "Wheel", old_path="wheel")
cmdoptions = do_import("cli.cmdoptions", old_path="cmdoptions")
get_installed_distributions = do_import(
"utils.misc", "get_installed_distributions", old_path="utils"
)
PyPI = do_import("models.index", "PyPI")
stdlib_pkgs = do_import("utils.compat", "stdlib_pkgs", old_path="compat")
DEV_PKGS = do_import("commands.freeze", "DEV_PKGS")
Link = do_import("models.link", "Link", old_path="index")
Session = do_import("_vendor.requests.sessions", "Session")
Resolver = do_import("legacy_resolve", "Resolver", old_path="resolve")
WheelCache = do_import("cache", "WheelCache", old_path="wheel")
normalize_path = do_import("utils.misc", "normalize_path", old_path="utils")
# pip 18.1 has refactored InstallRequirement constructors use by pip-tools.
if PIP_VERSION < (18, 1):
install_req_from_line = InstallRequirement.from_line
install_req_from_editable = InstallRequirement.from_editable
else:
install_req_from_line = do_import("req.constructors", "install_req_from_line")
install_req_from_editable = do_import(
"req.constructors", "install_req_from_editable"
)
from pip._internal.req.constructors import install_req_from_parsed_requirement
from pip._internal.cli.progress_bars import BAR_TYPES
def is_vcs_url(link):
if PIP_VERSION < (19, 3):
_is_vcs_url = do_import("download", "is_vcs_url")
return _is_vcs_url(link)
return link.is_vcs
def is_file_url(link):
if PIP_VERSION < (19, 3):
_is_file_url = do_import("download", "is_file_url")
return _is_file_url(link)
return link.is_file
def is_dir_url(link):
if PIP_VERSION < (19, 3):
_is_dir_url = do_import("download", "is_dir_url")
return _is_dir_url(link)
return link.is_existing_dir()
def get_requirement_tracker():
if PIP_VERSION[:2] <= (19, 3):
return RequirementTracker()
from pip._internal.req import req_tracker
return req_tracker.get_requirement_tracker()
def parse_requirements(
filename, session, finder=None, options=None, constraint=False, isolated=False
):
for parsed_req in _parse_requirements(
filename, session, finder=finder, options=options, constraint=constraint
):
yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
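
A rough sketch of how this wrapper gets driven: it needs a pip session, which is built the same way ``PyPIRepository`` does further down, via pip's (private) install command. The ``requirements.in`` path is just a placeholder:

from pip._internal.commands import create_command

from piptools._compat.pip_compat import parse_requirements

command = create_command("install")
options, _ = command.parse_args([])        # picks up pip.conf defaults
session = command._build_session(options)  # private pip API

# Yields InstallRequirement objects, already converted from pip's
# parsed requirements by the wrapper above.
for ireq in parse_requirements("requirements.in", session=session):
    print(ireq.req)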

third_party/python/pip-tools/piptools/exceptions.py vendored

@ -1,3 +1,6 @@
from pip._internal.utils.misc import redact_auth_from_url
class PipToolsError(Exception):
pass
@ -40,11 +43,14 @@ class NoCandidateFound(PipToolsError):
source_ireqs = getattr(self.ireq, "_source_ireqs", [])
lines.extend(" {}".format(ireq) for ireq in source_ireqs)
else:
redacted_urls = tuple(
redact_auth_from_url(url) for url in self.finder.index_urls
)
lines.append("No versions found")
lines.append(
"{} {} reachable?".format(
"Were" if len(self.finder.index_urls) > 1 else "Was",
" or ".join(self.finder.index_urls),
"Were" if len(redacted_urls) > 1 else "Was",
" or ".join(redacted_urls),
)
)
return "\n".join(lines)
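
A quick illustration of the redaction added here, assuming pip 20.x and a made-up index URL with embedded credentials:

from pip._internal.utils.misc import redact_auth_from_url

url = "https://user:s3cret@pypi.example.com/simple"
# The password portion is masked before the URL is echoed back in the
# "Was/Were ... reachable?" message; roughly:
print(redact_auth_from_url(url))  # https://user:****@pypi.example.com/simple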

third_party/python/pip-tools/piptools/cache.py vendored

@ -1,7 +1,8 @@
import os
from shutil import rmtree
from ._compat import user_cache_dir
from pip._internal.utils.appdirs import user_cache_dir
from .click import secho
# The user_cache_dir helper comes straight from pip itself

third_party/python/pip-tools/piptools/logging.py vendored

@ -1,7 +1,9 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import contextlib
import logging
import sys
from . import click
@ -11,12 +13,17 @@ logging.basicConfig()
class LogContext(object):
def __init__(self, verbosity=0):
self.verbosity = verbosity
stream = sys.stderr
def log(self, *args, **kwargs):
def __init__(self, verbosity=0, indent_width=2):
self.verbosity = verbosity
self.current_indent = 0
self._indent_width = indent_width
def log(self, message, *args, **kwargs):
kwargs.setdefault("err", True)
click.secho(*args, **kwargs)
prefix = " " * self.current_indent
click.secho(prefix + message, *args, **kwargs)
def debug(self, *args, **kwargs):
if self.verbosity >= 1:
@ -34,5 +41,22 @@ class LogContext(object):
kwargs.setdefault("fg", "red")
self.log(*args, **kwargs)
def _indent(self):
self.current_indent += self._indent_width
def _dedent(self):
self.current_indent -= self._indent_width
@contextlib.contextmanager
def indentation(self):
"""
Increase indentation.
"""
self._indent()
try:
yield
finally:
self._dedent()
log = LogContext()
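
A tiny usage sketch of the new ``indentation()`` helper; the resolver changes below wrap their per-round debug output in exactly this pattern (the requirement string is illustrative):

from piptools.logging import log

log.debug("Finding secondary dependencies:")
with log.indentation():
    # Printed with a two-space prefix thanks to current_indent.
    log.debug("requests==2.24.0")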

third_party/python/pip-tools/piptools/repositories/base.py vendored

@ -44,3 +44,12 @@ class BaseRepository(object):
"""
Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
"""
@abstractmethod
def copy_ireq_dependencies(self, source, dest):
"""
Notifies the repository that `dest` is a copy of `source`, and so it
has the same dependencies. Otherwise, once we prepare an ireq to assign
it its name, we would lose track of those dependencies when combining
that ireq with others.
"""

third_party/python/pip-tools/piptools/repositories/local.py vendored

@ -3,7 +3,9 @@ from __future__ import absolute_import, division, print_function, unicode_litera
from contextlib import contextmanager
from .._compat import FAVORITE_HASH
from pip._internal.utils.hashes import FAVORITE_HASH
from .._compat import PIP_VERSION
from .base import BaseRepository
from piptools.utils import as_tuple, key_from_ireq, make_install_requirement
@ -15,7 +17,9 @@ def ireq_satisfied_by_existing_pin(ireq, existing_pin):
previously encountered version pin.
"""
version = next(iter(existing_pin.req.specifier)).version
return version in ireq.req.specifier
return ireq.req.specifier.contains(
version, prereleases=existing_pin.req.specifier.prereleases
)
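
The switch from ``version in specifier`` to ``specifier.contains(...)`` matters for pre-releases; a minimal sketch with ``packaging``:

from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=1.0")
print(spec.contains("2.0b1"))                    # False: pre-releases excluded by default
print(spec.contains("2.0b1", prereleases=True))  # True: honoured when the existing pin allows them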
class LocalRequirementsRepository(BaseRepository):
@ -29,7 +33,8 @@ class LocalRequirementsRepository(BaseRepository):
PyPI. This keeps updates to the requirements.txt down to a minimum.
"""
def __init__(self, existing_pins, proxied_repository):
def __init__(self, existing_pins, proxied_repository, reuse_hashes=True):
self._reuse_hashes = reuse_hashes
self.repository = proxied_repository
self.existing_pins = existing_pins
@ -70,10 +75,14 @@ class LocalRequirementsRepository(BaseRepository):
return self.repository.get_dependencies(ireq)
def get_hashes(self, ireq):
key = key_from_ireq(ireq)
existing_pin = self.existing_pins.get(key)
existing_pin = self._reuse_hashes and self.existing_pins.get(
key_from_ireq(ireq)
)
if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
hashes = existing_pin.options.get("hashes", {})
if PIP_VERSION[:2] <= (20, 0):
hashes = existing_pin.options.get("hashes", {})
else:
hashes = existing_pin.hash_options
hexdigests = hashes.get(FAVORITE_HASH)
if hexdigests:
return {
@ -85,3 +94,6 @@ class LocalRequirementsRepository(BaseRepository):
def allow_all_wheels(self):
with self.repository.allow_all_wheels():
yield
def copy_ireq_dependencies(self, source, dest):
self.repository.copy_ireq_dependencies(source, dest)

third_party/python/pip-tools/piptools/repositories/pypi.py vendored

@ -3,36 +3,32 @@ from __future__ import absolute_import, division, print_function, unicode_litera
import collections
import hashlib
import itertools
import logging
import os
from contextlib import contextmanager
from functools import partial
from shutil import rmtree
from .._compat import (
FAVORITE_HASH,
PIP_VERSION,
Link,
PyPI,
RequirementSet,
Resolver as PipResolver,
TemporaryDirectory,
Wheel,
WheelCache,
contextlib,
get_requirement_tracker,
global_tempdir_manager,
is_dir_url,
is_file_url,
is_vcs_url,
normalize_path,
path_to_url,
url_to_path,
)
from pip._internal.cache import WheelCache
from pip._internal.commands import create_command
from pip._internal.models.index import PackageIndex, PyPI
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.hashes import FAVORITE_HASH
from pip._internal.utils.logging import indent_log, setup_logging
from pip._internal.utils.misc import normalize_path
from pip._internal.utils.temp_dir import TempDirectory, global_tempdir_manager
from pip._internal.utils.urls import path_to_url, url_to_path
from pip._vendor.requests import RequestException
from .._compat import BAR_TYPES, PIP_VERSION, TemporaryDirectory, contextlib
from ..click import progressbar
from ..exceptions import NoCandidateFound
from ..logging import log
from ..utils import (
create_install_command,
as_tuple,
fs_str,
is_pinned_requirement,
is_url_requirement,
@ -47,6 +43,7 @@ FileStream = collections.namedtuple("FileStream", "stream size")
class PyPIRepository(BaseRepository):
DEFAULT_INDEX_URL = PyPI.simple_url
HASHABLE_PACKAGE_TYPES = {"bdist_wheel", "sdist"}
"""
The PyPIRepository will use the provided Finder instance to lookup
@ -55,17 +52,18 @@ class PyPIRepository(BaseRepository):
changed/configured on the Finder.
"""
def __init__(self, pip_args, cache_dir, build_isolation=False):
self.build_isolation = build_isolation
def __init__(self, pip_args, cache_dir):
# Use pip's parser for pip.conf management and defaults.
# General options (find_links, index_url, extra_index_url, trusted_host,
# and pre) are deferred to pip.
self.command = create_install_command()
self.command = create_command("install")
self.options, _ = self.command.parse_args(pip_args)
if self.options.cache_dir:
self.options.cache_dir = normalize_path(self.options.cache_dir)
self.options.require_hashes = False
self.options.ignore_dependencies = False
self.session = self.command._build_session(self.options)
self.finder = self.command._build_package_finder(
options=self.options, session=self.session
@ -88,6 +86,8 @@ class PyPIRepository(BaseRepository):
self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs"))
self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels"))
self._setup_logging()
def freshen_build_caches(self):
"""
Start with fresh build/source caches. Will remove any old build
@ -123,134 +123,71 @@ class PyPIRepository(BaseRepository):
return ireq # return itself as the best match
all_candidates = self.find_all_candidates(ireq.name)
candidates_by_version = lookup_table(
all_candidates, key=lambda c: c.version, unique=True
)
candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
matching_versions = ireq.specifier.filter(
(candidate.version for candidate in all_candidates), prereleases=prereleases
)
# Reuses pip's internal candidate sort key to sort
matching_candidates = [candidates_by_version[ver] for ver in matching_versions]
matching_candidates = list(
itertools.chain.from_iterable(
candidates_by_version[ver] for ver in matching_versions
)
)
if not matching_candidates:
raise NoCandidateFound(ireq, all_candidates, self.finder)
if PIP_VERSION < (19, 1):
best_candidate = max(
matching_candidates, key=self.finder._candidate_sort_key
)
elif PIP_VERSION < (19, 2):
evaluator = self.finder.candidate_evaluator
best_candidate = evaluator.get_best_candidate(matching_candidates)
elif PIP_VERSION < (19, 3):
evaluator = self.finder.make_candidate_evaluator(ireq.name)
best_candidate = evaluator.get_best_candidate(matching_candidates)
else:
evaluator = self.finder.make_candidate_evaluator(ireq.name)
best_candidate_result = evaluator.compute_best_candidate(
matching_candidates
)
best_candidate = best_candidate_result.best_candidate
if PIP_VERSION[:2] <= (19, 3):
best_candidate_name = best_candidate.project
else:
best_candidate_name = best_candidate.name
evaluator = self.finder.make_candidate_evaluator(ireq.name)
best_candidate_result = evaluator.compute_best_candidate(matching_candidates)
best_candidate = best_candidate_result.best_candidate
# Turn the candidate into a pinned InstallRequirement
return make_install_requirement(
best_candidate_name,
best_candidate.name,
best_candidate.version,
ireq.extras,
constraint=ireq.constraint,
)
def resolve_reqs(self, download_dir, ireq, wheel_cache):
results = None
if PIP_VERSION < (10,):
reqset = RequirementSet(
self.build_dir,
self.source_dir,
with get_requirement_tracker() as req_tracker, TempDirectory(
kind="resolver"
) as temp_dir, indent_log():
preparer = self.command.make_requirement_preparer(
temp_build_dir=temp_dir,
options=self.options,
req_tracker=req_tracker,
session=self.session,
finder=self.finder,
use_user_site=False,
download_dir=download_dir,
wheel_download_dir=self._wheel_download_dir,
session=self.session,
wheel_cache=wheel_cache,
)
results = reqset._prepare_file(self.finder, ireq)
else:
from pip._internal.operations.prepare import RequirementPreparer
preparer_kwargs = {
"build_dir": self.build_dir,
"src_dir": self.source_dir,
"download_dir": download_dir,
"wheel_download_dir": self._wheel_download_dir,
"progress_bar": "off",
"build_isolation": self.build_isolation,
}
resolver_kwargs = {
"finder": self.finder,
"session": self.session,
"upgrade_strategy": "to-satisfy-only",
"force_reinstall": False,
"ignore_dependencies": False,
"ignore_requires_python": False,
"ignore_installed": True,
"use_user_site": False,
}
make_install_req_kwargs = {"isolated": False, "wheel_cache": wheel_cache}
if PIP_VERSION < (19, 3):
resolver_kwargs.update(**make_install_req_kwargs)
else:
from pip._internal.req.constructors import install_req_from_req_string
make_install_req = partial(
install_req_from_req_string, **make_install_req_kwargs
)
resolver_kwargs["make_install_req"] = make_install_req
if PIP_VERSION >= (20,):
del resolver_kwargs["session"]
del preparer_kwargs["progress_bar"]
resolver = None
preparer = None
if PIP_VERSION[:2] <= (19, 3):
tmp_dir_cm = contextlib.nullcontext()
else:
from pip._internal.utils.temp_dir import TempDirectory
tmp_dir_cm = TempDirectory(kind="req-tracker")
with get_requirement_tracker() as req_tracker, tmp_dir_cm as temp_build_dir:
# Pip 18 uses a requirement tracker to prevent fork bombs
if req_tracker:
preparer_kwargs["req_tracker"] = req_tracker
if PIP_VERSION[:2] <= (19, 3):
preparer = RequirementPreparer(**preparer_kwargs)
else:
preparer = self.command.make_requirement_preparer(
temp_build_dir=temp_build_dir,
options=self.options,
req_tracker=req_tracker,
session=self.session,
finder=self.finder,
use_user_site=self.options.use_user_site,
)
resolver_kwargs["preparer"] = preparer
reqset = RequirementSet()
reqset = RequirementSet()
if PIP_VERSION[:2] <= (20, 1):
ireq.is_direct = True
reqset.add_requirement(ireq)
else:
ireq.user_supplied = True
reqset.add_requirement(ireq)
resolver = PipResolver(**resolver_kwargs)
resolver.require_hashes = False
results = resolver._resolve_one(reqset, ireq)
resolver = self.command.make_resolver(
preparer=preparer,
finder=self.finder,
options=self.options,
wheel_cache=wheel_cache,
use_user_site=False,
ignore_installed=True,
ignore_requires_python=False,
force_reinstall=False,
upgrade_strategy="to-satisfy-only",
)
results = resolver._resolve_one(reqset, ireq)
if not ireq.prepared:
# If still not prepared, e.g. a constraint, do enough to assign
# the ireq a name:
resolver._get_abstract_dist_for(ireq)
if PIP_VERSION[:2] <= (20, 0):
reqset.cleanup_files()
return set(results)
@ -276,36 +213,98 @@ class PyPIRepository(BaseRepository):
# If a download_dir is passed, pip will unnecessarily
# archive the entire source directory
download_dir = None
elif ireq.link and is_vcs_url(ireq.link):
elif ireq.link and ireq.link.is_vcs:
# No download_dir for VCS sources. This also works around pip
# using git-checkout-index, which gets rid of the .git dir.
download_dir = None
else:
download_dir = self._download_dir
download_dir = self._get_download_path(ireq)
if not os.path.isdir(download_dir):
os.makedirs(download_dir)
if not os.path.isdir(self._wheel_download_dir):
os.makedirs(self._wheel_download_dir)
wheel_cache = WheelCache(self._cache_dir, self.options.format_control)
prev_tracker = os.environ.get("PIP_REQ_TRACKER")
try:
with global_tempdir_manager():
with global_tempdir_manager():
wheel_cache = WheelCache(self._cache_dir, self.options.format_control)
prev_tracker = os.environ.get("PIP_REQ_TRACKER")
try:
self._dependencies_cache[ireq] = self.resolve_reqs(
download_dir, ireq, wheel_cache
)
finally:
if "PIP_REQ_TRACKER" in os.environ:
if prev_tracker:
os.environ["PIP_REQ_TRACKER"] = prev_tracker
else:
del os.environ["PIP_REQ_TRACKER"]
finally:
if "PIP_REQ_TRACKER" in os.environ:
if prev_tracker:
os.environ["PIP_REQ_TRACKER"] = prev_tracker
else:
del os.environ["PIP_REQ_TRACKER"]
if PIP_VERSION[:2] <= (20, 0):
wheel_cache.cleanup()
# WheelCache.cleanup() introduced in pip==10.0.0
if PIP_VERSION >= (10,):
wheel_cache.cleanup()
return self._dependencies_cache[ireq]
def copy_ireq_dependencies(self, source, dest):
try:
self._dependencies_cache[dest] = self._dependencies_cache[source]
except KeyError:
# `source` may not be in cache yet.
pass
def _get_project(self, ireq):
"""
Return a dict of project info from the PyPI JSON API for a given
InstallRequirement. Return None on HTTP/JSON error or if the package
is not found on the PyPI server.
API reference: https://warehouse.readthedocs.io/api-reference/json/
"""
package_indexes = (
PackageIndex(url=index_url, file_storage_domain="")
for index_url in self.finder.search_scope.index_urls
)
for package_index in package_indexes:
url = "{url}/{name}/json".format(url=package_index.pypi_url, name=ireq.name)
try:
response = self.session.get(url)
except RequestException as e:
log.debug(
"Fetch package info from PyPI failed: {url}: {e}".format(
url=url, e=e
)
)
continue
# Skip this PyPI server, because there is no such package
# or the JSON API might not be supported
if response.status_code == 404:
continue
try:
data = response.json()
except ValueError as e:
log.debug(
"Cannot parse JSON response from PyPI: {url}: {e}".format(
url=url, e=e
)
)
continue
return data
return None
def _get_download_path(self, ireq):
"""
Determine the download dir location in a way which avoids name
collisions.
"""
if ireq.link:
salt = hashlib.sha224(ireq.link.url_without_fragment.encode()).hexdigest()
# Nest directories to avoid running out of top level dirs on some FS
# (see pypi _get_cache_path_parts, which inspired this)
salt = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
return os.path.join(self._download_dir, *salt)
else:
return self._download_dir
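
A standalone sketch of the nesting scheme used above, with a made-up link URL and cache directory:

import hashlib
import os

url = "https://files.example.com/packages/foo-1.0.tar.gz"
salt = hashlib.sha224(url.encode()).hexdigest()      # 56 hex chars
parts = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
print(os.path.join("/path/to/cache/pkgs", *parts))
# e.g. /path/to/cache/pkgs/ab/cd/ef/<remaining 50 hex characters>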
def get_hashes(self, ireq):
"""
Given an InstallRequirement, return a set of hashes that represent all
@ -316,7 +315,7 @@ class PyPIRepository(BaseRepository):
if ireq.link:
link = ireq.link
if is_vcs_url(link) or (is_file_url(link) and is_dir_url(link)):
if link.is_vcs or (link.is_file and link.is_existing_dir()):
# Return empty set for unhashable requirements.
# Unhashable logic modeled on pip's
# RequirementPreparer.prepare_linked_requirement
@ -326,7 +325,7 @@ class PyPIRepository(BaseRepository):
# Directly hash URL requirements.
# URL requirements may have been previously downloaded and cached
# locally by self.resolve_reqs()
cached_path = os.path.join(self._download_dir, link.filename)
cached_path = os.path.join(self._get_download_path(ireq), link.filename)
if os.path.exists(cached_path):
cached_link = Link(path_to_url(cached_path))
else:
@ -336,6 +335,51 @@ class PyPIRepository(BaseRepository):
if not is_pinned_requirement(ireq):
raise TypeError("Expected pinned requirement, got {}".format(ireq))
log.debug("{}".format(ireq.name))
with log.indentation():
hashes = self._get_hashes_from_pypi(ireq)
if hashes is None:
log.log("Couldn't get hashes from PyPI, fallback to hashing files")
return self._get_hashes_from_files(ireq)
return hashes
def _get_hashes_from_pypi(self, ireq):
"""
Return a set of hashes from the PyPI JSON API for a given InstallRequirement.
Return None if fetching the data failed or digests are missing.
"""
project = self._get_project(ireq)
if project is None:
return None
_, version, _ = as_tuple(ireq)
try:
release_files = project["releases"][version]
except KeyError:
log.debug("Missing release files on PyPI")
return None
try:
hashes = {
"{algo}:{digest}".format(
algo=FAVORITE_HASH, digest=file_["digests"][FAVORITE_HASH]
)
for file_ in release_files
if file_["packagetype"] in self.HASHABLE_PACKAGE_TYPES
}
except KeyError:
log.debug("Missing digests of release files on PyPI")
return None
return hashes
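
For reference, the same lookup can be sketched against pypi.org directly (assuming the ``requests`` library and network access; ``pip-tools 5.3.1`` is just a convenient example release):

import requests

data = requests.get("https://pypi.org/pypi/pip-tools/json", timeout=10).json()
release_files = data["releases"]["5.3.1"]
hashes = {
    "sha256:" + f["digests"]["sha256"]
    for f in release_files
    if f["packagetype"] in {"bdist_wheel", "sdist"}
}
print(sorted(hashes))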
def _get_hashes_from_files(self, ireq):
"""
Return a set of hashes for all release files of a given InstallRequirement.
"""
# We need to get all of the candidates that match our current version
# pin, these will represent all of the files that could possibly
# satisfy this constraint.
@ -346,20 +390,12 @@ class PyPIRepository(BaseRepository):
)
matching_candidates = candidates_by_version[matching_versions[0]]
log.debug(" {}".format(ireq.name))
def get_candidate_link(candidate):
if PIP_VERSION < (19, 2):
return candidate.location
return candidate.link
return {
self._get_file_hash(get_candidate_link(candidate))
for candidate in matching_candidates
self._get_file_hash(candidate.link) for candidate in matching_candidates
}
def _get_file_hash(self, link):
log.debug(" Hashing {}".format(link.url_without_fragment))
log.debug("Hashing {}".format(link.show_url))
h = hashlib.new(FAVORITE_HASH)
with open_local_or_remote_file(link, self.session) as f:
# Chunks to iterate
@ -368,7 +404,18 @@ class PyPIRepository(BaseRepository):
# Choose a context manager depending on verbosity
if log.verbosity >= 1:
iter_length = f.size / FILE_CHUNK_SIZE if f.size else None
context_manager = progressbar(chunks, length=iter_length, label=" ")
bar_template = "{prefix} |%(bar)s| %(info)s".format(
prefix=" " * log.current_indent
)
context_manager = progressbar(
chunks,
length=iter_length,
# Make it look like default pip progress bar
fill_char="",
empty_char=" ",
bar_template=bar_template,
width=32,
)
else:
context_manager = contextlib.nullcontext(chunks)
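
The chunked hashing that ``_get_file_hash`` performs can be sketched for a local file like this (algorithm and chunk size are illustrative):

import hashlib

def file_hash(path, algorithm="sha256", chunk_size=8192):
    """Hash a file in chunks, mirroring _get_file_hash above."""
    h = hashlib.new(algorithm)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return "{}:{}".format(algorithm, h.hexdigest())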
@ -410,6 +457,34 @@ class PyPIRepository(BaseRepository):
Wheel.support_index_min = original_support_index_min
self._available_candidates_cache = original_cache
def _setup_logging(self):
"""
Set up pip's logger. Ensure pip is as verbose as pip-tools and sync
pip's log stream with LogContext.stream.
"""
# pip's default logger is noisy, so decrease its verbosity
setup_logging(
verbosity=log.verbosity - 1,
no_color=self.options.no_color,
user_log_file=self.options.log,
)
# Sync pip's console handler stream with LogContext.stream
logger = logging.getLogger()
for handler in logger.handlers:
if handler.name == "console": # pragma: no branch
handler.stream = log.stream
break
else: # pragma: no cover
# There is always a console handler. This warning would be a signal that
# this block should be removed/revisited, because pip may have
# refactored out its logging config.
log.warning("Couldn't find a 'console' logging handler")
# Sync pip's progress bars stream with LogContext.stream
for bar_cls in itertools.chain(*BAR_TYPES.values()):
bar_cls.file = log.stream
@contextmanager
def open_local_or_remote_file(link, session):
@ -423,7 +498,7 @@ def open_local_or_remote_file(link, session):
"""
url = link.url_without_fragment
if is_file_url(link):
if link.is_file:
# Local URL
local_path = url_to_path(url)
if os.path.isdir(local_path):

third_party/python/pip-tools/piptools/resolver.py vendored

@ -4,16 +4,16 @@ from __future__ import absolute_import, division, print_function, unicode_litera
import copy
import os
from functools import partial
from itertools import chain, count
from itertools import chain, count, groupby
from pip._internal.req.constructors import install_req_from_line
from . import click
from ._compat import install_req_from_line
from .logging import log
from .utils import (
UNSAFE_PACKAGES,
format_requirement,
format_specifier,
full_groupby,
is_pinned_requirement,
is_url_requirement,
key_from_ireq,
@ -31,20 +31,24 @@ class RequirementSummary(object):
def __init__(self, ireq):
self.req = ireq.req
self.key = key_from_ireq(ireq)
self.extras = str(sorted(ireq.extras))
self.specifier = str(ireq.specifier)
self.extras = frozenset(ireq.extras)
self.specifier = ireq.specifier
def __eq__(self, other):
return str(self) == str(other)
return (
self.key == other.key
and self.specifier == other.specifier
and self.extras == other.extras
)
def __hash__(self):
return hash(str(self))
return hash((self.key, self.specifier, self.extras))
def __str__(self):
return repr([self.key, self.specifier, self.extras])
return repr((self.key, str(self.specifier), sorted(self.extras)))
def combine_install_requirements(ireqs):
def combine_install_requirements(repository, ireqs):
"""
Return a single install requirement that reflects a combination of
all the inputs.
@ -55,11 +59,21 @@ def combine_install_requirements(ireqs):
for ireq in ireqs:
source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))
# Optimization. Don't bother with combination logic.
if len(source_ireqs) == 1:
return source_ireqs[0]
# deepcopy the accumulator so as to not modify the inputs
combined_ireq = copy.deepcopy(source_ireqs[0])
repository.copy_ireq_dependencies(source_ireqs[0], combined_ireq)
for ireq in source_ireqs[1:]:
# NOTE we may be losing some info on dropped reqs here
combined_ireq.req.specifier &= ireq.req.specifier
if combined_ireq.constraint:
# We don't find dependencies for constraint ireqs, so copy them
# from non-constraints:
repository.copy_ireq_dependencies(ireq, combined_ireq)
combined_ireq.constraint &= ireq.constraint
# Return a sorted, de-duped tuple of extras
combined_ireq.extras = tuple(
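combine_install_requirements now also receives the repository so constraint-only ireqs can inherit dependencies, while the version constraints themselves are still narrowed by intersecting SpecifierSets with &=. A small illustration of that intersection, assuming the standalone packaging library (pip vendors its own copy):

from packaging.specifiers import SpecifierSet

combined = SpecifierSet(">=1.0")
combined &= SpecifierSet("<2.0")
combined &= SpecifierSet("!=1.5")

print(str(combined))      # e.g. "!=1.5,<2.0,>=1.0" (order may vary)
print("1.4" in combined)  # True
print("1.5" in combined)  # False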
@ -115,12 +129,7 @@ class Resolver(object):
@property
def constraints(self):
return set(
self._group_constraints(
chain(
sorted(self.our_constraints, key=str),
sorted(self.their_constraints, key=str),
)
)
self._group_constraints(chain(self.our_constraints, self.their_constraints))
)
def resolve_hashes(self, ireqs):
@ -129,7 +138,7 @@ class Resolver(object):
"""
log.debug("")
log.debug("Generating hashes:")
with self.repository.allow_all_wheels():
with self.repository.allow_all_wheels(), log.indentation():
return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
def resolve(self, max_rounds=10):
@ -218,15 +227,22 @@ class Resolver(object):
flask~=0.7
"""
for _, ireqs in full_groupby(constraints, key=key_from_ireq):
ireqs = list(ireqs)
editable_ireq = next((ireq for ireq in ireqs if ireq.editable), None)
if editable_ireq:
# ignore all the other specs: the editable one is the one that counts
yield editable_ireq
continue
constraints = list(constraints)
for ireq in constraints:
if ireq.name is None:
# get_dependencies has side-effect of assigning name to ireq
# (so we can group by the name below).
self.repository.get_dependencies(ireq)
yield combine_install_requirements(ireqs)
# Sort first by name, i.e. the groupby key. Then within each group,
# sort editables first.
# This way, we don't bother with combining editables, since the first
# ireq will be editable, if one exists.
for _, ireqs in groupby(
sorted(constraints, key=(lambda x: (key_from_ireq(x), not x.editable))),
key=key_from_ireq,
):
yield combine_install_requirements(self.repository, ireqs)
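The grouping above works because itertools.groupby only groups adjacent items: sorting on (name, not editable) both clusters each requirement's constraints together and floats any editable ireq to the front of its group, where it wins outright. A simplified illustration with plain tuples standing in for ireqs:

from itertools import groupby

# (name, editable) pairs standing in for InstallRequirements
reqs = [("flask", False), ("django", False), ("flask", True)]

ordered = sorted(reqs, key=lambda r: (r[0], not r[1]))
for name, group in groupby(ordered, key=lambda r: r[0]):
    first = next(group)
    print(name, "editable" if first[1] else "combine specifiers")
# django combine specifiers
# flask editable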
def _resolve_one_round(self):
"""
@ -243,22 +259,25 @@ class Resolver(object):
constraints = sorted(self.constraints, key=key_from_ireq)
log.debug("Current constraints:")
for constraint in constraints:
log.debug(" {}".format(constraint))
with log.indentation():
for constraint in constraints:
log.debug(str(constraint))
log.debug("")
log.debug("Finding the best candidates:")
best_matches = {self.get_best_match(ireq) for ireq in constraints}
with log.indentation():
best_matches = {self.get_best_match(ireq) for ireq in constraints}
# Find the new set of secondary dependencies
log.debug("")
log.debug("Finding secondary dependencies:")
their_constraints = []
for best_match in best_matches:
their_constraints.extend(self._iter_dependencies(best_match))
with log.indentation():
for best_match in best_matches:
their_constraints.extend(self._iter_dependencies(best_match))
# Grouping constraints to make clean diff between rounds
theirs = set(self._group_constraints(sorted(their_constraints, key=str)))
theirs = set(self._group_constraints(their_constraints))
# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
@ -273,11 +292,13 @@ class Resolver(object):
if has_changed:
log.debug("")
log.debug("New dependencies found in this round:")
for new_dependency in sorted(diff, key=key_from_ireq):
log.debug(" adding {}".format(new_dependency))
with log.indentation():
for new_dependency in sorted(diff, key=key_from_ireq):
log.debug("adding {}".format(new_dependency))
log.debug("Removed dependencies in this round:")
for removed_dependency in sorted(removed, key=key_from_ireq):
log.debug(" removing {}".format(removed_dependency))
with log.indentation():
for removed_dependency in sorted(removed, key=key_from_ireq):
log.debug("removing {}".format(removed_dependency))
# Store the last round's results in the their_constraints
self.their_constraints = theirs
@ -306,6 +327,10 @@ class Resolver(object):
# NOTE: it's much quicker to immediately return instead of
# hitting the index server
best_match = ireq
elif ireq.constraint:
# NOTE: This is not a requirement (yet) and does not need
# to be resolved
best_match = ireq
else:
best_match = self.repository.find_best_match(
ireq, prereleases=self.prereleases
@ -313,7 +338,7 @@ class Resolver(object):
# Format the best match
log.debug(
" found candidate {} (constraint was {})".format(
"found candidate {} (constraint was {})".format(
format_requirement(best_match), format_specifier(ireq)
)
)
@ -357,9 +382,7 @@ class Resolver(object):
# from there
if ireq not in self.dependency_cache:
log.debug(
" {} not in cache, need to check index".format(
format_requirement(ireq)
),
"{} not in cache, need to check index".format(format_requirement(ireq)),
fg="yellow",
)
dependencies = self.repository.get_dependencies(ireq)
@ -368,7 +391,7 @@ class Resolver(object):
# Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
dependency_strings = self.dependency_cache[ireq]
log.debug(
" {:25} requires {}".format(
"{:25} requires {}".format(
format_requirement(ireq),
", ".join(sorted(dependency_strings, key=lambda s: s.lower())) or "-",
)

@ -2,38 +2,66 @@
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shlex
import sys
import tempfile
import warnings
from click import Command
from click.utils import safecall
from pip._internal.commands import create_command
from pip._internal.req.constructors import install_req_from_line
from pip._internal.utils.misc import redact_auth_from_url
from .. import click
from .._compat import install_req_from_line, parse_requirements
from .._compat import parse_requirements
from ..cache import DependencyCache
from ..exceptions import PipToolsError
from ..locations import CACHE_DIR
from ..logging import log
from ..repositories import LocalRequirementsRepository, PyPIRepository
from ..resolver import Resolver
from ..utils import (
UNSAFE_PACKAGES,
create_install_command,
dedup,
get_trusted_hosts,
is_pinned_requirement,
key_from_ireq,
)
from ..utils import UNSAFE_PACKAGES, dedup, is_pinned_requirement, key_from_ireq
from ..writer import OutputWriter
DEFAULT_REQUIREMENTS_FILE = "requirements.in"
DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt"
# Get default values of the pip's options (including options from pip.conf).
install_command = create_install_command()
pip_defaults = install_command.parser.get_default_values()
def _get_default_option(option_name):
"""
Get default value of the pip's option (including option from pip.conf)
by a given option name.
"""
install_command = create_command("install")
default_values = install_command.parser.get_default_values()
return getattr(default_values, option_name)
@click.command()
class BaseCommand(Command):
_os_args = None
def parse_args(self, ctx, args):
"""
Override base `parse_args` to store the argument part of `sys.argv`.
"""
self._os_args = set(args)
return super(BaseCommand, self).parse_args(ctx, args)
def has_arg(self, arg_name):
"""
Detect whether a given arg name (including negative counterparts
to the arg, e.g. --no-arg) is present in the argument part of `sys.argv`.
"""
command_options = {option.name: option for option in self.params}
option = command_options[arg_name]
args = set(option.opts + option.secondary_opts)
return bool(self._os_args & args)
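has_arg is what lets pip-compile distinguish "--no-index passed on the command line" from "index left at its default", including the negated spelling of a flag. A rough standalone sketch of the same check against click's option metadata (hypothetical command, not the vendored one):

import click

@click.command()
@click.option("--index/--no-index", default=True)
def cmd(index):
    pass

def was_passed(command, option_name, argv):
    # An option counts as explicitly passed if any of its spellings appears in argv.
    option = {param.name: param for param in command.params}[option_name]
    return bool(set(option.opts + option.secondary_opts) & set(argv))

print(was_passed(cmd, "index", ["--no-index"]))  # True
print(was_passed(cmd, "index", []))              # False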
@click.command(
cls=BaseCommand, context_settings={"help_option_names": ("-h", "--help")}
)
@click.version_option()
@click.pass_context
@click.option("-v", "--verbose", count=True, help="Show more output")
@ -67,7 +95,9 @@ pip_defaults = install_command.parser.get_default_values()
@click.option(
"-i",
"--index-url",
help="Change index URL (defaults to {})".format(pip_defaults.index_url),
help="Change index URL (defaults to {index_url})".format(
index_url=redact_auth_from_url(_get_default_option("index_url"))
),
envvar="PIP_INDEX_URL",
)
@click.option(
@ -99,7 +129,7 @@ pip_defaults = install_command.parser.get_default_values()
"--index/--no-index",
is_flag=True,
default=True,
help="Add index URL to generated file",
help="DEPRECATED: Add index URL to generated file",
)
@click.option(
"--emit-trusted-host/--no-emit-trusted-host",
@ -153,6 +183,15 @@ pip_defaults = install_command.parser.get_default_values()
default=False,
help="Generate pip 8 style hashes in the resulting requirements file.",
)
@click.option(
"--reuse-hashes/--no-reuse-hashes",
is_flag=True,
default=True,
help=(
"Improve the speed of --generate-hashes by reusing the hashes from an "
"existing output file."
),
)
@click.option(
"--max-rounds",
default=10,
@ -162,7 +201,7 @@ pip_defaults = install_command.parser.get_default_values()
@click.option(
"--build-isolation/--no-build-isolation",
is_flag=True,
default=False,
default=True,
help="Enable isolation when building a modern source distribution. "
"Build dependencies specified by PEP 518 must be already installed "
"if build isolation is disabled.",
@ -182,6 +221,13 @@ pip_defaults = install_command.parser.get_default_values()
show_envvar=True,
type=click.Path(file_okay=False, writable=True),
)
@click.option("--pip-args", help="Arguments to pass directly to the pip command.")
@click.option(
"--emit-index-url/--no-emit-index-url",
is_flag=True,
default=True,
help="Add index URL to generated file",
)
def cli(
ctx,
verbose,
@ -204,11 +250,14 @@ def cli(
output_file,
allow_unsafe,
generate_hashes,
reuse_hashes,
src_files,
max_rounds,
build_isolation,
emit_find_links,
cache_dir,
pip_args,
emit_index_url,
):
"""Compiles requirements.txt from requirements.in specs."""
log.verbosity = verbose - quiet
@ -248,10 +297,24 @@ def cli(
# Close the file at the end of the context execution
ctx.call_on_close(safecall(output_file.close_intelligently))
if cli.has_arg("index") and cli.has_arg("emit_index_url"):
raise click.BadParameter(
"--index/--no-index and --emit-index-url/--no-emit-index-url "
"are mutually exclusive."
)
elif cli.has_arg("index"):
warnings.warn(
"--index and --no-index are deprecated and will be removed "
"in future versions. Use --emit-index-url/--no-emit-index-url instead.",
category=FutureWarning,
)
emit_index_url = index
###
# Setup
###
right_args = shlex.split(pip_args or "")
pip_args = []
if find_links:
for link in find_links:
@ -271,9 +334,11 @@ def cli(
for host in trusted_host:
pip_args.extend(["--trusted-host", host])
repository = PyPIRepository(
pip_args, build_isolation=build_isolation, cache_dir=cache_dir
)
if not build_isolation:
pip_args.append("--no-build-isolation")
pip_args.extend(right_args)
repository = PyPIRepository(pip_args, cache_dir=cache_dir)
# Parse all constraints coming from --upgrade-package/-P
upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
@ -288,9 +353,7 @@ def cli(
if not upgrade and os.path.exists(output_file.name):
# Use a temporary repository to ensure outdated(removed) options from
# existing requirements.txt wouldn't get into the current repository.
tmp_repository = PyPIRepository(
pip_args, build_isolation=build_isolation, cache_dir=cache_dir
)
tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
ireqs = parse_requirements(
output_file.name,
finder=tmp_repository.finder,
@ -307,7 +370,9 @@ def cli(
existing_pins_to_upgrade.add(key)
else:
existing_pins[key] = ireq
repository = LocalRequirementsRepository(existing_pins, repository)
repository = LocalRequirementsRepository(
existing_pins, repository, reuse_hashes=reuse_hashes
)
###
# Parsing/collecting initial requirements
@ -370,14 +435,16 @@ def cli(
]
log.debug("Using indexes:")
for index_url in dedup(repository.finder.index_urls):
log.debug(" {}".format(index_url))
with log.indentation():
for index_url in dedup(repository.finder.index_urls):
log.debug(redact_auth_from_url(index_url))
if repository.finder.find_links:
log.debug("")
log.debug("Configuration:")
for find_link in dedup(repository.finder.find_links):
log.debug(" -f {}".format(find_link))
log.debug("Using links:")
with log.indentation():
for find_link in dedup(repository.finder.find_links):
log.debug(redact_auth_from_url(find_link))
try:
resolver = Resolver(
@ -409,13 +476,13 @@ def cli(
click_ctx=ctx,
dry_run=dry_run,
emit_header=header,
emit_index=index,
emit_index_url=emit_index_url,
emit_trusted_host=emit_trusted_host,
annotate=annotate,
generate_hashes=generate_hashes,
default_index_url=repository.DEFAULT_INDEX_URL,
index_urls=repository.finder.index_urls,
trusted_hosts=get_trusted_hosts(repository.finder),
trusted_hosts=repository.finder.trusted_hosts,
format_control=repository.finder.format_control,
allow_unsafe=allow_unsafe,
find_links=repository.finder.find_links,

@ -1,19 +1,25 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import itertools
import os
import shlex
import sys
from pip._internal.commands import create_command
from pip._internal.utils.misc import get_installed_distributions
from .. import click, sync
from .._compat import get_installed_distributions, parse_requirements
from .._compat import parse_requirements
from ..exceptions import PipToolsError
from ..logging import log
from ..repositories import PyPIRepository
from ..utils import flat_map
DEFAULT_REQUIREMENTS_FILE = "requirements.txt"
@click.command()
@click.command(context_settings={"help_option_names": ("-h", "--help")})
@click.version_option()
@click.option(
"-a",
@ -68,6 +74,7 @@ DEFAULT_REQUIREMENTS_FILE = "requirements.txt"
"the private key and the certificate in PEM format.",
)
@click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
@click.option("--pip-args", help="Arguments to pass directly to pip install.")
def cli(
ask,
dry_run,
@ -82,6 +89,7 @@ def cli(
cert,
client_cert,
src_files,
pip_args,
):
"""Synchronize virtual environment with requirements.txt."""
if not src_files:
@ -104,8 +112,15 @@ def cli(
log.error("ERROR: " + msg)
sys.exit(2)
install_command = create_command("install")
options, _ = install_command.parse_args([])
session = install_command._build_session(options)
finder = install_command._build_package_finder(options=options, session=session)
# Parse requirements file. Note, all options inside requirements file
# will be collected by the finder.
requirements = flat_map(
lambda src: parse_requirements(src, session=True), src_files
lambda src: parse_requirements(src, finder=finder, session=session), src_files
)
try:
@ -117,26 +132,17 @@ def cli(
installed_dists = get_installed_distributions(skip=[], user_only=user_only)
to_install, to_uninstall = sync.diff(requirements, installed_dists)
install_flags = []
for link in find_links or []:
install_flags.extend(["-f", link])
if no_index:
install_flags.append("--no-index")
if index_url:
install_flags.extend(["-i", index_url])
if extra_index_url:
for extra_index in extra_index_url:
install_flags.extend(["--extra-index-url", extra_index])
if trusted_host:
for host in trusted_host:
install_flags.extend(["--trusted-host", host])
if user_only:
install_flags.append("--user")
if cert:
install_flags.extend(["--cert", cert])
if client_cert:
install_flags.extend(["--client-cert", client_cert])
install_flags = _compose_install_flags(
finder,
no_index=no_index,
index_url=index_url,
extra_index_url=extra_index_url,
trusted_host=trusted_host,
find_links=find_links,
user_only=user_only,
cert=cert,
client_cert=client_cert,
) + shlex.split(pip_args or "")
sys.exit(
sync.sync(
to_install,
@ -147,3 +153,65 @@ def cli(
ask=ask,
)
)
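Both scripts now accept --pip-args and split its value with shlex, so quoted arguments survive intact before being appended to the composed install flags. For instance:

import shlex

print(shlex.split("--no-cache-dir --timeout '60'"))
# ['--no-cache-dir', '--timeout', '60']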
def _compose_install_flags(
finder,
no_index=False,
index_url=None,
extra_index_url=None,
trusted_host=None,
find_links=None,
user_only=False,
cert=None,
client_cert=None,
):
"""
Compose install flags with the given finder and CLI options.
"""
result = []
# Build --index-url/--extra-index-url/--no-index
if no_index:
result.append("--no-index")
elif index_url:
result.extend(["--index-url", index_url])
elif finder.index_urls:
finder_index_url = finder.index_urls[0]
if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL:
result.extend(["--index-url", finder_index_url])
for extra_index in finder.index_urls[1:]:
result.extend(["--extra-index-url", extra_index])
else:
result.append("--no-index")
for extra_index in extra_index_url or []:
result.extend(["--extra-index-url", extra_index])
# Build --trusted-hosts
for host in itertools.chain(trusted_host or [], finder.trusted_hosts):
result.extend(["--trusted-host", host])
# Build --find-links
for link in itertools.chain(find_links or [], finder.find_links):
result.extend(["--find-links", link])
# Build format controls --no-binary/--only-binary
for format_control in ("no_binary", "only_binary"):
formats = getattr(finder.format_control, format_control)
if not formats:
continue
result.extend(
["--" + format_control.replace("_", "-"), ",".join(sorted(formats))]
)
if user_only:
result.append("--user")
if cert:
result.extend(["--cert", cert])
if client_cert:
result.extend(["--client-cert", client_cert])
return result
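_compose_install_flags translates what pip's finder collected (from the requirements files and pip.conf) back into explicit pip install flags, instead of forwarding only the CLI options as before. A rough illustration with a stub finder object standing in for pip's PackageFinder (hypothetical values):

from collections import namedtuple

FormatControl = namedtuple("FormatControl", "no_binary only_binary")
StubFinder = namedtuple("StubFinder", "index_urls trusted_hosts find_links format_control")

finder = StubFinder(
    index_urls=["https://example.org/simple", "https://extra.example.org/simple"],
    trusted_hosts=["example.org"],
    find_links=["./wheelhouse"],
    format_control=FormatControl(no_binary=set(), only_binary={"lxml"}),
)

# For a finder like this, the composed flags would be along the lines of:
#   --index-url https://example.org/simple
#   --extra-index-url https://extra.example.org/simple
#   --trusted-host example.org
#   --find-links ./wheelhouse
#   --only-binary lxml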

38
third_party/python/pip-tools/piptools/sync.py поставляемый
@ -4,8 +4,10 @@ import sys
import tempfile
from subprocess import check_call # nosec
from pip._internal.commands.freeze import DEV_PKGS
from pip._internal.utils.compat import stdlib_pkgs
from . import click
from ._compat import DEV_PKGS, stdlib_pkgs
from .exceptions import IncompatibleRequirements
from .utils import (
flat_map,
@ -80,18 +82,19 @@ def merge(requirements, ignore_conflicts):
# Limitation: URL requirements are merged by precise string match, so
# "file:///example.zip#egg=example", "file:///example.zip", and
# "example==1.0" will not merge with each other
key = key_from_ireq(ireq)
if ireq.match_markers():
key = key_from_ireq(ireq)
if not ignore_conflicts:
existing_ireq = by_key.get(key)
if existing_ireq:
# NOTE: We check equality here since we can assume that the
# requirements are all pinned
if ireq.specifier != existing_ireq.specifier:
raise IncompatibleRequirements(ireq, existing_ireq)
if not ignore_conflicts:
existing_ireq = by_key.get(key)
if existing_ireq:
# NOTE: We check equality here since we can assume that the
# requirements are all pinned
if ireq.specifier != existing_ireq.specifier:
raise IncompatibleRequirements(ireq, existing_ireq)
# TODO: Always pick the largest specifier in case of a conflict
by_key[key] = ireq
# TODO: Always pick the largest specifier in case of a conflict
by_key[key] = ireq
return by_key.values()
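merge() keys each pinned requirement by name (now only when its environment markers match) and treats any disagreement between pins as a conflict. A simplified sketch of that idea using packaging.requirements.Requirement instead of pip's InstallRequirement:

from packaging.requirements import Requirement

def merge_pins(lines):
    by_key = {}
    for line in lines:
        req = Requirement(line)
        existing = by_key.get(req.name.lower())
        # Pins are exact, so differing specifiers for the same project conflict.
        if existing is not None and existing.specifier != req.specifier:
            raise ValueError("Incompatible: {} vs {}".format(existing, req))
        by_key[req.name.lower()] = req
    return list(by_key.values())

print(merge_pins(["six==1.15.0", "six==1.15.0", "click==7.1.2"]))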
@ -155,10 +158,12 @@ def sync(
"""
Installs and uninstalls the given sets of modules.
"""
exit_code = 0
if not to_uninstall and not to_install:
if verbose:
click.echo("Everything up-to-date")
return 0
return exit_code
pip_flags = []
if not verbose:
@ -170,16 +175,19 @@ def sync(
if dry_run:
if to_uninstall:
click.echo("Would uninstall:")
for pkg in to_uninstall:
for pkg in sorted(to_uninstall):
click.echo(" {}".format(pkg))
if to_install:
click.echo("Would install:")
for ireq in to_install:
for ireq in sorted(to_install, key=key_from_ireq):
click.echo(" {}".format(format_requirement(ireq)))
exit_code = 1
if ask and click.confirm("Would you like to proceed with these changes?"):
dry_run = False
exit_code = 0
if not dry_run:
if to_uninstall:
@ -212,4 +220,4 @@ def sync(
finally:
os.unlink(tmp_req_file.name)
return 0
return exit_code

@ -3,13 +3,16 @@ from __future__ import absolute_import, division, print_function, unicode_litera
import sys
from collections import OrderedDict
from itertools import chain, groupby
from itertools import chain
import six
from click.utils import LazyFile
from pip._internal.req.constructors import install_req_from_line
from pip._internal.utils.misc import redact_auth_from_url
from pip._internal.vcs import is_url
from six.moves import shlex_quote
from ._compat import PIP_VERSION, InstallCommand, install_req_from_line
from ._compat import PIP_VERSION
from .click import style
UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"}
@ -143,11 +146,6 @@ def as_tuple(ireq):
return name, version, extras
def full_groupby(iterable, key=None):
"""Like groupby(), but sorts the input on the group key first."""
return groupby(sorted(iterable, key=key), key=key)
def flat_map(fn, collection):
"""Map a function over a collection and flatten the result by one-level"""
return chain.from_iterable(map(fn, collection))
@ -233,7 +231,7 @@ def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):
def dedup(iterable):
"""Deduplicate an iterable object like iter(set(iterable)) but
order-reserved.
order-preserved.
"""
return iter(OrderedDict.fromkeys(iterable))
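dedup() drops duplicates while keeping first-seen order (the docstring typo fixed above), unlike iter(set(...)). For example:

from collections import OrderedDict

print(list(OrderedDict.fromkeys(["b", "a", "b", "c"])))  # ['b', 'a', 'c']
print(list(set(["b", "a", "b", "c"])))                   # order is not guaranteed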
@ -278,7 +276,10 @@ def get_hashes_from_ireq(ireq):
in the requirement options.
"""
result = []
ireq_hashes = ireq.options.get("hashes", {})
if PIP_VERSION[:2] <= (20, 0):
ireq_hashes = ireq.options.get("hashes", {})
else:
ireq_hashes = ireq.hash_options
for algorithm, hexdigests in ireq_hashes.items():
for hash_ in hexdigests:
result.append("{}:{}".format(algorithm, hash_))
@ -324,6 +325,10 @@ def get_compile_command(click_ctx):
# Collect variadic args separately, they will be added
# at the end of the command later
if option.nargs < 0:
# These will necessarily be src_files
# Re-add click-stripped '--' if any start with '-'
if any(val.startswith("-") and val != "-" for val in value):
right_args.append("--")
right_args.extend([shlex_quote(force_text(val)) for val in value])
continue
@ -362,32 +367,22 @@ def get_compile_command(click_ctx):
left_args.append(shlex_quote(arg))
# Append to args the option with a value
else:
left_args.append(
"{option}={value}".format(
option=option_long_name, value=shlex_quote(force_text(val))
if isinstance(val, six.string_types) and is_url(val):
val = redact_auth_from_url(val)
if option.name == "pip_args":
# shlex_quote would produce functional but noisily quoted results,
# e.g. --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"''
# Instead, we try to get more legible quoting via repr:
left_args.append(
"{option}={value}".format(
option=option_long_name, value=repr(fs_str(force_text(val)))
)
)
else:
left_args.append(
"{option}={value}".format(
option=option_long_name, value=shlex_quote(force_text(val))
)
)
)
return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args))
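The special-casing of --pip-args above trades shlex-style quoting for repr, purely to keep the reconstructed pip-compile command readable in the generated file header. The difference, roughly (shlex.quote here standing in for six.moves.shlex_quote):

from shlex import quote

value = "--cache-dir='/tmp/with spaces'"
print("--pip-args=" + quote(value))  # --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"''
print("--pip-args=" + repr(value))   # --pip-args="--cache-dir='/tmp/with spaces'"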
def create_install_command():
"""
Return an instance of InstallCommand.
"""
if PIP_VERSION < (19, 3):
return InstallCommand()
from pip._internal.commands import create_command
return create_command("install")
def get_trusted_hosts(finder):
"""
Returns an iterable of trusted hosts from a given finder.
"""
if PIP_VERSION < (19, 2):
return (host for _, host, _ in finder.secure_origins)
return finder.trusted_hosts

@ -56,7 +56,7 @@ class OutputWriter(object):
click_ctx,
dry_run,
emit_header,
emit_index,
emit_index_url,
emit_trusted_host,
annotate,
generate_hashes,
@ -73,7 +73,7 @@ class OutputWriter(object):
self.click_ctx = click_ctx
self.dry_run = dry_run
self.emit_header = emit_header
self.emit_index = emit_index
self.emit_index_url = emit_index_url
self.emit_trusted_host = emit_trusted_host
self.annotate = annotate
self.generate_hashes = generate_hashes
@ -101,7 +101,7 @@ class OutputWriter(object):
yield comment("#")
def write_index_options(self):
if self.emit_index:
if self.emit_index_url:
for index, index_url in enumerate(dedup(self.index_urls)):
if index_url.rstrip("/") == self.default_index_url:
continue
@ -114,9 +114,9 @@ class OutputWriter(object):
yield "--trusted-host {}".format(trusted_host)
def write_format_controls(self):
for nb in dedup(self.format_control.no_binary):
for nb in dedup(sorted(self.format_control.no_binary)):
yield "--no-binary {}".format(nb)
for ob in dedup(self.format_control.only_binary):
for ob in dedup(sorted(self.format_control.only_binary)):
yield "--only-binary {}".format(ob)
def write_find_links(self):

59
third_party/python/pip-tools/setup.cfg поставляемый
@ -1,9 +1,57 @@
[metadata]
name = pip-tools
url = https://github.com/jazzband/pip-tools/
license = BSD
author = Vincent Driessen
author_email = me@nvie.com
description = pip-tools keeps your pinned dependencies fresh.
long_description = file: README.rst
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
Intended Audience :: System Administrators
License :: OSI Approved :: BSD License
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Topic :: System :: Systems Administration
[options]
python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
setup_requires = setuptools_scm
packages = find:
zip_safe = false
install_requires =
click >= 7
six
pip >= 20.0
[options.packages.find]
exclude = tests
[options.extras_require]
testing =
mock
pytest
pytest-rerunfailures
coverage = pytest-cov
[options.entry_points]
console_scripts =
pip-compile = piptools.scripts.compile:cli
pip-sync = piptools.scripts.sync:cli
[bdist_wheel]
universal = 1
[metadata]
license_file = LICENSE
[tool:pytest]
norecursedirs = .* build dist venv test_data piptools/_compat/*
testpaths = tests piptools
@ -16,6 +64,11 @@ markers =
[flake8]
max-line-length = 88
exclude = build/*, dist/*, pip_tools.egg-info/*, piptools/_compat/*, .tox/*, .venv/*, .git/*, .eggs/*
extend-ignore = E203 # E203 conflicts with PEP8; see https://github.com/psf/black#slices
pytest-fixture-no-parentheses = true
pytest-parametrize-names-type = tuple
pytest-parametrize-values-type = tuple
pytest-parametrize-values-row-type = tuple
[isort]
combine_as_imports = True

57
third_party/python/pip-tools/setup.py поставляемый
@ -1,56 +1,3 @@
"""
pip-tools keeps your pinned dependencies fresh.
"""
from os.path import abspath, dirname, join
from setuptools import setup
from setuptools import find_packages, setup
def read_file(filename):
"""Read the contents of a file located relative to setup.py"""
with open(join(abspath(dirname(__file__)), filename)) as thefile:
return thefile.read()
setup(
name="pip-tools",
use_scm_version=True,
url="https://github.com/jazzband/pip-tools/",
license="BSD",
author="Vincent Driessen",
author_email="me@nvie.com",
description=__doc__.strip(),
long_description=read_file("README.rst"),
long_description_content_type="text/x-rst",
packages=find_packages(exclude=["tests"]),
package_data={},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
setup_requires=["setuptools_scm"],
install_requires=["click>=7", "six"],
zip_safe=False,
entry_points={
"console_scripts": [
"pip-compile = piptools.scripts.compile:cli",
"pip-sync = piptools.scripts.sync:cli",
]
},
platforms="any",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: System :: Systems Administration",
],
)
setup(use_scm_version=True)

48
third_party/python/pip-tools/tox.ini поставляемый
@ -1,48 +1,35 @@
[tox]
envlist =
# NOTE: keep this in sync with the env list in .travis.yml for tox-travis.
py{27,35,36,37,38,py,py3}-pip{8.1.1,9.0.1,9.0.3,10.0.1,18.0,19.0.3,19.1,19.2.3,19.3,20.0,latest,master}-coverage
py{27,35,36,37,38,39,py,py3}-pip{20.0,20.1,20.2,latest,master}-coverage
checkqa
readme
skip_missing_interpreters = True
[testenv]
extras =
testing
coverage: coverage
deps =
pipmaster: -e git+https://github.com/pypa/pip.git@master#egg=pip
pip8.1.1: pip==8.1.1
pip9.0.1: pip==9.0.1
pip9.0.3: pip==9.0.3
pip10.0.1: pip==10.0.1
pip18.0: pip==18.0
pip19.0.3: pip==19.0.3
pip19.1: pip==19.1
pip19.2.3: pip==19.2.3
pip19.3: pip==19.3
; TODO: remove all 20.0 mentions after pip-20.2 being released
pip20.0: pip==20.0.*
mock
pytest!=5.1.2
pytest-rerunfailures
coverage: pytest-cov
pip20.1: pip==20.1.*
pip20.2: pip==20.2.*
setenv =
piplatest: PIP=latest
pipmaster: PIP=master
pip8.1.1: PIP=8.1.1
pip9.0.1: PIP=9.0.1
pip9.0.3: PIP=9.0.3
pip10.0.1: PIP=10.0.1
pip18.0: PIP=18.0
pip19.0.3: PIP==19.0.3
pip19.1: PIP==19.1
pip19.2.3: PIP==19.2.3
pip19.3: PIP==19.3
pip20.0: PIP==20.0
pip20.1: PIP==20.1
pip20.2: PIP==20.2
coverage: PYTEST_ADDOPTS=--strict --doctest-modules --cov --cov-report=term-missing {env:PYTEST_ADDOPTS:}
coverage: PYTEST_ADDOPTS=--strict --doctest-modules --cov --cov-report=term-missing --cov-report=xml {env:PYTEST_ADDOPTS:}
commands_pre =
piplatest: python -m pip install -U pip
pip --version
commands = pytest {posargs}
passenv = CI
passenv = CI GITHUB_ACTIONS
pip_pre=True
[testenv:checkqa]
basepython = python3
@ -58,15 +45,8 @@ commands = twine check {distdir}/*
[travis:env]
PIP =
8.1.1: pip8.1.1
9.0.1: pip9.0.1
9.0.3: pip9.0.3
10.0.1: pip10.0.1
18.0: pip18.0
19.0.3: pip19.0.3
19.1: pip19.1
19.2.3: pip19.2.3
19.3: pip19.3
20.0: pip20.0
20.1: pip20.1
20.2: pip20.2
latest: piplatest
master: pipmaster

2
third_party/python/requirements.in поставляемый
@ -33,7 +33,7 @@ json-e==2.7.0
mozilla-version==0.3.4
pathlib2==2.3.2
pathspec==0.8
pip-tools==4.5.1
pip-tools==5.3.1
ply==3.10
psutil==5.7.0
pyasn1==0.4.8

7
third_party/python/requirements.txt поставляемый
@ -160,9 +160,9 @@ pathspec==0.8 \
--hash=sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0 \
--hash=sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061 \
# via -r requirements-mach-vendor-python.in, yamllint
pip-tools==4.5.1 \
--hash=sha256:693f30e451875796b1b25203247f0b4cf48a4c4a5ab7341f4f33ffd498cdcc98 \
--hash=sha256:be9c796aa88b2eec5cabf1323ba1cb60a08212b84bfb75b8b4037a8ef8cb8cb6 \
pip-tools==5.3.1 \
--hash=sha256:5672c2b6ca0f1fd803f3b45568c2cf7fadf135b4971e7d665232b2075544c0ef \
--hash=sha256:73787e23269bf8a9230f376c351297b9037ed0d32ab0f9bef4a187d976acc054 \
# via -r requirements-mach-vendor-python.in
pluggy==0.6.0 \
--hash=sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff \
@ -262,4 +262,5 @@ yamllint==1.23 \
# WARNING: The following packages were not pinned, but pip requires them to be
# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
# pip
# setuptools