author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-29 04:20:41 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-29 04:20:41 +0000
commit     b49f1524e250764592ff132af8fb0d39182620f7 (patch)
tree       a2c4da0c1bfc3be79c9b80180d8958804e91a07d
parent     Initial commit. (diff)
Adding upstream version 0.9.0. (refs: upstream/0.9.0, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
-rw-r--r--  .dockerignore  6
-rw-r--r--  .github/CODEOWNERS  1
-rw-r--r--  .github/dependabot.yml  6
-rw-r--r--  .github/workflows/check.yml  33
-rw-r--r--  .github/workflows/test.yml  131
-rw-r--r--  .gitignore  25
-rw-r--r--  .pre-commit-config.yaml  68
-rw-r--r--  .readthedocs.yml  12
-rw-r--r--  CHANGELOG.rst  330
-rw-r--r--  LICENSE  20
-rw-r--r--  README.md  39
-rw-r--r--  codecov.yml  6
-rw-r--r--  docs/api.rst  26
l---------  docs/changelog.rst  1
-rw-r--r--  docs/conf.py  67
-rw-r--r--  docs/differences.rst  30
-rw-r--r--  docs/index.rst  54
-rw-r--r--  docs/installation.rst  77
-rw-r--r--  docs/mission.rst  20
-rw-r--r--  docs/test_suite.rst  51
-rw-r--r--  pyproject.toml  72
-rw-r--r--  setup.cfg  74
-rw-r--r--  setup.py  4
-rw-r--r--  src/build/__init__.py  539
-rw-r--r--  src/build/__main__.py  397
-rw-r--r--  src/build/env.py  340
-rw-r--r--  src/build/py.typed  0
-rw-r--r--  src/build/util.py  60
-rw-r--r--  tests/conftest.py  111
-rw-r--r--  tests/constraints.txt  9
-rw-r--r--  tests/packages/inline/build.py  63
-rw-r--r--  tests/packages/inline/pyproject.toml  4
-rw-r--r--  tests/packages/legacy/legacy/__init__.py  6
-rw-r--r--  tests/packages/legacy/setup.py  17
-rw-r--r--  tests/packages/test-bad-backend/pyproject.toml  3
-rw-r--r--  tests/packages/test-bad-syntax/pyproject.toml  2
-rw-r--r--  tests/packages/test-bad-wheel/backend_bad_wheel.py  7
-rw-r--r--  tests/packages/test-bad-wheel/pyproject.toml  4
-rw-r--r--  tests/packages/test-bad-wheel/setup.cfg  3
-rw-r--r--  tests/packages/test-cant-build-via-sdist/backend_bad_sdist.py  23
-rw-r--r--  tests/packages/test-cant-build-via-sdist/pyproject.toml  4
-rw-r--r--  tests/packages/test-cant-build-via-sdist/some-file-that-is-needed-for-build.txt  0
-rw-r--r--  tests/packages/test-flit/pyproject.toml  13
-rw-r--r--  tests/packages/test-flit/test_flit/__init__.py  6
-rw-r--r--  tests/packages/test-invalid-requirements/pyproject.toml  3
-rw-r--r--  tests/packages/test-invalid-requirements/setup.cfg  3
-rw-r--r--  tests/packages/test-metadata/backend.py  43
-rw-r--r--  tests/packages/test-metadata/pyproject.toml  14
-rw-r--r--  tests/packages/test-no-backend/pyproject.toml  2
-rw-r--r--  tests/packages/test-no-permission/pyproject.toml  0
-rw-r--r--  tests/packages/test-no-prepare/backend_no_prepare.py  3
-rw-r--r--  tests/packages/test-no-prepare/pyproject.toml  4
-rw-r--r--  tests/packages/test-no-prepare/setup.cfg  3
-rw-r--r--  tests/packages/test-no-project/empty.txt  0
-rw-r--r--  tests/packages/test-no-requires/pyproject.toml  2
-rw-r--r--  tests/packages/test-optional-hooks/hookless_backend.py  0
-rw-r--r--  tests/packages/test-optional-hooks/pyproject.toml  4
-rw-r--r--  tests/packages/test-setuptools/pyproject.toml  3
-rw-r--r--  tests/packages/test-setuptools/setup.cfg  6
-rw-r--r--  tests/packages/test-typo/pyproject.toml  3
-rw-r--r--  tests/test_env.py  177
-rw-r--r--  tests/test_integration.py  136
-rw-r--r--  tests/test_main.py  437
-rw-r--r--  tests/test_module.py  16
-rw-r--r--  tests/test_projectbuilder.py  672
-rw-r--r--  tests/test_self_packaging.py  103
-rw-r--r--  tests/test_util.py  44
-rw-r--r--  tox.ini  109
68 files changed, 4551 insertions, 0 deletions
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..de4edc8
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,6 @@
+.tox
+.*_cache
+*.egg-info
+Dockerfile
+build
+dist
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..ca06f98
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @FFY00 @gaborbernat @layday @henryiii
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..1230149
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "daily"
diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml
new file mode 100644
index 0000000..c6e9bfb
--- /dev/null
+++ b/.github/workflows/check.yml
@@ -0,0 +1,33 @@
+name: check
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+ branches:
+ - main
+ schedule:
+ - cron: "0 8 * * *"
+
+jobs:
+ docs:
+ runs-on: ubuntu-latest
+ env:
+ PY_COLORS: 1
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Setup Python 3.9
+ uses: actions/setup-python@v4
+ with:
+ python-version: 3.9
+
+ - name: Install tox
+ run: python -m pip install tox
+
+ - name: Setup run environment
+ run: tox -vv --notest -e docs
+
+ - name: Run check for docs
+ run: tox -e docs --skip-pkg-install
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..21e1321
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,131 @@
+name: test
+on:
+ push:
+ branches:
+ - main
+ paths-ignore:
+ - "docs/**"
+ - "*.md"
+ pull_request:
+ branches:
+ - main
+ paths-ignore:
+ - "docs/**"
+ - "*.md"
+ schedule:
+ - cron: "0 8 * * *"
+ workflow_dispatch:
+
+concurrency:
+ group: test-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ pytest:
+ runs-on: ${{ matrix.os }}-latest
+ env:
+ PYTEST_ADDOPTS: "--run-integration --showlocals -vv --durations=10 --reruns 5 --only-rerun subprocess.CalledProcessError"
+ strategy:
+ fail-fast: false
+ matrix:
+ os:
+ - ubuntu
+ - macos
+ - windows
+ py:
+ - "pypy-3.7"
+ - "pypy-3.8"
+ - "pypy-3.9"
+ - "3.11"
+ - "3.10"
+ - "3.9"
+ - "3.8"
+ - "3.7"
+ - "3.6"
+ tox-target:
+ - "tox"
+ - "min"
+
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+
+ - name: Setup python for test ${{ matrix.py }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.py }}
+
+ - name: Pick environment to run
+ run: |
+ import platform
+ import os
+ import sys
+
+ if platform.python_implementation() == "PyPy":
+ base = f"pypy{sys.version_info.major}{sys.version_info.minor}"
+ else:
+ base = f"py{sys.version_info.major}{sys.version_info.minor}"
+ env = f"BASE={base}\n"
+ print(f"Picked:\n{env}for {sys.version}")
+ with open(os.environ["GITHUB_ENV"], "a", encoding="utf-8") as file:
+ file.write(env)
+ shell: python
+
+ - name: Setup python for tox
+ uses: actions/setup-python@v4
+ with:
+ python-version: 3.9
+
+ - name: Install tox
+ run: python -m pip install tox
+
+ - name: Run test suite via tox
+ if: matrix.tox-target == 'tox'
+ run: |
+ tox -vv --notest -e ${{env.BASE}}
+ tox -e ${{env.BASE}} --skip-pkg-install
+
+ - name: Run minimum version test
+ if: matrix.tox-target == 'min'
+ run: tox -e ${{env.BASE}}-${{ matrix.tox-target }}
+
+ - name: Run path test
+ if: matrix.tox-target == 'tox' && matrix.py == '3.10'
+ run: tox -e path
+
+ - name: Combine coverage files
+ if: always()
+ run: tox -e coverage
+
+ - uses: codecov/codecov-action@v3
+ if: always()
+ env:
+ PYTHON: ${{ matrix.py }}
+ with:
+ file: ./.tox/coverage.xml
+ flags: tests
+ env_vars: PYTHON
+ name: ${{ matrix.py }} - ${{ matrix.os }}
+
+ type:
+ runs-on: ubuntu-latest
+ env:
+ PY_COLORS: 1
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Setup Python 3.9
+ uses: actions/setup-python@v4
+ with:
+ python-version: 3.9
+
+ - name: Install tox
+ run: python -m pip install tox
+
+ - name: Setup run environment
+ run: tox -vv --notest -e type
+
+ - name: Run check for type
+ run: tox -e type --skip-pkg-install
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..ac76f5d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,25 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# Distribution / packaging
+build/lib/
+dist/
+*.egg-info/
+.eggs
+
+# Testing and coverage
+.cache
+.nox/
+.mypy_cache/
+.pytest_cache
+htmlcov/
+.coverage
+coverage.xml
+tests/integration/
+.integration-sources
+.tox
+Dockerfile
+
+# Restore src/build in case user ignores 'build'
+!src/build
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..7c72733
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,68 @@
+ci:
+ autofix_prs: false
+ autoupdate_commit_msg: "pre-commit: bump repositories"
+
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.3.0
+ hooks:
+ - id: check-ast
+ - id: check-builtin-literals
+ - id: check-docstring-first
+ - id: check-merge-conflict
+ - id: check-yaml
+ - id: check-toml
+ exclude: tests/packages/test-(bad-syntax|no-permission)/pyproject.toml
+ - id: debug-statements
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+ - id: double-quote-string-fixer
+ - repo: https://github.com/asottile/pyupgrade
+ rev: v3.1.0
+ hooks:
+ - id: pyupgrade
+ args: ["--py36-plus"]
+ - repo: https://github.com/psf/black
+ rev: 22.10.0
+ hooks:
+ - id: black
+ - repo: https://github.com/asottile/blacken-docs
+ rev: v1.12.1
+ hooks:
+ - id: blacken-docs
+ additional_dependencies: [black==22.6]
+ - repo: https://github.com/pre-commit/mirrors-prettier
+ rev: "v3.0.0-alpha.2"
+ hooks:
+ - id: prettier
+ - repo: https://github.com/PyCQA/isort
+ rev: 5.10.1
+ hooks:
+ - id: isort
+ - repo: https://github.com/asottile/setup-cfg-fmt
+ rev: v2.1.0
+ hooks:
+ - id: setup-cfg-fmt
+ args: [--include-version-classifiers, --max-py-version=3.11]
+ - repo: https://github.com/PyCQA/flake8
+ rev: "5.0.4"
+ hooks:
+ - id: flake8
+ additional_dependencies: ["flake8-bugbear==22.7.1"]
+ language_version: python3.9
+ - repo: https://github.com/codespell-project/codespell
+ rev: "v2.2.2"
+ hooks:
+ - id: codespell
+ args: ["-L", "sur"]
+ - repo: https://github.com/pre-commit/pygrep-hooks
+ rev: "v1.9.0"
+ hooks:
+ - id: python-check-blanket-noqa
+ - id: python-check-blanket-type-ignore
+ - id: python-no-log-warn
+ - id: python-no-eval
+ - id: python-use-type-annotations
+ - id: rst-backticks
+ - id: rst-directive-colons
+ - id: rst-inline-touching-normal
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 0000000..d115133
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,12 @@
+version: 2
+
+build:
+ os: ubuntu-20.04
+ tools:
+ python: "3.10"
+
+python:
+ install:
+ - method: pip
+ path: .
+ extra_requirements: [docs]
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
new file mode 100644
index 0000000..3e6bde6
--- /dev/null
+++ b/CHANGELOG.rst
@@ -0,0 +1,330 @@
++++++++++
+Changelog
++++++++++
+
+
+0.9.0 (2022-10-27)
+==================
+
+- Hide a Python 3.11.0 unavoidable warning with venv (`PR #527`_)
+- Fix infinite recursion error in ``check_dependency`` with circular
+ dependencies (`PR #512`_, Fixes `#511`_)
+- Only import colorama on Windows (`PR #494`_, Fixes `#493`_)
+- Flush output more often to reduce interleaved output (`PR #494`_)
+- Small API cleanup, like better ``__all__`` and srcdir being read only. (`PR #477`_)
+- Only use ``importlib_metadata`` when needed (`PR #401`_)
+- Clarify in printout when build dependencies are being installed (`PR #514`_)
+
+.. _PR #401: https://github.com/pypa/build/pull/401
+.. _PR #477: https://github.com/pypa/build/pull/477
+.. _PR #494: https://github.com/pypa/build/pull/494
+.. _PR #512: https://github.com/pypa/build/pull/512
+.. _PR #514: https://github.com/pypa/build/pull/514
+.. _PR #527: https://github.com/pypa/build/pull/527
+.. _#493: https://github.com/pypa/build/issues/493
+.. _#511: https://github.com/pypa/build/issues/511
+
+
+0.8.0 (2022-05-22)
+==================
+
+- Accept ``os.PathLike[str]`` in addition to ``str`` for paths in public
+ API (`PR #392`_, Fixes `#372`_)
+- Add schema validation for ``build-system`` table to check conformity
+ with PEP 517 and PEP 518 (`PR #365`_, Fixes `#364`_)
+- Better support for Python 3.11 (sysconfig schemes `PR #434`_, `PR #463`_, tomllib `PR #443`_, warnings `PR #420`_)
+- Improved error printouts (`PR #442`_)
+- Avoid importing packaging unless needed (`PR #395`_, Fixes `#393`_)
+
+
+Breaking Changes
+----------------
+
+- Failure to create a virtual environment in the ``build.env`` module now raises
+ ``build.FailedProcessError`` (`PR #442`_)
+
+.. _PR #365: https://github.com/pypa/build/pull/365
+.. _PR #392: https://github.com/pypa/build/pull/392
+.. _PR #395: https://github.com/pypa/build/pull/395
+.. _PR #420: https://github.com/pypa/build/pull/420
+.. _PR #434: https://github.com/pypa/build/pull/434
+.. _PR #442: https://github.com/pypa/build/pull/442
+.. _PR #443: https://github.com/pypa/build/pull/443
+.. _PR #463: https://github.com/pypa/build/pull/463
+.. _#364: https://github.com/pypa/build/issues/364
+.. _#372: https://github.com/pypa/build/issues/372
+.. _#393: https://github.com/pypa/build/pull/393
+
+
+0.7.0 (2021-09-16)
+==================
+
+- Add ``build.util`` module with a high-level utility API (`PR #340`_)
+
+.. _PR #340: https://github.com/pypa/build/pull/340
+
+
+0.6.0.post1 (2021-08-05)
+========================
+
+- Fix compatibility with Python 3.6 and 3.7 (`PR #339`_, Fixes `#338`_)
+
+.. _PR #339: https://github.com/pypa/build/pull/339
+.. _#338: https://github.com/pypa/build/issues/338
+
+
+
+0.6.0 (2021-08-02)
+==================
+
+- Improved output (`PR #333`_, Fixes `#142`_)
+- The CLI now honors `NO_COLOR`_ (`PR #333`_)
+- The CLI can now be forced to colorize the output by setting the ``FORCE_COLOR`` environment variable (`PR #335`_)
+- Added logging to ``build`` and ``build.env`` (`PR #333`_)
+- Switch to a TOML v1 compliant parser (`PR #336`_, Fixes `#308`_)
+
+
+Breaking Changes
+----------------
+
+- Dropped support for Python 2 and 3.5.
+
+.. _PR #333: https://github.com/pypa/build/pull/333
+.. _PR #335: https://github.com/pypa/build/pull/335
+.. _PR #336: https://github.com/pypa/build/pull/336
+.. _#142: https://github.com/pypa/build/issues/142
+.. _#308: https://github.com/pypa/build/issues/308
+.. _NO_COLOR: https://no-color.org
+
+
+
+0.5.1 (2021-06-22)
+==================
+
+- Fix invoking the backend on an inexistent output directory with multiple levels (`PR #318`_, Fixes `#316`_)
+- When building wheels via sdists, use an isolated temporary directory (`PR #321`_, Fixes `#320`_)
+
+.. _PR #318: https://github.com/pypa/build/pull/318
+.. _PR #321: https://github.com/pypa/build/pull/321
+.. _#316: https://github.com/pypa/build/issues/316
+.. _#320: https://github.com/pypa/build/issues/320
+
+
+
+0.5.0 (2021-06-19)
+==================
+
+- Add ``ProjectBuilder.metadata_path`` helper (`PR #303`_, Fixes `#301`_)
+- Added a ``build.__main__.build_package_via_sdist`` method (`PR #304`_)
+- Use appropriate installation scheme for Apple Python venvs (`PR #314`_, Fixes `#310`_)
+
+Breaking Changes
+----------------
+
+- Binary distributions are now built via the sdist by default in the CLI (`PR #304`_, Fixes `#257`_)
+ - ``python -m build`` will now build a sdist, extract it, and build a wheel from the source
+- As a side-effect of `PR #304`_, ``build.__main__.build_package`` no longer does CLI error handling (print nice message and exit the program)
+- Importing ``build.__main__`` no longer has any side-effects, it no longer overrides ``warnings.showwarning`` or runs ``colorama.init`` on import (`PR #312`_)
+
+.. _PR #303: https://github.com/pypa/build/pull/303
+.. _PR #304: https://github.com/pypa/build/pull/304
+.. _PR #312: https://github.com/pypa/build/pull/312
+.. _PR #314: https://github.com/pypa/build/pull/314
+.. _#257: https://github.com/pypa/build/issues/257
+.. _#301: https://github.com/pypa/build/issues/301
+.. _#310: https://github.com/pypa/build/issues/310
+
+
+
+0.4.0 (2021-05-23)
+==================
+
+- Validate that the supplied source directory is valid (`PR #260`_, Fixes `#259`_)
+- Set and test minimum versions of build's runtime dependencies (`PR #267`_, Fixes `#263`_)
+- Use symlinks on creating venv's when available (`PR #274`_, Fixes `#271`_)
+- Error sooner if pip upgrade is required and fails (`PR #288`_, Fixes `#256`_)
+- Add a ``runner`` argument to ``ProjectBuilder`` (`PR #290`_, Fixes `#289`_)
+- Hide irrelevant ``pep517`` error traceback and improve error messages (`PR #296`_)
+- Try to use ``colorama`` to fix colors on Windows (`PR #300`_)
+
+.. _PR #260: https://github.com/pypa/build/pull/260
+.. _PR #267: https://github.com/pypa/build/pull/267
+.. _PR #274: https://github.com/pypa/build/pull/274
+.. _PR #288: https://github.com/pypa/build/pull/288
+.. _PR #290: https://github.com/pypa/build/pull/290
+.. _PR #296: https://github.com/pypa/build/pull/296
+.. _PR #300: https://github.com/pypa/build/pull/300
+.. _#256: https://github.com/pypa/build/issues/256
+.. _#259: https://github.com/pypa/build/issues/259
+.. _#263: https://github.com/pypa/build/issues/263
+.. _#271: https://github.com/pypa/build/issues/271
+.. _#289: https://github.com/pypa/build/issues/289
+
+Breaking Changes
+----------------
+
+- As a side-effect of `PR #260`_, projects not containing either a ``pyproject.toml`` or ``setup.py`` will be reported as invalid. This affects projects specifying only a ``setup.cfg``, such projects are recommended to add a ``pyproject.toml``. The new behavior is on par with what pip currently does, so if you are affected by this, your project should not be pip installable.
+- The ``--skip-dependencies`` option has been renamed to ``--skip-dependency-check`` (`PR #297`_)
+- The ``skip_dependencies`` argument of ``build.__main__.build_package`` has been renamed to ``skip_dependency_check`` (`PR #297`_)
+- ``build.ConfigSettings`` has been renamed to ``build.ConfigSettingsType`` (`PR #298`_)
+- ``build.ProjectBuilder.build_dependencies`` to ``build.ProjectBuilder.build_system_requires`` (`PR #284`_, Fixes `#182`_)
+- ``build.ProjectBuilder.get_dependencies`` to ``build.ProjectBuilder.get_requires_for_build`` (`PR #284`_, Fixes `#182`_)
+
+.. _PR #284: https://github.com/pypa/build/pull/284
+.. _PR #297: https://github.com/pypa/build/pull/297
+.. _PR #298: https://github.com/pypa/build/pull/298
+.. _#182: https://github.com/pypa/build/issues/182
+
+
+
+0.3.1 (2021-03-09)
+==================
+
+- Support direct usage from pipx run in 0.16.1.0+ (`PR #247`_)
+- Use UTF-8 encoding when reading pyproject.toml (`PR #251`_, Fixes `#250`_)
+
+.. _PR #247: https://github.com/pypa/build/pull/247
+.. _PR #251: https://github.com/pypa/build/pull/251
+.. _#250: https://github.com/pypa/build/issues/250
+
+
+
+0.3.0 (2021-02-19)
+==================
+
+- Upgrade pip based on venv pip version, avoids error on Debian Python 3.6.5-3.8 or issues installing wheels on Big Sur (`PR #229`_, `PR #230`_, Fixes `#228`_)
+- Build dependencies in isolation, instead of in the build environment (`PR #232`_, Fixes `#231`_)
+- Fallback on venv if virtualenv is too old (`PR #241`_)
+- Add metadata preparation hook (`PR #217`_, Fixes `#130`_)
+
+.. _PR #217: https://github.com/pypa/build/pull/217
+.. _PR #229: https://github.com/pypa/build/pull/229
+.. _PR #230: https://github.com/pypa/build/pull/230
+.. _PR #232: https://github.com/pypa/build/pull/232
+.. _PR #241: https://github.com/pypa/build/pull/241
+.. _#130: https://github.com/pypa/build/issues/130
+.. _#228: https://github.com/pypa/build/issues/228
+.. _#231: https://github.com/pypa/build/issues/231
+
+
+
+0.2.1 (2021-02-09)
+==================
+
+- Fix error from unrecognised pip flag on Python 3.6.0 to 3.6.5 (`PR #227`_, Fixes `#226`_)
+
+.. _PR #227: https://github.com/pypa/build/pull/227
+.. _#226: https://github.com/pypa/build/issues/226
+
+
+
+0.2.0 (2021-02-07)
+==================
+
+- Check dependencies recursively (`PR #183`_, Fixes `#25`_)
+- Build wheel and sdist distributions in separate environments, as they may have different dependencies (`PR #195`_, Fixes `#194`_)
+- Add support for pre-releases in ``check_dependency`` (`PR #204`_, Fixes `#191`_)
+- Fixes console scripts not being available during build (`PR #221`_, Fixes `#214`_)
+- Do not add the default backend requirements to ``requires`` when no backend is specified (`PR #177`_, Fixes `#107`_)
+- Return the sdist name in ``ProjectBuilder.build`` (`PR #197`_)
+- Improve documentation (`PR #178`_, `PR #203`_)
+- Add changelog (`PR #219`_, Fixes `#169`_)
+
+Breaking changes
+----------------
+
+- Move ``config_settings`` argument to the hook calls (`PR #218`_, Fixes `#216`_)
+
+.. _PR #177: https://github.com/pypa/build/pull/177
+.. _PR #178: https://github.com/pypa/build/pull/178
+.. _PR #183: https://github.com/pypa/build/pull/183
+.. _PR #195: https://github.com/pypa/build/pull/195
+.. _PR #197: https://github.com/pypa/build/pull/197
+.. _PR #203: https://github.com/pypa/build/pull/203
+.. _PR #204: https://github.com/pypa/build/pull/204
+.. _PR #218: https://github.com/pypa/build/pull/218
+.. _PR #219: https://github.com/pypa/build/pull/219
+.. _PR #221: https://github.com/pypa/build/pull/221
+.. _#25: https://github.com/pypa/build/issues/25
+.. _#107: https://github.com/pypa/build/issues/107
+.. _#109: https://github.com/pypa/build/issues/109
+.. _#169: https://github.com/pypa/build/issues/169
+.. _#191: https://github.com/pypa/build/issues/191
+.. _#194: https://github.com/pypa/build/issues/194
+.. _#214: https://github.com/pypa/build/issues/214
+.. _#216: https://github.com/pypa/build/issues/216
+
+
+
+0.1.0 (2020-10-29)
+==================
+
+- Moved the upstream to PyPA
+- Fixed building with isolation in a virtual environment
+- Added env.IsolatedEnv abstract class
+- Added env.IsolatedEnvBuilder (replaces env.IsolatedEnvironment usages)
+- Added python_executable argument to the ProjectBuilder constructor
+- Added --version/-V option to the CLI
+- Added support for Python 3.9
+- Added py.typed marker
+- Various miscellaneous fixes in the virtual environment creation
+- Many general improvements in the documentation
+- Documentation moved to the furo theme
+- Updated the CoC to the PSF CoC, which PyPA has adopted
+
+Breaking changes
+----------------
+
+- Renamed the entrypoint script to pyproject-build
+- Removed default arguments from all paths in ProjectBuilder
+- Removed ProjectBuilder.hook
+- Renamed __main__.build to __main__.build_package
+- Changed the default outdir value to {srcdir}/dest
+- Removed env.IsolatedEnvironment
+
+
+
+0.0.4 (2020-09-08)
+==================
+
+- Packages are now built in isolation by default
+- Added --no-isolation/-n flag to build in the current environment
+- Add --config-setting/-C option to pass options to the backend
+- Add IsolatedEnvironment class
+- Fix creating the output directory if it doesn't exist
+- Fix building with in-tree backends
+- Fix broken entrypoint script (python-build)
+- Add warning about incomplete verification when verifying extras
+- Automatically detect typos in the build system table
+- Minor documentation improvements
+
+
+
+0.0.3.1 (2020-06-10)
+====================
+
+- Fix bug preventing the CLI from being invoked
+- Improved documentation
+
+
+
+0.0.3 (2020-06-09)
+==================
+
+- Misc improvements
+- Added documentation
+
+
+
+0.0.2 (2020-05-29)
+==================
+
+- Add setuptools as a default fallback backend
+- Fix extras handling in requirement strings
+
+
+
+0.0.1 (2020-05-17)
+==================
+
+- Initial release
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..c3713cd
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,20 @@
+Copyright © 2019 Filipe Laíns <filipe.lains@gmail.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next
+paragraph) shall be included in all copies or substantial portions of the
+Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0932fd1
--- /dev/null
+++ b/README.md
@@ -0,0 +1,39 @@
+# build
+
+[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/pypa/build/main.svg)](https://results.pre-commit.ci/latest/github/pypa/build/main)
+[![CI check](https://github.com/pypa/build/workflows/check/badge.svg)](https://github.com/pypa/build/actions)
+[![CI test](https://github.com/pypa/build/actions/workflows/test.yml/badge.svg)](https://github.com/pypa/build/actions/workflows/test.yml)
+[![codecov](https://codecov.io/gh/pypa/build/branch/main/graph/badge.svg)](https://codecov.io/gh/pypa/build)
+
+[![Documentation Status](https://readthedocs.org/projects/pypa-build/badge/?version=latest)](https://pypa-build.readthedocs.io/en/latest/?badge=latest)
+[![PyPI version](https://badge.fury.io/py/build.svg)](https://pypi.org/project/build/)
+[![Discord](https://img.shields.io/discord/803025117553754132?label=Discord%20chat%20%23build&style=flat-square)](https://discord.gg/pypa)
+
+A simple, correct PEP 517 build frontend.
+
+See the [documentation](https://pypa-build.readthedocs.io/en/latest/) for more information.
+
+### Installation
+
+`build` can be installed via `pip` or an equivalent:
+
+```console
+$ pip install build
+```
+
+### Usage
+
+```console
+$ python -m build
+```
+
+This will build the package in an isolated environment, generating a
+source-distribution and wheel in the directory `dist/`.
+See the [documentation](https://pypa-build.readthedocs.io/en/latest/) for full information.
+
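+A few common variations (an illustrative sketch, not an exhaustive reference;
+run `python -m build --help` for the authoritative option list):
+
+```console
+$ python -m build --wheel        # build only the wheel
+$ python -m build --sdist        # build only the source distribution
+$ python -m build --outdir out/ .   # build the current directory into out/
+```
+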
+### Code of Conduct
+
+Everyone interacting with the `build` project's codebase, issue trackers, chat rooms, and mailing lists is expected to follow
+the [PSF Code of Conduct].
+
+[psf code of conduct]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000..5f91842
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,6 @@
+coverage:
+ status:
+ patch:
+ default:
+ informational: true
+comment: false
diff --git a/docs/api.rst b/docs/api.rst
new file mode 100644
index 0000000..254db23
--- /dev/null
+++ b/docs/api.rst
@@ -0,0 +1,26 @@
+API Documentation
+=================
+
+``build`` module
+----------------
+
+.. automodule:: build
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
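+As an illustrative sketch (not part of the generated reference; the source
+directory path is hypothetical), :class:`build.ProjectBuilder` can drive a
+build programmatically:
+
+.. code-block:: python
+
+    import build
+
+    builder = build.ProjectBuilder('path/to/source')
+    # report unmet build dependencies, then build a wheel into dist/
+    missing = builder.check_dependencies('wheel')
+    if missing:
+        print('Missing build dependencies:', missing)
+    print(builder.build('wheel', 'dist/'))
+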
+``build.env`` module
+--------------------
+
+.. automodule:: build.env
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+``build.util`` module
+---------------------
+
+.. automodule:: build.util
+ :members:
+ :undoc-members:
+ :show-inheritance:
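+
+A minimal sketch of the high-level helper API (assuming the
+``project_wheel_metadata`` function introduced with this module in 0.7.0;
+the source directory path is hypothetical):
+
+.. code-block:: python
+
+    import build.util
+
+    # builds the project metadata in an isolated environment and returns it
+    metadata = build.util.project_wheel_metadata('path/to/source')
+    print(metadata['Name'], metadata['Version'])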
diff --git a/docs/changelog.rst b/docs/changelog.rst
new file mode 120000
index 0000000..e22698b
--- /dev/null
+++ b/docs/changelog.rst
@@ -0,0 +1 @@
+../CHANGELOG.rst
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..26e77d0
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,67 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+
+import build
+
+
+# -- Project information -----------------------------------------------------
+
+project = 'build'
+copyright = '2020, Filipe Laíns'
+author = 'Filipe Laíns'
+
+# The short X.Y version
+version = build.__version__
+# The full version, including alpha/beta/rc tags
+release = build.__version__
+
+
+# -- General configuration ---------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.intersphinx',
+ 'sphinx_autodoc_typehints',
+ 'sphinx_argparse_cli',
+]
+
+intersphinx_mapping = {
+ 'python': ('https://docs.python.org/3/', None),
+}
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = []
+
+default_role = 'any'
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'furo'
+html_title = f'build {version}'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named 'default.css' will overwrite the builtin 'default.css'.
+# html_static_path = ['_static']
+
+autoclass_content = 'both'
diff --git a/docs/differences.rst b/docs/differences.rst
new file mode 100644
index 0000000..6fbc401
--- /dev/null
+++ b/docs/differences.rst
@@ -0,0 +1,30 @@
+============================
+Differences from other tools
+============================
+
+``pep517.build``
+----------------
+
+build implements a CLI tailored to end users.
+
+``pep517.build`` contained a proof-of-concept of a :pep:`517`
+frontend. It *"implement[ed] essentially the simplest possible frontend
+tool, to exercise and illustrate how the core functionality can be
+used"*. It has since been `deprecated and is scheduled for removal`_.
+
+``setup.py sdist bdist_wheel``
+------------------------------
+
+build is roughly the equivalent of ``setup.py sdist bdist_wheel`` but
+with :pep:`517` support, allowing use with projects that don't use setuptools.
+
+.. _deprecated and is scheduled for removal: https://github.com/pypa/pep517/pull/83
+
+Custom Behaviors
+----------------
+
+Fallback Backend
+^^^^^^^^^^^^^^^^
+
+As recommended in :pep:`517`, if no backend is specified, ``build`` will
+fall back to ``setuptools.build_meta:__legacy__``.
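+
+In effect, this is roughly equivalent to the project declaring the default
+table used internally (``_DEFAULT_BACKEND`` in ``src/build/__init__.py``):
+
+.. code-block:: toml
+
+    [build-system]
+    requires = ["setuptools >= 40.8.0", "wheel"]
+    build-backend = "setuptools.build_meta:__legacy__"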
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..f13aa7a
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,54 @@
+:hide-toc:
+
+*****
+build
+*****
+
+A simple, correct :pep:`517` build frontend.
+
+build will invoke the :pep:`517` hooks to build a distribution package.
+It is a simple build tool and does not perform any dependency management.
+
+.. sphinx_argparse_cli::
+ :module: build.__main__
+ :func: main_parser
+ :prog: python -m build
+ :title: python -m build
+ :usage_width: 97
+
+.. note::
+
+ A ``pyproject-build`` CLI script is also available, so that tools such as pipx_
+ can use it.
+
+By default build will build the package in an isolated
+environment, but this behavior can be disabled with ``--no-isolation``.
+
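+For example (an illustrative sketch of the two entry points mentioned above):
+
+.. code-block:: console
+
+    $ pipx run build                    # via the pipx entry point
+    $ python -m build --no-isolation    # build using the current environment
+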
+.. toctree::
+ :hidden:
+
+ mission
+ differences
+
+.. toctree::
+ :caption: Usage
+ :hidden:
+
+ installation
+ changelog
+ api
+
+.. toctree::
+ :caption: Contributing
+ :hidden:
+
+ test_suite
+
+.. toctree::
+ :caption: Project Links
+ :hidden:
+
+ Source Code <https://github.com/pypa/build/>
+ Issue Tracker <https://github.com/pypa/build/issues>
+
+.. _pipx: https://github.com/pipxproject/pipx
diff --git a/docs/installation.rst b/docs/installation.rst
new file mode 100644
index 0000000..563ff57
--- /dev/null
+++ b/docs/installation.rst
@@ -0,0 +1,77 @@
+============
+Installation
+============
+
+You can download a tarball_ from GitHub, check out the latest `git tag`_, or
+fetch the artifacts from the `project page`_ on PyPI.
+
+The recommended way is to check out the git tags, as they are PGP-signed with
+one of the following keys:
+
+- |3DCE51D60930EBA47858BA4146F633CBB0EB4BF2|_ *(Filipe Laíns)*
+
+``build`` may also be installed via `pip`_ or an equivalent:
+
+.. code-block:: sh
+
+ $ pip install build
+
+.. tip::
+ If you prefer, or are already using virtualenv_ in your workflow, you can
+ install ``build`` with the optional ``virtualenv`` dependency:
+
+ .. code-block:: sh
+
+ $ pip install 'build[virtualenv]'
+
+ This way, ``build`` will use virtualenv_ for isolation instead of venv_.
+ This can be particularly useful, for example, when using automation tools
+ that rely on virtualenv_, such as tox_, or when your operating system's
+ Python package does not include venv_ in the standard installation (such as
+ some versions of Ubuntu).
+
+Bootstrapping
+=============
+
+This package can build itself with only the ``toml`` and ``pep517``
+dependencies. The ``--skip-dependency-check`` flag should be used in this
+case.
+
+On Python 3.10 and older, we have a dependency on tomli_, but toml_ can be
+used instead, which may make bootstrapping easier.
+
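+A bootstrap build may therefore look like this (an illustrative sketch,
+assuming the minimal build dependencies were provisioned by other means):
+
+.. code-block:: sh
+
+    $ python -m build --skip-dependency-check --no-isolation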
+
+Compatibility
+=============
+
+``build`` is verified to be compatible with the following Python
+versions:
+
+- 2.7
+- 3.5
+- 3.6
+- 3.7
+- 3.8
+- 3.9
+- PyPy(2)
+- PyPy3
+
+
+.. _pipx: https://github.com/pipxproject/pipx
+.. _pip: https://github.com/pypa/pip
+.. _PyPI: https://pypi.org/
+
+.. _tox: https://tox.readthedocs.org/
+.. _virtualenv: https://virtualenv.pypa.io
+.. _venv: https://docs.python.org/3/library/venv.html
+
+.. _tarball: https://github.com/pypa/build/releases
+.. _git tag: https://github.com/pypa/build/tags
+.. _project page: https://pypi.org/project/build/
+
+.. _tomli: https://github.com/hukkin/tomli
+.. _toml: https://github.com/uiri/toml
+
+
+.. |3DCE51D60930EBA47858BA4146F633CBB0EB4BF2| replace:: ``3DCE51D60930EBA47858BA4146F633CBB0EB4BF2``
+.. _3DCE51D60930EBA47858BA4146F633CBB0EB4BF2: https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x3dce51d60930eba47858ba4146f633cbb0eb4bf2
diff --git a/docs/mission.rst b/docs/mission.rst
new file mode 100644
index 0000000..f83f60e
--- /dev/null
+++ b/docs/mission.rst
@@ -0,0 +1,20 @@
+=================
+Mission Statement
+=================
+
+In the Python ecosystem, build system tools and package management are
+deeply intertwined. While it might be useful for users to be able to access
+all of these capabilities in a single project (such as pip_), there are several
+use cases where this is not desirable. The main ones are custom environments
+(outside PyPI_) and situations where users do their own package management,
+such as Linux distributions.
+
+This project aims to fill the "building packages" hole for such use cases in
+:pep:`517`/:pep:`518` workflows.
+
+As it is intended to be used by users who do their own package management,
+we will try to keep dependencies to a minimum, in order to make
+bootstrapping easier.
+
+.. _pip: https://github.com/pypa/pip
+.. _PyPI: https://pypi.org/
diff --git a/docs/test_suite.rst b/docs/test_suite.rst
new file mode 100644
index 0000000..d486106
--- /dev/null
+++ b/docs/test_suite.rst
@@ -0,0 +1,51 @@
+**********
+Test Suite
+**********
+
+Due to its nature, ``build`` has a somewhat complex test suite, which we will
+try to go through in this document.
+
+First, there are two sets of tests: unit tests and integration tests. In unit
+tests, we test the actual code implementation. In integration tests, we test
+``build`` on a few real-world projects; this is mostly a sanity test.
+
+Integration tests take a long time to run and are not very helpful for tracking
+down issues, so they are **disabled by default**. They can be enabled by passing
+either the ``--run-integration`` or ``--only-integration`` argument to pytest,
+where the latter disables the unit tests and runs only the integration ones.
+Even though these tests are disabled by default, they are run in CI,
+where test suite run durations are not a big issue.
+
+To run the test suite we use ``tox``, which automates running the test suite on
+different environments:
+
+
+.. code-block:: console
+
+ tox
+
+
+You can find out more about how to run ``tox`` and its arguments in the
+`tox documentation`_.
+
+We have a fairly large environment matrix. We run tests for all supported Python
+versions and implementations, and with the module being invoked from path,
+sdist install, or wheel install. Additionally, we have an environment for type
+checking, and one to produce the documentation. There are some other extra
+environments, like checking the code with the minimum version of each
+dependency.
+
+Some example commands for this project:
+ - Run type checking: ``tox -e type``
+ - Only run unit tests against Python 3.9: ``tox -e py39``
+ - Run both unit and integration tests: ``tox -- --run-integration``
+ - Only run integration tests: ``tox -- --only-integration``
+ - Only run integration tests with parallel tasks: ``tox -- -n auto --only-integration``
+ - Only run unit tests against Python 3.9 with the module installed via wheel: ``tox -e py39-wheel``
+
+
+We have CI testing, where we run the test suite across all supported operating
+systems, and we produce test coverage reports.
+
+
+.. _tox documentation: https://tox.readthedocs.io/
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..a4f7f4d
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,72 @@
+[build-system]
+requires = ["setuptools >=42.0"]
+build-backend = "setuptools.build_meta"
+
+[tool.coverage.run]
+source = [
+ "build",
+ "tests",
+]
+
+[tool.coverage.report]
+exclude_lines = [
+ '\#\s*pragma: no cover',
+ '^\s*raise NotImplementedError\b',
+]
+
+[tool.coverage.paths]
+build = [
+ "src",
+ "*/site-packages",
+ '*\site-packages',
+]
+
+[tool.coverage.html]
+show_contexts = true
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = ["--strict-config", "--strict-markers"]
+log_cli_level = "info"
+testpaths = ["tests"]
+xfail_strict = true
+junit_family = "xunit2"
+norecursedirs = "tests/integration/*"
+markers = [
+ "isolated",
+ "pypy3323bug",
+]
+filterwarnings = [
+ "error",
+ "ignore:path is deprecated.:DeprecationWarning",
+ "ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning",
+]
+
+[tool.mypy]
+files = "src"
+python_version = "3.6"
+strict = true
+show_error_codes = true
+enable_error_code = ["ignore-without-code", "truthy-bool", "redundant-expr"]
+
+
+[[tool.mypy.overrides]]
+module = [
+ "colorama", # Optional dependency
+ "pep517.*", # Untyped
+ "virtualenv", # Optional dependency
+]
+ignore_missing_imports = true
+
+[tool.black]
+line-length = 127
+skip-string-normalization = true
+target-version = ["py39", "py38", "py37", "py36"]
+
+[tool.isort]
+profile = "black"
+lines_between_types = 1
+lines_after_imports = 2
+line_length = 127
+known_first_party = "build"
+skip = [] # "build" is included in the default skip list
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..19688d1
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,74 @@
+[metadata]
+name = build
+version = 0.9.0
+description = A simple, correct PEP 517 build frontend
+long_description = file: README.md
+long_description_content_type = text/markdown
+author = Filipe Laíns
+author_email = lains@riseup.net
+license = MIT
+license_file = LICENSE
+classifiers =
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
+ Programming Language :: Python :: 3.11
+ Programming Language :: Python :: Implementation :: CPython
+ Programming Language :: Python :: Implementation :: PyPy
+project_urls =
+ homepage = https://github.com/pypa/build
+ changelog = https://pypa-build.readthedocs.io/en/stable/changelog.html
+
+[options]
+packages = find:
+install_requires =
+ packaging>=19.0
+ pep517>=0.9.1
+ colorama;os_name == "nt" # not actually a runtime dependency, only supplied as there is no "recommended dependency" support
+ importlib-metadata>=0.22;python_version < "3.8"
+ tomli>=1.0.0;python_version < "3.11" # toml can be used instead -- in case it makes bootstrapping easier
+python_requires = >=3.6
+package_dir =
+ =src
+
+[options.packages.find]
+where = src
+
+[options.entry_points]
+console_scripts =
+ pyproject-build = build.__main__:entrypoint
+pipx.run =
+ build = build.__main__:entrypoint
+
+[options.extras_require]
+docs =
+ furo>=2021.08.31
+ sphinx~=4.0
+ sphinx-argparse-cli>=1.5
+ sphinx-autodoc-typehints>=1.10
+test =
+ filelock>=3
+ pytest>=6.2.4
+ pytest-cov>=2.12
+ pytest-mock>=2
+ pytest-rerunfailures>=9.1
+ pytest-xdist>=1.34
+ toml>=0.10.0
+ wheel>=0.36.0
+ setuptools>=42.0.0;python_version < "3.10"
+ setuptools>=56.0.0;python_version >= "3.10"
+typing =
+ importlib-metadata>=4.6.4
+ mypy==0.950
+ typing-extensions>=3.7.4.3;python_version < "3.8"
+virtualenv =
+ virtualenv>=20.0.35
+
+[options.package_data]
+build =
+ py.typed
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..b024da8
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,4 @@
+from setuptools import setup
+
+
+setup()
diff --git a/src/build/__init__.py b/src/build/__init__.py
new file mode 100644
index 0000000..0425a85
--- /dev/null
+++ b/src/build/__init__.py
@@ -0,0 +1,539 @@
+# SPDX-License-Identifier: MIT
+
+"""
+build - A simple, correct PEP 517 build frontend
+"""
+
+__version__ = '0.9.0'
+
+import contextlib
+import difflib
+import logging
+import os
+import re
+import subprocess
+import sys
+import textwrap
+import types
+import warnings
+import zipfile
+
+from collections import OrderedDict
+from typing import (
+ AbstractSet,
+ Any,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ Type,
+ Union,
+)
+
+import pep517.wrappers
+
+
+TOMLDecodeError: Type[Exception]
+toml_loads: Callable[[str], MutableMapping[str, Any]]
+
+if sys.version_info >= (3, 11):
+ from tomllib import TOMLDecodeError
+ from tomllib import loads as toml_loads
+else:
+ try:
+ from tomli import TOMLDecodeError
+ from tomli import loads as toml_loads
+ except ModuleNotFoundError: # pragma: no cover
+ from toml import TomlDecodeError as TOMLDecodeError # type: ignore[import,no-redef]
+ from toml import loads as toml_loads # type: ignore[no-redef]
+
+
+RunnerType = Callable[[Sequence[str], Optional[str], Optional[Mapping[str, str]]], None]
+ConfigSettingsType = Mapping[str, Union[str, Sequence[str]]]
+PathType = Union[str, 'os.PathLike[str]']
+_ExcInfoType = Union[Tuple[Type[BaseException], BaseException, types.TracebackType], Tuple[None, None, None]]
+
+
+_WHEEL_NAME_REGEX = re.compile(
+ r'(?P<distribution>.+)-(?P<version>.+)'
+ r'(-(?P<build_tag>.+))?-(?P<python_tag>.+)'
+ r'-(?P<abi_tag>.+)-(?P<platform_tag>.+)\.whl'
+)
+
+
+_DEFAULT_BACKEND = {
+ 'build-backend': 'setuptools.build_meta:__legacy__',
+ 'requires': ['setuptools >= 40.8.0', 'wheel'],
+}
+
+
+_logger = logging.getLogger(__name__)
+
+
+class BuildException(Exception):
+ """
+ Exception raised by :class:`ProjectBuilder`
+ """
+
+
+class BuildBackendException(Exception):
+ """
+ Exception raised when a backend operation fails
+ """
+
+ def __init__(
+ self, exception: Exception, description: Optional[str] = None, exc_info: _ExcInfoType = (None, None, None)
+ ) -> None:
+ super().__init__()
+ self.exception = exception
+ self.exc_info = exc_info
+ self._description = description
+
+ def __str__(self) -> str:
+ if self._description:
+ return self._description
+ return f'Backend operation failed: {self.exception!r}'
+
+
+class BuildSystemTableValidationError(BuildException):
+ """
+ Exception raised when the ``[build-system]`` table in pyproject.toml is invalid.
+ """
+
+ def __str__(self) -> str:
+ return f'Failed to validate `build-system` in pyproject.toml: {self.args[0]}'
+
+
+class FailedProcessError(Exception):
+ """
+ Exception raised when a setup or preparation operation fails.
+ """
+
+ def __init__(self, exception: subprocess.CalledProcessError, description: str) -> None:
+ super().__init__()
+ self.exception = exception
+ self._description = description
+
+ def __str__(self) -> str:
+ cmd = ' '.join(self.exception.cmd)
+ description = f"{self._description}\n Command '{cmd}' failed with return code {self.exception.returncode}"
+ for stream_name in ('stdout', 'stderr'):
+ stream = getattr(self.exception, stream_name)
+ if stream:
+ description += f'\n {stream_name}:\n'
+ description += textwrap.indent(stream.decode(), ' ')
+ return description
+
+
+class TypoWarning(Warning):
+ """
+ Warning raised when a possible typo is found
+ """
+
+
+@contextlib.contextmanager
+def _working_directory(path: str) -> Iterator[None]:
+ current = os.getcwd()
+
+ os.chdir(path)
+
+ try:
+ yield
+ finally:
+ os.chdir(current)
+
+
+def _validate_source_directory(srcdir: PathType) -> None:
+ if not os.path.isdir(srcdir):
+ raise BuildException(f'Source {srcdir} is not a directory')
+ pyproject_toml = os.path.join(srcdir, 'pyproject.toml')
+ setup_py = os.path.join(srcdir, 'setup.py')
+ if not os.path.exists(pyproject_toml) and not os.path.exists(setup_py):
+ raise BuildException(f'Source {srcdir} does not appear to be a Python project: no pyproject.toml or setup.py')
+
+
+def check_dependency(
+ req_string: str, ancestral_req_strings: Tuple[str, ...] = (), parent_extras: AbstractSet[str] = frozenset()
+) -> Iterator[Tuple[str, ...]]:
+ """
+ Verify that a dependency and all of its dependencies are met.
+
+ :param req_string: Requirement string
+ :param parent_extras: Extras (e.g. "test" in myproject[test])
+ :yields: Unmet dependencies
+ """
+ import packaging.requirements
+
+ if sys.version_info >= (3, 8):
+ import importlib.metadata as importlib_metadata
+ else:
+ import importlib_metadata
+
+ req = packaging.requirements.Requirement(req_string)
+ normalised_req_string = str(req)
+
+ # ``Requirement`` doesn't implement ``__eq__`` so we cannot compare reqs for
+ # equality directly but the string representation is stable.
+ if normalised_req_string in ancestral_req_strings:
+ # cyclical dependency, already checked.
+ return
+
+ if req.marker:
+ extras = frozenset(('',)).union(parent_extras)
+ # a requirement can have multiple extras but ``evaluate`` can
+ # only check one at a time.
+ if all(not req.marker.evaluate(environment={'extra': e}) for e in extras):
+ # if the marker conditions are not met, we pretend that the
+ # dependency is satisfied.
+ return
+
+ try:
+ dist = importlib_metadata.distribution(req.name) # type: ignore[no-untyped-call]
+ except importlib_metadata.PackageNotFoundError:
+ # dependency is not installed in the environment.
+ yield ancestral_req_strings + (normalised_req_string,)
+ else:
+ if req.specifier and not req.specifier.contains(dist.version, prereleases=True):
+ # the installed version is incompatible.
+ yield ancestral_req_strings + (normalised_req_string,)
+ elif dist.requires:
+ for other_req_string in dist.requires:
+ # yields transitive dependencies that are not satisfied.
+ yield from check_dependency(other_req_string, ancestral_req_strings + (normalised_req_string,), req.extras)
+
+
+def _find_typo(dictionary: Mapping[str, str], expected: str) -> None:
+ for obj in dictionary:
+ if difflib.SequenceMatcher(None, expected, obj).ratio() >= 0.8:
+ warnings.warn(
+ f"Found '{obj}' in pyproject.toml, did you mean '{expected}'?",
+ TypoWarning,
+ )
+
+
+def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Dict[str, Any]:
+ # If pyproject.toml is missing (per PEP 517) or [build-system] is missing
+ # (per PEP 518), use default values
+ if 'build-system' not in pyproject_toml:
+ _find_typo(pyproject_toml, 'build-system')
+ return _DEFAULT_BACKEND
+
+ build_system_table = dict(pyproject_toml['build-system'])
+
+ # If [build-system] is present, it must have a ``requires`` field (per PEP 518)
+ if 'requires' not in build_system_table:
+ _find_typo(build_system_table, 'requires')
+ raise BuildSystemTableValidationError('`requires` is a required property')
+ elif not isinstance(build_system_table['requires'], list) or not all(
+ isinstance(i, str) for i in build_system_table['requires']
+ ):
+ raise BuildSystemTableValidationError('`requires` must be an array of strings')
+
+ if 'build-backend' not in build_system_table:
+ _find_typo(build_system_table, 'build-backend')
+ # If ``build-backend`` is missing, inject the legacy setuptools backend
+ # but leave ``requires`` intact to emulate pip
+ build_system_table['build-backend'] = _DEFAULT_BACKEND['build-backend']
+ elif not isinstance(build_system_table['build-backend'], str):
+ raise BuildSystemTableValidationError('`build-backend` must be a string')
+
+ if 'backend-path' in build_system_table and (
+ not isinstance(build_system_table['backend-path'], list)
+ or not all(isinstance(i, str) for i in build_system_table['backend-path'])
+ ):
+ raise BuildSystemTableValidationError('`backend-path` must be an array of strings')
+
+ unknown_props = build_system_table.keys() - {'requires', 'build-backend', 'backend-path'}
+ if unknown_props:
+ raise BuildSystemTableValidationError(f'Unknown properties: {", ".join(unknown_props)}')
+
+ return build_system_table
+
+
+class ProjectBuilder:
+ """
+ The PEP 517 consumer API.
+ """
+
+ def __init__(
+ self,
+ srcdir: PathType,
+ python_executable: str = sys.executable,
+ scripts_dir: Optional[str] = None,
+ runner: RunnerType = pep517.wrappers.default_subprocess_runner,
+ ) -> None:
+ """
+ :param srcdir: The source directory
+ :param scripts_dir: The location of the scripts dir (defaults to the folder where the python executable lives)
+ :param python_executable: The python executable where the backend lives
+ :param runner: An alternative runner for backend subprocesses
+
+ The 'runner', if provided, must accept the following arguments:
+
+ - cmd: a list of strings representing the command and arguments to
+ execute, as would be passed to e.g. 'subprocess.check_call'.
+ - cwd: a string representing the working directory that must be
+ used for the subprocess. Corresponds to the provided srcdir.
+ - extra_environ: a dict mapping environment variable names to values
+ which must be set for the subprocess execution.
+
+ The default runner simply calls the backend hooks in a subprocess, writing backend output
+ to stdout/stderr.
+ """
+ self._srcdir: str = os.path.abspath(srcdir)
+ _validate_source_directory(srcdir)
+
+ spec_file = os.path.join(srcdir, 'pyproject.toml')
+
+ try:
+ with open(spec_file, 'rb') as f:
+ spec = toml_loads(f.read().decode())
+ except FileNotFoundError:
+ spec = {}
+ except PermissionError as e:
+ raise BuildException(f"{e.strerror}: '{e.filename}' ") # noqa: B904 # use raise from
+ except TOMLDecodeError as e:
+ raise BuildException(f'Failed to parse {spec_file}: {e} ') # noqa: B904 # use raise from
+
+ self._build_system = _parse_build_system_table(spec)
+ self._backend = self._build_system['build-backend']
+ self._scripts_dir = scripts_dir
+ self._hook_runner = runner
+ self._hook = pep517.wrappers.Pep517HookCaller(
+ self.srcdir,
+ self._backend,
+ backend_path=self._build_system.get('backend-path'),
+ python_executable=python_executable,
+ runner=self._runner,
+ )
+
+ def _runner(
+ self, cmd: Sequence[str], cwd: Optional[str] = None, extra_environ: Optional[Mapping[str, str]] = None
+ ) -> None:
+ # if a scripts dir is specified, it must be inserted at the start of PATH (avoiding duplicate entries while doing so)
+ if self.scripts_dir is not None:
+ paths: Dict[str, None] = OrderedDict()
+ paths[str(self.scripts_dir)] = None
+ if 'PATH' in os.environ:
+ paths.update((i, None) for i in os.environ['PATH'].split(os.pathsep))
+ extra_environ = {} if extra_environ is None else dict(extra_environ)
+ extra_environ['PATH'] = os.pathsep.join(paths)
+ self._hook_runner(cmd, cwd, extra_environ)
+
+ @property
+ def srcdir(self) -> str:
+ """Project source directory."""
+ return self._srcdir
+
+ @property
+ def python_executable(self) -> str:
+ """
+ The Python executable used to invoke the backend.
+ """
+ # make mypy happy
+ exe: str = self._hook.python_executable
+ return exe
+
+ @python_executable.setter
+ def python_executable(self, value: str) -> None:
+ self._hook.python_executable = value
+
+ @property
+ def scripts_dir(self) -> Optional[str]:
+ """
+ The folder where the scripts are stored for the python executable.
+ """
+ return self._scripts_dir
+
+ @scripts_dir.setter
+ def scripts_dir(self, value: Optional[str]) -> None:
+ self._scripts_dir = value
+
+ @property
+ def build_system_requires(self) -> Set[str]:
+ """
+ The dependencies defined in the ``pyproject.toml``'s
+ ``build-system.requires`` field or the default build dependencies
+ if ``pyproject.toml`` is missing or ``build-system`` is undefined.
+ """
+ return set(self._build_system['requires'])
+
+ def get_requires_for_build(self, distribution: str, config_settings: Optional[ConfigSettingsType] = None) -> Set[str]:
+ """
+ Return the dependencies defined by the backend in addition to
+ :attr:`build_system_requires` for a given distribution.
+
+ :param distribution: Distribution to get the dependencies of
+ (``sdist`` or ``wheel``)
+ :param config_settings: Config settings for the build backend
+ """
+ self.log(f'Getting build dependencies for {distribution}...')
+ hook_name = f'get_requires_for_build_{distribution}'
+ get_requires = getattr(self._hook, hook_name)
+
+ with self._handle_backend(hook_name):
+ return set(get_requires(config_settings))
+
+ def check_dependencies(
+ self, distribution: str, config_settings: Optional[ConfigSettingsType] = None
+ ) -> Set[Tuple[str, ...]]:
+ """
+ Return the dependencies which are not satisfied from the combined set of
+ :attr:`build_system_requires` and :meth:`get_requires_for_build` for a given
+ distribution.
+
+ :param distribution: Distribution to check (``sdist`` or ``wheel``)
+ :param config_settings: Config settings for the build backend
+ :returns: Set of variable-length unmet dependency tuples
+ """
+ dependencies = self.get_requires_for_build(distribution, config_settings).union(self.build_system_requires)
+ return {u for d in dependencies for u in check_dependency(d)}
+
+ def prepare(
+ self, distribution: str, output_directory: PathType, config_settings: Optional[ConfigSettingsType] = None
+ ) -> Optional[str]:
+ """
+ Prepare metadata for a distribution.
+
+ :param distribution: Distribution to build (must be ``wheel``)
+ :param output_directory: Directory to put the prepared metadata in
+ :param config_settings: Config settings for the build backend
+ :returns: The full path to the prepared metadata directory
+ """
+ self.log(f'Getting metadata for {distribution}...')
+ try:
+ return self._call_backend(
+ f'prepare_metadata_for_build_{distribution}',
+ output_directory,
+ config_settings,
+ _allow_fallback=False,
+ )
+ except BuildBackendException as exception:
+ if isinstance(exception.exception, pep517.wrappers.HookMissing):
+ return None
+ raise
+
+ def build(
+ self,
+ distribution: str,
+ output_directory: PathType,
+ config_settings: Optional[ConfigSettingsType] = None,
+ metadata_directory: Optional[str] = None,
+ ) -> str:
+ """
+ Build a distribution.
+
+ :param distribution: Distribution to build (``sdist`` or ``wheel``)
+ :param output_directory: Directory to put the built distribution in
+ :param config_settings: Config settings for the build backend
+ :param metadata_directory: If provided, should be the return value of a
+ previous ``prepare`` call on the same ``distribution`` kind
+ :returns: The full path to the built distribution
+ """
+ self.log(f'Building {distribution}...')
+ kwargs = {} if metadata_directory is None else {'metadata_directory': metadata_directory}
+ return self._call_backend(f'build_{distribution}', output_directory, config_settings, **kwargs)
+
+ def metadata_path(self, output_directory: PathType) -> str:
+ """
+ Generate the metadata directory of a distribution and return its path.
+
+ If the backend does not support the ``prepare_metadata_for_build_wheel``
+ hook, a wheel will be built and the metadata will be extracted from it.
+
+ :param output_directory: Directory to put the metadata distribution in
+ :returns: The path of the metadata directory
+ """
+ # prepare_metadata hook
+ metadata = self.prepare('wheel', output_directory)
+ if metadata is not None:
+ return metadata
+
+ # fallback to build_wheel hook
+ wheel = self.build('wheel', output_directory)
+ match = _WHEEL_NAME_REGEX.match(os.path.basename(wheel))
+ if not match:
+ raise ValueError('Invalid wheel')
+ distinfo = f"{match['distribution']}-{match['version']}.dist-info"
+ member_prefix = f'{distinfo}/'
+ with zipfile.ZipFile(wheel) as w:
+ w.extractall(
+ output_directory,
+ (member for member in w.namelist() if member.startswith(member_prefix)),
+ )
+ return os.path.join(output_directory, distinfo)
+
+ def _call_backend(
+ self, hook_name: str, outdir: PathType, config_settings: Optional[ConfigSettingsType] = None, **kwargs: Any
+ ) -> str:
+ outdir = os.path.abspath(outdir)
+
+ callback = getattr(self._hook, hook_name)
+
+ if os.path.exists(outdir):
+ if not os.path.isdir(outdir):
+ raise BuildException(f"Build path '{outdir}' exists and is not a directory")
+ else:
+ os.makedirs(outdir)
+
+ with self._handle_backend(hook_name):
+ basename: str = callback(outdir, config_settings, **kwargs)
+
+ return os.path.join(outdir, basename)
+
+ @contextlib.contextmanager
+ def _handle_backend(self, hook: str) -> Iterator[None]:
+ with _working_directory(self.srcdir):
+ try:
+ yield
+ except pep517.wrappers.BackendUnavailable as exception:
+ raise BuildBackendException( # noqa: B904 # use raise from
+ exception,
+ f"Backend '{self._backend}' is not available.",
+ sys.exc_info(),
+ )
+ except subprocess.CalledProcessError as exception:
+ raise BuildBackendException( # noqa: B904 # use raise from
+ exception, f'Backend subprocess exited when trying to invoke {hook}'
+ )
+ except Exception as exception:
+ raise BuildBackendException(exception, exc_info=sys.exc_info()) # noqa: B904 # use raise from
+
+ @staticmethod
+ def log(message: str) -> None:
+ """
+ Log a message.
+
+ The default implementation uses the logging module but this function can be
+ overridden by users to have a different implementation.
+
+ :param message: Message to output
+ """
+ if sys.version_info >= (3, 8):
+ _logger.log(logging.INFO, message, stacklevel=2)
+ else:
+ _logger.log(logging.INFO, message)
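+
+# A minimal usage sketch of ProjectBuilder without build isolation (the project
+# path is hypothetical and the build dependencies are assumed to already be
+# installed in the current environment):
+#
+#     builder = ProjectBuilder('path/to/project')
+#     if not builder.check_dependencies('wheel'):
+#         wheel_path = builder.build('wheel', 'dist')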
+
+
+__all__ = [
+ '__version__',
+ 'BuildSystemTableValidationError',
+ 'BuildBackendException',
+ 'BuildException',
+ 'ConfigSettingsType',
+ 'FailedProcessError',
+ 'ProjectBuilder',
+ 'RunnerType',
+ 'TypoWarning',
+ 'check_dependency',
+]
+
+
+def __dir__() -> List[str]:
+ return __all__
diff --git a/src/build/__main__.py b/src/build/__main__.py
new file mode 100644
index 0000000..67b21d1
--- /dev/null
+++ b/src/build/__main__.py
@@ -0,0 +1,397 @@
+# SPDX-License-Identifier: MIT
+
+
+import argparse
+import contextlib
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+import traceback
+import warnings
+
+from typing import Dict, Iterator, List, NoReturn, Optional, Sequence, TextIO, Type, Union
+
+import build
+
+from build import BuildBackendException, BuildException, ConfigSettingsType, FailedProcessError, PathType, ProjectBuilder
+from build.env import IsolatedEnvBuilder
+
+
+_COLORS = {
+ 'red': '\33[91m',
+ 'green': '\33[92m',
+ 'yellow': '\33[93m',
+ 'bold': '\33[1m',
+ 'dim': '\33[2m',
+ 'underline': '\33[4m',
+ 'reset': '\33[0m',
+}
+_NO_COLORS = {color: '' for color in _COLORS}
+
+
+def _init_colors() -> Dict[str, str]:
+ if 'NO_COLOR' in os.environ:
+ if 'FORCE_COLOR' in os.environ:
+ warnings.warn('Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color')
+ return _NO_COLORS
+ elif 'FORCE_COLOR' in os.environ or sys.stdout.isatty():
+ return _COLORS
+ return _NO_COLORS
+
+
+_STYLES = _init_colors()
+
+
+def _cprint(fmt: str = '', msg: str = '') -> None:
+ print(fmt.format(msg, **_STYLES), flush=True)
+
+
+def _showwarning(
+ message: Union[Warning, str],
+ category: Type[Warning],
+ filename: str,
+ lineno: int,
+ file: Optional[TextIO] = None,
+ line: Optional[str] = None,
+) -> None: # pragma: no cover
+ _cprint('{yellow}WARNING{reset} {}', str(message))
+
+
+def _setup_cli() -> None:
+ warnings.showwarning = _showwarning
+
+ if platform.system() == 'Windows':
+ try:
+ import colorama
+
+ colorama.init()
+ except ModuleNotFoundError:
+ pass
+
+
+def _error(msg: str, code: int = 1) -> NoReturn: # pragma: no cover
+ """
+ Print an error message and exit. Will color the output when writing to a TTY.
+
+ :param msg: Error message
+ :param code: Error code
+ """
+ _cprint('{red}ERROR{reset} {}', msg)
+ raise SystemExit(code)
+
+
+class _ProjectBuilder(ProjectBuilder):
+ @staticmethod
+ def log(message: str) -> None:
+ _cprint('{bold}* {}{reset}', message)
+
+
+class _IsolatedEnvBuilder(IsolatedEnvBuilder):
+ @staticmethod
+ def log(message: str) -> None:
+ _cprint('{bold}* {}{reset}', message)
+
+
+def _format_dep_chain(dep_chain: Sequence[str]) -> str:
+ return ' -> '.join(dep.partition(';')[0].strip() for dep in dep_chain)
+
+
+def _build_in_isolated_env(
+ builder: ProjectBuilder, outdir: PathType, distribution: str, config_settings: Optional[ConfigSettingsType]
+) -> str:
+ with _IsolatedEnvBuilder() as env:
+ builder.python_executable = env.executable
+ builder.scripts_dir = env.scripts_dir
+ # first install the build dependencies
+ env.install(builder.build_system_requires)
+ # then get the extra required dependencies from the backend (which was installed in the call above :P)
+ env.install(builder.get_requires_for_build(distribution))
+ return builder.build(distribution, outdir, config_settings or {})
+
+
+def _build_in_current_env(
+ builder: ProjectBuilder,
+ outdir: PathType,
+ distribution: str,
+ config_settings: Optional[ConfigSettingsType],
+ skip_dependency_check: bool = False,
+) -> str:
+ if not skip_dependency_check:
+ missing = builder.check_dependencies(distribution)
+ if missing:
+ dependencies = ''.join('\n\t' + dep for deps in missing for dep in (deps[0], _format_dep_chain(deps[1:])) if dep)
+ _cprint()
+ _error(f'Missing dependencies:{dependencies}')
+
+ return builder.build(distribution, outdir, config_settings or {})
+
+
+def _build(
+ isolation: bool,
+ builder: ProjectBuilder,
+ outdir: PathType,
+ distribution: str,
+ config_settings: Optional[ConfigSettingsType],
+ skip_dependency_check: bool,
+) -> str:
+ if isolation:
+ return _build_in_isolated_env(builder, outdir, distribution, config_settings)
+ else:
+ return _build_in_current_env(builder, outdir, distribution, config_settings, skip_dependency_check)
+
+
+@contextlib.contextmanager
+def _handle_build_error() -> Iterator[None]:
+ try:
+ yield
+ except (BuildException, FailedProcessError) as e:
+ _error(str(e))
+ except BuildBackendException as e:
+ if isinstance(e.exception, subprocess.CalledProcessError):
+ _cprint()
+ _error(str(e))
+
+ if e.exc_info:
+ tb_lines = traceback.format_exception(
+ e.exc_info[0],
+ e.exc_info[1],
+ e.exc_info[2],
+ limit=-1,
+ )
+ tb = ''.join(tb_lines)
+ else:
+ tb = traceback.format_exc(-1)
+ _cprint('\n{dim}{}{reset}\n', tb.strip('\n'))
+ _error(str(e))
+
+
+def _natural_language_list(elements: Sequence[str]) -> str:
+ if len(elements) == 0:
+ raise IndexError('no elements')
+ elif len(elements) == 1:
+ return elements[0]
+ else:
+ return '{} and {}'.format(
+ ', '.join(elements[:-1]),
+ elements[-1],
+ )
+
+
+def build_package(
+ srcdir: PathType,
+ outdir: PathType,
+ distributions: Sequence[str],
+ config_settings: Optional[ConfigSettingsType] = None,
+ isolation: bool = True,
+ skip_dependency_check: bool = False,
+) -> Sequence[str]:
+ """
+ Run the build process.
+
+ :param srcdir: Source directory
+ :param outdir: Output directory
+    :param distributions: Distributions to build (``sdist`` and/or ``wheel``)
+ :param config_settings: Configuration settings to be passed to the backend
+ :param isolation: Isolate the build in a separate environment
+ :param skip_dependency_check: Do not perform the dependency check
+ """
+ built: List[str] = []
+ builder = _ProjectBuilder(srcdir)
+ for distribution in distributions:
+ out = _build(isolation, builder, outdir, distribution, config_settings, skip_dependency_check)
+ built.append(os.path.basename(out))
+ return built
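+
+# For example (the paths are hypothetical), building both distributions in the
+# current environment without isolation:
+#
+#     build_package('path/to/project', 'dist', ['sdist', 'wheel'], isolation=False)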
+
+
+def build_package_via_sdist(
+ srcdir: PathType,
+ outdir: PathType,
+ distributions: Sequence[str],
+ config_settings: Optional[ConfigSettingsType] = None,
+ isolation: bool = True,
+ skip_dependency_check: bool = False,
+) -> Sequence[str]:
+ """
+ Build a sdist and then the specified distributions from it.
+
+ :param srcdir: Source directory
+ :param outdir: Output directory
+    :param distributions: Distributions to build (must not include ``sdist``)
+ :param config_settings: Configuration settings to be passed to the backend
+ :param isolation: Isolate the build in a separate environment
+ :param skip_dependency_check: Do not perform the dependency check
+ """
+ if 'sdist' in distributions:
+ raise ValueError('Only binary distributions are allowed but sdist was specified')
+
+ builder = _ProjectBuilder(srcdir)
+ sdist = _build(isolation, builder, outdir, 'sdist', config_settings, skip_dependency_check)
+
+ sdist_name = os.path.basename(sdist)
+ sdist_out = tempfile.mkdtemp(prefix='build-via-sdist-')
+ built: List[str] = []
+ # extract sdist
+ with tarfile.open(sdist) as t:
+ t.extractall(sdist_out)
+ try:
+ builder = _ProjectBuilder(os.path.join(sdist_out, sdist_name[: -len('.tar.gz')]))
+ if distributions:
+ builder.log(f'Building {_natural_language_list(distributions)} from sdist')
+ for distribution in distributions:
+ out = _build(isolation, builder, outdir, distribution, config_settings, skip_dependency_check)
+ built.append(os.path.basename(out))
+ finally:
+ shutil.rmtree(sdist_out, ignore_errors=True)
+ return [sdist_name] + built
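+
+# The default CLI behavior corresponds roughly to the following call
+# (hypothetical paths): the sdist is built first, then the wheel is built from
+# that sdist.
+#
+#     build_package_via_sdist('path/to/project', 'dist', ['wheel'])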
+
+
+def main_parser() -> argparse.ArgumentParser:
+ """
+ Construct the main parser.
+ """
+ parser = argparse.ArgumentParser(
+ description=textwrap.indent(
+ textwrap.dedent(
+ '''
+ A simple, correct PEP 517 build frontend.
+
+ By default, a source distribution (sdist) is built from {srcdir}
+ and a binary distribution (wheel) is built from the sdist.
+ This is recommended as it will ensure the sdist can be used
+ to build wheels.
+
+ Pass -s/--sdist and/or -w/--wheel to build a specific distribution.
+ If you do this, the default behavior will be disabled, and all
+ artifacts will be built from {srcdir} (even if you combine
+ -w/--wheel with -s/--sdist, the wheel will be built from {srcdir}).
+ '''
+ ).strip(),
+ ' ',
+ ),
+ formatter_class=argparse.RawTextHelpFormatter,
+ )
+ parser.add_argument(
+ 'srcdir',
+ type=str,
+ nargs='?',
+ default=os.getcwd(),
+ help='source directory (defaults to current directory)',
+ )
+ parser.add_argument(
+ '--version',
+ '-V',
+ action='version',
+ version=f"build {build.__version__} ({','.join(build.__path__)})",
+ )
+ parser.add_argument(
+ '--sdist',
+ '-s',
+ action='store_true',
+ help='build a source distribution (disables the default behavior)',
+ )
+ parser.add_argument(
+ '--wheel',
+ '-w',
+ action='store_true',
+ help='build a wheel (disables the default behavior)',
+ )
+ parser.add_argument(
+ '--outdir',
+ '-o',
+ type=str,
+ help=f'output directory (defaults to {{srcdir}}{os.sep}dist)',
+ )
+ parser.add_argument(
+ '--skip-dependency-check',
+ '-x',
+ action='store_true',
+ help='do not check that build dependencies are installed',
+ )
+ parser.add_argument(
+ '--no-isolation',
+ '-n',
+ action='store_true',
+ help='do not isolate the build in a virtual environment',
+ )
+ parser.add_argument(
+ '--config-setting',
+ '-C',
+ action='append',
+ help='pass options to the backend. options which begin with a hyphen must be in the form of '
+ '"--config-setting=--opt(=value)" or "-C--opt(=value)"',
+ )
+ return parser
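+
+# A small sketch of how CLI arguments map onto this parser (the argument values
+# are illustrative only):
+#
+#     args = main_parser().parse_args(['--wheel', '--outdir', 'out', '.'])
+#     assert args.wheel and args.outdir == 'out' and args.srcdir == '.'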
+
+
+def main(cli_args: Sequence[str], prog: Optional[str] = None) -> None: # noqa: C901
+ """
+ Parse the CLI arguments and invoke the build process.
+
+ :param cli_args: CLI arguments
+ :param prog: Program name to show in help text
+ """
+ _setup_cli()
+ parser = main_parser()
+ if prog:
+ parser.prog = prog
+ args = parser.parse_args(cli_args)
+
+ distributions = []
+ config_settings = {}
+
+ if args.config_setting:
+ for arg in args.config_setting:
+ setting, _, value = arg.partition('=')
+ if setting not in config_settings:
+ config_settings[setting] = value
+ else:
+ if not isinstance(config_settings[setting], list):
+ config_settings[setting] = [config_settings[setting]]
+
+ config_settings[setting].append(value)
+
+ if args.sdist:
+ distributions.append('sdist')
+ if args.wheel:
+ distributions.append('wheel')
+
+ # outdir is relative to srcdir only if omitted.
+ outdir = os.path.join(args.srcdir, 'dist') if args.outdir is None else args.outdir
+
+ if distributions:
+ build_call = build_package
+ else:
+ build_call = build_package_via_sdist
+ distributions = ['wheel']
+ try:
+ with _handle_build_error():
+ built = build_call(
+ args.srcdir, outdir, distributions, config_settings, not args.no_isolation, args.skip_dependency_check
+ )
+ artifact_list = _natural_language_list(
+ ['{underline}{}{reset}{bold}{green}'.format(artifact, **_STYLES) for artifact in built]
+ )
+ _cprint('{bold}{green}Successfully built {}{reset}', artifact_list)
+ except Exception as e: # pragma: no cover
+ tb = traceback.format_exc().strip('\n')
+ _cprint('\n{dim}{}{reset}\n', tb)
+ _error(str(e))
+
+
+def entrypoint() -> None:
+ main(sys.argv[1:])
+
+
+if __name__ == '__main__': # pragma: no cover
+ main(sys.argv[1:], 'python -m build')
+
+
+__all__ = [
+ 'main',
+ 'main_parser',
+]
diff --git a/src/build/env.py b/src/build/env.py
new file mode 100644
index 0000000..b4a90a9
--- /dev/null
+++ b/src/build/env.py
@@ -0,0 +1,340 @@
+"""
+Creates and manages isolated build environments.
+"""
+import abc
+import functools
+import logging
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import sysconfig
+import tempfile
+import warnings
+
+from types import TracebackType
+from typing import Callable, Collection, List, Optional, Tuple, Type
+
+import build
+
+
+try:
+ import virtualenv
+except ModuleNotFoundError:
+ virtualenv = None
+
+
+_logger = logging.getLogger(__name__)
+
+
+class IsolatedEnv(metaclass=abc.ABCMeta):
+ """Abstract base of isolated build environments, as required by the build project."""
+
+ @property
+ @abc.abstractmethod
+ def executable(self) -> str:
+ """The executable of the isolated build environment."""
+ raise NotImplementedError
+
+ @property
+ @abc.abstractmethod
+ def scripts_dir(self) -> str:
+ """The scripts directory of the isolated build environment."""
+ raise NotImplementedError
+
+ @abc.abstractmethod
+ def install(self, requirements: Collection[str]) -> None:
+ """
+ Install packages from PEP 508 requirements in the isolated build environment.
+
+ :param requirements: PEP 508 requirements
+ """
+ raise NotImplementedError
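+
+# A hypothetical minimal implementation, shown only to illustrate the required
+# interface; it reuses the current interpreter and assumes the requirements are
+# already satisfied:
+#
+#     class CurrentEnv(IsolatedEnv):
+#         @property
+#         def executable(self) -> str:
+#             return sys.executable
+#
+#         @property
+#         def scripts_dir(self) -> str:
+#             return sysconfig.get_path('scripts')
+#
+#         def install(self, requirements: Collection[str]) -> None:
+#             pass  # nothing to do under the stated assumption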
+
+
+@functools.lru_cache(maxsize=None)
+def _should_use_virtualenv() -> bool:
+ import packaging.requirements
+
+ # virtualenv might be incompatible if it was installed separately
+ # from build. This verifies that virtualenv and all of its
+ # dependencies are installed as specified by build.
+ return virtualenv is not None and not any(
+ packaging.requirements.Requirement(d[1]).name == 'virtualenv'
+ for d in build.check_dependency('build[virtualenv]')
+ if len(d) > 1
+ )
+
+
+def _subprocess(cmd: List[str]) -> None:
+ """Invoke subprocess and output stdout and stderr if it fails."""
+ try:
+ subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ print(e.output.decode(), end='', file=sys.stderr)
+ raise e
+
+
+class IsolatedEnvBuilder:
+ """Builder object for isolated environments."""
+
+ def __init__(self) -> None:
+ self._path: Optional[str] = None
+
+ def __enter__(self) -> IsolatedEnv:
+ """
+ Create an isolated build environment.
+
+ :return: The isolated build environment
+ """
+ # Call ``realpath`` to prevent spurious warning from being emitted
+ # that the venv location has changed on Windows. The username is
+ # DOS-encoded in the output of tempfile - the location is the same
+ # but the representation of it is different, which confuses venv.
+ # Ref: https://bugs.python.org/issue46171
+ self._path = os.path.realpath(tempfile.mkdtemp(prefix='build-env-'))
+ try:
+ # use virtualenv when available (as it's faster than venv)
+ if _should_use_virtualenv():
+ self.log('Creating virtualenv isolated environment...')
+ executable, scripts_dir = _create_isolated_env_virtualenv(self._path)
+ else:
+ self.log('Creating venv isolated environment...')
+ executable, scripts_dir = _create_isolated_env_venv(self._path)
+ return _IsolatedEnvVenvPip(
+ path=self._path,
+ python_executable=executable,
+ scripts_dir=scripts_dir,
+ log=self.log,
+ )
+ except Exception: # cleanup folder if creation fails
+ self.__exit__(*sys.exc_info())
+ raise
+
+ def __exit__(
+ self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
+ ) -> None:
+ """
+ Delete the created isolated build environment.
+
+ :param exc_type: The type of exception raised (if any)
+ :param exc_val: The value of exception raised (if any)
+ :param exc_tb: The traceback of exception raised (if any)
+ """
+ if self._path is not None and os.path.exists(self._path): # in case the user already deleted skip remove
+ shutil.rmtree(self._path)
+
+ @staticmethod
+ def log(message: str) -> None:
+ """
+        Log a message.
+
+        The default implementation uses the logging module but this function can be
+        overridden by users to have a different implementation.
+
+        :param message: Message to output
+ """
+ if sys.version_info >= (3, 8):
+ _logger.log(logging.INFO, message, stacklevel=2)
+ else:
+ _logger.log(logging.INFO, message)
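+
+# A usage sketch (the project path is hypothetical), pairing the builder with
+# build.ProjectBuilder to run a build inside the temporary environment:
+#
+#     with IsolatedEnvBuilder() as env:
+#         project = build.ProjectBuilder('path/to/project')
+#         project.python_executable = env.executable
+#         project.scripts_dir = env.scripts_dir
+#         env.install(project.build_system_requires)
+#         env.install(project.get_requires_for_build('wheel'))
+#         project.build('wheel', 'dist')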
+
+
+class _IsolatedEnvVenvPip(IsolatedEnv):
+ """
+    Isolated build environment backed by a virtual environment and pip.
+
+ Non-standard paths injected directly to sys.path will still be passed to the environment.
+ """
+
+ def __init__(
+ self,
+ path: str,
+ python_executable: str,
+ scripts_dir: str,
+ log: Callable[[str], None],
+ ) -> None:
+ """
+ :param path: The path where the environment exists
+        :param python_executable: The python executable within the environment
+        :param scripts_dir: The scripts directory within the environment
+        :param log: Log function
+ """
+ self._path = path
+ self._python_executable = python_executable
+ self._scripts_dir = scripts_dir
+ self._log = log
+
+ @property
+ def path(self) -> str:
+ """The location of the isolated build environment."""
+ return self._path
+
+ @property
+ def executable(self) -> str:
+ """The python executable of the isolated build environment."""
+ return self._python_executable
+
+ @property
+ def scripts_dir(self) -> str:
+ return self._scripts_dir
+
+ def install(self, requirements: Collection[str]) -> None:
+ """
+ Install packages from PEP 508 requirements in the isolated build environment.
+
+ :param requirements: PEP 508 requirement specification to install
+
+        :note: Passing non-PEP 508 strings will result in undefined behavior; you *should not* rely on it. It is
+            merely an implementation detail and may change at any time without warning.
+ """
+ if not requirements:
+ return
+
+ self._log('Installing packages in isolated environment... ({})'.format(', '.join(sorted(requirements))))
+
+ # pip does not honour environment markers in command line arguments
+ # but it does for requirements from a file
+ with tempfile.NamedTemporaryFile('w+', prefix='build-reqs-', suffix='.txt', delete=False) as req_file:
+ req_file.write(os.linesep.join(requirements))
+ try:
+ cmd = [
+ self.executable,
+ '-Im',
+ 'pip',
+ 'install',
+ '--use-pep517',
+ '--no-warn-script-location',
+ '-r',
+ os.path.abspath(req_file.name),
+ ]
+ _subprocess(cmd)
+ finally:
+ os.unlink(req_file.name)
+
+
+def _create_isolated_env_virtualenv(path: str) -> Tuple[str, str]:
+ """
+    Use the virtualenv package to provision an isolated build environment.
+
+ :param path: The path where to create the isolated build environment
+ :return: The Python executable and script folder
+ """
+ cmd = [str(path), '--no-setuptools', '--no-wheel', '--activators', '']
+ result = virtualenv.cli_run(cmd, setup_logging=False)
+ executable = str(result.creator.exe)
+ script_dir = str(result.creator.script_dir)
+ return executable, script_dir
+
+
+@functools.lru_cache(maxsize=None)
+def _fs_supports_symlink() -> bool:
+ """Return True if symlinks are supported"""
+ # Using definition used by venv.main()
+ if not sys.platform.startswith('win'):
+ return True
+
+ # Windows may support symlinks (setting in Windows 10)
+ with tempfile.NamedTemporaryFile(prefix='build-symlink-') as tmp_file:
+        dest = f'{tmp_file.name}-b'  # use the file's path, not the wrapper object's repr
+ try:
+ os.symlink(tmp_file.name, dest)
+ os.unlink(dest)
+ return True
+ except (OSError, NotImplementedError, AttributeError):
+ return False
+
+
+def _create_isolated_env_venv(path: str) -> Tuple[str, str]:
+ """
+    Create an isolated build environment using the venv module from the standard library.
+
+ :param path: The path where to create the isolated build environment
+ :return: The Python executable and script folder
+ """
+ import venv
+
+ import packaging.version
+
+ if sys.version_info < (3, 8):
+ import importlib_metadata as metadata
+ else:
+ from importlib import metadata
+
+ symlinks = _fs_supports_symlink()
+ try:
+ with warnings.catch_warnings():
+ if sys.version_info[:3] == (3, 11, 0):
+ warnings.filterwarnings('ignore', 'check_home argument is deprecated and ignored.', DeprecationWarning)
+ venv.EnvBuilder(with_pip=True, symlinks=symlinks).create(path)
+ except subprocess.CalledProcessError as exc:
+ raise build.FailedProcessError(exc, 'Failed to create venv. Maybe try installing virtualenv.') from None
+
+ executable, script_dir, purelib = _find_executable_and_scripts(path)
+
+ # Get the version of pip in the environment
+ pip_distribution = next(iter(metadata.distributions(name='pip', path=[purelib]))) # type: ignore[no-untyped-call]
+ current_pip_version = packaging.version.Version(pip_distribution.version)
+
+ if platform.system() == 'Darwin' and int(platform.mac_ver()[0].split('.')[0]) >= 11:
+ # macOS 11+ name scheme change requires 20.3. Intel macOS 11.0 can be told to report 10.16 for backwards
+ # compatibility; but that also fixes earlier versions of pip so this is only needed for 11+.
+ is_apple_silicon_python = platform.machine() != 'x86_64'
+ minimum_pip_version = '21.0.1' if is_apple_silicon_python else '20.3.0'
+ else:
+ # PEP-517 and manylinux1 was first implemented in 19.1
+ minimum_pip_version = '19.1.0'
+
+ if current_pip_version < packaging.version.Version(minimum_pip_version):
+ _subprocess([executable, '-m', 'pip', 'install', f'pip>={minimum_pip_version}'])
+
+    # Uninstall the setuptools that ensurepip installs, so it cannot leak into and break the isolation
+ _subprocess([executable, '-m', 'pip', 'uninstall', 'setuptools', '-y'])
+ return executable, script_dir
+
+
+def _find_executable_and_scripts(path: str) -> Tuple[str, str, str]:
+ """
+ Detect the Python executable and script folder of a virtual environment.
+
+ :param path: The location of the virtual environment
+ :return: The Python executable, script folder, and purelib folder
+ """
+ config_vars = sysconfig.get_config_vars().copy() # globally cached, copy before altering it
+ config_vars['base'] = path
+ scheme_names = sysconfig.get_scheme_names()
+ if 'venv' in scheme_names:
+ # Python distributors with custom default installation scheme can set a
+ # scheme that can't be used to expand the paths in a venv.
+ # This can happen if build itself is not installed in a venv.
+ # The distributors are encouraged to set a "venv" scheme to be used for this.
+ # See https://bugs.python.org/issue45413
+ # and https://github.com/pypa/virtualenv/issues/2208
+ paths = sysconfig.get_paths(scheme='venv', vars=config_vars)
+ elif 'posix_local' in scheme_names:
+ # The Python that ships on Debian/Ubuntu varies the default scheme to
+ # install to /usr/local
+ # But it does not (yet) set the "venv" scheme.
+    # If the Debian "posix_local" scheme is available but "venv"
+    # is not, we use "posix_prefix" instead, which is venv-compatible there.
+ paths = sysconfig.get_paths(scheme='posix_prefix', vars=config_vars)
+ elif 'osx_framework_library' in scheme_names:
+ # The Python that ships with the macOS developer tools varies the
+ # default scheme depending on whether the ``sys.prefix`` is part of a framework.
+ # But it does not (yet) set the "venv" scheme.
+ # If the Apple-custom "osx_framework_library" scheme is available but "venv"
+ # is not, we use "posix_prefix" instead which is venv-compatible there.
+ paths = sysconfig.get_paths(scheme='posix_prefix', vars=config_vars)
+ else:
+ paths = sysconfig.get_paths(vars=config_vars)
+ executable = os.path.join(paths['scripts'], 'python.exe' if sys.platform.startswith('win') else 'python')
+ if not os.path.exists(executable):
+ raise RuntimeError(f'Virtual environment creation failed, executable {executable} missing')
+
+ return executable, paths['scripts'], paths['purelib']
+
+
+__all__ = [
+ 'IsolatedEnvBuilder',
+ 'IsolatedEnv',
+]
diff --git a/src/build/py.typed b/src/build/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/py.typed
diff --git a/src/build/util.py b/src/build/util.py
new file mode 100644
index 0000000..6c918ce
--- /dev/null
+++ b/src/build/util.py
@@ -0,0 +1,60 @@
+# SPDX-License-Identifier: MIT
+
+import os
+import pathlib
+import sys
+import tempfile
+
+import pep517
+
+import build
+import build.env
+
+
+if sys.version_info >= (3, 8):
+ import importlib.metadata as importlib_metadata
+else:
+ import importlib_metadata
+
+
+def _project_wheel_metadata(builder: build.ProjectBuilder) -> 'importlib_metadata.PackageMetadata':
+ with tempfile.TemporaryDirectory() as tmpdir:
+ path = pathlib.Path(builder.metadata_path(tmpdir))
+ # https://github.com/python/importlib_metadata/pull/343
+ return importlib_metadata.PathDistribution(path).metadata # type: ignore[arg-type]
+
+
+def project_wheel_metadata(
+ srcdir: build.PathType,
+ isolated: bool = True,
+) -> 'importlib_metadata.PackageMetadata':
+ """
+ Return the wheel metadata for a project.
+
+ Uses the ``prepare_metadata_for_build_wheel`` hook if available,
+ otherwise ``build_wheel``.
+
+ :param srcdir: Project source directory
+    :param isolated: Whether to invoke the backend in an isolated environment
+                     created on the fly, or directly in the current environment.
+ """
+ builder = build.ProjectBuilder(
+ os.fspath(srcdir),
+ runner=pep517.quiet_subprocess_runner,
+ )
+
+ if not isolated:
+ return _project_wheel_metadata(builder)
+
+ with build.env.IsolatedEnvBuilder() as env:
+ builder.python_executable = env.executable
+ builder.scripts_dir = env.scripts_dir
+ env.install(builder.build_system_requires)
+ env.install(builder.get_requires_for_build('wheel'))
+ return _project_wheel_metadata(builder)
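+
+# A usage sketch (assumes the current directory contains a valid pyproject.toml
+# based project):
+#
+#     metadata = project_wheel_metadata('.')
+#     print(metadata['Name'], metadata['Version'])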
+
+
+__all__ = [
+ 'project_wheel_metadata',
+]
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..d65b64e
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,111 @@
+# SPDX-License-Identifier: MIT
+
+import os
+import os.path
+import shutil
+import stat
+import sys
+import sysconfig
+import tempfile
+
+import pytest
+
+import build.env
+
+
+def pytest_addoption(parser):
+ os.environ['PYTHONWARNINGS'] = 'ignore:DEPRECATION::pip._internal.cli.base_command' # for when not run within tox
+ os.environ['PIP_DISABLE_PIP_VERSION_CHECK'] = '1' # do not pollute stderr with upgrade advisory
+ parser.addoption('--run-integration', action='store_true', help='run the integration tests')
+ parser.addoption('--only-integration', action='store_true', help='only run the integration tests')
+
+
+PYPY3_WIN_VENV_BAD = (
+ sys.implementation.name == 'pypy' and sys.implementation.version < (7, 3, 9) and sys.platform.startswith('win')
+)
+PYPY3_WIN_M = 'https://foss.heptapod.net/pypy/pypy/-/issues/3323 and https://foss.heptapod.net/pypy/pypy/-/issues/3321'
+
+
+def pytest_collection_modifyitems(config, items):
+ skip_int = pytest.mark.skip(reason='integration tests not run (no --run-integration flag)')
+ skip_other = pytest.mark.skip(reason='only integration tests are run (got --only-integration flag)')
+
+ if config.getoption('--run-integration') and config.getoption('--only-integration'): # pragma: no cover
+ raise pytest.UsageError("--run-integration and --only-integration can't be used together, choose one")
+
+ if len(items) == 1: # do not require flags if called directly
+ return
+ for item in items:
+ is_integration_file = is_integration(item)
+ if PYPY3_WIN_VENV_BAD and item.get_closest_marker('pypy3323bug') and os.environ.get('PYPY3323BUG', None):
+ item.add_marker(pytest.mark.xfail(reason=PYPY3_WIN_M, strict=False))
+ if PYPY3_WIN_VENV_BAD and item.get_closest_marker('isolated'):
+ if not (is_integration_file and item.originalname == 'test_build') or (
+ hasattr(item, 'callspec') and '--no-isolation' not in item.callspec.params.get('args', [])
+ ):
+ item.add_marker(pytest.mark.xfail(reason=PYPY3_WIN_M, strict=True))
+ if is_integration_file: # pragma: no cover
+ if not config.getoption('--run-integration') and not config.getoption('--only-integration'):
+ item.add_marker(skip_int)
+ elif config.getoption('--only-integration'): # pragma: no cover
+ item.add_marker(skip_other)
+ # run integration tests after unit tests
+ items.sort(key=lambda i: 1 if is_integration(i) else 0)
+
+
+def is_integration(item):
+ return os.path.basename(item.location[0]) == 'test_integration.py'
+
+
+@pytest.fixture(scope='session', autouse=True)
+def ensure_sysconfig_vars_created():
+ # the config vars are globally cached and may use get_path, make sure they are created
+ sysconfig.get_config_vars()
+
+
+@pytest.fixture
+def packages_path():
+ return os.path.realpath(os.path.join(__file__, '..', 'packages'))
+
+
+def generate_package_path_fixture(package_name):
+ @pytest.fixture
+ def fixture(packages_path):
+ return os.path.join(packages_path, package_name)
+
+ return fixture
+
+
+# Generate path fixtures dynamically.
+package_names = os.listdir(os.path.join(os.path.dirname(__file__), 'packages'))
+for package_name in package_names:
+ normalized_name = package_name.replace('-', '_')
+ fixture_name = f'package_{normalized_name}'
+ globals()[fixture_name] = generate_package_path_fixture(package_name)
+
+
+@pytest.fixture
+def test_no_permission(packages_path):
+ path = os.path.join(packages_path, 'test-no-permission')
+ file = os.path.join(path, 'pyproject.toml')
+ orig_stat = os.stat(file).st_mode
+
+ os.chmod(file, ~stat.S_IRWXU)
+
+ yield os.path.join(packages_path, 'test-no-permission')
+
+ os.chmod(file, orig_stat)
+
+
+@pytest.fixture
+def tmp_dir():
+ path = tempfile.mkdtemp(prefix='python-build-test-')
+
+ yield path
+
+ shutil.rmtree(path)
+
+
+@pytest.fixture(autouse=True)
+def force_venv(mocker):
+ mocker.patch.object(build.env, '_should_use_virtualenv', lambda: False)
diff --git a/tests/constraints.txt b/tests/constraints.txt
new file mode 100644
index 0000000..b073bd9
--- /dev/null
+++ b/tests/constraints.txt
@@ -0,0 +1,9 @@
+importlib-metadata==0.22
+packaging==19.0
+pep517==0.9.1
+setuptools==42.0.0; python_version < "3.10"
+setuptools==56.0.0; python_version >= "3.10"
+toml==0.10.0
+tomli==1.0.0
+virtualenv==20.0.35
+wheel==0.36.0
diff --git a/tests/packages/inline/build.py b/tests/packages/inline/build.py
new file mode 100644
index 0000000..a8e0dcb
--- /dev/null
+++ b/tests/packages/inline/build.py
@@ -0,0 +1,63 @@
+import os
+import sys
+
+from textwrap import dedent
+from zipfile import ZipFile
+
+
+name = 'demo_pkg_inline'
+pkg_name = name.replace('_', '-')
+
+version = '1.0.0'
+dist_info = f'{name}-{version}.dist-info'
+
+metadata = f'{dist_info}/METADATA'
+wheel = f'{dist_info}/WHEEL'
+entry_points = f'{dist_info}/entry_points.txt'
+record = f'{dist_info}/RECORD'
+init = f'{name}/__init__.py'
+content = {
+ init: f"def do():\n print('greetings from {name}')",
+ metadata: f"""
+ Metadata-Version: 2.1
+ Name: {pkg_name}
+ Version: {version}
+ Summary: Summary of package
+ Home-page: Does not exists
+ Author: someone
+ Author-email: a@o.com
+ License: MIT
+ Platform: ANY
+
+ Desc
+ """,
+ wheel: f"""
+ Wheel-Version: 1.0
+ Generator: {name}-{version}
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+ """,
+ f'{dist_info}/top_level.txt': name,
+ entry_points: '\n[console_scripts]\ndemo-pkg-inline = demo_pkg_inline:do',
+ record: f"""
+ {name}/__init__.py,,
+ {dist_info}/METADATA,,
+ {dist_info}/WHEEL,,
+ {dist_info}/top_level.txt,,
+ {dist_info}/RECORD,,
+ """,
+}
+
+
+def build_wheel(wheel_directory, metadata_directory=None, config_settings=None):
+ base_name = f'{name}-{version}-py{sys.version_info.major}-none-any.whl'
+ path = os.path.join(wheel_directory, base_name)
+ with ZipFile(str(path), 'w') as zip_file_handler:
+ for arc_name, data in content.items():
+ zip_file_handler.writestr(arc_name, dedent(data).strip())
+ print(f'created wheel {path}')
+ return base_name
+
+
+def get_requires_for_build_wheel(config_settings):
+ return []
diff --git a/tests/packages/inline/pyproject.toml b/tests/packages/inline/pyproject.toml
new file mode 100644
index 0000000..dc9ecbb
--- /dev/null
+++ b/tests/packages/inline/pyproject.toml
@@ -0,0 +1,4 @@
+[build-system]
+requires = []
+build-backend = "build"
+backend-path = ["."]
diff --git a/tests/packages/legacy/legacy/__init__.py b/tests/packages/legacy/legacy/__init__.py
new file mode 100644
index 0000000..d5a030b
--- /dev/null
+++ b/tests/packages/legacy/legacy/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: MIT
+
+"""
+legacy - Example legacy package
+"""
+__version__ = '1.0.0'
diff --git a/tests/packages/legacy/setup.py b/tests/packages/legacy/setup.py
new file mode 100644
index 0000000..9feee16
--- /dev/null
+++ b/tests/packages/legacy/setup.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+from setuptools import setup
+
+
+setup(
+ name='legacy',
+ version='1.0.0',
+ author='Filipe Laíns',
+ author_email='lains@archlinux.org',
+ classifiers=[
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ ],
+ packages=['legacy'],
+)
diff --git a/tests/packages/test-bad-backend/pyproject.toml b/tests/packages/test-bad-backend/pyproject.toml
new file mode 100644
index 0000000..c199a33
--- /dev/null
+++ b/tests/packages/test-bad-backend/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = []
+build-backend = "nonsense_package"
diff --git a/tests/packages/test-bad-syntax/pyproject.toml b/tests/packages/test-bad-syntax/pyproject.toml
new file mode 100644
index 0000000..cb84496
--- /dev/null
+++ b/tests/packages/test-bad-syntax/pyproject.toml
@@ -0,0 +1,2 @@
+[build-system]
+requires = ['bad' 'syntax']
diff --git a/tests/packages/test-bad-wheel/backend_bad_wheel.py b/tests/packages/test-bad-wheel/backend_bad_wheel.py
new file mode 100644
index 0000000..039cae0
--- /dev/null
+++ b/tests/packages/test-bad-wheel/backend_bad_wheel.py
@@ -0,0 +1,7 @@
+# SPDX-License-Identifier: MIT
+
+from setuptools.build_meta import build_sdist # noqa: F401
+
+
+def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+ return 'not a wheel'
diff --git a/tests/packages/test-bad-wheel/pyproject.toml b/tests/packages/test-bad-wheel/pyproject.toml
new file mode 100644
index 0000000..77de5df
--- /dev/null
+++ b/tests/packages/test-bad-wheel/pyproject.toml
@@ -0,0 +1,4 @@
+[build-system]
+build-backend = 'backend_bad_wheel'
+backend-path = ['.']
+requires = ['setuptools >= 42.0.0']
diff --git a/tests/packages/test-bad-wheel/setup.cfg b/tests/packages/test-bad-wheel/setup.cfg
new file mode 100644
index 0000000..1a4be2f
--- /dev/null
+++ b/tests/packages/test-bad-wheel/setup.cfg
@@ -0,0 +1,3 @@
+[metadata]
+name = test_bad_wheel
+version = 0.0.1
diff --git a/tests/packages/test-cant-build-via-sdist/backend_bad_sdist.py b/tests/packages/test-cant-build-via-sdist/backend_bad_sdist.py
new file mode 100644
index 0000000..2afdb3d
--- /dev/null
+++ b/tests/packages/test-cant-build-via-sdist/backend_bad_sdist.py
@@ -0,0 +1,23 @@
+# SPDX-License-Identifier: MIT
+
+import os.path
+import tarfile
+import zipfile
+
+
+def build_sdist(sdist_directory, config_settings=None):
+ name = 'test_cant_build_via_sdist-1.0.0'
+ file = f'{name}.tar.gz'
+ with tarfile.open(os.path.join(sdist_directory, file), 'w') as t:
+ t.add('pyproject.toml', f'{name}/pyproject.toml')
+ t.add('backend_bad_sdist.py', f'{name}/backend_bad_sdist.py')
+ return file
+
+
+def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+ if not os.path.isfile('some-file-that-is-needed-for-build.txt'):
+ raise FileNotFoundError('some-file-that-is-needed-for-build.txt is missing!')
+ # pragma: no cover
+ file = 'test_cant_build_via_sdist-1.0.0-py2.py3-none-any.whl'
+ zipfile.ZipFile(os.path.join(wheel_directory, file), 'w').close()
+ return file
diff --git a/tests/packages/test-cant-build-via-sdist/pyproject.toml b/tests/packages/test-cant-build-via-sdist/pyproject.toml
new file mode 100644
index 0000000..e74afad
--- /dev/null
+++ b/tests/packages/test-cant-build-via-sdist/pyproject.toml
@@ -0,0 +1,4 @@
+[build-system]
+build-backend = 'backend_bad_sdist'
+backend-path = ['.']
+requires = []
diff --git a/tests/packages/test-cant-build-via-sdist/some-file-that-is-needed-for-build.txt b/tests/packages/test-cant-build-via-sdist/some-file-that-is-needed-for-build.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/packages/test-cant-build-via-sdist/some-file-that-is-needed-for-build.txt
diff --git a/tests/packages/test-flit/pyproject.toml b/tests/packages/test-flit/pyproject.toml
new file mode 100644
index 0000000..9758b57
--- /dev/null
+++ b/tests/packages/test-flit/pyproject.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ['flit_core >=2,<3']
+build-backend = 'flit_core.buildapi'
+
+[tool.flit.metadata]
+module = 'test_flit'
+author = 'Filipe Laíns'
+author-email = 'lains@archlinux.org'
+classifiers = [
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+]
diff --git a/tests/packages/test-flit/test_flit/__init__.py b/tests/packages/test-flit/test_flit/__init__.py
new file mode 100644
index 0000000..264c2e3
--- /dev/null
+++ b/tests/packages/test-flit/test_flit/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: MIT
+
+"""
+test_flit - Example flit package
+"""
+__version__ = '1.0.0'
diff --git a/tests/packages/test-invalid-requirements/pyproject.toml b/tests/packages/test-invalid-requirements/pyproject.toml
new file mode 100644
index 0000000..11974a0
--- /dev/null
+++ b/tests/packages/test-invalid-requirements/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ['setuptools >= 42.0.0', 'wheel >= 0.36.0', 'this is invalid']
+build-backend = 'setuptools.build_meta'
diff --git a/tests/packages/test-invalid-requirements/setup.cfg b/tests/packages/test-invalid-requirements/setup.cfg
new file mode 100644
index 0000000..aa22d23
--- /dev/null
+++ b/tests/packages/test-invalid-requirements/setup.cfg
@@ -0,0 +1,3 @@
+[metadata]
+name = test_invalid_requirements
+version = 1.0.0
diff --git a/tests/packages/test-metadata/backend.py b/tests/packages/test-metadata/backend.py
new file mode 100644
index 0000000..727dab9
--- /dev/null
+++ b/tests/packages/test-metadata/backend.py
@@ -0,0 +1,43 @@
+# SPDX-License-Identifier: MIT
+
+import pathlib
+import textwrap
+
+
+def get_requires_for_build_wheel(config_settings=None):
+ return ['tomli']
+
+
+def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):
+ import tomli
+
+ with open('pyproject.toml', 'rb') as f:
+ metadata = tomli.load(f)
+
+ distinfo = pathlib.Path(
+ metadata_directory,
+ '{}-{}.dist-info'.format(
+            metadata['project']['name'].replace('-', '_'),
+ metadata['project']['version'],
+ ),
+ )
+ distinfo.mkdir(parents=True, exist_ok=True)
+ distinfo.joinpath('METADATA').write_text(
+ textwrap.dedent(
+ f'''
+ Metadata-Version: 2.2
+ Name: {metadata['project']['name']}
+ Version: {metadata['project']['version']}
+ Summary: {metadata['project']['description']}
+ '''
+ ).strip()
+ )
+ return distinfo.name
+
+
+def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+ raise NotImplementedError
+
+
+def build_sdist(sdist_directory, config_settings=None):
+ raise NotImplementedError
diff --git a/tests/packages/test-metadata/pyproject.toml b/tests/packages/test-metadata/pyproject.toml
new file mode 100644
index 0000000..3900263
--- /dev/null
+++ b/tests/packages/test-metadata/pyproject.toml
@@ -0,0 +1,14 @@
+[build-system]
+requires = []
+build-backend = 'backend'
+backend-path = ['.']
+
+[project]
+name = 'test-metadata'
+version = '1.0.0'
+description = 'hello!'
+
+[tool.black]
+line-length = 127
+skip-string-normalization = true
+target-version = ['py39', 'py38', 'py37', 'py36']
diff --git a/tests/packages/test-no-backend/pyproject.toml b/tests/packages/test-no-backend/pyproject.toml
new file mode 100644
index 0000000..024e9e6
--- /dev/null
+++ b/tests/packages/test-no-backend/pyproject.toml
@@ -0,0 +1,2 @@
+[build-system]
+requires = []
diff --git a/tests/packages/test-no-permission/pyproject.toml b/tests/packages/test-no-permission/pyproject.toml
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/packages/test-no-permission/pyproject.toml
diff --git a/tests/packages/test-no-prepare/backend_no_prepare.py b/tests/packages/test-no-prepare/backend_no_prepare.py
new file mode 100644
index 0000000..206de1a
--- /dev/null
+++ b/tests/packages/test-no-prepare/backend_no_prepare.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from setuptools.build_meta import build_sdist, build_wheel # noqa: F401
diff --git a/tests/packages/test-no-prepare/pyproject.toml b/tests/packages/test-no-prepare/pyproject.toml
new file mode 100644
index 0000000..c6ca5f8
--- /dev/null
+++ b/tests/packages/test-no-prepare/pyproject.toml
@@ -0,0 +1,4 @@
+[build-system]
+build-backend = 'backend_no_prepare'
+backend-path = ['.']
+requires = ['setuptools >= 42.0.0', 'wheel >= 0.36.0']
diff --git a/tests/packages/test-no-prepare/setup.cfg b/tests/packages/test-no-prepare/setup.cfg
new file mode 100644
index 0000000..c9e0a96
--- /dev/null
+++ b/tests/packages/test-no-prepare/setup.cfg
@@ -0,0 +1,3 @@
+[metadata]
+name = test_no_prepare
+version = 1.0.0
diff --git a/tests/packages/test-no-project/empty.txt b/tests/packages/test-no-project/empty.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/packages/test-no-project/empty.txt
diff --git a/tests/packages/test-no-requires/pyproject.toml b/tests/packages/test-no-requires/pyproject.toml
new file mode 100644
index 0000000..4f25948
--- /dev/null
+++ b/tests/packages/test-no-requires/pyproject.toml
@@ -0,0 +1,2 @@
+[build-system]
+build-backend = 'something'
diff --git a/tests/packages/test-optional-hooks/hookless_backend.py b/tests/packages/test-optional-hooks/hookless_backend.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/packages/test-optional-hooks/hookless_backend.py
diff --git a/tests/packages/test-optional-hooks/pyproject.toml b/tests/packages/test-optional-hooks/pyproject.toml
new file mode 100644
index 0000000..2796891
--- /dev/null
+++ b/tests/packages/test-optional-hooks/pyproject.toml
@@ -0,0 +1,4 @@
+[build-system]
+requires = []
+build-backend = 'hookless_backend'
+backend-path = ['.']
diff --git a/tests/packages/test-setuptools/pyproject.toml b/tests/packages/test-setuptools/pyproject.toml
new file mode 100644
index 0000000..b00a27a
--- /dev/null
+++ b/tests/packages/test-setuptools/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ['setuptools >= 42.0.0', 'wheel >= 0.36.0']
+build-backend = 'setuptools.build_meta'
diff --git a/tests/packages/test-setuptools/setup.cfg b/tests/packages/test-setuptools/setup.cfg
new file mode 100644
index 0000000..bf198b6
--- /dev/null
+++ b/tests/packages/test-setuptools/setup.cfg
@@ -0,0 +1,6 @@
+[metadata]
+name = test_setuptools
+version = 1.0.0
+
+[bdist_wheel]
+universal = 1
diff --git a/tests/packages/test-typo/pyproject.toml b/tests/packages/test-typo/pyproject.toml
new file mode 100644
index 0000000..02d1af2
--- /dev/null
+++ b/tests/packages/test-typo/pyproject.toml
@@ -0,0 +1,3 @@
+[build_sytem]
+requires = ['setuptools >= 40.8.0', 'wheel']
+build-backend = 'setuptools.build_meta'
diff --git a/tests/test_env.py b/tests/test_env.py
new file mode 100644
index 0000000..f6f381a
--- /dev/null
+++ b/tests/test_env.py
@@ -0,0 +1,177 @@
+# SPDX-License-Identifier: MIT
+import collections
+import inspect
+import logging
+import platform
+import subprocess
+import sys
+import sysconfig
+
+import pytest
+
+from packaging.version import Version
+
+import build.env
+
+
+IS_PYPY3 = platform.python_implementation() == 'PyPy'
+
+
+@pytest.mark.isolated
+def test_isolation():
+ subprocess.check_call([sys.executable, '-c', 'import build.env'])
+ with build.env.IsolatedEnvBuilder() as env:
+ with pytest.raises(subprocess.CalledProcessError):
+ debug = 'import sys; import os; print(os.linesep.join(sys.path));'
+ subprocess.check_call([env.executable, '-c', f'{debug} import build.env'])
+
+
+@pytest.mark.isolated
+def test_isolated_environment_install(mocker):
+ with build.env.IsolatedEnvBuilder() as env:
+ mocker.patch('build.env._subprocess')
+
+ env.install([])
+ build.env._subprocess.assert_not_called()
+
+ env.install(['some', 'requirements'])
+ build.env._subprocess.assert_called()
+ args = build.env._subprocess.call_args[0][0][:-1]
+ assert args == [
+ env.executable,
+ '-Im',
+ 'pip',
+ 'install',
+ '--use-pep517',
+ '--no-warn-script-location',
+ '-r',
+ ]
+
+
+@pytest.mark.skipif(IS_PYPY3, reason='PyPy3 uses get path to create and provision venv')
+@pytest.mark.skipif(sys.platform != 'darwin', reason='workaround for Apple Python')
+def test_can_get_venv_paths_with_conflicting_default_scheme(mocker):
+ get_scheme_names = mocker.patch('sysconfig.get_scheme_names', return_value=('osx_framework_library',))
+ with build.env.IsolatedEnvBuilder():
+ pass
+ assert get_scheme_names.call_count == 1
+
+
+@pytest.mark.skipif('posix_local' not in sysconfig.get_scheme_names(), reason='workaround for Debian/Ubuntu Python')
+def test_can_get_venv_paths_with_posix_local_default_scheme(mocker):
+ get_paths = mocker.spy(sysconfig, 'get_paths')
+ # We should never call this, but we patch it to ensure failure if we do
+ get_default_scheme = mocker.patch('sysconfig.get_default_scheme', return_value='posix_local')
+ with build.env.IsolatedEnvBuilder():
+ pass
+ get_paths.assert_called_once_with(scheme='posix_prefix', vars=mocker.ANY)
+ assert get_default_scheme.call_count == 0
+
+
+def test_executable_missing_post_creation(mocker):
+ venv_create = mocker.patch('venv.EnvBuilder.create')
+ with pytest.raises(RuntimeError, match='Virtual environment creation failed, executable .* missing'):
+ with build.env.IsolatedEnvBuilder():
+ pass
+ assert venv_create.call_count == 1
+
+
+def test_isolated_env_abstract():
+ with pytest.raises(TypeError):
+ build.env.IsolatedEnv()
+
+
+def test_isolated_env_has_executable_still_abstract():
+ class Env(build.env.IsolatedEnv):
+ @property
+ def executable(self):
+ raise NotImplementedError
+
+ with pytest.raises(TypeError):
+ Env()
+
+
+def test_isolated_env_has_install_still_abstract():
+ class Env(build.env.IsolatedEnv):
+ def install(self, requirements):
+ raise NotImplementedError
+
+ with pytest.raises(TypeError):
+ Env()
+
+
+@pytest.mark.pypy3323bug
+def test_isolated_env_log(mocker, caplog, package_test_flit):
+ mocker.patch('build.env._subprocess')
+ caplog.set_level(logging.DEBUG)
+
+ builder = build.env.IsolatedEnvBuilder()
+ frameinfo = inspect.getframeinfo(inspect.currentframe())
+ builder.log('something') # line number 106
+ with builder as env:
+ env.install(['something'])
+
+ assert [(record.levelname, record.message) for record in caplog.records] == [
+ ('INFO', 'something'),
+ ('INFO', 'Creating venv isolated environment...'),
+ ('INFO', 'Installing packages in isolated environment... (something)'),
+ ]
+ if sys.version_info >= (3, 8): # stacklevel
+ assert [(record.lineno) for record in caplog.records] == [
+ frameinfo.lineno + 1,
+ frameinfo.lineno - 6,
+ frameinfo.lineno + 85,
+ ]
+
+
+@pytest.mark.isolated
+def test_default_pip_is_never_too_old():
+ with build.env.IsolatedEnvBuilder() as env:
+ version = subprocess.check_output(
+ [env.executable, '-c', 'import pip; print(pip.__version__)'], universal_newlines=True
+ ).strip()
+ assert Version(version) >= Version('19.1')
+
+
+@pytest.mark.isolated
+@pytest.mark.parametrize('pip_version', ['20.2.0', '20.3.0', '21.0.0', '21.0.1'])
+@pytest.mark.parametrize('arch', ['x86_64', 'arm64'])
+def test_pip_needs_upgrade_mac_os_11(mocker, pip_version, arch):
+ SimpleNamespace = collections.namedtuple('SimpleNamespace', 'version')
+
+ _subprocess = mocker.patch('build.env._subprocess')
+ mocker.patch('platform.system', return_value='Darwin')
+ mocker.patch('platform.machine', return_value=arch)
+ mocker.patch('platform.mac_ver', return_value=('11.0', ('', '', ''), ''))
+ metadata_name = 'importlib_metadata' if sys.version_info < (3, 8) else 'importlib.metadata'
+ mocker.patch(metadata_name + '.distributions', return_value=(SimpleNamespace(version=pip_version),))
+
+ min_version = Version('20.3' if arch == 'x86_64' else '21.0.1')
+ with build.env.IsolatedEnvBuilder():
+ if Version(pip_version) < min_version:
+ print(_subprocess.call_args_list)
+ upgrade_call, uninstall_call = _subprocess.call_args_list
+ answer = 'pip>=20.3.0' if arch == 'x86_64' else 'pip>=21.0.1'
+ assert upgrade_call[0][0][1:] == ['-m', 'pip', 'install', answer]
+ assert uninstall_call[0][0][1:] == ['-m', 'pip', 'uninstall', 'setuptools', '-y']
+ else:
+ (uninstall_call,) = _subprocess.call_args_list
+ assert uninstall_call[0][0][1:] == ['-m', 'pip', 'uninstall', 'setuptools', '-y']
+
+
+@pytest.mark.isolated
+@pytest.mark.skipif(IS_PYPY3 and sys.platform.startswith('win'), reason='Isolated tests not supported on PyPy3 + Windows')
+@pytest.mark.parametrize('has_symlink', [True, False] if sys.platform.startswith('win') else [True])
+def test_venv_symlink(mocker, has_symlink):
+ if has_symlink:
+ mocker.patch('os.symlink')
+ mocker.patch('os.unlink')
+ else:
+ mocker.patch('os.symlink', side_effect=OSError())
+
+ # Cache must be cleared to rerun
+ build.env._fs_supports_symlink.cache_clear()
+ supports_symlink = build.env._fs_supports_symlink()
+ build.env._fs_supports_symlink.cache_clear()
+
+ assert supports_symlink is has_symlink
diff --git a/tests/test_integration.py b/tests/test_integration.py
new file mode 100644
index 0000000..bc2f4ff
--- /dev/null
+++ b/tests/test_integration.py
@@ -0,0 +1,136 @@
+# SPDX-License-Identifier: MIT
+
+import os
+import os.path
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+import urllib.request
+
+import filelock
+import pytest
+
+import build.__main__
+
+
+IS_WINDOWS = sys.platform.startswith('win')
+IS_PYPY3 = platform.python_implementation() == 'PyPy'
+
+
+INTEGRATION_SOURCES = {
+ 'dateutil': ('dateutil/dateutil', '2.8.1'),
+ 'pip': ('pypa/pip', '20.2.1'),
+ 'Solaar': ('pwr-Solaar/Solaar', '1.0.3'),
+ 'flit': ('takluyver/flit', '2.3.0'),
+}
+
+_SDIST = re.compile('.*.tar.gz')
+_WHEEL = re.compile('.*.whl')
+ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+
+def get_project(name, tmp_path):
+ dest = tmp_path / name
+ if name == 'build':
+ # our own project is available in-source, just ignore development files
+
+ def _ignore_folder(base, filenames):
+ ignore = [n for n in filenames if n in excl or any(n.endswith(i) for i in ('_cache', '.egg-info', '.pyc'))]
+            if base == ROOT and 'build' in filenames:  # ignore build only at root (our module is build too)
+ ignore.append('build')
+ return ignore
+
+ excl = '.tox', 'dist', '.git', '__pycache__', '.integration-sources', '.github', 'tests', 'docs'
+ shutil.copytree(ROOT, str(dest), ignore=_ignore_folder)
+ return dest
+
+ # for other projects download from github and cache it
+ tar_store = os.path.join(ROOT, '.integration-sources')
+ try:
+ os.makedirs(tar_store)
+    except OSError:  # creation may race with other parallel test workers, so a failure here is not fatal
+        pass  # if creation truly failed, a later step will fail and notify the user
+
+ github_org_repo, version = INTEGRATION_SOURCES[name]
+ tar_filename = f'{name}-{version}.tar.gz'
+ tarball = os.path.join(tar_store, tar_filename)
+ with filelock.FileLock(os.path.join(tar_store, f'{tar_filename}.lock')):
+ if not os.path.exists(tarball):
+ url = f'https://github.com/{github_org_repo}/archive/{version}.tar.gz'
+ with urllib.request.urlopen(url) as request, open(tarball, 'wb') as file_handler:
+ shutil.copyfileobj(request, file_handler)
+ with tarfile.open(tarball, 'r:gz') as tar_handler:
+ tar_handler.extractall(str(dest))
+ return dest / f'{name}-{version}'
+
+
+@pytest.mark.parametrize(
+ 'call',
+ [
+ None, # via code
+ [sys.executable, '-m', 'build'], # module
+ ['pyproject-build'], # entrypoint
+ ],
+ ids=['code', 'module', 'entrypoint'],
+)
+@pytest.mark.parametrize(
+ 'args',
+ [[], ['-x', '--no-isolation']],
+ ids=['isolated', 'no_isolation'],
+)
+@pytest.mark.parametrize(
+ 'project',
+ [
+ 'build',
+ 'pip',
+ 'dateutil',
+ 'Solaar',
+ 'flit',
+ ],
+)
+@pytest.mark.isolated
+def test_build(monkeypatch, project, args, call, tmp_path):
+ if project == 'flit' and '--no-isolation' in args:
+ pytest.xfail("can't build flit without isolation due to missing dependencies")
+ if project == 'Solaar' and IS_WINDOWS and IS_PYPY3:
+ pytest.xfail('Solaar fails building wheels via sdists on Windows on PyPy 3')
+
+ monkeypatch.chdir(tmp_path)
+ monkeypatch.setenv('SETUPTOOLS_SCM_PRETEND_VERSION', '0+dummy') # for the projects that use setuptools_scm
+
+ if call and call[0] == 'pyproject-build':
+ exe_name = f"pyproject-build{'.exe' if sys.platform.startswith('win') else ''}"
+ exe = os.path.join(os.path.dirname(sys.executable), exe_name)
+ if os.path.exists(exe):
+ call[0] = exe
+ else:
+ pytest.skip('Running via PYTHONPATH, so the pyproject-build entrypoint is not available')
+ path = get_project(project, tmp_path)
+ pkgs = tmp_path / 'pkgs'
+ args = [str(path), '-o', str(pkgs)] + args
+
+ if call is None:
+ build.__main__.main(args)
+ else:
+ subprocess.check_call(call + args)
+
+ pkg_names = os.listdir(str(pkgs))
+ assert list(filter(_SDIST.match, pkg_names))
+ assert list(filter(_WHEEL.match, pkg_names))
+
+
+def test_isolation(tmp_dir, package_test_flit, mocker):
+ try:
+ import flit_core # noqa: F401
+ except ModuleNotFoundError:
+ pass
+ else:
+ pytest.xfail('flit_core is available -- we want it missing!') # pragma: no cover
+
+ mocker.patch('build.__main__._error')
+
+ build.__main__.main([package_test_flit, '-o', tmp_dir, '--no-isolation'])
+ build.__main__._error.assert_called_with("Backend 'flit_core.buildapi' is not available.")
diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
index 0000000..e1fbe0c
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,437 @@
+# SPDX-License-Identifier: MIT
+
+import contextlib
+import importlib
+import io
+import os
+import re
+import subprocess
+import sys
+import venv
+
+import pytest
+
+import build
+import build.__main__
+
+
+build_open_owner = 'builtins'
+
+cwd = os.getcwd()
+out = os.path.join(cwd, 'dist')
+
+
+@pytest.mark.parametrize(
+ ('cli_args', 'build_args', 'hook'),
+ [
+ (
+ [],
+ [cwd, out, ['wheel'], {}, True, False],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['-n'],
+ [cwd, out, ['wheel'], {}, False, False],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['-s'],
+ [cwd, out, ['sdist'], {}, True, False],
+ 'build_package',
+ ),
+ (
+ ['-w'],
+ [cwd, out, ['wheel'], {}, True, False],
+ 'build_package',
+ ),
+ (
+ ['-s', '-w'],
+ [cwd, out, ['sdist', 'wheel'], {}, True, False],
+ 'build_package',
+ ),
+ (
+ ['source'],
+ ['source', os.path.join('source', 'dist'), ['wheel'], {}, True, False],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['-o', 'out'],
+ [cwd, 'out', ['wheel'], {}, True, False],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['source', '-o', 'out'],
+ ['source', 'out', ['wheel'], {}, True, False],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['-x'],
+ [cwd, out, ['wheel'], {}, True, True],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['-C--flag1', '-C--flag2'],
+ [cwd, out, ['wheel'], {'--flag1': '', '--flag2': ''}, True, False],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['-C--flag=value'],
+ [cwd, out, ['wheel'], {'--flag': 'value'}, True, False],
+ 'build_package_via_sdist',
+ ),
+ (
+ ['-C--flag1=value', '-C--flag2=other_value', '-C--flag2=extra_value'],
+ [cwd, out, ['wheel'], {'--flag1': 'value', '--flag2': ['other_value', 'extra_value']}, True, False],
+ 'build_package_via_sdist',
+ ),
+ ],
+)
+def test_parse_args(mocker, cli_args, build_args, hook):
+ mocker.patch('build.__main__.build_package', return_value=['something'])
+ mocker.patch('build.__main__.build_package_via_sdist', return_value=['something'])
+
+ build.__main__.main(cli_args)
+
+ if hook == 'build_package':
+ build.__main__.build_package.assert_called_with(*build_args)
+ elif hook == 'build_package_via_sdist':
+ build.__main__.build_package_via_sdist.assert_called_with(*build_args)
+ else:
+ raise ValueError(f'Unknown hook {hook}') # pragma: no cover
+
+
+def test_prog():
+ out = io.StringIO()
+
+ with pytest.raises(SystemExit):
+ with contextlib.redirect_stdout(out):
+ build.__main__.main(['--help'], prog='something')
+
+ assert out.getvalue().startswith('usage: something [-h]')
+
+
+def test_version(capsys):
+ with pytest.raises(SystemExit):
+ build.__main__.main(['--version'])
+ out, err = capsys.readouterr()
+ assert out.startswith(f'build {build.__version__}')
+
+
+@pytest.mark.isolated
+def test_build_isolated(mocker, package_test_flit):
+ build_cmd = mocker.patch('build.ProjectBuilder.build', return_value='something')
+ required_cmd = mocker.patch(
+ 'build.ProjectBuilder.get_requires_for_build',
+ side_effect=[
+ ['dep1', 'dep2'],
+ ],
+ )
+ mocker.patch('build.__main__._error')
+ install = mocker.patch('build.env._IsolatedEnvVenvPip.install')
+
+ build.__main__.build_package(package_test_flit, '.', ['sdist'])
+
+ install.assert_any_call({'flit_core >=2,<3'})
+
+ required_cmd.assert_called_with('sdist')
+ install.assert_any_call(['dep1', 'dep2'])
+
+ build_cmd.assert_called_with('sdist', '.', {})
+
+
+def test_build_no_isolation_check_deps_empty(mocker, package_test_flit):
+ # check_dependencies = []
+ build_cmd = mocker.patch('build.ProjectBuilder.build', return_value='something')
+ mocker.patch('build.ProjectBuilder.check_dependencies', return_value=[])
+
+ build.__main__.build_package(package_test_flit, '.', ['sdist'], isolation=False)
+
+ build_cmd.assert_called_with('sdist', '.', {})
+
+
+@pytest.mark.parametrize(
+ ['missing_deps', 'output'],
+ [
+ ([('foo',)], '\n\tfoo'),
+ ([('foo',), ('bar', 'baz', 'qux')], '\n\tfoo\n\tbar\n\tbaz -> qux'),
+ ],
+)
+def test_build_no_isolation_with_check_deps(mocker, package_test_flit, missing_deps, output):
+ error = mocker.patch('build.__main__._error')
+ build_cmd = mocker.patch('build.ProjectBuilder.build', return_value='something')
+ mocker.patch('build.ProjectBuilder.check_dependencies', return_value=missing_deps)
+
+ build.__main__.build_package(package_test_flit, '.', ['sdist'], isolation=False)
+
+ build_cmd.assert_called_with('sdist', '.', {})
+ error.assert_called_with('Missing dependencies:' + output)
+
+
+@pytest.mark.isolated
+def test_build_raises_build_exception(mocker, package_test_flit):
+ mocker.patch('build.ProjectBuilder.get_requires_for_build', side_effect=build.BuildException)
+ mocker.patch('build.env._IsolatedEnvVenvPip.install')
+
+ with pytest.raises(build.BuildException):
+ build.__main__.build_package(package_test_flit, '.', ['sdist'])
+
+
+@pytest.mark.isolated
+def test_build_raises_build_backend_exception(mocker, package_test_flit):
+ mocker.patch('build.ProjectBuilder.get_requires_for_build', side_effect=build.BuildBackendException(Exception('a')))
+ mocker.patch('build.env._IsolatedEnvVenvPip.install')
+
+ msg = f"Backend operation failed: Exception('a'{',' if sys.version_info < (3, 7) else ''})"
+ with pytest.raises(build.BuildBackendException, match=re.escape(msg)):
+ build.__main__.build_package(package_test_flit, '.', ['sdist'])
+
+
+@pytest.mark.pypy3323bug
+def test_build_package(tmp_dir, package_test_setuptools):
+ build.__main__.build_package(package_test_setuptools, tmp_dir, ['sdist', 'wheel'])
+
+ assert sorted(os.listdir(tmp_dir)) == [
+ 'test_setuptools-1.0.0-py2.py3-none-any.whl',
+ 'test_setuptools-1.0.0.tar.gz',
+ ]
+
+
+@pytest.mark.pypy3323bug
+def test_build_package_via_sdist(tmp_dir, package_test_setuptools):
+ build.__main__.build_package_via_sdist(package_test_setuptools, tmp_dir, ['wheel'])
+
+ assert sorted(os.listdir(tmp_dir)) == [
+ 'test_setuptools-1.0.0-py2.py3-none-any.whl',
+ 'test_setuptools-1.0.0.tar.gz',
+ ]
+
+
+@pytest.mark.pypy3323bug
+def test_build_package_via_sdist_cant_build(tmp_dir, package_test_cant_build_via_sdist):
+ with pytest.raises(build.BuildBackendException):
+ build.__main__.build_package_via_sdist(package_test_cant_build_via_sdist, tmp_dir, ['wheel'])
+
+
+def test_build_package_via_sdist_invalid_distribution(tmp_dir, package_test_setuptools):
+ with pytest.raises(ValueError, match='Only binary distributions are allowed but sdist was specified'):
+ build.__main__.build_package_via_sdist(package_test_setuptools, tmp_dir, ['sdist'])
+
+
+@pytest.mark.pypy3323bug
+@pytest.mark.parametrize(
+ ('args', 'output'),
+ [
+ (
+ [],
+ [
+ '* Creating venv isolated environment...',
+ '* Installing packages in isolated environment... (setuptools >= 42.0.0, wheel >= 0.36.0)',
+ '* Getting build dependencies for sdist...',
+ '* Building sdist...',
+ '* Building wheel from sdist',
+ '* Creating venv isolated environment...',
+ '* Installing packages in isolated environment... (setuptools >= 42.0.0, wheel >= 0.36.0)',
+ '* Getting build dependencies for wheel...',
+ '* Installing packages in isolated environment... (wheel)',
+ '* Building wheel...',
+ 'Successfully built test_setuptools-1.0.0.tar.gz and test_setuptools-1.0.0-py2.py3-none-any.whl',
+ ],
+ ),
+ (
+ ['--no-isolation'],
+ [
+ '* Getting build dependencies for sdist...',
+ '* Building sdist...',
+ '* Building wheel from sdist',
+ '* Getting build dependencies for wheel...',
+ '* Building wheel...',
+ 'Successfully built test_setuptools-1.0.0.tar.gz and test_setuptools-1.0.0-py2.py3-none-any.whl',
+ ],
+ ),
+ (
+ ['--wheel'],
+ [
+ '* Creating venv isolated environment...',
+ '* Installing packages in isolated environment... (setuptools >= 42.0.0, wheel >= 0.36.0)',
+ '* Getting build dependencies for wheel...',
+ '* Installing packages in isolated environment... (wheel)',
+ '* Building wheel...',
+ 'Successfully built test_setuptools-1.0.0-py2.py3-none-any.whl',
+ ],
+ ),
+ (
+ ['--wheel', '--no-isolation'],
+ [
+ '* Getting build dependencies for wheel...',
+ '* Building wheel...',
+ 'Successfully built test_setuptools-1.0.0-py2.py3-none-any.whl',
+ ],
+ ),
+ (
+ ['--sdist', '--no-isolation'],
+ [
+ '* Getting build dependencies for sdist...',
+ '* Building sdist...',
+ 'Successfully built test_setuptools-1.0.0.tar.gz',
+ ],
+ ),
+ (
+ ['--sdist', '--wheel', '--no-isolation'],
+ [
+ '* Getting build dependencies for sdist...',
+ '* Building sdist...',
+ '* Getting build dependencies for wheel...',
+ '* Building wheel...',
+ 'Successfully built test_setuptools-1.0.0.tar.gz and test_setuptools-1.0.0-py2.py3-none-any.whl',
+ ],
+ ),
+ ],
+ ids=[
+ 'via-sdist-isolation',
+ 'via-sdist-no-isolation',
+ 'wheel-direct-isolation',
+ 'wheel-direct-no-isolation',
+ 'sdist-direct-no-isolation',
+ 'sdist-and-wheel-direct-no-isolation',
+ ],
+)
+@pytest.mark.flaky(reruns=5)
+def test_output(package_test_setuptools, tmp_dir, capsys, args, output):
+ build.__main__.main([package_test_setuptools, '-o', tmp_dir] + args)
+ stdout, stderr = capsys.readouterr()
+ assert stdout.splitlines() == output
+
+
+@pytest.fixture()
+def main_reload_styles():
+ try:
+ yield
+ finally:
+ importlib.reload(build.__main__)
+
+
+@pytest.mark.pypy3323bug
+@pytest.mark.parametrize(
+ ('color', 'stdout_error', 'stdout_body'),
+ [
+ (
+ False,
+ 'ERROR ',
+ [
+ '* Creating venv isolated environment...',
+ '* Installing packages in isolated environment... (setuptools >= 42.0.0, this is invalid, wheel >= 0.36.0)',
+ '',
+ 'Traceback (most recent call last):',
+ ],
+ ),
+ (
+ True,
+ '\33[91mERROR\33[0m ',
+ [
+ '\33[1m* Creating venv isolated environment...\33[0m',
+ '\33[1m* Installing packages in isolated environment... '
+ '(setuptools >= 42.0.0, this is invalid, wheel >= 0.36.0)\33[0m',
+ '',
+ '\33[2mTraceback (most recent call last):',
+ ],
+ ),
+ ],
+ ids=['no-color', 'color'],
+)
+def test_output_env_subprocess_error(
+ mocker,
+ monkeypatch,
+ main_reload_styles,
+ package_test_invalid_requirements,
+ tmp_dir,
+ capsys,
+ color,
+ stdout_body,
+ stdout_error,
+):
+ try:
+ # do not inject hook to have clear output on capsys
+ mocker.patch('colorama.init')
+ except ModuleNotFoundError: # colorama might not be available
+ pass
+
+ monkeypatch.delenv('NO_COLOR', raising=False)
+ monkeypatch.setenv('FORCE_COLOR' if color else 'NO_COLOR', '')
+
+ importlib.reload(build.__main__) # reload module to set _STYLES
+
+ with pytest.raises(SystemExit):
+ build.__main__.main([package_test_invalid_requirements, '-o', tmp_dir])
+ stdout, stderr = capsys.readouterr()
+ stdout, stderr = stdout.splitlines(), stderr.splitlines()
+
+ assert stdout[:4] == stdout_body
+ assert stdout[-1].startswith(stdout_error)
+
+ assert len(stderr) == 1
+ assert stderr[0].startswith('ERROR: Invalid requirement: ')
+
+
+@pytest.mark.parametrize(
+ ('tty', 'env', 'colors'),
+ [
+ (False, {}, build.__main__._NO_COLORS),
+ (True, {}, build.__main__._COLORS),
+ (False, {'NO_COLOR': ''}, build.__main__._NO_COLORS),
+ (True, {'NO_COLOR': ''}, build.__main__._NO_COLORS),
+ (False, {'FORCE_COLOR': ''}, build.__main__._COLORS),
+ (True, {'FORCE_COLOR': ''}, build.__main__._COLORS),
+ ],
+)
+def test_colors(mocker, monkeypatch, main_reload_styles, tty, env, colors):
+ mocker.patch('sys.stdout.isatty', return_value=tty)
+ for key, value in env.items():
+ monkeypatch.setenv(key, value)
+
+ importlib.reload(build.__main__) # reload module to set _STYLES
+
+ assert build.__main__._STYLES == colors
+
+
+def test_colors_conflict(monkeypatch, main_reload_styles):
+ with monkeypatch.context() as m:
+ m.setenv('NO_COLOR', '')
+ m.setenv('FORCE_COLOR', '')
+
+ with pytest.warns(
+ UserWarning,
+ match='Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color',
+ ):
+ importlib.reload(build.__main__)
+
+ assert build.__main__._STYLES == build.__main__._NO_COLORS
+
+
+def raise_called_process_err(*args, **kwargs):
+ raise subprocess.CalledProcessError(1, ['test', 'args'], b'stdoutput', b'stderror')
+
+
+def test_venv_fail(monkeypatch, package_test_flit, tmp_dir, capsys):
+ monkeypatch.setattr(venv.EnvBuilder, 'create', raise_called_process_err)
+ monkeypatch.setenv('NO_COLOR', '')
+
+ importlib.reload(build.__main__) # reload module to set _STYLES
+
+ with pytest.raises(SystemExit):
+ build.__main__.main([package_test_flit, '-o', tmp_dir])
+
+ stdout, stderr = capsys.readouterr()
+
+ assert (
+ stdout
+ == '''\
+* Creating venv isolated environment...
+ERROR Failed to create venv. Maybe try installing virtualenv.
+ Command 'test args' failed with return code 1
+ stdout:
+ stdoutput
+ stderr:
+ stderror
+'''
+ )
+ assert stderr == ''
diff --git a/tests/test_module.py b/tests/test_module.py
new file mode 100644
index 0000000..1d3d1cc
--- /dev/null
+++ b/tests/test_module.py
@@ -0,0 +1,16 @@
+# SPDX-License-Identifier: MIT
+
+import sys
+
+import pytest
+
+import build
+
+
+def test_version():
+ assert build.__version__
+
+
+@pytest.mark.skipif(sys.version_info < (3, 7), reason='Python 3.7+ required for dir support')
+def test_dir():
+ assert set(dir(build)) == set(build.__all__)
diff --git a/tests/test_projectbuilder.py b/tests/test_projectbuilder.py
new file mode 100644
index 0000000..57ff9f9
--- /dev/null
+++ b/tests/test_projectbuilder.py
@@ -0,0 +1,672 @@
+# SPDX-License-Identifier: MIT
+
+
+import copy
+import importlib
+import logging
+import os
+import sys
+import textwrap
+
+import pep517.wrappers
+import pytest
+
+import build
+
+
+if sys.version_info >= (3, 8): # pragma: no cover
+ from importlib import metadata as importlib_metadata
+else: # pragma: no cover
+ import importlib_metadata
+
+import pathlib
+
+
+build_open_owner = 'builtins'
+
+
+DEFAULT_BACKEND = {
+ 'build-backend': 'setuptools.build_meta:__legacy__',
+ 'requires': ['setuptools >= 40.8.0', 'wheel'],
+}
+
+
+class MockDistribution(importlib_metadata.Distribution):
+ def locate_file(self, path): # pragma: no cover
+ return ''
+
+ @classmethod
+ def from_name(cls, name):
+ if name == 'extras_dep':
+ return ExtraMockDistribution()
+ elif name == 'requireless_dep':
+ return RequirelessMockDistribution()
+ elif name == 'recursive_dep':
+ return RecursiveMockDistribution()
+ elif name == 'prerelease_dep':
+ return PrereleaseMockDistribution()
+ elif name == 'circular_dep':
+ return CircularMockDistribution()
+ elif name == 'nested_circular_dep':
+ return NestedCircularMockDistribution()
+ raise importlib_metadata.PackageNotFoundError
+
+
+class ExtraMockDistribution(MockDistribution):
+ def read_text(self, filename):
+ if filename == 'METADATA':
+ return textwrap.dedent(
+ """
+ Metadata-Version: 2.2
+ Name: extras_dep
+ Version: 1.0.0
+ Provides-Extra: extra_without_associated_deps
+ Provides-Extra: extra_with_unmet_deps
+ Requires-Dist: unmet_dep; extra == 'extra_with_unmet_deps'
+ Provides-Extra: extra_with_met_deps
+ Requires-Dist: extras_dep; extra == 'extra_with_met_deps'
+ Provides-Extra: recursive_extra_with_unmet_deps
+ Requires-Dist: recursive_dep; extra == 'recursive_extra_with_unmet_deps'
+ """
+ ).strip()
+
+
+class RequirelessMockDistribution(MockDistribution):
+ def read_text(self, filename):
+ if filename == 'METADATA':
+ return textwrap.dedent(
+ """
+ Metadata-Version: 2.2
+ Name: requireless_dep
+ Version: 1.0.0
+ """
+ ).strip()
+
+
+class RecursiveMockDistribution(MockDistribution):
+ def read_text(self, filename):
+ if filename == 'METADATA':
+ return textwrap.dedent(
+ """
+ Metadata-Version: 2.2
+ Name: recursive_dep
+ Version: 1.0.0
+ Requires-Dist: recursive_unmet_dep
+ """
+ ).strip()
+
+
+class PrereleaseMockDistribution(MockDistribution):
+ def read_text(self, filename):
+ if filename == 'METADATA':
+ return textwrap.dedent(
+ """
+ Metadata-Version: 2.2
+ Name: prerelease_dep
+ Version: 1.0.1a0
+ """
+ ).strip()
+
+
+class CircularMockDistribution(MockDistribution):
+ def read_text(self, filename):
+ if filename == 'METADATA':
+ return textwrap.dedent(
+ """
+ Metadata-Version: 2.2
+ Name: circular_dep
+ Version: 1.0.0
+ Requires-Dist: nested_circular_dep
+ """
+ ).strip()
+
+
+class NestedCircularMockDistribution(MockDistribution):
+ def read_text(self, filename):
+ if filename == 'METADATA':
+ return textwrap.dedent(
+ """
+ Metadata-Version: 2.2
+ Name: nested_circular_dep
+ Version: 1.0.0
+ Requires-Dist: circular_dep
+ """
+ ).strip()
+
+
+@pytest.mark.parametrize(
+ ('requirement_string', 'expected'),
+ [
+ ('extras_dep', None),
+ ('missing_dep', ('missing_dep',)),
+ ('requireless_dep', None),
+ ('extras_dep[undefined_extra]', None),
+ # would the wheel builder filter this out?
+ ('extras_dep[extra_without_associated_deps]', None),
+ (
+ 'extras_dep[extra_with_unmet_deps]',
+ ('extras_dep[extra_with_unmet_deps]', 'unmet_dep; extra == "extra_with_unmet_deps"'),
+ ),
+ (
+ 'extras_dep[recursive_extra_with_unmet_deps]',
+ (
+ 'extras_dep[recursive_extra_with_unmet_deps]',
+ 'recursive_dep; extra == "recursive_extra_with_unmet_deps"',
+ 'recursive_unmet_dep',
+ ),
+ ),
+ ('extras_dep[extra_with_met_deps]', None),
+ ('missing_dep; python_version>"10"', None),
+ ('missing_dep; python_version<="1"', None),
+ ('missing_dep; python_version>="1"', ('missing_dep; python_version >= "1"',)),
+ ('extras_dep == 1.0.0', None),
+ ('extras_dep == 2.0.0', ('extras_dep==2.0.0',)),
+ ('extras_dep[extra_without_associated_deps] == 1.0.0', None),
+ ('extras_dep[extra_without_associated_deps] == 2.0.0', ('extras_dep[extra_without_associated_deps]==2.0.0',)),
+ ('prerelease_dep >= 1.0.0', None),
+ ('circular_dep', None),
+ ],
+)
+def test_check_dependency(monkeypatch, requirement_string, expected):
+ monkeypatch.setattr(importlib_metadata, 'Distribution', MockDistribution)
+ assert next(build.check_dependency(requirement_string), None) == expected
+
+
+def test_bad_project(package_test_no_project):
+ # Passing a nonexistent project directory
+ with pytest.raises(build.BuildException):
+ build.ProjectBuilder(os.path.join(package_test_no_project, 'does-not-exist'))
+ # Passing a file as a project directory
+ with pytest.raises(build.BuildException):
+ build.ProjectBuilder(os.path.join(package_test_no_project, 'empty.txt'))
+ # Passing a project directory with no pyproject.toml or setup.py
+ with pytest.raises(build.BuildException):
+ build.ProjectBuilder(package_test_no_project)
+
+
+def test_init(mocker, package_test_flit, package_legacy, test_no_permission, package_test_bad_syntax):
+ mocker.patch('pep517.wrappers.Pep517HookCaller')
+
+ # correct flit pyproject.toml
+ builder = build.ProjectBuilder(package_test_flit)
+ pep517.wrappers.Pep517HookCaller.assert_called_with(
+ package_test_flit, 'flit_core.buildapi', backend_path=None, python_executable=sys.executable, runner=builder._runner
+ )
+ pep517.wrappers.Pep517HookCaller.reset_mock()
+
+ # custom python
+ builder = build.ProjectBuilder(package_test_flit, python_executable='some-python')
+ pep517.wrappers.Pep517HookCaller.assert_called_with(
+ package_test_flit, 'flit_core.buildapi', backend_path=None, python_executable='some-python', runner=builder._runner
+ )
+ pep517.wrappers.Pep517HookCaller.reset_mock()
+
+ # FileNotFoundError
+ builder = build.ProjectBuilder(package_legacy)
+ pep517.wrappers.Pep517HookCaller.assert_called_with(
+ package_legacy,
+ 'setuptools.build_meta:__legacy__',
+ backend_path=None,
+ python_executable=sys.executable,
+ runner=builder._runner,
+ )
+
+ # PermissionError
+ if not sys.platform.startswith('win'): # can't correctly set the permissions required for this
+ with pytest.raises(build.BuildException):
+ build.ProjectBuilder(test_no_permission)
+
+ # TomlDecodeError
+ with pytest.raises(build.BuildException):
+ build.ProjectBuilder(package_test_bad_syntax)
+
+
+@pytest.mark.parametrize('value', [b'something', 'something_else'])
+def test_python_executable(package_test_flit, value):
+ builder = build.ProjectBuilder(package_test_flit)
+
+ builder.python_executable = value
+ assert builder.python_executable == value
+ assert builder._hook.python_executable == value
+
+
+@pytest.mark.parametrize('distribution', ['wheel', 'sdist'])
+def test_get_requires_for_build_missing_backend(packages_path, distribution):
+ bad_backend_path = os.path.join(packages_path, 'test-bad-backend')
+ builder = build.ProjectBuilder(bad_backend_path)
+
+ with pytest.raises(build.BuildBackendException):
+ builder.get_requires_for_build(distribution)
+
+
+@pytest.mark.parametrize('distribution', ['wheel', 'sdist'])
+def test_get_requires_for_build_missing_optional_hooks(package_test_optional_hooks, distribution):
+ builder = build.ProjectBuilder(package_test_optional_hooks)
+
+ assert builder.get_requires_for_build(distribution) == set()
+
+
+@pytest.mark.parametrize('distribution', ['wheel', 'sdist'])
+def test_build_missing_backend(packages_path, distribution, tmpdir):
+ bad_backend_path = os.path.join(packages_path, 'test-bad-backend')
+ builder = build.ProjectBuilder(bad_backend_path)
+
+ with pytest.raises(build.BuildBackendException):
+ builder.build(distribution, str(tmpdir))
+
+
+def test_check_dependencies(mocker, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller.get_requires_for_build_sdist')
+ mocker.patch('pep517.wrappers.Pep517HookCaller.get_requires_for_build_wheel')
+
+ builder = build.ProjectBuilder(package_test_flit)
+
+ side_effects = [
+ [],
+ ['something'],
+ pep517.wrappers.BackendUnavailable,
+ ]
+
+ builder._hook.get_requires_for_build_sdist.side_effect = copy.copy(side_effects)
+ builder._hook.get_requires_for_build_wheel.side_effect = copy.copy(side_effects)
+
+ # requires = []
+ assert builder.check_dependencies('sdist') == {('flit_core<3,>=2',)}
+ assert builder.check_dependencies('wheel') == {('flit_core<3,>=2',)}
+
+ # requires = ['something']
+ assert builder.check_dependencies('sdist') == {('flit_core<3,>=2',), ('something',)}
+ assert builder.check_dependencies('wheel') == {('flit_core<3,>=2',), ('something',)}
+
+ # BackendUnavailable
+ with pytest.raises(build.BuildBackendException):
+ builder.check_dependencies('sdist')
+ with pytest.raises(build.BuildBackendException):
+        builder.check_dependencies('wheel')
+
+
+def test_working_directory(tmp_dir):
+ assert os.path.realpath(os.curdir) != os.path.realpath(tmp_dir)
+ with build._working_directory(tmp_dir):
+ assert os.path.realpath(os.curdir) == os.path.realpath(tmp_dir)
+
+
+def test_working_directory_exc_is_not_transformed(mocker, package_test_flit, tmp_dir):
+ mocker.patch('build._working_directory', side_effect=OSError)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ with pytest.raises(OSError):
+ builder._call_backend('build_sdist', tmp_dir)
+
+
+def test_build(mocker, package_test_flit, tmp_dir):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+ mocker.patch('build._working_directory', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+
+ builder._hook.build_sdist.side_effect = ['dist.tar.gz', Exception]
+ builder._hook.build_wheel.side_effect = ['dist.whl', Exception]
+
+ assert builder.build('sdist', tmp_dir) == os.path.join(tmp_dir, 'dist.tar.gz')
+ builder._hook.build_sdist.assert_called_with(tmp_dir, None)
+ build._working_directory.assert_called_with(package_test_flit)
+
+ assert builder.build('wheel', tmp_dir) == os.path.join(tmp_dir, 'dist.whl')
+ builder._hook.build_wheel.assert_called_with(tmp_dir, None)
+ build._working_directory.assert_called_with(package_test_flit)
+
+ with pytest.raises(build.BuildBackendException):
+ build._working_directory.assert_called_with(package_test_flit)
+ builder.build('sdist', tmp_dir)
+
+ with pytest.raises(build.BuildBackendException):
+ build._working_directory.assert_called_with(package_test_flit)
+ builder.build('wheel', tmp_dir)
+
+
+def test_default_backend(mocker, package_legacy):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_legacy)
+
+ assert builder._build_system == DEFAULT_BACKEND
+
+
+def test_missing_backend(mocker, package_test_no_backend):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_no_backend)
+
+ assert builder._build_system == {'requires': [], 'build-backend': DEFAULT_BACKEND['build-backend']}
+
+
+def test_missing_requires(mocker, package_test_no_requires):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ with pytest.raises(build.BuildException):
+ build.ProjectBuilder(package_test_no_requires)
+
+
+def test_build_system_typo(mocker, package_test_typo):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ with pytest.warns(build.TypoWarning):
+ build.ProjectBuilder(package_test_typo)
+
+
+def test_missing_outdir(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ builder._hook.build_sdist.return_value = 'dist.tar.gz'
+ out = os.path.join(tmp_dir, 'out')
+
+ builder.build('sdist', out)
+
+ assert os.path.isdir(out)
+
+
+def test_relative_outdir(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ builder._hook.build_sdist.return_value = 'dist.tar.gz'
+
+ builder.build('sdist', '.')
+
+ builder._hook.build_sdist.assert_called_with(os.path.abspath('.'), None)
+
+
+def test_build_not_dir_outdir(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ builder._hook.build_sdist.return_value = 'dist.tar.gz'
+ out = os.path.join(tmp_dir, 'out')
+
+ open(out, 'a').close() # create empty file
+
+ with pytest.raises(build.BuildException):
+ builder.build('sdist', out)
+
+
+@pytest.fixture(scope='session')
+def demo_pkg_inline(tmp_path_factory):
+ # builds a wheel without any dependencies and with a console script demo-pkg-inline
+ tmp_path = tmp_path_factory.mktemp('demo-pkg-inline')
+ builder = build.ProjectBuilder(srcdir=os.path.join(os.path.dirname(__file__), 'packages', 'inline'))
+ out = tmp_path / 'dist'
+ builder.build('wheel', str(out))
+ return next(out.iterdir())
+
+
+@pytest.mark.isolated
+def test_build_with_dep_on_console_script(tmp_path, demo_pkg_inline, capfd, mocker):
+ """
+ All command-line scripts provided by the build-required packages must be present in the build environment's PATH.
+ """
+    # we first install demo-pkg-inline as a build dependency (it provides a console script we can check for)
+    # to validate that backend invocations see the correct PATH, we use an inline backend that will fail, but
+    # first prints the PATH information (and checks that shutil.which can discover the executable, as the PEP states)
+ toml = textwrap.dedent(
+ '''
+ [build-system]
+ requires = ["demo_pkg_inline"]
+ build-backend = "build"
+ backend-path = ["."]
+
+ [project]
+ description = "Factory ⸻ A code generator 🏭"
+ authors = [{name = "Łukasz Langa"}]
+ '''
+ )
+ code = textwrap.dedent(
+ '''
+ import os
+ import shutil
+ import sys
+ print("BB " + os.environ["PATH"])
+ exe_at = shutil.which("demo-pkg-inline")
+ print("BB " + exe_at)
+ '''
+ )
+ (tmp_path / 'pyproject.toml').write_text(toml, encoding='UTF-8')
+ (tmp_path / 'build.py').write_text(code)
+
+ deps = {str(demo_pkg_inline)} # we patch the requires demo_pkg_inline to refer to the wheel -> we don't need index
+ mocker.patch('build.ProjectBuilder.build_system_requires', new_callable=mocker.PropertyMock, return_value=deps)
+ from build.__main__ import main
+
+ with pytest.raises(SystemExit):
+ main(['--wheel', '--outdir', str(tmp_path / 'dist'), str(tmp_path)])
+
+ out, err = capfd.readouterr()
+ lines = [line[3:] for line in out.splitlines() if line.startswith('BB ')] # filter for our markers
+ path_vars = lines[0].split(os.pathsep)
+ which_detected = lines[1]
+ assert which_detected.startswith(path_vars[0]), out
+
+
+def test_prepare(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+ mocker.patch('build._working_directory', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ builder._hook.prepare_metadata_for_build_wheel.return_value = 'dist-1.0.dist-info'
+
+ assert builder.prepare('wheel', tmp_dir) == os.path.join(tmp_dir, 'dist-1.0.dist-info')
+ builder._hook.prepare_metadata_for_build_wheel.assert_called_with(tmp_dir, None, _allow_fallback=False)
+ build._working_directory.assert_called_with(package_test_flit)
+
+
+def test_prepare_no_hook(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ failure = pep517.wrappers.HookMissing('prepare_metadata_for_build_wheel')
+ builder._hook.prepare_metadata_for_build_wheel.side_effect = failure
+
+ assert builder.prepare('wheel', tmp_dir) is None
+
+
+def test_prepare_error(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ builder._hook.prepare_metadata_for_build_wheel.side_effect = Exception
+
+ with pytest.raises(build.BuildBackendException, match='Backend operation failed: Exception'):
+ builder.prepare('wheel', tmp_dir)
+
+
+def test_prepare_not_dir_outdir(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+
+ out = os.path.join(tmp_dir, 'out')
+ with open(out, 'w') as f:
+ f.write('Not a directory')
+ with pytest.raises(build.BuildException, match='Build path .* exists and is not a directory'):
+ builder.prepare('wheel', out)
+
+
+def test_no_outdir_single(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller.prepare_metadata_for_build_wheel', return_value='')
+
+ builder = build.ProjectBuilder(package_test_flit)
+
+ out = os.path.join(tmp_dir, 'out')
+ builder.prepare('wheel', out)
+
+ assert os.path.isdir(out)
+
+
+def test_no_outdir_multiple(mocker, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller.prepare_metadata_for_build_wheel', return_value='')
+
+ builder = build.ProjectBuilder(package_test_flit)
+
+ out = os.path.join(tmp_dir, 'does', 'not', 'exist')
+ builder.prepare('wheel', out)
+
+ assert os.path.isdir(out)
+
+
+def test_runner_user_specified(tmp_dir, package_test_flit):
+ def dummy_runner(cmd, cwd=None, env=None):
+ raise RuntimeError('Runner was called')
+
+ builder = build.ProjectBuilder(package_test_flit, runner=dummy_runner)
+ with pytest.raises(build.BuildBackendException, match='Runner was called'):
+ builder.build('wheel', tmp_dir)
+
+
+def test_metadata_path_no_prepare(tmp_dir, package_test_no_prepare):
+ builder = build.ProjectBuilder(package_test_no_prepare)
+
+ metadata = importlib_metadata.PathDistribution(
+ pathlib.Path(builder.metadata_path(tmp_dir)),
+ ).metadata
+
+ assert metadata['name'] == 'test-no-prepare'
+ assert metadata['Version'] == '1.0.0'
+
+
+def test_metadata_path_with_prepare(tmp_dir, package_test_setuptools):
+ builder = build.ProjectBuilder(package_test_setuptools)
+
+ metadata = importlib_metadata.PathDistribution(
+ pathlib.Path(builder.metadata_path(tmp_dir)),
+ ).metadata
+
+ assert metadata['name'] == 'test-setuptools'
+ assert metadata['Version'] == '1.0.0'
+
+
+def test_metadata_path_legacy(tmp_dir, package_legacy):
+ builder = build.ProjectBuilder(package_legacy)
+
+ metadata = importlib_metadata.PathDistribution(
+ pathlib.Path(builder.metadata_path(tmp_dir)),
+ ).metadata
+
+ assert metadata['name'] == 'legacy'
+ assert metadata['Version'] == '1.0.0'
+
+
+def test_metadata_invalid_wheel(tmp_dir, package_test_bad_wheel):
+ builder = build.ProjectBuilder(package_test_bad_wheel)
+
+ with pytest.raises(ValueError, match='Invalid wheel'):
+ builder.metadata_path(tmp_dir)
+
+
+@pytest.fixture
+def mock_tomli_not_available(mocker):
+ loads = mocker.patch('tomli.loads')
+ mocker.patch.dict(sys.modules, {'tomli': None})
+ importlib.reload(build)
+ try:
+ yield
+ finally:
+ loads.assert_not_called()
+ mocker.stopall()
+ importlib.reload(build)
+
+
+@pytest.mark.skipif(sys.version_info >= (3, 11), reason='No need to test old toml support on 3.11+')
+def test_toml_instead_of_tomli(mocker, mock_tomli_not_available, tmp_dir, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ builder._hook.build_sdist.return_value = 'dist.tar.gz'
+
+ builder.build('sdist', '.')
+
+ builder._hook.build_sdist.assert_called_with(os.path.abspath('.'), None)
+
+
+def test_log(mocker, caplog, package_test_flit):
+ mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
+ mocker.patch('build.ProjectBuilder._call_backend', return_value='some_path')
+ caplog.set_level(logging.DEBUG)
+
+ builder = build.ProjectBuilder(package_test_flit)
+ builder.get_requires_for_build('sdist')
+ builder.get_requires_for_build('wheel')
+ builder.prepare('wheel', '.')
+ builder.build('sdist', '.')
+ builder.build('wheel', '.')
+ builder.log('something')
+
+ assert [(record.levelname, record.message) for record in caplog.records] == [
+ ('INFO', 'Getting build dependencies for sdist...'),
+ ('INFO', 'Getting build dependencies for wheel...'),
+ ('INFO', 'Getting metadata for wheel...'),
+ ('INFO', 'Building sdist...'),
+ ('INFO', 'Building wheel...'),
+ ('INFO', 'something'),
+ ]
+ if sys.version_info >= (3, 8): # stacklevel
+ assert caplog.records[-1].lineno == 602
+
+
+@pytest.mark.parametrize(
+ ('pyproject_toml', 'parse_output'),
+ [
+ (
+ {'build-system': {'requires': ['foo']}},
+ {'requires': ['foo'], 'build-backend': 'setuptools.build_meta:__legacy__'},
+ ),
+ (
+ {'build-system': {'requires': ['foo'], 'build-backend': 'bar'}},
+ {'requires': ['foo'], 'build-backend': 'bar'},
+ ),
+ (
+ {'build-system': {'requires': ['foo'], 'build-backend': 'bar', 'backend-path': ['baz']}},
+ {'requires': ['foo'], 'build-backend': 'bar', 'backend-path': ['baz']},
+ ),
+ ],
+)
+def test_parse_valid_build_system_table_type(pyproject_toml, parse_output):
+ assert build._parse_build_system_table(pyproject_toml) == parse_output
+
+
+@pytest.mark.parametrize(
+ ('pyproject_toml', 'error_message'),
+ [
+ (
+ {'build-system': {}},
+ '`requires` is a required property',
+ ),
+ (
+ {'build-system': {'requires': 'not an array'}},
+ '`requires` must be an array of strings',
+ ),
+ (
+ {'build-system': {'requires': [1]}},
+ '`requires` must be an array of strings',
+ ),
+ (
+ {'build-system': {'requires': ['foo'], 'build-backend': ['not a string']}},
+ '`build-backend` must be a string',
+ ),
+ (
+ {'build-system': {'requires': ['foo'], 'backend-path': 'not an array'}},
+ '`backend-path` must be an array of strings',
+ ),
+ (
+ {'build-system': {'requires': ['foo'], 'backend-path': [1]}},
+ '`backend-path` must be an array of strings',
+ ),
+ (
+ {'build-system': {'requires': ['foo'], 'unknown-prop': False}},
+ 'Unknown properties: unknown-prop',
+ ),
+ ],
+)
+def test_parse_invalid_build_system_table_type(pyproject_toml, error_message):
+ with pytest.raises(build.BuildSystemTableValidationError, match=error_message):
+ build._parse_build_system_table(pyproject_toml)
diff --git a/tests/test_self_packaging.py b/tests/test_self_packaging.py
new file mode 100644
index 0000000..abe09f8
--- /dev/null
+++ b/tests/test_self_packaging.py
@@ -0,0 +1,103 @@
+# These tests check the sdist, path, and wheel of build to ensure that all are valid.
+
+import subprocess
+import sys
+import tarfile
+import zipfile
+
+from pathlib import Path
+
+import pytest
+
+
+DIR = Path(__file__).parent.resolve()
+MAIN_DIR = DIR.parent
+
+sdist_files = {
+ 'LICENSE',
+ 'PKG-INFO',
+ 'README.md',
+ 'pyproject.toml',
+ 'setup.cfg',
+ 'setup.py',
+ 'src',
+ 'src/build',
+ 'src/build.egg-info',
+ 'src/build.egg-info/PKG-INFO',
+ 'src/build.egg-info/SOURCES.txt',
+ 'src/build.egg-info/dependency_links.txt',
+ 'src/build.egg-info/entry_points.txt',
+ 'src/build.egg-info/requires.txt',
+ 'src/build.egg-info/top_level.txt',
+ 'src/build/__init__.py',
+ 'src/build/__main__.py',
+ 'src/build/env.py',
+ 'src/build/py.typed',
+ 'src/build/util.py',
+}
+
+wheel_files = {
+ 'build/__init__.py',
+ 'build/__main__.py',
+ 'build/env.py',
+ 'build/py.typed',
+ 'build/util.py',
+ 'dist-info/LICENSE',
+ 'dist-info/METADATA',
+ 'dist-info/RECORD',
+ 'dist-info/WHEEL',
+ 'dist-info/entry_points.txt',
+ 'dist-info/top_level.txt',
+}
+
+
+def test_build_sdist(monkeypatch, tmpdir):
+
+ monkeypatch.chdir(MAIN_DIR)
+
+ subprocess.run(
+ [
+ sys.executable,
+ '-m',
+ 'build',
+ '--sdist',
+ '--outdir',
+ str(tmpdir),
+ ],
+ check=True,
+    )
+
+ (sdist,) = tmpdir.visit('*.tar.gz')
+
+ with tarfile.open(str(sdist), 'r:gz') as tar:
+ simpler = {n.split('/', 1)[-1] for n in tar.getnames()[1:]}
+
+ assert simpler == sdist_files
+
+
+@pytest.mark.parametrize('args', ((), ('--wheel',)), ids=('from_sdist', 'direct'))
+def test_build_wheel(monkeypatch, tmpdir, args):
+
+ monkeypatch.chdir(MAIN_DIR)
+
+ subprocess.run(
+ [
+ sys.executable,
+ '-m',
+ 'build',
+ *args,
+ '--outdir',
+ str(tmpdir),
+ ],
+ check=True,
+ )
+
+ (wheel,) = tmpdir.visit('*.whl')
+
+ with zipfile.ZipFile(str(wheel)) as z:
+ names = z.namelist()
+
+ trimmed = {n for n in names if 'dist-info' not in n}
+ trimmed |= {f"dist-info/{n.split('/', 1)[-1]}" for n in names if 'dist-info' in n}
+
+ assert trimmed == wheel_files
diff --git a/tests/test_util.py b/tests/test_util.py
new file mode 100644
index 0000000..9f090b5
--- /dev/null
+++ b/tests/test_util.py
@@ -0,0 +1,44 @@
+# SPDX-License-Identifier: MIT
+
+import pytest
+
+import build.util
+
+
+@pytest.mark.pypy3323bug
+@pytest.mark.parametrize('isolated', [False, True])
+def test_wheel_metadata(package_test_setuptools, isolated):
+ metadata = build.util.project_wheel_metadata(package_test_setuptools, isolated)
+
+ assert metadata['name'] == 'test-setuptools'
+ assert metadata['version'] == '1.0.0'
+
+
+@pytest.mark.pypy3323bug
+def test_wheel_metadata_isolation(package_test_flit):
+ try:
+ import flit_core # noqa: F401
+ except ModuleNotFoundError:
+ pass
+ else:
+ pytest.xfail('flit_core is available -- we want it missing!') # pragma: no cover
+
+ metadata = build.util.project_wheel_metadata(package_test_flit)
+
+ assert metadata['name'] == 'test_flit'
+ assert metadata['version'] == '1.0.0'
+
+ with pytest.raises(
+ build.BuildBackendException,
+ match="Backend 'flit_core.buildapi' is not available.",
+ ):
+ build.util.project_wheel_metadata(package_test_flit, isolated=False)
+
+
+@pytest.mark.pypy3323bug
+def test_with_get_requires(package_test_metadata):
+ metadata = build.util.project_wheel_metadata(package_test_metadata)
+
+ assert metadata['name'] == 'test-metadata'
+ assert str(metadata['version']) == '1.0.0'
+ assert metadata['summary'] == 'hello!'
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..19cce03
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,109 @@
+[tox]
+envlist =
+ fix
+ type
+ docs
+ path
+ {py311, py310, py39, py38, py37, py36, pypy37, pypy38, pypy39}{, -min}
+isolated_build = true
+skip_missing_interpreters = true
+minversion = 3.14
+requires =
+ virtualenv>=20.0.34
+
+[testenv]
+description =
+ run test suite with {basepython}
+passenv =
+ LC_ALL
+ PIP_*
+ PYTEST_*
+ TERM
+setenv =
+ COVERAGE_FILE = {toxworkdir}/.coverage.{envname}
+ TEST_STATUS_DIR = {envtmpdir}
+ PYPY3323BUG = 1
+extras =
+ test
+commands =
+ pytest -ra --cov --cov-config pyproject.toml \
+ --cov-report=html:{envdir}/htmlcov --cov-context=test \
+ --cov-report=xml:{toxworkdir}/coverage.{envname}.xml {posargs:-n auto}
+
+[testenv:fix]
+description = run static analysis and style checks
+passenv =
+ HOMEPATH
+ PROGRAMDATA
+basepython = python3.9
+skip_install = true
+deps =
+ pre-commit>=2
+commands =
+ pre-commit run --all-files --show-diff-on-failure
+ python -c 'print("hint: run {envdir}/bin/pre-commit install to add checks as pre-commit hook")'
+
+[testenv:path]
+description = verify build can run from source (bootstrap)
+setenv =
+ PYTHONPATH = {toxinidir}/src
+ COVERAGE_FILE = {toxworkdir}/.coverage.{envname}
+commands_pre =
+ python -E -m pip uninstall -y build colorama
+
+[testenv:type]
+description = run type check on code base
+extras = typing
+commands =
+ mypy
+
+[testenv:{py311, py310, py39, py38, py37, py36, pypy37, pypy38, pypy39}-min]
+description = check minimum versions required of all dependencies
+skip_install = true
+commands_pre =
+ pip install .[test] -c tests/constraints.txt
+
+[testenv:docs]
+description = build documentation
+basepython = python3.8
+extras =
+ docs
+commands =
+ sphinx-build -n docs {envtmpdir} {posargs:-W}
+ python -c 'print("Documentation available under file://{envtmpdir}/index.html")'
+
+[testenv:dev]
+description = generate a DEV environment
+usedevelop = true
+deps =
+ virtualenv>=20.0.34
+extras =
+    docs
+ test
+commands =
+ python -m pip list --format=columns
+ python -c 'import sys; print(sys.executable)'
+
+[testenv:coverage]
+description = combine coverage from test environments
+passenv =
+ DIFF_AGAINST
+setenv =
+skip_install = true
+deps =
+ coverage[toml]>=5.1
+ diff_cover>=3
+parallel_show_output = true
+commands =
+ coverage combine {toxworkdir}
+ coverage report --skip-covered --show-missing -i
+ coverage xml -o {toxworkdir}/coverage.xml -i
+ coverage html -d {toxworkdir}/htmlcov -i
+ python -m diff_cover.diff_cover_tool --compare-branch {env:DIFF_AGAINST:origin/main} {toxworkdir}/coverage.xml
+depends = {py311, py310, py39, py38, py37, py36, pypy37, pypy38, pypy39}{,-min}, path
+
+[flake8]
+max-line-length = 127
+max-complexity = 10
+extend-ignore = E203
+extend-select = B9