author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-29 04:20:41 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-29 04:20:41 +0000
commit     b49f1524e250764592ff132af8fb0d39182620f7 (patch)
tree       a2c4da0c1bfc3be79c9b80180d8958804e91a07d /src
parent     Initial commit. (diff)
Adding upstream version 0.9.0. (upstream/0.9.0, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src')
-rw-r--r--  src/build/__init__.py   539
-rw-r--r--  src/build/__main__.py   397
-rw-r--r--  src/build/env.py        340
-rw-r--r--  src/build/py.typed        0
-rw-r--r--  src/build/util.py        60
5 files changed, 1336 insertions, 0 deletions
diff --git a/src/build/__init__.py b/src/build/__init__.py
new file mode 100644
index 0000000..0425a85
--- /dev/null
+++ b/src/build/__init__.py
@@ -0,0 +1,539 @@
+# SPDX-License-Identifier: MIT
+
+"""
+build - A simple, correct PEP 517 build frontend
+"""
+
+__version__ = '0.9.0'
+
+import contextlib
+import difflib
+import logging
+import os
+import re
+import subprocess
+import sys
+import textwrap
+import types
+import warnings
+import zipfile
+
+from collections import OrderedDict
+from typing import (
+ AbstractSet,
+ Any,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ Type,
+ Union,
+)
+
+import pep517.wrappers
+
+
+TOMLDecodeError: Type[Exception]
+toml_loads: Callable[[str], MutableMapping[str, Any]]
+
+if sys.version_info >= (3, 11):
+ from tomllib import TOMLDecodeError
+ from tomllib import loads as toml_loads
+else:
+ try:
+ from tomli import TOMLDecodeError
+ from tomli import loads as toml_loads
+ except ModuleNotFoundError: # pragma: no cover
+ from toml import TomlDecodeError as TOMLDecodeError # type: ignore[import,no-redef]
+ from toml import loads as toml_loads # type: ignore[no-redef]
+
+
+RunnerType = Callable[[Sequence[str], Optional[str], Optional[Mapping[str, str]]], None]
+ConfigSettingsType = Mapping[str, Union[str, Sequence[str]]]
+PathType = Union[str, 'os.PathLike[str]']
+_ExcInfoType = Union[Tuple[Type[BaseException], BaseException, types.TracebackType], Tuple[None, None, None]]
+
+
+_WHEEL_NAME_REGEX = re.compile(
+ r'(?P<distribution>.+)-(?P<version>.+)'
+ r'(-(?P<build_tag>.+))?-(?P<python_tag>.+)'
+ r'-(?P<abi_tag>.+)-(?P<platform_tag>.+)\.whl'
+)
+
+
+_DEFAULT_BACKEND = {
+ 'build-backend': 'setuptools.build_meta:__legacy__',
+ 'requires': ['setuptools >= 40.8.0', 'wheel'],
+}
+
+
+_logger = logging.getLogger(__name__)
+
+
+class BuildException(Exception):
+ """
+ Exception raised by :class:`ProjectBuilder`
+ """
+
+
+class BuildBackendException(Exception):
+ """
+ Exception raised when a backend operation fails
+ """
+
+ def __init__(
+ self, exception: Exception, description: Optional[str] = None, exc_info: _ExcInfoType = (None, None, None)
+ ) -> None:
+ super().__init__()
+ self.exception = exception
+ self.exc_info = exc_info
+ self._description = description
+
+ def __str__(self) -> str:
+ if self._description:
+ return self._description
+ return f'Backend operation failed: {self.exception!r}'
+
+
+class BuildSystemTableValidationError(BuildException):
+ """
+ Exception raised when the ``[build-system]`` table in pyproject.toml is invalid.
+ """
+
+ def __str__(self) -> str:
+ return f'Failed to validate `build-system` in pyproject.toml: {self.args[0]}'
+
+
+class FailedProcessError(Exception):
+ """
+ Exception raised when a setup or preparation operation fails.
+ """
+
+ def __init__(self, exception: subprocess.CalledProcessError, description: str) -> None:
+ super().__init__()
+ self.exception = exception
+ self._description = description
+
+ def __str__(self) -> str:
+ cmd = ' '.join(self.exception.cmd)
+ description = f"{self._description}\n Command '{cmd}' failed with return code {self.exception.returncode}"
+ for stream_name in ('stdout', 'stderr'):
+ stream = getattr(self.exception, stream_name)
+ if stream:
+ description += f'\n {stream_name}:\n'
+ description += textwrap.indent(stream.decode(), ' ')
+ return description
+
+
+class TypoWarning(Warning):
+ """
+ Warning raised when a possible typo is found
+ """
+
+
+@contextlib.contextmanager
+def _working_directory(path: str) -> Iterator[None]:
+ current = os.getcwd()
+
+ os.chdir(path)
+
+ try:
+ yield
+ finally:
+ os.chdir(current)
+
+
+def _validate_source_directory(srcdir: PathType) -> None:
+ if not os.path.isdir(srcdir):
+ raise BuildException(f'Source {srcdir} is not a directory')
+ pyproject_toml = os.path.join(srcdir, 'pyproject.toml')
+ setup_py = os.path.join(srcdir, 'setup.py')
+ if not os.path.exists(pyproject_toml) and not os.path.exists(setup_py):
+ raise BuildException(f'Source {srcdir} does not appear to be a Python project: no pyproject.toml or setup.py')
+
+
+def check_dependency(
+ req_string: str, ancestral_req_strings: Tuple[str, ...] = (), parent_extras: AbstractSet[str] = frozenset()
+) -> Iterator[Tuple[str, ...]]:
+ """
+ Verify that a dependency and all of its dependencies are met.
+
+ :param req_string: Requirement string
+ :param ancestral_req_strings: Requirement strings of the parent requirements that led to this one (used to detect cycles and to build the yielded chains)
+ :param parent_extras: Extras (e.g. "test" in myproject[test])
+ :yields: Unmet dependencies
+ """
+ import packaging.requirements
+
+ if sys.version_info >= (3, 8):
+ import importlib.metadata as importlib_metadata
+ else:
+ import importlib_metadata
+
+ req = packaging.requirements.Requirement(req_string)
+ normalised_req_string = str(req)
+
+ # ``Requirement`` doesn't implement ``__eq__`` so we cannot compare reqs for
+ # equality directly but the string representation is stable.
+ if normalised_req_string in ancestral_req_strings:
+ # cyclical dependency, already checked.
+ return
+
+ if req.marker:
+ extras = frozenset(('',)).union(parent_extras)
+ # a requirement can have multiple extras but ``evaluate`` can
+ # only check one at a time.
+ if all(not req.marker.evaluate(environment={'extra': e}) for e in extras):
+ # if the marker conditions are not met, we pretend that the
+ # dependency is satisfied.
+ return
+
+ try:
+ dist = importlib_metadata.distribution(req.name) # type: ignore[no-untyped-call]
+ except importlib_metadata.PackageNotFoundError:
+ # dependency is not installed in the environment.
+ yield ancestral_req_strings + (normalised_req_string,)
+ else:
+ if req.specifier and not req.specifier.contains(dist.version, prereleases=True):
+ # the installed version is incompatible.
+ yield ancestral_req_strings + (normalised_req_string,)
+ elif dist.requires:
+ for other_req_string in dist.requires:
+ # yields transitive dependencies that are not satisfied.
+ yield from check_dependency(other_req_string, ancestral_req_strings + (normalised_req_string,), req.extras)
+
+
+def _find_typo(dictionary: Mapping[str, str], expected: str) -> None:
+ for obj in dictionary:
+ if difflib.SequenceMatcher(None, expected, obj).ratio() >= 0.8:
+ warnings.warn(
+ f"Found '{obj}' in pyproject.toml, did you mean '{expected}'?",
+ TypoWarning,
+ )
+
+
+def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Dict[str, Any]:
+ # If pyproject.toml is missing (per PEP 517) or [build-system] is missing
+ # (per PEP 518), use default values
+ if 'build-system' not in pyproject_toml:
+ _find_typo(pyproject_toml, 'build-system')
+ return _DEFAULT_BACKEND
+
+ build_system_table = dict(pyproject_toml['build-system'])
+
+ # If [build-system] is present, it must have a ``requires`` field (per PEP 518)
+ if 'requires' not in build_system_table:
+ _find_typo(build_system_table, 'requires')
+ raise BuildSystemTableValidationError('`requires` is a required property')
+ elif not isinstance(build_system_table['requires'], list) or not all(
+ isinstance(i, str) for i in build_system_table['requires']
+ ):
+ raise BuildSystemTableValidationError('`requires` must be an array of strings')
+
+ if 'build-backend' not in build_system_table:
+ _find_typo(build_system_table, 'build-backend')
+ # If ``build-backend`` is missing, inject the legacy setuptools backend
+ # but leave ``requires`` intact to emulate pip
+ build_system_table['build-backend'] = _DEFAULT_BACKEND['build-backend']
+ elif not isinstance(build_system_table['build-backend'], str):
+ raise BuildSystemTableValidationError('`build-backend` must be a string')
+
+ if 'backend-path' in build_system_table and (
+ not isinstance(build_system_table['backend-path'], list)
+ or not all(isinstance(i, str) for i in build_system_table['backend-path'])
+ ):
+ raise BuildSystemTableValidationError('`backend-path` must be an array of strings')
+
+ unknown_props = build_system_table.keys() - {'requires', 'build-backend', 'backend-path'}
+ if unknown_props:
+ raise BuildSystemTableValidationError(f'Unknown properties: {", ".join(unknown_props)}')
+
+ return build_system_table
+
+
+class ProjectBuilder:
+ """
+ The PEP 517 consumer API.
+ """
+
+ def __init__(
+ self,
+ srcdir: PathType,
+ python_executable: str = sys.executable,
+ scripts_dir: Optional[str] = None,
+ runner: RunnerType = pep517.wrappers.default_subprocess_runner,
+ ) -> None:
+ """
+ :param srcdir: The source directory
+ :param python_executable: The python executable where the backend lives
+ :param scripts_dir: The location of the scripts dir (defaults to the folder where the python executable lives)
+ :param runner: An alternative runner for backend subprocesses
+
+ The 'runner', if provided, must accept the following arguments:
+
+ - cmd: a list of strings representing the command and arguments to
+ execute, as would be passed to e.g. 'subprocess.check_call'.
+ - cwd: a string representing the working directory that must be
+ used for the subprocess. Corresponds to the provided srcdir.
+ - extra_environ: a dict mapping environment variable names to values
+ which must be set for the subprocess execution.
+
+ The default runner simply calls the backend hooks in a subprocess, writing backend output
+ to stdout/stderr.
+ """
+ self._srcdir: str = os.path.abspath(srcdir)
+ _validate_source_directory(srcdir)
+
+ spec_file = os.path.join(srcdir, 'pyproject.toml')
+
+ try:
+ with open(spec_file, 'rb') as f:
+ spec = toml_loads(f.read().decode())
+ except FileNotFoundError:
+ spec = {}
+ except PermissionError as e:
+ raise BuildException(f"{e.strerror}: '{e.filename}' ") # noqa: B904 # use raise from
+ except TOMLDecodeError as e:
+ raise BuildException(f'Failed to parse {spec_file}: {e} ') # noqa: B904 # use raise from
+
+ self._build_system = _parse_build_system_table(spec)
+ self._backend = self._build_system['build-backend']
+ self._scripts_dir = scripts_dir
+ self._hook_runner = runner
+ self._hook = pep517.wrappers.Pep517HookCaller(
+ self.srcdir,
+ self._backend,
+ backend_path=self._build_system.get('backend-path'),
+ python_executable=python_executable,
+ runner=self._runner,
+ )
+
+ def _runner(
+ self, cmd: Sequence[str], cwd: Optional[str] = None, extra_environ: Optional[Mapping[str, str]] = None
+ ) -> None:
+ # if a scripts dir is specified, it must be inserted at the start of PATH (avoiding duplicate entries while doing so)
+ if self.scripts_dir is not None:
+ paths: Dict[str, None] = OrderedDict()
+ paths[str(self.scripts_dir)] = None
+ if 'PATH' in os.environ:
+ paths.update((i, None) for i in os.environ['PATH'].split(os.pathsep))
+ extra_environ = {} if extra_environ is None else dict(extra_environ)
+ extra_environ['PATH'] = os.pathsep.join(paths)
+ self._hook_runner(cmd, cwd, extra_environ)
+
+ @property
+ def srcdir(self) -> str:
+ """Project source directory."""
+ return self._srcdir
+
+ @property
+ def python_executable(self) -> str:
+ """
+ The Python executable used to invoke the backend.
+ """
+ # make mypy happy
+ exe: str = self._hook.python_executable
+ return exe
+
+ @python_executable.setter
+ def python_executable(self, value: str) -> None:
+ self._hook.python_executable = value
+
+ @property
+ def scripts_dir(self) -> Optional[str]:
+ """
+ The folder where the scripts are stored for the python executable.
+ """
+ return self._scripts_dir
+
+ @scripts_dir.setter
+ def scripts_dir(self, value: Optional[str]) -> None:
+ self._scripts_dir = value
+
+ @property
+ def build_system_requires(self) -> Set[str]:
+ """
+ The dependencies defined in the ``pyproject.toml``'s
+ ``build-system.requires`` field or the default build dependencies
+ if ``pyproject.toml`` is missing or ``build-system`` is undefined.
+ """
+ return set(self._build_system['requires'])
+
+ def get_requires_for_build(self, distribution: str, config_settings: Optional[ConfigSettingsType] = None) -> Set[str]:
+ """
+ Return the dependencies defined by the backend in addition to
+ :attr:`build_system_requires` for a given distribution.
+
+ :param distribution: Distribution to get the dependencies of
+ (``sdist`` or ``wheel``)
+ :param config_settings: Config settings for the build backend
+ """
+ self.log(f'Getting build dependencies for {distribution}...')
+ hook_name = f'get_requires_for_build_{distribution}'
+ get_requires = getattr(self._hook, hook_name)
+
+ with self._handle_backend(hook_name):
+ return set(get_requires(config_settings))
+
+ def check_dependencies(
+ self, distribution: str, config_settings: Optional[ConfigSettingsType] = None
+ ) -> Set[Tuple[str, ...]]:
+ """
+ Return the dependencies which are not satisfied from the combined set of
+ :attr:`build_system_requires` and :meth:`get_requires_for_build` for a given
+ distribution.
+
+ :param distribution: Distribution to check (``sdist`` or ``wheel``)
+ :param config_settings: Config settings for the build backend
+ :returns: Set of variable-length unmet dependency tuples
+ """
+ dependencies = self.get_requires_for_build(distribution, config_settings).union(self.build_system_requires)
+ return {u for d in dependencies for u in check_dependency(d)}
+
+ def prepare(
+ self, distribution: str, output_directory: PathType, config_settings: Optional[ConfigSettingsType] = None
+ ) -> Optional[str]:
+ """
+ Prepare metadata for a distribution.
+
+ :param distribution: Distribution to build (must be ``wheel``)
+ :param output_directory: Directory to put the prepared metadata in
+ :param config_settings: Config settings for the build backend
+ :returns: The full path to the prepared metadata directory
+ """
+ self.log(f'Getting metadata for {distribution}...')
+ try:
+ return self._call_backend(
+ f'prepare_metadata_for_build_{distribution}',
+ output_directory,
+ config_settings,
+ _allow_fallback=False,
+ )
+ except BuildBackendException as exception:
+ if isinstance(exception.exception, pep517.wrappers.HookMissing):
+ return None
+ raise
+
+ def build(
+ self,
+ distribution: str,
+ output_directory: PathType,
+ config_settings: Optional[ConfigSettingsType] = None,
+ metadata_directory: Optional[str] = None,
+ ) -> str:
+ """
+ Build a distribution.
+
+ :param distribution: Distribution to build (``sdist`` or ``wheel``)
+ :param output_directory: Directory to put the built distribution in
+ :param config_settings: Config settings for the build backend
+ :param metadata_directory: If provided, should be the return value of a
+ previous ``prepare`` call on the same ``distribution`` kind
+ :returns: The full path to the built distribution
+ """
+ self.log(f'Building {distribution}...')
+ kwargs = {} if metadata_directory is None else {'metadata_directory': metadata_directory}
+ return self._call_backend(f'build_{distribution}', output_directory, config_settings, **kwargs)
+
+ def metadata_path(self, output_directory: PathType) -> str:
+ """
+ Generate the metadata directory of a distribution and return its path.
+
+ If the backend does not support the ``prepare_metadata_for_build_wheel``
+ hook, a wheel will be built and the metadata will be extracted from it.
+
+ :param output_directory: Directory to put the metadata directory in
+ :returns: The path of the metadata directory
+ """
+ # prepare_metadata hook
+ metadata = self.prepare('wheel', output_directory)
+ if metadata is not None:
+ return metadata
+
+ # fallback to build_wheel hook
+ wheel = self.build('wheel', output_directory)
+ match = _WHEEL_NAME_REGEX.match(os.path.basename(wheel))
+ if not match:
+ raise ValueError('Invalid wheel')
+ distinfo = f"{match['distribution']}-{match['version']}.dist-info"
+ member_prefix = f'{distinfo}/'
+ with zipfile.ZipFile(wheel) as w:
+ w.extractall(
+ output_directory,
+ (member for member in w.namelist() if member.startswith(member_prefix)),
+ )
+ return os.path.join(output_directory, distinfo)
+
+ def _call_backend(
+ self, hook_name: str, outdir: PathType, config_settings: Optional[ConfigSettingsType] = None, **kwargs: Any
+ ) -> str:
+ outdir = os.path.abspath(outdir)
+
+ callback = getattr(self._hook, hook_name)
+
+ if os.path.exists(outdir):
+ if not os.path.isdir(outdir):
+ raise BuildException(f"Build path '{outdir}' exists and is not a directory")
+ else:
+ os.makedirs(outdir)
+
+ with self._handle_backend(hook_name):
+ basename: str = callback(outdir, config_settings, **kwargs)
+
+ return os.path.join(outdir, basename)
+
+ @contextlib.contextmanager
+ def _handle_backend(self, hook: str) -> Iterator[None]:
+ with _working_directory(self.srcdir):
+ try:
+ yield
+ except pep517.wrappers.BackendUnavailable as exception:
+ raise BuildBackendException( # noqa: B904 # use raise from
+ exception,
+ f"Backend '{self._backend}' is not available.",
+ sys.exc_info(),
+ )
+ except subprocess.CalledProcessError as exception:
+ raise BuildBackendException( # noqa: B904 # use raise from
+ exception, f'Backend subprocess exited when trying to invoke {hook}'
+ )
+ except Exception as exception:
+ raise BuildBackendException(exception, exc_info=sys.exc_info()) # noqa: B904 # use raise from
+
+ @staticmethod
+ def log(message: str) -> None:
+ """
+ Log a message.
+
+ The default implementation uses the logging module but this function can be
+ overridden by users to have a different implementation.
+
+ :param message: Message to output
+ """
+ if sys.version_info >= (3, 8):
+ _logger.log(logging.INFO, message, stacklevel=2)
+ else:
+ _logger.log(logging.INFO, message)
+
+
+__all__ = [
+ '__version__',
+ 'BuildSystemTableValidationError',
+ 'BuildBackendException',
+ 'BuildException',
+ 'ConfigSettingsType',
+ 'FailedProcessError',
+ 'ProjectBuilder',
+ 'RunnerType',
+ 'TypoWarning',
+ 'check_dependency',
+]
+
+
+def __dir__() -> List[str]:
+ return __all__
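
For orientation, a minimal usage sketch of the ProjectBuilder API defined above. The project path is hypothetical, and this runs in the current (non-isolated) environment, so the backend declared in [build-system] must already be installed:

    import build

    builder = build.ProjectBuilder('/path/to/project')  # hypothetical source tree

    # check_dependencies() returns unmet requirement chains as tuples.
    missing = builder.check_dependencies('wheel')
    if missing:
        raise SystemExit(f'Missing build dependencies: {missing}')

    # Build a wheel into ./dist; the return value is the full path to it.
    print(builder.build('wheel', 'dist'))
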
diff --git a/src/build/__main__.py b/src/build/__main__.py
new file mode 100644
index 0000000..67b21d1
--- /dev/null
+++ b/src/build/__main__.py
@@ -0,0 +1,397 @@
+# SPDX-License-Identifier: MIT
+
+
+import argparse
+import contextlib
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+import traceback
+import warnings
+
+from typing import Dict, Iterator, List, NoReturn, Optional, Sequence, TextIO, Type, Union
+
+import build
+
+from build import BuildBackendException, BuildException, ConfigSettingsType, FailedProcessError, PathType, ProjectBuilder
+from build.env import IsolatedEnvBuilder
+
+
+_COLORS = {
+ 'red': '\33[91m',
+ 'green': '\33[92m',
+ 'yellow': '\33[93m',
+ 'bold': '\33[1m',
+ 'dim': '\33[2m',
+ 'underline': '\33[4m',
+ 'reset': '\33[0m',
+}
+_NO_COLORS = {color: '' for color in _COLORS}
+
+
+def _init_colors() -> Dict[str, str]:
+ if 'NO_COLOR' in os.environ:
+ if 'FORCE_COLOR' in os.environ:
+ warnings.warn('Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color')
+ return _NO_COLORS
+ elif 'FORCE_COLOR' in os.environ or sys.stdout.isatty():
+ return _COLORS
+ return _NO_COLORS
+
+
+_STYLES = _init_colors()
+
+
+def _cprint(fmt: str = '', msg: str = '') -> None:
+ print(fmt.format(msg, **_STYLES), flush=True)
+
+
+def _showwarning(
+ message: Union[Warning, str],
+ category: Type[Warning],
+ filename: str,
+ lineno: int,
+ file: Optional[TextIO] = None,
+ line: Optional[str] = None,
+) -> None: # pragma: no cover
+ _cprint('{yellow}WARNING{reset} {}', str(message))
+
+
+def _setup_cli() -> None:
+ warnings.showwarning = _showwarning
+
+ if platform.system() == 'Windows':
+ try:
+ import colorama
+
+ colorama.init()
+ except ModuleNotFoundError:
+ pass
+
+
+def _error(msg: str, code: int = 1) -> NoReturn: # pragma: no cover
+ """
+ Print an error message and exit. Will color the output when writing to a TTY.
+
+ :param msg: Error message
+ :param code: Error code
+ """
+ _cprint('{red}ERROR{reset} {}', msg)
+ raise SystemExit(code)
+
+
+class _ProjectBuilder(ProjectBuilder):
+ @staticmethod
+ def log(message: str) -> None:
+ _cprint('{bold}* {}{reset}', message)
+
+
+class _IsolatedEnvBuilder(IsolatedEnvBuilder):
+ @staticmethod
+ def log(message: str) -> None:
+ _cprint('{bold}* {}{reset}', message)
+
+
+def _format_dep_chain(dep_chain: Sequence[str]) -> str:
+ return ' -> '.join(dep.partition(';')[0].strip() for dep in dep_chain)
+
+
+def _build_in_isolated_env(
+ builder: ProjectBuilder, outdir: PathType, distribution: str, config_settings: Optional[ConfigSettingsType]
+) -> str:
+ with _IsolatedEnvBuilder() as env:
+ builder.python_executable = env.executable
+ builder.scripts_dir = env.scripts_dir
+ # first install the build dependencies
+ env.install(builder.build_system_requires)
+ # then get the extra required dependencies from the backend (which was installed in the call above :P)
+ env.install(builder.get_requires_for_build(distribution))
+ return builder.build(distribution, outdir, config_settings or {})
+
+
+def _build_in_current_env(
+ builder: ProjectBuilder,
+ outdir: PathType,
+ distribution: str,
+ config_settings: Optional[ConfigSettingsType],
+ skip_dependency_check: bool = False,
+) -> str:
+ if not skip_dependency_check:
+ missing = builder.check_dependencies(distribution)
+ if missing:
+ dependencies = ''.join('\n\t' + dep for deps in missing for dep in (deps[0], _format_dep_chain(deps[1:])) if dep)
+ _cprint()
+ _error(f'Missing dependencies:{dependencies}')
+
+ return builder.build(distribution, outdir, config_settings or {})
+
+
+def _build(
+ isolation: bool,
+ builder: ProjectBuilder,
+ outdir: PathType,
+ distribution: str,
+ config_settings: Optional[ConfigSettingsType],
+ skip_dependency_check: bool,
+) -> str:
+ if isolation:
+ return _build_in_isolated_env(builder, outdir, distribution, config_settings)
+ else:
+ return _build_in_current_env(builder, outdir, distribution, config_settings, skip_dependency_check)
+
+
+@contextlib.contextmanager
+def _handle_build_error() -> Iterator[None]:
+ try:
+ yield
+ except (BuildException, FailedProcessError) as e:
+ _error(str(e))
+ except BuildBackendException as e:
+ if isinstance(e.exception, subprocess.CalledProcessError):
+ _cprint()
+ _error(str(e))
+
+ if e.exc_info:
+ tb_lines = traceback.format_exception(
+ e.exc_info[0],
+ e.exc_info[1],
+ e.exc_info[2],
+ limit=-1,
+ )
+ tb = ''.join(tb_lines)
+ else:
+ tb = traceback.format_exc(-1)
+ _cprint('\n{dim}{}{reset}\n', tb.strip('\n'))
+ _error(str(e))
+
+
+def _natural_language_list(elements: Sequence[str]) -> str:
+ if len(elements) == 0:
+ raise IndexError('no elements')
+ elif len(elements) == 1:
+ return elements[0]
+ else:
+ return '{} and {}'.format(
+ ', '.join(elements[:-1]),
+ elements[-1],
+ )
+
+
+def build_package(
+ srcdir: PathType,
+ outdir: PathType,
+ distributions: Sequence[str],
+ config_settings: Optional[ConfigSettingsType] = None,
+ isolation: bool = True,
+ skip_dependency_check: bool = False,
+) -> Sequence[str]:
+ """
+ Run the build process.
+
+ :param srcdir: Source directory
+ :param outdir: Output directory
+ :param distributions: Distributions to build (sdist and/or wheel)
+ :param config_settings: Configuration settings to be passed to the backend
+ :param isolation: Isolate the build in a separate environment
+ :param skip_dependency_check: Do not perform the dependency check
+ """
+ built: List[str] = []
+ builder = _ProjectBuilder(srcdir)
+ for distribution in distributions:
+ out = _build(isolation, builder, outdir, distribution, config_settings, skip_dependency_check)
+ built.append(os.path.basename(out))
+ return built
+
+
+def build_package_via_sdist(
+ srcdir: PathType,
+ outdir: PathType,
+ distributions: Sequence[str],
+ config_settings: Optional[ConfigSettingsType] = None,
+ isolation: bool = True,
+ skip_dependency_check: bool = False,
+) -> Sequence[str]:
+ """
+ Build a sdist and then the specified distributions from it.
+
+ :param srcdir: Source directory
+ :param outdir: Output directory
+ :param distributions: Distributions to build from the sdist (only wheel is allowed)
+ :param config_settings: Configuration settings to be passed to the backend
+ :param isolation: Isolate the build in a separate environment
+ :param skip_dependency_check: Do not perform the dependency check
+ """
+ if 'sdist' in distributions:
+ raise ValueError('Only binary distributions are allowed but sdist was specified')
+
+ builder = _ProjectBuilder(srcdir)
+ sdist = _build(isolation, builder, outdir, 'sdist', config_settings, skip_dependency_check)
+
+ sdist_name = os.path.basename(sdist)
+ sdist_out = tempfile.mkdtemp(prefix='build-via-sdist-')
+ built: List[str] = []
+ # extract sdist
+ with tarfile.open(sdist) as t:
+ t.extractall(sdist_out)
+ try:
+ builder = _ProjectBuilder(os.path.join(sdist_out, sdist_name[: -len('.tar.gz')]))
+ if distributions:
+ builder.log(f'Building {_natural_language_list(distributions)} from sdist')
+ for distribution in distributions:
+ out = _build(isolation, builder, outdir, distribution, config_settings, skip_dependency_check)
+ built.append(os.path.basename(out))
+ finally:
+ shutil.rmtree(sdist_out, ignore_errors=True)
+ return [sdist_name] + built
+
+
+def main_parser() -> argparse.ArgumentParser:
+ """
+ Construct the main parser.
+ """
+ parser = argparse.ArgumentParser(
+ description=textwrap.indent(
+ textwrap.dedent(
+ '''
+ A simple, correct PEP 517 build frontend.
+
+ By default, a source distribution (sdist) is built from {srcdir}
+ and a binary distribution (wheel) is built from the sdist.
+ This is recommended as it will ensure the sdist can be used
+ to build wheels.
+
+ Pass -s/--sdist and/or -w/--wheel to build a specific distribution.
+ If you do this, the default behavior will be disabled, and all
+ artifacts will be built from {srcdir} (even if you combine
+ -w/--wheel with -s/--sdist, the wheel will be built from {srcdir}).
+ '''
+ ).strip(),
+ ' ',
+ ),
+ formatter_class=argparse.RawTextHelpFormatter,
+ )
+ parser.add_argument(
+ 'srcdir',
+ type=str,
+ nargs='?',
+ default=os.getcwd(),
+ help='source directory (defaults to current directory)',
+ )
+ parser.add_argument(
+ '--version',
+ '-V',
+ action='version',
+ version=f"build {build.__version__} ({','.join(build.__path__)})",
+ )
+ parser.add_argument(
+ '--sdist',
+ '-s',
+ action='store_true',
+ help='build a source distribution (disables the default behavior)',
+ )
+ parser.add_argument(
+ '--wheel',
+ '-w',
+ action='store_true',
+ help='build a wheel (disables the default behavior)',
+ )
+ parser.add_argument(
+ '--outdir',
+ '-o',
+ type=str,
+ help=f'output directory (defaults to {{srcdir}}{os.sep}dist)',
+ )
+ parser.add_argument(
+ '--skip-dependency-check',
+ '-x',
+ action='store_true',
+ help='do not check that build dependencies are installed',
+ )
+ parser.add_argument(
+ '--no-isolation',
+ '-n',
+ action='store_true',
+ help='do not isolate the build in a virtual environment',
+ )
+ parser.add_argument(
+ '--config-setting',
+ '-C',
+ action='append',
+ help='pass options to the backend. options which begin with a hyphen must be in the form of '
+ '"--config-setting=--opt(=value)" or "-C--opt(=value)"',
+ )
+ return parser
+
+
+def main(cli_args: Sequence[str], prog: Optional[str] = None) -> None: # noqa: C901
+ """
+ Parse the CLI arguments and invoke the build process.
+
+ :param cli_args: CLI arguments
+ :param prog: Program name to show in help text
+ """
+ _setup_cli()
+ parser = main_parser()
+ if prog:
+ parser.prog = prog
+ args = parser.parse_args(cli_args)
+
+ distributions = []
+ config_settings = {}
+
+ if args.config_setting:
+ for arg in args.config_setting:
+ setting, _, value = arg.partition('=')
+ if setting not in config_settings:
+ config_settings[setting] = value
+ else:
+ if not isinstance(config_settings[setting], list):
+ config_settings[setting] = [config_settings[setting]]
+
+ config_settings[setting].append(value)
+
+ if args.sdist:
+ distributions.append('sdist')
+ if args.wheel:
+ distributions.append('wheel')
+
+ # outdir is relative to srcdir only if omitted.
+ outdir = os.path.join(args.srcdir, 'dist') if args.outdir is None else args.outdir
+
+ if distributions:
+ build_call = build_package
+ else:
+ build_call = build_package_via_sdist
+ distributions = ['wheel']
+ try:
+ with _handle_build_error():
+ built = build_call(
+ args.srcdir, outdir, distributions, config_settings, not args.no_isolation, args.skip_dependency_check
+ )
+ artifact_list = _natural_language_list(
+ ['{underline}{}{reset}{bold}{green}'.format(artifact, **_STYLES) for artifact in built]
+ )
+ _cprint('{bold}{green}Successfully built {}{reset}', artifact_list)
+ except Exception as e: # pragma: no cover
+ tb = traceback.format_exc().strip('\n')
+ _cprint('\n{dim}{}{reset}\n', tb)
+ _error(str(e))
+
+
+def entrypoint() -> None:
+ main(sys.argv[1:])
+
+
+if __name__ == '__main__': # pragma: no cover
+ main(sys.argv[1:], 'python -m build')
+
+
+__all__ = [
+ 'main',
+ 'main_parser',
+]
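
The module above also exposes build_package and build_package_via_sdist for programmatic use; a minimal sketch, assuming a hypothetical source path:

    from build.__main__ import build_package, build_package_via_sdist

    # Roughly equivalent to `python -m build --wheel /path/to/project`:
    # build only a wheel, in an isolated environment, into ./dist.
    print(build_package('/path/to/project', 'dist', ['wheel']))

    # Roughly equivalent to the default `python -m build`: build an sdist,
    # then build a wheel from that sdist.
    print(build_package_via_sdist('/path/to/project', 'dist', ['wheel']))

Both calls return the basenames of the built artifacts.
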
diff --git a/src/build/env.py b/src/build/env.py
new file mode 100644
index 0000000..b4a90a9
--- /dev/null
+++ b/src/build/env.py
@@ -0,0 +1,340 @@
+"""
+Creates and manages isolated build environments.
+"""
+import abc
+import functools
+import logging
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import sysconfig
+import tempfile
+import warnings
+
+from types import TracebackType
+from typing import Callable, Collection, List, Optional, Tuple, Type
+
+import build
+
+
+try:
+ import virtualenv
+except ModuleNotFoundError:
+ virtualenv = None
+
+
+_logger = logging.getLogger(__name__)
+
+
+class IsolatedEnv(metaclass=abc.ABCMeta):
+ """Abstract base of isolated build environments, as required by the build project."""
+
+ @property
+ @abc.abstractmethod
+ def executable(self) -> str:
+ """The executable of the isolated build environment."""
+ raise NotImplementedError
+
+ @property
+ @abc.abstractmethod
+ def scripts_dir(self) -> str:
+ """The scripts directory of the isolated build environment."""
+ raise NotImplementedError
+
+ @abc.abstractmethod
+ def install(self, requirements: Collection[str]) -> None:
+ """
+ Install packages from PEP 508 requirements in the isolated build environment.
+
+ :param requirements: PEP 508 requirements
+ """
+ raise NotImplementedError
+
+
+@functools.lru_cache(maxsize=None)
+def _should_use_virtualenv() -> bool:
+ import packaging.requirements
+
+ # virtualenv might be incompatible if it was installed separately
+ # from build. This verifies that virtualenv and all of its
+ # dependencies are installed as specified by build.
+ return virtualenv is not None and not any(
+ packaging.requirements.Requirement(d[1]).name == 'virtualenv'
+ for d in build.check_dependency('build[virtualenv]')
+ if len(d) > 1
+ )
+
+
+def _subprocess(cmd: List[str]) -> None:
+ """Invoke subprocess and output stdout and stderr if it fails."""
+ try:
+ subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ print(e.output.decode(), end='', file=sys.stderr)
+ raise e
+
+
+class IsolatedEnvBuilder:
+ """Builder object for isolated environments."""
+
+ def __init__(self) -> None:
+ self._path: Optional[str] = None
+
+ def __enter__(self) -> IsolatedEnv:
+ """
+ Create an isolated build environment.
+
+ :return: The isolated build environment
+ """
+ # Call ``realpath`` to prevent spurious warning from being emitted
+ # that the venv location has changed on Windows. The username is
+ # DOS-encoded in the output of tempfile - the location is the same
+ # but the representation of it is different, which confuses venv.
+ # Ref: https://bugs.python.org/issue46171
+ self._path = os.path.realpath(tempfile.mkdtemp(prefix='build-env-'))
+ try:
+ # use virtualenv when available (as it's faster than venv)
+ if _should_use_virtualenv():
+ self.log('Creating virtualenv isolated environment...')
+ executable, scripts_dir = _create_isolated_env_virtualenv(self._path)
+ else:
+ self.log('Creating venv isolated environment...')
+ executable, scripts_dir = _create_isolated_env_venv(self._path)
+ return _IsolatedEnvVenvPip(
+ path=self._path,
+ python_executable=executable,
+ scripts_dir=scripts_dir,
+ log=self.log,
+ )
+ except Exception: # cleanup folder if creation fails
+ self.__exit__(*sys.exc_info())
+ raise
+
+ def __exit__(
+ self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
+ ) -> None:
+ """
+ Delete the created isolated build environment.
+
+ :param exc_type: The type of exception raised (if any)
+ :param exc_val: The value of exception raised (if any)
+ :param exc_tb: The traceback of exception raised (if any)
+ """
+ if self._path is not None and os.path.exists(self._path): # in case the user already deleted skip remove
+ shutil.rmtree(self._path)
+
+ @staticmethod
+ def log(message: str) -> None:
+ """
+ Log a message.
+
+ The default implementation uses the logging module but this function can be
+ overridden by users to have a different implementation.
+
+ :param message: Message to output
+ """
+ if sys.version_info >= (3, 8):
+ _logger.log(logging.INFO, message, stacklevel=2)
+ else:
+ _logger.log(logging.INFO, message)
+
+
+class _IsolatedEnvVenvPip(IsolatedEnv):
+ """
+ Isolated build environment context manager
+
+ Non-standard paths injected directly to sys.path will still be passed to the environment.
+ """
+
+ def __init__(
+ self,
+ path: str,
+ python_executable: str,
+ scripts_dir: str,
+ log: Callable[[str], None],
+ ) -> None:
+ """
+ :param path: The path where the environment exists
+ :param python_executable: The python executable within the environment
+ :param log: Log function
+ """
+ self._path = path
+ self._python_executable = python_executable
+ self._scripts_dir = scripts_dir
+ self._log = log
+
+ @property
+ def path(self) -> str:
+ """The location of the isolated build environment."""
+ return self._path
+
+ @property
+ def executable(self) -> str:
+ """The python executable of the isolated build environment."""
+ return self._python_executable
+
+ @property
+ def scripts_dir(self) -> str:
+ return self._scripts_dir
+
+ def install(self, requirements: Collection[str]) -> None:
+ """
+ Install packages from PEP 508 requirements in the isolated build environment.
+
+ :param requirements: PEP 508 requirement specification to install
+
+ :note: Passing non-PEP 508 strings will result in undefined behavior; you *should not* rely on it. It is
+ merely an implementation detail and may change at any time without warning.
+ """
+ if not requirements:
+ return
+
+ self._log('Installing packages in isolated environment... ({})'.format(', '.join(sorted(requirements))))
+
+ # pip does not honour environment markers in command line arguments
+ # but it does for requirements from a file
+ with tempfile.NamedTemporaryFile('w+', prefix='build-reqs-', suffix='.txt', delete=False) as req_file:
+ req_file.write(os.linesep.join(requirements))
+ try:
+ cmd = [
+ self.executable,
+ '-Im',
+ 'pip',
+ 'install',
+ '--use-pep517',
+ '--no-warn-script-location',
+ '-r',
+ os.path.abspath(req_file.name),
+ ]
+ _subprocess(cmd)
+ finally:
+ os.unlink(req_file.name)
+
+
+def _create_isolated_env_virtualenv(path: str) -> Tuple[str, str]:
+ """
+ Optionally use the virtualenv package to provision a virtual environment.
+
+ :param path: The path where to create the isolated build environment
+ :return: The Python executable and script folder
+ """
+ cmd = [str(path), '--no-setuptools', '--no-wheel', '--activators', '']
+ result = virtualenv.cli_run(cmd, setup_logging=False)
+ executable = str(result.creator.exe)
+ script_dir = str(result.creator.script_dir)
+ return executable, script_dir
+
+
+@functools.lru_cache(maxsize=None)
+def _fs_supports_symlink() -> bool:
+ """Return True if symlinks are supported"""
+ # Using definition used by venv.main()
+ if not sys.platform.startswith('win'):
+ return True
+
+ # Windows may support symlinks (setting in Windows 10)
+ with tempfile.NamedTemporaryFile(prefix='build-symlink-') as tmp_file:
+ dest = f'{tmp_file.name}-b'
+ try:
+ os.symlink(tmp_file.name, dest)
+ os.unlink(dest)
+ return True
+ except (OSError, NotImplementedError, AttributeError):
+ return False
+
+
+def _create_isolated_env_venv(path: str) -> Tuple[str, str]:
+ """
+ On Python 3 we use the venv package from the standard library.
+
+ :param path: The path where to create the isolated build environment
+ :return: The Python executable and script folder
+ """
+ import venv
+
+ import packaging.version
+
+ if sys.version_info < (3, 8):
+ import importlib_metadata as metadata
+ else:
+ from importlib import metadata
+
+ symlinks = _fs_supports_symlink()
+ try:
+ with warnings.catch_warnings():
+ if sys.version_info[:3] == (3, 11, 0):
+ warnings.filterwarnings('ignore', 'check_home argument is deprecated and ignored.', DeprecationWarning)
+ venv.EnvBuilder(with_pip=True, symlinks=symlinks).create(path)
+ except subprocess.CalledProcessError as exc:
+ raise build.FailedProcessError(exc, 'Failed to create venv. Maybe try installing virtualenv.') from None
+
+ executable, script_dir, purelib = _find_executable_and_scripts(path)
+
+ # Get the version of pip in the environment
+ pip_distribution = next(iter(metadata.distributions(name='pip', path=[purelib]))) # type: ignore[no-untyped-call]
+ current_pip_version = packaging.version.Version(pip_distribution.version)
+
+ if platform.system() == 'Darwin' and int(platform.mac_ver()[0].split('.')[0]) >= 11:
+ # macOS 11+ name scheme change requires 20.3. Intel macOS 11.0 can be told to report 10.16 for backwards
+ # compatibility; but that also fixes earlier versions of pip so this is only needed for 11+.
+ is_apple_silicon_python = platform.machine() != 'x86_64'
+ minimum_pip_version = '21.0.1' if is_apple_silicon_python else '20.3.0'
+ else:
+ # PEP-517 and manylinux1 was first implemented in 19.1
+ minimum_pip_version = '19.1.0'
+
+ if current_pip_version < packaging.version.Version(minimum_pip_version):
+ _subprocess([executable, '-m', 'pip', 'install', f'pip>={minimum_pip_version}'])
+
+ # Prevent the setuptools installed by ensurepip from breaking the isolation
+ _subprocess([executable, '-m', 'pip', 'uninstall', 'setuptools', '-y'])
+ return executable, script_dir
+
+
+def _find_executable_and_scripts(path: str) -> Tuple[str, str, str]:
+ """
+ Detect the Python executable and script folder of a virtual environment.
+
+ :param path: The location of the virtual environment
+ :return: The Python executable, script folder, and purelib folder
+ """
+ config_vars = sysconfig.get_config_vars().copy() # globally cached, copy before altering it
+ config_vars['base'] = path
+ scheme_names = sysconfig.get_scheme_names()
+ if 'venv' in scheme_names:
+ # Python distributors with custom default installation scheme can set a
+ # scheme that can't be used to expand the paths in a venv.
+ # This can happen if build itself is not installed in a venv.
+ # The distributors are encouraged to set a "venv" scheme to be used for this.
+ # See https://bugs.python.org/issue45413
+ # and https://github.com/pypa/virtualenv/issues/2208
+ paths = sysconfig.get_paths(scheme='venv', vars=config_vars)
+ elif 'posix_local' in scheme_names:
+ # The Python that ships on Debian/Ubuntu varies the default scheme to
+ # install to /usr/local
+ # But it does not (yet) set the "venv" scheme.
+ # If the Debian "posix_local" scheme is available but "venv"
+ # is not, we use "posix_prefix" instead, which is venv-compatible there.
+ paths = sysconfig.get_paths(scheme='posix_prefix', vars=config_vars)
+ elif 'osx_framework_library' in scheme_names:
+ # The Python that ships with the macOS developer tools varies the
+ # default scheme depending on whether the ``sys.prefix`` is part of a framework.
+ # But it does not (yet) set the "venv" scheme.
+ # If the Apple-custom "osx_framework_library" scheme is available but "venv"
+ # is not, we use "posix_prefix" instead which is venv-compatible there.
+ paths = sysconfig.get_paths(scheme='posix_prefix', vars=config_vars)
+ else:
+ paths = sysconfig.get_paths(vars=config_vars)
+ executable = os.path.join(paths['scripts'], 'python.exe' if sys.platform.startswith('win') else 'python')
+ if not os.path.exists(executable):
+ raise RuntimeError(f'Virtual environment creation failed, executable {executable} missing')
+
+ return executable, paths['scripts'], paths['purelib']
+
+
+__all__ = [
+ 'IsolatedEnvBuilder',
+ 'IsolatedEnv',
+]
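
A minimal sketch of the environment API above; the requirements listed here are only illustrative:

    from build.env import IsolatedEnvBuilder

    with IsolatedEnvBuilder() as env:
        print(env.executable)    # interpreter inside the temporary environment
        print(env.scripts_dir)   # its scripts/bin directory
        env.install(['setuptools >= 40.8.0', 'wheel'])  # PEP 508 requirements
    # The temporary environment is deleted when the with-block exits.
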
diff --git a/src/build/py.typed b/src/build/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/py.typed
diff --git a/src/build/util.py b/src/build/util.py
new file mode 100644
index 0000000..6c918ce
--- /dev/null
+++ b/src/build/util.py
@@ -0,0 +1,60 @@
+# SPDX-License-Identifier: MIT
+
+import os
+import pathlib
+import sys
+import tempfile
+
+import pep517
+
+import build
+import build.env
+
+
+if sys.version_info >= (3, 8):
+ import importlib.metadata as importlib_metadata
+else:
+ import importlib_metadata
+
+
+def _project_wheel_metadata(builder: build.ProjectBuilder) -> 'importlib_metadata.PackageMetadata':
+ with tempfile.TemporaryDirectory() as tmpdir:
+ path = pathlib.Path(builder.metadata_path(tmpdir))
+ # https://github.com/python/importlib_metadata/pull/343
+ return importlib_metadata.PathDistribution(path).metadata # type: ignore[arg-type]
+
+
+def project_wheel_metadata(
+ srcdir: build.PathType,
+ isolated: bool = True,
+) -> 'importlib_metadata.PackageMetadata':
+ """
+ Return the wheel metadata for a project.
+
+ Uses the ``prepare_metadata_for_build_wheel`` hook if available,
+ otherwise ``build_wheel``.
+
+ :param srcdir: Project source directory
+ :param isolated: Whether to create an isolated environment and invoke the
+ backend there, or to invoke it in the current environment.
+ """
+ builder = build.ProjectBuilder(
+ os.fspath(srcdir),
+ runner=pep517.quiet_subprocess_runner,
+ )
+
+ if not isolated:
+ return _project_wheel_metadata(builder)
+
+ with build.env.IsolatedEnvBuilder() as env:
+ builder.python_executable = env.executable
+ builder.scripts_dir = env.scripts_dir
+ env.install(builder.build_system_requires)
+ env.install(builder.get_requires_for_build('wheel'))
+ return _project_wheel_metadata(builder)
+
+
+__all__ = [
+ 'project_wheel_metadata',
+]
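
Finally, a minimal sketch of the helper added in util.py; the path is hypothetical, and the result behaves like an importlib.metadata PackageMetadata mapping:

    from build.util import project_wheel_metadata

    meta = project_wheel_metadata('/path/to/project')  # isolated=True by default
    print(meta['Name'], meta['Version'])
    print(meta.get_all('Requires-Dist'))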