summaryrefslogtreecommitdiffstats
path: root/flit_core
diff options
context:
space:
mode:
Diffstat (limited to 'flit_core')
-rw-r--r--flit_core/LICENSE29
-rw-r--r--flit_core/README.rst6
-rw-r--r--flit_core/bootstrap_install.py57
-rw-r--r--flit_core/build_dists.py17
-rw-r--r--flit_core/flit_core/__init__.py7
-rw-r--r--flit_core/flit_core/buildapi.py83
-rw-r--r--flit_core/flit_core/common.py449
-rw-r--r--flit_core/flit_core/config.py660
-rw-r--r--flit_core/flit_core/sdist.py202
-rw-r--r--flit_core/flit_core/tests/__init__.py0
-rw-r--r--flit_core/flit_core/tests/samples/EG_README.rst4
-rw-r--r--flit_core/flit_core/tests/samples/bad-description-ext.toml9
-rw-r--r--flit_core/flit_core/tests/samples/conflicting_modules/module1.py0
-rw-r--r--flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml8
-rw-r--r--flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py0
-rw-r--r--flit_core/flit_core/tests/samples/constructed_version/module1.py4
-rw-r--r--flit_core/flit_core/tests/samples/constructed_version/pyproject.toml12
-rw-r--r--flit_core/flit_core/tests/samples/extras-dev-conflict.toml13
-rw-r--r--flit_core/flit_core/tests/samples/extras.toml15
-rw-r--r--flit_core/flit_core/tests/samples/imported_version/package1/__init__.py3
-rw-r--r--flit_core/flit_core/tests/samples/imported_version/package1/_version.py1
-rw-r--r--flit_core/flit_core/tests/samples/imported_version/pyproject.toml10
-rw-r--r--flit_core/flit_core/tests/samples/inclusion/LICENSES/README2
-rw-r--r--flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md0
-rw-r--r--flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt1
-rw-r--r--flit_core/flit_core/tests/samples/inclusion/doc/test.rst1
-rw-r--r--flit_core/flit_core/tests/samples/inclusion/doc/test.txt1
-rw-r--r--flit_core/flit_core/tests/samples/inclusion/module1.py3
-rw-r--r--flit_core/flit_core/tests/samples/inclusion/pyproject.toml12
-rw-r--r--flit_core/flit_core/tests/samples/invalid_version1.py3
-rw-r--r--flit_core/flit_core/tests/samples/missing-description-file.toml9
-rw-r--r--flit_core/flit_core/tests/samples/misspelled-key.toml10
-rw-r--r--flit_core/flit_core/tests/samples/module1-pkg.ini5
-rw-r--r--flit_core/flit_core/tests/samples/module1-pkg.toml12
-rw-r--r--flit_core/flit_core/tests/samples/module1.py3
-rw-r--r--flit_core/flit_core/tests/samples/module2.py10
-rw-r--r--flit_core/flit_core/tests/samples/moduleunimportable.py8
-rw-r--r--flit_core/flit_core/tests/samples/moduleunimportabledouble.py8
-rw-r--r--flit_core/flit_core/tests/samples/my-description.rst1
-rw-r--r--flit_core/flit_core/tests/samples/no_docstring-pkg.toml12
-rw-r--r--flit_core/flit_core/tests/samples/no_docstring.py1
-rw-r--r--flit_core/flit_core/tests/samples/normalization/my_python_module.py0
-rw-r--r--flit_core/flit_core/tests/samples/normalization/pyproject.toml14
-rw-r--r--flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst4
-rw-r--r--flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py8
-rw-r--r--flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml10
-rw-r--r--flit_core/flit_core/tests/samples/package1.toml13
-rw-r--r--flit_core/flit_core/tests/samples/package1/__init__.py6
-rw-r--r--flit_core/flit_core/tests/samples/package1/data_dir/foo.sh2
-rw-r--r--flit_core/flit_core/tests/samples/package1/foo.py1
-rw-r--r--flit_core/flit_core/tests/samples/package1/subpkg/__init__.py0
-rw-r--r--flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json1
-rw-r--r--flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py0
-rw-r--r--flit_core/flit_core/tests/samples/pep517/LICENSE1
-rw-r--r--flit_core/flit_core/tests/samples/pep517/README.rst1
-rw-r--r--flit_core/flit_core/tests/samples/pep517/module1.py3
-rw-r--r--flit_core/flit_core/tests/samples/pep517/pyproject.toml17
-rw-r--r--flit_core/flit_core/tests/samples/pep621/LICENSE1
-rw-r--r--flit_core/flit_core/tests/samples/pep621/README.rst1
-rw-r--r--flit_core/flit_core/tests/samples/pep621/module1a.py3
-rw-r--r--flit_core/flit_core/tests/samples/pep621/pyproject.toml39
-rw-r--r--flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst1
-rw-r--r--flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py0
-rw-r--r--flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml28
-rw-r--r--flit_core/flit_core/tests/samples/requires-dev.toml11
-rw-r--r--flit_core/flit_core/tests/samples/requires-envmark.toml12
-rw-r--r--flit_core/flit_core/tests/samples/requires-extra-envmark.toml12
-rw-r--r--flit_core/flit_core/tests/samples/requires-requests.toml10
-rw-r--r--flit_core/flit_core/tests/samples/with_data_dir/LICENSE1
-rw-r--r--flit_core/flit_core/tests/samples/with_data_dir/README.rst1
-rw-r--r--flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.11
-rw-r--r--flit_core/flit_core/tests/samples/with_data_dir/module1.py3
-rw-r--r--flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml26
-rw-r--r--flit_core/flit_core/tests/test_build_thyself.py57
-rw-r--r--flit_core/flit_core/tests/test_buildapi.py93
-rw-r--r--flit_core/flit_core/tests/test_common.py158
-rw-r--r--flit_core/flit_core/tests/test_config.py165
-rw-r--r--flit_core/flit_core/tests/test_sdist.py61
-rw-r--r--flit_core/flit_core/tests/test_versionno.py40
-rw-r--r--flit_core/flit_core/tests/test_wheel.py47
-rw-r--r--flit_core/flit_core/vendor/README13
-rw-r--r--flit_core/flit_core/vendor/__init__.py0
-rw-r--r--flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE21
-rw-r--r--flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA208
-rw-r--r--flit_core/flit_core/vendor/tomli/__init__.py9
-rw-r--r--flit_core/flit_core/vendor/tomli/_parser.py663
-rw-r--r--flit_core/flit_core/vendor/tomli/_re.py101
-rw-r--r--flit_core/flit_core/vendor/tomli/_types.py6
-rw-r--r--flit_core/flit_core/vendor/tomli/py.typed1
-rw-r--r--flit_core/flit_core/versionno.py127
-rw-r--r--flit_core/flit_core/wheel.py259
-rw-r--r--flit_core/pyproject.toml25
-rwxr-xr-xflit_core/update-vendored-tomli.sh18
93 files changed, 3994 insertions, 0 deletions
diff --git a/flit_core/LICENSE b/flit_core/LICENSE
new file mode 100644
index 0000000..1bd2e2d
--- /dev/null
+++ b/flit_core/LICENSE
@@ -0,0 +1,29 @@
+Copyright (c) 2015, Thomas Kluyver and contributors
+All rights reserved.
+
+BSD 3-clause license:
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/flit_core/README.rst b/flit_core/README.rst
new file mode 100644
index 0000000..6d9818f
--- /dev/null
+++ b/flit_core/README.rst
@@ -0,0 +1,6 @@
+flit_core
+---------
+
+This provides a PEP 517 build backend for packages using Flit.
+The only public interface is the API specified by PEP 517, at ``flit_core.buildapi``.
+
diff --git a/flit_core/bootstrap_install.py b/flit_core/bootstrap_install.py
new file mode 100644
index 0000000..1fd4c87
--- /dev/null
+++ b/flit_core/bootstrap_install.py
@@ -0,0 +1,57 @@
+"""Install flit_core without using any other tools.
+
+Normally, you would install flit_core with pip like any other Python package.
+This script is meant to help with 'bootstrapping' other packaging
+systems, where you may need flit_core to build other packaging tools.
+
+Use 'python -m flit_core.wheel' to make a wheel, then:
+
+ python bootstrap_install.py flit_core-3.6.0-py3-none-any.whl
+
+To install for something other than the Python running the script, pass a
+site-packages or equivalent directory with the --installdir option.
+"""
+import argparse
+import sys
+import sysconfig
+from pathlib import Path
+from zipfile import ZipFile
+
+def extract_wheel(whl_path, dest):
+ print("Installing to", dest)
+ with ZipFile(whl_path) as zf:
+ zf.extractall(dest)
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ 'wheel',
+ type=Path,
+ help=f'flit_core wheel to install (.whl file)',
+ )
+ purelib = Path(sysconfig.get_path('purelib')).resolve()
+ parser.add_argument(
+ '--installdir',
+ '-i',
+ type=Path,
+ default=purelib,
+ help=f'installdir directory (defaults to {purelib})',
+ )
+ parser.add_argument(
+ '--install-root',
+ type=Path,
+ default=None,
+ help='if given, installdir is considered to be under this'
+ )
+
+ args = parser.parse_args()
+
+ if not args.wheel.name.startswith('flit_core-'):
+ sys.exit("Use this script only for flit_core wheels")
+ if args.install_root:
+ installdir = args.install_root / args.installdir.relative_to("/")
+ else:
+ installdir = args.installdir
+
+ installdir.mkdir(parents=True, exist_ok=True)
+ extract_wheel(args.wheel, installdir)
diff --git a/flit_core/build_dists.py b/flit_core/build_dists.py
new file mode 100644
index 0000000..efbce59
--- /dev/null
+++ b/flit_core/build_dists.py
@@ -0,0 +1,17 @@
+"""Build flit_core to upload to PyPI.
+
+Normally, this should only be used by me when making a release.
+"""
+import os
+
+from flit_core import buildapi
+
+os.chdir(os.path.dirname(os.path.abspath(__file__)))
+
+print("Building sdist")
+sdist_fname = buildapi.build_sdist('dist/')
+print(os.path.join('dist', sdist_fname))
+
+print("\nBuilding wheel")
+whl_fname = buildapi.build_wheel('dist/')
+print(os.path.join('dist', whl_fname))
diff --git a/flit_core/flit_core/__init__.py b/flit_core/flit_core/__init__.py
new file mode 100644
index 0000000..d3125ef
--- /dev/null
+++ b/flit_core/flit_core/__init__.py
@@ -0,0 +1,7 @@
+"""Flit's core machinery for building packages.
+
+This package provides a standard PEP 517 API to build packages using Flit.
+All the convenient development features live in the main 'flit' package.
+"""
+
+__version__ = '3.8.0'
diff --git a/flit_core/flit_core/buildapi.py b/flit_core/flit_core/buildapi.py
new file mode 100644
index 0000000..963bf61
--- /dev/null
+++ b/flit_core/flit_core/buildapi.py
@@ -0,0 +1,83 @@
+"""PEP-517 compliant buildsystem API"""
+import logging
+import io
+import os
+import os.path as osp
+from pathlib import Path
+
+from .common import (
+ Module, make_metadata, write_entry_points, dist_info_name,
+ get_docstring_and_version_via_ast,
+)
+from .config import read_flit_config
+from .wheel import make_wheel_in, _write_wheel_file
+from .sdist import SdistBuilder
+
+log = logging.getLogger(__name__)
+
+# PEP 517 specifies that the CWD will always be the source tree
+pyproj_toml = Path('pyproject.toml')
+
+def get_requires_for_build_wheel(config_settings=None):
+ """Returns a list of requirements for building, as strings"""
+ info = read_flit_config(pyproj_toml)
+ # If we can get version & description from pyproject.toml (PEP 621), or
+ # by parsing the module (_via_ast), we don't need any extra
+ # dependencies. If not, we'll need to try importing it, so report any
+ # runtime dependencies as build dependencies.
+ want_summary = 'description' in info.dynamic_metadata
+ want_version = 'version' in info.dynamic_metadata
+
+ module = Module(info.module, Path.cwd())
+ docstring, version = get_docstring_and_version_via_ast(module)
+
+ if (want_summary and not docstring) or (want_version and not version):
+ return info.metadata.get('requires_dist', [])
+ else:
+ return []
+
+# Requirements to build an sdist are the same as for a wheel
+get_requires_for_build_sdist = get_requires_for_build_wheel
+
+# Requirements to build an editable are the same as for a wheel
+get_requires_for_build_editable = get_requires_for_build_wheel
+
+def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):
+ """Creates {metadata_directory}/foo-1.2.dist-info"""
+ ini_info = read_flit_config(pyproj_toml)
+ module = Module(ini_info.module, Path.cwd())
+ metadata = make_metadata(module, ini_info)
+
+ dist_info = osp.join(metadata_directory,
+ dist_info_name(metadata.name, metadata.version))
+ os.mkdir(dist_info)
+
+ with io.open(osp.join(dist_info, 'WHEEL'), 'w', encoding='utf-8') as f:
+ _write_wheel_file(f, supports_py2=metadata.supports_py2)
+
+ with io.open(osp.join(dist_info, 'METADATA'), 'w', encoding='utf-8') as f:
+ metadata.write_metadata_file(f)
+
+ if ini_info.entrypoints:
+ with io.open(osp.join(dist_info, 'entry_points.txt'), 'w', encoding='utf-8') as f:
+ write_entry_points(ini_info.entrypoints, f)
+
+ return osp.basename(dist_info)
+
+# Metadata for editable are the same as for a wheel
+prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel
+
+def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+ """Builds a wheel, places it in wheel_directory"""
+ info = make_wheel_in(pyproj_toml, Path(wheel_directory))
+ return info.file.name
+
+def build_editable(wheel_directory, config_settings=None, metadata_directory=None):
+ """Builds an "editable" wheel, places it in wheel_directory"""
+ info = make_wheel_in(pyproj_toml, Path(wheel_directory), editable=True)
+ return info.file.name
+
+def build_sdist(sdist_directory, config_settings=None):
+ """Builds an sdist, places it in sdist_directory"""
+ path = SdistBuilder.from_ini_path(pyproj_toml).build(Path(sdist_directory))
+ return path.name
diff --git a/flit_core/flit_core/common.py b/flit_core/flit_core/common.py
new file mode 100644
index 0000000..68d91bb
--- /dev/null
+++ b/flit_core/flit_core/common.py
@@ -0,0 +1,449 @@
+import ast
+from contextlib import contextmanager
+import hashlib
+import logging
+import os
+import sys
+
+from pathlib import Path
+import re
+
+log = logging.getLogger(__name__)
+
+from .versionno import normalise_version
+
+class Module(object):
+ """This represents the module/package that we are going to distribute
+ """
+ in_namespace_package = False
+ namespace_package_name = None
+
+ def __init__(self, name, directory=Path()):
+ self.name = name
+
+ # It must exist either as a .py file or a directory, but not both
+ name_as_path = name.replace('.', os.sep)
+ pkg_dir = directory / name_as_path
+ py_file = directory / (name_as_path+'.py')
+ src_pkg_dir = directory / 'src' / name_as_path
+ src_py_file = directory / 'src' / (name_as_path+'.py')
+
+ existing = set()
+ if pkg_dir.is_dir():
+ self.path = pkg_dir
+ self.is_package = True
+ self.prefix = ''
+ existing.add(pkg_dir)
+ if py_file.is_file():
+ self.path = py_file
+ self.is_package = False
+ self.prefix = ''
+ existing.add(py_file)
+ if src_pkg_dir.is_dir():
+ self.path = src_pkg_dir
+ self.is_package = True
+ self.prefix = 'src'
+ existing.add(src_pkg_dir)
+ if src_py_file.is_file():
+ self.path = src_py_file
+ self.is_package = False
+ self.prefix = 'src'
+ existing.add(src_py_file)
+
+ if len(existing) > 1:
+ raise ValueError(
+ "Multiple files or folders could be module {}: {}"
+ .format(name, ", ".join([str(p) for p in sorted(existing)]))
+ )
+ elif not existing:
+ raise ValueError("No file/folder found for module {}".format(name))
+
+ self.source_dir = directory / self.prefix
+
+ if '.' in name:
+ self.namespace_package_name = name.rpartition('.')[0]
+ self.in_namespace_package = True
+
+ @property
+ def file(self):
+ if self.is_package:
+ return self.path / '__init__.py'
+ else:
+ return self.path
+
+ def iter_files(self):
+ """Iterate over the files contained in this module.
+
+ Yields absolute paths - caller may want to make them relative.
+ Excludes any __pycache__ and *.pyc files.
+ """
+ def _include(path):
+ name = os.path.basename(path)
+ if (name == '__pycache__') or name.endswith('.pyc'):
+ return False
+ return True
+
+ if self.is_package:
+ # Ensure we sort all files and directories so the order is stable
+ for dirpath, dirs, files in os.walk(str(self.path)):
+ for file in sorted(files):
+ full_path = os.path.join(dirpath, file)
+ if _include(full_path):
+ yield full_path
+
+ dirs[:] = [d for d in sorted(dirs) if _include(d)]
+
+ else:
+ yield str(self.path)
+
+class ProblemInModule(ValueError): pass
+class NoDocstringError(ProblemInModule): pass
+class NoVersionError(ProblemInModule): pass
+class InvalidVersion(ProblemInModule): pass
+
+class VCSError(Exception):
+ def __init__(self, msg, directory):
+ self.msg = msg
+ self.directory = directory
+
+ def __str__(self):
+ return self.msg + ' ({})'.format(self.directory)
+
+
+@contextmanager
+def _module_load_ctx():
+ """Preserve some global state that modules might change at import time.
+
+ - Handlers on the root logger.
+ """
+ logging_handlers = logging.root.handlers[:]
+ try:
+ yield
+ finally:
+ logging.root.handlers = logging_handlers
+
+def get_docstring_and_version_via_ast(target):
+ """
+ Return a tuple like (docstring, version) for the given module,
+ extracted by parsing its AST.
+ """
+ # read as bytes to enable custom encodings
+ with target.file.open('rb') as f:
+ node = ast.parse(f.read())
+ for child in node.body:
+ # Only use the version from the given module if it's a simple
+ # string assignment to __version__
+ is_version_str = (
+ isinstance(child, ast.Assign)
+ and any(
+ isinstance(target, ast.Name)
+ and target.id == "__version__"
+ for target in child.targets
+ )
+ and isinstance(child.value, ast.Str)
+ )
+ if is_version_str:
+ version = child.value.s
+ break
+ else:
+ version = None
+ return ast.get_docstring(node), version
+
+
+# To ensure we're actually loading the specified file, give it a unique name to
+# avoid any cached import. In normal use we'll only load one module per process,
+# so it should only matter for the tests, but we'll do it anyway.
+_import_i = 0
+
+
+def get_docstring_and_version_via_import(target):
+ """
+ Return a tuple like (docstring, version) for the given module,
+ extracted by importing the module and pulling __doc__ & __version__
+ from it.
+ """
+ global _import_i
+ _import_i += 1
+
+ log.debug("Loading module %s", target.file)
+ from importlib.util import spec_from_file_location, module_from_spec
+ mod_name = 'flit_core.dummy.import%d' % _import_i
+ spec = spec_from_file_location(mod_name, target.file)
+ with _module_load_ctx():
+ m = module_from_spec(spec)
+ # Add the module to sys.modules to allow relative imports to work.
+ # importlib has more code around this to handle the case where two
+ # threads are trying to load the same module at the same time, but Flit
+ # should always be running a single thread, so we won't duplicate that.
+ sys.modules[mod_name] = m
+ try:
+ spec.loader.exec_module(m)
+ finally:
+ sys.modules.pop(mod_name, None)
+
+ docstring = m.__dict__.get('__doc__', None)
+ version = m.__dict__.get('__version__', None)
+ return docstring, version
+
+
+def get_info_from_module(target, for_fields=('version', 'description')):
+ """Load the module/package, get its docstring and __version__
+ """
+ if not for_fields:
+ return {}
+
+ # What core metadata calls Summary, PEP 621 calls description
+ want_summary = 'description' in for_fields
+ want_version = 'version' in for_fields
+
+ log.debug("Loading module %s", target.file)
+
+ # Attempt to extract our docstring & version by parsing our target's
+ # AST, falling back to an import if that fails. This allows us to
+ # build without necessarily requiring that our built package's
+ # requirements are installed.
+ docstring, version = get_docstring_and_version_via_ast(target)
+ if (want_summary and not docstring) or (want_version and not version):
+ docstring, version = get_docstring_and_version_via_import(target)
+
+ res = {}
+
+ if want_summary:
+ if (not docstring) or not docstring.strip():
+ raise NoDocstringError(
+ 'Flit cannot package module without docstring, or empty docstring. '
+ 'Please add a docstring to your module ({}).'.format(target.file)
+ )
+ res['summary'] = docstring.lstrip().splitlines()[0]
+
+ if want_version:
+ res['version'] = check_version(version)
+
+ return res
+
+def check_version(version):
+ """
+ Check whether a given version string match PEP 440, and do normalisation.
+
+ Raise InvalidVersion/NoVersionError with relevant information if
+ version is invalid.
+
+ Log a warning if the version is not canonical with respect to PEP 440.
+
+ Returns the version in canonical PEP 440 format.
+ """
+ if not version:
+ raise NoVersionError('Cannot package module without a version string. '
+ 'Please define a `__version__ = "x.y.z"` in your module.')
+ if not isinstance(version, str):
+ raise InvalidVersion('__version__ must be a string, not {}.'
+ .format(type(version)))
+
+ # Import here to avoid circular import
+ version = normalise_version(version)
+
+ return version
+
+
+script_template = """\
+#!{interpreter}
+# -*- coding: utf-8 -*-
+import re
+import sys
+from {module} import {import_name}
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\\.pyw|\\.exe)?$', '', sys.argv[0])
+ sys.exit({func}())
+"""
+
+def parse_entry_point(ep):
+ """Check and parse a 'package.module:func' style entry point specification.
+
+ Returns (modulename, funcname)
+ """
+ if ':' not in ep:
+ raise ValueError("Invalid entry point (no ':'): %r" % ep)
+ mod, func = ep.split(':')
+
+ for piece in func.split('.'):
+ if not piece.isidentifier():
+ raise ValueError("Invalid entry point: %r is not an identifier" % piece)
+ for piece in mod.split('.'):
+ if not piece.isidentifier():
+ raise ValueError("Invalid entry point: %r is not a module path" % piece)
+
+ return mod, func
+
+def write_entry_points(d, fp):
+ """Write entry_points.txt from a two-level dict
+
+ Sorts on keys to ensure results are reproducible.
+ """
+ for group_name in sorted(d):
+ fp.write(u'[{}]\n'.format(group_name))
+ group = d[group_name]
+ for name in sorted(group):
+ val = group[name]
+ fp.write(u'{}={}\n'.format(name, val))
+ fp.write(u'\n')
+
+def hash_file(path, algorithm='sha256'):
+ with open(path, 'rb') as f:
+ h = hashlib.new(algorithm, f.read())
+ return h.hexdigest()
+
+def normalize_file_permissions(st_mode):
+ """Normalize the permission bits in the st_mode field from stat to 644/755
+
+ Popular VCSs only track whether a file is executable or not. The exact
+ permissions can vary on systems with different umasks. Normalising
+ to 644 (non executable) or 755 (executable) makes builds more reproducible.
+ """
+ # Set 644 permissions, leaving higher bits of st_mode unchanged
+ new_mode = (st_mode | 0o644) & ~0o133
+ if st_mode & 0o100:
+ new_mode |= 0o111 # Executable: 644 -> 755
+ return new_mode
+
+class Metadata(object):
+
+ summary = None
+ home_page = None
+ author = None
+ author_email = None
+ maintainer = None
+ maintainer_email = None
+ license = None
+ description = None
+ keywords = None
+ download_url = None
+ requires_python = None
+ description_content_type = None
+
+ platform = ()
+ supported_platform = ()
+ classifiers = ()
+ provides = ()
+ requires = ()
+ obsoletes = ()
+ project_urls = ()
+ provides_dist = ()
+ requires_dist = ()
+ obsoletes_dist = ()
+ requires_external = ()
+ provides_extra = ()
+
+ metadata_version = "2.1"
+
+ def __init__(self, data):
+ data = data.copy()
+ self.name = data.pop('name')
+ self.version = data.pop('version')
+
+ for k, v in data.items():
+ assert hasattr(self, k), "data does not have attribute '{}'".format(k)
+ setattr(self, k, v)
+
+ def _normalise_name(self, n):
+ return n.lower().replace('-', '_')
+
+ def write_metadata_file(self, fp):
+ """Write out metadata in the email headers format"""
+ fields = [
+ 'Metadata-Version',
+ 'Name',
+ 'Version',
+ ]
+ optional_fields = [
+ 'Summary',
+ 'Home-page',
+ 'License',
+ 'Keywords',
+ 'Author',
+ 'Author-email',
+ 'Maintainer',
+ 'Maintainer-email',
+ 'Requires-Python',
+ 'Description-Content-Type',
+ ]
+
+ for field in fields:
+ value = getattr(self, self._normalise_name(field))
+ fp.write(u"{}: {}\n".format(field, value))
+
+ for field in optional_fields:
+ value = getattr(self, self._normalise_name(field))
+ if value is not None:
+ # TODO: verify which fields can be multiline
+ # The spec has multiline examples for Author, Maintainer &
+ # License (& Description, but we put that in the body)
+ # Indent following lines with 8 spaces:
+ value = '\n '.join(value.splitlines())
+ fp.write(u"{}: {}\n".format(field, value))
+
+ for clsfr in self.classifiers:
+ fp.write(u'Classifier: {}\n'.format(clsfr))
+
+ for req in self.requires_dist:
+ fp.write(u'Requires-Dist: {}\n'.format(req))
+
+ for url in self.project_urls:
+ fp.write(u'Project-URL: {}\n'.format(url))
+
+ for extra in self.provides_extra:
+ fp.write(u'Provides-Extra: {}\n'.format(extra))
+
+ if self.description is not None:
+ fp.write(u'\n' + self.description + u'\n')
+
+ @property
+ def supports_py2(self):
+ """Return True if Requires-Python indicates Python 2 support."""
+ for part in (self.requires_python or "").split(","):
+ if re.search(r"^\s*(>=?|~=|===?)?\s*[3-9]", part):
+ return False
+ return True
+
+
+def make_metadata(module, ini_info):
+ md_dict = {'name': module.name, 'provides': [module.name]}
+ md_dict.update(get_info_from_module(module, ini_info.dynamic_metadata))
+ md_dict.update(ini_info.metadata)
+ return Metadata(md_dict)
+
+
+
+def normalize_dist_name(name: str, version: str) -> str:
+ """Normalizes a name and a PEP 440 version
+
+ The resulting string is valid as dist-info folder name
+ and as first part of a wheel filename
+
+ See https://packaging.python.org/specifications/binary-distribution-format/#escaping-and-unicode
+ """
+ normalized_name = re.sub(r'[-_.]+', '_', name, flags=re.UNICODE).lower()
+ assert check_version(version) == version
+ assert '-' not in version, 'Normalized versions can’t have dashes'
+ return '{}-{}'.format(normalized_name, version)
+
+
+def dist_info_name(distribution, version):
+ """Get the correct name of the .dist-info folder"""
+ return normalize_dist_name(distribution, version) + '.dist-info'
+
+
+def walk_data_dir(data_directory):
+ """Iterate over the files in the given data directory.
+
+ Yields paths prefixed with data_directory - caller may want to make them
+ relative to that. Excludes any __pycache__ subdirectories.
+ """
+ if data_directory is None:
+ return
+
+ for dirpath, dirs, files in os.walk(data_directory):
+ for file in sorted(files):
+ full_path = os.path.join(dirpath, file)
+ yield full_path
+
+ dirs[:] = [d for d in sorted(dirs) if d != '__pycache__']
diff --git a/flit_core/flit_core/config.py b/flit_core/flit_core/config.py
new file mode 100644
index 0000000..1292956
--- /dev/null
+++ b/flit_core/flit_core/config.py
@@ -0,0 +1,660 @@
+import difflib
+from email.headerregistry import Address
+import errno
+import logging
+import os
+import os.path as osp
+from pathlib import Path
+import re
+
+try:
+ import tomllib
+except ImportError:
+ try:
+ from .vendor import tomli as tomllib
+ # Some downstream distributors remove the vendored tomli.
+ # When that is removed, import tomli from the regular location.
+ except ImportError:
+ import tomli as tomllib
+
+from .versionno import normalise_version
+
+log = logging.getLogger(__name__)
+
+
class ConfigError(ValueError):
    """Raised for any invalid or inconsistent data found in pyproject.toml."""
    pass
+
# Old-style [tool.flit.metadata] keys whose values must be lists of strings.
metadata_list_fields = {
    'classifiers',
    'requires',
    'dev-requires'
}

# All keys accepted in the old-style [tool.flit.metadata] table.
metadata_allowed_fields = {
    'module',
    'author',
    'author-email',
    'maintainer',
    'maintainer-email',
    'home-page',
    'license',
    'keywords',
    'requires-python',
    'dist-name',
    'description-file',
    'requires-extra',
} | metadata_list_fields

# Old-style metadata keys that must always be present.
metadata_required_fields = {
    'module',
    'author',
}

# Keys recognised in the PEP 621 [project] table; unknown keys there only
# produce a warning (see read_pep621_metadata), not an error.
pep621_allowed_fields = {
    'name',
    'version',
    'description',
    'readme',
    'requires-python',
    'license',
    'authors',
    'maintainers',
    'keywords',
    'classifiers',
    'urls',
    'scripts',
    'gui-scripts',
    'entry-points',
    'dependencies',
    'optional-dependencies',
    'dynamic',
}
+
+
def read_flit_config(path):
    """Read and check the `pyproject.toml` file with data about the package.

    *path* is a pathlib.Path to pyproject.toml. Returns a LoadedConfig
    (see prep_toml_config); raises ConfigError on invalid configuration.
    """
    d = tomllib.loads(path.read_text('utf-8'))
    return prep_toml_config(d, path)
+
+
class EntryPointsConflict(ConfigError):
    """Console scripts were specified both via entry points and [scripts]."""
    def __str__(self):
        return ('Please specify console_scripts entry points, or [scripts] in '
                'flit config, not both.')
+
def prep_toml_config(d, path):
    """Validate config loaded from pyproject.toml and prepare common metadata

    *d* is the parsed TOML document; *path* is the pyproject.toml Path,
    used to resolve relative paths such as the external data directory.

    Returns a LoadedConfig object.

    Raises ConfigError when metadata is given in both the [project] and
    [tool.flit.metadata] styles, when unknown tables or keys are present,
    or when the external-data directory is invalid.
    """
    dtool = d.get('tool', {}).get('flit', {})

    if 'project' in d:
        # Metadata in [project] table (PEP 621)
        if 'metadata' in dtool:
            raise ConfigError(
                "Use [project] table for metadata or [tool.flit.metadata], not both."
            )
        if ('scripts' in dtool) or ('entrypoints' in dtool):
            # BUGFIX: the original message concatenated
            # "[project.scripts]," directly onto "[project.gui-scripts]"
            # with no separating space.
            raise ConfigError(
                "Don't mix [project] metadata with [tool.flit.scripts] or "
                "[tool.flit.entrypoints]. Use [project.scripts], "
                "[project.gui-scripts] or [project.entry-points] as replacements."
            )
        loaded_cfg = read_pep621_metadata(d['project'], path)

        # [tool.flit.module] may override the import name when it differs
        # from the distribution name.
        module_tbl = dtool.get('module', {})
        if 'name' in module_tbl:
            loaded_cfg.module = module_tbl['name']
    elif 'metadata' in dtool:
        # Metadata in [tool.flit.metadata] (pre PEP 621 format)
        if 'module' in dtool:
            raise ConfigError(
                "Use [tool.flit.module] table with new-style [project] metadata, "
                "not [tool.flit.metadata]"
            )
        loaded_cfg = _prep_metadata(dtool['metadata'], path)
        # Old-style config always takes version & description from the module.
        loaded_cfg.dynamic_metadata = ['version', 'description']

        if 'entrypoints' in dtool:
            loaded_cfg.entrypoints = flatten_entrypoints(dtool['entrypoints'])

        if 'scripts' in dtool:
            loaded_cfg.add_scripts(dict(dtool['scripts']))
    else:
        raise ConfigError(
            "Neither [project] nor [tool.flit.metadata] found in pyproject.toml"
        )

    unknown_sections = set(dtool) - {
        'metadata', 'module', 'scripts', 'entrypoints', 'sdist', 'external-data'
    }
    # Sections prefixed with x- are reserved for user extensions.
    unknown_sections = [s for s in unknown_sections if not s.lower().startswith('x-')]
    if unknown_sections:
        raise ConfigError('Unexpected tables in pyproject.toml: ' + ', '.join(
            '[tool.flit.{}]'.format(s) for s in unknown_sections
        ))

    if 'sdist' in dtool:
        unknown_keys = set(dtool['sdist']) - {'include', 'exclude'}
        if unknown_keys:
            # BUGFIX: the original message ran the colon straight into the
            # first key name ("...sdist]:key1, key2").
            raise ConfigError(
                "Unknown keys in [tool.flit.sdist]: " + ", ".join(unknown_keys)
            )

        loaded_cfg.sdist_include_patterns = _check_glob_patterns(
            dtool['sdist'].get('include', []), 'include'
        )
        # Bytecode caches are always excluded from sdists.
        exclude = [
            "**/__pycache__",
            "**.pyc",
        ] + dtool['sdist'].get('exclude', [])
        loaded_cfg.sdist_exclude_patterns = _check_glob_patterns(
            exclude, 'exclude'
        )

    data_dir = dtool.get('external-data', {}).get('directory', None)
    if data_dir is not None:
        toml_key = "tool.flit.external-data.directory"
        if not isinstance(data_dir, str):
            raise ConfigError(f"{toml_key} must be a string")

        # The data directory must be a real subdirectory of the project.
        normp = osp.normpath(data_dir)
        if osp.isabs(normp):
            raise ConfigError(f"{toml_key} cannot be an absolute path")
        if normp.startswith('..' + os.sep):
            raise ConfigError(
                f"{toml_key} cannot point outside the directory containing pyproject.toml"
            )
        if normp == '.':
            raise ConfigError(
                f"{toml_key} cannot refer to the directory containing pyproject.toml"
            )
        loaded_cfg.data_directory = path.parent / data_dir
        if not loaded_cfg.data_directory.is_dir():
            raise ConfigError(f"{toml_key} must refer to a directory")

    return loaded_cfg
+
def flatten_entrypoints(ep):
    """Flatten nested entrypoints dicts.

    Entry points group names can include dots. But dots in TOML make nested
    dictionaries:

    [entrypoints.a.b]    # {'entrypoints': {'a': {'b': {}}}}

    The proper way to avoid this is:

    [entrypoints."a.b"]  # {'entrypoints': {'a.b': {}}}

    But since there isn't a need for arbitrarily nested mappings in
    entrypoints, flit allows you to use the former. This flattens the
    nested dictionaries from loading pyproject.toml.
    """
    def _collect(mapping, prefix):
        # Non-dict values at this level belong to the group named *prefix*.
        plain = {name: val for name, val in mapping.items()
                 if not isinstance(val, dict)}
        # Dict values are sub-groups: recurse with a dotted prefix.
        for name, val in mapping.items():
            if isinstance(val, dict):
                yield from _collect(val, '{}.{}'.format(prefix, name))
        if plain:
            yield prefix, plain

    flattened = {}
    for group_name, group in ep.items():
        flattened.update(_collect(group, group_name))
    return flattened
+
+
def _check_glob_patterns(pats, clude):
    """Check and normalise glob patterns for sdist include/exclude

    *clude* is 'include' or 'exclude' and is only used in error messages.
    Returns the patterns normalised with os.path.normpath. Raises
    ConfigError for a non-list input, characters illegal on Windows,
    absolute paths, or patterns escaping the project directory.
    """
    if not isinstance(pats, list):
        raise ConfigError("sdist {} patterns must be a list".format(clude))

    # Windows filenames can't contain these (nor * or ?, but they are part of
    # glob patterns) - https://stackoverflow.com/a/31976060/434217
    bad_chars = re.compile(r'[\000-\037<>:"\\]')

    normed = []

    for p in pats:
        if bad_chars.search(p):
            raise ConfigError(
                '{} pattern {!r} contains bad characters (<>:\"\\ or control characters)'
                .format(clude, p)
            )

        # Normalise separators and collapse '.'/'..' components so the
        # escape checks below see a canonical form.
        normp = osp.normpath(p)

        if osp.isabs(normp):
            raise ConfigError(
                '{} pattern {!r} is an absolute path'.format(clude, p)
            )
        if normp.startswith('..' + os.sep):
            raise ConfigError(
                '{} pattern {!r} points out of the directory containing pyproject.toml'
                .format(clude, p)
            )
        normed.append(normp)

    return normed
+
+
class LoadedConfig(object):
    """Everything flit extracts from pyproject.toml for one project."""

    def __init__(self):
        # Importable module/package name (filled in by the config readers)
        self.module = None
        # Core-metadata-style fields, keyed with underscores
        self.metadata = {}
        # Extra name -> list of requirement strings
        self.reqs_by_extra = {}
        # Entry point group -> {name: object reference}
        self.entrypoints = {}
        # Files named in the config (readme, license) - included in sdists
        self.referenced_files = []
        # Glob patterns controlling sdist contents
        self.sdist_include_patterns = []
        self.sdist_exclude_patterns = []
        # Fields to be computed at build time (PEP 621 'dynamic')
        self.dynamic_metadata = []
        # Directory of external data files, or None
        self.data_directory = None

    def add_scripts(self, scripts_dict):
        """Record console scripts, refusing a double specification."""
        if not scripts_dict:
            return
        if 'console_scripts' in self.entrypoints:
            raise EntryPointsConflict
        self.entrypoints['console_scripts'] = scripts_dict
+
# Map readme file extensions to the Description-Content-Type metadata value.
readme_ext_to_content_type = {
    '.rst': 'text/x-rst',
    '.md': 'text/markdown',
    '.txt': 'text/plain',
}
+
+
def description_from_file(rel_path: str, proj_dir: Path, guess_mimetype=True):
    """Load the description (readme) text from *rel_path* in *proj_dir*.

    Returns a ``(text, content_type)`` pair. ``content_type`` is None when
    guessing is disabled or the file extension is not recognised. Raises
    ConfigError for an absolute path or a missing file.
    """
    if osp.isabs(rel_path):
        raise ConfigError("Readme path must be relative")

    desc_path = proj_dir / rel_path
    try:
        with desc_path.open('r', encoding='utf-8') as f:
            raw_desc = f.read()
    except IOError as e:
        # Only a missing file becomes a config error; anything else
        # (permissions, etc.) propagates unchanged.
        if e.errno != errno.ENOENT:
            raise
        raise ConfigError(
            "Description file {} does not exist".format(desc_path)
        )

    mimetype = None
    if guess_mimetype:
        suffix = desc_path.suffix.lower()
        mimetype = readme_ext_to_content_type.get(suffix)
        if mimetype is None:
            log.warning("Unknown extension %r for description file.", suffix)
            log.warning(" Recognised extensions: %s",
                        " ".join(readme_ext_to_content_type))

    return raw_desc, mimetype
+
+
def _prep_metadata(md_sect, path):
    """Process & verify the metadata from a config file

    - Pull out the module name we're packaging.
    - Read description-file and check that it's valid rst
    - Convert dashes in key names to underscores
      (e.g. home-page in config -> home_page in metadata)

    *md_sect* is the [tool.flit.metadata] dict; *path* is the
    pyproject.toml path, used to resolve the description file.
    Returns a LoadedConfig. Raises ConfigError on any invalid field.
    """
    if not set(md_sect).issuperset(metadata_required_fields):
        missing = metadata_required_fields - set(md_sect)
        raise ConfigError("Required fields missing: " + '\n'.join(missing))

    res = LoadedConfig()

    # Each dotted component must be a valid Python identifier.
    res.module = md_sect.get('module')
    if not all([m.isidentifier() for m in res.module.split(".")]):
        raise ConfigError("Module name %r is not a valid identifier" % res.module)

    md_dict = res.metadata

    # Description file
    if 'description-file' in md_sect:
        desc_path = md_sect.get('description-file')
        res.referenced_files.append(desc_path)
        desc_content, mimetype = description_from_file(desc_path, path.parent)
        md_dict['description'] = desc_content
        md_dict['description_content_type'] = mimetype

    # 'urls' is popped here so the validation loop below never sees it.
    if 'urls' in md_sect:
        project_urls = md_dict['project_urls'] = []
        for label, url in sorted(md_sect.pop('urls').items()):
            project_urls.append("{}, {}".format(label, url))

    for key, value in md_sect.items():
        if key in {'description-file', 'module'}:
            continue
        if key not in metadata_allowed_fields:
            # Suggest the closest valid key for likely typos.
            closest = difflib.get_close_matches(key, metadata_allowed_fields,
                                                n=1, cutoff=0.7)
            msg = "Unrecognised metadata key: {!r}".format(key)
            if closest:
                msg += " (did you mean {!r}?)".format(closest[0])
            raise ConfigError(msg)

        k2 = key.replace('-', '_')
        md_dict[k2] = value
        # Type-check the value according to the kind of field.
        if key in metadata_list_fields:
            if not isinstance(value, list):
                raise ConfigError('Expected a list for {} field, found {!r}'
                                  .format(key, value))
            if not all(isinstance(a, str) for a in value):
                raise ConfigError('Expected a list of strings for {} field'
                                  .format(key))
        elif key == 'requires-extra':
            if not isinstance(value, dict):
                raise ConfigError('Expected a dict for requires-extra field, found {!r}'
                                  .format(value))
            if not all(isinstance(e, list) for e in value.values()):
                raise ConfigError('Expected a dict of lists for requires-extra field')
            for e, reqs in value.items():
                if not all(isinstance(a, str) for a in reqs):
                    raise ConfigError('Expected a string list for requires-extra. (extra {})'
                                      .format(e))
        else:
            if not isinstance(value, str):
                raise ConfigError('Expected a string for {} field, found {!r}'
                                  .format(key, value))

    # What we call requires in the ini file is technically requires_dist in
    # the metadata.
    if 'requires' in md_dict:
        md_dict['requires_dist'] = md_dict.pop('requires')

    # And what we call dist-name is name in the metadata
    if 'dist_name' in md_dict:
        md_dict['name'] = md_dict.pop('dist_name')

    # Move dev-requires into requires-extra
    reqs_noextra = md_dict.pop('requires_dist', [])
    res.reqs_by_extra = md_dict.pop('requires_extra', {})
    dev_requires = md_dict.pop('dev_requires', None)
    if dev_requires is not None:
        if 'dev' in res.reqs_by_extra:
            raise ConfigError(
                'dev-requires occurs together with its replacement requires-extra.dev.')
        else:
            log.warning(
                '"dev-requires = ..." is obsolete. Use "requires-extra = {"dev" = ...}" instead.')
            res.reqs_by_extra['dev'] = dev_requires

    # Add requires-extra requirements into requires_dist
    md_dict['requires_dist'] = \
        reqs_noextra + list(_expand_requires_extra(res.reqs_by_extra))

    md_dict['provides_extra'] = sorted(res.reqs_by_extra.keys())

    # For internal use, record the main requirements as a '.none' extra.
    res.reqs_by_extra['.none'] = reqs_noextra

    return res
+
+def _expand_requires_extra(re):
+ for extra, reqs in sorted(re.items()):
+ for req in reqs:
+ if ';' in req:
+ name, envmark = req.split(';', 1)
+ yield '{} ; extra == "{}" and ({})'.format(name, extra, envmark)
+ else:
+ yield '{} ; extra == "{}"'.format(req, extra)
+
+
+def _check_type(d, field_name, cls):
+ if not isinstance(d[field_name], cls):
+ raise ConfigError(
+ "{} field should be {}, not {}".format(field_name, cls, type(d[field_name]))
+ )
+
+def _check_list_of_str(d, field_name):
+ if not isinstance(d[field_name], list) or not all(
+ isinstance(e, str) for e in d[field_name]
+ ):
+ raise ConfigError(
+ "{} field should be a list of strings".format(field_name)
+ )
+
def read_pep621_metadata(proj, path) -> LoadedConfig:
    """Parse and validate the PEP 621 [project] table.

    *proj* is the [project] dict from pyproject.toml; *path* is the
    pyproject.toml path, used to resolve readme files. Returns a
    LoadedConfig with the core-metadata dict filled in. Raises ConfigError
    for any invalid field.
    """
    lc = LoadedConfig()
    md_dict = lc.metadata

    if 'name' not in proj:
        raise ConfigError('name must be specified in [project] table')
    _check_type(proj, 'name', str)
    md_dict['name'] = proj['name']
    # Default the import name from the distribution name; it can be
    # overridden via [tool.flit.module] (see prep_toml_config).
    lc.module = md_dict['name'].replace('-', '_')

    # Unknown keys only warn, so newer metadata fields don't hard-break
    # older flit versions.
    unexpected_keys = proj.keys() - pep621_allowed_fields
    if unexpected_keys:
        log.warning("Unexpected names under [project]: %s", ', '.join(unexpected_keys))

    if 'version' in proj:
        _check_type(proj, 'version', str)
        md_dict['version'] = normalise_version(proj['version'])
    if 'description' in proj:
        _check_type(proj, 'description', str)
        # PEP 621 'description' maps to the core metadata 'Summary' field.
        md_dict['summary'] = proj['description']
    if 'readme' in proj:
        readme = proj['readme']
        if isinstance(readme, str):
            # A plain string is a file path; content type guessed from
            # the file extension.
            lc.referenced_files.append(readme)
            desc_content, mimetype = description_from_file(readme, path.parent)

        elif isinstance(readme, dict):
            unrec_keys = set(readme.keys()) - {'text', 'file', 'content-type'}
            if unrec_keys:
                raise ConfigError(
                    "Unrecognised keys in [project.readme]: {}".format(unrec_keys)
                )
            if 'content-type' in readme:
                mimetype = readme['content-type']
                mtype_base = mimetype.split(';')[0].strip()  # e.g. text/x-rst
                if mtype_base not in readme_ext_to_content_type.values():
                    raise ConfigError(
                        "Unrecognised readme content-type: {!r}".format(mtype_base)
                    )
                # TODO: validate content-type parameters (charset, md variant)?
            else:
                raise ConfigError(
                    "content-type field required in [project.readme] table"
                )
            if 'file' in readme:
                if 'text' in readme:
                    raise ConfigError(
                        "[project.readme] should specify file or text, not both"
                    )
                lc.referenced_files.append(readme['file'])
                # content-type was given explicitly above, so don't guess it.
                desc_content, _ = description_from_file(
                    readme['file'], path.parent, guess_mimetype=False
                )
            elif 'text' in readme:
                desc_content = readme['text']
            else:
                raise ConfigError(
                    "file or text field required in [project.readme] table"
                )
        else:
            raise ConfigError(
                "project.readme should be a string or a table"
            )

        md_dict['description'] = desc_content
        md_dict['description_content_type'] = mimetype

    if 'requires-python' in proj:
        md_dict['requires_python'] = proj['requires-python']

    if 'license' in proj:
        _check_type(proj, 'license', dict)
        license_tbl = proj['license']
        unrec_keys = set(license_tbl.keys()) - {'text', 'file'}
        if unrec_keys:
            raise ConfigError(
                "Unrecognised keys in [project.license]: {}".format(unrec_keys)
            )

        # TODO: Do something with license info.
        # The 'License' field in packaging metadata is a brief description of
        # a license, not the full text or a file path. PEP 639 will improve on
        # how licenses are recorded.
        if 'file' in license_tbl:
            if 'text' in license_tbl:
                raise ConfigError(
                    "[project.license] should specify file or text, not both"
                )
            # The file is shipped in sdists even though it isn't used in
            # the metadata yet.
            lc.referenced_files.append(license_tbl['file'])
        elif 'text' in license_tbl:
            pass
        else:
            raise ConfigError(
                "file or text field required in [project.license] table"
            )

    if 'authors' in proj:
        _check_type(proj, 'authors', list)
        md_dict.update(pep621_people(proj['authors']))

    if 'maintainers' in proj:
        _check_type(proj, 'maintainers', list)
        md_dict.update(pep621_people(proj['maintainers'], group_name='maintainer'))

    if 'keywords' in proj:
        _check_list_of_str(proj, 'keywords')
        # Core metadata Keywords is a single comma-separated string.
        md_dict['keywords'] = ",".join(proj['keywords'])

    if 'classifiers' in proj:
        _check_list_of_str(proj, 'classifiers')
        md_dict['classifiers'] = proj['classifiers']

    if 'urls' in proj:
        _check_type(proj, 'urls', dict)
        project_urls = md_dict['project_urls'] = []
        for label, url in sorted(proj['urls'].items()):
            project_urls.append("{}, {}".format(label, url))

    if 'entry-points' in proj:
        _check_type(proj, 'entry-points', dict)
        for grp in proj['entry-points'].values():
            if not isinstance(grp, dict):
                raise ConfigError(
                    "projects.entry-points should only contain sub-tables"
                )
            if not all(isinstance(k, str) for k in grp.values()):
                raise ConfigError(
                    "[projects.entry-points.*] tables should have string values"
                )
        # Scripts have dedicated tables; reject them here to avoid
        # silently conflicting definitions.
        if set(proj['entry-points'].keys()) & {'console_scripts', 'gui_scripts'}:
            raise ConfigError(
                "Scripts should be specified in [project.scripts] or "
                "[project.gui-scripts], not under [project.entry-points]"
            )
        lc.entrypoints = proj['entry-points']

    if 'scripts' in proj:
        _check_type(proj, 'scripts', dict)
        if not all(isinstance(k, str) for k in proj['scripts'].values()):
            raise ConfigError(
                "[projects.scripts] table should have string values"
            )
        lc.entrypoints['console_scripts'] = proj['scripts']

    if 'gui-scripts' in proj:
        _check_type(proj, 'gui-scripts', dict)
        if not all(isinstance(k, str) for k in proj['gui-scripts'].values()):
            raise ConfigError(
                "[projects.gui-scripts] table should have string values"
            )
        lc.entrypoints['gui_scripts'] = proj['gui-scripts']

    if 'dependencies' in proj:
        _check_list_of_str(proj, 'dependencies')
        reqs_noextra = proj['dependencies']
    else:
        reqs_noextra = []

    if 'optional-dependencies' in proj:
        _check_type(proj, 'optional-dependencies', dict)
        optdeps = proj['optional-dependencies']
        if not all(isinstance(e, list) for e in optdeps.values()):
            raise ConfigError(
                'Expected a dict of lists in optional-dependencies field'
            )
        for e, reqs in optdeps.items():
            if not all(isinstance(a, str) for a in reqs):
                raise ConfigError(
                    'Expected a string list for optional-dependencies ({})'.format(e)
                )

        lc.reqs_by_extra = optdeps.copy()
        md_dict['provides_extra'] = sorted(lc.reqs_by_extra.keys())

    # Extra requirements are folded into Requires-Dist with extra markers.
    md_dict['requires_dist'] = \
        reqs_noextra + list(_expand_requires_extra(lc.reqs_by_extra))

    # For internal use, record the main requirements as a '.none' extra.
    if reqs_noextra:
        lc.reqs_by_extra['.none'] = reqs_noextra

    if 'dynamic' in proj:
        _check_list_of_str(proj, 'dynamic')
        dynamic = set(proj['dynamic'])
        unrec_dynamic = dynamic - {'version', 'description'}
        if unrec_dynamic:
            raise ConfigError(
                "flit only supports dynamic metadata for 'version' & 'description'"
            )
        # A field may be dynamic or statically specified, never both.
        if dynamic.intersection(proj):
            raise ConfigError(
                "keys listed in project.dynamic must not be in [project] table"
            )
        lc.dynamic_metadata = dynamic

    if ('version' not in proj) and ('version' not in lc.dynamic_metadata):
        raise ConfigError(
            "version must be specified under [project] or listed as a dynamic field"
        )
    if ('description' not in proj) and ('description' not in lc.dynamic_metadata):
        raise ConfigError(
            "description must be specified under [project] or listed as a dynamic field"
        )

    return lc
+
def pep621_people(people, group_name='author') -> dict:
    """Convert authors/maintainers from PEP 621 to core metadata fields"""
    names = []
    emails = []
    for person in people:
        if not isinstance(person, dict):
            raise ConfigError("{} info must be list of dicts".format(group_name))
        unrec_keys = set(person.keys()) - {'name', 'email'}
        if unrec_keys:
            raise ConfigError(
                "Unrecognised keys in {} info: {}".format(group_name, unrec_keys)
            )
        # A person with an email goes in the *-email field (with the name
        # folded into an RFC 5322 address); a name alone goes in *-name.
        if 'email' in person:
            email = person['email']
            if 'name' in person:
                email = str(Address(person['name'], addr_spec=email))
            emails.append(email)
        elif 'name' in person:
            names.append(person['name'])

    result = {}
    if names:
        result[group_name] = ", ".join(names)
    if emails:
        result[group_name + '_email'] = ", ".join(emails)
    return result
diff --git a/flit_core/flit_core/sdist.py b/flit_core/flit_core/sdist.py
new file mode 100644
index 0000000..f41d177
--- /dev/null
+++ b/flit_core/flit_core/sdist.py
@@ -0,0 +1,202 @@
+from collections import defaultdict
+from copy import copy
+from glob import glob
+from gzip import GzipFile
+import io
+import logging
+import os
+import os.path as osp
+from pathlib import Path
+from posixpath import join as pjoin
+import tarfile
+
+from . import common
+
+log = logging.getLogger(__name__)
+
+
def clean_tarinfo(ti, mtime=None):
    """Clean metadata from a TarInfo object to make it more reproducible.

    - Set uid & gid to 0
    - Set uname and gname to ""
    - Normalise permissions to 644 or 755
    - Set mtime if not None

    Operates on a copy; the TarInfo passed in is left untouched.
    """
    ti = copy(ti)
    ti.uid = 0
    ti.gid = 0
    ti.uname = ''
    ti.gname = ''
    ti.mode = common.normalize_file_permissions(ti.mode)
    if mtime is not None:
        ti.mtime = mtime
    return ti
+
+
class FilePatterns:
    """Manage a set of file inclusion/exclusion patterns relative to basedir"""

    def __init__(self, patterns, basedir):
        self.basedir = basedir
        # Matched paths, stored relative to basedir; directories and plain
        # files are tracked separately.
        self.dirs = set()
        self.files = set()

        for pattern in patterns:
            matches = glob(osp.join(basedir, pattern), recursive=True)
            for match in sorted(matches):
                rel = osp.relpath(match, basedir)
                target = self.dirs if osp.isdir(match) else self.files
                target.add(rel)

    def match_file(self, rel_path):
        """True if rel_path is a matched file or lies under a matched dir."""
        return rel_path in self.files or any(
            rel_path.startswith(d + os.sep) for d in self.dirs
        )

    def match_dir(self, rel_path):
        """True if rel_path is a matched dir or a subdirectory of one."""
        return rel_path in self.dirs or any(
            rel_path.startswith(d + os.sep) for d in self.dirs
        )
+
+
class SdistBuilder:
    """Builds a minimal sdist

    These minimal sdists should work for PEP 517.
    The class is extended in flit.sdist to make a more 'full fat' sdist,
    which is what should normally be published to PyPI.
    """
    def __init__(self, module, metadata, cfgdir, reqs_by_extra, entrypoints,
                 extra_files, data_directory, include_patterns=(), exclude_patterns=()):
        # module: the common.Module being packaged
        # metadata: a common.Metadata object (used for PKG-INFO & naming)
        # cfgdir: directory containing pyproject.toml
        # extra_files: e.g. pyproject.toml plus files referenced by config
        self.module = module
        self.metadata = metadata
        self.cfgdir = cfgdir
        self.reqs_by_extra = reqs_by_extra
        self.entrypoints = entrypoints
        self.extra_files = extra_files
        self.data_directory = data_directory
        # Include/exclude glob patterns are resolved relative to cfgdir.
        self.includes = FilePatterns(include_patterns, str(cfgdir))
        self.excludes = FilePatterns(exclude_patterns, str(cfgdir))

    @classmethod
    def from_ini_path(cls, ini_path: Path):
        """Construct an SdistBuilder from the path to pyproject.toml."""
        # Local import so bootstrapping doesn't try to load toml
        from .config import read_flit_config
        ini_info = read_flit_config(ini_path)
        srcdir = ini_path.parent
        module = common.Module(ini_info.module, srcdir)
        metadata = common.make_metadata(module, ini_info)
        extra_files = [ini_path.name] + ini_info.referenced_files
        return cls(
            module, metadata, srcdir, ini_info.reqs_by_extra,
            ini_info.entrypoints, extra_files, ini_info.data_directory,
            ini_info.sdist_include_patterns, ini_info.sdist_exclude_patterns,
        )

    def prep_entry_points(self):
        """Return entry points as {group: ['name = ref', ...]} lists."""
        # Reformat entry points from dict-of-dicts to dict-of-lists
        res = defaultdict(list)
        for groupname, group in self.entrypoints.items():
            for name, ep in sorted(group.items()):
                res[groupname].append('{} = {}'.format(name, ep))

        return dict(res)

    def select_files(self):
        """Pick which files from the source tree will be included in the sdist

        This is overridden in flit itself to use information from a VCS to
        include tests, docs, etc. for a 'gold standard' sdist.
        """
        # All paths are returned relative to cfgdir.
        cfgdir_s = str(self.cfgdir)
        return [
            osp.relpath(p, cfgdir_s) for p in self.module.iter_files()
        ] + [
            osp.relpath(p, cfgdir_s) for p in common.walk_data_dir(self.data_directory)
        ] + self.extra_files

    def apply_includes_excludes(self, files):
        """Apply the configured include/exclude patterns to *files*.

        Returns a sorted list of relative paths. Raises if any crucial
        file (pyproject.toml, referenced files, the module file itself)
        ended up excluded.
        """
        cfgdir_s = str(self.cfgdir)
        files = {f for f in files if not self.excludes.match_file(f)}

        # Individually included files, unless also excluded.
        for f_rel in self.includes.files:
            if not self.excludes.match_file(f_rel):
                files.add(f_rel)

        # Included directories are walked recursively.
        for rel_d in self.includes.dirs:
            for dirpath, dirs, dfiles in os.walk(osp.join(cfgdir_s, rel_d)):
                for file in dfiles:
                    f_abs = osp.join(dirpath, file)
                    f_rel = osp.relpath(f_abs, cfgdir_s)
                    if not self.excludes.match_file(f_rel):
                        files.add(f_rel)

                # Filter subdirectories before os.walk scans them
                dirs[:] = [d for d in dirs if not self.excludes.match_dir(
                    osp.relpath(osp.join(dirpath, d), cfgdir_s)
                )]

        crucial_files = set(
            self.extra_files + [str(self.module.file.relative_to(self.cfgdir))]
        )
        missing_crucial = crucial_files - files
        if missing_crucial:
            raise Exception("Crucial files were excluded from the sdist: {}"
                            .format(", ".join(missing_crucial)))

        return sorted(files)

    def add_setup_py(self, files_to_add, target_tarfile):
        """No-op here; overridden in flit to generate setup.py"""
        pass

    @property
    def dir_name(self):
        # Top-level directory name inside the archive: <name>-<version>
        return '{}-{}'.format(self.metadata.name, self.metadata.version)

    def build(self, target_dir, gen_setup_py=True):
        """Write the .tar.gz sdist into *target_dir*; return its path."""
        os.makedirs(str(target_dir), exist_ok=True)
        target = target_dir / '{}-{}.tar.gz'.format(
            self.metadata.name, self.metadata.version
        )
        # Honour SOURCE_DATE_EPOCH for reproducible builds.
        source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH', '')
        mtime = int(source_date_epoch) if source_date_epoch else None
        # Wrap GzipFile explicitly so the gzip header mtime can be pinned.
        gz = GzipFile(str(target), mode='wb', mtime=mtime)
        tf = tarfile.TarFile(str(target), mode='w', fileobj=gz,
                             format=tarfile.PAX_FORMAT)

        try:
            files_to_add = self.apply_includes_excludes(self.select_files())

            for relpath in files_to_add:
                path = str(self.cfgdir / relpath)
                ti = tf.gettarinfo(path, arcname=pjoin(self.dir_name, relpath))
                # Strip user/group info & normalise modes for reproducibility.
                ti = clean_tarinfo(ti, mtime)

                if ti.isreg():
                    with open(path, 'rb') as f:
                        tf.addfile(ti, f)
                else:
                    tf.addfile(ti)  # Symlinks & ?

            if gen_setup_py:
                self.add_setup_py(files_to_add, tf)

            # Generate PKG-INFO in memory and append it to the archive.
            stream = io.StringIO()
            self.metadata.write_metadata_file(stream)
            pkg_info = stream.getvalue().encode()
            ti = tarfile.TarInfo(pjoin(self.dir_name, 'PKG-INFO'))
            ti.size = len(pkg_info)
            tf.addfile(ti, io.BytesIO(pkg_info))

        finally:
            tf.close()
            gz.close()

        log.info("Built sdist: %s", target)
        return target
diff --git a/flit_core/flit_core/tests/__init__.py b/flit_core/flit_core/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/__init__.py
diff --git a/flit_core/flit_core/tests/samples/EG_README.rst b/flit_core/flit_core/tests/samples/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/flit_core/flit_core/tests/samples/bad-description-ext.toml b/flit_core/flit_core/tests/samples/bad-description-ext.toml
new file mode 100644
index 0000000..1062829
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/bad-description-ext.toml
@@ -0,0 +1,9 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "module1.py" # WRONG
diff --git a/flit_core/flit_core/tests/samples/conflicting_modules/module1.py b/flit_core/flit_core/tests/samples/conflicting_modules/module1.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/conflicting_modules/module1.py
diff --git a/flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml b/flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml
new file mode 100644
index 0000000..a38df52
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml
@@ -0,0 +1,8 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
diff --git a/flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py b/flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py
diff --git a/flit_core/flit_core/tests/samples/constructed_version/module1.py b/flit_core/flit_core/tests/samples/constructed_version/module1.py
new file mode 100644
index 0000000..5d9ec93
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/constructed_version/module1.py
@@ -0,0 +1,4 @@
+
+"""This module has a __version__ that requires runtime interpretation"""
+
+__version__ = ".".join(["1", "2", "3"])
diff --git a/flit_core/flit_core/tests/samples/constructed_version/pyproject.toml b/flit_core/flit_core/tests/samples/constructed_version/pyproject.toml
new file mode 100644
index 0000000..812b74f
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/constructed_version/pyproject.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = [
+ "numpy >=1.16.0",
+]
diff --git a/flit_core/flit_core/tests/samples/extras-dev-conflict.toml b/flit_core/flit_core/tests/samples/extras-dev-conflict.toml
new file mode 100644
index 0000000..0fe249d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/extras-dev-conflict.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+dev-requires = ["apackage"]
+
+[tool.flit.metadata.requires-extra]
+dev = ["anotherpackage"]
diff --git a/flit_core/flit_core/tests/samples/extras.toml b/flit_core/flit_core/tests/samples/extras.toml
new file mode 100644
index 0000000..afdb221
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/extras.toml
@@ -0,0 +1,15 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+requires = ["toml"]
+
+[tool.flit.metadata.requires-extra]
+test = ["pytest"]
+custom = ["requests"]
diff --git a/flit_core/flit_core/tests/samples/imported_version/package1/__init__.py b/flit_core/flit_core/tests/samples/imported_version/package1/__init__.py
new file mode 100644
index 0000000..49adc42
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/imported_version/package1/__init__.py
@@ -0,0 +1,3 @@
+"""This module has a __version__ that requires a relative import"""
+
+from ._version import __version__
diff --git a/flit_core/flit_core/tests/samples/imported_version/package1/_version.py b/flit_core/flit_core/tests/samples/imported_version/package1/_version.py
new file mode 100644
index 0000000..91201fc
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/imported_version/package1/_version.py
@@ -0,0 +1 @@
+__version__ = '0.5.8'
diff --git a/flit_core/flit_core/tests/samples/imported_version/pyproject.toml b/flit_core/flit_core/tests/samples/imported_version/pyproject.toml
new file mode 100644
index 0000000..b6d44e1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/imported_version/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "package1"
+authors = [
+ {name = "Sir Röbin", email = "robin@camelot.uk"}
+]
+dynamic = ["version", "description"]
diff --git a/flit_core/flit_core/tests/samples/inclusion/LICENSES/README b/flit_core/flit_core/tests/samples/inclusion/LICENSES/README
new file mode 100644
index 0000000..63de856
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/LICENSES/README
@@ -0,0 +1,2 @@
+This directory will match the LICENSE* glob which Flit uses to add license
+files to wheel metadata.
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt
new file mode 100644
index 0000000..5f852b1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt
@@ -0,0 +1 @@
+sdists should include this (see pyproject.toml)
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/test.rst b/flit_core/flit_core/tests/samples/inclusion/doc/test.rst
new file mode 100644
index 0000000..5f852b1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/test.rst
@@ -0,0 +1 @@
+sdists should include this (see pyproject.toml)
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/test.txt b/flit_core/flit_core/tests/samples/inclusion/doc/test.txt
new file mode 100644
index 0000000..31dc06a
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/test.txt
@@ -0,0 +1 @@
+sdists should exclude this (see pyproject.toml)
diff --git a/flit_core/flit_core/tests/samples/inclusion/module1.py b/flit_core/flit_core/tests/samples/inclusion/module1.py
new file mode 100644
index 0000000..7e0d3cb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/module1.py
@@ -0,0 +1,3 @@
+"""For tests"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/inclusion/pyproject.toml b/flit_core/flit_core/tests/samples/inclusion/pyproject.toml
new file mode 100644
index 0000000..c37d44d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/pyproject.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+build-backend = "flit.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+
+[tool.flit.sdist]
+include = ["doc"]
+exclude = ["doc/*.txt", "doc/**/*.md"]
diff --git a/flit_core/flit_core/tests/samples/invalid_version1.py b/flit_core/flit_core/tests/samples/invalid_version1.py
new file mode 100644
index 0000000..dd3268a
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/invalid_version1.py
@@ -0,0 +1,3 @@
+"""Sample module with invalid __version__ string"""
+
+__version__ = "not starting with a number" \ No newline at end of file
diff --git a/flit_core/flit_core/tests/samples/missing-description-file.toml b/flit_core/flit_core/tests/samples/missing-description-file.toml
new file mode 100644
index 0000000..00fae72
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/missing-description-file.toml
@@ -0,0 +1,9 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "missingdescriptionfile"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/missingdescriptionfile"
+description-file = "definitely-missing.rst"
diff --git a/flit_core/flit_core/tests/samples/misspelled-key.toml b/flit_core/flit_core/tests/samples/misspelled-key.toml
new file mode 100644
index 0000000..cbde9ac
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/misspelled-key.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+descryption-file = "my-description.rst" # Deliberate typo for test
+home-page = "http://github.com/sirrobin/package1"
diff --git a/flit_core/flit_core/tests/samples/module1-pkg.ini b/flit_core/flit_core/tests/samples/module1-pkg.ini
new file mode 100644
index 0000000..9bbfc4e
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module1-pkg.ini
@@ -0,0 +1,5 @@
+[metadata]
+module=module1
+author=Sir Robin
+author-email=robin@camelot.uk
+home-page=http://github.com/sirrobin/module1
diff --git a/flit_core/flit_core/tests/samples/module1-pkg.toml b/flit_core/flit_core/tests/samples/module1-pkg.toml
new file mode 100644
index 0000000..740ec87
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module1-pkg.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.urls]
+Documentation = "https://example.com/module1"
diff --git a/flit_core/flit_core/tests/samples/module1.py b/flit_core/flit_core/tests/samples/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/module2.py b/flit_core/flit_core/tests/samples/module2.py
new file mode 100644
index 0000000..0f36679
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module2.py
@@ -0,0 +1,10 @@
+"""
+Docstring formatted like this.
+"""
+
+a = {}
+# An assignment to a subscript (a['test']) broke introspection
+# https://github.com/pypa/flit/issues/343
+a['test'] = 6
+
+__version__ = '7.0'
diff --git a/flit_core/flit_core/tests/samples/moduleunimportable.py b/flit_core/flit_core/tests/samples/moduleunimportable.py
new file mode 100644
index 0000000..147d26e
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/moduleunimportable.py
@@ -0,0 +1,8 @@
+
+"""
+A sample unimportable module
+"""
+
+raise ImportError()
+
+__version__ = "0.1"
diff --git a/flit_core/flit_core/tests/samples/moduleunimportabledouble.py b/flit_core/flit_core/tests/samples/moduleunimportabledouble.py
new file mode 100644
index 0000000..42d51f3
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/moduleunimportabledouble.py
@@ -0,0 +1,8 @@
+
+"""
+A sample unimportable module with double assignment
+"""
+
+raise ImportError()
+
+VERSION = __version__ = "0.1"
diff --git a/flit_core/flit_core/tests/samples/my-description.rst b/flit_core/flit_core/tests/samples/my-description.rst
new file mode 100644
index 0000000..623cb1d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/my-description.rst
@@ -0,0 +1 @@
+Sample description for test.
diff --git a/flit_core/flit_core/tests/samples/no_docstring-pkg.toml b/flit_core/flit_core/tests/samples/no_docstring-pkg.toml
new file mode 100644
index 0000000..b68827f
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/no_docstring-pkg.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "no_docstring"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/no_docstring"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.urls]
+Documentation = "https://example.com/no_docstring"
diff --git a/flit_core/flit_core/tests/samples/no_docstring.py b/flit_core/flit_core/tests/samples/no_docstring.py
new file mode 100644
index 0000000..29524eb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/no_docstring.py
@@ -0,0 +1 @@
+__version__ = '7.0'
diff --git a/flit_core/flit_core/tests/samples/normalization/my_python_module.py b/flit_core/flit_core/tests/samples/normalization/my_python_module.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/normalization/my_python_module.py
diff --git a/flit_core/flit_core/tests/samples/normalization/pyproject.toml b/flit_core/flit_core/tests/samples/normalization/pyproject.toml
new file mode 100644
index 0000000..c32e4a1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/normalization/pyproject.toml
@@ -0,0 +1,14 @@
+[build-system]
+requires = ["flit_core >=3.8,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "my-python-module"
+version = "0.0.1"
+description = "Hyphenated package name, inferred import name"
+authors = [
+ {name = "Sir Robin", email = "robin@camelot.uk"}
+]
+
+[project.urls]
+homepage = "http://github.com/me/python-module"
diff --git a/flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst b/flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py b/flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py
new file mode 100644
index 0000000..445afbb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py
@@ -0,0 +1,8 @@
+"""
+==================
+ns1.pkg
+==================
+"""
+
+__version__ = '0.1'
+
diff --git a/flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml b/flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml
new file mode 100644
index 0000000..acbabb1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=3.5,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "ns1.pkg"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
diff --git a/flit_core/flit_core/tests/samples/package1.toml b/flit_core/flit_core/tests/samples/package1.toml
new file mode 100644
index 0000000..ca12080
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+description-file = "my-description.rst"
+home-page = "http://github.com/sirrobin/package1"
+
+[scripts]
+pkg_script = "package1:main"
diff --git a/flit_core/flit_core/tests/samples/package1/__init__.py b/flit_core/flit_core/tests/samples/package1/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/flit_core/flit_core/tests/samples/package1/data_dir/foo.sh b/flit_core/flit_core/tests/samples/package1/data_dir/foo.sh
new file mode 100644
index 0000000..92abcfb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/data_dir/foo.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo "Example data file"
diff --git a/flit_core/flit_core/tests/samples/package1/foo.py b/flit_core/flit_core/tests/samples/package1/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/flit_core/flit_core/tests/samples/package1/subpkg/__init__.py b/flit_core/flit_core/tests/samples/package1/subpkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/subpkg/__init__.py
diff --git a/flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json b/flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json
new file mode 100644
index 0000000..f77d03c
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json
@@ -0,0 +1 @@
+{"example": true}
diff --git a/flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py b/flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py
diff --git a/flit_core/flit_core/tests/samples/pep517/LICENSE b/flit_core/flit_core/tests/samples/pep517/LICENSE
new file mode 100644
index 0000000..7f5c194
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/LICENSE
@@ -0,0 +1 @@
+This file should be added to wheels
diff --git a/flit_core/flit_core/tests/samples/pep517/README.rst b/flit_core/flit_core/tests/samples/pep517/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/pep517/module1.py b/flit_core/flit_core/tests/samples/pep517/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/pep517/pyproject.toml b/flit_core/flit_core/tests/samples/pep517/pyproject.toml
new file mode 100644
index 0000000..b6cebac
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/pyproject.toml
@@ -0,0 +1,17 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "README.rst"
+requires = [
+ "requests >= 2.18",
+ "docutils",
+]
+
+[tool.flit.entrypoints.flit_test_example]
+foo = "module1:main"
diff --git a/flit_core/flit_core/tests/samples/pep621/LICENSE b/flit_core/flit_core/tests/samples/pep621/LICENSE
new file mode 100644
index 0000000..7f5c194
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/LICENSE
@@ -0,0 +1 @@
+This file should be added to wheels
diff --git a/flit_core/flit_core/tests/samples/pep621/README.rst b/flit_core/flit_core/tests/samples/pep621/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/pep621/module1a.py b/flit_core/flit_core/tests/samples/pep621/module1a.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/module1a.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/pep621/pyproject.toml b/flit_core/flit_core/tests/samples/pep621/pyproject.toml
new file mode 100644
index 0000000..72a85d0
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/pyproject.toml
@@ -0,0 +1,39 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "module1"
+authors = [
+ {name = "Sir Röbin", email = "robin@camelot.uk"}
+]
+maintainers = [
+ {name = "Sir Galahad"}
+]
+readme = "README.rst"
+license = {file = "LICENSE"}
+requires-python = ">=3.7"
+dependencies = [
+ "requests >= 2.18",
+ "docutils",
+]
+keywords = ["example", "test"]
+dynamic = [
+ "version",
+ "description",
+]
+
+[project.optional-dependencies]
+test = [
+ "pytest",
+ "mock; python_version<'3.6'"
+]
+
+[project.urls]
+homepage = "http://github.com/sirrobin/module1"
+
+[project.entry-points.flit_test_example]
+foo = "module1:main"
+
+[tool.flit.module]
+name = "module1a"
diff --git a/flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst b/flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py b/flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py
diff --git a/flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml b/flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml
new file mode 100644
index 0000000..0b579f3
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml
@@ -0,0 +1,28 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "module1"
+version = "0.03"
+description = "Statically specified description"
+authors = [
+ {name = "Sir Robin", email = "robin@camelot.uk"}
+]
+readme = {file = "README.rst", content-type = "text/x-rst"}
+classifiers = [
+ "Topic :: Internet :: WWW/HTTP",
+]
+dependencies = [
+ "requests >= 2.18",
+ "docutils",
+] # N.B. Using this to check behaviour with dependencies but no optional deps
+
+[project.urls]
+homepage = "http://github.com/sirrobin/module1"
+
+[project.scripts]
+foo = "module1:main"
+
+[project.gui-scripts]
+foo-gui = "module1:main"
diff --git a/flit_core/flit_core/tests/samples/requires-dev.toml b/flit_core/flit_core/tests/samples/requires-dev.toml
new file mode 100644
index 0000000..46e3170
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-dev.toml
@@ -0,0 +1,11 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+# This should generate a warning telling you to use requires-extra.dev
+dev-requires = ["apackage"]
diff --git a/flit_core/flit_core/tests/samples/requires-envmark.toml b/flit_core/flit_core/tests/samples/requires-envmark.toml
new file mode 100644
index 0000000..e97c5f0
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-envmark.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = [
+ "requests",
+ "pathlib2; python_version == '2.7'",
+]
diff --git a/flit_core/flit_core/tests/samples/requires-extra-envmark.toml b/flit_core/flit_core/tests/samples/requires-extra-envmark.toml
new file mode 100644
index 0000000..ac49cb0
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-extra-envmark.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.requires-extra]
+test = ["pathlib2; python_version == \"2.7\""]
diff --git a/flit_core/flit_core/tests/samples/requires-requests.toml b/flit_core/flit_core/tests/samples/requires-requests.toml
new file mode 100644
index 0000000..bf26ac5
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-requests.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+requires = ["requests"]
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/LICENSE b/flit_core/flit_core/tests/samples/with_data_dir/LICENSE
new file mode 100644
index 0000000..7f5c194
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/LICENSE
@@ -0,0 +1 @@
+This file should be added to wheels
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/README.rst b/flit_core/flit_core/tests/samples/with_data_dir/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.1 b/flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.1
new file mode 100644
index 0000000..c12128d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.1
@@ -0,0 +1 @@
+Example data file
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/module1.py b/flit_core/flit_core/tests/samples/with_data_dir/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml b/flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml
new file mode 100644
index 0000000..84d165e
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml
@@ -0,0 +1,26 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "module1"
+authors = [
+ {name = "Sir Röbin", email = "robin@camelot.uk"}
+]
+readme = "README.rst"
+license = {file = "LICENSE"}
+requires-python = ">=3.7"
+dependencies = [
+ "requests >= 2.18",
+ "docutils",
+]
+dynamic = [
+ "version",
+ "description",
+]
+
+[project.scripts]
+foo = "module1:main"
+
+[tool.flit.external-data]
+directory = "data"
diff --git a/flit_core/flit_core/tests/test_build_thyself.py b/flit_core/flit_core/tests/test_build_thyself.py
new file mode 100644
index 0000000..ad15819
--- /dev/null
+++ b/flit_core/flit_core/tests/test_build_thyself.py
@@ -0,0 +1,57 @@
+"""Tests of flit_core building itself"""
+import os
+import os.path as osp
+import pytest
+import tarfile
+from testpath import assert_isdir, assert_isfile
+import zipfile
+
+from flit_core import buildapi
+
+@pytest.fixture()
+def cwd_project():
+ proj_dir = osp.dirname(osp.dirname(osp.abspath(buildapi.__file__)))
+ if not osp.isfile(osp.join(proj_dir, 'pyproject.toml')):
+ pytest.skip("need flit_core source directory")
+
+ old_cwd = os.getcwd()
+ try:
+ os.chdir(proj_dir)
+ yield
+ finally:
+ os.chdir(old_cwd)
+
+
+def test_prepare_metadata(tmp_path, cwd_project):
+ tmp_path = str(tmp_path)
+ dist_info = buildapi.prepare_metadata_for_build_wheel(tmp_path)
+
+ assert dist_info.endswith('.dist-info')
+ assert dist_info.startswith('flit_core')
+ dist_info = osp.join(tmp_path, dist_info)
+ assert_isdir(dist_info)
+
+ assert_isfile(osp.join(dist_info, 'WHEEL'))
+ assert_isfile(osp.join(dist_info, 'METADATA'))
+
+
+def test_wheel(tmp_path, cwd_project):
+ tmp_path = str(tmp_path)
+ filename = buildapi.build_wheel(tmp_path)
+
+ assert filename.endswith('.whl')
+ assert filename.startswith('flit_core')
+ path = osp.join(tmp_path, filename)
+ assert_isfile(path)
+ assert zipfile.is_zipfile(path)
+
+
+def test_sdist(tmp_path, cwd_project):
+ tmp_path = str(tmp_path)
+ filename = buildapi.build_sdist(tmp_path)
+
+ assert filename.endswith('.tar.gz')
+ assert filename.startswith('flit_core')
+ path = osp.join(tmp_path, filename)
+ assert_isfile(path)
+ assert tarfile.is_tarfile(path)
diff --git a/flit_core/flit_core/tests/test_buildapi.py b/flit_core/flit_core/tests/test_buildapi.py
new file mode 100644
index 0000000..3e621e6
--- /dev/null
+++ b/flit_core/flit_core/tests/test_buildapi.py
@@ -0,0 +1,93 @@
+from contextlib import contextmanager
+import os
+import os.path as osp
+import tarfile
+from testpath import assert_isfile, assert_isdir
+from testpath.tempdir import TemporaryDirectory
+import zipfile
+
+from flit_core import buildapi
+
+samples_dir = osp.join(osp.dirname(__file__), 'samples')
+
+@contextmanager
+def cwd(directory):
+ prev = os.getcwd()
+ os.chdir(directory)
+ try:
+ yield
+ finally:
+ os.chdir(prev)
+
+def test_get_build_requires():
+ # This module can be inspected (for docstring & __version__) without
+ # importing it, so there are no build dependencies.
+ with cwd(osp.join(samples_dir,'pep517')):
+ assert buildapi.get_requires_for_build_wheel() == []
+ assert buildapi.get_requires_for_build_editable() == []
+ assert buildapi.get_requires_for_build_sdist() == []
+
+def test_get_build_requires_pep621_nodynamic():
+ # This module isn't inspected because version & description are specified
+ # as static metadata in pyproject.toml, so there are no build dependencies
+ with cwd(osp.join(samples_dir, 'pep621_nodynamic')):
+ assert buildapi.get_requires_for_build_wheel() == []
+ assert buildapi.get_requires_for_build_editable() == []
+ assert buildapi.get_requires_for_build_sdist() == []
+
+def test_get_build_requires_import():
+ # This one has to be imported, so its runtime dependencies are also
+ # build dependencies.
+ expected = ["numpy >=1.16.0"]
+ with cwd(osp.join(samples_dir, 'constructed_version')):
+ assert buildapi.get_requires_for_build_wheel() == expected
+ assert buildapi.get_requires_for_build_editable() == expected
+ assert buildapi.get_requires_for_build_sdist() == expected
+
+def test_build_wheel():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ filename = buildapi.build_wheel(td)
+ assert filename.endswith('.whl'), filename
+ assert_isfile(osp.join(td, filename))
+ assert zipfile.is_zipfile(osp.join(td, filename))
+ with zipfile.ZipFile(osp.join(td, filename)) as zip:
+ assert "module1.py" in zip.namelist()
+ assert "module1.pth" not in zip.namelist()
+
+def test_build_wheel_pep621():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir, 'pep621')):
+ filename = buildapi.build_wheel(td)
+ assert filename.endswith('.whl'), filename
+ assert_isfile(osp.join(td, filename))
+ assert zipfile.is_zipfile(osp.join(td, filename))
+
+def test_build_editable():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ filename = buildapi.build_editable(td)
+ assert filename.endswith('.whl'), filename
+ assert_isfile(osp.join(td, filename))
+ assert zipfile.is_zipfile(osp.join(td, filename))
+ with zipfile.ZipFile(osp.join(td, filename)) as zip:
+ assert "module1.py" not in zip.namelist()
+ assert "module1.pth" in zip.namelist()
+
+def test_build_sdist():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ filename = buildapi.build_sdist(td)
+ assert filename.endswith('.tar.gz'), filename
+ assert_isfile(osp.join(td, filename))
+ assert tarfile.is_tarfile(osp.join(td, filename))
+
+def test_prepare_metadata_for_build_wheel():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ dirname = buildapi.prepare_metadata_for_build_wheel(td)
+ assert dirname.endswith('.dist-info'), dirname
+ assert_isdir(osp.join(td, dirname))
+ assert_isfile(osp.join(td, dirname, 'METADATA'))
+
+def test_prepare_metadata_for_build_editable():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ dirname = buildapi.prepare_metadata_for_build_editable(td)
+ assert dirname.endswith('.dist-info'), dirname
+ assert_isdir(osp.join(td, dirname))
+ assert_isfile(osp.join(td, dirname, 'METADATA'))
diff --git a/flit_core/flit_core/tests/test_common.py b/flit_core/flit_core/tests/test_common.py
new file mode 100644
index 0000000..b6d6290
--- /dev/null
+++ b/flit_core/flit_core/tests/test_common.py
@@ -0,0 +1,158 @@
+import email.parser
+import email.policy
+from io import StringIO
+from pathlib import Path
+import pytest
+from unittest import TestCase
+
+from flit_core import config
+from flit_core.common import (
+ Module, get_info_from_module, InvalidVersion, NoVersionError, check_version,
+ normalize_file_permissions, Metadata, make_metadata,
+)
+
+samples_dir = Path(__file__).parent / 'samples'
+
+class ModuleTests(TestCase):
+ def test_ns_package_importable(self):
+ i = Module('ns1.pkg', samples_dir / 'ns1-pkg')
+ assert i.path == Path(samples_dir, 'ns1-pkg', 'ns1', 'pkg')
+ assert i.file == Path(samples_dir, 'ns1-pkg', 'ns1', 'pkg', '__init__.py')
+ assert i.is_package
+
+ assert i.in_namespace_package
+ assert i.namespace_package_name == 'ns1'
+
+ def test_package_importable(self):
+ i = Module('package1', samples_dir)
+ assert i.path == samples_dir / 'package1'
+ assert i.file == samples_dir / 'package1' / '__init__.py'
+ assert i.is_package
+
+ def test_module_importable(self):
+ i = Module('module1', samples_dir)
+ assert i.path == samples_dir / 'module1.py'
+ assert not i.is_package
+
+ def test_missing_name(self):
+ with self.assertRaises(ValueError):
+ i = Module('doesnt_exist', samples_dir)
+
+ def test_conflicting_modules(self):
+ with pytest.raises(ValueError, match="Multiple"):
+ Module('module1', samples_dir / 'conflicting_modules')
+
+ def test_get_info_from_module(self):
+ info = get_info_from_module(Module('module1', samples_dir))
+ self.assertEqual(info, {'summary': 'Example module',
+ 'version': '0.1'}
+ )
+
+ info = get_info_from_module(Module('module2', samples_dir))
+ self.assertEqual(info, {'summary': 'Docstring formatted like this.',
+ 'version': '7.0'}
+ )
+
+ pkg1 = Module('package1', samples_dir)
+ info = get_info_from_module(pkg1)
+ self.assertEqual(info, {'summary': 'A sample package',
+ 'version': '0.1'}
+ )
+ info = get_info_from_module(pkg1, for_fields=['version'])
+ self.assertEqual(info, {'version': '0.1'})
+ info = get_info_from_module(pkg1, for_fields=['description'])
+ self.assertEqual(info, {'summary': 'A sample package'})
+ info = get_info_from_module(pkg1, for_fields=[])
+ self.assertEqual(info, {})
+
+ info = get_info_from_module(Module('moduleunimportable', samples_dir))
+ self.assertEqual(info, {'summary': 'A sample unimportable module',
+ 'version': '0.1'}
+ )
+
+ info = get_info_from_module(Module('moduleunimportabledouble', samples_dir))
+ self.assertEqual(info, {'summary': 'A sample unimportable module with double assignment',
+ 'version': '0.1'}
+ )
+
+ info = get_info_from_module(Module('module1', samples_dir / 'constructed_version'))
+ self.assertEqual(info, {'summary': 'This module has a __version__ that requires runtime interpretation',
+ 'version': '1.2.3'}
+ )
+
+ info = get_info_from_module(Module('package1', samples_dir / 'imported_version'))
+ self.assertEqual(info, {'summary': 'This module has a __version__ that requires a relative import',
+ 'version': '0.5.8'}
+ )
+
+ with self.assertRaises(InvalidVersion):
+ get_info_from_module(Module('invalid_version1', samples_dir))
+
+ def test_version_raise(self):
+ with pytest.raises(InvalidVersion):
+ check_version('a.1.0.beta0')
+
+ with pytest.raises(InvalidVersion):
+ check_version('3!')
+
+ with pytest.raises(InvalidVersion):
+ check_version((1, 2))
+
+ with pytest.raises(NoVersionError):
+ check_version(None)
+
+ assert check_version('4.1.0beta1') == '4.1.0b1'
+ assert check_version('v1.2') == '1.2'
+
+def test_normalize_file_permissions():
+ assert normalize_file_permissions(0o100664) == 0o100644 # regular file
+ assert normalize_file_permissions(0o40775) == 0o40755 # directory
+
+@pytest.mark.parametrize(
+ ("requires_python", "expected_result"),
+ [
+ ("", True),
+ (">2.7", True),
+ ("3", False),
+ (">= 3.7", False),
+ ("<4, > 3.2", False),
+ (">3.4", False),
+ (">=2.7, !=3.0.*, !=3.1.*, !=3.2.*", True),
+ ("== 3.9", False),
+ ("~=2.7", True),
+ ("~=3.9", False),
+ ],
+)
+def test_supports_py2(requires_python, expected_result):
+ metadata = object.__new__(Metadata)
+ metadata.requires_python = requires_python
+ result = metadata.supports_py2
+ assert result == expected_result
+
+def test_make_metadata():
+ project_dir = samples_dir / 'pep621_nodynamic'
+ ini_info = config.read_flit_config(project_dir / 'pyproject.toml')
+ module = Module(ini_info.module, project_dir)
+ print(module.file)
+ md = make_metadata(module, ini_info)
+ assert md.version == '0.3'
+ assert md.summary == "Statically specified description"
+
+def test_metadata_multiline(tmp_path):
+ d = {
+ 'name': 'foo',
+ 'version': '1.0',
+ # Example from: https://packaging.python.org/specifications/core-metadata/#author
+ 'author': ('C. Schultz, Universal Features Syndicate\n'
+ 'Los Angeles, CA <cschultz@peanuts.example.com>'),
+ }
+ md = Metadata(d)
+ sio = StringIO()
+ md.write_metadata_file(sio)
+ sio.seek(0)
+
+ msg = email.parser.Parser(policy=email.policy.compat32).parse(sio)
+ assert msg['Name'] == d['name']
+ assert msg['Version'] == d['version']
+ assert [l.lstrip() for l in msg['Author'].splitlines()] == d['author'].splitlines()
+ assert not msg.defects
diff --git a/flit_core/flit_core/tests/test_config.py b/flit_core/flit_core/tests/test_config.py
new file mode 100644
index 0000000..eafb7e9
--- /dev/null
+++ b/flit_core/flit_core/tests/test_config.py
@@ -0,0 +1,165 @@
+import logging
+from pathlib import Path
+import pytest
+
+from flit_core import config
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_flatten_entrypoints():
+ r = config.flatten_entrypoints({'a': {'b': {'c': 'd'}, 'e': {'f': {'g': 'h'}}, 'i': 'j'}})
+ assert r == {'a': {'i': 'j'}, 'a.b': {'c': 'd'}, 'a.e.f': {'g': 'h'}}
+
+def test_load_toml():
+ inf = config.read_flit_config(samples_dir / 'module1-pkg.toml')
+ assert inf.module == 'module1'
+ assert inf.metadata['home_page'] == 'http://github.com/sirrobin/module1'
+
+def test_load_toml_ns():
+ inf = config.read_flit_config(samples_dir / 'ns1-pkg' / 'pyproject.toml')
+ assert inf.module == 'ns1.pkg'
+ assert inf.metadata['home_page'] == 'http://github.com/sirrobin/module1'
+
+def test_load_normalization():
+ inf = config.read_flit_config(samples_dir / 'normalization' / 'pyproject.toml')
+ assert inf.module == 'my_python_module'
+ assert inf.metadata['name'] == 'my-python-module'
+
+def test_load_pep621():
+ inf = config.read_flit_config(samples_dir / 'pep621' / 'pyproject.toml')
+ assert inf.module == 'module1a'
+ assert inf.metadata['name'] == 'module1'
+ assert inf.metadata['description_content_type'] == 'text/x-rst'
+ # Remove all whitespace from requirements so we don't check exact format:
+ assert {r.replace(' ', '') for r in inf.metadata['requires_dist']} == {
+ 'docutils',
+ 'requests>=2.18',
+ 'pytest;extra=="test"', # from [project.optional-dependencies]
+ 'mock;extra=="test"and(python_version<\'3.6\')',
+ }
+ assert inf.metadata['author_email'] == "Sir Röbin <robin@camelot.uk>"
+ assert inf.entrypoints['flit_test_example']['foo'] == 'module1:main'
+ assert set(inf.dynamic_metadata) == {'version', 'description'}
+
+def test_load_pep621_nodynamic():
+ inf = config.read_flit_config(samples_dir / 'pep621_nodynamic' / 'pyproject.toml')
+ assert inf.module == 'module1'
+ assert inf.metadata['name'] == 'module1'
+ assert inf.metadata['version'] == '0.3'
+ assert inf.metadata['summary'] == 'Statically specified description'
+ assert set(inf.dynamic_metadata) == set()
+
+ # Filling reqs_by_extra when dependencies were specified but no optional
+ # dependencies was a bug.
+ assert inf.reqs_by_extra == {'.none': ['requests >= 2.18', 'docutils']}
+
+def test_misspelled_key():
+ with pytest.raises(config.ConfigError) as e_info:
+ config.read_flit_config(samples_dir / 'misspelled-key.toml')
+
+ assert 'description-file' in str(e_info.value)
+
+def test_description_file():
+ info = config.read_flit_config(samples_dir / 'package1.toml')
+ assert info.metadata['description'] == \
+ "Sample description for test.\n"
+ assert info.metadata['description_content_type'] == 'text/x-rst'
+
+def test_missing_description_file():
+ with pytest.raises(config.ConfigError, match=r"Description file .* does not exist"):
+ config.read_flit_config(samples_dir / 'missing-description-file.toml')
+
+def test_bad_description_extension(caplog):
+ info = config.read_flit_config(samples_dir / 'bad-description-ext.toml')
+ assert info.metadata['description_content_type'] is None
+ assert any((r.levelno == logging.WARN and "Unknown extension" in r.msg)
+ for r in caplog.records)
+
+def test_extras():
+ info = config.read_flit_config(samples_dir / 'extras.toml')
+ requires_dist = set(info.metadata['requires_dist'])
+ assert requires_dist == {
+ 'toml',
+ 'pytest ; extra == "test"',
+ 'requests ; extra == "custom"',
+ }
+ assert set(info.metadata['provides_extra']) == {'test', 'custom'}
+
+def test_extras_dev_conflict():
+ with pytest.raises(config.ConfigError, match=r'dev-requires'):
+ config.read_flit_config(samples_dir / 'extras-dev-conflict.toml')
+
+def test_extras_dev_warning(caplog):
+ info = config.read_flit_config(samples_dir / 'requires-dev.toml')
+ assert '"dev-requires = ..." is obsolete' in caplog.text
+ assert set(info.metadata['requires_dist']) == {'apackage ; extra == "dev"'}
+
+def test_requires_extra_env_marker():
+ info = config.read_flit_config(samples_dir / 'requires-extra-envmark.toml')
+ assert info.metadata['requires_dist'][0].startswith('pathlib2 ;')
+
+@pytest.mark.parametrize(('erroneous', 'match'), [
+ ({'requires-extra': None}, r'Expected a dict for requires-extra field'),
+ ({'requires-extra': dict(dev=None)}, r'Expected a dict of lists for requires-extra field'),
+ ({'requires-extra': dict(dev=[1])}, r'Expected a string list for requires-extra'),
+])
+def test_faulty_requires_extra(erroneous, match):
+ metadata = {'module': 'mymod', 'author': '', 'author-email': ''}
+ with pytest.raises(config.ConfigError, match=match):
+ config._prep_metadata(dict(metadata, **erroneous), None)
+
+@pytest.mark.parametrize(('path', 'err_match'), [
+ ('../bar', 'out of the directory'),
+ ('foo/../../bar', 'out of the directory'),
+ ('/home', 'absolute path'),
+ ('foo:bar', 'bad character'),
+])
+def test_bad_include_paths(path, err_match):
+ toml_cfg = {'tool': {'flit': {
+ 'metadata': {'module': 'xyz', 'author': 'nobody'},
+ 'sdist': {'include': [path]}
+ }}}
+
+ with pytest.raises(config.ConfigError, match=err_match):
+ config.prep_toml_config(toml_cfg, None)
+
+@pytest.mark.parametrize(('proj_bad', 'err_match'), [
+ ({'version': 1}, r'\bstr\b'),
+ ({'license': {'fromage': 2}}, '[Uu]nrecognised'),
+ ({'license': {'file': 'LICENSE', 'text': 'xyz'}}, 'both'),
+ ({'license': {}}, 'required'),
+ ({'keywords': 'foo'}, 'list'),
+ ({'keywords': ['foo', 7]}, 'strings'),
+ ({'entry-points': {'foo': 'module1:main'}}, 'entry-point.*tables'),
+ ({'entry-points': {'group': {'foo': 7}}}, 'entry-point.*string'),
+ ({'entry-points': {'gui_scripts': {'foo': 'a:b'}}}, r'\[project\.gui-scripts\]'),
+ ({'scripts': {'foo': 7}}, 'scripts.*string'),
+ ({'gui-scripts': {'foo': 7}}, 'gui-scripts.*string'),
+ ({'optional-dependencies': {'test': 'requests'}}, 'list.*optional-dep'),
+ ({'optional-dependencies': {'test': [7]}}, 'string.*optional-dep'),
+ ({'dynamic': ['classifiers']}, 'dynamic'),
+ ({'dynamic': ['version']}, r'dynamic.*\[project\]'),
+ ({'authors': ['thomas']}, r'author.*\bdict'),
+ ({'maintainers': [{'title': 'Dr'}]}, r'maintainer.*title'),
+])
+def test_bad_pep621_info(proj_bad, err_match):
+ proj = {'name': 'module1', 'version': '1.0', 'description': 'x'}
+ proj.update(proj_bad)
+ with pytest.raises(config.ConfigError, match=err_match):
+ config.read_pep621_metadata(proj, samples_dir / 'pep621')
+
+@pytest.mark.parametrize(('readme', 'err_match'), [
+ ({'file': 'README.rst'}, 'required'),
+ ({'file': 'README.rst', 'content-type': 'text/x-python'}, 'content-type'),
+ ('/opt/README.rst', 'relative'),
+ ({'file': 'README.rst', 'text': '', 'content-type': 'text/x-rst'}, 'both'),
+ ({'content-type': 'text/x-rst'}, 'required'),
+ ({'file': 'README.rst', 'content-type': 'text/x-rst', 'a': 'b'}, '[Uu]nrecognised'),
+ (5, r'readme.*string'),
+])
+def test_bad_pep621_readme(readme, err_match):
+ proj = {
+ 'name': 'module1', 'version': '1.0', 'description': 'x', 'readme': readme
+ }
+ with pytest.raises(config.ConfigError, match=err_match):
+ config.read_pep621_metadata(proj, samples_dir / 'pep621')
diff --git a/flit_core/flit_core/tests/test_sdist.py b/flit_core/flit_core/tests/test_sdist.py
new file mode 100644
index 0000000..cffea02
--- /dev/null
+++ b/flit_core/flit_core/tests/test_sdist.py
@@ -0,0 +1,61 @@
+from io import BytesIO
+import os.path as osp
+from pathlib import Path
+import tarfile
+from testpath import assert_isfile
+
+from flit_core import sdist
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_make_sdist(tmp_path):
+ # Smoke test of making a complete sdist
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'package1.toml')
+ builder.build(tmp_path)
+ assert_isfile(tmp_path / 'package1-0.1.tar.gz')
+
+
+def test_make_sdist_pep621(tmp_path):
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'pep621' / 'pyproject.toml')
+ path = builder.build(tmp_path)
+ assert path == tmp_path / 'module1-0.1.tar.gz'
+ assert_isfile(path)
+
+
+def test_make_sdist_pep621_nodynamic(tmp_path):
+ builder = sdist.SdistBuilder.from_ini_path(
+ samples_dir / 'pep621_nodynamic' / 'pyproject.toml'
+ )
+ path = builder.build(tmp_path)
+ assert path == tmp_path / 'module1-0.3.tar.gz'
+ assert_isfile(path)
+
+
+def test_clean_tarinfo():
+ with tarfile.open(mode='w', fileobj=BytesIO()) as tf:
+ ti = tf.gettarinfo(str(samples_dir / 'module1.py'))
+ cleaned = sdist.clean_tarinfo(ti, mtime=42)
+ assert cleaned.uid == 0
+ assert cleaned.uname == ''
+ assert cleaned.mtime == 42
+
+
+def test_include_exclude():
+ builder = sdist.SdistBuilder.from_ini_path(
+ samples_dir / 'inclusion' / 'pyproject.toml'
+ )
+ files = builder.apply_includes_excludes(builder.select_files())
+
+ assert osp.join('doc', 'test.rst') in files
+ assert osp.join('doc', 'test.txt') not in files
+ assert osp.join('doc', 'subdir', 'test.txt') in files
+ assert osp.join('doc', 'subdir', 'subsubdir', 'test.md') not in files
+
+
+def test_data_dir():
+ builder = sdist.SdistBuilder.from_ini_path(
+ samples_dir / 'with_data_dir' / 'pyproject.toml'
+ )
+ files = builder.apply_includes_excludes(builder.select_files())
+
+ assert osp.join('data', 'share', 'man', 'man1', 'foo.1') in files
diff --git a/flit_core/flit_core/tests/test_versionno.py b/flit_core/flit_core/tests/test_versionno.py
new file mode 100644
index 0000000..b02792b
--- /dev/null
+++ b/flit_core/flit_core/tests/test_versionno.py
@@ -0,0 +1,40 @@
+import pytest
+
+from flit_core.common import InvalidVersion
+from flit_core.versionno import normalise_version
+
+def test_normalise_version():
+ nv = normalise_version
+ assert nv('4.3.1') == '4.3.1'
+ assert nv('1.0b2') == '1.0b2'
+ assert nv('2!1.3') == '2!1.3'
+
+ # Prereleases
+ assert nv('1.0B2') == '1.0b2'
+ assert nv('1.0.b2') == '1.0b2'
+ assert nv('1.0beta2') == '1.0b2'
+ assert nv('1.01beta002') == '1.1b2'
+ assert nv('1.0-preview2') == '1.0rc2'
+ assert nv('1.0_c') == '1.0rc0'
+
+ # Post releases
+ assert nv('1.0post-2') == '1.0.post2'
+ assert nv('1.0post') == '1.0.post0'
+ assert nv('1.0-rev3') == '1.0.post3'
+ assert nv('1.0-2') == '1.0.post2'
+
+ # Development versions
+ assert nv('1.0dev-2') == '1.0.dev2'
+ assert nv('1.0dev') == '1.0.dev0'
+ assert nv('1.0-dev3') == '1.0.dev3'
+
+ assert nv('1.0+ubuntu-01') == '1.0+ubuntu.1'
+ assert nv('v1.3-pre2') == '1.3rc2'
+ assert nv(' 1.2.5.6\t') == '1.2.5.6'
+ assert nv('1.0-alpha3-post02+ubuntu_xenial_5') == '1.0a3.post2+ubuntu.xenial.5'
+
+ with pytest.raises(InvalidVersion):
+ nv('3!')
+
+ with pytest.raises(InvalidVersion):
+ nv('abc')
diff --git a/flit_core/flit_core/tests/test_wheel.py b/flit_core/flit_core/tests/test_wheel.py
new file mode 100644
index 0000000..310f9c6
--- /dev/null
+++ b/flit_core/flit_core/tests/test_wheel.py
@@ -0,0 +1,47 @@
+from pathlib import Path
+from zipfile import ZipFile
+
+from testpath import assert_isfile
+
+from flit_core.wheel import make_wheel_in, main
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_licenses_dir(tmp_path):
+ # Smoketest for https://github.com/pypa/flit/issues/399
+ info = make_wheel_in(samples_dir / 'inclusion' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+
+
+def test_source_date_epoch(tmp_path, monkeypatch):
+ monkeypatch.setenv('SOURCE_DATE_EPOCH', '1633007882')
+ info = make_wheel_in(samples_dir / 'pep621' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+ # Minimum value for zip timestamps is 1980-1-1
+ with ZipFile(info.file, 'r') as zf:
+ assert zf.getinfo('module1a.py').date_time[:3] == (2021, 9, 30)
+
+
+def test_zero_timestamp(tmp_path, monkeypatch):
+ monkeypatch.setenv('SOURCE_DATE_EPOCH', '0')
+ info = make_wheel_in(samples_dir / 'pep621' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+ # Minimum value for zip timestamps is 1980-1-1
+ with ZipFile(info.file, 'r') as zf:
+ assert zf.getinfo('module1a.py').date_time == (1980, 1, 1, 0, 0, 0)
+
+
+def test_main(tmp_path):
+ main(['--outdir', str(tmp_path), str(samples_dir / 'pep621')])
+ wheels = list(tmp_path.glob('*.whl'))
+ assert len(wheels) == 1
+ # Minimum value for zip timestamps is 1980-1-1
+ with ZipFile(wheels[0], 'r') as zf:
+ assert 'module1a.py' in zf.namelist()
+
+
+def test_data_dir(tmp_path):
+ info = make_wheel_in(samples_dir / 'with_data_dir' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+ with ZipFile(info.file, 'r') as zf:
+ assert 'module1-0.1.data/data/share/man/man1/foo.1' in zf.namelist()
diff --git a/flit_core/flit_core/vendor/README b/flit_core/flit_core/vendor/README
new file mode 100644
index 0000000..32e1b00
--- /dev/null
+++ b/flit_core/flit_core/vendor/README
@@ -0,0 +1,13 @@
+flit_core bundles the 'tomli' TOML parser, to avoid a bootstrapping problem.
+tomli is packaged using Flit, so there would be a dependency cycle when building
+from source. Vendoring a copy of tomli avoids this. The code in tomli is under
+the MIT license, and the LICENSE file is in the .dist-info folder.
+
+If you want to unbundle tomli and rely on it as a separate package, you can
+replace the package with Python code doing 'from tomli import *'. You will
+probably need to work around the dependency cycle between flit_core and tomli.
+
+Bundling a TOML parser should be a special case - I don't plan on bundling
+anything else in flit_core (or depending on any other packages).
+I hope that a TOML parser will be added to the Python standard library, and then
+this bundled parser will go away.
diff --git a/flit_core/flit_core/vendor/__init__.py b/flit_core/flit_core/vendor/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/vendor/__init__.py
diff --git a/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE
new file mode 100644
index 0000000..e859590
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Taneli Hukkinen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA
new file mode 100644
index 0000000..0ddc586
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA
@@ -0,0 +1,208 @@
+Metadata-Version: 2.1
+Name: tomli
+Version: 1.2.3
+Summary: A lil' TOML parser
+Keywords: toml
+Author-email: Taneli Hukkinen <hukkin@users.noreply.github.com>
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: MacOS
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Typing :: Typed
+Project-URL: Changelog, https://github.com/hukkin/tomli/blob/master/CHANGELOG.md
+Project-URL: Homepage, https://github.com/hukkin/tomli
+
+[![Build Status](https://github.com/hukkin/tomli/workflows/Tests/badge.svg?branch=master)](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush)
+[![codecov.io](https://codecov.io/gh/hukkin/tomli/branch/master/graph/badge.svg)](https://codecov.io/gh/hukkin/tomli)
+[![PyPI version](https://img.shields.io/pypi/v/tomli)](https://pypi.org/project/tomli)
+
+# Tomli
+
+> A lil' TOML parser
+
+**Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)*
+
+<!-- mdformat-toc start --slug=github --maxlevel=6 --minlevel=2 -->
+
+- [Intro](#intro)
+- [Installation](#installation)
+- [Usage](#usage)
+ - [Parse a TOML string](#parse-a-toml-string)
+ - [Parse a TOML file](#parse-a-toml-file)
+ - [Handle invalid TOML](#handle-invalid-toml)
+ - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats)
+- [FAQ](#faq)
+ - [Why this parser?](#why-this-parser)
+ - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported)
+ - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function)
+ - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types)
+- [Performance](#performance)
+
+<!-- mdformat-toc end -->
+
+## Intro<a name="intro"></a>
+
+Tomli is a Python library for parsing [TOML](https://toml.io).
+Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0).
+
+## Installation<a name="installation"></a>
+
+```bash
+pip install tomli
+```
+
+## Usage<a name="usage"></a>
+
+### Parse a TOML string<a name="parse-a-toml-string"></a>
+
+```python
+import tomli
+
+toml_str = """
+ gretzky = 99
+
+ [kurri]
+ jari = 17
+ """
+
+toml_dict = tomli.loads(toml_str)
+assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}}
+```
+
+### Parse a TOML file<a name="parse-a-toml-file"></a>
+
+```python
+import tomli
+
+with open("path_to_file/conf.toml", "rb") as f:
+ toml_dict = tomli.load(f)
+```
+
+The file must be opened in binary mode (with the `"rb"` flag).
+Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled,
+both of which are required to correctly parse TOML.
+Support for text file objects is deprecated for removal in the next major release.
+
+### Handle invalid TOML<a name="handle-invalid-toml"></a>
+
+```python
+import tomli
+
+try:
+ toml_dict = tomli.loads("]] this is invalid TOML [[")
+except tomli.TOMLDecodeError:
+ print("Yep, definitely not valid.")
+```
+
+Note that while the `TOMLDecodeError` type is public API, error messages of raised instances of it are not.
+Error messages should not be assumed to stay constant across Tomli versions.
+
+### Construct `decimal.Decimal`s from TOML floats<a name="construct-decimaldecimals-from-toml-floats"></a>
+
+```python
+from decimal import Decimal
+import tomli
+
+toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal)
+assert toml_dict["precision-matters"] == Decimal("0.982492")
+```
+
+Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type.
+The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated.
+
+Illegal types include `dict`, `list`, and anything that has the `append` attribute.
+Parsing floats into an illegal type results in undefined behavior.
+
+## FAQ<a name="faq"></a>
+
+### Why this parser?<a name="why-this-parser"></a>
+
+- it's lil'
+- pure Python with zero dependencies
+- the fastest pure Python parser [\*](#performance):
+ 15x as fast as [tomlkit](https://pypi.org/project/tomlkit/),
+ 2.4x as fast as [toml](https://pypi.org/project/toml/)
+- outputs [basic data types](#how-do-toml-types-map-into-python-types) only
+- 100% spec compliant: passes all tests in
+ [a test set](https://github.com/toml-lang/compliance/pull/8)
+ soon to be merged to the official
+ [compliance tests for TOML](https://github.com/toml-lang/compliance)
+ repository
+- thoroughly tested: 100% branch coverage
+
+### Is comment preserving round-trip parsing supported?<a name="is-comment-preserving-round-trip-parsing-supported"></a>
+
+No.
+
+The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only.
+Preserving comments requires a custom type to be returned so will not be supported,
+at least not by the `tomli.loads` and `tomli.load` functions.
+
+Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need.
+
+### Is there a `dumps`, `write` or `encode` function?<a name="is-there-a-dumps-write-or-encode-function"></a>
+
+[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions.
+
+The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal.
+
+### How do TOML types map into Python types?<a name="how-do-toml-types-map-into-python-types"></a>
+
+| TOML type | Python type | Details |
+| ---------------- | ------------------- | ------------------------------------------------------------ |
+| Document Root | `dict` | |
+| Key | `str` | |
+| String | `str` | |
+| Integer | `int` | |
+| Float | `float` | |
+| Boolean | `bool` | |
+| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` |
+| Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` |
+| Local Date | `datetime.date` | |
+| Local Time | `datetime.time` | |
+| Array | `list` | |
+| Table | `dict` | |
+| Inline Table | `dict` | |
+
+## Performance<a name="performance"></a>
+
+The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers.
+The benchmark can be run with `tox -e benchmark-pypi`.
+Running the benchmark on my personal computer output the following:
+
+```console
+foo@bar:~/dev/tomli$ tox -e benchmark-pypi
+benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2
+benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909'
+benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())'
+2021-07-23
+benchmark-pypi run-test: commands[1] | python --version
+Python 3.8.10
+benchmark-pypi run-test: commands[2] | python benchmark/run.py
+Parsing data.toml 5000 times:
+------------------------------------------------------
+ parser | exec time | performance (more is better)
+-----------+------------+-----------------------------
+ rtoml | 0.901 s | baseline (100%)
+ pytomlpp | 1.08 s | 83.15%
+ tomli | 3.89 s | 23.15%
+ toml | 9.36 s | 9.63%
+ qtoml | 11.5 s | 7.82%
+ tomlkit | 56.8 s | 1.59%
+```
+
+The parsers are ordered from fastest to slowest, using the fastest parser as baseline.
+Tomli performed the best out of all pure Python TOML parsers,
+losing only to pytomlpp (wraps C++) and rtoml (wraps Rust).
+
diff --git a/flit_core/flit_core/vendor/tomli/__init__.py b/flit_core/flit_core/vendor/tomli/__init__.py
new file mode 100644
index 0000000..8597467
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/__init__.py
@@ -0,0 +1,9 @@
+"""A lil' TOML parser."""
+
+__all__ = ("loads", "load", "TOMLDecodeError")
+__version__ = "1.2.3" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT
+
+from ._parser import TOMLDecodeError, load, loads
+
+# Pretend this exception was created here.
+TOMLDecodeError.__module__ = "tomli"
diff --git a/flit_core/flit_core/vendor/tomli/_parser.py b/flit_core/flit_core/vendor/tomli/_parser.py
new file mode 100644
index 0000000..093afe5
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/_parser.py
@@ -0,0 +1,663 @@
+import string
+from types import MappingProxyType
+from typing import Any, BinaryIO, Dict, FrozenSet, Iterable, NamedTuple, Optional, Tuple
+import warnings
+
+from ._re import (
+ RE_DATETIME,
+ RE_LOCALTIME,
+ RE_NUMBER,
+ match_to_datetime,
+ match_to_localtime,
+ match_to_number,
+)
+from ._types import Key, ParseFloat, Pos
+
+ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
+
+# Neither of these sets include quotation mark or backslash. They are
+# currently handled as separate cases in the parser functions.
+ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t")
+ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n")
+
+ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS
+ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS
+
+ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS
+
+TOML_WS = frozenset(" \t")
+TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n")
+BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_")
+KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'")
+HEXDIGIT_CHARS = frozenset(string.hexdigits)
+
+BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType(
+ {
+ "\\b": "\u0008", # backspace
+ "\\t": "\u0009", # tab
+ "\\n": "\u000A", # linefeed
+ "\\f": "\u000C", # form feed
+ "\\r": "\u000D", # carriage return
+ '\\"': "\u0022", # quote
+ "\\\\": "\u005C", # backslash
+ }
+)
+
+
+class TOMLDecodeError(ValueError):
+ """An error raised if a document is not valid TOML."""
+
+
+def load(fp: BinaryIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]:
+ """Parse TOML from a binary file object."""
+ s_bytes = fp.read()
+ try:
+ s = s_bytes.decode()
+ except AttributeError:
+ warnings.warn(
+ "Text file object support is deprecated in favor of binary file objects."
+ ' Use `open("foo.toml", "rb")` to open the file in binary mode.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ s = s_bytes # type: ignore[assignment]
+ return loads(s, parse_float=parse_float)
+
+
+def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901
+ """Parse TOML from a string."""
+
+ # The spec allows converting "\r\n" to "\n", even in string
+ # literals. Let's do so to simplify parsing.
+ src = s.replace("\r\n", "\n")
+ pos = 0
+ out = Output(NestedDict(), Flags())
+ header: Key = ()
+
+ # Parse one statement at a time
+ # (typically means one line in TOML source)
+ while True:
+ # 1. Skip line leading whitespace
+ pos = skip_chars(src, pos, TOML_WS)
+
+ # 2. Parse rules. Expect one of the following:
+ # - end of file
+ # - end of line
+ # - comment
+ # - key/value pair
+ # - append dict to list (and move to its namespace)
+ # - create dict (and move to its namespace)
+ # Skip trailing whitespace when applicable.
+ try:
+ char = src[pos]
+ except IndexError:
+ break
+ if char == "\n":
+ pos += 1
+ continue
+ if char in KEY_INITIAL_CHARS:
+ pos = key_value_rule(src, pos, out, header, parse_float)
+ pos = skip_chars(src, pos, TOML_WS)
+ elif char == "[":
+ try:
+ second_char: Optional[str] = src[pos + 1]
+ except IndexError:
+ second_char = None
+ if second_char == "[":
+ pos, header = create_list_rule(src, pos, out)
+ else:
+ pos, header = create_dict_rule(src, pos, out)
+ pos = skip_chars(src, pos, TOML_WS)
+ elif char != "#":
+ raise suffixed_err(src, pos, "Invalid statement")
+
+ # 3. Skip comment
+ pos = skip_comment(src, pos)
+
+ # 4. Expect end of line or end of file
+ try:
+ char = src[pos]
+ except IndexError:
+ break
+ if char != "\n":
+ raise suffixed_err(
+ src, pos, "Expected newline or end of document after a statement"
+ )
+ pos += 1
+
+ return out.data.dict
+
+
+class Flags:
+ """Flags that map to parsed keys/namespaces."""
+
+ # Marks an immutable namespace (inline array or inline table).
+ FROZEN = 0
+ # Marks a nest that has been explicitly created and can no longer
+ # be opened using the "[table]" syntax.
+ EXPLICIT_NEST = 1
+
+ def __init__(self) -> None:
+ self._flags: Dict[str, dict] = {}
+
+ def unset_all(self, key: Key) -> None:
+ cont = self._flags
+ for k in key[:-1]:
+ if k not in cont:
+ return
+ cont = cont[k]["nested"]
+ cont.pop(key[-1], None)
+
+ def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None:
+ cont = self._flags
+ for k in head_key:
+ if k not in cont:
+ cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont = cont[k]["nested"]
+ for k in rel_key:
+ if k in cont:
+ cont[k]["flags"].add(flag)
+ else:
+ cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}}
+ cont = cont[k]["nested"]
+
+ def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003
+ cont = self._flags
+ key_parent, key_stem = key[:-1], key[-1]
+ for k in key_parent:
+ if k not in cont:
+ cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont = cont[k]["nested"]
+ if key_stem not in cont:
+ cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag)
+
+ def is_(self, key: Key, flag: int) -> bool:
+ if not key:
+ return False # document root has no flags
+ cont = self._flags
+ for k in key[:-1]:
+ if k not in cont:
+ return False
+ inner_cont = cont[k]
+ if flag in inner_cont["recursive_flags"]:
+ return True
+ cont = inner_cont["nested"]
+ key_stem = key[-1]
+ if key_stem in cont:
+ cont = cont[key_stem]
+ return flag in cont["flags"] or flag in cont["recursive_flags"]
+ return False
+
+
+class NestedDict:
+ def __init__(self) -> None:
+ # The parsed content of the TOML document
+ self.dict: Dict[str, Any] = {}
+
+ def get_or_create_nest(
+ self,
+ key: Key,
+ *,
+ access_lists: bool = True,
+ ) -> dict:
+ cont: Any = self.dict
+ for k in key:
+ if k not in cont:
+ cont[k] = {}
+ cont = cont[k]
+ if access_lists and isinstance(cont, list):
+ cont = cont[-1]
+ if not isinstance(cont, dict):
+ raise KeyError("There is no nest behind this key")
+ return cont
+
+ def append_nest_to_list(self, key: Key) -> None:
+ cont = self.get_or_create_nest(key[:-1])
+ last_key = key[-1]
+ if last_key in cont:
+ list_ = cont[last_key]
+ try:
+ list_.append({})
+ except AttributeError:
+ raise KeyError("An object other than list found behind this key")
+ else:
+ cont[last_key] = [{}]
+
+
+class Output(NamedTuple):
+ data: NestedDict
+ flags: Flags
+
+
+def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos:
+ try:
+ while src[pos] in chars:
+ pos += 1
+ except IndexError:
+ pass
+ return pos
+
+
+def skip_until(
+ src: str,
+ pos: Pos,
+ expect: str,
+ *,
+ error_on: FrozenSet[str],
+ error_on_eof: bool,
+) -> Pos:
+ try:
+ new_pos = src.index(expect, pos)
+ except ValueError:
+ new_pos = len(src)
+ if error_on_eof:
+ raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None
+
+ if not error_on.isdisjoint(src[pos:new_pos]):
+ while src[pos] not in error_on:
+ pos += 1
+ raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}")
+ return new_pos
+
+
+def skip_comment(src: str, pos: Pos) -> Pos:
+ try:
+ char: Optional[str] = src[pos]
+ except IndexError:
+ char = None
+ if char == "#":
+ return skip_until(
+ src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False
+ )
+ return pos
+
+
+def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos:
+ while True:
+ pos_before_skip = pos
+ pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
+ pos = skip_comment(src, pos)
+ if pos == pos_before_skip:
+ return pos
+
+
def create_dict_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]:
    """Parse a `[table]` header and create the table in `out`.

    `pos` must point at the opening "[".  Returns the position after the
    closing "]" and the parsed (dotted) key.
    """
    pos += 1  # Skip "["
    pos = skip_chars(src, pos, TOML_WS)
    pos, key = parse_key(src, pos)

    # A table may be declared at most once, and never inside a namespace
    # frozen by an inline table/array value.
    if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN):
        raise suffixed_err(src, pos, f"Can not declare {key} twice")
    out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.get_or_create_nest(key)
    except KeyError:
        raise suffixed_err(src, pos, "Can not overwrite a value") from None

    if not src.startswith("]", pos):
        raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration')
    return pos + 1, key
+
+
def create_list_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]:
    """Parse a `[[table]]` header and append a new item to the array of tables.

    `pos` must point at the first "[" of the "[[".  Returns the position
    after the closing "]]" and the parsed (dotted) key.
    """
    pos += 2  # Skip "[["
    pos = skip_chars(src, pos, TOML_WS)
    pos, key = parse_key(src, pos)

    if out.flags.is_(key, Flags.FROZEN):
        raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}")
    # Free the namespace now that it points to another empty list item...
    out.flags.unset_all(key)
    # ...but this key precisely is still prohibited from table declaration
    out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.append_nest_to_list(key)
    except KeyError:
        raise suffixed_err(src, pos, "Can not overwrite a value") from None

    if not src.startswith("]]", pos):
        raise suffixed_err(src, pos, 'Expected "]]" at the end of an array declaration')
    return pos + 2, key
+
+
def key_value_rule(
    src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat
) -> Pos:
    """Parse one `key = value` line belonging to the table named by `header`.

    Stores the value in `out.data` and updates the namespace flags; returns
    the position after the value.
    """
    pos, key, value = parse_key_value_pair(src, pos, parse_float)
    key_parent, key_stem = key[:-1], key[-1]
    # The key is relative to `header`; build the absolute parent namespace.
    abs_key_parent = header + key_parent

    if out.flags.is_(abs_key_parent, Flags.FROZEN):
        raise suffixed_err(
            src, pos, f"Can not mutate immutable namespace {abs_key_parent}"
        )
    # Containers in the relative path can't be opened with the table syntax after this
    out.flags.set_for_relative_key(header, key, Flags.EXPLICIT_NEST)
    try:
        nest = out.data.get_or_create_nest(abs_key_parent)
    except KeyError:
        raise suffixed_err(src, pos, "Can not overwrite a value") from None
    if key_stem in nest:
        raise suffixed_err(src, pos, "Can not overwrite a value")
    # Mark inline table and array namespaces recursively immutable
    if isinstance(value, (dict, list)):
        out.flags.set(header + key, Flags.FROZEN, recursive=True)
    nest[key_stem] = value
    return pos
+
+
def parse_key_value_pair(
    src: str, pos: Pos, parse_float: ParseFloat
) -> Tuple[Pos, Key, Any]:
    """Parse `key = value` starting at `pos`; return (new_pos, key, value)."""
    pos, key = parse_key(src, pos)
    if pos >= len(src) or src[pos] != "=":
        raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair')
    pos = skip_chars(src, pos + 1, TOML_WS)
    pos, value = parse_value(src, pos, parse_float)
    return pos, key, value
+
+
def parse_key(src: str, pos: Pos) -> Tuple[Pos, Key]:
    """Parse a possibly-dotted key; return (new_pos, tuple_of_parts).

    Trailing whitespace after the last part is consumed.
    """
    pos, first = parse_key_part(src, pos)
    parts = [first]
    pos = skip_chars(src, pos, TOML_WS)
    while src.startswith(".", pos):
        # Dotted key: whitespace is allowed around each dot.
        pos = skip_chars(src, pos + 1, TOML_WS)
        pos, part = parse_key_part(src, pos)
        parts.append(part)
        pos = skip_chars(src, pos, TOML_WS)
    return pos, tuple(parts)
+
+
def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]:
    """Parse one key component: a bare key, a literal string, or a basic string."""
    char = src[pos] if pos < len(src) else None
    if char in BARE_KEY_CHARS:
        # Bare key: consume the whole run of allowed characters.
        end = skip_chars(src, pos, BARE_KEY_CHARS)
        return end, src[pos:end]
    if char == "'":
        return parse_literal_str(src, pos)
    if char == '"':
        return parse_one_line_basic_str(src, pos)
    raise suffixed_err(src, pos, "Invalid initial character for a key part")
+
+
def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]:
    """Parse a single-line basic string whose opening quote is at `pos`."""
    return parse_basic_str(src, pos + 1, multiline=False)
+
+
def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]:
    """Parse a TOML array whose opening "[" is at `pos`.

    Trailing commas and interleaved comments/newlines are permitted.
    """
    pos += 1  # Skip "["
    values: list = []

    pos = skip_comments_and_array_ws(src, pos)
    if src.startswith("]", pos):
        return pos + 1, values
    while True:
        pos, item = parse_value(src, pos, parse_float)
        values.append(item)
        pos = skip_comments_and_array_ws(src, pos)

        if src.startswith("]", pos):
            return pos + 1, values
        if not src.startswith(",", pos):
            raise suffixed_err(src, pos, "Unclosed array")

        # A trailing comma before "]" is legal.
        pos = skip_comments_and_array_ws(src, pos + 1)
        if src.startswith("]", pos):
            return pos + 1, values
+
+
def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, dict]:
    """Parse an inline table `{ k = v, ... }` whose opening "{" is at `pos`.

    Uses a local NestedDict/Flags pair so dotted keys nest correctly and
    inline sub-containers are frozen against later mutation.  Note that
    TOML forbids a trailing comma and newlines inside inline tables.
    """
    pos += 1  # Skip "{"
    nested_dict = NestedDict()
    flags = Flags()

    pos = skip_chars(src, pos, TOML_WS)
    if src.startswith("}", pos):
        return pos + 1, nested_dict.dict
    while True:
        pos, key, value = parse_key_value_pair(src, pos, parse_float)
        key_parent, key_stem = key[:-1], key[-1]
        if flags.is_(key, Flags.FROZEN):
            raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}")
        try:
            nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
        except KeyError:
            raise suffixed_err(src, pos, "Can not overwrite a value") from None
        if key_stem in nest:
            raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}")
        nest[key_stem] = value
        pos = skip_chars(src, pos, TOML_WS)
        c = src[pos : pos + 1]
        if c == "}":
            return pos + 1, nested_dict.dict
        if c != ",":
            raise suffixed_err(src, pos, "Unclosed inline table")
        # Freeze container values only once we know the table continues;
        # if it closed above, the flags object is discarded anyway.
        if isinstance(value, (dict, list)):
            flags.set(key, Flags.FROZEN, recursive=True)
        pos += 1  # Skip ","
        pos = skip_chars(src, pos, TOML_WS)
+
+
def parse_basic_str_escape(  # noqa: C901
    src: str, pos: Pos, *, multiline: bool = False
) -> Tuple[Pos, str]:
    """Parse one backslash escape starting at `pos` (which points at "\\").

    Returns the position after the escape and the replacement text.  In
    multiline strings, a "line ending backslash" swallows the following
    whitespace and newlines and yields an empty string.
    """
    escape_id = src[pos : pos + 2]
    pos += 2
    if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}:
        # Skip whitespace until next non-whitespace character or end of
        # the doc. Error if non-whitespace is found before newline.
        if escape_id != "\\\n":
            pos = skip_chars(src, pos, TOML_WS)
            try:
                char = src[pos]
            except IndexError:
                return pos, ""
            if char != "\n":
                raise suffixed_err(src, pos, 'Unescaped "\\" in a string')
            pos += 1
        pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
        return pos, ""
    if escape_id == "\\u":
        return parse_hex_char(src, pos, 4)  # \uXXXX
    if escape_id == "\\U":
        return parse_hex_char(src, pos, 8)  # \UXXXXXXXX
    try:
        return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
    except KeyError:
        # A lone backslash at end of document reads as a 1-char escape_id.
        if len(escape_id) != 2:
            raise suffixed_err(src, pos, "Unterminated string") from None
        raise suffixed_err(src, pos, 'Unescaped "\\" in a string') from None
+
+
def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]:
    """Multiline-string variant of `parse_basic_str_escape`."""
    return parse_basic_str_escape(src, pos, multiline=True)
+
+
def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]:
    """Parse exactly `hex_len` hex digits at `pos` into a single character."""
    digits = src[pos : pos + hex_len]
    if len(digits) != hex_len or not HEXDIGIT_CHARS.issuperset(digits):
        raise suffixed_err(src, pos, "Invalid hex value")
    pos += hex_len
    codepoint = int(digits, 16)
    # Surrogate code points are not representable characters.
    if not is_unicode_scalar_value(codepoint):
        raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value")
    return pos, chr(codepoint)
+
+
def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]:
    """Parse a single-quoted (literal) string whose opening quote is at `pos`."""
    content_start = pos + 1  # Skip starting apostrophe
    end = skip_until(
        src, content_start, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True
    )
    return end + 1, src[content_start:end]  # Skip ending apostrophe
+
+
def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]:
    """Parse a triple-quoted string; `pos` points at the first quote char.

    `literal` selects ''' (no escapes) vs \"\"\" (basic-string escapes).
    """
    pos += 3  # Skip the opening delimiter
    if src.startswith("\n", pos):
        # A newline immediately after the opening delimiter is trimmed.
        pos += 1

    if literal:
        delim = "'"
        end_pos = skip_until(
            src,
            pos,
            "'''",
            error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
            error_on_eof=True,
        )
        result = src[pos:end_pos]
        pos = end_pos + 3
    else:
        delim = '"'
        pos, result = parse_basic_str(src, pos, multiline=True)

    # Add at maximum two extra apostrophes/quotes if the end sequence
    # is 4 or 5 chars long instead of just 3.
    if not src.startswith(delim, pos):
        return pos, result
    pos += 1
    if not src.startswith(delim, pos):
        return pos, result + delim
    pos += 1
    return pos, result + (delim * 2)
+
+
def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]:
    """Parse a basic string body starting just after its opening quote(s).

    Returns the position after the closing quote(s) and the decoded text.
    Escapes are processed; illegal control characters raise.
    """
    if multiline:
        error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS
        parse_escapes = parse_basic_str_escape_multiline
    else:
        error_on = ILLEGAL_BASIC_STR_CHARS
        parse_escapes = parse_basic_str_escape
    result = ""
    # `start_pos` marks the beginning of the current escape-free run, so
    # plain text is appended in slices rather than char by char.
    start_pos = pos
    while True:
        try:
            char = src[pos]
        except IndexError:
            raise suffixed_err(src, pos, "Unterminated string") from None
        if char == '"':
            if not multiline:
                return pos + 1, result + src[start_pos:pos]
            if src.startswith('"""', pos):
                return pos + 3, result + src[start_pos:pos]
            # A lone quote inside a multiline string is literal text.
            pos += 1
            continue
        if char == "\\":
            result += src[start_pos:pos]
            pos, parsed_escape = parse_escapes(src, pos)
            result += parsed_escape
            start_pos = pos
            continue
        if char in error_on:
            raise suffixed_err(src, pos, f"Illegal character {char!r}")
        pos += 1
+
+
def parse_value(  # noqa: C901
    src: str, pos: Pos, parse_float: ParseFloat
) -> Tuple[Pos, Any]:
    """Parse any TOML value starting at `pos`; return (new_pos, value).

    Dispatch order matters: datetimes must be tried before numbers, since
    the number regex would greedily match a leading date digit sequence.
    """
    try:
        char: Optional[str] = src[pos]
    except IndexError:
        char = None

    # Basic strings
    if char == '"':
        if src.startswith('"""', pos):
            return parse_multiline_str(src, pos, literal=False)
        return parse_one_line_basic_str(src, pos)

    # Literal strings
    if char == "'":
        if src.startswith("'''", pos):
            return parse_multiline_str(src, pos, literal=True)
        return parse_literal_str(src, pos)

    # Booleans
    if char == "t":
        if src.startswith("true", pos):
            return pos + 4, True
    if char == "f":
        if src.startswith("false", pos):
            return pos + 5, False

    # Dates and times
    datetime_match = RE_DATETIME.match(src, pos)
    if datetime_match:
        try:
            datetime_obj = match_to_datetime(datetime_match)
        except ValueError as e:
            raise suffixed_err(src, pos, "Invalid date or datetime") from e
        return datetime_match.end(), datetime_obj
    localtime_match = RE_LOCALTIME.match(src, pos)
    if localtime_match:
        return localtime_match.end(), match_to_localtime(localtime_match)

    # Integers and "normal" floats.
    # The regex will greedily match any type starting with a decimal
    # char, so needs to be located after handling of dates and times.
    number_match = RE_NUMBER.match(src, pos)
    if number_match:
        return number_match.end(), match_to_number(number_match, parse_float)

    # Arrays
    if char == "[":
        return parse_array(src, pos, parse_float)

    # Inline tables
    if char == "{":
        return parse_inline_table(src, pos, parse_float)

    # Special floats
    first_three = src[pos : pos + 3]
    if first_three in {"inf", "nan"}:
        return pos + 3, parse_float(first_three)
    first_four = src[pos : pos + 4]
    if first_four in {"-inf", "+inf", "-nan", "+nan"}:
        return pos + 4, parse_float(first_four)

    raise suffixed_err(src, pos, "Invalid value")
+
+
def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError:
    """Return a `TOMLDecodeError` where error message is suffixed with
    coordinates in source."""
    if pos >= len(src):
        coord = "end of document"
    else:
        line = src.count("\n", 0, pos) + 1
        if line == 1:
            column = pos + 1
        else:
            column = pos - src.rindex("\n", 0, pos)
        coord = f"line {line}, column {column}"
    return TOMLDecodeError(f"{msg} (at {coord})")
+
+
def is_unicode_scalar_value(codepoint: int) -> bool:
    """True if `codepoint` is a Unicode scalar value, i.e. in the Unicode
    range but not a surrogate (U+D800..U+DFFF)."""
    return 0 <= codepoint <= 0x10FFFF and not 0xD800 <= codepoint <= 0xDFFF
diff --git a/flit_core/flit_core/vendor/tomli/_re.py b/flit_core/flit_core/vendor/tomli/_re.py
new file mode 100644
index 0000000..45e17e2
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/_re.py
@@ -0,0 +1,101 @@
+from datetime import date, datetime, time, timedelta, timezone, tzinfo
+from functools import lru_cache
+import re
+from typing import Any, Optional, Union
+
+from ._types import ParseFloat
+
# E.g.
# - 00:32:00.999999
# - 00:32:00
# Shared by RE_LOCALTIME and RE_DATETIME; fractional seconds beyond six
# digits are matched but discarded (microsecond precision).
_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?"

# Matches every TOML number form (dec/hex/oct/bin integers and floats).
# A non-empty "floatpart" group distinguishes a float from an integer.
RE_NUMBER = re.compile(
    r"""
0
(?:
    x[0-9A-Fa-f](?:_?[0-9A-Fa-f])*   # hex
    |
    b[01](?:_?[01])*                 # bin
    |
    o[0-7](?:_?[0-7])*               # oct
)
|
[+-]?(?:0|[1-9](?:_?[0-9])*)         # dec, integer part
(?P<floatpart>
    (?:\.[0-9](?:_?[0-9])*)?         # optional fractional part
    (?:[eE][+-]?[0-9](?:_?[0-9])*)?  # optional exponent part
)
""",
    flags=re.VERBOSE,
)
# Local time without a date, e.g. 07:32:00
RE_LOCALTIME = re.compile(_TIME_RE_STR)
# Date, optionally followed by a time and an optional UTC offset.
RE_DATETIME = re.compile(
    fr"""
([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])  # date, e.g. 1988-10-27
(?:
    [Tt ]
    {_TIME_RE_STR}
    (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))?  # optional time offset
)?
""",
    flags=re.VERBOSE,
)
+
+
def match_to_datetime(match: "re.Match") -> Union[datetime, date]:
    """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.

    Raises ValueError if the match does not correspond to a valid date
    or datetime.
    """
    groups = match.groups()
    year, month, day = (int(g) for g in groups[:3])
    hour_str, minute_str, sec_str, micros_str = groups[3:7]
    zulu_time, offset_sign_str, offset_hour_str, offset_minute_str = groups[7:]
    if hour_str is None:
        # No time component matched: this is a plain date.
        return date(year, month, day)
    # Right-pad the fractional part to microseconds (e.g. ".5" -> 500000).
    micros = int(micros_str.ljust(6, "0")) if micros_str else 0
    if offset_sign_str:
        tz: Optional[tzinfo] = cached_tz(
            offset_hour_str, offset_minute_str, offset_sign_str
        )
    elif zulu_time:
        tz = timezone.utc
    else:  # local date-time
        tz = None
    return datetime(
        year, month, day, int(hour_str), int(minute_str), int(sec_str), micros,
        tzinfo=tz,
    )
+
+
@lru_cache(maxsize=None)
def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone:
    """Build (and memoize) a fixed-offset timezone from matched offset strings."""
    offset = timedelta(hours=int(hour_str), minutes=int(minute_str))
    if sign_str == "-":
        offset = -offset
    return timezone(offset)
+
+
def match_to_localtime(match: "re.Match") -> time:
    """Convert a `RE_LOCALTIME` match to `datetime.time`."""
    hour, minute, sec, frac = match.groups()
    # Right-pad the fractional part to microseconds.
    microseconds = int(frac.ljust(6, "0")) if frac else 0
    return time(int(hour), int(minute), int(sec), microseconds)
+
+
def match_to_number(match: "re.Match", parse_float: "ParseFloat") -> Any:
    """Convert a `RE_NUMBER` match to int or to whatever `parse_float` returns."""
    text = match.group()
    if match.group("floatpart"):
        return parse_float(text)
    # Base 0 lets int() honour the 0x/0o/0b prefixes.
    return int(text, 0)
diff --git a/flit_core/flit_core/vendor/tomli/_types.py b/flit_core/flit_core/vendor/tomli/_types.py
new file mode 100644
index 0000000..e37cc80
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/_types.py
@@ -0,0 +1,6 @@
+from typing import Any, Callable, Tuple
+
+# Type annotations
+ParseFloat = Callable[[str], Any]
+Key = Tuple[str, ...]
+Pos = int
diff --git a/flit_core/flit_core/vendor/tomli/py.typed b/flit_core/flit_core/vendor/tomli/py.typed
new file mode 100644
index 0000000..7632ecf
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561
diff --git a/flit_core/flit_core/versionno.py b/flit_core/flit_core/versionno.py
new file mode 100644
index 0000000..eed1a5b
--- /dev/null
+++ b/flit_core/flit_core/versionno.py
@@ -0,0 +1,127 @@
+"""Normalise version number according to PEP 440"""
+import logging
+import os
+import re
+
+log = logging.getLogger(__name__)
+
+# Regex below from packaging, via PEP 440. BSD License:
+# Copyright (c) Donald Stufft and individual contributors.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
VERSION_PERMISSIVE = re.compile(r"""
    \s*v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
\s*$""", re.VERBOSE)

# Canonical spelling for each accepted pre-release label.
pre_spellings = {
    'a': 'a', 'alpha': 'a',
    'b': 'b', 'beta': 'b',
    'rc': 'rc', 'c': 'rc', 'pre': 'rc', 'preview': 'rc',
}

def normalise_version(orig_version):
    """Normalise version number according to rules in PEP 440

    Raises InvalidVersion if the version does not match PEP 440. This can be
    overridden with the FLIT_ALLOW_INVALID environment variable.

    https://www.python.org/dev/peps/pep-0440/#normalization
    """
    version = orig_version.lower()
    m = VERSION_PERMISSIVE.match(version)
    if m is None:
        if os.environ.get('FLIT_ALLOW_INVALID'):
            log.warning("Invalid version number {!r} allowed by FLIT_ALLOW_INVALID"
                        .format(orig_version))
            return version
        from .common import InvalidVersion
        raise InvalidVersion("Version number {!r} does not match PEP 440 rules"
                             .format(orig_version))

    parts = []

    # Epoch and release: strip leading zeroes from each numeric segment.
    epoch, release = m.group('epoch', 'release')
    if epoch is not None:
        parts.append('{}!'.format(int(epoch)))
    parts.append('.'.join(str(int(seg)) for seg in release.split('.')))

    # Pre-release: canonical label, implicit number 0.
    pre_l, pre_n = m.group('pre_l', 'pre_n')
    if pre_l is not None:
        parts.append(pre_spellings[pre_l] + ('0' if pre_n is None else str(int(pre_n))))

    # Post release: either the "-N" shorthand or a labelled form.
    post_n1, post_l, post_n2 = m.group('post_n1', 'post_l', 'post_n2')
    if post_n1 is not None:
        parts.append('.post{}'.format(int(post_n1)))
    elif post_l is not None:
        parts.append('.post{}'.format(0 if post_n2 is None else int(post_n2)))

    # Dev release: implicit number 0.
    dev_l, dev_n = m.group('dev_l', 'dev_n')
    if dev_l is not None:
        parts.append('.dev{}'.format(0 if dev_n is None else int(dev_n)))

    # Local version: unify separators to "." and strip zeroes from
    # purely numeric segments.
    local = m.group('local')
    if local is not None:
        segments = [str(int(s)) if s.isdigit() else s
                    for s in local.replace('-', '.').replace('_', '.').split('.')]
        parts.append('+' + '.'.join(segments))

    version = ''.join(parts)
    if version != orig_version:
        log.warning("Version number normalised: {!r} -> {!r} (see PEP 440)"
                    .format(orig_version, version))
    return version
+
diff --git a/flit_core/flit_core/wheel.py b/flit_core/flit_core/wheel.py
new file mode 100644
index 0000000..08cb70a
--- /dev/null
+++ b/flit_core/flit_core/wheel.py
@@ -0,0 +1,259 @@
+import argparse
+from base64 import urlsafe_b64encode
+import contextlib
+from datetime import datetime
+import hashlib
+import io
+import logging
+import os
+import os.path as osp
+import stat
+import tempfile
+from pathlib import Path
+from types import SimpleNamespace
+from typing import Optional
+import zipfile
+
+from flit_core import __version__
+from . import common
+
+log = logging.getLogger(__name__)
+
# Body of the dist-info WHEEL file; the Tag: lines are appended per-build.
wheel_file_template = u"""\
Wheel-Version: 1.0
Generator: flit {version}
Root-Is-Purelib: true
""".format(version=__version__)

def _write_wheel_file(f, supports_py2=False):
    """Write the WHEEL metadata contents (including Tag: lines) to `f`."""
    f.write(wheel_file_template)
    if supports_py2:
        f.write(u"Tag: py2-none-any\n")
    f.write(u"Tag: py3-none-any\n")
+
+
def _set_zinfo_mode(zinfo, mode):
    # Unix permission bits live in the top 16 bits of the zip entry's
    # external attributes field.
    zinfo.external_attr = mode << 16
+
+
def zip_timestamp_from_env() -> Optional[tuple]:
    """Prepare a timestamp from $SOURCE_DATE_EPOCH, if set"""
    # If SOURCE_DATE_EPOCH is set (e.g. by Debian), it's used for
    # timestamps inside the zip file.
    raw = os.environ.get('SOURCE_DATE_EPOCH')
    if raw is None:
        # Otherwise, we'll use the mtime of files, and generated files will
        # default to 2016-1-1 00:00:00
        return None
    try:
        d = datetime.utcfromtimestamp(int(raw))
    except ValueError:
        # An unparseable value behaves like an unset variable.
        return None

    if d.year < 1980:
        # Zip timestamps cannot represent dates before 1980.
        log.info("SOURCE_DATE_EPOCH is below the minimum for zip file timestamps")
        log.info("Zip timestamps will be 1980-01-01 00:00:00")
        return 1980, 1, 1, 0, 0, 0
    log.info("Zip timestamps will be from SOURCE_DATE_EPOCH: %s", d)
    # zipfile expects a 6-tuple, not a datetime object
    return d.year, d.month, d.day, d.hour, d.minute, d.second
+
+
class WheelBuilder:
    """Assemble a PEP 427 wheel (a zip archive plus dist-info metadata)."""

    def __init__(
        self, directory, module, metadata, entrypoints, target_fp, data_directory
    ):
        """Build a wheel from a module/package

        :param directory: project root (used to find COPYING/LICENSE files)
        :param module: the module/package object to package
        :param metadata: metadata object used for dist-info files
        :param entrypoints: entry point groups to write to entry_points.txt
        :param target_fp: writable binary file object receiving the zip data
        :param data_directory: external data dir copied into <name>.data/data/
        """
        self.directory = directory
        self.module = module
        self.metadata = metadata
        self.entrypoints = entrypoints
        self.data_directory = data_directory

        # (rel_path, sha256-digest, size) rows accumulated for RECORD
        self.records = []
        self.source_time_stamp = zip_timestamp_from_env()

        # Open the zip file ready to write
        self.wheel_zip = zipfile.ZipFile(target_fp, 'w',
                                         compression=zipfile.ZIP_DEFLATED)

    @classmethod
    def from_ini_path(cls, ini_path, target_fp):
        """Alternate constructor: read project config from a pyproject.toml path."""
        from .config import read_flit_config
        directory = ini_path.parent
        ini_info = read_flit_config(ini_path)
        entrypoints = ini_info.entrypoints
        module = common.Module(ini_info.module, directory)
        metadata = common.make_metadata(module, ini_info)
        return cls(
            directory, module, metadata, entrypoints, target_fp, ini_info.data_directory
        )

    @property
    def dist_info(self):
        """Name of the .dist-info directory inside the wheel."""
        return common.dist_info_name(self.metadata.name, self.metadata.version)

    @property
    def wheel_filename(self):
        """Canonical wheel file name, e.g. pkg-1.0-py3-none-any.whl."""
        dist_name = common.normalize_dist_name(self.metadata.name, self.metadata.version)
        tag = ('py2.' if self.metadata.supports_py2 else '') + 'py3-none-any'
        return '{}-{}.whl'.format(dist_name, tag)

    def _add_file(self, full_path, rel_path):
        """Copy one file from disk into the zip, hashing it for RECORD."""
        log.debug("Adding %s to zip file", full_path)
        full_path, rel_path = str(full_path), str(rel_path)
        if os.sep != '/':
            # We always want to have /-separated paths in the zip file and in
            # RECORD
            rel_path = rel_path.replace(os.sep, '/')

        if self.source_time_stamp is None:
            zinfo = zipfile.ZipInfo.from_file(full_path, rel_path)
        else:
            # Set timestamps in zipfile for reproducible build
            zinfo = zipfile.ZipInfo(rel_path, self.source_time_stamp)

        # Normalize permission bits to either 755 (executable) or 644
        st_mode = os.stat(full_path).st_mode
        new_mode = common.normalize_file_permissions(st_mode)
        _set_zinfo_mode(zinfo, new_mode & 0xFFFF)  # Unix attributes

        if stat.S_ISDIR(st_mode):
            zinfo.external_attr |= 0x10  # MS-DOS directory flag

        zinfo.compress_type = zipfile.ZIP_DEFLATED

        # Stream the file into the archive while computing its sha256.
        hashsum = hashlib.sha256()
        with open(full_path, 'rb') as src, self.wheel_zip.open(zinfo, 'w') as dst:
            while True:
                buf = src.read(1024 * 8)
                if not buf:
                    break
                hashsum.update(buf)
                dst.write(buf)

        size = os.stat(full_path).st_size
        hash_digest = urlsafe_b64encode(hashsum.digest()).decode('ascii').rstrip('=')
        self.records.append((rel_path, hash_digest, size))

    @contextlib.contextmanager
    def _write_to_zip(self, rel_path, mode=0o644):
        """Context manager yielding a text buffer written into the zip on exit."""
        sio = io.StringIO()
        yield sio

        log.debug("Writing data to %s in zip file", rel_path)
        # The default is a fixed timestamp rather than the current time, so
        # that building a wheel twice on the same computer can automatically
        # give you the exact same result.
        date_time = self.source_time_stamp or (2016, 1, 1, 0, 0, 0)
        zi = zipfile.ZipInfo(rel_path, date_time)
        _set_zinfo_mode(zi, mode)
        b = sio.getvalue().encode('utf-8')
        hashsum = hashlib.sha256(b)
        hash_digest = urlsafe_b64encode(hashsum.digest()).decode('ascii').rstrip('=')
        self.wheel_zip.writestr(zi, b, compress_type=zipfile.ZIP_DEFLATED)
        self.records.append((rel_path, hash_digest, len(b)))

    def copy_module(self):
        """Copy every file of the module/package into the wheel."""
        log.info('Copying package file(s) from %s', self.module.path)
        source_dir = str(self.module.source_dir)

        for full_path in self.module.iter_files():
            rel_path = osp.relpath(full_path, source_dir)
            self._add_file(full_path, rel_path)

    def add_pth(self):
        """Write a .pth file pointing at the source dir (editable install)."""
        with self._write_to_zip(self.module.name + ".pth") as f:
            f.write(str(self.module.source_dir.resolve()))

    def add_data_directory(self):
        """Copy the external data directory into <dist>.data/data/."""
        dir_in_whl = '{}.data/data/'.format(
            common.normalize_dist_name(self.metadata.name, self.metadata.version)
        )
        for full_path in common.walk_data_dir(self.data_directory):
            rel_path = os.path.relpath(full_path, self.data_directory)
            self._add_file(full_path, dir_in_whl + rel_path)

    def write_metadata(self):
        """Write entry points, license files, WHEEL and METADATA to dist-info."""
        log.info('Writing metadata files')

        if self.entrypoints:
            with self._write_to_zip(self.dist_info + '/entry_points.txt') as f:
                common.write_entry_points(self.entrypoints, f)

        for base in ('COPYING', 'LICENSE'):
            for path in sorted(self.directory.glob(base + '*')):
                if path.is_file():
                    self._add_file(path, '%s/%s' % (self.dist_info, path.name))

        with self._write_to_zip(self.dist_info + '/WHEEL') as f:
            _write_wheel_file(f, supports_py2=self.metadata.supports_py2)

        with self._write_to_zip(self.dist_info + '/METADATA') as f:
            self.metadata.write_metadata_file(f)

    def write_record(self):
        """Write the RECORD manifest; must be called after all other files."""
        log.info('Writing the record of files')
        # Write a record of the files in the wheel
        with self._write_to_zip(self.dist_info + '/RECORD') as f:
            for path, hash, size in self.records:
                f.write(u'{},sha256={},{}\n'.format(path, hash, size))
            # RECORD itself is recorded with no hash or size
            f.write(self.dist_info + '/RECORD,,\n')

    def build(self, editable=False):
        """Run the full build; always closes the zip, even on failure."""
        try:
            if editable:
                self.add_pth()
            else:
                self.copy_module()
            self.add_data_directory()
            self.write_metadata()
            self.write_record()
        finally:
            self.wheel_zip.close()
+
def make_wheel_in(ini_path, wheel_directory, editable=False):
    """Build a wheel for the project described by `ini_path` (pyproject.toml).

    The wheel is written into `wheel_directory`; pass editable=True for a
    .pth-based editable wheel.  Returns a SimpleNamespace with the builder
    (`builder`) and the final wheel path (`file`).
    """
    # We don't know the final filename until metadata is loaded, so write to
    # a temporary_file, and rename it afterwards.
    (fd, temp_path) = tempfile.mkstemp(suffix='.whl', dir=str(wheel_directory))
    try:
        with io.open(fd, 'w+b') as fp:
            wb = WheelBuilder.from_ini_path(ini_path, fp)
            wb.build(editable)

        wheel_path = wheel_directory / wb.wheel_filename
        os.replace(temp_path, str(wheel_path))
    except BaseException:
        # Deliberately catch *everything* (incl. KeyboardInterrupt) so the
        # temp file is never left behind; the exception is re-raised.
        os.unlink(temp_path)
        raise

    log.info("Built wheel: %s", wheel_path)
    return SimpleNamespace(builder=wb, file=wheel_path)
+
+
def main(argv=None):
    """Command-line entry point: build a wheel from a source directory."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'srcdir',
        type=Path,
        nargs='?',
        default=Path.cwd(),
        help='source directory (defaults to current directory)',
    )
    parser.add_argument(
        '--outdir',
        '-o',
        help='output directory (defaults to {srcdir}/dist)',
    )
    args = parser.parse_args(argv)
    if args.outdir is None:
        outdir = args.srcdir / 'dist'
    else:
        outdir = Path(args.outdir)
    print("Building wheel from", args.srcdir)
    pyproj_toml = args.srcdir / 'pyproject.toml'
    outdir.mkdir(parents=True, exist_ok=True)
    info = make_wheel_in(pyproj_toml, outdir)
    print("Wheel built", outdir / info.file.name)
+
+if __name__ == "__main__":
+ main()
diff --git a/flit_core/pyproject.toml b/flit_core/pyproject.toml
new file mode 100644
index 0000000..e11bf62
--- /dev/null
+++ b/flit_core/pyproject.toml
@@ -0,0 +1,25 @@
+[build-system]
+requires = []
+build-backend = "flit_core.buildapi"
+backend-path = ["."]
+
+[project]
+name="flit_core"
+authors=[
+ {name = "Thomas Kluyver & contributors", email = "thomas@kluyver.me.uk"},
+]
+description = "Distribution-building parts of Flit. See flit package for more information"
+dependencies = []
+requires-python = '>=3.6'
+license = {file = "LICENSE"}
+classifiers = [
+ "License :: OSI Approved :: BSD License",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+]
+dynamic = ["version"]
+
+[project.urls]
+Source = "https://github.com/pypa/flit"
+
+[tool.flit.sdist]
+include = ["bootstrap_install.py", "build_dists.py"]
diff --git a/flit_core/update-vendored-tomli.sh b/flit_core/update-vendored-tomli.sh
new file mode 100755
index 0000000..c10af1f
--- /dev/null
+++ b/flit_core/update-vendored-tomli.sh
@@ -0,0 +1,18 @@
#!/bin/bash
# Update the vendored copy of tomli
# Usage: ./update-vendored-tomli.sh <tomli-version>
set -euo pipefail

version=$1
echo "Bundling tomli version $version"

# Remove any previously vendored copy (package dir and dist-info) first.
rm -rf flit_core/vendor/tomli*
pip install --target flit_core/vendor/ "tomli==$version"

# Convert absolute imports to relative (from tomli.foo -> from .foo)
for file in flit_core/vendor/tomli/*.py; do
    sed -i -E 's/((from|import)[[:space:]]+)tomli\./\1\./' "$file"
done

# Delete some files that aren't useful in this context.
# Leave LICENSE & METADATA present.
rm flit_core/vendor/tomli*.dist-info/{INSTALLER,RECORD,REQUESTED,WHEEL}