-rw-r--r--  .bumpversion.cfg  14
-rw-r--r--  .coveragerc  3
-rw-r--r--  .github/dependabot.yml  6
-rw-r--r--  .github/workflows/test.yml  40
-rw-r--r--  .gitignore  16
-rw-r--r--  LICENSE  29
-rw-r--r--  PKG-INFO  110
-rw-r--r--  README.rst  80
-rw-r--r--  bootstrap_dev.py  40
-rw-r--r--  codecov.yml  1
-rw-r--r--  doc/Makefile  177
-rw-r--r--  doc/_static/flit_logo_nobg.svg  149
-rw-r--r--  doc/_static/flit_logo_nobg_cropped.png  bin 0 -> 5890 bytes
-rw-r--r--  doc/_static/flit_logo_nobg_cropped.svg  158
-rw-r--r--  doc/bootstrap.rst  41
-rw-r--r--  doc/cmdline.rst  262
-rw-r--r--  doc/conf.py  264
-rw-r--r--  doc/development.rst  26
-rw-r--r--  doc/flit_ini.rst  113
-rw-r--r--  doc/history.rst  497
-rw-r--r--  doc/index.rst  34
-rw-r--r--  doc/make.bat  242
-rw-r--r--  doc/pyproject_toml.rst  480
-rw-r--r--  doc/rationale.rst  58
-rw-r--r--  doc/reproducible.rst  34
-rw-r--r--  doc/requirements.txt  3
-rw-r--r--  doc/upload.rst  77
-rw-r--r--  flit/__init__.py  216
-rw-r--r--  flit/__main__.py  5
-rw-r--r--  flit/_get_dirs.py  27
-rw-r--r--  flit/build.py  60
-rw-r--r--  flit/buildapi.py  1
-rw-r--r--  flit/config.py  18
-rw-r--r--  flit/init.py  251
-rw-r--r--  flit/install.py  432
-rw-r--r--  flit/license_templates/apache  68
-rw-r--r--  flit/license_templates/gpl3  619
-rw-r--r--  flit/license_templates/mit  21
-rw-r--r--  flit/log.py  110
-rw-r--r--  flit/logo.py  20
-rw-r--r--  flit/sdist.py  236
-rw-r--r--  flit/tomlify.py  83
-rw-r--r--  flit/upload.py  276
-rw-r--r--  flit/validate.py  301
-rw-r--r--  flit/vcs/__init__.py  14
-rw-r--r--  flit/vcs/git.py  15
-rw-r--r--  flit/vcs/hg.py  34
-rw-r--r--  flit/vendorized/__init__.py  0
-rw-r--r--  flit/vendorized/readme/__init__.py  0
-rw-r--r--  flit/vendorized/readme/clean.py  2
-rw-r--r--  flit/vendorized/readme/rst.py  128
-rw-r--r--  flit/wheel.py  12
-rw-r--r--  flit_core/LICENSE  29
-rw-r--r--  flit_core/README.rst  6
-rw-r--r--  flit_core/bootstrap_install.py  57
-rw-r--r--  flit_core/build_dists.py  17
-rw-r--r--  flit_core/flit_core/__init__.py  7
-rw-r--r--  flit_core/flit_core/buildapi.py  83
-rw-r--r--  flit_core/flit_core/common.py  449
-rw-r--r--  flit_core/flit_core/config.py  660
-rw-r--r--  flit_core/flit_core/sdist.py  202
-rw-r--r--  flit_core/flit_core/tests/__init__.py  0
-rw-r--r--  flit_core/flit_core/tests/samples/EG_README.rst  4
-rw-r--r--  flit_core/flit_core/tests/samples/bad-description-ext.toml  9
-rw-r--r--  flit_core/flit_core/tests/samples/conflicting_modules/module1.py  0
-rw-r--r--  flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml  8
-rw-r--r--  flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py  0
-rw-r--r--  flit_core/flit_core/tests/samples/constructed_version/module1.py  4
-rw-r--r--  flit_core/flit_core/tests/samples/constructed_version/pyproject.toml  12
-rw-r--r--  flit_core/flit_core/tests/samples/extras-dev-conflict.toml  13
-rw-r--r--  flit_core/flit_core/tests/samples/extras.toml  15
-rw-r--r--  flit_core/flit_core/tests/samples/imported_version/package1/__init__.py  3
-rw-r--r--  flit_core/flit_core/tests/samples/imported_version/package1/_version.py  1
-rw-r--r--  flit_core/flit_core/tests/samples/imported_version/pyproject.toml  10
-rw-r--r--  flit_core/flit_core/tests/samples/inclusion/LICENSES/README  2
-rw-r--r--  flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md  0
-rw-r--r--  flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt  1
-rw-r--r--  flit_core/flit_core/tests/samples/inclusion/doc/test.rst  1
-rw-r--r--  flit_core/flit_core/tests/samples/inclusion/doc/test.txt  1
-rw-r--r--  flit_core/flit_core/tests/samples/inclusion/module1.py  3
-rw-r--r--  flit_core/flit_core/tests/samples/inclusion/pyproject.toml  12
-rw-r--r--  flit_core/flit_core/tests/samples/invalid_version1.py  3
-rw-r--r--  flit_core/flit_core/tests/samples/missing-description-file.toml  9
-rw-r--r--  flit_core/flit_core/tests/samples/misspelled-key.toml  10
-rw-r--r--  flit_core/flit_core/tests/samples/module1-pkg.ini  5
-rw-r--r--  flit_core/flit_core/tests/samples/module1-pkg.toml  12
-rw-r--r--  flit_core/flit_core/tests/samples/module1.py  3
-rw-r--r--  flit_core/flit_core/tests/samples/module2.py  10
-rw-r--r--  flit_core/flit_core/tests/samples/moduleunimportable.py  8
-rw-r--r--  flit_core/flit_core/tests/samples/moduleunimportabledouble.py  8
-rw-r--r--  flit_core/flit_core/tests/samples/my-description.rst  1
-rw-r--r--  flit_core/flit_core/tests/samples/no_docstring-pkg.toml  12
-rw-r--r--  flit_core/flit_core/tests/samples/no_docstring.py  1
-rw-r--r--  flit_core/flit_core/tests/samples/normalization/my_python_module.py  0
-rw-r--r--  flit_core/flit_core/tests/samples/normalization/pyproject.toml  14
-rw-r--r--  flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst  4
-rw-r--r--  flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py  8
-rw-r--r--  flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml  10
-rw-r--r--  flit_core/flit_core/tests/samples/package1.toml  13
-rw-r--r--  flit_core/flit_core/tests/samples/package1/__init__.py  6
-rw-r--r--  flit_core/flit_core/tests/samples/package1/data_dir/foo.sh  2
-rw-r--r--  flit_core/flit_core/tests/samples/package1/foo.py  1
-rw-r--r--  flit_core/flit_core/tests/samples/package1/subpkg/__init__.py  0
-rw-r--r--  flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json  1
-rw-r--r--  flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py  0
-rw-r--r--  flit_core/flit_core/tests/samples/pep517/LICENSE  1
-rw-r--r--  flit_core/flit_core/tests/samples/pep517/README.rst  1
-rw-r--r--  flit_core/flit_core/tests/samples/pep517/module1.py  3
-rw-r--r--  flit_core/flit_core/tests/samples/pep517/pyproject.toml  17
-rw-r--r--  flit_core/flit_core/tests/samples/pep621/LICENSE  1
-rw-r--r--  flit_core/flit_core/tests/samples/pep621/README.rst  1
-rw-r--r--  flit_core/flit_core/tests/samples/pep621/module1a.py  3
-rw-r--r--  flit_core/flit_core/tests/samples/pep621/pyproject.toml  39
-rw-r--r--  flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst  1
-rw-r--r--  flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py  0
-rw-r--r--  flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml  28
-rw-r--r--  flit_core/flit_core/tests/samples/requires-dev.toml  11
-rw-r--r--  flit_core/flit_core/tests/samples/requires-envmark.toml  12
-rw-r--r--  flit_core/flit_core/tests/samples/requires-extra-envmark.toml  12
-rw-r--r--  flit_core/flit_core/tests/samples/requires-requests.toml  10
-rw-r--r--  flit_core/flit_core/tests/samples/with_data_dir/LICENSE  1
-rw-r--r--  flit_core/flit_core/tests/samples/with_data_dir/README.rst  1
-rw-r--r--  flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.1  1
-rw-r--r--  flit_core/flit_core/tests/samples/with_data_dir/module1.py  3
-rw-r--r--  flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml  26
-rw-r--r--  flit_core/flit_core/tests/test_build_thyself.py  57
-rw-r--r--  flit_core/flit_core/tests/test_buildapi.py  93
-rw-r--r--  flit_core/flit_core/tests/test_common.py  158
-rw-r--r--  flit_core/flit_core/tests/test_config.py  165
-rw-r--r--  flit_core/flit_core/tests/test_sdist.py  61
-rw-r--r--  flit_core/flit_core/tests/test_versionno.py  40
-rw-r--r--  flit_core/flit_core/tests/test_wheel.py  47
-rw-r--r--  flit_core/flit_core/vendor/README  13
-rw-r--r--  flit_core/flit_core/vendor/__init__.py  0
-rw-r--r--  flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE  21
-rw-r--r--  flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA  208
-rw-r--r--  flit_core/flit_core/vendor/tomli/__init__.py  9
-rw-r--r--  flit_core/flit_core/vendor/tomli/_parser.py  663
-rw-r--r--  flit_core/flit_core/vendor/tomli/_re.py  101
-rw-r--r--  flit_core/flit_core/vendor/tomli/_types.py  6
-rw-r--r--  flit_core/flit_core/vendor/tomli/py.typed  1
-rw-r--r--  flit_core/flit_core/versionno.py  127
-rw-r--r--  flit_core/flit_core/wheel.py  259
-rw-r--r--  flit_core/pyproject.toml  25
-rwxr-xr-x  flit_core/update-vendored-tomli.sh  18
-rw-r--r--  pyproject.toml  46
-rw-r--r--  tests/__init__.py  0
-rw-r--r--  tests/conftest.py  15
-rw-r--r--  tests/samples/EG_README.rst  4
-rw-r--r--  tests/samples/altdistname/package1/__init__.py  6
-rw-r--r--  tests/samples/altdistname/package1/data_dir/foo.sh  2
-rw-r--r--  tests/samples/altdistname/package1/foo.py  1
-rw-r--r--  tests/samples/altdistname/package1/subpkg/__init__.py  0
-rw-r--r--  tests/samples/altdistname/package1/subpkg/sp_data_dir/test.json  1
-rw-r--r--  tests/samples/altdistname/package1/subpkg2/__init__.py  0
-rw-r--r--  tests/samples/altdistname/pyproject.toml  11
-rw-r--r--  tests/samples/bad-description-ext.toml  9
-rw-r--r--  tests/samples/entrypoints_conflict/console_entry_points.txt  2
-rw-r--r--  tests/samples/entrypoints_conflict/package1/__init__.py  6
-rw-r--r--  tests/samples/entrypoints_conflict/package1/data_dir/foo.sh  2
-rw-r--r--  tests/samples/entrypoints_conflict/package1/foo.py  1
-rw-r--r--  tests/samples/entrypoints_conflict/package1/subpkg/__init__.py  0
-rw-r--r--  tests/samples/entrypoints_conflict/package1/subpkg/sp_data_dir/test.json  1
-rw-r--r--  tests/samples/entrypoints_conflict/package1/subpkg2/__init__.py  0
-rw-r--r--  tests/samples/entrypoints_conflict/pyproject.toml  16
-rw-r--r--  tests/samples/entrypoints_valid/package1/__init__.py  6
-rw-r--r--  tests/samples/entrypoints_valid/package1/data_dir/foo.sh  2
-rw-r--r--  tests/samples/entrypoints_valid/package1/foo.py  1
-rw-r--r--  tests/samples/entrypoints_valid/package1/subpkg/__init__.py  0
-rw-r--r--  tests/samples/entrypoints_valid/package1/subpkg/sp_data_dir/test.json  1
-rw-r--r--  tests/samples/entrypoints_valid/package1/subpkg2/__init__.py  0
-rw-r--r--  tests/samples/entrypoints_valid/pyproject.toml  15
-rw-r--r--  tests/samples/extras-dev-conflict.toml  13
-rw-r--r--  tests/samples/extras/module1.py  3
-rw-r--r--  tests/samples/extras/pyproject.toml  13
-rw-r--r--  tests/samples/invalid_classifier.toml  14
-rw-r--r--  tests/samples/invalid_version1.py  3
-rw-r--r--  tests/samples/missing-description-file.toml  9
-rw-r--r--  tests/samples/module1.py  3
-rw-r--r--  tests/samples/module1_ini/flit.ini  5
-rw-r--r--  tests/samples/module1_ini/module1.py  3
-rw-r--r--  tests/samples/module1_toml/EG_README.rst  4
-rw-r--r--  tests/samples/module1_toml/module1.py  3
-rw-r--r--  tests/samples/module1_toml/pyproject.toml  12
-rw-r--r--  tests/samples/module2.py  5
-rw-r--r--  tests/samples/module3/LICENSE  1
-rw-r--r--  tests/samples/module3/pyproject.toml  10
-rw-r--r--  tests/samples/module3/src/module3.py  3
-rw-r--r--  tests/samples/moduleunimportable.py  8
-rw-r--r--  tests/samples/modulewithconstructedversion.py  4
-rw-r--r--  tests/samples/modulewithlocalversion/modulewithlocalversion.py  5
-rw-r--r--  tests/samples/modulewithlocalversion/pyproject.toml  10
-rw-r--r--  tests/samples/my-description.rst  1
-rw-r--r--  tests/samples/no_docstring-pkg.toml  12
-rw-r--r--  tests/samples/no_docstring.py  1
-rw-r--r--  tests/samples/ns1-pkg-mod/ns1/module.py  5
-rw-r--r--  tests/samples/ns1-pkg-mod/pyproject.toml  7
-rw-r--r--  tests/samples/ns1-pkg/EG_README.rst  4
-rw-r--r--  tests/samples/ns1-pkg/ns1/pkg/__init__.py  8
-rw-r--r--  tests/samples/ns1-pkg/pyproject.toml  10
-rw-r--r--  tests/samples/ns1-pkg2/EG_README.rst  4
-rw-r--r--  tests/samples/ns1-pkg2/ns1/pkg2/__init__.py  8
-rw-r--r--  tests/samples/ns1-pkg2/pyproject.toml  10
-rw-r--r--  tests/samples/package1/my-description.rst  1
-rw-r--r--  tests/samples/package1/package1/__init__.py  6
-rw-r--r--  tests/samples/package1/package1/data_dir/foo.sh  2
-rw-r--r--  tests/samples/package1/package1/foo.py  1
-rw-r--r--  tests/samples/package1/package1/subpkg/__init__.py  0
-rw-r--r--  tests/samples/package1/package1/subpkg/sp_data_dir/test.json  1
-rw-r--r--  tests/samples/package1/package1/subpkg2/__init__.py  0
-rw-r--r--  tests/samples/package1/pyproject.toml  13
-rw-r--r--  tests/samples/package2/package2-pkg.ini  8
-rw-r--r--  tests/samples/package2/pyproject.toml  12
-rw-r--r--  tests/samples/package2/src/package2/__init__.py  6
-rw-r--r--  tests/samples/package2/src/package2/foo.py  1
-rw-r--r--  tests/samples/packageinsrc/pyproject.toml  10
-rw-r--r--  tests/samples/packageinsrc/src/module1.py  3
-rw-r--r--  tests/samples/pep517/module1.py  3
-rw-r--r--  tests/samples/pep517/pyproject.toml  13
-rw-r--r--  tests/samples/requires-dev.toml  11
-rw-r--r--  tests/samples/requires-envmark/module1.py  3
-rw-r--r--  tests/samples/requires-envmark/pyproject.toml  12
-rw-r--r--  tests/samples/requires-extra-envmark/module1.py  3
-rw-r--r--  tests/samples/requires-extra-envmark/pyproject.toml  11
-rw-r--r--  tests/samples/requires-requests.toml  10
-rw-r--r--  tests/samples/with_flit_ini/flit.ini  9
-rw-r--r--  tests/samples/with_flit_ini/package1/__init__.py  6
-rw-r--r--  tests/samples/with_flit_ini/package1/foo.py  1
-rw-r--r--  tests/samples/with_flit_ini/package1/subpkg/__init__.py  0
-rw-r--r--  tests/samples/with_flit_ini/some_entry_points.txt  2
-rw-r--r--  tests/test_build.py  84
-rw-r--r--  tests/test_command.py  13
-rw-r--r--  tests/test_config.py  10
-rw-r--r--  tests/test_find_python_executable.py  30
-rw-r--r--  tests/test_init.py  255
-rw-r--r--  tests/test_install.py  365
-rw-r--r--  tests/test_sdist.py  152
-rw-r--r--  tests/test_tomlify.py  32
-rw-r--r--  tests/test_upload.py  168
-rw-r--r--  tests/test_validate.py  243
-rw-r--r--  tests/test_vcs.py  27
-rw-r--r--  tests/test_wheel.py  221
-rw-r--r--  tox.ini  42
243 files changed, 12036 insertions, 0 deletions
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
new file mode 100644
index 0000000..90b26fd
--- /dev/null
+++ b/.bumpversion.cfg
@@ -0,0 +1,14 @@
+[bumpversion]
+current_version = 3.8.0
+commit = True
+tag = False
+
+[bumpversion:file:pyproject.toml]
+search = flit_core >={current_version}
+replace = flit_core >={new_version}
+
+[bumpversion:file:flit/__init__.py]
+
+[bumpversion:file:flit_core/flit_core/__init__.py]
+
+[bumpversion:file:doc/conf.py]
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..a24883e
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,3 @@
+[run]
+omit = */tests/*
+ */flit_core/vendor/*
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..1230149
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "daily"
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..617d92a
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,40 @@
+name: Test
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+
+concurrency:
+ group: >-
+ ${{ github.workflow }}-
+ ${{ github.ref_type }}-
+ ${{ github.event.pull_request.number || github.sha }}
+ cancel-in-progress: true
+
+jobs:
+ test:
+ runs-on: ${{ matrix.platform }}
+ strategy:
+ matrix:
+ platform: ["ubuntu-latest", "windows-latest"]
+ python-version: [ "3.6", "3.7", "3.8", "3.9", "3.10", "3.11-dev" ]
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Setup Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install tox tox-gh-actions codecov
+
+ - name: Run tests
+ run: tox
+
+ - name: Codecov upload
+ run: codecov
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..165fb1d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+/build/
+/dist/
+/flit_core/dist/
+__pycache__/
+/doc/_build/
+/tests/samples/build/
+/tests/samples/dist/
+/tests/samples/ns1-pkg/dist/
+/htmlcov/
+/.coverage
+/.pytest_cache
+/.tox
+.idea/
+venv/
+*.pyc
+.python-version
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..1bd2e2d
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,29 @@
+Copyright (c) 2015, Thomas Kluyver and contributors
+All rights reserved.
+
+BSD 3-clause license:
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..62da13d
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,110 @@
+Metadata-Version: 2.1
+Name: flit
+Version: 3.8.0
+Summary: A simple packaging tool for simple packages.
+Author-email: Thomas Kluyver <thomas@kluyver.me.uk>
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: flit_core >=3.8.0
+Requires-Dist: requests
+Requires-Dist: docutils
+Requires-Dist: tomli-w
+Requires-Dist: sphinx ; extra == "doc"
+Requires-Dist: sphinxcontrib_github_alt ; extra == "doc"
+Requires-Dist: pygments-github-lexers ; extra == "doc"
+Requires-Dist: testpath ; extra == "test"
+Requires-Dist: responses ; extra == "test"
+Requires-Dist: pytest>=2.7.3 ; extra == "test"
+Requires-Dist: pytest-cov ; extra == "test"
+Requires-Dist: tomli ; extra == "test"
+Project-URL: Changelog, https://flit.pypa.io/en/stable/history.html
+Project-URL: Documentation, https://flit.pypa.io
+Project-URL: Source, https://github.com/pypa/flit
+Provides-Extra: doc
+Provides-Extra: test
+
+**Flit** is a simple way to put Python packages and modules on PyPI.
+It tries to require less thought about packaging and help you avoid common
+mistakes.
+See `Why use Flit? <https://flit.readthedocs.io/en/latest/rationale.html>`_ for
+more about how it compares to other Python packaging tools.
+
+Install
+-------
+
+::
+
+ $ python3 -m pip install flit
+
+Flit requires Python 3 and therefore needs to be installed using the Python 3
+version of pip.
+
+Python 2 modules can be distributed using Flit, but need to be importable on
+Python 3 without errors.
+
+Usage
+-----
+
+Say you're writing a module ``foobar`` — either as a single file ``foobar.py``,
+or as a directory — and you want to distribute it.
+
+1. Make sure that foobar's docstring starts with a one-line summary of what
+ the module is, and that it has a ``__version__``:
+
+ .. code-block:: python
+
+ """An amazing sample package!"""
+
+ __version__ = "0.1"
+
+2. Install flit if you don't already have it::
+
+ python3 -m pip install flit
+
+3. Run ``flit init`` in the directory containing the module to create a
+ ``pyproject.toml`` file. It will look something like this:
+
+ .. code-block:: ini
+
+ [build-system]
+ requires = ["flit_core >=3.2,<4"]
+ build-backend = "flit_core.buildapi"
+
+ [project]
+ name = "foobar"
+ authors = [{name = "Sir Robin", email = "robin@camelot.uk"}]
+ dynamic = ["version", "description"]
+
+ [project.urls]
+ Home = "https://github.com/sirrobin/foobar"
+
+ You can edit this file to add other metadata, for example to set up
+ command line scripts. See the
+ `pyproject.toml page <https://flit.readthedocs.io/en/latest/pyproject_toml.html#scripts-section>`_
+ of the documentation.
+
+ If you have already got a ``flit.ini`` file to use with older versions of
+ Flit, convert it to ``pyproject.toml`` by running ``python3 -m flit.tomlify``.
+
+4. Run this command to upload your code to PyPI::
+
+ flit publish
+
+Once your package is published, people can install it using *pip* just like
+any other package. In most cases, pip will download a 'wheel' package, a
+standard format it knows how to install. If you specifically ask pip to install
+an 'sdist' package, it will install and use Flit in a temporary environment.
+
+
+To install a package locally for development, run::
+
+ flit install [--symlink] [--python path/to/python]
+
+Flit packages a single importable module or package at a time, using the import
+name as the name on PyPI. All subpackages and data files within a package are
+included automatically.
+
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..f2de269
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,80 @@
+**Flit** is a simple way to put Python packages and modules on PyPI.
+It tries to require less thought about packaging and help you avoid common
+mistakes.
+See `Why use Flit? <https://flit.readthedocs.io/en/latest/rationale.html>`_ for
+more about how it compares to other Python packaging tools.
+
+Install
+-------
+
+::
+
+ $ python3 -m pip install flit
+
+Flit requires Python 3 and therefore needs to be installed using the Python 3
+version of pip.
+
+Python 2 modules can be distributed using Flit, but need to be importable on
+Python 3 without errors.
+
+Usage
+-----
+
+Say you're writing a module ``foobar`` — either as a single file ``foobar.py``,
+or as a directory — and you want to distribute it.
+
+1. Make sure that foobar's docstring starts with a one-line summary of what
+ the module is, and that it has a ``__version__``:
+
+ .. code-block:: python
+
+ """An amazing sample package!"""
+
+ __version__ = "0.1"
+
+2. Install flit if you don't already have it::
+
+ python3 -m pip install flit
+
+3. Run ``flit init`` in the directory containing the module to create a
+ ``pyproject.toml`` file. It will look something like this:
+
+ .. code-block:: ini
+
+ [build-system]
+ requires = ["flit_core >=3.2,<4"]
+ build-backend = "flit_core.buildapi"
+
+ [project]
+ name = "foobar"
+ authors = [{name = "Sir Robin", email = "robin@camelot.uk"}]
+ dynamic = ["version", "description"]
+
+ [project.urls]
+ Home = "https://github.com/sirrobin/foobar"
+
+ You can edit this file to add other metadata, for example to set up
+ command line scripts. See the
+ `pyproject.toml page <https://flit.readthedocs.io/en/latest/pyproject_toml.html#scripts-section>`_
+ of the documentation.
+
+ If you have already got a ``flit.ini`` file to use with older versions of
+ Flit, convert it to ``pyproject.toml`` by running ``python3 -m flit.tomlify``.
+
+4. Run this command to upload your code to PyPI::
+
+ flit publish
+
+Once your package is published, people can install it using *pip* just like
+any other package. In most cases, pip will download a 'wheel' package, a
+standard format it knows how to install. If you specifically ask pip to install
+an 'sdist' package, it will install and use Flit in a temporary environment.
+
+
+To install a package locally for development, run::
+
+ flit install [--symlink] [--python path/to/python]
+
+Flit packages a single importable module or package at a time, using the import
+name as the name on PyPI. All subpackages and data files within a package are
+included automatically.
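+
+For example, to expose a command line script from the ``foobar`` module above,
+you could add a section like this to ``pyproject.toml`` (a sketch; the script
+name and function are placeholders):
+
+.. code-block:: toml
+
+    [project.scripts]
+    foobar = "foobar:main"
+
+After reinstalling, running ``foobar`` on the command line would call the
+``main()`` function in the ``foobar`` module.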
diff --git a/bootstrap_dev.py b/bootstrap_dev.py
new file mode 100644
index 0000000..e7cf3e1
--- /dev/null
+++ b/bootstrap_dev.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+
+# Symlink install flit & flit_core for development.
+# Most projects can do the same with 'flit install --symlink'.
+# But that doesn't work until Flit is installed, so we need some bootstrapping.
+
+import argparse
+import logging
+import os
+from pathlib import Path
+import sys
+
+my_dir = Path(__file__).parent
+os.chdir(str(my_dir))
+sys.path.insert(0, 'flit_core')
+
+from flit.install import Installer
+
+ap = argparse.ArgumentParser()
+ap.add_argument('--user', action='store_true', default=None)
+args = ap.parse_args()
+
+logging.basicConfig(level=logging.INFO)
+
+install_kwargs = {'symlink': True}
+if os.name == 'nt':
+ # Use .pth files instead of symlinking on Windows
+ install_kwargs = {'symlink': False, 'pth': True}
+
+# Install flit_core
+Installer.from_ini_path(
+ my_dir / 'flit_core' / 'pyproject.toml', user=args.user, **install_kwargs
+).install()
+print("Linked flit_core into site-packages.")
+
+# Install flit
+Installer.from_ini_path(
+ my_dir / 'pyproject.toml', user=args.user, **install_kwargs
+).install()
+print("Linked flit into site-packages.")
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000..db24720
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1 @@
+comment: off
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..b3d458c
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Flit.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Flit.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/Flit"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Flit"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/doc/_static/flit_logo_nobg.svg b/doc/_static/flit_logo_nobg.svg
new file mode 100644
index 0000000..72a3e7d
--- /dev/null
+++ b/doc/_static/flit_logo_nobg.svg
@@ -0,0 +1,149 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="293.71423"
+ height="293.71423"
+ id="svg3116"
+ version="1.1"
+ inkscape:version="0.48.5 r10040"
+ sodipodi:docname="New document 2">
+ <defs
+ id="defs3118">
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4452"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(2.0646126,0,0,2.0646126,2131.1883,-505.68517)"
+ x1="294.93112"
+ y1="187.01703"
+ x2="536.55017"
+ y2="453.6973" />
+ <linearGradient
+ inkscape:collect="always"
+ id="linearGradient4432">
+ <stop
+ style="stop-color:#00c48a;stop-opacity:1"
+ offset="0"
+ id="stop4434" />
+ <stop
+ style="stop-color:#00cbff;stop-opacity:1"
+ offset="1"
+ id="stop4436" />
+ </linearGradient>
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4460"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4454"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4456"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4458"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ </defs>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="0.98994949"
+ inkscape:cx="421.10472"
+ inkscape:cy="117.57302"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0"
+ inkscape:window-width="1680"
+ inkscape:window-height="987"
+ inkscape:window-x="1680"
+ inkscape:window-y="27"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata3121">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-204.57382,-259.79316)">
+ <g
+ transform="matrix(0.24278029,0,0,0.24278029,-425.3047,353.61591)"
+ id="g4440">
+ <path
+ id="path4442"
+ style="fill:url(#linearGradient4452);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ d="m 3027.3295,254.07953 c -13.7048,13.09172 -26.0357,28.06465 -37.0176,44.61328 l 0,41.02539 c 0,5.00744 1.7866,9.29826 5.3633,12.875 3.5768,3.57674 7.8695,5.36524 12.877,5.36524 5.3651,0 9.8353,-1.7885 13.4121,-5.36524 3.5767,-3.57674 5.3652,-7.86756 5.3652,-12.875 l 0,-85.63867 z m 40.2012,-35.61328 -40.2012,0 0,35.61328 c 2.8934,-2.76398 5.8436,-5.45128 8.8594,-8.04492 11.4454,-9.57254 21.8635,-18.67845 31.3418,-27.56836 z m 34.6465,-37.01953 c -9.7805,12.36045 -21.2701,24.47349 -34.6465,37.01953 l 46.7148,0 c 5.0075,0 9.2983,-1.78849 12.875,-5.36524 3.5768,-3.57674 5.3653,-7.86951 5.3653,-12.87695 0,-5.36511 -1.7885,-9.83537 -5.3653,-13.41211 -3.5767,-3.57674 -7.8675,-5.36523 -12.875,-5.36523 l -12.0683,0 z m 33.4824,-60.625 -108.3301,0 0,60.625 74.8477,0 c 14.8892,-18.81683 25.825,-38.22167 33.4824,-60.625 z m 9.5449,-37.019527 c -1.2585,5.3045 -2.516,10.94337 -3.7636,17.044917 -1.6666,6.94336 -3.5949,13.57773 -5.7813,19.97461 l 23.6523,0 c 5.0075,0 9.3003,-1.78849 12.877,-5.36524 3.5767,-3.93441 5.3652,-8.40662 5.3652,-13.41406 0,-5.007437 -1.7885,-9.298257 -5.3652,-12.874997 -3.5767,-3.57674 -7.8695,-5.36523 -12.877,-5.36523 l -14.1074,0 z m -476.3222,-177.33008 c -2.0115,-10e-4 -4.0205,0.0285 -6.0352,0.0957 -32.2353,1.07179 -39.6278,16.38542 -12.3691,36.69141 27.2586,20.30597 67.4768,42.69338 95.8925,62.71679 28.4157,20.02339 35.7846,52.54784 62.5118,70.95899 26.7272,18.41115 54.3969,37.466677 74.8691,57.441407 20.4723,19.97478 42.8139,29.80954 62.9473,12.42382 -41.9066,63.48263 12.2332,134.84987 -173.1446,298.5918 29.2843,16.0722 57.5499,-20.52973 74.8907,-31.10547 2.5476,26.72831 -25.0012,91.1355 -29.2129,135.5625 21.025,-46.6543 56.9285,-120.22794 63.457,-115.36328 9.8881,7.3681 14.1396,135.40743 24.3965,116.33203 21.0116,-25.8867 22.3263,-107.36363 34.0937,-138.89062 11.2667,-42.29933 27.605,-80.7941 49.1328,-113.23438 l 0,-196.65039 c 0,-5.007437 1.7866,-9.298257 5.3633,-12.874997 3.5768,-3.57674 7.8695,-5.36523 12.877,-5.36523 l 136.6523,0 c 15.517,-65.40122 31.2666,-72.74791 32.5215,-83.04883003 13.2678,-9.65059997 122.3748,-33.57495997 180.1094,-40.48437997 -50.3218,-5.57548 -143.4532,11.69044 -195.586,7.36328 -73.7651,-34.78206 -111.4896,10.40375 -121.4531,64.52149 -83.501,33.35098 -204.8488,-83.78957 -259.7637,-101.41602 -51.4826,-16.52477 -81.9782,-24.2477 -112.1503,-24.26562 z m 83.1191,-45.779303 c -0.8667,-0.008 -1.7091,4.6e-4 -2.5293,0.0254 -22.9645,0.69796 -27.5787,14.12698 -6.4688,35.07813 7.5478,7.490993 16.3801,15.378493 25.6504,23.388673 3.4327,1.07931 6.7052,2.08334 10.3399,3.25 54.6631,17.54562 175.142,133.67703 258.6035,101.84765 -64.3681,-33.00238 -126.9043,-104.68293 -163.5801,-122.48828 -48.6408,-23.613873 -77.7408,-35.557113 -107.6094,-39.826173 -1.9912,-0.28458 -3.988,-0.53646 -5.9921,-0.7539 -3.0061,-0.32634 -5.8141,-0.49799 -8.4141,-0.52149 z"
+ inkscape:connector-curvature="0" />
+ <g
+ id="g4444"
+ style="fill:url(#linearGradient4460);fill-opacity:1"
+ transform="translate(-52.527932,-383.85797)">
+ <path
+ id="path4446"
+ d="m 3295.8047,467.66016 c -5.0075,0 -9.3002,1.78849 -12.877,5.36523 -3.5767,3.57674 -5.3652,7.86756 -5.3652,12.875 l 0,237.67578 c 0,5.00744 1.7885,9.29826 5.3652,12.875 3.5768,3.57674 7.8695,5.36524 12.877,5.36524 l 150.7598,0 c 5.0074,0 9.2982,-1.7885 12.875,-5.36524 3.5767,-3.57674 5.3652,-7.86756 5.3652,-12.875 0,-5.36511 -1.7885,-9.83732 -5.3652,-13.41406 -3.5768,-3.57674 -7.8676,-5.36328 -12.875,-5.36328 l -131.9825,0 0,-218.89844 c 0,-5.00744 -1.7885,-9.29826 -5.3652,-12.875 -3.5767,-3.57674 -8.047,-5.36523 -13.4121,-5.36523 z"
+ style="fill:url(#linearGradient4454);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ inkscape:connector-curvature="0" />
+ <path
+ id="path4448"
+ d="m 3532.623,467.66016 c -5.0074,0 -9.2982,1.78849 -12.875,5.36523 -3.5767,3.57674 -5.3652,7.86756 -5.3652,12.875 l 0,237.67578 c 0,5.00744 1.7885,9.29826 5.3652,12.875 3.5768,3.57674 7.8676,5.36524 12.875,5.36524 5.3652,0 9.8374,-1.7885 13.4141,-5.36524 3.5768,-3.57674 5.3652,-7.86756 5.3652,-12.875 l 0,-237.67578 c 0,-5.00744 -1.7884,-9.29826 -5.3652,-12.875 -3.5767,-3.57674 -8.0489,-5.36523 -13.4141,-5.36523 z"
+ style="fill:url(#linearGradient4456);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ inkscape:connector-curvature="0" />
+ <path
+ id="path4450"
+ d="m 3617.123,467.66016 c -5.0074,0 -9.2982,1.78849 -12.875,5.36523 -3.5767,3.57674 -5.3652,7.86756 -5.3652,12.875 0,5.00744 1.7885,9.47965 5.3652,13.41406 3.5768,3.57675 7.8676,5.36524 12.875,5.36524 l 56.8711,0 0,218.89648 c 0,5.00744 1.7885,9.29826 5.3653,12.875 3.5767,3.57674 7.8695,5.36524 12.8769,5.36524 5.3651,0 9.8354,-1.7885 13.4121,-5.36524 3.5768,-3.57674 5.3653,-7.86756 5.3653,-12.875 l 0,-218.89648 56.8691,0 c 5.0075,0 9.3002,-1.78849 12.877,-5.36524 3.5767,-3.93441 5.3652,-8.40662 5.3652,-13.41406 0,-5.00744 -1.7885,-9.29826 -5.3652,-12.875 -3.5768,-3.57674 -7.8695,-5.36523 -12.877,-5.36523 l -150.7598,0 z"
+ style="fill:url(#linearGradient4458);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ inkscape:connector-curvature="0" />
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/doc/_static/flit_logo_nobg_cropped.png b/doc/_static/flit_logo_nobg_cropped.png
new file mode 100644
index 0000000..5385556
--- /dev/null
+++ b/doc/_static/flit_logo_nobg_cropped.png
Binary files differ
diff --git a/doc/_static/flit_logo_nobg_cropped.svg b/doc/_static/flit_logo_nobg_cropped.svg
new file mode 100644
index 0000000..2918743
--- /dev/null
+++ b/doc/_static/flit_logo_nobg_cropped.svg
@@ -0,0 +1,158 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="293.54721"
+ height="194.0495"
+ id="svg3116"
+ version="1.1"
+ inkscape:version="0.48.5 r10040"
+ sodipodi:docname="flit_logo_nobg.svg">
+ <defs
+ id="defs3118">
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4452"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(2.0646126,0,0,2.0646126,2131.1883,-505.68517)"
+ x1="294.93112"
+ y1="187.01703"
+ x2="536.55017"
+ y2="453.6973" />
+ <linearGradient
+ inkscape:collect="always"
+ id="linearGradient4432">
+ <stop
+ style="stop-color:#00c48a;stop-opacity:1"
+ offset="0"
+ id="stop4434" />
+ <stop
+ style="stop-color:#00cbff;stop-opacity:1"
+ offset="1"
+ id="stop4436" />
+ </linearGradient>
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4460"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4454"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4456"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient4458"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4432"
+ id="linearGradient3991"
+ gradientUnits="userSpaceOnUse"
+ x1="3277.5625"
+ y1="604.73828"
+ x2="3786.125"
+ y2="604.73828" />
+ </defs>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="1.4"
+ inkscape:cx="221.08413"
+ inkscape:cy="35.027636"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0"
+ inkscape:window-width="1680"
+ inkscape:window-height="987"
+ inkscape:window-x="1680"
+ inkscape:window-y="27"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata3121">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-204.65524,-306.77908)">
+ <g
+ transform="matrix(0.24278029,0,0,0.24278029,-425.3047,353.61591)"
+ id="g4440">
+ <path
+ id="path4442"
+ style="fill:url(#linearGradient4452);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ d="m 3027.3295,254.07953 c -13.7048,13.09172 -26.0357,28.06465 -37.0176,44.61328 l 0,41.02539 c 0,5.00744 1.7866,9.29826 5.3633,12.875 3.5768,3.57674 7.8695,5.36524 12.877,5.36524 5.3651,0 9.8353,-1.7885 13.4121,-5.36524 3.5767,-3.57674 5.3652,-7.86756 5.3652,-12.875 l 0,-85.63867 z m 40.2012,-35.61328 -40.2012,0 0,35.61328 c 2.8934,-2.76398 5.8436,-5.45128 8.8594,-8.04492 11.4454,-9.57254 21.8635,-18.67845 31.3418,-27.56836 z m 34.6465,-37.01953 c -9.7805,12.36045 -21.2701,24.47349 -34.6465,37.01953 l 46.7148,0 c 5.0075,0 9.2983,-1.78849 12.875,-5.36524 3.5768,-3.57674 5.3653,-7.86951 5.3653,-12.87695 0,-5.36511 -1.7885,-9.83537 -5.3653,-13.41211 -3.5767,-3.57674 -7.8675,-5.36523 -12.875,-5.36523 l -12.0683,0 z m 33.4824,-60.625 -108.3301,0 0,60.625 74.8477,0 c 14.8892,-18.81683 25.825,-38.22167 33.4824,-60.625 z m 9.5449,-37.019527 c -1.2585,5.3045 -2.516,10.94337 -3.7636,17.044917 -1.6666,6.94336 -3.5949,13.57773 -5.7813,19.97461 l 23.6523,0 c 5.0075,0 9.3003,-1.78849 12.877,-5.36524 3.5767,-3.93441 5.3652,-8.40662 5.3652,-13.41406 0,-5.007437 -1.7885,-9.298257 -5.3652,-12.874997 -3.5767,-3.57674 -7.8695,-5.36523 -12.877,-5.36523 l -14.1074,0 z m -476.3222,-177.33008 c -2.0115,-10e-4 -4.0205,0.0285 -6.0352,0.0957 -32.2353,1.07179 -39.6278,16.38542 -12.3691,36.69141 27.2586,20.30597 67.4768,42.69338 95.8925,62.71679 28.4157,20.02339 35.7846,52.54784 62.5118,70.95899 26.7272,18.41115 54.3969,37.466677 74.8691,57.441407 20.4723,19.97478 42.8139,29.80954 62.9473,12.42382 -41.9066,63.48263 12.2332,134.84987 -173.1446,298.5918 29.2843,16.0722 57.5499,-20.52973 74.8907,-31.10547 2.5476,26.72831 -25.0012,91.1355 -29.2129,135.5625 21.025,-46.6543 56.9285,-120.22794 63.457,-115.36328 9.8881,7.3681 14.1396,135.40743 24.3965,116.33203 21.0116,-25.8867 22.3263,-107.36363 34.0937,-138.89062 11.2667,-42.29933 27.605,-80.7941 49.1328,-113.23438 l 0,-196.65039 c 0,-5.007437 1.7866,-9.298257 5.3633,-12.874997 3.5768,-3.57674 7.8695,-5.36523 12.877,-5.36523 l 136.6523,0 c 15.517,-65.40122 31.2666,-72.74791 32.5215,-83.04883003 13.2678,-9.65059997 122.3748,-33.57495997 180.1094,-40.48437997 -50.3218,-5.57548 -143.4532,11.69044 -195.586,7.36328 -73.7651,-34.78206 -111.4896,10.40375 -121.4531,64.52149 -83.501,33.35098 -204.8488,-83.78957 -259.7637,-101.41602 -51.4826,-16.52477 -81.9782,-24.2477 -112.1503,-24.26562 z m 83.1191,-45.779303 c -0.8667,-0.008 -1.7091,4.6e-4 -2.5293,0.0254 -22.9645,0.69796 -27.5787,14.12698 -6.4688,35.07813 7.5478,7.490993 16.3801,15.378493 25.6504,23.388673 3.4327,1.07931 6.7052,2.08334 10.3399,3.25 54.6631,17.54562 175.142,133.67703 258.6035,101.84765 -64.3681,-33.00238 -126.9043,-104.68293 -163.5801,-122.48828 -48.6408,-23.613873 -77.7408,-35.557113 -107.6094,-39.826173 -1.9912,-0.28458 -3.988,-0.53646 -5.9921,-0.7539 -3.0061,-0.32634 -5.8141,-0.49799 -8.4141,-0.52149 z"
+ inkscape:connector-curvature="0" />
+ <g
+ id="g4444"
+ style="fill:url(#linearGradient3991);fill-opacity:1"
+ transform="translate(-52.527932,-383.85797)">
+ <path
+ id="path4446"
+ d="m 3295.8047,467.66016 c -5.0075,0 -9.3002,1.78849 -12.877,5.36523 -3.5767,3.57674 -5.3652,7.86756 -5.3652,12.875 l 0,237.67578 c 0,5.00744 1.7885,9.29826 5.3652,12.875 3.5768,3.57674 7.8695,5.36524 12.877,5.36524 l 150.7598,0 c 5.0074,0 9.2982,-1.7885 12.875,-5.36524 3.5767,-3.57674 5.3652,-7.86756 5.3652,-12.875 0,-5.36511 -1.7885,-9.83732 -5.3652,-13.41406 -3.5768,-3.57674 -7.8676,-5.36328 -12.875,-5.36328 l -131.9825,0 0,-218.89844 c 0,-5.00744 -1.7885,-9.29826 -5.3652,-12.875 -3.5767,-3.57674 -8.047,-5.36523 -13.4121,-5.36523 z"
+ style="fill:url(#linearGradient4454);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ inkscape:connector-curvature="0" />
+ <path
+ id="path4448"
+ d="m 3532.623,467.66016 c -5.0074,0 -9.2982,1.78849 -12.875,5.36523 -3.5767,3.57674 -5.3652,7.86756 -5.3652,12.875 l 0,237.67578 c 0,5.00744 1.7885,9.29826 5.3652,12.875 3.5768,3.57674 7.8676,5.36524 12.875,5.36524 5.3652,0 9.8374,-1.7885 13.4141,-5.36524 3.5768,-3.57674 5.3652,-7.86756 5.3652,-12.875 l 0,-237.67578 c 0,-5.00744 -1.7884,-9.29826 -5.3652,-12.875 -3.5767,-3.57674 -8.0489,-5.36523 -13.4141,-5.36523 z"
+ style="fill:url(#linearGradient4456);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ inkscape:connector-curvature="0" />
+ <path
+ id="path4450"
+ d="m 3617.123,467.66016 c -5.0074,0 -9.2982,1.78849 -12.875,5.36523 -3.5767,3.57674 -5.3652,7.86756 -5.3652,12.875 0,5.00744 1.7885,9.47965 5.3652,13.41406 3.5768,3.57675 7.8676,5.36524 12.875,5.36524 l 56.8711,0 0,218.89648 c 0,5.00744 1.7885,9.29826 5.3653,12.875 3.5767,3.57674 7.8695,5.36524 12.8769,5.36524 5.3651,0 9.8354,-1.7885 13.4121,-5.36524 3.5768,-3.57674 5.3653,-7.86756 5.3653,-12.875 l 0,-218.89648 56.8691,0 c 5.0075,0 9.3002,-1.78849 12.877,-5.36524 3.5767,-3.93441 5.3652,-8.40662 5.3652,-13.41406 0,-5.00744 -1.7885,-9.29826 -5.3652,-12.875 -3.5768,-3.57674 -7.8695,-5.36523 -12.877,-5.36523 l -150.7598,0 z"
+ style="fill:url(#linearGradient4458);fill-opacity:1;fill-rule:evenodd;stroke:none"
+ inkscape:connector-curvature="0" />
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/doc/bootstrap.rst b/doc/bootstrap.rst
new file mode 100644
index 0000000..19d7d92
--- /dev/null
+++ b/doc/bootstrap.rst
@@ -0,0 +1,41 @@
+Bootstrapping
+=============
+
+Flit is itself packaged using Flit, as are some foundational packaging tools
+such as ``pep517``. So where can you start if you need to install everything
+from source?
+
+.. note::
+
+ For most users, ``pip`` handles all this automatically. You should only need
+ to deal with this if you're building things entirely from scratch, such as
+ putting Python packages into another package format.
+
+The key piece is ``flit_core``. This is a package which can build itself using
+nothing except Python and the standard library. From an unpacked source archive,
+you can make a wheel by running::
+
+ python -m flit_core.wheel
+
+And then you can install this wheel with the ``bootstrap_install.py`` script
+included in the sdist (or by unzipping it to the correct directory)::
+
+ # Install to site-packages for this Python:
+ python bootstrap_install.py dist/flit_core-*.whl
+
+ # Install somewhere else:
+ python bootstrap_install.py --installdir /path/to/site-packages dist/flit_core-*.whl
+
+As of version 3.6, flit_core bundles the ``tomli`` TOML parser, to avoid a
+dependency cycle. If you need to unbundle it, you will need to special-case
+installing flit_core and/or tomli to get around that cycle.
+
+After ``flit_core``, I recommend that you get `installer
+<https://pypi.org/project/installer/>`_ set up. You can use
+``python -m flit_core.wheel`` again to make a wheel, and then use installer
+itself (from the source directory) to install it.
+
+After that, you probably want to get `build <https://pypi.org/project/build/>`_
+and its dependencies installed as the goal of the bootstrapping phase. You can
+then use ``build`` to create wheels of any other Python packages, and
+``installer`` to install them.
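+
+Putting the pieces together, one possible bootstrap sequence looks roughly like
+this (a sketch, not exact commands; run each build step inside the relevant
+unpacked source tree)::
+
+    # 1. Build flit_core's wheel and install it with the bundled script
+    python -m flit_core.wheel
+    python bootstrap_install.py dist/flit_core-*.whl
+
+    # 2. Build installer's wheel with flit_core, then install it with itself
+    python -m flit_core.wheel
+    python -m installer dist/installer-*.whl
+
+    # 3. Once 'build' and its dependencies are installed the same way, use
+    #    them for every remaining package
+    python -m build --no-isolation --wheel
+    python -m installer dist/*.whl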
diff --git a/doc/cmdline.rst b/doc/cmdline.rst
new file mode 100644
index 0000000..c443ddd
--- /dev/null
+++ b/doc/cmdline.rst
@@ -0,0 +1,262 @@
+Flit command line interface
+===========================
+
+All operations use the ``flit`` command, followed by one of a number of
+subcommands.
+
+Common options
+--------------
+
+.. program:: flit
+
+.. option:: -f <path>, --ini-file <path>
+
+ Path to a config file specifying the module to build. The default is
+ ``pyproject.toml``.
+
+.. option:: --version
+
+ Show the version of Flit in use.
+
+.. option:: --help
+
+ Show help on the command-line interface.
+
+.. option:: --debug
+
+ Show more detailed logs about what flit is doing.
+
+.. _build_cmd:
+
+``flit build``
+--------------
+
+.. program:: flit build
+
+Build a wheel and an sdist (tarball) from the package.
+
+.. option:: --format <format>
+
+ Limit to building either ``wheel`` or ``sdist``.
+
+.. option:: --setup-py
+
+ Generate a ``setup.py`` file in the sdist, so it can be installed by older
+ versions of pip.
+
+.. option:: --no-setup-py
+
+ Don't generate a setup.py file in the sdist. This is the default.
+ An sdist built without this will only work with tools that support PEP 517,
+ but the wheel will still be usable by any compatible tool.
+
+ .. versionchanged:: 3.5
+
+ Generating ``setup.py`` disabled by default.
+
+.. _publish_cmd:
+
+``flit publish``
+----------------
+
+.. program:: flit publish
+
+Build a wheel and an sdist (tarball) from the package, and upload them to PyPI
+or another repository.
+
+.. option:: --format <format>
+
+ Limit to publishing either ``wheel`` or ``sdist``.
+ You should normally publish the two formats together.
+
+.. option:: --setup-py
+
+ Generate a ``setup.py`` file in the sdist, so it can be installed by older
+ versions of pip.
+
+.. option:: --no-setup-py
+
+ Don't generate a setup.py file in the sdist. This is the default.
+ An sdist built without this will only work with tools that support PEP 517,
+ but the wheel will still be usable by any compatible tool.
+
+ .. versionchanged:: 3.5
+
+ Generating ``setup.py`` disabled by default.
+
+.. option:: --repository <repository>
+
+ Name of a repository to upload packages to. Should match a section in
+ ``~/.pypirc``. The default is ``pypi``.
+
+.. option:: --pypirc <pypirc>
+
+ The .pypirc config file to be used. The default is ``~/.pypirc``.
+
+.. seealso:: :doc:`upload`
+
+.. _install_cmd:
+
+``flit install``
+----------------
+
+.. program:: flit install
+
+Install the package on your system.
+
+By default, the package is installed to the same Python environment that Flit
+itself is installed in; use :option:`--python` or :envvar:`FLIT_INSTALL_PYTHON`
+to override this.
+
+If you don't have permission to modify the environment (e.g. the system Python
+on Linux), Flit may do a user install instead. Use the :option:`--user` or
+:option:`--env` flags to force this one way or the other, rather than letting
+Flit guess.
+
+.. option:: -s, --symlink
+
+ Symlink the module into site-packages rather than copying it, so that you
+ can test changes without reinstalling the module.
+
+.. option:: --pth-file
+
+ Create a ``.pth`` file in site-packages rather than copying the module, so
+ you can test changes without reinstalling. This is a less elegant alternative
+ to ``--symlink``, but it works on Windows, which typically doesn't allow
+ symlinks.
+
+.. option:: --deps <dependency option>
+
+ Which dependencies to install. One of ``all``, ``production``, ``develop``,
+ or ``none``. ``all`` and ``develop`` install the extras ``test``, ``doc``,
+ and ``dev``. Default ``all``.
+
+.. option:: --extras <extra[,extra,...]>
+
+ Which named extra features to install dependencies for. Specify ``all`` to
+ install all optional dependencies, or a comma-separated list of extras.
+ Default depends on ``--deps``.
+
+.. option:: --only-deps
+
+ Install the dependencies of this package, but not the package itself.
+
+ This can be useful for e.g. building a container image, where your own code
+ is copied or mounted into the container at a later stage.
+
+ .. versionadded:: 3.8
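+
+ For example, when building a container image, you might install only the
+ runtime dependencies before copying your own code in (a sketch)::
+
+    flit install --only-deps --deps production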
+
+.. option:: --user
+
+ Do a user-local installation. This is the default if flit is not in a
+ virtualenv or conda env (if the environment's library directory is
+ read-only and ``site.ENABLE_USER_SITE`` is true).
+
+.. option:: --env
+
+ Install into the environment - the opposite of :option:`--user`.
+ This is the default in a virtualenv or conda env (if the environment's
+ library directory is writable or ``site.ENABLE_USER_SITE`` is false).
+
+.. option:: --python <path to python>
+
+ Install for another Python, identified by the path of the python
+ executable. Using this option, you can install a module for Python 2, for
+ instance. See :envvar:`FLIT_INSTALL_PYTHON` if this option is not given.
+
+ .. versionchanged:: 2.1
+ Added :envvar:`FLIT_INSTALL_PYTHON` and use its value over the Python
+ running Flit when an explicit :option:`--python` option is not given.
+
+.. note::
+
+ Flit calls pip to do the installation. You can set any of pip's options
+ `using its environment variables
+ <https://pip.pypa.io/en/stable/topics/configuration/#environment-variables>`__.
+
+ When you use the :option:`--symlink` or :option:`--pth-file` options, pip
+ is used to install dependencies. Otherwise, Flit builds a wheel and then
+ calls pip to install that.
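+
+For example, a typical development setup might look like this (the virtualenv
+path is only an illustration)::
+
+    flit install --symlink --python .venv/bin/python
+    flit install --pth-file    # on Windows, where symlinks are typically restricted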
+
+.. _init_cmd:
+
+``flit init``
+-------------
+
+.. program:: flit init
+
+Create a new ``pyproject.toml`` config file by prompting for information about
+the module in the current directory.
+
+Environment variables
+---------------------
+
+.. envvar:: FLIT_NO_NETWORK
+
+ .. versionadded:: 0.10
+
+ Setting this to any non-empty value will stop flit from making network
+ connections (unless you explicitly ask to upload a package). This
+ is intended for downstream packagers, so if you use this, it's up to you to
+ ensure any necessary dependencies are installed.
+
+.. envvar:: FLIT_ROOT_INSTALL
+
+ By default, ``flit install`` will fail when run as root on POSIX systems,
+ because installing Python modules systemwide is not recommended. Setting
+ this to any non-empty value allows installation as root. It has no effect on
+ Windows.
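+
+ For example, in a container build where running as root is expected, you
+ might use::
+
+ FLIT_ROOT_INSTALL=1 flit install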
+
+.. envvar:: FLIT_USERNAME
+ FLIT_PASSWORD
+ FLIT_INDEX_URL
+
+ .. versionadded:: 0.11
+
+ Set a username, password, and index URL for uploading packages.
+ See :ref:`uploading packages with environment variables <upload_envvars>`
+ for more information.
+
+ Token-based upload to PyPI is supported. To upload using a PyPI token,
+ set ``FLIT_USERNAME`` to ``__token__``, and ``FLIT_PASSWORD`` to the
+ token value.
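+
+ For example, in a CI job you might set (with the token value shown here as
+ a placeholder)::
+
+ export FLIT_USERNAME=__token__
+ export FLIT_PASSWORD=pypi-xxxxxxxxxxxxxxxx
+ flit publish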
+
+.. envvar:: FLIT_ALLOW_INVALID
+
+ .. versionadded:: 0.13
+
+ Setting this to any non-empty value tells Flit to continue if it detects
+ invalid metadata, instead of failing with an error. Problems will still be
+ reported in the logs, but won't cause Flit to stop.
+
+ If the metadata is invalid, uploading the package to PyPI may fail. This
+ environment variable provides an escape hatch in case Flit incorrectly
+ rejects your valid metadata. If you need to use it and you believe your
+ metadata is valid, please `open an issue <https://github.com/pypa/flit/issues>`__.
+
+.. envvar:: FLIT_INSTALL_PYTHON
+
+ .. versionadded:: 2.1
+
+ .. program:: flit install
+
+ Set a default Python interpreter for :ref:`install_cmd` to use when
+ :option:`--python` is not specified. The value can be either an absolute
+ path, or a command name (which will be found in ``PATH``). If this is unset
+ or empty, the module is installed for the copy of Python that is running
+ Flit.
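+
+ For example, to make Flit install into a specific interpreter by default
+ (the path here is just an illustration)::
+
+ export FLIT_INSTALL_PYTHON=/usr/bin/python3.11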
+
+.. envvar:: SOURCE_DATE_EPOCH
+
+ To make reproducible builds, set this to a timestamp as a number of seconds
+ since the start of the year 1970 in UTC, and document the value you used.
+ On Unix systems, you can get a value for the current time by running::
+
+ date +%s
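+
+ You can then set the variable just for one build, for example with an
+ arbitrary example timestamp::
+
+ SOURCE_DATE_EPOCH=1672531200 flit build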
+
+
+ .. seealso::
+
+ `The SOURCE_DATE_EPOCH specification
+ <https://reproducible-builds.org/specs/source-date-epoch/>`__
+
diff --git a/doc/conf.py b/doc/conf.py
new file mode 100644
index 0000000..cce9bee
--- /dev/null
+++ b/doc/conf.py
@@ -0,0 +1,264 @@
+# -*- coding: utf-8 -*-
+#
+# Flit documentation build configuration file, created by
+# sphinx-quickstart on Sun Mar 15 19:16:41 2015.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinxcontrib_github_alt',
+ 'sphinx_rtd_theme',
+]
+
+github_project_url = "https://github.com/pypa/flit"
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Flit'
+copyright = u'2015, Thomas Kluyver'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '3.8.0'
+# The full version, including alpha/beta/rc tags.
+release = version #+ '.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = '_static/flit_logo_nobg_cropped.svg'
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Flitdoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ ('index', 'Flit.tex', u'Flit Documentation',
+ u'Thomas Kluyver', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'flit', u'Flit Documentation',
+ [u'Thomas Kluyver'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'Flit', u'Flit Documentation',
+ u'Thomas Kluyver', 'Flit', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
diff --git a/doc/development.rst b/doc/development.rst
new file mode 100644
index 0000000..18090af
--- /dev/null
+++ b/doc/development.rst
@@ -0,0 +1,26 @@
+Developing Flit
+===============
+
+To get a development installation of Flit itself::
+
+ git clone https://github.com/pypa/flit.git
+ cd flit
+ python3 -m pip install docutils requests
+ python3 bootstrap_dev.py
+
+This links Flit into the current Python environment, so you can make changes
+and try them without having to reinstall each time.
+
+Testing
+-------
+
+To run the tests in separate environments for each available Python version::
+
+ tox
+
+`tox <https://tox.readthedocs.io/en/latest/>`_ has many options.
+
+To run the tests in your current environment, run::
+
+ pytest
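+
+To run only a subset of the tests, you can point pytest at a path; for
+example, assuming the core tests live under ``flit_core/flit_core/tests`` as
+in this repository::
+
+ pytest flit_core/flit_core/tests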
+
diff --git a/doc/flit_ini.rst b/doc/flit_ini.rst
new file mode 100644
index 0000000..b41d967
--- /dev/null
+++ b/doc/flit_ini.rst
@@ -0,0 +1,113 @@
+:orphan:
+
+The flit.ini config file
+========================
+
+This file lives next to the module or package.
+
+.. note::
+
+ Flit 0.12 and above uses a :doc:`pyproject.toml file <pyproject_toml>`
+ to store this information. Run ``python3 -m flit.tomlify`` to convert a
+ ``flit.ini`` file to ``pyproject.toml``.
+
+Metadata section
+----------------
+
+There are four required fields:
+
+module
+ The name of the module/package, as you'd use in an import statement.
+author
+ Your name
+author-email
+ Your email address
+home-page
+ A URL for the project, such as its Github repository.
+
+e.g. for flit itself
+
+.. code-block:: ini
+
+ [metadata]
+ module=flit
+ author=Thomas Kluyver
+ author-email=thomas@kluyver.me.uk
+ home-page=https://github.com/pypa/flit
+
+The remaining fields are optional:
+
+requires
+ A list of other packages from PyPI that this package needs. Each package
+ should be on its own line, and may be followed by a version specifier in
+ parentheses, like ``(>=4.1)``, and/or an `environment marker
+ <https://www.python.org/dev/peps/pep-0345/#environment-markers>`_
+ after a semicolon. For example:
+
+ .. code-block:: ini
+
+ requires = requests (>=2.6)
+ configparser; python_version == '2.7'
+
+dev-requires
+ Packages that are required for development. This field is in the same format
+ as ``requires``.
+
+ These are not (yet) encoded in the wheel, but are used when doing
+ ``flit install``.
+description-file
+ A path (relative to the .ini file) to a file containing a longer description
+ of your package to show on PyPI. This should be written in `reStructuredText
+ <http://docutils.sourceforge.net/docs/user/rst/quickref.html>`_. If your long
+ description is not valid reStructuredText, a warning will be printed,
+ and it will be interpreted as plain text on PyPI.
+classifiers
+ A list of `Trove classifiers <https://pypi.python.org/pypi?%3Aaction=list_classifiers>`_,
+ one per line, indented.
+requires-python
+ A version specifier for the versions of Python this requires, e.g. ``~=3.3`` or
+ ``>=3.3,<4``, which are equivalent.
+dist-name
+ If you want your package's name on PyPI to be different from the importable
+ module name, set this to the PyPI name.
+keywords
+ Comma-separated list of words to help with searching for your package.
+license
+ The name of a license, if you're using one for which there isn't a Trove
+ classifier. It's recommended to use Trove classifiers instead of this in
+ most cases.
+maintainer, maintainer-email
+ Like author, for if you've taken over a project from someone else.
+
+Here's the full example from flit itself:
+
+.. code-block:: ini
+
+ [metadata]
+ author=Thomas Kluyver
+ author-email=thomas@kluyver.me.uk
+ home-page=https://github.com/pypa/flit
+ requires=requests
+ requires-python= >=3
+ description-file=README.rst
+ classifiers=Intended Audience :: Developers
+ License :: OSI Approved :: BSD License
+ Programming Language :: Python :: 3
+ Topic :: Software Development :: Libraries :: Python Modules
+
+.. _flit_ini_scripts:
+
+Scripts section
+---------------
+
+Each key and value in this section describes a shell command to be installed along with
+your package. These work like setuptools 'entry points'. Here's the section
+for flit:
+
+.. code-block:: ini
+
+ [scripts]
+ flit = flit:main
+
+This will create a ``flit`` command, which will call the function ``main()``
+imported from :mod:`flit`.
diff --git a/doc/history.rst b/doc/history.rst
new file mode 100644
index 0000000..e04e1fb
--- /dev/null
+++ b/doc/history.rst
@@ -0,0 +1,497 @@
+Release history
+===============
+
+Version 3.8
+-----------
+
+- A project name containing hyphens is now automatically translated to use
+ underscores for the import name (:ghpull:`566`).
+- New option :option:`flit install --only-deps` to install the dependencies of
+ the package, but not the package itself.
+- Add support for recursive globbing (``**``) in sdist includes and excludes
+ (:ghpull:`550`).
+- Python's bytecode cache files (``__pycache__`` folders and ``.pyc`` files)
+ are now always excluded from sdists (:ghpull:`581`).
+- Use tomllib in Python 3.11, rather than tomli (:ghpull:`573`, :ghpull:`604`).
+- Fix crash when unable to get a password from ``keyring`` (:ghpull:`567`).
+- Fix including modified files in sdist when using Mercurial (:ghpull:`541`).
+- Fix for some cases of determining whether a package supports Python 2 or not
+ (:ghpull:`593`).
+- Fix parsing version number from code using multiple assignments (:ghpull:`474`).
+- Document how to use a PyPI token with :envvar:`FLIT_PASSWORD` (:ghpull:`602`).
+- Fix link to information about environment variables for pip (:ghpull:`576`).
+- Link to the docs for the latest stable version in package metadata
+ (:ghpull:`589`).
+- Remove a mention of the ``toml`` package, which is no longer needed, from the
+ :doc:`development` page (:ghpull:`601`).
+- The :doc:`bootstrap <bootstrap>` install script for ``flit_core`` accepts a
+ new ``--install-root`` option.
+- Ensure the license file is included in packages on PyPI (:ghpull:`603`).
+
+Version 3.7.1
+-------------
+
+- Fix building packages which need execution to get the version number,
+ and have a relative import in ``__init__.py`` (:ghpull:`531`).
+
+Version 3.7
+-----------
+
+- Support for :ref:`external data files <pyproject_toml_external_data>` such
+ as man pages or Jupyter extension support files (:ghpull:`510`).
+- Project names are now lowercase in wheel filenames and ``.dist-info`` folder
+ names, in line with the specifications (:ghpull:`498`).
+- Improved support for :doc:`bootstrapping <bootstrap>` a Python environment,
+ e.g. for downstream packagers (:ghpull:`511`). ``flit_core.wheel`` is usable
+ with ``python -m`` to create wheels before the `build <https://pypi.org/project/build/>`_
+ tool is available, and ``flit_core`` sdists also include a script to install
+ itself from a wheel before `installer <https://pypi.org/project/installer/>`_
+ is available.
+- Use newer importlib APIs, fixing some deprecation warnings (:ghpull:`499`).
+
+Version 3.6
+-----------
+
+- ``flit_core`` now bundles the `tomli <https://pypi.org/project/tomli/>`_ TOML
+ parser library (version 1.2.3) to avoid a circular dependency between
+ ``flit_core`` and ``tomli`` (:ghpull:`492`). This means ``flit_core`` now has
+ no dependencies except Python itself, both at build time and at runtime,
+ simplifying :doc:`bootstrapping <bootstrap>`.
+
+Version 3.5.1
+-------------
+
+- Fix development installs with ``flit install --symlink`` and ``--pth-file``,
+ which were broken in 3.5.0, especially for packages using a ``src`` folder
+ (:ghpull:`472`).
+
+Version 3.5
+-----------
+
+- You can now use Flit to distribute a module or package inside a namespace
+ package (as defined by :pep:`420`). To do this, specify the import name of the
+ concrete, inner module you are packaging - e.g. ``name = "sphinxcontrib.foo"``
+ - either in the ``[project]`` table, or under ``[tool.flit.module]`` if you
+ want to use a different name on PyPI (:ghpull:`468`).
+- Flit no longer generates a ``setup.py`` file in sdists (``.tar.gz`` packages)
+ by default (:ghpull:`462`). Modern packaging tools don't need this. You can
+ use the ``--setup-py`` flag to keep adding it for now, but this will probably
+ be removed at some point in the future.
+- Fixed how ``flit init`` handles authors' names with non-ASCII characters
+ (:ghpull:`460`).
+- When ``flit init`` generates a LICENSE file, the new ``pyproject.toml`` now
+ references it (:ghpull:`467`).
+
+Version 3.4
+-----------
+
+- Python 3.6 or above is now required, both for ``flit`` and ``flit_core``.
+- Add a ``--setup-py`` option to ``flit build`` and ``flit publish``, and a
+ warning when neither this nor ``--no-setup-py`` are specified (:ghpull:`431`).
+ A future version will stop generating ``setup.py`` files in sdists by default.
+- Add support for standardised editable installs - ``pip install -e`` -
+ according to :pep:`660` (:ghpull:`400`).
+- Add a ``--pypirc`` option for ``flit publish`` to specify an alternative path
+ to a ``.pypirc`` config file describing package indexes (:ghpull:`434`).
+- Fix installing dependencies specified in a ``[project]`` table (:ghpull:`433`).
+- Fix building wheels when ``SOURCE_DATE_EPOCH`` (see :doc:`reproducible`) is
+ set to a date before 1980 (:ghpull:`448`).
+- Switch to using the `tomli <https://pypi.org/project/tomli/>`_ TOML parser,
+ in common with other packaging projects (:ghpull:`438`).
+ This supports TOML version 1.0.
+- Add a document on :doc:`bootstrap` (:ghpull:`441`).
+
+Version 3.3
+-----------
+
+- ``PKG-INFO`` files in sdists are now generated the same way as ``METADATA`` in
+ wheels, fixing some issues with sdists (:ghpull:`410`).
+- ``flit publish`` now sends SHA-256 hashes, fixing uploads to GitLab package
+ repositories (:ghpull:`416`).
+- The ``[project]`` metadata table from :pep:`621` is now fully supported and
+ :ref:`documented <pyproject_toml_project>`. Projects using this can now
+ specify ``requires = ["flit_core >=3.2,<4"]`` in the ``[build-system]`` table.
+
+Version 3.2
+-----------
+
+- Experimental support for specifying metadata in a ``[project]`` table in
+ ``pyproject.toml`` as specified by :pep:`621` (:ghpull:`393`). If you try
+ using this, please specify ``requires = ["flit_core >=3.2.0,<3.3"]`` in the
+ ``[build-system]`` table for now, in case it needs to change for the next
+ release.
+- Fix writing METADATA file with multi-line information in certain fields
+ such as ``Author`` (:ghpull:`402`).
+- Fix building wheel when a directory such as LICENSES appears in the project
+ root directory (:ghpull:`401`).
+
+Version 3.1
+-----------
+
+- Update handling of names & version numbers in wheel filenames and
+ ``.dist-info`` folders in line with changes in the specs (:ghpull:`395`).
+- Switch from the deprecated ``pytoml`` package to ``toml`` (:ghpull:`378`).
+- Fix specifying backend-path in ``pyproject.toml`` for flit-core (as a list
+ instead of a string).
+
+Version 3.0
+-----------
+
+Breaking changes:
+
+- Projects must now provide Flit with information in ``pyproject.toml`` files,
+ not the older ``flit.ini`` format (:ghpull:`338`).
+- ``flit_core`` once again requires Python 3 (>=3.4). Packages that support
+ Python 2 can still be built by ``flit_core`` 2.x, but can't rely on new
+ features (:ghpull:`342`).
+- The deprecated ``flit installfrom`` command was removed (:ghpull:`334`).
+ You can use ``pip install git+https://github.com/...`` instead.
+
+Features and fixes:
+
+- Fix building sdists from a git repository with non-ASCII characters in
+ filenames (:ghpull:`346`).
+- Fix identifying the version number when the code contains a subscript
+ assignment before ``__version__ =`` (:ghpull:`348`).
+- Script entry points can now use a class method (:ghpull:`359`).
+- Set suitable permission bits on metadata files in wheels (:ghpull:`256`).
+- Fixed line endings in the ``RECORD`` file when installing on Windows
+ (:ghpull:`368`).
+- Support for recording the source of local installations, as in :pep:`610`
+ (:ghpull:`335`).
+- ``flit init`` will check for a README in the root of the project and
+ automatically set it as ``description-file`` (:ghpull:`337`).
+- Pygments is not required for checking reStructuredText READMEs (:ghpull:`357`).
+- Packages where the version number can be recognised without executing their
+ code don't need their dependencies installed to build, which should make them
+ build faster (:ghpull:`361`).
+- Ensure the installed ``RECORD`` file is predictably ordered (:ghpull:`366`).
+
+Version 2.3
+-----------
+
+- New projects created with :ref:`init_cmd` now declare that they require
+ ``flit_core >=2,<4`` (:ghpull:`328`). Any projects using ``pyproject.toml``
+ (not ``flit.ini``) should be compatible with flit 3.x.
+- Fix selecting files from a git submodule to include in an sdist
+ (:ghpull:`324`).
+- Fix checking classifiers when no writeable cache directory is available
+ (:ghpull:`319`).
+- Better errors when trying to install to a mis-spelled or missing Python
+ interpreter (:ghpull:`331`).
+- Fix specifying ``--repository`` before ``upload`` (:ghpull:`322`). Passing the
+ option like this is deprecated, and you should now pass it after ``upload``.
+- Documentation improvements (:ghpull:`327`, :ghpull:`318`, :ghpull:`314`)
+
+Version 2.2
+-----------
+
+- Allow underscores in package names with Python 2 (:ghpull:`305`).
+- Add a ``--no-setup-py`` option to build sdists without a backwards-compatible
+ ``setup.py`` file (:ghpull:`311`).
+- Fix the generated ``setup.py`` file for packages using a ``src/`` layout
+ (:ghpull:`303`).
+- Fix detecting when more than one file matches the module name specified
+ (:ghpull:`307`).
+- Fix installing to a venv on Windows with the ``--python`` option
+ (:ghpull:`300`).
+- Don't echo the command in scripts installed with ``--symlink`` or
+ ``--pth-file`` on Windows (:ghpull:`310`).
+- New ``bootstrap_dev.py`` script to set up a development installation of Flit
+ from the repository (:ghpull:`301`, :ghpull:`306`).
+
+Version 2.1
+-----------
+
+- Use compression when adding files to wheels.
+- Added the :envvar:`FLIT_INSTALL_PYTHON` environment variable (:ghpull:`295`),
+ to configure flit to always install into a Python other than the one it's
+ running on.
+- ``flit_core`` uses the ``intreehooks`` shim package to load its bootstrapping
+ backend, until a released version of pip supports the standard
+ ``backend-path`` mechanism.
+
+Version 2.0
+-----------
+
+Flit 2 is a major architecture change. The ``flit_core`` package now provides
+a :pep:`517` backend for building packages, while ``flit`` is a
+:doc:`command line interface <cmdline>` extending that.
+
+The build backend works on Python 2, so tools like pip should be able to install
+packages built with flit from source on Python 2.
+The ``flit`` command requires Python 3.5 or above.
+You will need to change the build-system table in your ``pyproject.toml`` file
+to look like this:
+
+.. code-block:: toml
+
+ [build-system]
+ requires = ["flit_core >=2,<4"]
+ build-backend = "flit_core.buildapi"
+
+Other changes include:
+
+- Support for storing your code under a ``src/`` folder (:ghpull:`260`).
+ You don't need to change any configuration if you do this.
+- Options to control what files are included in an sdist - see
+ :ref:`pyproject_toml_sdist` for the details.
+- Requirements can specify a URL 'direct reference', as an alternative to a
+ version number, with the syntax defined in :pep:`440`:
+ ``requests @ https://example.com/requests-2.22.0.tar.gz``.
+- Fix the shebang of scripts installed with the ``--python`` option and the
+ ``--symlink`` flag (:ghpull:`286`).
+- Installing with ``--deps develop`` now installs normal dependencies
+ as well as development dependencies.
+- Author email is no longer required in the metadata table (:ghpull:`289`).
+- More error messages are now shown without a traceback (:ghpull:`254`)
+
+Version 1.3
+-----------
+
+- Fix for building sdists from a subdirectory in a Mercurial repository
+ (:ghpull:`233`).
+- Fix for getting the docstring and version from modules defining their encoding
+ (:ghpull:`239`).
+- Fix for installing packages with ``flit installfrom`` (:ghpull:`221`).
+- Packages with requirements no longer get a spurious ``Provides-Extra: .none``
+ metadata entry (:ghissue:`228`).
+- Better check of whether ``python-requires`` includes any Python 2 version
+ (:ghpull:`232`).
+- Better check of home page URLs in ``flit init`` (:ghpull:`230`).
+- Better error message when the description file is not found (:ghpull:`234`).
+- Updated a help message to refer to ``pyproject.toml`` (:ghpull:`240`).
+- Improve tests of ``flit init`` (:ghpull:`229`).
+
+Version 1.2.1
+-------------
+
+- Fix for installing packages with ``flit install``.
+- Make ``requests_download`` an extra dependency, to avoid a circular build
+ dependency. To use ``flit installfrom``, you can install with
+ ``pip install flit[installfrom]``. Note that the ``installfrom`` subcommand
+ is deprecated, as it will soon be possible to use pip to install Flit projects
+ directly from a VCS URL.
+
+Version 1.2
+-----------
+
+- Fixes for packages specifying ``requires-extra``: sdists should now work, and
+ environment markers can be used together with ``requires-extra``.
+- Fix running ``flit installfrom`` without a config file present in the
+ working directory.
+- The error message for a missing or empty docstring tells you what file
+ the docstring should be in.
+- Improvements to documentation on version selectors for requirements.
+
+Version 1.1
+-----------
+
+- Packages can now have 'extras', specified as ``requires-extra`` in the
+ :doc:`pyproject.toml file <pyproject_toml>`. These are additional dependencies
+ for optional features.
+- The ``home-page`` metadata field is no longer required.
+- Additional project URLs are now validated.
+- ``flit -V`` is now equivalent to ``flit --version``.
+- Various improvements to documentation.
+
+Version 1.0
+-----------
+
+- The description file may now be written in reStructuredText, Markdown or
+ plain text. The file extension should indicate which of these formats it is
+ (``.rst``, ``.md`` or ``.txt``). Previously, only reStructuredText was
+ officially supported.
+- Multiple links (e.g. documentation, bug tracker) can now be specified in a
+ new :ref:`[tool.flit.metadata.urls] section <pyproject_toml_urls>` of
+ ``pyproject.toml``.
+- Dependencies are now correctly installed to the target Python when you use
+ the ``--symlink`` or ``--pth-file`` options.
+- Dependencies are only installed to the Python where Flit is running if
+ it fails to get the docstring and version number without them.
+- The commands deprecated in 0.13—``flit wheel``, ``flit sdist`` and
+ ``flit register``—have been removed.
+
+Although version 1.0 sounds like a milestone, there's nothing that makes this
+release especially significant. It doesn't represent a step change in stability
+or completeness. Flit has been gradually maturing for some time, and I chose
+this point to end the series of 0.x version numbers.
+
+Version 0.13
+------------
+
+- Better validation of several metadata fields (``dist-name``, ``requires``,
+ ``requires-python``, ``home-page``), and of the version number.
+- New :envvar:`FLIT_ALLOW_INVALID` environment variable to ignore validation
+ failures in case they go wrong.
+- The list of valid classifiers is now fetched from Warehouse (https://pypi.org),
+ rather than the older https://pypi.python.org site.
+- Deprecated ``flit wheel`` and ``flit sdist`` subcommands: use
+ :ref:`build_cmd`.
+- Deprecated ``flit register``: you can no longer register a package separately
+ from uploading it.
+
+Version 0.12.3
+--------------
+
+- Fix building and installing packages with a ``-`` in the distribution name.
+- Fix numbering in README.
+
+Version 0.12.2
+--------------
+
+- New tool to convert ``flit.ini`` to ``pyproject.toml``::
+
+ python3 -m flit.tomlify
+
+- Use the PAX tar format for sdists, as specified by PEP 517.
+
+Version 0.12.1
+--------------
+
+- Restore dependency on ``zipfile36`` backport package.
+- Add some missing options to documentation of ``flit install`` subcommand.
+- Rearrange environment variables in the docs.
+
+Version 0.12
+------------
+
+- Switch the config to ``pyproject.toml`` by default instead of ``flit.ini``,
+ and implement the PEP 517 API.
+- A new option ``--pth-file`` allows for development installation on Windows
+ (where ``--symlink`` usually won't work).
+- Normalise file permissions in the zip file, making builds more reproducible
+ across different systems.
+- Sdists (.tar.gz packages) can now also be reproducibly built by setting
+ :envvar:`SOURCE_DATE_EPOCH`.
+- For most modules, Flit can now extract the version number and docstring
+ without importing it. It will still fall back to importing where getting
+ these from the AST fails.
+- ``flit build`` will build the wheel from the sdist, helping to ensure that
+ files aren't left out of the sdist.
+- All list fields in the INI file now ignore blank lines (``requires``,
+ ``dev-requires``, ``classifiers``).
+- Fix the path separator in the ``RECORD`` file of a wheel built on Windows.
+- Some minor fixes to building reproducible wheels.
+- If building a wheel fails, the temporary file created will be cleaned up.
+- Various improvements to docs and README.
+
+Version 0.11.4
+--------------
+
+- Explicitly open various files as UTF-8, rather than relying on locale
+ encoding.
+- Link to docs from README.
+- Better test coverage, and a few minor fixes for problems revealed by tests.
+
+Version 0.11.3
+--------------
+
+- Fixed a bug causing failed uploads when the password is entered in the
+ terminal.
+
+Version 0.11.2
+--------------
+
+- A couple of behaviour changes when uploading to warehouse.
+
+Version 0.11.1
+--------------
+
+- Fixed a bug when you use flit to build an sdist from a subdirectory inside a
+ VCS checkout. The VCS is now correctly detected.
+- Fix the rst checker for newer versions of docutils, by upgrading the bundled
+ copy of readme_renderer.
+
+Version 0.11
+------------
+
+- Flit can now build sdists (tarballs) and upload them to PyPI, if your code is
+ in a git or mercurial repository. There are new commands:
+
+ - ``flit build`` builds both a wheel and an sdist.
+ - ``flit publish`` builds and uploads a wheel and an sdist.
+
+- Smarter ways of getting the information needed for upload:
+
+ - If you have the `keyring <https://github.com/jaraco/keyring>`_ package
+ installed, flit can use it to store your password, rather than keeping it
+ in plain text in ``~/.pypirc``.
+ - If ``~/.pypirc`` does not already exist, and you are prompted for your
+ username, flit will write it into that file.
+ - You can provide the information as environment variables:
+ :envvar:`FLIT_USERNAME`, :envvar:`FLIT_PASSWORD` and :envvar:`FLIT_INDEX_URL`.
+ Use this to upload packages from a CI service, for instance.
+
+- Include 'LICENSE' or 'COPYING' files in wheels.
+- Fix for ``flit install --symlink`` inside a virtualenv.
+
+
+Version 0.10
+------------
+
+- Downstream packagers can use the :envvar:`FLIT_NO_NETWORK` environment
+ variable to stop flit downloading data from the network.
+
+Version 0.9
+-----------
+
+- ``flit install`` and ``flit installfrom`` now take an optional ``--python`` argument,
+ with the path to the Python executable you want to install it for.
+ Using this, you can install modules to Python 2.
+- Installing a module normally (without ``--symlink``) builds a wheel and uses
+ pip to install it, which should work better in some corner cases.
+
+Version 0.8
+-----------
+
+- A new ``flit installfrom`` subcommand to install a project from a source
+ archive, such as from Github.
+- :doc:`Reproducible builds <reproducible>` - you can produce byte-for-byte
+ identical wheels.
+- A warning for non-canonical version numbers according to `PEP 440
+ <https://www.python.org/dev/peps/pep-0440/>`__.
+- Fix for installing projects on Windows.
+- Better error message when module docstring is only whitespace.
+
+Version 0.7
+-----------
+
+- A new ``dev-requires`` field in the config file for development requirements,
+ used when doing ``flit install``.
+- Added a ``--deps`` option for ``flit install`` to control which dependencies
+ are installed.
+- Flit can now be invoked with ``python -m flit``.
+
+Version 0.6
+-----------
+
+- ``flit install`` now ensures requirements specified in ``flit.ini`` are
+ installed, using pip.
+- If you specify a description file, flit now warns you if it's not valid
+ reStructuredText (since invalid reStructuredText is treated as plain text on
+ PyPI).
+- Improved the error message for mis-spelled keys in ``flit.ini``.
+
+Version 0.5
+-----------
+
+- A new ``flit init`` command to quickly define the essential basic metadata
+ for a package.
+- Support for entry points.
+- A new ``flit register`` command to register a package without uploading it,
+ for when you want to claim a name before you're ready to release.
+- Added a ``--repository`` option for specifying an alternative PyPI instance.
+- Added a ``--debug`` flag to show debug-level log messages.
+- Better error messages when the module docstring or ``__version__`` is missing.
+
+Version 0.4
+-----------
+
+- Users can now specify ``dist-name`` in the config file if they need to use
+ different names on PyPI and for imports.
+- Classifiers are now checked against a locally cached list of valid
+ classifiers.
+- Packages can be locally installed into environments for development.
+- Local installation now creates a PEP 376 ``.dist-info`` folder instead of
+ ``.egg-info``.
diff --git a/doc/index.rst b/doc/index.rst
new file mode 100644
index 0000000..9740a9c
--- /dev/null
+++ b/doc/index.rst
@@ -0,0 +1,34 @@
+Flit |version|
+==============
+
+.. raw:: html
+
+ <img src="_static/flit_logo_nobg_cropped.svg" width="200px" style="float: right"/>
+
+.. include:: ../README.rst
+
+Documentation contents
+----------------------
+
+.. toctree::
+ :maxdepth: 2
+
+ pyproject_toml
+ cmdline
+ upload
+ reproducible
+ rationale
+ bootstrap
+
+.. toctree::
+ :maxdepth: 1
+
+ development
+ history
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`search`
+
diff --git a/doc/make.bat b/doc/make.bat
new file mode 100644
index 0000000..965332d
--- /dev/null
+++ b/doc/make.bat
@@ -0,0 +1,242 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. xml to make Docutils-native XML files
+ echo. pseudoxml to make pseudoxml-XML files for display purposes
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Flit.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Flit.ghc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdf" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdfja" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf-ja
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+if "%1" == "xml" (
+ %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The XML files are in %BUILDDIR%/xml.
+ goto end
+)
+
+if "%1" == "pseudoxml" (
+ %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+ goto end
+)
+
+:end
diff --git a/doc/pyproject_toml.rst b/doc/pyproject_toml.rst
new file mode 100644
index 0000000..18d1d26
--- /dev/null
+++ b/doc/pyproject_toml.rst
@@ -0,0 +1,480 @@
+The pyproject.toml config file
+==============================
+
+This file lives next to the module or package.
+
+.. note::
+
+ Older versions of Flit (up to 0.11) used a :doc:`flit.ini file <flit_ini>` for
+ similar information. These files no longer work with Flit 3 and above.
+
+ Run ``python3 -m flit.tomlify`` to convert a ``flit.ini`` file to
+ ``pyproject.toml``.
+
+Build system section
+--------------------
+
+This tells tools like pip to build your project with flit. It's a standard
+defined by PEP 517. For any new project using Flit, it will look like this:
+
+.. code-block:: toml
+
+ [build-system]
+ requires = ["flit_core >=3.2,<4"]
+ build-backend = "flit_core.buildapi"
+
+Version constraints:
+
+- For now, all packages should specify ``<4``, so they won't be impacted by
+ changes in the next major version.
+- :ref:`pyproject_toml_project` requires ``flit_core >=3.2``
+- :ref:`pyproject_old_metadata` requires ``flit_core >=2,<4``
+- The older :doc:`flit.ini file <flit_ini>` requires ``flit_core <3``.
+- TOML features new in version 1.0 require ``flit_core >=3.4``.
+- ``flit_core`` 3.3 is the last version supporting Python 3.4 & 3.5. Packages
+ supporting these Python versions can only use `TOML v0.5
+ <https://toml.io/en/v0.5.0>`_.
+- Only ``flit_core`` 2.x can build packages on Python 2, so packages still
+ supporting Python 2 cannot use new-style metadata (the ``[project]`` table).
+
+.. _pyproject_toml_project:
+
+New style metadata
+------------------
+
+.. versionadded:: 3.2
+
+The new standard way to specify project metadata is in a ``[project]`` table,
+as defined by :pep:`621`. Flit works for now with either this or the older
+``[tool.flit.metadata]`` table (:ref:`described below <pyproject_old_metadata>`),
+but it won't allow you to mix them.
+
+A simple ``[project]`` table might look like this:
+
+.. code-block:: toml
+
+ [project]
+ name = "astcheck"
+ authors = [
+ {name = "Thomas Kluyver", email = "thomas@kluyver.me.uk"},
+ ]
+ readme = "README.rst"
+ classifiers = [
+ "License :: OSI Approved :: MIT License",
+ ]
+ requires-python = ">=3.5"
+ dynamic = ["version", "description"]
+
+The allowed fields are:
+
+name
+ The name your package will have on PyPI. This field is required. For Flit,
+ this name, with any hyphens replaced by underscores, is also the default value
+ of the import name (see :ref:`pyproject_module` if that needs to be
+ different).
+
+ .. versionchanged:: 3.8
+ Hyphens in the project name are now translated to underscores for the
+ import name.
+version
+ Version number as a string. If you want Flit to get this from a
+ ``__version__`` attribute, leave it out of the TOML config and include
+ "version" in the ``dynamic`` field.
+description
+ A one-line description of your project. If you want Flit to get this from
+ the module docstring, leave it out of the TOML config and include
+ "description" in the ``dynamic`` field.
+readme
+ A path (relative to the .toml file) to a file containing a longer description
+ of your package to show on PyPI. This should be written in `reStructuredText
+ <http://docutils.sourceforge.net/docs/user/rst/quickref.html>`_, Markdown or
+ plain text, and the filename should have the appropriate extension
+ (``.rst``, ``.md`` or ``.txt``). Alternatively, ``readme`` can be a table with
+ either a ``file`` key (a relative path) or a ``text`` key (literal text), and
+ an optional ``content-type`` key (e.g. ``text/x-rst``).
+requires-python
+ A version specifier for the versions of Python this requires, e.g. ``~=3.3`` or
+ ``>=3.3,<4``, which are equivalent.
+license
+ A table with either a ``file`` key (a relative path to a license file) or a
+ ``text`` key (the license text).
+authors
+ A list of tables with ``name`` and ``email`` keys (both optional) describing
+ the authors of the project.
+maintainers
+ Same format as authors.
+keywords
+ A list of words to help with searching for your package.
+classifiers
+ A list of `Trove classifiers <https://pypi.python.org/pypi?%3Aaction=list_classifiers>`_.
+ Add ``Private :: Do Not Upload`` into the list to prevent a private package
+ from being uploaded to PyPI by accident.
+dependencies & optional-dependencies
+ See :ref:`pyproject_project_dependencies`.
+urls
+ See :ref:`pyproject_project_urls`.
+scripts & gui-scripts
+ See :ref:`pyproject_project_scripts`.
+entry-points
+ See :ref:`pyproject_project_entrypoints`.
+dynamic
+ A list of field names which aren't specified here, for which Flit should
+ find a value at build time. Only "version" and "description" are accepted.
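+
+For example, if both ``version`` and ``description`` are dynamic, Flit reads
+them from the module being packaged, roughly like this minimal sketch of a
+hypothetical module:
+
+.. code-block:: python
+
+ """A one-line description of the package."""
+
+ __version__ = "0.1.0"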
+
+.. _pyproject_project_dependencies:
+
+Dependencies
+~~~~~~~~~~~~
+
+The ``dependencies`` field is a list of other packages from PyPI that this
+package needs. Each package may be followed by a version specifier like
+``>=4.1``, and/or an `environment marker`_
+after a semicolon. For example:
+
+ .. code-block:: toml
+
+ dependencies = [
+ "requests >=2.6",
+ "configparser; python_version == '2.7'",
+ ]
+
+The ``[project.optional-dependencies]`` table contains lists of packages needed
+for every optional feature. The requirements are specified in the same format as
+for ``dependencies``. For example:
+
+ .. code-block:: toml
+
+ [project.optional-dependencies]
+ test = [
+ "pytest >=2.7.3",
+ "pytest-cov",
+ ]
+ doc = ["sphinx"]
+
+You can call these optional features anything you want, although ``test`` and
+``doc`` are common ones. You specify them for installation in square brackets
+after the package name or directory, e.g. ``pip install '.[test]'``.
+
+.. _pyproject_project_urls:
+
+URLs table
+~~~~~~~~~~
+
+Your project's page on `pypi.org <https://pypi.org/>`_ can show a number of
+links. You can point people to documentation or a bug tracker, for example.
+
+This section is called ``[project.urls]`` in the file. You can use
+any names inside it. Here it is for flit:
+
+.. code-block:: toml
+
+ [project.urls]
+ Documentation = "https://flit.pypa.io"
+ Source = "https://github.com/pypa/flit"
+
+.. _pyproject_project_scripts:
+
+Scripts section
+~~~~~~~~~~~~~~~
+
+This section is called ``[project.scripts]`` in the file.
+Each key and value describes a shell command to be installed along with
+your package. These work like setuptools 'entry points'. Here's the section
+for flit:
+
+.. code-block:: toml
+
+ [project.scripts]
+ flit = "flit:main"
+
+
+This will create a ``flit`` command, which will call the function ``main()``
+imported from :mod:`flit`.
+
+A similar table called ``[project.gui-scripts]`` defines commands which launch
+a GUI. This only makes a difference on Windows, where GUI scripts are run
+without a console.
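+
+For example (a hypothetical GUI entry point in the same style):
+
+.. code-block:: toml
+
+ [project.gui-scripts]
+ doge-gui = "dogelang.gui:main"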
+
+.. _pyproject_project_entrypoints:
+
+Entry points sections
+~~~~~~~~~~~~~~~~~~~~~
+
+You can declare `entry points <http://entrypoints.readthedocs.io/en/latest/>`_
+using sections named :samp:`[project.entry-points.{groupname}]`. E.g. to
+provide a pygments lexer from your package:
+
+.. code-block:: toml
+
+ [project.entry-points."pygments.lexers"]
+ dogelang = "dogelang.lexer:DogeLexer"
+
+In each ``package:name`` value, the part before the colon should be an
+importable module name, and the latter part should be the name of an object
+accessible within that module. The details of what object to expose depend on
+the application you're extending.
+
+If the group name contains a dot, it must be quoted (``"pygments.lexers"``
+above). Script entry points are defined in :ref:`scripts tables
+<pyproject_project_scripts>`, so you can't use the group names
+``console_scripts`` or ``gui_scripts`` here.
+
+.. _pyproject_module:
+
+Module section
+~~~~~~~~~~~~~~
+
+If your package will have different names for installation and import,
+you should specify the install (PyPI) name in the ``[project]`` table
+(:ref:`see above <pyproject_toml_project>`), and the import name in a
+``[tool.flit.module]`` table:
+
+.. code-block:: toml
+
+ [project]
+ name = "pynsist"
+ # ...
+
+ [tool.flit.module]
+ name = "nsist"
+
+Flit looks for the source of the package by its import name. The source may be
+located either in the directory that holds the ``pyproject.toml`` file, or in a
+``src/`` subdirectory.
+
+.. _pyproject_old_metadata:
+
+Old style metadata
+------------------
+
+Flit's older way to specify metadata is in a ``[tool.flit.metadata]`` table,
+along with ``[tool.flit.scripts]`` and ``[tool.flit.entrypoints]``, described
+below. This is still recognised for now, but you can't mix it with
+:ref:`pyproject_toml_project`.
+
+There are three required fields:
+
+module
+ The name of the module/package, as you'd use in an import statement.
+author
+ Your name
+author-email
+ Your email address
+
+e.g. for flit itself
+
+.. code-block:: toml
+
+ [tool.flit.metadata]
+ module = "flit"
+ author = "Thomas Kluyver"
+ author-email = "thomas@kluyver.me.uk"
+
+.. versionchanged:: 1.1
+
+ ``home-page`` was previously required.
+
+The remaining fields are optional:
+
+home-page
+ A URL for the project, such as its Github repository.
+requires
+ A list of other packages from PyPI that this package needs. Each package may
+ be followed by a version specifier like ``(>=4.1)`` or ``>=4.1``, and/or an
+ `environment marker`_
+ after a semicolon. For example:
+
+ .. code-block:: toml
+
+ requires = [
+ "requests >=2.6",
+ "configparser; python_version == '2.7'",
+ ]
+
+requires-extra
+ Lists of packages needed for every optional feature. The requirements
+ are specified in the same format as for ``requires``. The requirements of
+ the two reserved extras ``test`` and ``doc`` as well as the extra ``dev``
+ are installed by ``flit install``. For example:
+
+ .. code-block:: toml
+
+ [tool.flit.metadata.requires-extra]
+ test = [
+ "pytest >=2.7.3",
+ "pytest-cov",
+ ]
+ doc = ["sphinx"]
+
+ .. versionadded:: 1.1
+
+description-file
+ A path (relative to the .toml file) to a file containing a longer description
+ of your package to show on PyPI. This should be written in `reStructuredText
+ <http://docutils.sourceforge.net/docs/user/rst/quickref.html>`_, Markdown or
+ plain text, and the filename should have the appropriate extension
+ (``.rst``, ``.md`` or ``.txt``).
+classifiers
+ A list of `Trove classifiers <https://pypi.python.org/pypi?%3Aaction=list_classifiers>`_.
+ Add ``Private :: Do Not Upload`` into the list to prevent a private package
+ from being uploaded to PyPI by accident.
+requires-python
+ A version specifier for the versions of Python this requires, e.g. ``~=3.3`` or
+ ``>=3.3,<4``, which are equivalent.
+dist-name
+ If you want your package's name on PyPI to be different from the importable
+ module name, set this to the PyPI name.
+keywords
+ Comma-separated list of words to help with searching for your package.
+license
+ The name of a license, if you're using one for which there isn't a Trove
+ classifier. It's recommended to use Trove classifiers instead of this in
+ most cases.
+maintainer, maintainer-email
+ Like author, for if you've taken over a project from someone else.
+
+Here was the metadata section from flit using the older style:
+
+.. code-block:: toml
+
+ [tool.flit.metadata]
+ module="flit"
+ author="Thomas Kluyver"
+ author-email="thomas@kluyver.me.uk"
+ home-page="https://github.com/pypa/flit"
+ requires=[
+ "flit_core >=2.2.0",
+ "requests",
+ "docutils",
+ "tomli",
+ "tomli-w",
+ ]
+ requires-python=">=3.6"
+ description-file="README.rst"
+ classifiers=[
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Programming Language :: Python :: 3",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ ]
+
+.. _pyproject_toml_urls:
+
+URLs subsection
+~~~~~~~~~~~~~~~
+
+Your project's page on `pypi.org <https://pypi.org/>`_ can show a number of
+links, in addition to the ``home-page`` URL described above. You can
+point people to documentation or a bug tracker, for example.
+
+This section is called ``[tool.flit.metadata.urls]`` in the file. You can use
+any names inside it. Here it is for flit:
+
+.. code-block:: toml
+
+ [tool.flit.metadata.urls]
+ Documentation = "https://flit.pypa.io"
+
+.. versionadded:: 1.0
+
+.. _pyproject_toml_scripts:
+
+Scripts section
+~~~~~~~~~~~~~~~
+
+A ``[tool.flit.scripts]`` table can be used along with ``[tool.flit.metadata]``.
+It is in the same format as the newer ``[project.scripts]`` table
+:ref:`described above <pyproject_project_scripts>`.
+
+Entry points sections
+~~~~~~~~~~~~~~~~~~~~~
+
+``[tool.flit.entrypoints]`` tables can be used along with ``[tool.flit.metadata]``.
+They are in the same format as the newer ``[project.entry-points]`` tables
+:ref:`described above <pyproject_project_entrypoints>`.
+
+.. _pyproject_toml_sdist:
+
+Sdist section
+-------------
+
+.. versionadded:: 2.0
+
+When you use :ref:`build_cmd` or :ref:`publish_cmd`, Flit builds an sdist
+(source distribution) tarball containing the files that are checked into version
+control (git or mercurial). If you want more control, or it doesn't recognise
+your version control system, you can give lists of paths or glob patterns as
+``include`` and ``exclude`` in this section. For example:
+
+.. code-block:: toml
+
+ [tool.flit.sdist]
+ include = ["doc/"]
+ exclude = ["doc/*.html"]
+
+These paths:
+
+- Always use ``/`` as a separator (POSIX style)
+- Must be relative paths from the directory containing ``pyproject.toml``
+- Cannot go outside that directory (no ``../`` paths)
+- Cannot contain control characters or ``<>:"\\``
+- Can refer to directories, in which case they include everything under the
+ directory, including subdirectories
+- Should match the case of the files they refer to, as case-insensitive matching
+ is platform dependent
+
+.. versionchanged:: 3.8
+ Include and exclude patterns can now use recursive glob patterns (``**``).
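+
+For instance, a hypothetical layout (the paths below are only placeholders)
+could include all reStructuredText sources under ``doc/`` while excluding a
+generated build directory:
+
+.. code-block:: toml
+
+ [tool.flit.sdist]
+ include = ["doc/**/*.rst"]
+ exclude = ["doc/**/_build"]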
+
+Exclusions have priority over inclusions. Bytecode is excluded by default and cannot
+be included.
+
+.. note::
+
+ If you are not using :ref:`build_cmd` but ``flit_core`` via another build
+ frontend, Flit doesn't check the VCS for files to include but instead
+ builds a 'minimal' sdist (which includes the files necessary to build a wheel).
+ You'll have to adapt your inclusion/exclusion rules to achieve the same result
+ as you'd get with :ref:`build_cmd`.
+
+.. _pyproject_toml_external_data:
+
+External data section
+---------------------
+
+.. versionadded:: 3.7
+
+Data files which your code will use should go inside the Python package folder.
+Flit will package these with no special configuration.
+
+However, sometimes it's useful to package external files for system integration,
+such as man pages or files defining a Jupyter extension. To do this, arrange
+the files within a directory such as ``data``, next to your ``pyproject.toml``
+file, and add a section like this:
+
+.. code-block:: toml
+
+ [tool.flit.external-data]
+ directory = "data"
+
+Paths within this directory are typically installed to corresponding paths under
+a prefix (such as a virtualenv directory). E.g. you might save a man page for a
+script as ``(data)/share/man/man1/foo.1``.
+
+Whether these files are detected by the systems they're meant to integrate with
+depends on how your package is installed and how those systems are configured.
+For instance, installing in a virtualenv usually doesn't affect anything outside
+that environment. Don't rely on these files being picked up unless you have
+close control of how the package will be installed.
+
+If you install a package with ``flit install --symlink``, a symlink is made
+for each file in the external data directory. Otherwise (including development
+installs with ``pip install -e``), these files are copied to their destination,
+so changes here won't take effect until you reinstall the package.
+
+.. note::
+
+ For users coming from setuptools: external data corresponds to setuptools'
+ ``data_files`` parameter, although setuptools offers more flexibility.
+
+.. _environment marker: https://www.python.org/dev/peps/pep-0508/#environment-markers
diff --git a/doc/rationale.rst b/doc/rationale.rst
new file mode 100644
index 0000000..559f79e
--- /dev/null
+++ b/doc/rationale.rst
@@ -0,0 +1,58 @@
+Why use Flit?
+=============
+
+*Make the easy things easy and the hard things possible* is an old motto from
+the Perl community. Flit is entirely focused on the *easy things* part of that,
+and leaves the hard things up to other tools.
+
+Specifically, the easy things are pure Python packages with no build steps
+(neither compiling C code, nor bundling Javascript, etc.). The vast majority of
+packages on PyPI are like this: plain Python code, with maybe some static data
+files like icons included.
+
+It's easy to underestimate the challenges involved in distributing and
+installing code, because it seems like you just need to copy some files into
+the right place. There's a whole lot of metadata and tooling that has to work
+together around that fundamental step. But with the right tooling, a developer
+who wants to release their code doesn't need to know about most of that.
+
+What, specifically, does Flit make easy?
+
+- ``flit init`` helps you set up the information Flit needs about your
+ package.
+- Subpackages are automatically included: you only need to specify the
+ top-level package.
+- Data files within a package directory are automatically included.
+ Missing data files have been a common packaging mistake with other tools.
+- The version number is taken from your package's ``__version__`` attribute,
+ so that always matches the version that tools like pip see.
+- ``flit publish`` uploads a package to PyPI, so you don't need a separate tool
+ to do this.
+
+Setuptools, the most common tool for Python packaging, now has shortcuts for
+many of the same things. But it has to stay compatible with projects published
+many years ago, which limits what it can do by default.
+
+Flit also has some support for :doc:`reproducible builds <reproducible>`,
+a feature which some people care about.
+
+There have been many other efforts to improve the user experience of Python
+packaging, such as `pbr <https://pypi.org/project/pbr/>`_, but before Flit,
+these tended to build on setuptools and distutils. That was a pragmatic
+decision, but it's hard to build something radically different on top of those
+libraries. The existence of Flit spurred the development of new standards,
+like :pep:`518` and :pep:`517`, which are now used by other packaging tools
+such as `Poetry <https://python-poetry.org/>`_ and
+`Enscons <https://pypi.org/project/enscons/>`_.
+
+Other options
+-------------
+
+If your package needs a build step, you won't be able to use Flit.
+`Setuptools <https://setuptools.readthedocs.io/en/latest/>`_ is the de-facto
+standard, but newer tools such as Enscons_ also cover this case.
+
+Flit also doesn't help you manage dependencies: you have to add them to
+``pyproject.toml`` by hand. Tools like Poetry_ and `Pipenv
+<https://pypi.org/project/pipenv/>`_ have features which help add and update
+dependencies on other packages.
diff --git a/doc/reproducible.rst b/doc/reproducible.rst
new file mode 100644
index 0000000..2894fc8
--- /dev/null
+++ b/doc/reproducible.rst
@@ -0,0 +1,34 @@
+Reproducible builds
+===================
+
+.. versionadded:: 0.8
+
+Wheels built by flit are reproducible: if you build from the same source code,
+you should be able to make wheels that are exactly identical, byte for byte.
+This is useful for verifying software. For more details, see
+`reproducible-builds.org <https://reproducible-builds.org/>`__.
+
+There is a caveat, however: wheels (which are zip files) include the
+modification timestamp from each file. This will
+probably be different on each computer, because it indicates when your local
+copy of the file was written, not when it was changed in version control.
+These timestamps can be overridden by the environment variable
+:envvar:`SOURCE_DATE_EPOCH`.
+
+.. code-block:: shell
+
+ SOURCE_DATE_EPOCH=$(date +%s)
+ flit publish
+ # Record the value of SOURCE_DATE_EPOCH in release notes for reproduction
+
+.. versionchanged:: 0.12
+ Normalising permission bits
+
+Flit normalises the permission bits of files copied into a wheel to either
+755 (executable) or 644. This means that a file is readable by all users
+and writable only by the user who owns it.
+
+The most popular version control systems only track the executable bit,
+so checking out the same repository on systems with different umasks
+(e.g. Debian and Fedora) produces files with different permissions. With Flit
+0.11 and earlier, this difference would produce non-identical wheels.
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 0000000..87126c9
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,3 @@
+sphinx ~= 4.2
+sphinxcontrib_github_alt ~= 1.2
+sphinx-rtd-theme ~= 1.0
diff --git a/doc/upload.rst b/doc/upload.rst
new file mode 100644
index 0000000..df5af5e
--- /dev/null
+++ b/doc/upload.rst
@@ -0,0 +1,77 @@
+Controlling package uploads
+===========================
+
+.. program:: flit publish
+
+The command ``flit publish`` will upload your package to a package index server.
+The default settings let you upload to `PyPI <https://pypi.org/>`_,
+the default Python Package Index, with a single user account.
+
+If you want to upload to other servers, or with more than one user account,
+or upload packages from a continuous integration job,
+you can configure Flit in two main ways:
+
+Using .pypirc
+-------------
+
+You can create or edit a config file in your home directory, ``~/.pypirc``,
+which will be used by default; you can also specify a custom location.
+This is also used by other Python tools such as `twine
+<https://pypi.python.org/pypi/twine>`_.
+
+For instance, to upload a package to the `Test PyPI server <https://test.pypi.org/>`_
+instead of the normal PyPI, use a config file looking like this:
+
+.. code-block:: ini
+
+ [distutils]
+ index-servers =
+ pypi
+ testpypi
+
+ [pypi]
+ repository = https://upload.pypi.org/legacy/
+ username = sirrobin # Replace with your PyPI username
+
+ [testpypi]
+ repository = https://test.pypi.org/legacy/
+ username = sirrobin # Replace with your TestPyPI username
+
+You can select an index server from this config file with the
+:option:`--repository` option::
+
+ flit publish --repository testpypi
+
+If you don't use this option,
+Flit will use the server called ``pypi`` in the config file. If that doesn't
+exist, it uploads to PyPI at ``https://upload.pypi.org/legacy/`` by default.
+
+If you publish a package and you don't have a ``.pypirc`` file, Flit will create
+it to store your username.
+
+Flit tries to store your password securely using the
+`keyring <https://pypi.python.org/pypi/keyring>`_ library.
+If keyring is not installed, Flit will ask for your password for each upload.
+Alternatively, you can manually add your password to the ``.pypirc`` file
+(``password = ...``).
+
+.. _upload_envvars:
+
+Using environment variables
+---------------------------
+
+You can specify a server to upload to with :envvar:`FLIT_INDEX_URL`, and
+pass credentials with :envvar:`FLIT_USERNAME` and :envvar:`FLIT_PASSWORD`.
+Environment variables take precedence over the config file, except if you use
+the :option:`--repository` option to explicitly pick a server from the config file.
+
+This can make it easier to automate uploads, for example to release packages
+from a continuous integration job.
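+
+As a sketch, a CI job might export these variables before publishing (the
+index URL and credentials below are placeholders; in a real job the password
+would come from the CI system's secret store):
+
+.. code-block:: shell
+
+ export FLIT_INDEX_URL=https://test.pypi.org/legacy/
+ export FLIT_USERNAME=sirrobin
+ export FLIT_PASSWORD="$SECRET_FROM_CI"
+ flit publish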
+
+.. warning::
+
+ Storing a password in an environment variable is convenient, but it's
+ `easy to accidentally leak it <https://www.diogomonica.com/2017/03/27/why-you-shouldnt-use-env-variables-for-secret-data/>`_.
+ Look out for scripts that helpfully print all environment variables for
+ debugging, and remember that other scripts and libraries you run in
+ that environment have access to your password.
diff --git a/flit/__init__.py b/flit/__init__.py
new file mode 100644
index 0000000..2d0ec4d
--- /dev/null
+++ b/flit/__init__.py
@@ -0,0 +1,216 @@
+"""A simple packaging tool for simple packages."""
+import argparse
+import logging
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+from typing import Optional
+
+from flit_core import common
+from .config import ConfigError
+from .log import enable_colourful_output
+
+__version__ = '3.8.0'
+
+log = logging.getLogger(__name__)
+
+
+class PythonNotFoundError(FileNotFoundError): pass
+
+
+def find_python_executable(python: Optional[str] = None) -> str:
+ """Returns an absolute filepath to the executable of Python to use."""
+ if not python:
+ python = os.environ.get("FLIT_INSTALL_PYTHON")
+ if not python:
+ return sys.executable
+ if os.path.isabs(python): # sys.executable is absolute too
+ return python
+ # get absolute filepath of {python}
+ # shutil.which may give a different result to the raw subprocess call
+ # see https://github.com/pypa/flit/pull/300 and https://bugs.python.org/issue38905
+ resolved_python = shutil.which(python)
+ if resolved_python is None:
+ raise PythonNotFoundError("Unable to resolve Python executable {!r}".format(python))
+ try:
+ return subprocess.check_output(
+ [resolved_python, "-c", "import sys; print(sys.executable)"],
+ universal_newlines=True,
+ ).strip()
+ except Exception as e:
+ raise PythonNotFoundError(
+ "{} occurred trying to find the absolute filepath of Python executable {!r} ({!r})".format(
+ e.__class__.__name__, python, resolved_python
+ )
+ ) from e
+
+
+def add_shared_install_options(parser: argparse.ArgumentParser):
+ parser.add_argument('--user', action='store_true', default=None,
+ help="Do a user-local install (default if site.ENABLE_USER_SITE is True)"
+ )
+ parser.add_argument('--env', action='store_false', dest='user',
+ help="Install into sys.prefix (default if site.ENABLE_USER_SITE is False, i.e. in virtualenvs)"
+ )
+ parser.add_argument('--python',
+ help="Target Python executable, if different from the one running flit"
+ )
+ parser.add_argument('--deps', choices=['all', 'production', 'develop', 'none'], default='all',
+ help="Which set of dependencies to install. If --deps=develop, the extras dev, doc, and test are installed"
+ )
+ parser.add_argument('--only-deps', action='store_true',
+ help="Install only dependencies of this package, and not the package itself"
+ )
+ parser.add_argument('--extras', default=(), type=lambda l: l.split(',') if l else (),
+ help="Install the dependencies of these (comma separated) extras additionally to the ones implied by --deps. "
+ "--extras=all can be useful in combination with --deps=production, --deps=none precludes using --extras"
+ )
+
+
+def main(argv=None):
+ ap = argparse.ArgumentParser()
+ ap.add_argument('-f', '--ini-file', type=pathlib.Path, default='pyproject.toml')
+ ap.add_argument('-V', '--version', action='version', version='Flit '+__version__)
+ # --repository now belongs on 'flit publish' - it's still here for
+ # compatibility with scripts passing it before the subcommand.
+ ap.add_argument('--repository', dest='deprecated_repository', help=argparse.SUPPRESS)
+ ap.add_argument('--debug', action='store_true', help=argparse.SUPPRESS)
+ ap.add_argument('--logo', action='store_true', help=argparse.SUPPRESS)
+ subparsers = ap.add_subparsers(title='subcommands', dest='subcmd')
+
+ # flit build --------------------------------------------
+ parser_build = subparsers.add_parser('build',
+ help="Build wheel and sdist",
+ )
+
+ parser_build.add_argument('--format', action='append',
+ help="Select a format to build. Options: 'wheel', 'sdist'"
+ )
+
+ parser_build.add_argument('--setup-py', action='store_true',
+ help=("Generate a setup.py file in the sdist. "
+ "The sdist will work with older tools that predate PEP 517. "
+ )
+ )
+
+ parser_build.add_argument('--no-setup-py', action='store_true',
+ help=("Don't generate a setup.py file in the sdist. This is the default. "
+ "The sdist will only work with tools that support PEP 517, "
+ "but the wheel will still be usable by any compatible tool."
+ )
+ )
+
+ # flit publish --------------------------------------------
+ parser_publish = subparsers.add_parser('publish',
+ help="Upload wheel and sdist",
+ )
+
+ parser_publish.add_argument('--format', action='append',
+ help="Select a format to publish. Options: 'wheel', 'sdist'"
+ )
+
+ parser_publish.add_argument('--setup-py', action='store_true',
+ help=("Generate a setup.py file in the sdist. "
+ "The sdist will work with older tools that predate PEP 517. "
+ "This is the default for now, but will change in a future version."
+ )
+ )
+
+ parser_publish.add_argument('--no-setup-py', action='store_true',
+ help=("Don't generate a setup.py file in the sdist. "
+ "The sdist will only work with tools that support PEP 517, "
+ "but the wheel will still be usable by any compatible tool."
+ )
+ )
+
+ parser_publish.add_argument('--pypirc',
+ help="The .pypirc config file to be used. DEFAULT = \"~/.pypirc\""
+ )
+
+ parser_publish.add_argument('--repository',
+ help="Name of the repository to upload to (must be in the specified .pypirc file)"
+ )
+
+ # flit install --------------------------------------------
+ parser_install = subparsers.add_parser('install',
+ help="Install the package",
+ )
+ parser_install.add_argument('-s', '--symlink', action='store_true',
+ help="Symlink the module/package into site packages instead of copying it"
+ )
+ parser_install.add_argument('--pth-file', action='store_true',
+ help="Add .pth file for the module/package to site packages instead of copying it"
+ )
+ add_shared_install_options(parser_install)
+
+ # flit init --------------------------------------------
+ parser_init = subparsers.add_parser('init',
+ help="Prepare pyproject.toml for a new package"
+ )
+
+ args = ap.parse_args(argv)
+
+ if args.ini_file.suffix == '.ini':
+ sys.exit("flit.ini format is no longer supported. You can use "
+ "'python3 -m flit.tomlify' to convert it to pyproject.toml")
+
+ if args.subcmd not in {'init'} and not args.ini_file.is_file():
+ sys.exit('Config file {} does not exist'.format(args.ini_file))
+
+ enable_colourful_output(logging.DEBUG if args.debug else logging.INFO)
+
+ log.debug("Parsed arguments %r", args)
+
+ if args.logo:
+ from .logo import clogo
+ print(clogo.format(version=__version__))
+ sys.exit(0)
+
+ def gen_setup_py():
+ if not (args.setup_py or args.no_setup_py):
+ return False
+ return args.setup_py
+
+ if args.subcmd == 'build':
+ from .build import main
+ try:
+ main(args.ini_file, formats=set(args.format or []),
+ gen_setup_py=gen_setup_py())
+ except(common.NoDocstringError, common.VCSError, common.NoVersionError) as e:
+ sys.exit(e.args[0])
+ elif args.subcmd == 'publish':
+ if args.deprecated_repository:
+ log.warning("Passing --repository before the 'upload' subcommand is deprecated: pass it after")
+ repository = args.repository or args.deprecated_repository
+ from .upload import main
+ main(args.ini_file, repository, args.pypirc, formats=set(args.format or []),
+ gen_setup_py=gen_setup_py())
+
+ elif args.subcmd == 'install':
+ from .install import Installer
+ try:
+ python = find_python_executable(args.python)
+ installer = Installer.from_ini_path(
+ args.ini_file,
+ user=args.user,
+ python=python,
+ symlink=args.symlink,
+ deps=args.deps,
+ extras=args.extras,
+ pth=args.pth_file
+ )
+ if args.only_deps:
+ installer.install_requirements()
+ else:
+ installer.install()
+ except (ConfigError, PythonNotFoundError, common.NoDocstringError, common.NoVersionError) as e:
+ sys.exit(e.args[0])
+
+ elif args.subcmd == 'init':
+ from .init import TerminalIniter
+ TerminalIniter().initialise()
+ else:
+ ap.print_help()
+ sys.exit(1)
diff --git a/flit/__main__.py b/flit/__main__.py
new file mode 100644
index 0000000..8b71b1b
--- /dev/null
+++ b/flit/__main__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from . import main
+
+main()
diff --git a/flit/_get_dirs.py b/flit/_get_dirs.py
new file mode 100644
index 0000000..9a80e63
--- /dev/null
+++ b/flit/_get_dirs.py
@@ -0,0 +1,27 @@
+"""get_dirs() is pulled out as a separate file so we can run it in a target Python.
+"""
+import os
+import sys
+import sysconfig
+
+def get_dirs(user=True):
+ """Get the 'scripts' and 'purelib' directories we'll install into.
+
+ This is now a thin wrapper around sysconfig.get_paths(). It's not inlined,
+ because some tests mock it out to install to a different location.
+ """
+ if user:
+ if (sys.platform == "darwin") and sysconfig.get_config_var('PYTHONFRAMEWORK'):
+ return sysconfig.get_paths('osx_framework_user')
+ return sysconfig.get_paths(os.name + '_user')
+ else:
+ # The default scheme is 'posix_prefix' or 'nt', and should work for e.g.
+ # installing into a virtualenv
+ return sysconfig.get_paths()
+
+
+if __name__ == '__main__':
+ import json
+ user = '--user' in sys.argv
+ dirs = get_dirs(user)
+ json.dump(dirs, sys.stdout)
diff --git a/flit/build.py b/flit/build.py
new file mode 100644
index 0000000..2917b18
--- /dev/null
+++ b/flit/build.py
@@ -0,0 +1,60 @@
+"""flit build - build both wheel and sdist"""
+
+from contextlib import contextmanager
+import logging
+import os
+from pathlib import Path
+import tarfile
+from tempfile import TemporaryDirectory
+from types import SimpleNamespace
+import sys
+
+from .config import read_flit_config, ConfigError
+from .sdist import SdistBuilder
+from .wheel import make_wheel_in
+
+log = logging.getLogger(__name__)
+
+ALL_FORMATS = {'wheel', 'sdist'}
+
+@contextmanager
+def unpacked_tarball(path):
+ tf = tarfile.open(str(path))
+ with TemporaryDirectory() as tmpdir:
+ tf.extractall(tmpdir)
+ files = os.listdir(tmpdir)
+ assert len(files) == 1, files
+ yield os.path.join(tmpdir, files[0])
+
+def main(ini_file: Path, formats=None, gen_setup_py=True):
+ """Build wheel and sdist"""
+ if not formats:
+ formats = ALL_FORMATS
+ elif not formats.issubset(ALL_FORMATS):
+ raise ValueError("Unknown package formats: {}".format(formats - ALL_FORMATS))
+
+ sdist_info = wheel_info = None
+ dist_dir = ini_file.parent / 'dist'
+ dist_dir.mkdir(parents=True, exist_ok=True)
+
+ try:
+ # Load the config file to make sure it gets validated
+ read_flit_config(ini_file)
+
+ if 'sdist' in formats:
+ sb = SdistBuilder.from_ini_path(ini_file)
+ sdist_file = sb.build(dist_dir, gen_setup_py=gen_setup_py)
+ sdist_info = SimpleNamespace(builder=sb, file=sdist_file)
+ # When we're building both, build the wheel from the unpacked sdist.
+ # This helps ensure that the sdist contains all the necessary files.
+ if 'wheel' in formats:
+ with unpacked_tarball(sdist_file) as tmpdir:
+ log.debug('Building wheel from unpacked sdist %s', tmpdir)
+ tmp_ini_file = Path(tmpdir, ini_file.name)
+ wheel_info = make_wheel_in(tmp_ini_file, dist_dir)
+ elif 'wheel' in formats:
+ wheel_info = make_wheel_in(ini_file, dist_dir)
+ except ConfigError as e:
+ sys.exit('Config error: {}'.format(e))
+
+ return SimpleNamespace(wheel=wheel_info, sdist=sdist_info)
diff --git a/flit/buildapi.py b/flit/buildapi.py
new file mode 100644
index 0000000..0de2e9d
--- /dev/null
+++ b/flit/buildapi.py
@@ -0,0 +1 @@
+from flit_core.buildapi import *
diff --git a/flit/config.py b/flit/config.py
new file mode 100644
index 0000000..9186e0a
--- /dev/null
+++ b/flit/config.py
@@ -0,0 +1,18 @@
+import os
+
+from flit_core.config import *
+from flit_core.config import read_flit_config as _read_flit_config_core
+from .validate import validate_config
+
+
+def read_flit_config(path):
+ """Read and check the `pyproject.toml` or `flit.ini` file with data about the package.
+ """
+ res = _read_flit_config_core(path)
+
+ if validate_config(res):
+ if os.environ.get('FLIT_ALLOW_INVALID'):
+ log.warning("Allowing invalid data (FLIT_ALLOW_INVALID set). Uploads may still fail.")
+ else:
+ raise ConfigError("Invalid config values (see log)")
+ return res
diff --git a/flit/init.py b/flit/init.py
new file mode 100644
index 0000000..4524d60
--- /dev/null
+++ b/flit/init.py
@@ -0,0 +1,251 @@
+from datetime import date
+import json
+import os
+from pathlib import Path
+import re
+import sys
+import tomli_w
+
+def get_data_dir():
+ """Get the directory path for flit user data files.
+ """
+ home = os.path.realpath(os.path.expanduser('~'))
+
+ if sys.platform == 'darwin':
+ d = Path(home, 'Library')
+ elif os.name == 'nt':
+ appdata = os.environ.get('APPDATA', None)
+ if appdata:
+ d = Path(appdata)
+ else:
+ d = Path(home, 'AppData', 'Roaming')
+ else:
+ # Linux, non-OS X Unix, AIX, etc.
+ xdg = os.environ.get("XDG_DATA_HOME", None)
+ d = Path(xdg) if xdg else Path(home, '.local/share')
+
+ return d / 'flit'
+
+def get_defaults():
+ try:
+ with (get_data_dir() / 'init_defaults.json').open(encoding='utf-8') as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return {}
+
+def store_defaults(d):
+ data_dir = get_data_dir()
+ try:
+ data_dir.mkdir(parents=True)
+ except FileExistsError:
+ pass
+ with (data_dir / 'init_defaults.json').open('w', encoding='utf-8') as f:
+ json.dump(d, f, indent=2)
+
+license_choices = [
+ ('mit', "MIT - simple and permissive"),
+ ('apache', "Apache - explicitly grants patent rights"),
+ ('gpl3', "GPL - ensures that code based on this is shared with the same terms"),
+ ('skip', "Skip - choose a license later"),
+]
+
+license_names_to_classifiers = {
+ 'mit': 'License :: OSI Approved :: MIT License',
+ 'gpl3': 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
+ 'apache': 'License :: OSI Approved :: Apache Software License'
+}
+
+license_templates_dir = Path(__file__).parent / 'license_templates'
+
+class IniterBase:
+ def __init__(self, directory='.'):
+ self.directory = Path(directory)
+ self.defaults = get_defaults()
+
+ def validate_email(self, s):
+ # Properly validating an email address is much more complex
+ return bool(re.match(r'.+@.+', s)) or s == ""
+
+ def validate_homepage(self, s):
+ return not s or s.startswith(('http://', 'https://'))
+
+ def guess_module_name(self):
+ packages, modules = [], []
+ for p in self.directory.iterdir():
+ if not p.stem.isidentifier():
+ continue
+
+ if p.is_dir() and (p / '__init__.py').is_file():
+ if p.name not in {'test', 'tests'}:
+ packages.append(p.name)
+
+ elif p.is_file() and p.suffix == '.py':
+ if p.stem not in {'setup'} and not p.name.startswith('test_'):
+ modules.append(p.stem)
+
+ src_dir = self.directory / 'src'
+ if src_dir.is_dir():
+ for p in src_dir.iterdir():
+ if not p.stem.isidentifier():
+ continue
+
+ if p.is_dir() and (p / '__init__.py').is_file():
+ if p.name not in {'test', 'tests'}:
+ packages.append(p.name)
+
+ elif p.is_file() and p.suffix == '.py':
+ if p.stem not in {'setup'} and not p.name.startswith('test_'):
+ modules.append(p.stem)
+
+ if len(packages) == 1:
+ return packages[0]
+ elif len(packages) == 0 and len(modules) == 1:
+ return modules[0]
+ else:
+ return None
+
+ def update_defaults(self, author, author_email, module, home_page, license):
+ new_defaults = {'author': author, 'author_email': author_email,
+ 'license': license}
+ name_chunk_pat = r'\b{}\b'.format(re.escape(module))
+ if re.search(name_chunk_pat, home_page):
+ new_defaults['home_page_template'] = \
+ re.sub(name_chunk_pat, '{modulename}', home_page, flags=re.I)
+ if any(new_defaults[k] != self.defaults.get(k) for k in new_defaults):
+ self.defaults.update(new_defaults)
+ store_defaults(self.defaults)
+
+ def write_license(self, name, author):
+ if (self.directory / 'LICENSE').exists():
+ return
+ year = date.today().year
+ with (license_templates_dir / name).open(encoding='utf-8') as f:
+ license_text = f.read()
+
+ with (self.directory / 'LICENSE').open('w', encoding='utf-8') as f:
+ f.write(license_text.format(year=year, author=author))
+
+ def find_readme(self):
+ allowed = ("readme.md","readme.rst","readme.txt")
+ for fl in self.directory.glob("*.*"):
+ if fl.name.lower() in allowed:
+ return fl.name
+ return None
+
+
+class TerminalIniter(IniterBase):
+ def prompt_text(self, prompt, default, validator, retry_msg="Try again."):
+ if default is not None:
+ p = "{} [{}]: ".format(prompt, default)
+ else:
+ p = prompt + ': '
+ while True:
+ response = input(p)
+ if response == '' and default is not None:
+ response = default
+ if validator(response):
+ return response
+
+ print(retry_msg)
+
+ def prompt_options(self, prompt, options, default=None):
+ default_ix = None
+
+ print(prompt)
+ for i, (key, text) in enumerate(options, start=1):
+ print("{}. {}".format(i, text))
+ if key == default:
+ default_ix = i
+
+ while True:
+ p = "Enter 1-" + str(len(options))
+ if default_ix is not None:
+ p += ' [{}]'.format(default_ix)
+ response = input(p+': ')
+ if (default_ix is not None) and response == '':
+ return default
+
+ if response.isnumeric():
+ ir = int(response)
+ if 1 <= ir <= len(options):
+ return options[ir-1][0]
+ print("Try again.")
+
+ def initialise(self):
+ if (self.directory / 'pyproject.toml').exists():
+ resp = input("pyproject.toml exists - overwrite it? [y/N]: ")
+ if (not resp) or resp[0].lower() != 'y':
+ return
+
+ module = self.prompt_text('Module name', self.guess_module_name(),
+ str.isidentifier)
+ author = self.prompt_text('Author', self.defaults.get('author'),
+ lambda s: True)
+ author_email = self.prompt_text('Author email',
+ self.defaults.get('author_email'), self.validate_email)
+ if 'home_page_template' in self.defaults:
+ home_page_default = self.defaults['home_page_template'].replace(
+ '{modulename}', module)
+ else:
+ home_page_default = None
+ home_page = self.prompt_text('Home page', home_page_default, self.validate_homepage,
+ retry_msg="Should start with http:// or https:// - try again.")
+ license = self.prompt_options('Choose a license (see http://choosealicense.com/ for more info)',
+ license_choices, self.defaults.get('license'))
+
+ readme = self.find_readme()
+
+ self.update_defaults(author=author, author_email=author_email,
+ home_page=home_page, module=module, license=license)
+
+ # Format information as TOML
+ # This is ugly code, but I want the generated pyproject.toml, which
+ # will mostly be edited by hand, to look a particular way - e.g. authors
+ # in inline tables. It's easier to 'cheat' with some string formatting
+ # than to do this through a TOML library.
+ author_info = []
+ if author:
+ author_info.append(f'name = {json.dumps(author, ensure_ascii=False)}')
+ if author_email:
+ author_info.append(f'email = {json.dumps(author_email)}')
+ if author_info:
+ authors_list = "[{%s}]" % ", ".join(author_info)
+ else:
+ authors_list = "[]"
+
+ classifiers = []
+ if license != 'skip':
+ classifiers = [license_names_to_classifiers[license]]
+ self.write_license(license, author)
+
+ with (self.directory / 'pyproject.toml').open('w', encoding='utf-8') as f:
+ f.write(TEMPLATE.format(
+ name=json.dumps(module), authors=authors_list
+ ))
+ if readme:
+ f.write(tomli_w.dumps({'readme': readme}))
+ if license != 'skip':
+ f.write('license = {file = "LICENSE"}\n')
+ if classifiers:
+ f.write(f"classifiers = {json.dumps(classifiers)}\n")
+ f.write('dynamic = ["version", "description"]\n')
+ if home_page:
+ f.write("\n" + tomli_w.dumps({
+ 'project': {'urls': {'Home': home_page}}
+ }))
+
+ print()
+ print("Written pyproject.toml; edit that file to add optional extra info.")
+
+TEMPLATE = """\
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = {name}
+authors = {authors}
+"""
+
+if __name__ == '__main__':
+ TerminalIniter().initialise()
diff --git a/flit/install.py b/flit/install.py
new file mode 100644
index 0000000..3ea9a4b
--- /dev/null
+++ b/flit/install.py
@@ -0,0 +1,432 @@
+"""Install packages locally for development
+"""
+import logging
+import os
+import os.path as osp
+import csv
+import json
+import pathlib
+import random
+import shutil
+import site
+import sys
+import tempfile
+from subprocess import check_call, check_output
+import sysconfig
+
+from flit_core import common
+from .config import read_flit_config
+from .wheel import WheelBuilder
+from ._get_dirs import get_dirs
+
+log = logging.getLogger(__name__)
+
+def _requires_dist_to_pip_requirement(requires_dist):
+ """Parse "Foo (v); python_version == '2.x'" from Requires-Dist
+
+ Returns pip-style appropriate for requirements.txt.
+ """
+ env_mark = ''
+ if ';' in requires_dist:
+ name_version, env_mark = requires_dist.split(';', 1)
+ else:
+ name_version = requires_dist
+ if '(' in name_version:
+ # turn 'name (X)' and 'name (<X.Y)'
+ # into 'name == X' and 'name < X.Y'
+ name, version = name_version.split('(', 1)
+ name = name.strip()
+ version = version.replace(')', '').strip()
+ if not any(c in version for c in '=<>'):
+ version = '==' + version
+ name_version = name + version
+ # re-add environment marker
+ return ' ;'.join([name_version, env_mark])
+
+def test_writable_dir(path):
+ """Check if a directory is writable.
+
+ Uses os.access() on POSIX, tries creating files on Windows.
+ """
+ if os.name == 'posix':
+ return os.access(path, os.W_OK)
+
+ return _test_writable_dir_win(path)
+
+def _test_writable_dir_win(path):
+ # os.access doesn't work on Windows: http://bugs.python.org/issue2528
+ # and we can't use tempfile: http://bugs.python.org/issue22107
+ basename = 'accesstest_deleteme_fishfingers_custard_'
+ alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'
+ for i in range(10):
+ name = basename + ''.join(random.choice(alphabet) for _ in range(6))
+ file = osp.join(path, name)
+ try:
+ with open(file, mode='xb'):
+ pass
+ except FileExistsError:
+ continue
+ except PermissionError:
+ # This could be because there's a directory with the same name.
+ # But it's highly unlikely there's a directory called that,
+ # so we'll assume it's because the parent directory is not writable.
+ return False
+ else:
+ os.unlink(file)
+ return True
+
+ # This should never be reached
+ msg = ('Unexpected condition testing for writable directory {!r}. '
+ 'Please open an issue on flit to debug why this occurred.') # pragma: no cover
+ raise EnvironmentError(msg.format(path)) # pragma: no cover
+
+class RootInstallError(Exception):
+ def __str__(self):
+ return ("Installing packages as root is not recommended. "
+ "To allow this, set FLIT_ROOT_INSTALL=1 and try again.")
+
+class DependencyError(Exception):
+ def __str__(self):
+ return 'To install dependencies for extras, you cannot set deps=none.'
+
+class Installer(object):
+ def __init__(self, directory, ini_info, user=None, python=sys.executable,
+ symlink=False, deps='all', extras=(), pth=False):
+ self.directory = directory
+ self.ini_info = ini_info
+ self.python = python
+ self.symlink = symlink
+ self.pth = pth
+ self.deps = deps
+ self.extras = extras
+ if deps != 'none' and os.environ.get('FLIT_NO_NETWORK', ''):
+ self.deps = 'none'
+ log.warning('Not installing dependencies, because FLIT_NO_NETWORK is set')
+ if deps == 'none' and extras:
+ raise DependencyError()
+
+ self.module = common.Module(self.ini_info.module, directory)
+
+ if (hasattr(os, 'getuid') and (os.getuid() == 0) and
+ (not os.environ.get('FLIT_ROOT_INSTALL'))):
+ raise RootInstallError
+
+ if user is None:
+ self.user = self._auto_user(python)
+ else:
+ self.user = user
+ log.debug('User install? %s', self.user)
+
+ self.installed_files = []
+
+ @classmethod
+ def from_ini_path(cls, ini_path, user=None, python=sys.executable,
+ symlink=False, deps='all', extras=(), pth=False):
+ ini_info = read_flit_config(ini_path)
+ return cls(ini_path.parent, ini_info, user=user, python=python,
+ symlink=symlink, deps=deps, extras=extras, pth=pth)
+
+ def _run_python(self, code=None, file=None, extra_args=()):
+ if code and file:
+ raise ValueError('Specify code or file, not both')
+ if not (code or file):
+ raise ValueError('Specify code or file')
+
+ if code:
+ args = [self.python, '-c', code]
+ else:
+ args = [self.python, file]
+ args.extend(extra_args)
+ env = os.environ.copy()
+ env['PYTHONIOENCODING'] = 'utf-8'
+ # On Windows, shell needs to be True to pick up our local PATH
+ # when finding the Python command.
+ shell = (os.name == 'nt')
+ return check_output(args, shell=shell, env=env).decode('utf-8')
+
+ def _auto_user(self, python):
+ """Default guess for whether to do user-level install.
+
+ This should be True for system Python, and False in an env.
+ """
+ if python == sys.executable:
+ user_site = site.ENABLE_USER_SITE
+ lib_dir = sysconfig.get_path('purelib')
+ else:
+ out = self._run_python(code=
+ ("import sysconfig, site; "
+ "print(site.ENABLE_USER_SITE); "
+ "print(sysconfig.get_path('purelib'))"))
+ user_site, lib_dir = out.split('\n', 1)
+ user_site = (user_site.strip() == 'True')
+ lib_dir = lib_dir.strip()
+
+ if not user_site:
+ # No user site packages - probably a virtualenv
+ log.debug('User site packages not available - env install')
+ return False
+
+ log.debug('Checking access to %s', lib_dir)
+ return not test_writable_dir(lib_dir)
+
+ def install_scripts(self, script_defs, scripts_dir):
+ for name, ep in script_defs.items():
+ module, func = common.parse_entry_point(ep)
+ import_name = func.split('.')[0]
+ script_file = pathlib.Path(scripts_dir) / name
+ log.info('Writing script to %s', script_file)
+ with script_file.open('w', encoding='utf-8') as f:
+ f.write(common.script_template.format(
+ interpreter=self.python,
+ module=module,
+ import_name=import_name,
+ func=func
+ ))
+ script_file.chmod(0o755)
+
+ self.installed_files.append(script_file)
+
+ if sys.platform == 'win32':
+ cmd_file = script_file.with_suffix('.cmd')
+ cmd = '@echo off\r\n"{python}" "%~dp0\\{script}" %*\r\n'.format(
+ python=self.python, script=name)
+ log.debug("Writing script wrapper to %s", cmd_file)
+ with cmd_file.open('w') as f:
+ f.write(cmd)
+
+ self.installed_files.append(cmd_file)
+
+ def install_data_dir(self, target_data_dir):
+ for src_path in common.walk_data_dir(self.ini_info.data_directory):
+ rel_path = os.path.relpath(src_path, self.ini_info.data_directory)
+ dst_path = os.path.join(target_data_dir, rel_path)
+ os.makedirs(os.path.dirname(dst_path), exist_ok=True)
+ if self.symlink:
+ os.symlink(os.path.realpath(src_path), dst_path)
+ else:
+ shutil.copy2(src_path, dst_path)
+ self.installed_files.append(dst_path)
+
+ def _record_installed_directory(self, path):
+ for dirpath, dirnames, files in os.walk(path):
+ for f in files:
+ self.installed_files.append(osp.join(dirpath, f))
+
+ def _extras_to_install(self):
+ extras_to_install = set(self.extras)
+ if self.deps == 'all' or 'all' in extras_to_install:
+ extras_to_install |= set(self.ini_info.reqs_by_extra.keys())
+ # We don’t remove 'all' from the set because there might be an extra called “all”.
+ elif self.deps == 'develop':
+ extras_to_install |= {'dev', 'doc', 'test'}
+
+ if self.deps != 'none':
+ # '.none' is an internal token for normal requirements
+ extras_to_install.add('.none')
+ log.info("Extras to install for deps %r: %s", self.deps, extras_to_install)
+ return extras_to_install
+
+ def install_requirements(self):
+ """Install requirements of a package with pip.
+
+ Creates a temporary requirements.txt from requires_dist metadata.
+ """
+ # construct the full list of requirements, including dev requirements
+ requirements = []
+
+ if self.deps == 'none':
+ return
+
+ for extra in self._extras_to_install():
+ requirements.extend(self.ini_info.reqs_by_extra.get(extra, []))
+
+ # there aren't any requirements, so return
+ if len(requirements) == 0:
+ return
+
+ requirements = [
+ _requires_dist_to_pip_requirement(req_d)
+ for req_d in requirements
+ ]
+
+ # install the requirements with pip
+ cmd = [self.python, '-m', 'pip', 'install']
+ if self.user:
+ cmd.append('--user')
+ with tempfile.NamedTemporaryFile(mode='w',
+ suffix='requirements.txt',
+ delete=False) as tf:
+ tf.file.write('\n'.join(requirements))
+ cmd.extend(['-r', tf.name])
+ log.info("Installing requirements")
+ try:
+ check_call(cmd)
+ finally:
+ os.remove(tf.name)
+
+ def install_reqs_my_python_if_needed(self):
+ """Install requirements to this environment if needed.
+
+ We can normally get the summary and version number without importing the
+ module, but if we do need to import it, we may need to install
+ its requirements for the Python where flit is running.
+ """
+ try:
+ common.get_info_from_module(self.module, self.ini_info.dynamic_metadata)
+ except ImportError:
+ if self.deps == 'none':
+ raise # We were asked not to install deps, so bail out.
+
+ log.warning("Installing requirements to Flit's env to import module.")
+ user = self.user if (self.python == sys.executable) else None
+ i2 = Installer(self.directory, self.ini_info, user=user, deps='production')
+ i2.install_requirements()
+
+ def _get_dirs(self, user):
+ if self.python == sys.executable:
+ return get_dirs(user=user)
+ else:
+ import json
+ path = osp.join(osp.dirname(__file__), '_get_dirs.py')
+ args = ['--user'] if user else []
+ return json.loads(self._run_python(file=path, extra_args=args))
+
+ def install_directly(self):
+ """Install a module/package into site-packages, and create its scripts.
+ """
+ dirs = self._get_dirs(user=self.user)
+ os.makedirs(dirs['purelib'], exist_ok=True)
+ os.makedirs(dirs['scripts'], exist_ok=True)
+
+ module_rel_path = self.module.path.relative_to(self.module.source_dir)
+ dst = osp.join(dirs['purelib'], module_rel_path)
+ if osp.lexists(dst):
+ if osp.isdir(dst) and not osp.islink(dst):
+ shutil.rmtree(dst)
+ else:
+ os.unlink(dst)
+
+ # Install requirements to target environment
+ self.install_requirements()
+
+ # Install requirements to this environment if we need them to
+ # get docstring & version number.
+ if self.python != sys.executable:
+ self.install_reqs_my_python_if_needed()
+
+ src = self.module.path
+ if self.symlink:
+ if self.module.in_namespace_package:
+ ns_dir = os.path.dirname(dst)
+ os.makedirs(ns_dir, exist_ok=True)
+
+ log.info("Symlinking %s -> %s", src, dst)
+ os.symlink(src.resolve(), dst)
+ self.installed_files.append(dst)
+ elif self.pth:
+ # .pth points to the folder containing the module (which is
+ # added to sys.path)
+ pth_file = pathlib.Path(dirs['purelib'], self.module.name + '.pth')
+ log.info("Adding .pth file %s for %s", pth_file, self.module.source_dir)
+ pth_file.write_text(str(self.module.source_dir.resolve()), 'utf-8')
+ self.installed_files.append(pth_file)
+ elif self.module.is_package:
+ log.info("Copying directory %s -> %s", src, dst)
+ shutil.copytree(src, dst)
+ self._record_installed_directory(dst)
+ else:
+ log.info("Copying file %s -> %s", src, dst)
+ os.makedirs(osp.dirname(dst), exist_ok=True)
+ shutil.copy2(src, dst)
+ self.installed_files.append(dst)
+
+ scripts = self.ini_info.entrypoints.get('console_scripts', {})
+ self.install_scripts(scripts, dirs['scripts'])
+
+ self.install_data_dir(dirs['data'])
+
+ self.write_dist_info(dirs['purelib'])
+
+ def install_with_pip(self):
+ """Let pip install the project directory
+
+ pip will create an isolated build environment and install build
+ dependencies, which means downloading flit_core from PyPI. We ask pip
+ to install the project directory (instead of building a temporary wheel
+ and asking pip to install it), so pip will record the project directory
+ in direct_url.json.
+ """
+ self.install_reqs_my_python_if_needed()
+ extras = self._extras_to_install()
+ extras.discard('.none')
+ req_with_extras = '{}[{}]'.format(self.directory, ','.join(extras)) \
+ if extras else str(self.directory)
+ cmd = [self.python, '-m', 'pip', 'install', req_with_extras]
+ if self.user:
+ cmd.append('--user')
+ if self.deps == 'none':
+ cmd.append('--no-deps')
+ shell = (os.name == 'nt')
+ check_call(cmd, shell=shell)
+
+ def write_dist_info(self, site_pkgs):
+ """Write dist-info folder, according to PEP 376"""
+ metadata = common.make_metadata(self.module, self.ini_info)
+ dist_info = pathlib.Path(site_pkgs) / common.dist_info_name(
+ metadata.name, metadata.version)
+ try:
+ dist_info.mkdir()
+ except FileExistsError:
+ shutil.rmtree(str(dist_info))
+ dist_info.mkdir()
+
+ with (dist_info / 'METADATA').open('w', encoding='utf-8') as f:
+ metadata.write_metadata_file(f)
+ self.installed_files.append(dist_info / 'METADATA')
+
+ with (dist_info / 'INSTALLER').open('w', encoding='utf-8') as f:
+ f.write('flit')
+ self.installed_files.append(dist_info / 'INSTALLER')
+
+ # We only handle explicitly requested installations
+ with (dist_info / 'REQUESTED').open('wb'): pass
+ self.installed_files.append(dist_info / 'REQUESTED')
+
+ if self.ini_info.entrypoints:
+ with (dist_info / 'entry_points.txt').open('w') as f:
+ common.write_entry_points(self.ini_info.entrypoints, f)
+ self.installed_files.append(dist_info / 'entry_points.txt')
+
+ with (dist_info / 'direct_url.json').open('w', encoding='utf-8') as f:
+ json.dump(
+ {
+ "url": self.directory.resolve().as_uri(),
+ "dir_info": {"editable": bool(self.symlink or self.pth)}
+ },
+ f
+ )
+ self.installed_files.append(dist_info / 'direct_url.json')
+
+ # newline='' because the csv module does its own newline translation
+ with (dist_info / 'RECORD').open('w', encoding='utf-8', newline='') as f:
+ cf = csv.writer(f)
+ for path in sorted(self.installed_files, key=str):
+ path = pathlib.Path(path)
+ if path.is_symlink() or path.suffix in {'.pyc', '.pyo'}:
+ hash, size = '', ''
+ else:
+ hash = 'sha256=' + common.hash_file(str(path))
+ size = path.stat().st_size
+ try:
+ path = path.relative_to(site_pkgs)
+ except ValueError:
+ pass
+ cf.writerow((str(path), hash, size))
+
+ cf.writerow(((dist_info / 'RECORD').relative_to(site_pkgs), '', ''))
+
+ def install(self):
+ if self.symlink or self.pth:
+ self.install_directly()
+ else:
+ self.install_with_pip()
diff --git a/flit/license_templates/apache b/flit/license_templates/apache
new file mode 100644
index 0000000..6b8b53b
--- /dev/null
+++ b/flit/license_templates/apache
@@ -0,0 +1,68 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License.
+
+Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License.
+
+Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution.
+
+You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+ You must give any other recipients of the Work or Derivative Works a copy of this License; and
+ You must cause any modified files to carry prominent notices stating that You changed the files; and
+ You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+ If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+
+You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions.
+
+Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks.
+
+This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty.
+
+Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability.
+
+In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability.
+
+While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
diff --git a/flit/license_templates/gpl3 b/flit/license_templates/gpl3
new file mode 100644
index 0000000..bc08fe2
--- /dev/null
+++ b/flit/license_templates/gpl3
@@ -0,0 +1,619 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
diff --git a/flit/license_templates/mit b/flit/license_templates/mit
new file mode 100644
index 0000000..063e484
--- /dev/null
+++ b/flit/license_templates/mit
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) {year} {author}
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/flit/log.py b/flit/log.py
new file mode 100644
index 0000000..26cf748
--- /dev/null
+++ b/flit/log.py
@@ -0,0 +1,110 @@
+"""Nicer log formatting with colours.
+
+Code copied from Tornado, Apache licensed.
+"""
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import sys
+
+try:
+ import curses
+except ImportError:
+ curses = None
+
+def _stderr_supports_color():
+ color = False
+ if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
+ try:
+ curses.setupterm()
+ if curses.tigetnum("colors") > 0:
+ color = True
+ except Exception:
+ pass
+ return color
+
+class LogFormatter(logging.Formatter):
+ """Log formatter with colour support
+ """
+ DEFAULT_COLORS = {
+ logging.INFO: 2, # Green
+ logging.WARNING: 3, # Yellow
+ logging.ERROR: 1, # Red
+ logging.CRITICAL: 1,
+ }
+
+ def __init__(self, color=True, datefmt=None):
+ r"""
+ :arg bool color: Enables color support.
+ :arg string fmt: Log message format.
+ It will be applied to the attributes dict of log records. The
+ text between ``%(color)s`` and ``%(end_color)s`` will be colored
+ depending on the level if color support is on.
+ :arg dict colors: color mappings from logging level to terminal color
+ code
+ :arg string datefmt: Datetime format.
+ Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
+ .. versionchanged:: 3.2
+ Added ``fmt`` and ``datefmt`` arguments.
+ """
+ logging.Formatter.__init__(self, datefmt=datefmt)
+ self._colors = {}
+ if color and _stderr_supports_color():
+ # The curses module has some str/bytes confusion in
+ # python3. Until version 3.2.3, most methods return
+ # bytes, but only accept strings. In addition, we want to
+ # output these strings with the logging module, which
+ # works with unicode strings. The explicit calls to
+ # unicode() below are harmless in python2 but will do the
+ # right conversion in python 3.
+ fg_color = (curses.tigetstr("setaf") or
+ curses.tigetstr("setf") or "")
+ if (3, 0) < sys.version_info < (3, 2, 3):
+ fg_color = str(fg_color, "ascii")
+
+ for levelno, code in self.DEFAULT_COLORS.items():
+ self._colors[levelno] = str(curses.tparm(fg_color, code), "ascii")
+ self._normal = str(curses.tigetstr("sgr0"), "ascii")
+
+ scr = curses.initscr()
+ self.termwidth = scr.getmaxyx()[1]
+ curses.endwin()
+ else:
+ self._normal = ''
+ # Default width is usually 80, but too wide is worse than too narrow
+ self.termwidth = 70
+
+ def formatMessage(self, record):
+ l = len(record.message)
+ right_text = '{initial}-{name}'.format(initial=record.levelname[0],
+ name=record.name)
+ if l + len(right_text) < self.termwidth:
+ space = ' ' * (self.termwidth - (l + len(right_text)))
+ else:
+ space = ' '
+
+ if record.levelno in self._colors:
+ start_color = self._colors[record.levelno]
+ end_color = self._normal
+ else:
+ start_color = end_color = ''
+
+ return record.message + space + start_color + right_text + end_color
+
+def enable_colourful_output(level=logging.INFO):
+ handler = logging.StreamHandler()
+ handler.setFormatter(LogFormatter())
+ logging.root.addHandler(handler)
+ logging.root.setLevel(level)
diff --git a/flit/logo.py b/flit/logo.py
new file mode 100644
index 0000000..15533cf
--- /dev/null
+++ b/flit/logo.py
@@ -0,0 +1,20 @@
+"""White and colored version for flit"""
+
+
+logo = """
+ ._ ._
+ ```. ```. .--.______
+ `. `-. `. / °,-—´
+ `. `~-.>.' /
+ `. .` |
+ -..;. /
+ / /___ _____
+ /r_,.´| | | |
+ ,' `/ |—— | | |
+ .´ ,'/ | |__ | |
+ .´ / . /
+ '__/|/ V {version}
+
+"""
+
+clogo = '\x1b[36m'+logo+'\x1b[39m'
diff --git a/flit/sdist.py b/flit/sdist.py
new file mode 100644
index 0000000..4d3cee1
--- /dev/null
+++ b/flit/sdist.py
@@ -0,0 +1,236 @@
+from collections import defaultdict
+import io
+import logging
+import os
+from pathlib import Path
+from posixpath import join as pjoin
+from pprint import pformat
+import tarfile
+
+from flit_core.sdist import SdistBuilder as SdistBuilderCore
+from flit_core.common import Module, VCSError
+from flit.vcs import identify_vcs
+
+log = logging.getLogger(__name__)
+
+# Our generated setup.py deliberately loads distutils, not setuptools, to
+# discourage running it directly and getting a setuptools mess. Tools like pip
+# handle this correctly - loading setuptools anyway but avoiding its issues.
+
+SETUP = """\
+#!/usr/bin/env python
+# setup.py generated by flit for tools that don't yet use PEP 517
+
+from distutils.core import setup
+
+{before}
+setup(name={name!r},
+ version={version!r},
+ description={description!r},
+ author={author!r},
+ author_email={author_email!r},
+ url={url!r},
+ {extra}
+ )
+"""
+
+
+
+
+def namespace_packages(module: Module):
+ """Get parent package names"""
+ name_parts = []
+ for part in module.namespace_package_name.split('.'):
+ name_parts.append(part)
+ yield '.'.join(name_parts)
+
+
+def auto_packages(module: Module):
+ """Discover subpackages and package_data"""
+ pkgdir = os.path.normpath(str(module.path))
+ pkg_name = module.name
+
+ packages = []
+ if module.in_namespace_package:
+ packages.extend(namespace_packages(module))
+ packages.append(pkg_name)
+
+ pkg_data = defaultdict(list)
+ # Undocumented distutils feature: the empty string matches all package names
+ pkg_data[''].append('*')
+
+ subpkg_paths = set()
+
+ def find_nearest_pkg(rel_path):
+ parts = rel_path.split(os.sep)
+ for i in reversed(range(1, len(parts))):
+ ancestor = '/'.join(parts[:i])
+ if ancestor in subpkg_paths:
+ pkg = '.'.join([pkg_name] + parts[:i])
+ return pkg, '/'.join(parts[i:])
+
+ # Relative to the top-level package
+ return pkg_name, rel_path
+
+ for path, dirnames, filenames in os.walk(pkgdir, topdown=True):
+ if os.path.basename(path) == '__pycache__':
+ continue
+
+ from_top_level = os.path.relpath(path, pkgdir)
+ if from_top_level == '.':
+ continue
+
+ is_subpkg = '__init__.py' in filenames
+ if is_subpkg:
+ subpkg_paths.add(from_top_level)
+ parts = from_top_level.split(os.sep)
+ packages.append('.'.join([pkg_name] + parts))
+ else:
+ pkg, from_nearest_pkg = find_nearest_pkg(from_top_level)
+ pkg_data[pkg].append(pjoin(from_nearest_pkg, '*'))
+
+ # Sort values in pkg_data
+ pkg_data = {k: sorted(v) for (k, v) in pkg_data.items()}
+
+ return sorted(packages), pkg_data
+
+
+def include_path(p):
+ return not (p.startswith('dist' + os.sep)
+ or (os.sep+'__pycache__' in p)
+ or p.endswith('.pyc'))
+
+
+def _parse_req(requires_dist):
+ """Parse "Foo (v); python_version == '2.x'" from Requires-Dist
+
+ Returns pip-style appropriate for requirements.txt.
+ """
+ if ';' in requires_dist:
+ name_version, env_mark = requires_dist.split(';', 1)
+ env_mark = env_mark.strip()
+ else:
+ name_version, env_mark = requires_dist, None
+
+ if '(' in name_version:
+ # turn 'name (X)' and 'name (<X.Y)'
+ # into 'name == X' and 'name < X.Y'
+ name, version = name_version.split('(', 1)
+ name = name.strip()
+ version = version.replace(')', '').strip()
+ if not any(c in version for c in '=<>'):
+ version = '==' + version
+ name_version = name + version
+
+ return name_version, env_mark
+
+
+def convert_requires(reqs_by_extra):
+ """Regroup requirements by (extra, env_mark)"""
+ grouping = defaultdict(list)
+ for extra, reqs in reqs_by_extra.items():
+ for req in reqs:
+ name_version, env_mark = _parse_req(req)
+ grouping[(extra, env_mark)].append(name_version)
+
+ install_reqs = grouping.pop(('.none', None), [])
+ extra_reqs = {}
+ for (extra, env_mark), reqs in grouping.items():
+ if extra == '.none':
+ extra = ''
+ if env_mark is None:
+ extra_reqs[extra] = reqs
+ else:
+ extra_reqs[extra + ':' + env_mark] = reqs
+
+ return install_reqs, extra_reqs
+
+
+class SdistBuilder(SdistBuilderCore):
+ """Build a complete sdist
+
+ This extends the minimal sdist-building in flit_core:
+
+ - Include any files tracked in version control, such as docs sources and
+ tests.
+ - Add a generated setup.py for compatibility with tools which don't yet know
+ about PEP 517.
+ """
+ def select_files(self):
+ vcs_mod = identify_vcs(self.cfgdir)
+ if vcs_mod is not None:
+ untracked_deleted = vcs_mod.list_untracked_deleted_files(self.cfgdir)
+ if any(include_path(p) and not self.excludes.match_file(p)
+ for p in untracked_deleted):
+ raise VCSError(
+ "Untracked or deleted files in the source directory. "
+ "Commit, undo or ignore these files in your VCS.",
+ self.cfgdir)
+
+ files = [os.path.normpath(p)
+ for p in vcs_mod.list_tracked_files(self.cfgdir)]
+ files = sorted(filter(include_path, files))
+ log.info("Found %d files tracked in %s", len(files), vcs_mod.name)
+ else:
+ files = super().select_files()
+
+ return files
+
+ def add_setup_py(self, files_to_add, target_tarfile):
+ if 'setup.py' in files_to_add:
+ log.warning(
+ "Using setup.py from repository, not generating setup.py")
+ else:
+ setup_py = self.make_setup_py()
+ log.info("Writing generated setup.py")
+ ti = tarfile.TarInfo(pjoin(self.dir_name, 'setup.py'))
+ ti.size = len(setup_py)
+ target_tarfile.addfile(ti, io.BytesIO(setup_py))
+
+ def make_setup_py(self):
+ before, extra = [], []
+ if self.module.is_package:
+ packages, package_data = auto_packages(self.module)
+ before.append("packages = \\\n%s\n" % pformat(sorted(packages)))
+ before.append("package_data = \\\n%s\n" % pformat(package_data))
+ extra.append("packages=packages,")
+ extra.append("package_data=package_data,")
+ else:
+ extra.append("py_modules={!r},".format([self.module.name]))
+ if self.module.in_namespace_package:
+ packages = list(namespace_packages(self.module))
+ before.append("packages = \\\n%s\n" % pformat(packages))
+ extra.append("packages=packages,")
+
+ if self.module.prefix:
+ package_dir = pformat({'': self.module.prefix})
+ before.append("package_dir = \\\n%s\n" % package_dir)
+ extra.append("package_dir=package_dir,")
+
+ install_reqs, extra_reqs = convert_requires(self.reqs_by_extra)
+ if install_reqs:
+ before.append("install_requires = \\\n%s\n" % pformat(install_reqs))
+ extra.append("install_requires=install_requires,")
+ if extra_reqs:
+ before.append("extras_require = \\\n%s\n" % pformat(extra_reqs))
+ extra.append("extras_require=extras_require,")
+
+ entrypoints = self.prep_entry_points()
+ if entrypoints:
+ before.append("entry_points = \\\n%s\n" % pformat(entrypoints))
+ extra.append("entry_points=entry_points,")
+
+ if self.metadata.requires_python:
+ extra.append('python_requires=%r,' % self.metadata.requires_python)
+
+ return SETUP.format(
+ before='\n'.join(before),
+ name=self.metadata.name,
+ version=self.metadata.version,
+ description=self.metadata.summary,
+ author=self.metadata.author,
+ author_email=self.metadata.author_email,
+ url=self.metadata.home_page,
+ extra='\n '.join(extra),
+ ).encode('utf-8')
+
diff --git a/flit/tomlify.py b/flit/tomlify.py
new file mode 100644
index 0000000..e4796cd
--- /dev/null
+++ b/flit/tomlify.py
@@ -0,0 +1,83 @@
+"""Convert a flit.ini file to pyproject.toml
+"""
+import argparse
+from collections import OrderedDict
+import configparser
+import os
+from pathlib import Path
+import tomli_w
+
+from .config import metadata_list_fields
+
+
+TEMPLATE = """\
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+{metadata}
+"""
+
+class CaseSensitiveConfigParser(configparser.ConfigParser):
+ optionxform = staticmethod(str)
+
+def convert(path):
+ cp = configparser.ConfigParser()
+ with path.open(encoding='utf-8') as f:
+ cp.read_file(f)
+
+ ep_file = Path('entry_points.txt')
+ metadata = OrderedDict()
+ for name, value in cp['metadata'].items():
+ if name in metadata_list_fields:
+ metadata[name] = [l for l in value.splitlines() if l.strip()]
+ elif name == 'entry-points-file':
+ ep_file = Path(value)
+ else:
+ metadata[name] = value
+
+ if 'scripts' in cp:
+ scripts = OrderedDict(cp['scripts'])
+ else:
+ scripts = {}
+
+ entrypoints = CaseSensitiveConfigParser()
+ if ep_file.is_file():
+ with ep_file.open(encoding='utf-8') as f:
+ entrypoints.read_file(f)
+
+ written_entrypoints = False
+ with Path('pyproject.toml').open('w', encoding='utf-8') as f:
+ f.write(TEMPLATE.format(metadata=tomli_w.dumps(metadata)))
+
+ if scripts:
+ f.write('\n[tool.flit.scripts]\n')
+ f.write(tomli_w.dumps(scripts))
+
+ for groupname, group in entrypoints.items():
+ if not dict(group):
+ continue
+
+ if '.' in groupname:
+ groupname = '"{}"'.format(groupname)
+ f.write('\n[tool.flit.entrypoints.{}]\n'.format(groupname))
+ f.write(tomli_w.dumps(OrderedDict(group)))
+ written_entrypoints = True
+
+ print("Written 'pyproject.toml'")
+ files = str(path)
+ if written_entrypoints:
+ files += ' and ' + str(ep_file)
+ print("Please check the new file, then remove", files)
+
+def main(argv=None):
+ ap = argparse.ArgumentParser()
+ ap.add_argument('-f', '--ini-file', type=Path, default='flit.ini')
+ args = ap.parse_args(argv)
+
+ os.chdir(str(args.ini_file.parent))
+ convert(Path(args.ini_file.name))
+
+if __name__ == '__main__':
+ main()
diff --git a/flit/upload.py b/flit/upload.py
new file mode 100644
index 0000000..0ea67e9
--- /dev/null
+++ b/flit/upload.py
@@ -0,0 +1,276 @@
+"""Code to communicate with PyPI to register distributions and upload files.
+
+This is cribbed heavily from distutils.command.(upgrade|register), which as part
+of Python is under the PSF license.
+"""
+import configparser
+import getpass
+import hashlib
+import logging
+import os
+from pathlib import Path
+import requests
+import sys
+from urllib.parse import urlparse
+
+from flit_core.common import Metadata
+
+log = logging.getLogger(__name__)
+
+PYPI = "https://upload.pypi.org/legacy/"
+PYPIRC_DEFAULT = "~/.pypirc"
+
+SWITCH_TO_HTTPS = (
+ "http://pypi.python.org/",
+ "http://testpypi.python.org/",
+ "http://upload.pypi.org/",
+ "http://upload.pypi.io/",
+)
+
+def get_repositories(file="~/.pypirc"):
+ """Get the known repositories from a pypirc file.
+
+ This returns a dict keyed by name, of dicts with keys 'url', 'username',
+ 'password'. Username and password may be None.
+ """
+ cp = configparser.ConfigParser()
+ if isinstance(file, str):
+ file = os.path.expanduser(file)
+
+ if not os.path.isfile(file):
+ return {'pypi': {
+ 'url': PYPI, 'username': None, 'password': None,
+ }}
+
+ cp.read(file)
+ else:
+ cp.read_file(file)
+
+ names = cp.get('distutils', 'index-servers', fallback='pypi').split()
+
+ repos = {}
+
+ for name in names:
+ repos[name] = {
+ 'url': cp.get(name, 'repository', fallback=PYPI),
+ 'username': cp.get(name, 'username', fallback=None),
+ 'password': cp.get(name, 'password', fallback=None),
+ }
+
+ return repos
+
+
+def get_repository(pypirc_path="~/.pypirc", name=None):
+ """Get the url, username and password for one repository.
+
+ Returns a dict with keys 'url', 'username', 'password'.
+
+ There is a hierarchy of possible sources of information:
+
+ Index URL:
+ 1. Command line arg --repository (looked up in .pypirc)
+ 2. $FLIT_INDEX_URL
+ 3. Repository called 'pypi' from .pypirc
+ 4. Default PyPI (hardcoded)
+
+ Username:
+ 1. Command line arg --repository (looked up in .pypirc)
+ 2. $FLIT_USERNAME
+ 3. Repository called 'pypi' from .pypirc
+ 4. Terminal prompt (write to .pypirc if it doesn't exist yet)
+
+ Password:
+ 1. Command line arg --repository (looked up in .pypirc)
+ 2. $FLIT_PASSWORD
+ 3. Repository called 'pypi' from .pypirc
+ 3. keyring
+ 4. Terminal prompt (store to keyring if available)
+ """
+ log.debug("Loading repositories config from %r", pypirc_path)
+ repos_cfg = get_repositories(pypirc_path)
+
+ if name is not None:
+ repo = repos_cfg[name]
+ elif 'FLIT_INDEX_URL' in os.environ:
+ repo = {'url': os.environ['FLIT_INDEX_URL'],
+ 'username': None, 'password': None}
+ elif 'pypi' in repos_cfg:
+ repo = repos_cfg['pypi']
+
+ if 'FLIT_PASSWORD' in os.environ:
+ repo['password'] = os.environ['FLIT_PASSWORD']
+ else:
+ repo = {'url': PYPI, 'username': None, 'password': None}
+
+ if repo['url'].startswith(SWITCH_TO_HTTPS):
+ # Use https for PyPI, even if an http URL was given
+ repo['url'] = 'https' + repo['url'][4:]
+ elif repo['url'].startswith('http://'):
+ log.warning("Unencrypted connection - credentials may be visible on "
+ "the network.")
+ log.info("Using repository at %s", repo['url'])
+
+ if ('FLIT_USERNAME' in os.environ) and ((name is None) or (not repo['username'])):
+ repo['username'] = os.environ['FLIT_USERNAME']
+ if sys.stdin.isatty():
+ while not repo['username']:
+ repo['username'] = input("Username: ")
+ if repo['url'] == PYPI:
+ write_pypirc(repo, pypirc_path)
+ elif not repo['username']:
+ raise Exception("Could not find username for upload.")
+
+ repo['password'] = get_password(repo, prefer_env=(name is None))
+
+ repo['is_warehouse'] = repo['url'].rstrip('/').endswith('/legacy')
+
+ return repo
+
+def write_pypirc(repo, file="~/.pypirc"):
+ """Write .pypirc if it doesn't already exist
+ """
+ file = os.path.expanduser(file)
+ if os.path.isfile(file):
+ return
+
+ with open(file, 'w', encoding='utf-8') as f:
+ f.write("[pypi]\n"
+ "username = %s\n" % repo['username'])
+
+def get_password(repo, prefer_env):
+ if ('FLIT_PASSWORD' in os.environ) and (prefer_env or not repo['password']):
+ return os.environ['FLIT_PASSWORD']
+
+ if repo['password']:
+ return repo['password']
+
+ try:
+ import keyring, keyring.errors
+ except ImportError: # pragma: no cover
+ log.warning("Install keyring to store passwords securely")
+ keyring = None
+ else:
+ try:
+ stored_pw = keyring.get_password(repo['url'], repo['username'])
+ if stored_pw is not None:
+ return stored_pw
+ except keyring.errors.KeyringError as e:
+ log.warning("Could not get password from keyring (%s)", e)
+
+ if sys.stdin.isatty():
+ pw = None
+ while not pw:
+ print('Server :', repo['url'])
+ print('Username:', repo['username'])
+ pw = getpass.getpass('Password: ')
+ else:
+ raise Exception("Could not find password for upload.")
+
+ if keyring is not None:
+ try:
+ keyring.set_password(repo['url'], repo['username'], pw)
+ log.info("Stored password with keyring")
+ except keyring.errors.KeyringError as e:
+ log.warning("Could not store password in keyring (%s)", e)
+
+ return pw
+
+def build_post_data(action, metadata:Metadata):
+ """Prepare the metadata needed for requests to PyPI.
+ """
+ d = {
+ ":action": action,
+
+ "name": metadata.name,
+ "version": metadata.version,
+
+ # additional meta-data
+ "metadata_version": '2.1',
+ "summary": metadata.summary,
+ "home_page": metadata.home_page,
+ "author": metadata.author,
+ "author_email": metadata.author_email,
+ "maintainer": metadata.maintainer,
+ "maintainer_email": metadata.maintainer_email,
+ "license": metadata.license,
+ "description": metadata.description,
+ "keywords": metadata.keywords,
+ "platform": metadata.platform,
+ "classifiers": metadata.classifiers,
+ "download_url": metadata.download_url,
+ "supported_platform": metadata.supported_platform,
+ # Metadata 1.1 (PEP 314)
+ "provides": metadata.provides,
+ "requires": metadata.requires,
+ "obsoletes": metadata.obsoletes,
+ # Metadata 1.2 (PEP 345)
+ "project_urls": metadata.project_urls,
+ "provides_dist": metadata.provides_dist,
+ "obsoletes_dist": metadata.obsoletes_dist,
+ "requires_dist": metadata.requires_dist,
+ "requires_external": metadata.requires_external,
+ "requires_python": metadata.requires_python,
+ # Metadata 2.1 (PEP 566)
+ "description_content_type": metadata.description_content_type,
+ "provides_extra": metadata.provides_extra,
+ }
+
+ return {k:v for k,v in d.items() if v}
+
+def upload_file(file:Path, metadata:Metadata, repo):
+ """Upload a file to an index server, given the index server details.
+ """
+ data = build_post_data('file_upload', metadata)
+ data['protocol_version'] = '1'
+ if file.suffix == '.whl':
+ data['filetype'] = 'bdist_wheel'
+ py2_support = not (metadata.requires_python or '')\
+ .startswith(('3', '>3', '>=3'))
+ data['pyversion'] = ('py2.' if py2_support else '') + 'py3'
+ else:
+ data['filetype'] = 'sdist'
+
+ with file.open('rb') as f:
+ content = f.read()
+ files = {'content': (file.name, content)}
+ data['md5_digest'] = hashlib.md5(content).hexdigest()
+ data['sha256_digest'] = hashlib.sha256(content).hexdigest()
+
+ log.info('Uploading %s...', file)
+ resp = requests.post(repo['url'],
+ data=data,
+ files=files,
+ auth=(repo['username'], repo['password']),
+ )
+ resp.raise_for_status()
+
+
+def do_upload(file:Path, metadata:Metadata, pypirc_path="~/.pypirc", repo_name=None):
+ """Upload a file to an index server.
+ """
+ repo = get_repository(pypirc_path, repo_name)
+ upload_file(file, metadata, repo)
+
+ if repo['is_warehouse']:
+ domain = urlparse(repo['url']).netloc
+ if domain.startswith('upload.'):
+ domain = domain[7:]
+ log.info("Package is at https://%s/project/%s/", domain, metadata.name)
+ else:
+ log.info("Package is at %s/%s", repo['url'], metadata.name)
+
+
+def main(ini_path, repo_name, pypirc_path=None, formats=None, gen_setup_py=True):
+ """Build and upload wheel and sdist."""
+ if pypirc_path is None:
+ pypirc_path = PYPIRC_DEFAULT
+ elif not os.path.isfile(pypirc_path):
+ raise FileNotFoundError("The specified pypirc config file does not exist.")
+
+ from . import build
+ built = build.main(ini_path, formats=formats, gen_setup_py=gen_setup_py)
+
+ if built.wheel is not None:
+ do_upload(built.wheel.file, built.wheel.builder.metadata, pypirc_path, repo_name)
+ if built.sdist is not None:
+ do_upload(built.sdist.file, built.sdist.builder.metadata, pypirc_path, repo_name)
diff --git a/flit/validate.py b/flit/validate.py
new file mode 100644
index 0000000..7184b1f
--- /dev/null
+++ b/flit/validate.py
@@ -0,0 +1,301 @@
+"""Validate various pieces of packaging data"""
+
+import errno
+import io
+import logging
+import os
+from pathlib import Path
+import re
+import requests
+import sys
+
+from .vendorized.readme.rst import render
+
+log = logging.getLogger(__name__)
+
+CUSTOM_CLASSIFIERS = frozenset({
+ # https://github.com/pypa/warehouse/pull/5440
+ 'Private :: Do Not Upload',
+})
+
+
+def get_cache_dir() -> Path:
+ """Locate a platform-appropriate cache directory for flit to use
+
+ Does not ensure that the cache directory exists.
+ """
+ # Linux, Unix, AIX, etc.
+ if os.name == 'posix' and sys.platform != 'darwin':
+ # use ~/.cache if empty OR not set
+ xdg = os.environ.get("XDG_CACHE_HOME", None) \
+ or os.path.expanduser('~/.cache')
+ return Path(xdg, 'flit')
+
+ # Mac OS
+ elif sys.platform == 'darwin':
+ return Path(os.path.expanduser('~'), 'Library/Caches/flit')
+
+ # Windows (hopefully)
+ else:
+ local = os.environ.get('LOCALAPPDATA', None) \
+ or os.path.expanduser('~\\AppData\\Local')
+ return Path(local, 'flit')
+
+
+def _read_classifiers_cached():
+ """Reads classifiers from cached file"""
+ with (get_cache_dir() / 'classifiers.lst').open(encoding='utf-8') as f:
+ valid_classifiers = set(l.strip() for l in f)
+ return valid_classifiers
+
+
+def _download_and_cache_classifiers():
+ """Get the list of valid trove classifiers from PyPI"""
+ log.info('Fetching list of valid trove classifiers')
+ resp = requests.get(
+ 'https://pypi.org/pypi?%3Aaction=list_classifiers')
+ resp.raise_for_status()
+
+ cache_dir = get_cache_dir()
+ try:
+ cache_dir.mkdir(parents=True)
+ except (FileExistsError, PermissionError):
+ pass
+ except OSError as e:
+ # readonly mounted file raises OSError, only these should be captured
+ if e.errno != errno.EROFS:
+ raise
+
+ try:
+ with (cache_dir / 'classifiers.lst').open('wb') as f:
+ f.write(resp.content)
+ except (PermissionError, FileNotFoundError):
+ # cache file could not be created
+ pass
+ except OSError as e:
+ # readonly mounted file raises OSError, only these should be captured
+ if e.errno != errno.EROFS:
+ raise
+
+ valid_classifiers = set(l.strip() for l in resp.text.splitlines())
+ return valid_classifiers
+
+
+def _verify_classifiers(classifiers, valid_classifiers):
+ """Check classifiers against a set of known classifiers"""
+ invalid = classifiers - valid_classifiers
+ return ["Unrecognised classifier: {!r}".format(c)
+ for c in sorted(invalid)]
+
+
+def validate_classifiers(classifiers):
+ """Verify trove classifiers from config file.
+
+ Fetches and caches a list of known classifiers from PyPI. Setting the
+ environment variable FLIT_NO_NETWORK=1 will skip this if the classifiers
+ are not already cached.
+ """
+ if not classifiers:
+ return []
+
+ problems = []
+ classifiers = set(classifiers)
+ try:
+ valid_classifiers = _read_classifiers_cached()
+ valid_classifiers.update(CUSTOM_CLASSIFIERS)
+ problems = _verify_classifiers(classifiers, valid_classifiers)
+ except (FileNotFoundError, PermissionError) as e1:
+ # We haven't yet got the classifiers cached or couldn't read it
+ pass
+ else:
+ if not problems:
+ return []
+
+ # Either we don't have the list, or there were unexpected classifiers
+ # which might have been added since we last fetched it. Fetch and cache.
+
+ if os.environ.get('FLIT_NO_NETWORK', ''):
+ log.warning(
+ "Not checking classifiers, because FLIT_NO_NETWORK is set")
+ return []
+
+ # Try to download up-to-date list of classifiers
+ try:
+ valid_classifiers = _download_and_cache_classifiers()
+ except requests.ConnectionError:
+ # The error you get on a train, going through Oregon, without wifi
+ log.warning(
+ "Couldn't get list of valid classifiers to check against")
+ return problems
+ valid_classifiers.update(CUSTOM_CLASSIFIERS)
+ return _verify_classifiers(classifiers, valid_classifiers)
+
+
+def validate_entrypoints(entrypoints):
+ """Check that the loaded entrypoints are valid.
+
+ Expects a dict of dicts, e.g.::
+
+ {'console_scripts': {'flit': 'flit:main'}}
+ """
+
+ def _is_identifier_attr(s):
+ return all(n.isidentifier() for n in s.split('.'))
+
+ problems = []
+ for groupname, group in entrypoints.items():
+ for k, v in group.items():
+ if ':' in v:
+ mod, obj = v.split(':', 1)
+ valid = _is_identifier_attr(mod) and _is_identifier_attr(obj)
+ else:
+ valid = _is_identifier_attr(v)
+
+ if not valid:
+ problems.append('Invalid entry point in group {}: '
+ '{} = {}'.format(groupname, k, v))
+ return problems
+
+# Distribution name, not quite the same as a Python identifier
+NAME = re.compile(r'^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$', re.IGNORECASE)
+VERSION_SPEC = re.compile(r'(~=|===?|!=|<=?|>=?)\s*[A-Z0-9\-_.*+!]+$', re.IGNORECASE)
+REQUIREMENT = re.compile(NAME.pattern[:-1] + # Trim '$'
+ r"""\s*(?P<extras>\[.*\])?
+ \s*(?P<version>[(=~<>!@][^;]*)?
+ \s*(?P<envmark>;.*)?
+ $""", re.IGNORECASE | re.VERBOSE)
+MARKER_OP = re.compile(r'(~=|===?|!=|<=?|>=?|\s+in\s+|\s+not in\s+)')
+
+def validate_name(metadata):
+ name = metadata.get('name', None)
+ if name is None or NAME.match(name):
+ return []
+ return ['Invalid name: {!r}'.format(name)]
+
+
+def _valid_version_specifier(s):
+ for clause in s.split(','):
+ if not VERSION_SPEC.match(clause.strip()):
+ return False
+ return True
+
+def validate_requires_python(metadata):
+ spec = metadata.get('requires_python', None)
+ if spec is None or _valid_version_specifier(spec):
+ return []
+ return ['Invalid requires-python: {!r}'.format(spec)]
+
+MARKER_VARS = {
+ 'python_version', 'python_full_version', 'os_name', 'sys_platform',
+ 'platform_release', 'platform_system', 'platform_version', 'platform_machine',
+ 'platform_python_implementation', 'implementation_name',
+ 'implementation_version', 'extra',
+}
+
+def validate_environment_marker(em):
+ clauses = re.split(r'\s+(?:and|or)\s+', em)
+ problems = []
+ for c in clauses:
+ # TODO: validate parentheses properly. They're allowed by PEP 508.
+ parts = MARKER_OP.split(c.strip('()'))
+ if len(parts) != 3:
+ problems.append("Invalid expression in environment marker: {!r}".format(c))
+ continue
+ l, op, r = parts
+ for var in (l.strip(), r.strip()):
+ if var[:1] in {'"', "'"}:
+ if len(var) < 2 or var[-1:] != var[:1]:
+ problems.append("Invalid string in environment marker: {}".format(var))
+ elif var not in MARKER_VARS:
+ problems.append("Invalid variable name in environment marker: {!r}".format(var))
+ return problems
+
+def validate_requires_dist(metadata):
+ probs = []
+ for req in metadata.get('requires_dist', []):
+ m = REQUIREMENT.match(req)
+ if not m:
+ probs.append("Could not parse requirement: {!r}".format(req))
+ continue
+
+ extras, version, envmark = m.group('extras', 'version', 'envmark')
+ if not (extras is None or all(NAME.match(e.strip())
+ for e in extras[1:-1].split(','))):
+ probs.append("Invalid extras in requirement: {!r}".format(req))
+ if version is not None:
+ if version.startswith('(') and version.endswith(')'):
+ version = version[1:-1]
+ if version.startswith('@'):
+ pass # url specifier TODO: validate URL
+ elif not _valid_version_specifier(version):
+ probs.append("Invalid version specifier {!r} in requirement {!r}"
+ .format(version, req))
+ if envmark is not None:
+ probs.extend(validate_environment_marker(envmark[1:]))
+ return probs
+
+def validate_url(url):
+ if url is None:
+ return []
+ probs = []
+ if not url.startswith(('http://', 'https://')):
+ probs.append("URL {!r} doesn't start with https:// or http://"
+ .format(url))
+ elif not url.split('//', 1)[1]:
+ probs.append("URL missing address")
+ return probs
+
+def validate_project_urls(metadata):
+ probs = []
+ for prurl in metadata.get('project_urls', []):
+ name, url = prurl.split(',', 1)
+ url = url.lstrip()
+ if not name:
+ probs.append("No name for project URL {!r}".format(url))
+ elif len(name) > 32:
+ probs.append("Project URL name {!r} longer than 32 characters"
+ .format(name))
+ probs.extend(validate_url(url))
+
+ return probs
+
+
+def validate_readme_rst(metadata):
+ mimetype = metadata.get('description_content_type', '')
+
+ if mimetype != 'text/x-rst':
+ return []
+
+ # rst check
+ raw_desc = metadata.get('description', '')
+ stream = io.StringIO()
+ res = render(raw_desc, stream)
+ if not res:
+ return [
+ ("The file description seems not to be valid rst for PyPI;"
+ " it will be interpreted as plain text"),
+ stream.getvalue(),
+ ]
+
+ return [] # rst rendered OK
+
+
+def validate_config(config_info):
+ i = config_info
+ problems = sum([
+ validate_classifiers(i.metadata.get('classifiers')),
+ validate_entrypoints(i.entrypoints),
+ validate_name(i.metadata),
+ validate_requires_python(i.metadata),
+ validate_requires_dist(i.metadata),
+ validate_url(i.metadata.get('home_page', None)),
+ validate_project_urls(i.metadata),
+ validate_readme_rst(i.metadata)
+ ], [])
+
+ for p in problems:
+ log.error(p)
+ return problems
+
diff --git a/flit/vcs/__init__.py b/flit/vcs/__init__.py
new file mode 100644
index 0000000..a3e92b3
--- /dev/null
+++ b/flit/vcs/__init__.py
@@ -0,0 +1,14 @@
+from pathlib import Path
+
+from . import hg
+from . import git
+
+def identify_vcs(directory: Path):
+ directory = directory.resolve()
+ for p in [directory] + list(directory.parents):
+ if (p / '.git').is_dir():
+ return git
+ if (p / '.hg').is_dir():
+ return hg
+
+ return None
diff --git a/flit/vcs/git.py b/flit/vcs/git.py
new file mode 100644
index 0000000..cb8890c
--- /dev/null
+++ b/flit/vcs/git.py
@@ -0,0 +1,15 @@
+import os
+from subprocess import check_output
+
+name = 'git'
+
+def list_tracked_files(directory):
+ outb = check_output(['git', 'ls-files', '--recurse-submodules', '-z'],
+ cwd=str(directory))
+ return [os.fsdecode(l) for l in outb.strip(b'\0').split(b'\0') if l]
+
+def list_untracked_deleted_files(directory):
+ outb = check_output(['git', 'ls-files', '--deleted', '--others',
+ '--exclude-standard', '-z'],
+ cwd=str(directory))
+ return [os.fsdecode(l) for l in outb.strip(b'\0').split(b'\0') if l]
diff --git a/flit/vcs/hg.py b/flit/vcs/hg.py
new file mode 100644
index 0000000..2c97eeb
--- /dev/null
+++ b/flit/vcs/hg.py
@@ -0,0 +1,34 @@
+import os
+from subprocess import check_output
+
+name = 'hg'
+
+def find_repo_root(directory):
+ for p in [directory] + list(directory.parents):
+ if (p / '.hg').is_dir():
+ return p
+
+def _repo_paths_to_directory_paths(paths, directory):
+ # 'hg status' gives paths from repo root, which may not be our directory.
+ directory = directory.resolve()
+ repo = find_repo_root(directory)
+ if directory != repo:
+ directory_in_repo = str(directory.relative_to(repo)) + os.sep
+ ix = len(directory_in_repo)
+ paths = [p[ix:] for p in paths
+ if os.path.normpath(p).startswith(directory_in_repo)]
+ return paths
+
+
+def list_tracked_files(directory):
+ outb = check_output(['hg', 'status', '--clean', '--added', '--modified', '--no-status'],
+ cwd=str(directory))
+ paths = [os.fsdecode(l) for l in outb.strip().splitlines()]
+ return _repo_paths_to_directory_paths(paths, directory)
+
+
+def list_untracked_deleted_files(directory):
+ outb = check_output(['hg', 'status', '--unknown', '--deleted', '--no-status'],
+ cwd=str(directory))
+ paths = [os.fsdecode(l) for l in outb.strip().splitlines()]
+ return _repo_paths_to_directory_paths(paths, directory)
diff --git a/flit/vendorized/__init__.py b/flit/vendorized/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit/vendorized/__init__.py
diff --git a/flit/vendorized/readme/__init__.py b/flit/vendorized/readme/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit/vendorized/readme/__init__.py
diff --git a/flit/vendorized/readme/clean.py b/flit/vendorized/readme/clean.py
new file mode 100644
index 0000000..780697e
--- /dev/null
+++ b/flit/vendorized/readme/clean.py
@@ -0,0 +1,2 @@
+# Shim for the readme package's clean module, to simplify vendorizing readme.rst
+clean = lambda x:x
diff --git a/flit/vendorized/readme/rst.py b/flit/vendorized/readme/rst.py
new file mode 100644
index 0000000..b4542be
--- /dev/null
+++ b/flit/vendorized/readme/rst.py
@@ -0,0 +1,128 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copied from https://github.com/pypa/readme_renderer
+# Commit 5b455a9c5bafc1732dafad9619bcbfa8e15432c9
+
+from __future__ import absolute_import, division, print_function
+
+import io
+import os.path
+
+from docutils.core import publish_parts
+from docutils.writers.html4css1 import HTMLTranslator, Writer
+from docutils.utils import SystemMessage
+
+from .clean import clean
+
+
+class ReadMeHTMLTranslator(HTMLTranslator):
+
+ def depart_image(self, node):
+ uri = node["uri"]
+ ext = os.path.splitext(uri)[1].lower()
+ # we need to swap RST's use of `object` with `img` tags
+ # see http://git.io/5me3dA
+ if ext == ".svg":
+ # preserve essential attributes
+ atts = {}
+ for attribute, value in node.attributes.items():
+ # we have no time for empty values
+ if value:
+ if attribute == "uri":
+ atts["src"] = value
+ else:
+ atts[attribute] = value
+
+ # toss off `object` tag
+ self.body.pop()
+ # add on `img` with attributes
+ self.body.append(self.starttag(node, "img", **atts))
+
+
+SETTINGS = {
+ # Cloaking email addresses provides a small amount of additional
+ # privacy protection for email addresses inside of a chunk of ReST.
+ "cloak_email_addresses": True,
+
+ # Prevent a lone top level heading from being promoted to document
+ # title, and thus second level headings from being promoted to top
+ # level.
+ "doctitle_xform": True,
+
+ # Prevent a lone subsection heading from being promoted to section
+ # title, and thus second level headings from being promoted to top
+ # level.
+ "sectsubtitle_xform": True,
+
+ # Set our initial header level
+ "initial_header_level": 2,
+
+ # Prevent local files from being included into the rendered output.
+ # This is a security concern because people can insert files
+ # that are part of the system, such as /etc/passwd.
+ "file_insertion_enabled": False,
+
+ # Halt rendering and throw an exception if there was any errors or
+ # warnings from docutils.
+ "halt_level": 2,
+
+ # Output math blocks as LaTeX that can be interpreted by MathJax for
+ # a prettier display of Math formulas.
+ "math_output": "MathJax",
+
+ # Disable raw html as enabling it is a security risk, we do not want
+ # people to be able to include any old HTML in the final output.
+ "raw_enabled": False,
+
+ # Disable all system messages from being reported.
+ "report_level": 5,
+
+ # Use typographic quotes, and transform --, ---, and ... into their
+ # typographic counterparts.
+ "smart_quotes": True,
+
+ # Strip all comments from the rendered output.
+ "strip_comments": True,
+
+ # PATCH FOR FLIT ----------------------------------
+ # Disable syntax highlighting so we don't need Pygments installed.
+ "syntax_highlight": "none",
+ # -------------------------------------------------
+}
+
+
+def render(raw, stream=None):
+ if stream is None:
+ # Use a io.StringIO as the warning stream to prevent warnings from
+ # being printed to sys.stderr.
+ stream = io.StringIO()
+
+ settings = SETTINGS.copy()
+ settings["warning_stream"] = stream
+
+ writer = Writer()
+ writer.translator_class = ReadMeHTMLTranslator
+
+ try:
+ parts = publish_parts(raw, writer=writer, settings_overrides=settings)
+ except SystemMessage:
+ rendered = None
+ else:
+ rendered = parts.get("fragment")
+
+ if rendered:
+ return clean(rendered)
+ else:
+ return None
diff --git a/flit/wheel.py b/flit/wheel.py
new file mode 100644
index 0000000..a60e358
--- /dev/null
+++ b/flit/wheel.py
@@ -0,0 +1,12 @@
+import logging
+
+import flit_core.wheel as core_wheel
+
+log = logging.getLogger(__name__)
+
+def make_wheel_in(ini_path, wheel_directory, editable=False):
+ return core_wheel.make_wheel_in(ini_path, wheel_directory, editable)
+
+class WheelBuilder(core_wheel.WheelBuilder):
+ pass
+
diff --git a/flit_core/LICENSE b/flit_core/LICENSE
new file mode 100644
index 0000000..1bd2e2d
--- /dev/null
+++ b/flit_core/LICENSE
@@ -0,0 +1,29 @@
+Copyright (c) 2015, Thomas Kluyver and contributors
+All rights reserved.
+
+BSD 3-clause license:
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/flit_core/README.rst b/flit_core/README.rst
new file mode 100644
index 0000000..6d9818f
--- /dev/null
+++ b/flit_core/README.rst
@@ -0,0 +1,6 @@
+flit_core
+---------
+
+This provides a PEP 517 build backend for packages using Flit.
+The only public interface is the API specified by PEP 517, at ``flit_core.buildapi``.
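+
+A build frontend normally calls that API for you, but the hooks can also be
+invoked directly; for example (illustrative, assuming it is run from a source
+tree containing ``pyproject.toml``)::
+
+    from flit_core import buildapi
+
+    wheel_name = buildapi.build_wheel('dist/')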
+
diff --git a/flit_core/bootstrap_install.py b/flit_core/bootstrap_install.py
new file mode 100644
index 0000000..1fd4c87
--- /dev/null
+++ b/flit_core/bootstrap_install.py
@@ -0,0 +1,57 @@
+"""Install flit_core without using any other tools.
+
+Normally, you would install flit_core with pip like any other Python package.
+This script is meant to help with 'bootstrapping' other packaging
+systems, where you may need flit_core to build other packaging tools.
+
+Use 'python -m flit_core.wheel' to make a wheel, then:
+
+ python bootstrap_install.py flit_core-3.6.0-py3-none-any.whl
+
+To install for something other than the Python running the script, pass a
+site-packages or equivalent directory with the --installdir option.
+"""
+import argparse
+import sys
+import sysconfig
+from pathlib import Path
+from zipfile import ZipFile
+
+def extract_wheel(whl_path, dest):
+ print("Installing to", dest)
+ with ZipFile(whl_path) as zf:
+ zf.extractall(dest)
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ 'wheel',
+ type=Path,
+ help='flit_core wheel to install (.whl file)',
+ )
+ purelib = Path(sysconfig.get_path('purelib')).resolve()
+ parser.add_argument(
+ '--installdir',
+ '-i',
+ type=Path,
+ default=purelib,
+ help=f'installation directory (defaults to {purelib})',
+ )
+ parser.add_argument(
+ '--install-root',
+ type=Path,
+ default=None,
+ help='if given, installdir is interpreted as a path under this root'
+ )
+
+ args = parser.parse_args()
+
+ if not args.wheel.name.startswith('flit_core-'):
+ sys.exit("Use this script only for flit_core wheels")
+ if args.install_root:
+ installdir = args.install_root / args.installdir.relative_to("/")
+ else:
+ installdir = args.installdir
+
+ installdir.mkdir(parents=True, exist_ok=True)
+ extract_wheel(args.wheel, installdir)
diff --git a/flit_core/build_dists.py b/flit_core/build_dists.py
new file mode 100644
index 0000000..efbce59
--- /dev/null
+++ b/flit_core/build_dists.py
@@ -0,0 +1,17 @@
+"""Build flit_core to upload to PyPI.
+
+Normally, this should only be used by me when making a release.
+"""
+import os
+
+from flit_core import buildapi
+
+os.chdir(os.path.dirname(os.path.abspath(__file__)))
+
+print("Building sdist")
+sdist_fname = buildapi.build_sdist('dist/')
+print(os.path.join('dist', sdist_fname))
+
+print("\nBuilding wheel")
+whl_fname = buildapi.build_wheel('dist/')
+print(os.path.join('dist', whl_fname))
diff --git a/flit_core/flit_core/__init__.py b/flit_core/flit_core/__init__.py
new file mode 100644
index 0000000..d3125ef
--- /dev/null
+++ b/flit_core/flit_core/__init__.py
@@ -0,0 +1,7 @@
+"""Flit's core machinery for building packages.
+
+This package provides a standard PEP 517 API to build packages using Flit.
+All the convenient development features live in the main 'flit' package.
+"""
+
+__version__ = '3.8.0'
diff --git a/flit_core/flit_core/buildapi.py b/flit_core/flit_core/buildapi.py
new file mode 100644
index 0000000..963bf61
--- /dev/null
+++ b/flit_core/flit_core/buildapi.py
@@ -0,0 +1,83 @@
+"""PEP-517 compliant buildsystem API"""
+import logging
+import io
+import os
+import os.path as osp
+from pathlib import Path
+
+from .common import (
+ Module, make_metadata, write_entry_points, dist_info_name,
+ get_docstring_and_version_via_ast,
+)
+from .config import read_flit_config
+from .wheel import make_wheel_in, _write_wheel_file
+from .sdist import SdistBuilder
+
+log = logging.getLogger(__name__)
+
+# PEP 517 specifies that the CWD will always be the source tree
+pyproj_toml = Path('pyproject.toml')
+
+def get_requires_for_build_wheel(config_settings=None):
+ """Returns a list of requirements for building, as strings"""
+ info = read_flit_config(pyproj_toml)
+ # If we can get version & description from pyproject.toml (PEP 621), or
+ # by parsing the module (_via_ast), we don't need any extra
+ # dependencies. If not, we'll need to try importing it, so report any
+ # runtime dependencies as build dependencies.
+ want_summary = 'description' in info.dynamic_metadata
+ want_version = 'version' in info.dynamic_metadata
+
+ module = Module(info.module, Path.cwd())
+ docstring, version = get_docstring_and_version_via_ast(module)
+
+ if (want_summary and not docstring) or (want_version and not version):
+ return info.metadata.get('requires_dist', [])
+ else:
+ return []
+
+# Requirements to build an sdist are the same as for a wheel
+get_requires_for_build_sdist = get_requires_for_build_wheel
+
+# Requirements to build an editable are the same as for a wheel
+get_requires_for_build_editable = get_requires_for_build_wheel
+
+def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):
+ """Creates {metadata_directory}/foo-1.2.dist-info"""
+ ini_info = read_flit_config(pyproj_toml)
+ module = Module(ini_info.module, Path.cwd())
+ metadata = make_metadata(module, ini_info)
+
+ dist_info = osp.join(metadata_directory,
+ dist_info_name(metadata.name, metadata.version))
+ os.mkdir(dist_info)
+
+ with io.open(osp.join(dist_info, 'WHEEL'), 'w', encoding='utf-8') as f:
+ _write_wheel_file(f, supports_py2=metadata.supports_py2)
+
+ with io.open(osp.join(dist_info, 'METADATA'), 'w', encoding='utf-8') as f:
+ metadata.write_metadata_file(f)
+
+ if ini_info.entrypoints:
+ with io.open(osp.join(dist_info, 'entry_points.txt'), 'w', encoding='utf-8') as f:
+ write_entry_points(ini_info.entrypoints, f)
+
+ return osp.basename(dist_info)
+
+# Metadata for editable are the same as for a wheel
+prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel
+
+def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+ """Builds a wheel, places it in wheel_directory"""
+ info = make_wheel_in(pyproj_toml, Path(wheel_directory))
+ return info.file.name
+
+def build_editable(wheel_directory, config_settings=None, metadata_directory=None):
+ """Builds an "editable" wheel, places it in wheel_directory"""
+ info = make_wheel_in(pyproj_toml, Path(wheel_directory), editable=True)
+ return info.file.name
+
+def build_sdist(sdist_directory, config_settings=None):
+ """Builds an sdist, places it in sdist_directory"""
+ path = SdistBuilder.from_ini_path(pyproj_toml).build(Path(sdist_directory))
+ return path.name
diff --git a/flit_core/flit_core/common.py b/flit_core/flit_core/common.py
new file mode 100644
index 0000000..68d91bb
--- /dev/null
+++ b/flit_core/flit_core/common.py
@@ -0,0 +1,449 @@
+import ast
+from contextlib import contextmanager
+import hashlib
+import logging
+import os
+import sys
+
+from pathlib import Path
+import re
+
+log = logging.getLogger(__name__)
+
+from .versionno import normalise_version
+
+class Module(object):
+ """This represents the module/package that we are going to distribute
+ """
+ in_namespace_package = False
+ namespace_package_name = None
+
+ def __init__(self, name, directory=Path()):
+ self.name = name
+
+ # It must exist either as a .py file or a directory, but not both
+ name_as_path = name.replace('.', os.sep)
+ pkg_dir = directory / name_as_path
+ py_file = directory / (name_as_path+'.py')
+ src_pkg_dir = directory / 'src' / name_as_path
+ src_py_file = directory / 'src' / (name_as_path+'.py')
+
+ existing = set()
+ if pkg_dir.is_dir():
+ self.path = pkg_dir
+ self.is_package = True
+ self.prefix = ''
+ existing.add(pkg_dir)
+ if py_file.is_file():
+ self.path = py_file
+ self.is_package = False
+ self.prefix = ''
+ existing.add(py_file)
+ if src_pkg_dir.is_dir():
+ self.path = src_pkg_dir
+ self.is_package = True
+ self.prefix = 'src'
+ existing.add(src_pkg_dir)
+ if src_py_file.is_file():
+ self.path = src_py_file
+ self.is_package = False
+ self.prefix = 'src'
+ existing.add(src_py_file)
+
+ if len(existing) > 1:
+ raise ValueError(
+ "Multiple files or folders could be module {}: {}"
+ .format(name, ", ".join([str(p) for p in sorted(existing)]))
+ )
+ elif not existing:
+ raise ValueError("No file/folder found for module {}".format(name))
+
+ self.source_dir = directory / self.prefix
+
+ if '.' in name:
+ self.namespace_package_name = name.rpartition('.')[0]
+ self.in_namespace_package = True
+
+ @property
+ def file(self):
+ if self.is_package:
+ return self.path / '__init__.py'
+ else:
+ return self.path
+
+ def iter_files(self):
+ """Iterate over the files contained in this module.
+
+ Yields absolute paths - caller may want to make them relative.
+ Excludes any __pycache__ and *.pyc files.
+ """
+ def _include(path):
+ name = os.path.basename(path)
+ if (name == '__pycache__') or name.endswith('.pyc'):
+ return False
+ return True
+
+ if self.is_package:
+ # Ensure we sort all files and directories so the order is stable
+ for dirpath, dirs, files in os.walk(str(self.path)):
+ for file in sorted(files):
+ full_path = os.path.join(dirpath, file)
+ if _include(full_path):
+ yield full_path
+
+ dirs[:] = [d for d in sorted(dirs) if _include(d)]
+
+ else:
+ yield str(self.path)
+
+class ProblemInModule(ValueError): pass
+class NoDocstringError(ProblemInModule): pass
+class NoVersionError(ProblemInModule): pass
+class InvalidVersion(ProblemInModule): pass
+
+class VCSError(Exception):
+ def __init__(self, msg, directory):
+ self.msg = msg
+ self.directory = directory
+
+ def __str__(self):
+ return self.msg + ' ({})'.format(self.directory)
+
+
+@contextmanager
+def _module_load_ctx():
+ """Preserve some global state that modules might change at import time.
+
+ - Handlers on the root logger.
+ """
+ logging_handlers = logging.root.handlers[:]
+ try:
+ yield
+ finally:
+ logging.root.handlers = logging_handlers
+
+def get_docstring_and_version_via_ast(target):
+ """
+ Return a tuple like (docstring, version) for the given module,
+ extracted by parsing its AST.
+ """
+ # read as bytes to enable custom encodings
+ with target.file.open('rb') as f:
+ node = ast.parse(f.read())
+ for child in node.body:
+ # Only use the version from the given module if it's a simple
+ # string assignment to __version__
+ is_version_str = (
+ isinstance(child, ast.Assign)
+ and any(
+ isinstance(target, ast.Name)
+ and target.id == "__version__"
+ for target in child.targets
+ )
+ and isinstance(child.value, ast.Str)
+ )
+ if is_version_str:
+ version = child.value.s
+ break
+ else:
+ version = None
+ return ast.get_docstring(node), version
+
+
+# To ensure we're actually loading the specified file, give it a unique name to
+# avoid any cached import. In normal use we'll only load one module per process,
+# so it should only matter for the tests, but we'll do it anyway.
+_import_i = 0
+
+
+def get_docstring_and_version_via_import(target):
+ """
+ Return a tuple like (docstring, version) for the given module,
+ extracted by importing the module and pulling __doc__ & __version__
+ from it.
+ """
+ global _import_i
+ _import_i += 1
+
+ log.debug("Loading module %s", target.file)
+ from importlib.util import spec_from_file_location, module_from_spec
+ mod_name = 'flit_core.dummy.import%d' % _import_i
+ spec = spec_from_file_location(mod_name, target.file)
+ with _module_load_ctx():
+ m = module_from_spec(spec)
+ # Add the module to sys.modules to allow relative imports to work.
+ # importlib has more code around this to handle the case where two
+ # threads are trying to load the same module at the same time, but Flit
+ # should always be running a single thread, so we won't duplicate that.
+ sys.modules[mod_name] = m
+ try:
+ spec.loader.exec_module(m)
+ finally:
+ sys.modules.pop(mod_name, None)
+
+ docstring = m.__dict__.get('__doc__', None)
+ version = m.__dict__.get('__version__', None)
+ return docstring, version
+
+
+def get_info_from_module(target, for_fields=('version', 'description')):
+ """Load the module/package, get its docstring and __version__
+ """
+ if not for_fields:
+ return {}
+
+ # What core metadata calls Summary, PEP 621 calls description
+ want_summary = 'description' in for_fields
+ want_version = 'version' in for_fields
+
+ log.debug("Loading module %s", target.file)
+
+ # Attempt to extract our docstring & version by parsing our target's
+ # AST, falling back to an import if that fails. This allows us to
+ # build without necessarily requiring that our built package's
+ # requirements are installed.
+ docstring, version = get_docstring_and_version_via_ast(target)
+ if (want_summary and not docstring) or (want_version and not version):
+ docstring, version = get_docstring_and_version_via_import(target)
+
+ res = {}
+
+ if want_summary:
+ if (not docstring) or not docstring.strip():
+ raise NoDocstringError(
+ 'Flit cannot package a module without a docstring, or with an empty docstring. '
+ 'Please add a docstring to your module ({}).'.format(target.file)
+ )
+ res['summary'] = docstring.lstrip().splitlines()[0]
+
+ if want_version:
+ res['version'] = check_version(version)
+
+ return res
+
+def check_version(version):
+ """
+ Check whether a given version string matches PEP 440, and normalise it.
+
+ Raise InvalidVersion/NoVersionError with relevant information if
+ version is invalid.
+
+ Log a warning if the version is not canonical with respect to PEP 440.
+
+ Returns the version in canonical PEP 440 format.
+ """
+ if not version:
+ raise NoVersionError('Cannot package module without a version string. '
+ 'Please define a `__version__ = "x.y.z"` in your module.')
+ if not isinstance(version, str):
+ raise InvalidVersion('__version__ must be a string, not {}.'
+ .format(type(version)))
+
+ version = normalise_version(version)
+
+ return version
+
+
+script_template = """\
+#!{interpreter}
+# -*- coding: utf-8 -*-
+import re
+import sys
+from {module} import {import_name}
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\\.pyw|\\.exe)?$', '', sys.argv[0])
+ sys.exit({func}())
+"""
+
+def parse_entry_point(ep):
+ """Check and parse a 'package.module:func' style entry point specification.
+
+ Returns (modulename, funcname)
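+
+ For example (illustrative)::
+
+ parse_entry_point('flit:main') # -> ('flit', 'main')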
+ """
+ if ':' not in ep:
+ raise ValueError("Invalid entry point (no ':'): %r" % ep)
+ mod, func = ep.split(':')
+
+ for piece in func.split('.'):
+ if not piece.isidentifier():
+ raise ValueError("Invalid entry point: %r is not an identifier" % piece)
+ for piece in mod.split('.'):
+ if not piece.isidentifier():
+ raise ValueError("Invalid entry point: %r is not a module path" % piece)
+
+ return mod, func
+
+def write_entry_points(d, fp):
+ """Write entry_points.txt from a two-level dict
+
+ Sorts on keys to ensure results are reproducible.
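+
+ For example (illustrative), {'console_scripts': {'flit': 'flit:main'}}
+ is written out as::
+
+ [console_scripts]
+ flit=flit:main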
+ """
+ for group_name in sorted(d):
+ fp.write(u'[{}]\n'.format(group_name))
+ group = d[group_name]
+ for name in sorted(group):
+ val = group[name]
+ fp.write(u'{}={}\n'.format(name, val))
+ fp.write(u'\n')
+
+def hash_file(path, algorithm='sha256'):
+ with open(path, 'rb') as f:
+ h = hashlib.new(algorithm, f.read())
+ return h.hexdigest()
+
+def normalize_file_permissions(st_mode):
+ """Normalize the permission bits in the st_mode field from stat to 644/755
+
+ Popular VCSs only track whether a file is executable or not. The exact
+ permissions can vary on systems with different umasks. Normalising
+ to 644 (non executable) or 755 (executable) makes builds more reproducible.
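+
+ For example (illustrative)::
+
+ normalize_file_permissions(0o100664) # -> 0o100644
+ normalize_file_permissions(0o100775) # -> 0o100755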
+ """
+ # Set 644 permissions, leaving higher bits of st_mode unchanged
+ new_mode = (st_mode | 0o644) & ~0o133
+ if st_mode & 0o100:
+ new_mode |= 0o111 # Executable: 644 -> 755
+ return new_mode
+
+class Metadata(object):
+
+ summary = None
+ home_page = None
+ author = None
+ author_email = None
+ maintainer = None
+ maintainer_email = None
+ license = None
+ description = None
+ keywords = None
+ download_url = None
+ requires_python = None
+ description_content_type = None
+
+ platform = ()
+ supported_platform = ()
+ classifiers = ()
+ provides = ()
+ requires = ()
+ obsoletes = ()
+ project_urls = ()
+ provides_dist = ()
+ requires_dist = ()
+ obsoletes_dist = ()
+ requires_external = ()
+ provides_extra = ()
+
+ metadata_version = "2.1"
+
+ def __init__(self, data):
+ data = data.copy()
+ self.name = data.pop('name')
+ self.version = data.pop('version')
+
+ for k, v in data.items():
+ assert hasattr(self, k), "data does not have attribute '{}'".format(k)
+ setattr(self, k, v)
+
+ def _normalise_name(self, n):
+ return n.lower().replace('-', '_')
+
+ def write_metadata_file(self, fp):
+ """Write out metadata in the email headers format"""
+ fields = [
+ 'Metadata-Version',
+ 'Name',
+ 'Version',
+ ]
+ optional_fields = [
+ 'Summary',
+ 'Home-page',
+ 'License',
+ 'Keywords',
+ 'Author',
+ 'Author-email',
+ 'Maintainer',
+ 'Maintainer-email',
+ 'Requires-Python',
+ 'Description-Content-Type',
+ ]
+
+ for field in fields:
+ value = getattr(self, self._normalise_name(field))
+ fp.write(u"{}: {}\n".format(field, value))
+
+ for field in optional_fields:
+ value = getattr(self, self._normalise_name(field))
+ if value is not None:
+ # TODO: verify which fields can be multiline
+ # The spec has multiline examples for Author, Maintainer &
+ # License (& Description, but we put that in the body)
+ # Indent following lines with 8 spaces:
+ value = '\n        '.join(value.splitlines())
+ fp.write(u"{}: {}\n".format(field, value))
+
+ for clsfr in self.classifiers:
+ fp.write(u'Classifier: {}\n'.format(clsfr))
+
+ for req in self.requires_dist:
+ fp.write(u'Requires-Dist: {}\n'.format(req))
+
+ for url in self.project_urls:
+ fp.write(u'Project-URL: {}\n'.format(url))
+
+ for extra in self.provides_extra:
+ fp.write(u'Provides-Extra: {}\n'.format(extra))
+
+ if self.description is not None:
+ fp.write(u'\n' + self.description + u'\n')
+
+ @property
+ def supports_py2(self):
+ """Return True if Requires-Python indicates Python 2 support."""
+ for part in (self.requires_python or "").split(","):
+ if re.search(r"^\s*(>=?|~=|===?)?\s*[3-9]", part):
+ return False
+ return True
+
+
+def make_metadata(module, ini_info):
+ md_dict = {'name': module.name, 'provides': [module.name]}
+ md_dict.update(get_info_from_module(module, ini_info.dynamic_metadata))
+ md_dict.update(ini_info.metadata)
+ return Metadata(md_dict)
+
+
+
+def normalize_dist_name(name: str, version: str) -> str:
+ """Normalizes a name and a PEP 440 version
+
+ The resulting string is valid as dist-info folder name
+ and as first part of a wheel filename
+
+ See https://packaging.python.org/specifications/binary-distribution-format/#escaping-and-unicode
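+
+ For example (illustrative)::
+
+ normalize_dist_name('Flit-Core', '3.8.0') # -> 'flit_core-3.8.0'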
+ """
+ normalized_name = re.sub(r'[-_.]+', '_', name, flags=re.UNICODE).lower()
+ assert check_version(version) == version
+ assert '-' not in version, 'Normalized versions can’t have dashes'
+ return '{}-{}'.format(normalized_name, version)
+
+
+def dist_info_name(distribution, version):
+ """Get the correct name of the .dist-info folder"""
+ return normalize_dist_name(distribution, version) + '.dist-info'
+
+
+def walk_data_dir(data_directory):
+ """Iterate over the files in the given data directory.
+
+ Yields paths prefixed with data_directory - caller may want to make them
+ relative to that. Excludes any __pycache__ subdirectories.
+ """
+ if data_directory is None:
+ return
+
+ for dirpath, dirs, files in os.walk(data_directory):
+ for file in sorted(files):
+ full_path = os.path.join(dirpath, file)
+ yield full_path
+
+ dirs[:] = [d for d in sorted(dirs) if d != '__pycache__']
diff --git a/flit_core/flit_core/config.py b/flit_core/flit_core/config.py
new file mode 100644
index 0000000..1292956
--- /dev/null
+++ b/flit_core/flit_core/config.py
@@ -0,0 +1,660 @@
+import difflib
+from email.headerregistry import Address
+import errno
+import logging
+import os
+import os.path as osp
+from pathlib import Path
+import re
+
+try:
+ import tomllib
+except ImportError:
+ try:
+ from .vendor import tomli as tomllib
+ # Some downstream distributors remove the vendored tomli.
+ # When that is removed, import tomli from the regular location.
+ except ImportError:
+ import tomli as tomllib
+
+from .versionno import normalise_version
+
+log = logging.getLogger(__name__)
+
+
+class ConfigError(ValueError):
+ pass
+
+metadata_list_fields = {
+ 'classifiers',
+ 'requires',
+ 'dev-requires'
+}
+
+metadata_allowed_fields = {
+ 'module',
+ 'author',
+ 'author-email',
+ 'maintainer',
+ 'maintainer-email',
+ 'home-page',
+ 'license',
+ 'keywords',
+ 'requires-python',
+ 'dist-name',
+ 'description-file',
+ 'requires-extra',
+} | metadata_list_fields
+
+metadata_required_fields = {
+ 'module',
+ 'author',
+}
+
+pep621_allowed_fields = {
+ 'name',
+ 'version',
+ 'description',
+ 'readme',
+ 'requires-python',
+ 'license',
+ 'authors',
+ 'maintainers',
+ 'keywords',
+ 'classifiers',
+ 'urls',
+ 'scripts',
+ 'gui-scripts',
+ 'entry-points',
+ 'dependencies',
+ 'optional-dependencies',
+ 'dynamic',
+}
+
+
+def read_flit_config(path):
+ """Read and check the `pyproject.toml` file with data about the package.
+ """
+ d = tomllib.loads(path.read_text('utf-8'))
+ return prep_toml_config(d, path)
+
+
+class EntryPointsConflict(ConfigError):
+ def __str__(self):
+ return ('Please specify console_scripts entry points, or [scripts] in '
+ 'flit config, not both.')
+
+def prep_toml_config(d, path):
+ """Validate config loaded from pyproject.toml and prepare common metadata
+
+ Returns a LoadedConfig object.
+ """
+ dtool = d.get('tool', {}).get('flit', {})
+
+ if 'project' in d:
+ # Metadata in [project] table (PEP 621)
+ if 'metadata' in dtool:
+ raise ConfigError(
+ "Use [project] table for metadata or [tool.flit.metadata], not both."
+ )
+ if ('scripts' in dtool) or ('entrypoints' in dtool):
+ raise ConfigError(
+ "Don't mix [project] metadata with [tool.flit.scripts] or "
+ "[tool.flit.entrypoints]. Use [project.scripts],"
+ "[project.gui-scripts] or [project.entry-points] as replacements."
+ )
+ loaded_cfg = read_pep621_metadata(d['project'], path)
+
+ module_tbl = dtool.get('module', {})
+ if 'name' in module_tbl:
+ loaded_cfg.module = module_tbl['name']
+ elif 'metadata' in dtool:
+ # Metadata in [tool.flit.metadata] (pre PEP 621 format)
+ if 'module' in dtool:
+ raise ConfigError(
+ "Use [tool.flit.module] table with new-style [project] metadata, "
+ "not [tool.flit.metadata]"
+ )
+ loaded_cfg = _prep_metadata(dtool['metadata'], path)
+ loaded_cfg.dynamic_metadata = ['version', 'description']
+
+ if 'entrypoints' in dtool:
+ loaded_cfg.entrypoints = flatten_entrypoints(dtool['entrypoints'])
+
+ if 'scripts' in dtool:
+ loaded_cfg.add_scripts(dict(dtool['scripts']))
+ else:
+ raise ConfigError(
+ "Neither [project] nor [tool.flit.metadata] found in pyproject.toml"
+ )
+
+ unknown_sections = set(dtool) - {
+ 'metadata', 'module', 'scripts', 'entrypoints', 'sdist', 'external-data'
+ }
+ unknown_sections = [s for s in unknown_sections if not s.lower().startswith('x-')]
+ if unknown_sections:
+ raise ConfigError('Unexpected tables in pyproject.toml: ' + ', '.join(
+ '[tool.flit.{}]'.format(s) for s in unknown_sections
+ ))
+
+ if 'sdist' in dtool:
+ unknown_keys = set(dtool['sdist']) - {'include', 'exclude'}
+ if unknown_keys:
+ raise ConfigError(
+ "Unknown keys in [tool.flit.sdist]:" + ", ".join(unknown_keys)
+ )
+
+ loaded_cfg.sdist_include_patterns = _check_glob_patterns(
+ dtool['sdist'].get('include', []), 'include'
+ )
+ exclude = [
+ "**/__pycache__",
+ "**.pyc",
+ ] + dtool['sdist'].get('exclude', [])
+ loaded_cfg.sdist_exclude_patterns = _check_glob_patterns(
+ exclude, 'exclude'
+ )
+
+ data_dir = dtool.get('external-data', {}).get('directory', None)
+ if data_dir is not None:
+ toml_key = "tool.flit.external-data.directory"
+ if not isinstance(data_dir, str):
+ raise ConfigError(f"{toml_key} must be a string")
+
+ normp = osp.normpath(data_dir)
+ if osp.isabs(normp):
+ raise ConfigError(f"{toml_key} cannot be an absolute path")
+ if normp.startswith('..' + os.sep):
+ raise ConfigError(
+ f"{toml_key} cannot point outside the directory containing pyproject.toml"
+ )
+ if normp == '.':
+ raise ConfigError(
+ f"{toml_key} cannot refer to the directory containing pyproject.toml"
+ )
+ loaded_cfg.data_directory = path.parent / data_dir
+ if not loaded_cfg.data_directory.is_dir():
+ raise ConfigError(f"{toml_key} must refer to a directory")
+
+ return loaded_cfg
+
+def flatten_entrypoints(ep):
+ """Flatten nested entrypoints dicts.
+
+ Entry points group names can include dots. But dots in TOML make nested
+ dictionaries:
+
+ [entrypoints.a.b] # {'entrypoints': {'a': {'b': {}}}}
+
+ The proper way to avoid this is:
+
+ [entrypoints."a.b"] # {'entrypoints': {'a.b': {}}}
+
+ But since there isn't a need for arbitrarily nested mappings in entrypoints,
+ flit allows you to use the former. This flattens the nested dictionaries
+ from loading pyproject.toml.
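+
+ For example (illustrative)::
+
+ flatten_entrypoints({'a': {'b': {'flit': 'flit:main'}}})
+ # -> {'a.b': {'flit': 'flit:main'}}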
+ """
+ def _flatten(d, prefix):
+ d1 = {}
+ for k, v in d.items():
+ if isinstance(v, dict):
+ for flattened in _flatten(v, prefix+'.'+k):
+ yield flattened
+ else:
+ d1[k] = v
+
+ if d1:
+ yield prefix, d1
+
+ res = {}
+ for k, v in ep.items():
+ res.update(_flatten(v, k))
+ return res
+
+
+def _check_glob_patterns(pats, clude):
+ """Check and normalise glob patterns for sdist include/exclude"""
+ if not isinstance(pats, list):
+ raise ConfigError("sdist {} patterns must be a list".format(clude))
+
+ # Windows filenames can't contain these (nor * or ?, but they are part of
+ # glob patterns) - https://stackoverflow.com/a/31976060/434217
+ bad_chars = re.compile(r'[\000-\037<>:"\\]')
+
+ normed = []
+
+ for p in pats:
+ if bad_chars.search(p):
+ raise ConfigError(
+ '{} pattern {!r} contains bad characters (<>:\"\\ or control characters)'
+ .format(clude, p)
+ )
+
+ normp = osp.normpath(p)
+
+ if osp.isabs(normp):
+ raise ConfigError(
+ '{} pattern {!r} is an absolute path'.format(clude, p)
+ )
+ if normp.startswith('..' + os.sep):
+ raise ConfigError(
+ '{} pattern {!r} points out of the directory containing pyproject.toml'
+ .format(clude, p)
+ )
+ normed.append(normp)
+
+ return normed
+
+
+class LoadedConfig(object):
+ def __init__(self):
+ self.module = None
+ self.metadata = {}
+ self.reqs_by_extra = {}
+ self.entrypoints = {}
+ self.referenced_files = []
+ self.sdist_include_patterns = []
+ self.sdist_exclude_patterns = []
+ self.dynamic_metadata = []
+ self.data_directory = None
+
+ def add_scripts(self, scripts_dict):
+ if scripts_dict:
+ if 'console_scripts' in self.entrypoints:
+ raise EntryPointsConflict
+ else:
+ self.entrypoints['console_scripts'] = scripts_dict
+
+readme_ext_to_content_type = {
+ '.rst': 'text/x-rst',
+ '.md': 'text/markdown',
+ '.txt': 'text/plain',
+}
+
+
+def description_from_file(rel_path: str, proj_dir: Path, guess_mimetype=True):
+ if osp.isabs(rel_path):
+ raise ConfigError("Readme path must be relative")
+
+ desc_path = proj_dir / rel_path
+ try:
+ with desc_path.open('r', encoding='utf-8') as f:
+ raw_desc = f.read()
+ except IOError as e:
+ if e.errno == errno.ENOENT:
+ raise ConfigError(
+ "Description file {} does not exist".format(desc_path)
+ )
+ raise
+
+ if guess_mimetype:
+ ext = desc_path.suffix.lower()
+ try:
+ mimetype = readme_ext_to_content_type[ext]
+ except KeyError:
+ log.warning("Unknown extension %r for description file.", ext)
+ log.warning(" Recognised extensions: %s",
+ " ".join(readme_ext_to_content_type))
+ mimetype = None
+ else:
+ mimetype = None
+
+ return raw_desc, mimetype
+
+
+def _prep_metadata(md_sect, path):
+ """Process & verify the metadata from a config file
+
+ - Pull out the module name we're packaging.
+ - Read description-file and check that it's valid rst
+ - Convert dashes in key names to underscores
+ (e.g. home-page in config -> home_page in metadata)
+ """
+ if not set(md_sect).issuperset(metadata_required_fields):
+ missing = metadata_required_fields - set(md_sect)
+ raise ConfigError("Required fields missing: " + '\n'.join(missing))
+
+ res = LoadedConfig()
+
+ res.module = md_sect.get('module')
+ if not all([m.isidentifier() for m in res.module.split(".")]):
+ raise ConfigError("Module name %r is not a valid identifier" % res.module)
+
+ md_dict = res.metadata
+
+ # Description file
+ if 'description-file' in md_sect:
+ desc_path = md_sect.get('description-file')
+ res.referenced_files.append(desc_path)
+ desc_content, mimetype = description_from_file(desc_path, path.parent)
+ md_dict['description'] = desc_content
+ md_dict['description_content_type'] = mimetype
+
+ if 'urls' in md_sect:
+ project_urls = md_dict['project_urls'] = []
+ for label, url in sorted(md_sect.pop('urls').items()):
+ project_urls.append("{}, {}".format(label, url))
+
+ for key, value in md_sect.items():
+ if key in {'description-file', 'module'}:
+ continue
+ if key not in metadata_allowed_fields:
+ closest = difflib.get_close_matches(key, metadata_allowed_fields,
+ n=1, cutoff=0.7)
+ msg = "Unrecognised metadata key: {!r}".format(key)
+ if closest:
+ msg += " (did you mean {!r}?)".format(closest[0])
+ raise ConfigError(msg)
+
+ k2 = key.replace('-', '_')
+ md_dict[k2] = value
+ if key in metadata_list_fields:
+ if not isinstance(value, list):
+ raise ConfigError('Expected a list for {} field, found {!r}'
+ .format(key, value))
+ if not all(isinstance(a, str) for a in value):
+ raise ConfigError('Expected a list of strings for {} field'
+ .format(key))
+ elif key == 'requires-extra':
+ if not isinstance(value, dict):
+ raise ConfigError('Expected a dict for requires-extra field, found {!r}'
+ .format(value))
+ if not all(isinstance(e, list) for e in value.values()):
+ raise ConfigError('Expected a dict of lists for requires-extra field')
+ for e, reqs in value.items():
+ if not all(isinstance(a, str) for a in reqs):
+ raise ConfigError('Expected a string list for requires-extra. (extra {})'
+ .format(e))
+ else:
+ if not isinstance(value, str):
+ raise ConfigError('Expected a string for {} field, found {!r}'
+ .format(key, value))
+
+ # What we call requires in the ini file is technically requires_dist in
+ # the metadata.
+ if 'requires' in md_dict:
+ md_dict['requires_dist'] = md_dict.pop('requires')
+
+ # And what we call dist-name is name in the metadata
+ if 'dist_name' in md_dict:
+ md_dict['name'] = md_dict.pop('dist_name')
+
+ # Move dev-requires into requires-extra
+ reqs_noextra = md_dict.pop('requires_dist', [])
+ res.reqs_by_extra = md_dict.pop('requires_extra', {})
+ dev_requires = md_dict.pop('dev_requires', None)
+ if dev_requires is not None:
+ if 'dev' in res.reqs_by_extra:
+ raise ConfigError(
+ 'dev-requires occurs together with its replacement requires-extra.dev.')
+ else:
+ log.warning(
+ '"dev-requires = ..." is obsolete. Use "requires-extra = {"dev" = ...}" instead.')
+ res.reqs_by_extra['dev'] = dev_requires
+
+ # Add requires-extra requirements into requires_dist
+ md_dict['requires_dist'] = \
+ reqs_noextra + list(_expand_requires_extra(res.reqs_by_extra))
+
+ md_dict['provides_extra'] = sorted(res.reqs_by_extra.keys())
+
+ # For internal use, record the main requirements as a '.none' extra.
+ res.reqs_by_extra['.none'] = reqs_noextra
+
+ return res
+
+def _expand_requires_extra(re):
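+ # Illustrative example: {'test': ['pytest']} expands to
+ # 'pytest ; extra == "test"'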
+ for extra, reqs in sorted(re.items()):
+ for req in reqs:
+ if ';' in req:
+ name, envmark = req.split(';', 1)
+ yield '{} ; extra == "{}" and ({})'.format(name, extra, envmark)
+ else:
+ yield '{} ; extra == "{}"'.format(req, extra)
+
+
+def _check_type(d, field_name, cls):
+ if not isinstance(d[field_name], cls):
+ raise ConfigError(
+ "{} field should be {}, not {}".format(field_name, cls, type(d[field_name]))
+ )
+
+def _check_list_of_str(d, field_name):
+ if not isinstance(d[field_name], list) or not all(
+ isinstance(e, str) for e in d[field_name]
+ ):
+ raise ConfigError(
+ "{} field should be a list of strings".format(field_name)
+ )
+
+def read_pep621_metadata(proj, path) -> LoadedConfig:
+ lc = LoadedConfig()
+ md_dict = lc.metadata
+
+ if 'name' not in proj:
+ raise ConfigError('name must be specified in [project] table')
+ _check_type(proj, 'name', str)
+ md_dict['name'] = proj['name']
+ lc.module = md_dict['name'].replace('-', '_')
+
+ unexpected_keys = proj.keys() - pep621_allowed_fields
+ if unexpected_keys:
+ log.warning("Unexpected names under [project]: %s", ', '.join(unexpected_keys))
+
+ if 'version' in proj:
+ _check_type(proj, 'version', str)
+ md_dict['version'] = normalise_version(proj['version'])
+ if 'description' in proj:
+ _check_type(proj, 'description', str)
+ md_dict['summary'] = proj['description']
+ if 'readme' in proj:
+ readme = proj['readme']
+ if isinstance(readme, str):
+ lc.referenced_files.append(readme)
+ desc_content, mimetype = description_from_file(readme, path.parent)
+
+ elif isinstance(readme, dict):
+ unrec_keys = set(readme.keys()) - {'text', 'file', 'content-type'}
+ if unrec_keys:
+ raise ConfigError(
+ "Unrecognised keys in [project.readme]: {}".format(unrec_keys)
+ )
+ if 'content-type' in readme:
+ mimetype = readme['content-type']
+ mtype_base = mimetype.split(';')[0].strip() # e.g. text/x-rst
+ if mtype_base not in readme_ext_to_content_type.values():
+ raise ConfigError(
+ "Unrecognised readme content-type: {!r}".format(mtype_base)
+ )
+ # TODO: validate content-type parameters (charset, md variant)?
+ else:
+ raise ConfigError(
+ "content-type field required in [project.readme] table"
+ )
+ if 'file' in readme:
+ if 'text' in readme:
+ raise ConfigError(
+ "[project.readme] should specify file or text, not both"
+ )
+ lc.referenced_files.append(readme['file'])
+ desc_content, _ = description_from_file(
+ readme['file'], path.parent, guess_mimetype=False
+ )
+ elif 'text' in readme:
+ desc_content = readme['text']
+ else:
+ raise ConfigError(
+ "file or text field required in [project.readme] table"
+ )
+ else:
+ raise ConfigError(
+ "project.readme should be a string or a table"
+ )
+
+ md_dict['description'] = desc_content
+ md_dict['description_content_type'] = mimetype
+
+ if 'requires-python' in proj:
+ md_dict['requires_python'] = proj['requires-python']
+
+ if 'license' in proj:
+ _check_type(proj, 'license', dict)
+ license_tbl = proj['license']
+ unrec_keys = set(license_tbl.keys()) - {'text', 'file'}
+ if unrec_keys:
+ raise ConfigError(
+ "Unrecognised keys in [project.license]: {}".format(unrec_keys)
+ )
+
+ # TODO: Do something with license info.
+ # The 'License' field in packaging metadata is a brief description of
+ # a license, not the full text or a file path. PEP 639 will improve on
+ # how licenses are recorded.
+ if 'file' in license_tbl:
+ if 'text' in license_tbl:
+ raise ConfigError(
+ "[project.license] should specify file or text, not both"
+ )
+ lc.referenced_files.append(license_tbl['file'])
+ elif 'text' in license_tbl:
+ pass
+ else:
+ raise ConfigError(
+ "file or text field required in [project.license] table"
+ )
+
+ if 'authors' in proj:
+ _check_type(proj, 'authors', list)
+ md_dict.update(pep621_people(proj['authors']))
+
+ if 'maintainers' in proj:
+ _check_type(proj, 'maintainers', list)
+ md_dict.update(pep621_people(proj['maintainers'], group_name='maintainer'))
+
+ if 'keywords' in proj:
+ _check_list_of_str(proj, 'keywords')
+ md_dict['keywords'] = ",".join(proj['keywords'])
+
+ if 'classifiers' in proj:
+ _check_list_of_str(proj, 'classifiers')
+ md_dict['classifiers'] = proj['classifiers']
+
+ if 'urls' in proj:
+ _check_type(proj, 'urls', dict)
+ project_urls = md_dict['project_urls'] = []
+ for label, url in sorted(proj['urls'].items()):
+ project_urls.append("{}, {}".format(label, url))
+
+ if 'entry-points' in proj:
+ _check_type(proj, 'entry-points', dict)
+ for grp in proj['entry-points'].values():
+ if not isinstance(grp, dict):
+ raise ConfigError(
+ "projects.entry-points should only contain sub-tables"
+ )
+ if not all(isinstance(k, str) for k in grp.values()):
+ raise ConfigError(
+ "[projects.entry-points.*] tables should have string values"
+ )
+ if set(proj['entry-points'].keys()) & {'console_scripts', 'gui_scripts'}:
+ raise ConfigError(
+ "Scripts should be specified in [project.scripts] or "
+ "[project.gui-scripts], not under [project.entry-points]"
+ )
+ lc.entrypoints = proj['entry-points']
+
+ if 'scripts' in proj:
+ _check_type(proj, 'scripts', dict)
+ if not all(isinstance(k, str) for k in proj['scripts'].values()):
+ raise ConfigError(
+ "[projects.scripts] table should have string values"
+ )
+ lc.entrypoints['console_scripts'] = proj['scripts']
+
+ if 'gui-scripts' in proj:
+ _check_type(proj, 'gui-scripts', dict)
+ if not all(isinstance(k, str) for k in proj['gui-scripts'].values()):
+ raise ConfigError(
+ "[projects.gui-scripts] table should have string values"
+ )
+ lc.entrypoints['gui_scripts'] = proj['gui-scripts']
+
+ if 'dependencies' in proj:
+ _check_list_of_str(proj, 'dependencies')
+ reqs_noextra = proj['dependencies']
+ else:
+ reqs_noextra = []
+
+ if 'optional-dependencies' in proj:
+ _check_type(proj, 'optional-dependencies', dict)
+ optdeps = proj['optional-dependencies']
+ if not all(isinstance(e, list) for e in optdeps.values()):
+ raise ConfigError(
+ 'Expected a dict of lists in optional-dependencies field'
+ )
+ for e, reqs in optdeps.items():
+ if not all(isinstance(a, str) for a in reqs):
+ raise ConfigError(
+ 'Expected a string list for optional-dependencies ({})'.format(e)
+ )
+
+ lc.reqs_by_extra = optdeps.copy()
+ md_dict['provides_extra'] = sorted(lc.reqs_by_extra.keys())
+
+ md_dict['requires_dist'] = \
+ reqs_noextra + list(_expand_requires_extra(lc.reqs_by_extra))
+
+ # For internal use, record the main requirements as a '.none' extra.
+ if reqs_noextra:
+ lc.reqs_by_extra['.none'] = reqs_noextra
+
+ if 'dynamic' in proj:
+ _check_list_of_str(proj, 'dynamic')
+ dynamic = set(proj['dynamic'])
+ unrec_dynamic = dynamic - {'version', 'description'}
+ if unrec_dynamic:
+ raise ConfigError(
+ "flit only supports dynamic metadata for 'version' & 'description'"
+ )
+ if dynamic.intersection(proj):
+ raise ConfigError(
+ "keys listed in project.dynamic must not be in [project] table"
+ )
+ lc.dynamic_metadata = dynamic
+
+ if ('version' not in proj) and ('version' not in lc.dynamic_metadata):
+ raise ConfigError(
+ "version must be specified under [project] or listed as a dynamic field"
+ )
+ if ('description' not in proj) and ('description' not in lc.dynamic_metadata):
+ raise ConfigError(
+ "description must be specified under [project] or listed as a dynamic field"
+ )
+
+ return lc
+
+def pep621_people(people, group_name='author') -> dict:
+ """Convert authors/maintainers from PEP 621 to core metadata fields"""
+ names, emails = [], []
+ for person in people:
+ if not isinstance(person, dict):
+ raise ConfigError("{} info must be list of dicts".format(group_name))
+ unrec_keys = set(person.keys()) - {'name', 'email'}
+ if unrec_keys:
+ raise ConfigError(
+ "Unrecognised keys in {} info: {}".format(group_name, unrec_keys)
+ )
+ if 'email' in person:
+ email = person['email']
+ if 'name' in person:
+ email = str(Address(person['name'], addr_spec=email))
+ emails.append(email)
+ elif 'name' in person:
+ names.append(person['name'])
+
+ res = {}
+ if names:
+ res[group_name] = ", ".join(names)
+ if emails:
+ res[group_name + '_email'] = ", ".join(emails)
+ return res
diff --git a/flit_core/flit_core/sdist.py b/flit_core/flit_core/sdist.py
new file mode 100644
index 0000000..f41d177
--- /dev/null
+++ b/flit_core/flit_core/sdist.py
@@ -0,0 +1,202 @@
+from collections import defaultdict
+from copy import copy
+from glob import glob
+from gzip import GzipFile
+import io
+import logging
+import os
+import os.path as osp
+from pathlib import Path
+from posixpath import join as pjoin
+import tarfile
+
+from . import common
+
+log = logging.getLogger(__name__)
+
+
+def clean_tarinfo(ti, mtime=None):
+ """Clean metadata from a TarInfo object to make it more reproducible.
+
+ - Set uid & gid to 0
+ - Set uname and gname to ""
+ - Normalise permissions to 644 or 755
+ - Set mtime if not None
+ """
+ ti = copy(ti)
+ ti.uid = 0
+ ti.gid = 0
+ ti.uname = ''
+ ti.gname = ''
+ ti.mode = common.normalize_file_permissions(ti.mode)
+ if mtime is not None:
+ ti.mtime = mtime
+ return ti
+
+
+class FilePatterns:
+ """Manage a set of file inclusion/exclusion patterns relative to basedir"""
+ def __init__(self, patterns, basedir):
+ self.basedir = basedir
+
+ self.dirs = set()
+ self.files = set()
+
+ for pattern in patterns:
+ for path in sorted(glob(osp.join(basedir, pattern), recursive=True)):
+ rel = osp.relpath(path, basedir)
+ if osp.isdir(path):
+ self.dirs.add(rel)
+ else:
+ self.files.add(rel)
+
+ def match_file(self, rel_path):
+ if rel_path in self.files:
+ return True
+
+ return any(rel_path.startswith(d + os.sep) for d in self.dirs)
+
+ def match_dir(self, rel_path):
+ if rel_path in self.dirs:
+ return True
+
+ # Check if it's a subdirectory of any directory in the list
+ return any(rel_path.startswith(d + os.sep) for d in self.dirs)
+
+
+class SdistBuilder:
+ """Builds a minimal sdist
+
+ These minimal sdists should work for PEP 517.
+ The class is extended in flit.sdist to make a more 'full fat' sdist,
+ which is what should normally be published to PyPI.
+ """
+ def __init__(self, module, metadata, cfgdir, reqs_by_extra, entrypoints,
+ extra_files, data_directory, include_patterns=(), exclude_patterns=()):
+ self.module = module
+ self.metadata = metadata
+ self.cfgdir = cfgdir
+ self.reqs_by_extra = reqs_by_extra
+ self.entrypoints = entrypoints
+ self.extra_files = extra_files
+ self.data_directory = data_directory
+ self.includes = FilePatterns(include_patterns, str(cfgdir))
+ self.excludes = FilePatterns(exclude_patterns, str(cfgdir))
+
+ @classmethod
+ def from_ini_path(cls, ini_path: Path):
+ # Local import so bootstrapping doesn't try to load toml
+ from .config import read_flit_config
+ ini_info = read_flit_config(ini_path)
+ srcdir = ini_path.parent
+ module = common.Module(ini_info.module, srcdir)
+ metadata = common.make_metadata(module, ini_info)
+ extra_files = [ini_path.name] + ini_info.referenced_files
+ return cls(
+ module, metadata, srcdir, ini_info.reqs_by_extra,
+ ini_info.entrypoints, extra_files, ini_info.data_directory,
+ ini_info.sdist_include_patterns, ini_info.sdist_exclude_patterns,
+ )
+
+ def prep_entry_points(self):
+ # Reformat entry points from dict-of-dicts to dict-of-lists
+ res = defaultdict(list)
+ for groupname, group in self.entrypoints.items():
+ for name, ep in sorted(group.items()):
+ res[groupname].append('{} = {}'.format(name, ep))
+
+ return dict(res)
+
+ def select_files(self):
+ """Pick which files from the source tree will be included in the sdist
+
+ This is overridden in flit itself to use information from a VCS to
+ include tests, docs, etc. for a 'gold standard' sdist.
+ """
+ cfgdir_s = str(self.cfgdir)
+ return [
+ osp.relpath(p, cfgdir_s) for p in self.module.iter_files()
+ ] + [
+ osp.relpath(p, cfgdir_s) for p in common.walk_data_dir(self.data_directory)
+ ] + self.extra_files
+
+ def apply_includes_excludes(self, files):
+ cfgdir_s = str(self.cfgdir)
+ files = {f for f in files if not self.excludes.match_file(f)}
+
+ for f_rel in self.includes.files:
+ if not self.excludes.match_file(f_rel):
+ files.add(f_rel)
+
+ for rel_d in self.includes.dirs:
+ for dirpath, dirs, dfiles in os.walk(osp.join(cfgdir_s, rel_d)):
+ for file in dfiles:
+ f_abs = osp.join(dirpath, file)
+ f_rel = osp.relpath(f_abs, cfgdir_s)
+ if not self.excludes.match_file(f_rel):
+ files.add(f_rel)
+
+ # Filter subdirectories before os.walk scans them
+ dirs[:] = [d for d in dirs if not self.excludes.match_dir(
+ osp.relpath(osp.join(dirpath, d), cfgdir_s)
+ )]
+
+ crucial_files = set(
+ self.extra_files + [str(self.module.file.relative_to(self.cfgdir))]
+ )
+ missing_crucial = crucial_files - files
+ if missing_crucial:
+ raise Exception("Crucial files were excluded from the sdist: {}"
+ .format(", ".join(missing_crucial)))
+
+ return sorted(files)
+
+ def add_setup_py(self, files_to_add, target_tarfile):
+ """No-op here; overridden in flit to generate setup.py"""
+ pass
+
+ @property
+ def dir_name(self):
+ return '{}-{}'.format(self.metadata.name, self.metadata.version)
+
+ def build(self, target_dir, gen_setup_py=True):
+ os.makedirs(str(target_dir), exist_ok=True)
+ target = target_dir / '{}-{}.tar.gz'.format(
+ self.metadata.name, self.metadata.version
+ )
+ source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH', '')
+ mtime = int(source_date_epoch) if source_date_epoch else None
+ gz = GzipFile(str(target), mode='wb', mtime=mtime)
+ tf = tarfile.TarFile(str(target), mode='w', fileobj=gz,
+ format=tarfile.PAX_FORMAT)
+
+ try:
+ files_to_add = self.apply_includes_excludes(self.select_files())
+
+ for relpath in files_to_add:
+ path = str(self.cfgdir / relpath)
+ ti = tf.gettarinfo(path, arcname=pjoin(self.dir_name, relpath))
+ ti = clean_tarinfo(ti, mtime)
+
+ if ti.isreg():
+ with open(path, 'rb') as f:
+ tf.addfile(ti, f)
+ else:
+ tf.addfile(ti) # Symlinks and other non-regular entries have no file data to add
+
+ if gen_setup_py:
+ self.add_setup_py(files_to_add, tf)
+
+ stream = io.StringIO()
+ self.metadata.write_metadata_file(stream)
+ pkg_info = stream.getvalue().encode()
+ ti = tarfile.TarInfo(pjoin(self.dir_name, 'PKG-INFO'))
+ ti.size = len(pkg_info)
+ tf.addfile(ti, io.BytesIO(pkg_info))
+
+ finally:
+ tf.close()
+ gz.close()
+
+ log.info("Built sdist: %s", target)
+ return target
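A minimal sketch (not part of the diff) of driving the SdistBuilder added above directly, assuming a project directory containing a pyproject.toml and an output directory named dist/; in practice this is normally invoked through flit_core.buildapi.build_sdist:

```python
# Hypothetical usage sketch; 'pyproject.toml' and 'dist/' are example paths.
from pathlib import Path
from flit_core.sdist import SdistBuilder

builder = SdistBuilder.from_ini_path(Path('pyproject.toml'))
# build() honours SOURCE_DATE_EPOCH for reproducible mtimes and returns the
# path of the generated <name>-<version>.tar.gz inside the target directory.
sdist_path = builder.build(Path('dist'))
print(sdist_path)
```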
diff --git a/flit_core/flit_core/tests/__init__.py b/flit_core/flit_core/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/__init__.py
diff --git a/flit_core/flit_core/tests/samples/EG_README.rst b/flit_core/flit_core/tests/samples/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/flit_core/flit_core/tests/samples/bad-description-ext.toml b/flit_core/flit_core/tests/samples/bad-description-ext.toml
new file mode 100644
index 0000000..1062829
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/bad-description-ext.toml
@@ -0,0 +1,9 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "module1.py" # WRONG
diff --git a/flit_core/flit_core/tests/samples/conflicting_modules/module1.py b/flit_core/flit_core/tests/samples/conflicting_modules/module1.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/conflicting_modules/module1.py
diff --git a/flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml b/flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml
new file mode 100644
index 0000000..a38df52
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/conflicting_modules/pyproject.toml
@@ -0,0 +1,8 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
diff --git a/flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py b/flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/conflicting_modules/src/module1.py
diff --git a/flit_core/flit_core/tests/samples/constructed_version/module1.py b/flit_core/flit_core/tests/samples/constructed_version/module1.py
new file mode 100644
index 0000000..5d9ec93
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/constructed_version/module1.py
@@ -0,0 +1,4 @@
+
+"""This module has a __version__ that requires runtime interpretation"""
+
+__version__ = ".".join(["1", "2", "3"])
diff --git a/flit_core/flit_core/tests/samples/constructed_version/pyproject.toml b/flit_core/flit_core/tests/samples/constructed_version/pyproject.toml
new file mode 100644
index 0000000..812b74f
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/constructed_version/pyproject.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = [
+ "numpy >=1.16.0",
+]
diff --git a/flit_core/flit_core/tests/samples/extras-dev-conflict.toml b/flit_core/flit_core/tests/samples/extras-dev-conflict.toml
new file mode 100644
index 0000000..0fe249d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/extras-dev-conflict.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+dev-requires = ["apackage"]
+
+[tool.flit.metadata.requires-extra]
+dev = ["anotherpackage"]
diff --git a/flit_core/flit_core/tests/samples/extras.toml b/flit_core/flit_core/tests/samples/extras.toml
new file mode 100644
index 0000000..afdb221
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/extras.toml
@@ -0,0 +1,15 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+requires = ["toml"]
+
+[tool.flit.metadata.requires-extra]
+test = ["pytest"]
+custom = ["requests"]
diff --git a/flit_core/flit_core/tests/samples/imported_version/package1/__init__.py b/flit_core/flit_core/tests/samples/imported_version/package1/__init__.py
new file mode 100644
index 0000000..49adc42
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/imported_version/package1/__init__.py
@@ -0,0 +1,3 @@
+"""This module has a __version__ that requires a relative import"""
+
+from ._version import __version__
diff --git a/flit_core/flit_core/tests/samples/imported_version/package1/_version.py b/flit_core/flit_core/tests/samples/imported_version/package1/_version.py
new file mode 100644
index 0000000..91201fc
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/imported_version/package1/_version.py
@@ -0,0 +1 @@
+__version__ = '0.5.8'
diff --git a/flit_core/flit_core/tests/samples/imported_version/pyproject.toml b/flit_core/flit_core/tests/samples/imported_version/pyproject.toml
new file mode 100644
index 0000000..b6d44e1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/imported_version/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "package1"
+authors = [
+ {name = "Sir Röbin", email = "robin@camelot.uk"}
+]
+dynamic = ["version", "description"]
diff --git a/flit_core/flit_core/tests/samples/inclusion/LICENSES/README b/flit_core/flit_core/tests/samples/inclusion/LICENSES/README
new file mode 100644
index 0000000..63de856
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/LICENSES/README
@@ -0,0 +1,2 @@
+This directory will match the LICENSE* glob which Flit uses to add license
+files to wheel metadata.
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/subsubdir/test.md
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt
new file mode 100644
index 0000000..5f852b1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/subdir/test.txt
@@ -0,0 +1 @@
+sdists should include this (see pyproject.toml)
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/test.rst b/flit_core/flit_core/tests/samples/inclusion/doc/test.rst
new file mode 100644
index 0000000..5f852b1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/test.rst
@@ -0,0 +1 @@
+sdists should include this (see pyproject.toml)
diff --git a/flit_core/flit_core/tests/samples/inclusion/doc/test.txt b/flit_core/flit_core/tests/samples/inclusion/doc/test.txt
new file mode 100644
index 0000000..31dc06a
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/doc/test.txt
@@ -0,0 +1 @@
+sdists should exclude this (see pyproject.toml)
diff --git a/flit_core/flit_core/tests/samples/inclusion/module1.py b/flit_core/flit_core/tests/samples/inclusion/module1.py
new file mode 100644
index 0000000..7e0d3cb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/module1.py
@@ -0,0 +1,3 @@
+"""For tests"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/inclusion/pyproject.toml b/flit_core/flit_core/tests/samples/inclusion/pyproject.toml
new file mode 100644
index 0000000..c37d44d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/inclusion/pyproject.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+build-backend = "flit.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+
+[tool.flit.sdist]
+include = ["doc"]
+exclude = ["doc/*.txt", "doc/**/*.md"]
diff --git a/flit_core/flit_core/tests/samples/invalid_version1.py b/flit_core/flit_core/tests/samples/invalid_version1.py
new file mode 100644
index 0000000..dd3268a
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/invalid_version1.py
@@ -0,0 +1,3 @@
+"""Sample module with invalid __version__ string"""
+
+__version__ = "not starting with a number" \ No newline at end of file
diff --git a/flit_core/flit_core/tests/samples/missing-description-file.toml b/flit_core/flit_core/tests/samples/missing-description-file.toml
new file mode 100644
index 0000000..00fae72
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/missing-description-file.toml
@@ -0,0 +1,9 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "missingdescriptionfile"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/missingdescriptionfile"
+description-file = "definitely-missing.rst"
diff --git a/flit_core/flit_core/tests/samples/misspelled-key.toml b/flit_core/flit_core/tests/samples/misspelled-key.toml
new file mode 100644
index 0000000..cbde9ac
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/misspelled-key.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+descryption-file = "my-description.rst" # Deliberate typo for test
+home-page = "http://github.com/sirrobin/package1"
diff --git a/flit_core/flit_core/tests/samples/module1-pkg.ini b/flit_core/flit_core/tests/samples/module1-pkg.ini
new file mode 100644
index 0000000..9bbfc4e
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module1-pkg.ini
@@ -0,0 +1,5 @@
+[metadata]
+module=module1
+author=Sir Robin
+author-email=robin@camelot.uk
+home-page=http://github.com/sirrobin/module1
diff --git a/flit_core/flit_core/tests/samples/module1-pkg.toml b/flit_core/flit_core/tests/samples/module1-pkg.toml
new file mode 100644
index 0000000..740ec87
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module1-pkg.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.urls]
+Documentation = "https://example.com/module1"
diff --git a/flit_core/flit_core/tests/samples/module1.py b/flit_core/flit_core/tests/samples/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/module2.py b/flit_core/flit_core/tests/samples/module2.py
new file mode 100644
index 0000000..0f36679
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/module2.py
@@ -0,0 +1,10 @@
+"""
+Docstring formatted like this.
+"""
+
+a = {}
+# An assignment to a subscript (a['test']) broke introspection
+# https://github.com/pypa/flit/issues/343
+a['test'] = 6
+
+__version__ = '7.0'
diff --git a/flit_core/flit_core/tests/samples/moduleunimportable.py b/flit_core/flit_core/tests/samples/moduleunimportable.py
new file mode 100644
index 0000000..147d26e
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/moduleunimportable.py
@@ -0,0 +1,8 @@
+
+"""
+A sample unimportable module
+"""
+
+raise ImportError()
+
+__version__ = "0.1"
diff --git a/flit_core/flit_core/tests/samples/moduleunimportabledouble.py b/flit_core/flit_core/tests/samples/moduleunimportabledouble.py
new file mode 100644
index 0000000..42d51f3
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/moduleunimportabledouble.py
@@ -0,0 +1,8 @@
+
+"""
+A sample unimportable module with double assignment
+"""
+
+raise ImportError()
+
+VERSION = __version__ = "0.1"
diff --git a/flit_core/flit_core/tests/samples/my-description.rst b/flit_core/flit_core/tests/samples/my-description.rst
new file mode 100644
index 0000000..623cb1d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/my-description.rst
@@ -0,0 +1 @@
+Sample description for test.
diff --git a/flit_core/flit_core/tests/samples/no_docstring-pkg.toml b/flit_core/flit_core/tests/samples/no_docstring-pkg.toml
new file mode 100644
index 0000000..b68827f
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/no_docstring-pkg.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "no_docstring"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/no_docstring"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.urls]
+Documentation = "https://example.com/no_docstring"
diff --git a/flit_core/flit_core/tests/samples/no_docstring.py b/flit_core/flit_core/tests/samples/no_docstring.py
new file mode 100644
index 0000000..29524eb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/no_docstring.py
@@ -0,0 +1 @@
+__version__ = '7.0'
diff --git a/flit_core/flit_core/tests/samples/normalization/my_python_module.py b/flit_core/flit_core/tests/samples/normalization/my_python_module.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/normalization/my_python_module.py
diff --git a/flit_core/flit_core/tests/samples/normalization/pyproject.toml b/flit_core/flit_core/tests/samples/normalization/pyproject.toml
new file mode 100644
index 0000000..c32e4a1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/normalization/pyproject.toml
@@ -0,0 +1,14 @@
+[build-system]
+requires = ["flit_core >=3.8,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "my-python-module"
+version = "0.0.1"
+description = "Hyphenated package name, infered import name"
+authors = [
+ {name = "Sir Robin", email = "robin@camelot.uk"}
+]
+
+[project.urls]
+homepage = "http://github.com/me/python-module"
diff --git a/flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst b/flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/ns1-pkg/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py b/flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py
new file mode 100644
index 0000000..445afbb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/ns1-pkg/ns1/pkg/__init__.py
@@ -0,0 +1,8 @@
+"""
+==================
+ns1.pkg
+==================
+"""
+
+__version__ = '0.1'
+
diff --git a/flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml b/flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml
new file mode 100644
index 0000000..acbabb1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/ns1-pkg/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=3.5,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "ns1.pkg"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
diff --git a/flit_core/flit_core/tests/samples/package1.toml b/flit_core/flit_core/tests/samples/package1.toml
new file mode 100644
index 0000000..ca12080
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+description-file = "my-description.rst"
+home-page = "http://github.com/sirrobin/package1"
+
+[scripts]
+pkg_script = "package1:main"
diff --git a/flit_core/flit_core/tests/samples/package1/__init__.py b/flit_core/flit_core/tests/samples/package1/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/flit_core/flit_core/tests/samples/package1/data_dir/foo.sh b/flit_core/flit_core/tests/samples/package1/data_dir/foo.sh
new file mode 100644
index 0000000..92abcfb
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/data_dir/foo.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo "Example data file"
diff --git a/flit_core/flit_core/tests/samples/package1/foo.py b/flit_core/flit_core/tests/samples/package1/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/flit_core/flit_core/tests/samples/package1/subpkg/__init__.py b/flit_core/flit_core/tests/samples/package1/subpkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/subpkg/__init__.py
diff --git a/flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json b/flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json
new file mode 100644
index 0000000..f77d03c
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/subpkg/sp_data_dir/test.json
@@ -0,0 +1 @@
+{"example": true}
diff --git a/flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py b/flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/package1/subpkg2/__init__.py
diff --git a/flit_core/flit_core/tests/samples/pep517/LICENSE b/flit_core/flit_core/tests/samples/pep517/LICENSE
new file mode 100644
index 0000000..7f5c194
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/LICENSE
@@ -0,0 +1 @@
+This file should be added to wheels
diff --git a/flit_core/flit_core/tests/samples/pep517/README.rst b/flit_core/flit_core/tests/samples/pep517/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/pep517/module1.py b/flit_core/flit_core/tests/samples/pep517/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/pep517/pyproject.toml b/flit_core/flit_core/tests/samples/pep517/pyproject.toml
new file mode 100644
index 0000000..b6cebac
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep517/pyproject.toml
@@ -0,0 +1,17 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "README.rst"
+requires = [
+ "requests >= 2.18",
+ "docutils",
+]
+
+[tool.flit.entrypoints.flit_test_example]
+foo = "module1:main"
diff --git a/flit_core/flit_core/tests/samples/pep621/LICENSE b/flit_core/flit_core/tests/samples/pep621/LICENSE
new file mode 100644
index 0000000..7f5c194
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/LICENSE
@@ -0,0 +1 @@
+This file should be added to wheels
diff --git a/flit_core/flit_core/tests/samples/pep621/README.rst b/flit_core/flit_core/tests/samples/pep621/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/pep621/module1a.py b/flit_core/flit_core/tests/samples/pep621/module1a.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/module1a.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/pep621/pyproject.toml b/flit_core/flit_core/tests/samples/pep621/pyproject.toml
new file mode 100644
index 0000000..72a85d0
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621/pyproject.toml
@@ -0,0 +1,39 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "module1"
+authors = [
+ {name = "Sir Röbin", email = "robin@camelot.uk"}
+]
+maintainers = [
+ {name = "Sir Galahad"}
+]
+readme = "README.rst"
+license = {file = "LICENSE"}
+requires-python = ">=3.7"
+dependencies = [
+ "requests >= 2.18",
+ "docutils",
+]
+keywords = ["example", "test"]
+dynamic = [
+ "version",
+ "description",
+]
+
+[project.optional-dependencies]
+test = [
+ "pytest",
+ "mock; python_version<'3.6'"
+]
+
+[project.urls]
+homepage = "http://github.com/sirrobin/module1"
+
+[project.entry-points.flit_test_example]
+foo = "module1:main"
+
+[tool.flit.module]
+name = "module1a"
diff --git a/flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst b/flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621_nodynamic/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py b/flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621_nodynamic/module1.py
diff --git a/flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml b/flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml
new file mode 100644
index 0000000..0b579f3
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/pep621_nodynamic/pyproject.toml
@@ -0,0 +1,28 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "module1"
+version = "0.03"
+description = "Statically specified description"
+authors = [
+ {name = "Sir Robin", email = "robin@camelot.uk"}
+]
+readme = {file = "README.rst", content-type = "text/x-rst"}
+classifiers = [
+ "Topic :: Internet :: WWW/HTTP",
+]
+dependencies = [
+ "requests >= 2.18",
+ "docutils",
+] # N.B. Using this to check behaviour with dependencies but no optional deps
+
+[project.urls]
+homepage = "http://github.com/sirrobin/module1"
+
+[project.scripts]
+foo = "module1:main"
+
+[project.gui-scripts]
+foo-gui = "module1:main"
diff --git a/flit_core/flit_core/tests/samples/requires-dev.toml b/flit_core/flit_core/tests/samples/requires-dev.toml
new file mode 100644
index 0000000..46e3170
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-dev.toml
@@ -0,0 +1,11 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+# This should generate a warning telling you to use requires-extra.dev
+dev-requires = ["apackage"]
diff --git a/flit_core/flit_core/tests/samples/requires-envmark.toml b/flit_core/flit_core/tests/samples/requires-envmark.toml
new file mode 100644
index 0000000..e97c5f0
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-envmark.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = [
+ "requests",
+ "pathlib2; python_version == '2.7'",
+]
diff --git a/flit_core/flit_core/tests/samples/requires-extra-envmark.toml b/flit_core/flit_core/tests/samples/requires-extra-envmark.toml
new file mode 100644
index 0000000..ac49cb0
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-extra-envmark.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.requires-extra]
+test = ["pathlib2; python_version == \"2.7\""]
diff --git a/flit_core/flit_core/tests/samples/requires-requests.toml b/flit_core/flit_core/tests/samples/requires-requests.toml
new file mode 100644
index 0000000..bf26ac5
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/requires-requests.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+requires = ["requests"]
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/LICENSE b/flit_core/flit_core/tests/samples/with_data_dir/LICENSE
new file mode 100644
index 0000000..7f5c194
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/LICENSE
@@ -0,0 +1 @@
+This file should be added to wheels
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/README.rst b/flit_core/flit_core/tests/samples/with_data_dir/README.rst
new file mode 100644
index 0000000..ef7b7c1
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/README.rst
@@ -0,0 +1 @@
+This contains a nön-ascii character
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.1 b/flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.1
new file mode 100644
index 0000000..c12128d
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/data/share/man/man1/foo.1
@@ -0,0 +1 @@
+Example data file
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/module1.py b/flit_core/flit_core/tests/samples/with_data_dir/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml b/flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml
new file mode 100644
index 0000000..84d165e
--- /dev/null
+++ b/flit_core/flit_core/tests/samples/with_data_dir/pyproject.toml
@@ -0,0 +1,26 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "module1"
+authors = [
+ {name = "Sir Röbin", email = "robin@camelot.uk"}
+]
+readme = "README.rst"
+license = {file = "LICENSE"}
+requires-python = ">=3.7"
+dependencies = [
+ "requests >= 2.18",
+ "docutils",
+]
+dynamic = [
+ "version",
+ "description",
+]
+
+[project.scripts]
+foo = "module1:main"
+
+[tool.flit.external-data]
+directory = "data"
diff --git a/flit_core/flit_core/tests/test_build_thyself.py b/flit_core/flit_core/tests/test_build_thyself.py
new file mode 100644
index 0000000..ad15819
--- /dev/null
+++ b/flit_core/flit_core/tests/test_build_thyself.py
@@ -0,0 +1,57 @@
+"""Tests of flit_core building itself"""
+import os
+import os.path as osp
+import pytest
+import tarfile
+from testpath import assert_isdir, assert_isfile
+import zipfile
+
+from flit_core import buildapi
+
+@pytest.fixture()
+def cwd_project():
+ proj_dir = osp.dirname(osp.dirname(osp.abspath(buildapi.__file__)))
+ if not osp.isfile(osp.join(proj_dir, 'pyproject.toml')):
+ pytest.skip("need flit_core source directory")
+
+ old_cwd = os.getcwd()
+ try:
+ os.chdir(proj_dir)
+ yield
+ finally:
+ os.chdir(old_cwd)
+
+
+def test_prepare_metadata(tmp_path, cwd_project):
+ tmp_path = str(tmp_path)
+ dist_info = buildapi.prepare_metadata_for_build_wheel(tmp_path)
+
+ assert dist_info.endswith('.dist-info')
+ assert dist_info.startswith('flit_core')
+ dist_info = osp.join(tmp_path, dist_info)
+ assert_isdir(dist_info)
+
+ assert_isfile(osp.join(dist_info, 'WHEEL'))
+ assert_isfile(osp.join(dist_info, 'METADATA'))
+
+
+def test_wheel(tmp_path, cwd_project):
+ tmp_path = str(tmp_path)
+ filename = buildapi.build_wheel(tmp_path)
+
+ assert filename.endswith('.whl')
+ assert filename.startswith('flit_core')
+ path = osp.join(tmp_path, filename)
+ assert_isfile(path)
+ assert zipfile.is_zipfile(path)
+
+
+def test_sdist(tmp_path, cwd_project):
+ tmp_path = str(tmp_path)
+ filename = buildapi.build_sdist(tmp_path)
+
+ assert filename.endswith('.tar.gz')
+ assert filename.startswith('flit_core')
+ path = osp.join(tmp_path, filename)
+ assert_isfile(path)
+ assert tarfile.is_tarfile(path)
diff --git a/flit_core/flit_core/tests/test_buildapi.py b/flit_core/flit_core/tests/test_buildapi.py
new file mode 100644
index 0000000..3e621e6
--- /dev/null
+++ b/flit_core/flit_core/tests/test_buildapi.py
@@ -0,0 +1,93 @@
+from contextlib import contextmanager
+import os
+import os.path as osp
+import tarfile
+from testpath import assert_isfile, assert_isdir
+from testpath.tempdir import TemporaryDirectory
+import zipfile
+
+from flit_core import buildapi
+
+samples_dir = osp.join(osp.dirname(__file__), 'samples')
+
+@contextmanager
+def cwd(directory):
+ prev = os.getcwd()
+ os.chdir(directory)
+ try:
+ yield
+ finally:
+ os.chdir(prev)
+
+def test_get_build_requires():
+ # This module can be inspected (for docstring & __version__) without
+ # importing it, so there are no build dependencies.
+ with cwd(osp.join(samples_dir,'pep517')):
+ assert buildapi.get_requires_for_build_wheel() == []
+ assert buildapi.get_requires_for_build_editable() == []
+ assert buildapi.get_requires_for_build_sdist() == []
+
+def test_get_build_requires_pep621_nodynamic():
+ # This module isn't inspected because version & description are specified
+ # as static metadata in pyproject.toml, so there are no build dependencies
+ with cwd(osp.join(samples_dir, 'pep621_nodynamic')):
+ assert buildapi.get_requires_for_build_wheel() == []
+ assert buildapi.get_requires_for_build_editable() == []
+ assert buildapi.get_requires_for_build_sdist() == []
+
+def test_get_build_requires_import():
+ # This one has to be imported, so its runtime dependencies are also
+ # build dependencies.
+ expected = ["numpy >=1.16.0"]
+ with cwd(osp.join(samples_dir, 'constructed_version')):
+ assert buildapi.get_requires_for_build_wheel() == expected
+ assert buildapi.get_requires_for_build_editable() == expected
+ assert buildapi.get_requires_for_build_sdist() == expected
+
+def test_build_wheel():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ filename = buildapi.build_wheel(td)
+ assert filename.endswith('.whl'), filename
+ assert_isfile(osp.join(td, filename))
+ assert zipfile.is_zipfile(osp.join(td, filename))
+ with zipfile.ZipFile(osp.join(td, filename)) as zip:
+ assert "module1.py" in zip.namelist()
+ assert "module1.pth" not in zip.namelist()
+
+def test_build_wheel_pep621():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir, 'pep621')):
+ filename = buildapi.build_wheel(td)
+ assert filename.endswith('.whl'), filename
+ assert_isfile(osp.join(td, filename))
+ assert zipfile.is_zipfile(osp.join(td, filename))
+
+def test_build_editable():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ filename = buildapi.build_editable(td)
+ assert filename.endswith('.whl'), filename
+ assert_isfile(osp.join(td, filename))
+ assert zipfile.is_zipfile(osp.join(td, filename))
+ with zipfile.ZipFile(osp.join(td, filename)) as zip:
+ assert "module1.py" not in zip.namelist()
+ assert "module1.pth" in zip.namelist()
+
+def test_build_sdist():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ filename = buildapi.build_sdist(td)
+ assert filename.endswith('.tar.gz'), filename
+ assert_isfile(osp.join(td, filename))
+ assert tarfile.is_tarfile(osp.join(td, filename))
+
+def test_prepare_metadata_for_build_wheel():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ dirname = buildapi.prepare_metadata_for_build_wheel(td)
+ assert dirname.endswith('.dist-info'), dirname
+ assert_isdir(osp.join(td, dirname))
+ assert_isfile(osp.join(td, dirname, 'METADATA'))
+
+def test_prepare_metadata_for_build_editable():
+ with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
+ dirname = buildapi.prepare_metadata_for_build_editable(td)
+ assert dirname.endswith('.dist-info'), dirname
+ assert_isdir(osp.join(td, dirname))
+ assert_isfile(osp.join(td, dirname, 'METADATA'))
diff --git a/flit_core/flit_core/tests/test_common.py b/flit_core/flit_core/tests/test_common.py
new file mode 100644
index 0000000..b6d6290
--- /dev/null
+++ b/flit_core/flit_core/tests/test_common.py
@@ -0,0 +1,158 @@
+import email.parser
+import email.policy
+from io import StringIO
+from pathlib import Path
+import pytest
+from unittest import TestCase
+
+from flit_core import config
+from flit_core.common import (
+ Module, get_info_from_module, InvalidVersion, NoVersionError, check_version,
+ normalize_file_permissions, Metadata, make_metadata,
+)
+
+samples_dir = Path(__file__).parent / 'samples'
+
+class ModuleTests(TestCase):
+ def test_ns_package_importable(self):
+ i = Module('ns1.pkg', samples_dir / 'ns1-pkg')
+ assert i.path == Path(samples_dir, 'ns1-pkg', 'ns1', 'pkg')
+ assert i.file == Path(samples_dir, 'ns1-pkg', 'ns1', 'pkg', '__init__.py')
+ assert i.is_package
+
+ assert i.in_namespace_package
+ assert i.namespace_package_name == 'ns1'
+
+ def test_package_importable(self):
+ i = Module('package1', samples_dir)
+ assert i.path == samples_dir / 'package1'
+ assert i.file == samples_dir / 'package1' / '__init__.py'
+ assert i.is_package
+
+ def test_module_importable(self):
+ i = Module('module1', samples_dir)
+ assert i.path == samples_dir / 'module1.py'
+ assert not i.is_package
+
+ def test_missing_name(self):
+ with self.assertRaises(ValueError):
+ i = Module('doesnt_exist', samples_dir)
+
+ def test_conflicting_modules(self):
+ with pytest.raises(ValueError, match="Multiple"):
+ Module('module1', samples_dir / 'conflicting_modules')
+
+ def test_get_info_from_module(self):
+ info = get_info_from_module(Module('module1', samples_dir))
+ self.assertEqual(info, {'summary': 'Example module',
+ 'version': '0.1'}
+ )
+
+ info = get_info_from_module(Module('module2', samples_dir))
+ self.assertEqual(info, {'summary': 'Docstring formatted like this.',
+ 'version': '7.0'}
+ )
+
+ pkg1 = Module('package1', samples_dir)
+ info = get_info_from_module(pkg1)
+ self.assertEqual(info, {'summary': 'A sample package',
+ 'version': '0.1'}
+ )
+ info = get_info_from_module(pkg1, for_fields=['version'])
+ self.assertEqual(info, {'version': '0.1'})
+ info = get_info_from_module(pkg1, for_fields=['description'])
+ self.assertEqual(info, {'summary': 'A sample package'})
+ info = get_info_from_module(pkg1, for_fields=[])
+ self.assertEqual(info, {})
+
+ info = get_info_from_module(Module('moduleunimportable', samples_dir))
+ self.assertEqual(info, {'summary': 'A sample unimportable module',
+ 'version': '0.1'}
+ )
+
+ info = get_info_from_module(Module('moduleunimportabledouble', samples_dir))
+ self.assertEqual(info, {'summary': 'A sample unimportable module with double assignment',
+ 'version': '0.1'}
+ )
+
+ info = get_info_from_module(Module('module1', samples_dir / 'constructed_version'))
+ self.assertEqual(info, {'summary': 'This module has a __version__ that requires runtime interpretation',
+ 'version': '1.2.3'}
+ )
+
+ info = get_info_from_module(Module('package1', samples_dir / 'imported_version'))
+ self.assertEqual(info, {'summary': 'This module has a __version__ that requires a relative import',
+ 'version': '0.5.8'}
+ )
+
+ with self.assertRaises(InvalidVersion):
+ get_info_from_module(Module('invalid_version1', samples_dir))
+
+ def test_version_raise(self):
+ with pytest.raises(InvalidVersion):
+ check_version('a.1.0.beta0')
+
+ with pytest.raises(InvalidVersion):
+ check_version('3!')
+
+ with pytest.raises(InvalidVersion):
+ check_version((1, 2))
+
+ with pytest.raises(NoVersionError):
+ check_version(None)
+
+ assert check_version('4.1.0beta1') == '4.1.0b1'
+ assert check_version('v1.2') == '1.2'
+
+def test_normalize_file_permissions():
+ assert normalize_file_permissions(0o100664) == 0o100644 # regular file
+ assert normalize_file_permissions(0o40775) == 0o40755 # directory
+
+@pytest.mark.parametrize(
+ ("requires_python", "expected_result"),
+ [
+ ("", True),
+ (">2.7", True),
+ ("3", False),
+ (">= 3.7", False),
+ ("<4, > 3.2", False),
+ (">3.4", False),
+ (">=2.7, !=3.0.*, !=3.1.*, !=3.2.*", True),
+ ("== 3.9", False),
+ ("~=2.7", True),
+ ("~=3.9", False),
+ ],
+)
+def test_supports_py2(requires_python, expected_result):
+ metadata = object.__new__(Metadata)
+ metadata.requires_python = requires_python
+ result = metadata.supports_py2
+ assert result == expected_result
+
+def test_make_metadata():
+ project_dir = samples_dir / 'pep621_nodynamic'
+ ini_info = config.read_flit_config(project_dir / 'pyproject.toml')
+ module = Module(ini_info.module, project_dir)
+ print(module.file)
+ md = make_metadata(module, ini_info)
+ assert md.version == '0.3'
+ assert md.summary == "Statically specified description"
+
+def test_metadata_multiline(tmp_path):
+ d = {
+ 'name': 'foo',
+ 'version': '1.0',
+ # Example from: https://packaging.python.org/specifications/core-metadata/#author
+ 'author': ('C. Schultz, Universal Features Syndicate\n'
+ 'Los Angeles, CA <cschultz@peanuts.example.com>'),
+ }
+ md = Metadata(d)
+ sio = StringIO()
+ md.write_metadata_file(sio)
+ sio.seek(0)
+
+ msg = email.parser.Parser(policy=email.policy.compat32).parse(sio)
+ assert msg['Name'] == d['name']
+ assert msg['Version'] == d['version']
+ assert [l.lstrip() for l in msg['Author'].splitlines()] == d['author'].splitlines()
+ assert not msg.defects
diff --git a/flit_core/flit_core/tests/test_config.py b/flit_core/flit_core/tests/test_config.py
new file mode 100644
index 0000000..eafb7e9
--- /dev/null
+++ b/flit_core/flit_core/tests/test_config.py
@@ -0,0 +1,165 @@
+import logging
+from pathlib import Path
+import pytest
+
+from flit_core import config
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_flatten_entrypoints():
+ r = config.flatten_entrypoints({'a': {'b': {'c': 'd'}, 'e': {'f': {'g': 'h'}}, 'i': 'j'}})
+ assert r == {'a': {'i': 'j'}, 'a.b': {'c': 'd'}, 'a.e.f': {'g': 'h'}}
+
+def test_load_toml():
+ inf = config.read_flit_config(samples_dir / 'module1-pkg.toml')
+ assert inf.module == 'module1'
+ assert inf.metadata['home_page'] == 'http://github.com/sirrobin/module1'
+
+def test_load_toml_ns():
+ inf = config.read_flit_config(samples_dir / 'ns1-pkg' / 'pyproject.toml')
+ assert inf.module == 'ns1.pkg'
+ assert inf.metadata['home_page'] == 'http://github.com/sirrobin/module1'
+
+def test_load_normalization():
+ inf = config.read_flit_config(samples_dir / 'normalization' / 'pyproject.toml')
+ assert inf.module == 'my_python_module'
+ assert inf.metadata['name'] == 'my-python-module'
+
+def test_load_pep621():
+ inf = config.read_flit_config(samples_dir / 'pep621' / 'pyproject.toml')
+ assert inf.module == 'module1a'
+ assert inf.metadata['name'] == 'module1'
+ assert inf.metadata['description_content_type'] == 'text/x-rst'
+ # Remove all whitespace from requirements so we don't check exact format:
+ assert {r.replace(' ', '') for r in inf.metadata['requires_dist']} == {
+ 'docutils',
+ 'requests>=2.18',
+ 'pytest;extra=="test"', # from [project.optional-dependencies]
+ 'mock;extra=="test"and(python_version<\'3.6\')',
+ }
+ assert inf.metadata['author_email'] == "Sir Röbin <robin@camelot.uk>"
+ assert inf.entrypoints['flit_test_example']['foo'] == 'module1:main'
+ assert set(inf.dynamic_metadata) == {'version', 'description'}
+
+def test_load_pep621_nodynamic():
+ inf = config.read_flit_config(samples_dir / 'pep621_nodynamic' / 'pyproject.toml')
+ assert inf.module == 'module1'
+ assert inf.metadata['name'] == 'module1'
+ assert inf.metadata['version'] == '0.3'
+ assert inf.metadata['summary'] == 'Statically specified description'
+ assert set(inf.dynamic_metadata) == set()
+
+ # Filling reqs_by_extra when dependencies were specified but there were no
+ # optional dependencies used to be buggy, so check that it is populated here.
+ assert inf.reqs_by_extra == {'.none': ['requests >= 2.18', 'docutils']}
+
+def test_misspelled_key():
+ with pytest.raises(config.ConfigError) as e_info:
+ config.read_flit_config(samples_dir / 'misspelled-key.toml')
+
+ assert 'description-file' in str(e_info.value)
+
+def test_description_file():
+ info = config.read_flit_config(samples_dir / 'package1.toml')
+ assert info.metadata['description'] == \
+ "Sample description for test.\n"
+ assert info.metadata['description_content_type'] == 'text/x-rst'
+
+def test_missing_description_file():
+ with pytest.raises(config.ConfigError, match=r"Description file .* does not exist"):
+ config.read_flit_config(samples_dir / 'missing-description-file.toml')
+
+def test_bad_description_extension(caplog):
+ info = config.read_flit_config(samples_dir / 'bad-description-ext.toml')
+ assert info.metadata['description_content_type'] is None
+ assert any((r.levelno == logging.WARN and "Unknown extension" in r.msg)
+ for r in caplog.records)
+
+def test_extras():
+ info = config.read_flit_config(samples_dir / 'extras.toml')
+ requires_dist = set(info.metadata['requires_dist'])
+ assert requires_dist == {
+ 'toml',
+ 'pytest ; extra == "test"',
+ 'requests ; extra == "custom"',
+ }
+ assert set(info.metadata['provides_extra']) == {'test', 'custom'}
+
+def test_extras_dev_conflict():
+ with pytest.raises(config.ConfigError, match=r'dev-requires'):
+ config.read_flit_config(samples_dir / 'extras-dev-conflict.toml')
+
+def test_extras_dev_warning(caplog):
+ info = config.read_flit_config(samples_dir / 'requires-dev.toml')
+ assert '"dev-requires = ..." is obsolete' in caplog.text
+ assert set(info.metadata['requires_dist']) == {'apackage ; extra == "dev"'}
+
+def test_requires_extra_env_marker():
+ info = config.read_flit_config(samples_dir / 'requires-extra-envmark.toml')
+ assert info.metadata['requires_dist'][0].startswith('pathlib2 ;')
+
+@pytest.mark.parametrize(('erroneous', 'match'), [
+ ({'requires-extra': None}, r'Expected a dict for requires-extra field'),
+ ({'requires-extra': dict(dev=None)}, r'Expected a dict of lists for requires-extra field'),
+ ({'requires-extra': dict(dev=[1])}, r'Expected a string list for requires-extra'),
+])
+def test_faulty_requires_extra(erroneous, match):
+ metadata = {'module': 'mymod', 'author': '', 'author-email': ''}
+ with pytest.raises(config.ConfigError, match=match):
+ config._prep_metadata(dict(metadata, **erroneous), None)
+
+@pytest.mark.parametrize(('path', 'err_match'), [
+ ('../bar', 'out of the directory'),
+ ('foo/../../bar', 'out of the directory'),
+ ('/home', 'absolute path'),
+ ('foo:bar', 'bad character'),
+])
+def test_bad_include_paths(path, err_match):
+ toml_cfg = {'tool': {'flit': {
+ 'metadata': {'module': 'xyz', 'author': 'nobody'},
+ 'sdist': {'include': [path]}
+ }}}
+
+ with pytest.raises(config.ConfigError, match=err_match):
+ config.prep_toml_config(toml_cfg, None)
+
+@pytest.mark.parametrize(('proj_bad', 'err_match'), [
+ ({'version': 1}, r'\bstr\b'),
+ ({'license': {'fromage': 2}}, '[Uu]nrecognised'),
+ ({'license': {'file': 'LICENSE', 'text': 'xyz'}}, 'both'),
+ ({'license': {}}, 'required'),
+ ({'keywords': 'foo'}, 'list'),
+ ({'keywords': ['foo', 7]}, 'strings'),
+ ({'entry-points': {'foo': 'module1:main'}}, 'entry-point.*tables'),
+ ({'entry-points': {'group': {'foo': 7}}}, 'entry-point.*string'),
+ ({'entry-points': {'gui_scripts': {'foo': 'a:b'}}}, r'\[project\.gui-scripts\]'),
+ ({'scripts': {'foo': 7}}, 'scripts.*string'),
+ ({'gui-scripts': {'foo': 7}}, 'gui-scripts.*string'),
+ ({'optional-dependencies': {'test': 'requests'}}, 'list.*optional-dep'),
+ ({'optional-dependencies': {'test': [7]}}, 'string.*optional-dep'),
+ ({'dynamic': ['classifiers']}, 'dynamic'),
+ ({'dynamic': ['version']}, r'dynamic.*\[project\]'),
+ ({'authors': ['thomas']}, r'author.*\bdict'),
+ ({'maintainers': [{'title': 'Dr'}]}, r'maintainer.*title'),
+])
+def test_bad_pep621_info(proj_bad, err_match):
+ proj = {'name': 'module1', 'version': '1.0', 'description': 'x'}
+ proj.update(proj_bad)
+ with pytest.raises(config.ConfigError, match=err_match):
+ config.read_pep621_metadata(proj, samples_dir / 'pep621')
+
+@pytest.mark.parametrize(('readme', 'err_match'), [
+ ({'file': 'README.rst'}, 'required'),
+ ({'file': 'README.rst', 'content-type': 'text/x-python'}, 'content-type'),
+ ('/opt/README.rst', 'relative'),
+ ({'file': 'README.rst', 'text': '', 'content-type': 'text/x-rst'}, 'both'),
+ ({'content-type': 'text/x-rst'}, 'required'),
+ ({'file': 'README.rst', 'content-type': 'text/x-rst', 'a': 'b'}, '[Uu]nrecognised'),
+ (5, r'readme.*string'),
+])
+def test_bad_pep621_readme(readme, err_match):
+ proj = {
+ 'name': 'module1', 'version': '1.0', 'description': 'x', 'readme': readme
+ }
+ with pytest.raises(config.ConfigError, match=err_match):
+ config.read_pep621_metadata(proj, samples_dir / 'pep621')
diff --git a/flit_core/flit_core/tests/test_sdist.py b/flit_core/flit_core/tests/test_sdist.py
new file mode 100644
index 0000000..cffea02
--- /dev/null
+++ b/flit_core/flit_core/tests/test_sdist.py
@@ -0,0 +1,61 @@
+from io import BytesIO
+import os.path as osp
+from pathlib import Path
+import tarfile
+from testpath import assert_isfile
+
+from flit_core import sdist
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_make_sdist(tmp_path):
+ # Smoke test of making a complete sdist
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'package1.toml')
+ builder.build(tmp_path)
+ assert_isfile(tmp_path / 'package1-0.1.tar.gz')
+
+
+def test_make_sdist_pep621(tmp_path):
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'pep621' / 'pyproject.toml')
+ path = builder.build(tmp_path)
+ assert path == tmp_path / 'module1-0.1.tar.gz'
+ assert_isfile(path)
+
+
+def test_make_sdist_pep621_nodynamic(tmp_path):
+ builder = sdist.SdistBuilder.from_ini_path(
+ samples_dir / 'pep621_nodynamic' / 'pyproject.toml'
+ )
+ path = builder.build(tmp_path)
+ assert path == tmp_path / 'module1-0.3.tar.gz'
+ assert_isfile(path)
+
+
+def test_clean_tarinfo():
+ with tarfile.open(mode='w', fileobj=BytesIO()) as tf:
+ ti = tf.gettarinfo(str(samples_dir / 'module1.py'))
+ cleaned = sdist.clean_tarinfo(ti, mtime=42)
+ assert cleaned.uid == 0
+ assert cleaned.uname == ''
+ assert cleaned.mtime == 42
+
+
+def test_include_exclude():
+ builder = sdist.SdistBuilder.from_ini_path(
+ samples_dir / 'inclusion' / 'pyproject.toml'
+ )
+ files = builder.apply_includes_excludes(builder.select_files())
+
+ assert osp.join('doc', 'test.rst') in files
+ assert osp.join('doc', 'test.txt') not in files
+ assert osp.join('doc', 'subdir', 'test.txt') in files
+ assert osp.join('doc', 'subdir', 'subsubdir', 'test.md') not in files
+
+
+def test_data_dir():
+ builder = sdist.SdistBuilder.from_ini_path(
+ samples_dir / 'with_data_dir' / 'pyproject.toml'
+ )
+ files = builder.apply_includes_excludes(builder.select_files())
+
+ assert osp.join('data', 'share', 'man', 'man1', 'foo.1') in files
diff --git a/flit_core/flit_core/tests/test_versionno.py b/flit_core/flit_core/tests/test_versionno.py
new file mode 100644
index 0000000..b02792b
--- /dev/null
+++ b/flit_core/flit_core/tests/test_versionno.py
@@ -0,0 +1,40 @@
+import pytest
+
+from flit_core.common import InvalidVersion
+from flit_core.versionno import normalise_version
+
+def test_normalise_version():
+ nv = normalise_version
+ assert nv('4.3.1') == '4.3.1'
+ assert nv('1.0b2') == '1.0b2'
+ assert nv('2!1.3') == '2!1.3'
+
+ # Prereleases
+ assert nv('1.0B2') == '1.0b2'
+ assert nv('1.0.b2') == '1.0b2'
+ assert nv('1.0beta2') == '1.0b2'
+ assert nv('1.01beta002') == '1.1b2'
+ assert nv('1.0-preview2') == '1.0rc2'
+ assert nv('1.0_c') == '1.0rc0'
+
+ # Post releases
+ assert nv('1.0post-2') == '1.0.post2'
+ assert nv('1.0post') == '1.0.post0'
+ assert nv('1.0-rev3') == '1.0.post3'
+ assert nv('1.0-2') == '1.0.post2'
+
+ # Development versions
+ assert nv('1.0dev-2') == '1.0.dev2'
+ assert nv('1.0dev') == '1.0.dev0'
+ assert nv('1.0-dev3') == '1.0.dev3'
+
+ assert nv('1.0+ubuntu-01') == '1.0+ubuntu.1'
+ assert nv('v1.3-pre2') == '1.3rc2'
+ assert nv(' 1.2.5.6\t') == '1.2.5.6'
+ assert nv('1.0-alpha3-post02+ubuntu_xenial_5') == '1.0a3.post2+ubuntu.xenial.5'
+
+ with pytest.raises(InvalidVersion):
+ nv('3!')
+
+ with pytest.raises(InvalidVersion):
+ nv('abc')
diff --git a/flit_core/flit_core/tests/test_wheel.py b/flit_core/flit_core/tests/test_wheel.py
new file mode 100644
index 0000000..310f9c6
--- /dev/null
+++ b/flit_core/flit_core/tests/test_wheel.py
@@ -0,0 +1,47 @@
+from pathlib import Path
+from zipfile import ZipFile
+
+from testpath import assert_isfile
+
+from flit_core.wheel import make_wheel_in, main
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_licenses_dir(tmp_path):
+ # Smoketest for https://github.com/pypa/flit/issues/399
+ info = make_wheel_in(samples_dir / 'inclusion' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+
+
+def test_source_date_epoch(tmp_path, monkeypatch):
+ monkeypatch.setenv('SOURCE_DATE_EPOCH', '1633007882')
+ info = make_wheel_in(samples_dir / 'pep621' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+ # Minimum value for zip timestamps is 1980-1-1
+ with ZipFile(info.file, 'r') as zf:
+ assert zf.getinfo('module1a.py').date_time[:3] == (2021, 9, 30)
+
+
+def test_zero_timestamp(tmp_path, monkeypatch):
+ monkeypatch.setenv('SOURCE_DATE_EPOCH', '0')
+ info = make_wheel_in(samples_dir / 'pep621' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+ # Minimum value for zip timestamps is 1980-1-1
+ with ZipFile(info.file, 'r') as zf:
+ assert zf.getinfo('module1a.py').date_time == (1980, 1, 1, 0, 0, 0)
+
+
+def test_main(tmp_path):
+ main(['--outdir', str(tmp_path), str(samples_dir / 'pep621')])
+ wheels = list(tmp_path.glob('*.whl'))
+ assert len(wheels) == 1
+ # Check that the module was packaged into the wheel
+ with ZipFile(wheels[0], 'r') as zf:
+ assert 'module1a.py' in zf.namelist()
+
+
+def test_data_dir(tmp_path):
+ info = make_wheel_in(samples_dir / 'with_data_dir' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+ with ZipFile(info.file, 'r') as zf:
+ assert 'module1-0.1.data/data/share/man/man1/foo.1' in zf.namelist()
diff --git a/flit_core/flit_core/vendor/README b/flit_core/flit_core/vendor/README
new file mode 100644
index 0000000..32e1b00
--- /dev/null
+++ b/flit_core/flit_core/vendor/README
@@ -0,0 +1,13 @@
+flit_core bundles the 'tomli' TOML parser, to avoid a bootstrapping problem.
+tomli is packaged using Flit, so there would be a dependency cycle when building
+from source. Vendoring a copy of tomli avoids this. The code in tomli is under
+the MIT license, and the LICENSE file is in the .dist-info folder.
+
+If you want to unbundle tomli and rely on it as a separate package, you can
+replace the package with Python code doing 'from tomli import *'. You will
+probably need to work around the dependency cycle between flit_core and tomli.
+
+Bundling a TOML parser should be a special case - I don't plan on bundling
+anything else in flit_core (or depending on any other packages).
+I hope that a TOML parser will be added to the Python standard library, and then
+this bundled parser will go away.
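A hedged sketch (not part of the diff) of the re-export shim the README above describes; the path is assumed from where the bundled copy lives, and it requires tomli to be installed separately:

```python
# Hypothetical contents of flit_core/flit_core/vendor/tomli/__init__.py after
# unbundling: re-export the externally installed tomli instead of the bundled copy.
from tomli import *  # noqa: F401,F403
```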
diff --git a/flit_core/flit_core/vendor/__init__.py b/flit_core/flit_core/vendor/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flit_core/flit_core/vendor/__init__.py
diff --git a/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE
new file mode 100644
index 0000000..e859590
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Taneli Hukkinen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA
new file mode 100644
index 0000000..0ddc586
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA
@@ -0,0 +1,208 @@
+Metadata-Version: 2.1
+Name: tomli
+Version: 1.2.3
+Summary: A lil' TOML parser
+Keywords: toml
+Author-email: Taneli Hukkinen <hukkin@users.noreply.github.com>
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: MacOS
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Typing :: Typed
+Project-URL: Changelog, https://github.com/hukkin/tomli/blob/master/CHANGELOG.md
+Project-URL: Homepage, https://github.com/hukkin/tomli
+
+[![Build Status](https://github.com/hukkin/tomli/workflows/Tests/badge.svg?branch=master)](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush)
+[![codecov.io](https://codecov.io/gh/hukkin/tomli/branch/master/graph/badge.svg)](https://codecov.io/gh/hukkin/tomli)
+[![PyPI version](https://img.shields.io/pypi/v/tomli)](https://pypi.org/project/tomli)
+
+# Tomli
+
+> A lil' TOML parser
+
+**Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)*
+
+<!-- mdformat-toc start --slug=github --maxlevel=6 --minlevel=2 -->
+
+- [Intro](#intro)
+- [Installation](#installation)
+- [Usage](#usage)
+ - [Parse a TOML string](#parse-a-toml-string)
+ - [Parse a TOML file](#parse-a-toml-file)
+ - [Handle invalid TOML](#handle-invalid-toml)
+ - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats)
+- [FAQ](#faq)
+ - [Why this parser?](#why-this-parser)
+ - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported)
+ - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function)
+ - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types)
+- [Performance](#performance)
+
+<!-- mdformat-toc end -->
+
+## Intro<a name="intro"></a>
+
+Tomli is a Python library for parsing [TOML](https://toml.io).
+Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0).
+
+## Installation<a name="installation"></a>
+
+```bash
+pip install tomli
+```
+
+## Usage<a name="usage"></a>
+
+### Parse a TOML string<a name="parse-a-toml-string"></a>
+
+```python
+import tomli
+
+toml_str = """
+ gretzky = 99
+
+ [kurri]
+ jari = 17
+ """
+
+toml_dict = tomli.loads(toml_str)
+assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}}
+```
+
+### Parse a TOML file<a name="parse-a-toml-file"></a>
+
+```python
+import tomli
+
+with open("path_to_file/conf.toml", "rb") as f:
+ toml_dict = tomli.load(f)
+```
+
+The file must be opened in binary mode (with the `"rb"` flag).
+Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled,
+both of which are required to correctly parse TOML.
+Support for text file objects is deprecated for removal in the next major release.
+
+### Handle invalid TOML<a name="handle-invalid-toml"></a>
+
+```python
+import tomli
+
+try:
+ toml_dict = tomli.loads("]] this is invalid TOML [[")
+except tomli.TOMLDecodeError:
+ print("Yep, definitely not valid.")
+```
+
+Note that while the `TOMLDecodeError` type is public API, error messages of raised instances of it are not.
+Error messages should not be assumed to stay constant across Tomli versions.
+
+### Construct `decimal.Decimal`s from TOML floats<a name="construct-decimaldecimals-from-toml-floats"></a>
+
+```python
+from decimal import Decimal
+import tomli
+
+toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal)
+assert toml_dict["precision-matters"] == Decimal("0.982492")
+```
+
+Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type.
+The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated.
+
+Illegal types include `dict`, `list`, and anything that has the `append` attribute.
+Parsing floats into an illegal type results in undefined behavior.
+
+## FAQ<a name="faq"></a>
+
+### Why this parser?<a name="why-this-parser"></a>
+
+- it's lil'
+- pure Python with zero dependencies
+- the fastest pure Python parser [\*](#performance):
+ 15x as fast as [tomlkit](https://pypi.org/project/tomlkit/),
+ 2.4x as fast as [toml](https://pypi.org/project/toml/)
+- outputs [basic data types](#how-do-toml-types-map-into-python-types) only
+- 100% spec compliant: passes all tests in
+ [a test set](https://github.com/toml-lang/compliance/pull/8)
+ soon to be merged to the official
+ [compliance tests for TOML](https://github.com/toml-lang/compliance)
+ repository
+- thoroughly tested: 100% branch coverage
+
+### Is comment preserving round-trip parsing supported?<a name="is-comment-preserving-round-trip-parsing-supported"></a>
+
+No.
+
+The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only.
+Preserving comments requires a custom type to be returned, so it will not be supported,
+at least not by the `tomli.loads` and `tomli.load` functions.
+
+Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need.
+
+### Is there a `dumps`, `write` or `encode` function?<a name="is-there-a-dumps-write-or-encode-function"></a>
+
+[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions.
+
+The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal.
+
+### How do TOML types map into Python types?<a name="how-do-toml-types-map-into-python-types"></a>
+
+| TOML type | Python type | Details |
+| ---------------- | ------------------- | ------------------------------------------------------------ |
+| Document Root | `dict` | |
+| Key | `str` | |
+| String | `str` | |
+| Integer | `int` | |
+| Float | `float` | |
+| Boolean | `bool` | |
+| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` |
+| Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` |
+| Local Date | `datetime.date` | |
+| Local Time | `datetime.time` | |
+| Array | `list` | |
+| Table | `dict` | |
+| Inline Table | `dict` | |
+
+## Performance<a name="performance"></a>
+
+The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers.
+The benchmark can be run with `tox -e benchmark-pypi`.
+Running the benchmark on my personal computer produced the following output:
+
+```console
+foo@bar:~/dev/tomli$ tox -e benchmark-pypi
+benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2
+benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909'
+benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())'
+2021-07-23
+benchmark-pypi run-test: commands[1] | python --version
+Python 3.8.10
+benchmark-pypi run-test: commands[2] | python benchmark/run.py
+Parsing data.toml 5000 times:
+------------------------------------------------------
+ parser | exec time | performance (more is better)
+-----------+------------+-----------------------------
+ rtoml | 0.901 s | baseline (100%)
+ pytomlpp | 1.08 s | 83.15%
+ tomli | 3.89 s | 23.15%
+ toml | 9.36 s | 9.63%
+ qtoml | 11.5 s | 7.82%
+ tomlkit | 56.8 s | 1.59%
+```
+
+The parsers are ordered from fastest to slowest, using the fastest parser as baseline.
+Tomli performed the best out of all pure Python TOML parsers,
+losing only to pytomlpp (wraps C++) and rtoml (wraps Rust).
+
diff --git a/flit_core/flit_core/vendor/tomli/__init__.py b/flit_core/flit_core/vendor/tomli/__init__.py
new file mode 100644
index 0000000..8597467
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/__init__.py
@@ -0,0 +1,9 @@
+"""A lil' TOML parser."""
+
+__all__ = ("loads", "load", "TOMLDecodeError")
+__version__ = "1.2.3" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT
+
+from ._parser import TOMLDecodeError, load, loads
+
+# Pretend this exception was created here.
+TOMLDecodeError.__module__ = "tomli"
diff --git a/flit_core/flit_core/vendor/tomli/_parser.py b/flit_core/flit_core/vendor/tomli/_parser.py
new file mode 100644
index 0000000..093afe5
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/_parser.py
@@ -0,0 +1,663 @@
+import string
+from types import MappingProxyType
+from typing import Any, BinaryIO, Dict, FrozenSet, Iterable, NamedTuple, Optional, Tuple
+import warnings
+
+from ._re import (
+ RE_DATETIME,
+ RE_LOCALTIME,
+ RE_NUMBER,
+ match_to_datetime,
+ match_to_localtime,
+ match_to_number,
+)
+from ._types import Key, ParseFloat, Pos
+
+ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
+
+# Neither of these sets include quotation mark or backslash. They are
+# currently handled as separate cases in the parser functions.
+ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t")
+ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n")
+
+ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS
+ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS
+
+ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS
+
+TOML_WS = frozenset(" \t")
+TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n")
+BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_")
+KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'")
+HEXDIGIT_CHARS = frozenset(string.hexdigits)
+
+BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType(
+ {
+ "\\b": "\u0008", # backspace
+ "\\t": "\u0009", # tab
+ "\\n": "\u000A", # linefeed
+ "\\f": "\u000C", # form feed
+ "\\r": "\u000D", # carriage return
+ '\\"': "\u0022", # quote
+ "\\\\": "\u005C", # backslash
+ }
+)
+
+
+class TOMLDecodeError(ValueError):
+ """An error raised if a document is not valid TOML."""
+
+
+def load(fp: BinaryIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]:
+ """Parse TOML from a binary file object."""
+ s_bytes = fp.read()
+ try:
+ s = s_bytes.decode()
+ except AttributeError:
+ warnings.warn(
+ "Text file object support is deprecated in favor of binary file objects."
+ ' Use `open("foo.toml", "rb")` to open the file in binary mode.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ s = s_bytes # type: ignore[assignment]
+ return loads(s, parse_float=parse_float)
+
+
+def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901
+ """Parse TOML from a string."""
+
+ # The spec allows converting "\r\n" to "\n", even in string
+ # literals. Let's do so to simplify parsing.
+ src = s.replace("\r\n", "\n")
+ pos = 0
+ out = Output(NestedDict(), Flags())
+ header: Key = ()
+
+ # Parse one statement at a time
+ # (typically means one line in TOML source)
+ while True:
+ # 1. Skip line leading whitespace
+ pos = skip_chars(src, pos, TOML_WS)
+
+ # 2. Parse rules. Expect one of the following:
+ # - end of file
+ # - end of line
+ # - comment
+ # - key/value pair
+ # - append dict to list (and move to its namespace)
+ # - create dict (and move to its namespace)
+ # Skip trailing whitespace when applicable.
+ try:
+ char = src[pos]
+ except IndexError:
+ break
+ if char == "\n":
+ pos += 1
+ continue
+ if char in KEY_INITIAL_CHARS:
+ pos = key_value_rule(src, pos, out, header, parse_float)
+ pos = skip_chars(src, pos, TOML_WS)
+ elif char == "[":
+ try:
+ second_char: Optional[str] = src[pos + 1]
+ except IndexError:
+ second_char = None
+ if second_char == "[":
+ pos, header = create_list_rule(src, pos, out)
+ else:
+ pos, header = create_dict_rule(src, pos, out)
+ pos = skip_chars(src, pos, TOML_WS)
+ elif char != "#":
+ raise suffixed_err(src, pos, "Invalid statement")
+
+ # 3. Skip comment
+ pos = skip_comment(src, pos)
+
+ # 4. Expect end of line or end of file
+ try:
+ char = src[pos]
+ except IndexError:
+ break
+ if char != "\n":
+ raise suffixed_err(
+ src, pos, "Expected newline or end of document after a statement"
+ )
+ pos += 1
+
+ return out.data.dict
+
+
+class Flags:
+ """Flags that map to parsed keys/namespaces."""
+
+ # Marks an immutable namespace (inline array or inline table).
+ FROZEN = 0
+ # Marks a nest that has been explicitly created and can no longer
+ # be opened using the "[table]" syntax.
+ EXPLICIT_NEST = 1
+
+ def __init__(self) -> None:
+ self._flags: Dict[str, dict] = {}
+
+ def unset_all(self, key: Key) -> None:
+ cont = self._flags
+ for k in key[:-1]:
+ if k not in cont:
+ return
+ cont = cont[k]["nested"]
+ cont.pop(key[-1], None)
+
+ def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None:
+ cont = self._flags
+ for k in head_key:
+ if k not in cont:
+ cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont = cont[k]["nested"]
+ for k in rel_key:
+ if k in cont:
+ cont[k]["flags"].add(flag)
+ else:
+ cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}}
+ cont = cont[k]["nested"]
+
+ def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003
+ cont = self._flags
+ key_parent, key_stem = key[:-1], key[-1]
+ for k in key_parent:
+ if k not in cont:
+ cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont = cont[k]["nested"]
+ if key_stem not in cont:
+ cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag)
+
+ def is_(self, key: Key, flag: int) -> bool:
+ if not key:
+ return False # document root has no flags
+ cont = self._flags
+ for k in key[:-1]:
+ if k not in cont:
+ return False
+ inner_cont = cont[k]
+ if flag in inner_cont["recursive_flags"]:
+ return True
+ cont = inner_cont["nested"]
+ key_stem = key[-1]
+ if key_stem in cont:
+ cont = cont[key_stem]
+ return flag in cont["flags"] or flag in cont["recursive_flags"]
+ return False
+
+
+class NestedDict:
+ def __init__(self) -> None:
+ # The parsed content of the TOML document
+ self.dict: Dict[str, Any] = {}
+
+ def get_or_create_nest(
+ self,
+ key: Key,
+ *,
+ access_lists: bool = True,
+ ) -> dict:
+ cont: Any = self.dict
+ for k in key:
+ if k not in cont:
+ cont[k] = {}
+ cont = cont[k]
+ if access_lists and isinstance(cont, list):
+ cont = cont[-1]
+ if not isinstance(cont, dict):
+ raise KeyError("There is no nest behind this key")
+ return cont
+
+ def append_nest_to_list(self, key: Key) -> None:
+ cont = self.get_or_create_nest(key[:-1])
+ last_key = key[-1]
+ if last_key in cont:
+ list_ = cont[last_key]
+ try:
+ list_.append({})
+ except AttributeError:
+ raise KeyError("An object other than list found behind this key")
+ else:
+ cont[last_key] = [{}]
+
+
+class Output(NamedTuple):
+ data: NestedDict
+ flags: Flags
+
+
+def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos:
+ try:
+ while src[pos] in chars:
+ pos += 1
+ except IndexError:
+ pass
+ return pos
+
+
+def skip_until(
+ src: str,
+ pos: Pos,
+ expect: str,
+ *,
+ error_on: FrozenSet[str],
+ error_on_eof: bool,
+) -> Pos:
+ try:
+ new_pos = src.index(expect, pos)
+ except ValueError:
+ new_pos = len(src)
+ if error_on_eof:
+ raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None
+
+ if not error_on.isdisjoint(src[pos:new_pos]):
+ while src[pos] not in error_on:
+ pos += 1
+ raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}")
+ return new_pos
+
+
+def skip_comment(src: str, pos: Pos) -> Pos:
+ try:
+ char: Optional[str] = src[pos]
+ except IndexError:
+ char = None
+ if char == "#":
+ return skip_until(
+ src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False
+ )
+ return pos
+
+
+def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos:
+ while True:
+ pos_before_skip = pos
+ pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
+ pos = skip_comment(src, pos)
+ if pos == pos_before_skip:
+ return pos
+
+
+def create_dict_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]:
+ pos += 1 # Skip "["
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, key = parse_key(src, pos)
+
+ if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN):
+ raise suffixed_err(src, pos, f"Can not declare {key} twice")
+ out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
+ try:
+ out.data.get_or_create_nest(key)
+ except KeyError:
+ raise suffixed_err(src, pos, "Can not overwrite a value") from None
+
+ if not src.startswith("]", pos):
+ raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration')
+ return pos + 1, key
+
+
+def create_list_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]:
+ pos += 2 # Skip "[["
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, key = parse_key(src, pos)
+
+ if out.flags.is_(key, Flags.FROZEN):
+ raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}")
+ # Free the namespace now that it points to another empty list item...
+ out.flags.unset_all(key)
+ # ...but this key precisely is still prohibited from table declaration
+ out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
+ try:
+ out.data.append_nest_to_list(key)
+ except KeyError:
+ raise suffixed_err(src, pos, "Can not overwrite a value") from None
+
+ if not src.startswith("]]", pos):
+ raise suffixed_err(src, pos, 'Expected "]]" at the end of an array declaration')
+ return pos + 2, key
+
+
+def key_value_rule(
+ src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat
+) -> Pos:
+ pos, key, value = parse_key_value_pair(src, pos, parse_float)
+ key_parent, key_stem = key[:-1], key[-1]
+ abs_key_parent = header + key_parent
+
+ if out.flags.is_(abs_key_parent, Flags.FROZEN):
+ raise suffixed_err(
+ src, pos, f"Can not mutate immutable namespace {abs_key_parent}"
+ )
+ # Containers in the relative path can't be opened with the table syntax after this
+ out.flags.set_for_relative_key(header, key, Flags.EXPLICIT_NEST)
+ try:
+ nest = out.data.get_or_create_nest(abs_key_parent)
+ except KeyError:
+ raise suffixed_err(src, pos, "Can not overwrite a value") from None
+ if key_stem in nest:
+ raise suffixed_err(src, pos, "Can not overwrite a value")
+ # Mark inline table and array namespaces recursively immutable
+ if isinstance(value, (dict, list)):
+ out.flags.set(header + key, Flags.FROZEN, recursive=True)
+ nest[key_stem] = value
+ return pos
+
+
+def parse_key_value_pair(
+ src: str, pos: Pos, parse_float: ParseFloat
+) -> Tuple[Pos, Key, Any]:
+ pos, key = parse_key(src, pos)
+ try:
+ char: Optional[str] = src[pos]
+ except IndexError:
+ char = None
+ if char != "=":
+ raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair')
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, value = parse_value(src, pos, parse_float)
+ return pos, key, value
+
+
+def parse_key(src: str, pos: Pos) -> Tuple[Pos, Key]:
+ pos, key_part = parse_key_part(src, pos)
+ key: Key = (key_part,)
+ pos = skip_chars(src, pos, TOML_WS)
+ while True:
+ try:
+ char: Optional[str] = src[pos]
+ except IndexError:
+ char = None
+ if char != ".":
+ return pos, key
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, key_part = parse_key_part(src, pos)
+ key += (key_part,)
+ pos = skip_chars(src, pos, TOML_WS)
+
+
+def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]:
+ try:
+ char: Optional[str] = src[pos]
+ except IndexError:
+ char = None
+ if char in BARE_KEY_CHARS:
+ start_pos = pos
+ pos = skip_chars(src, pos, BARE_KEY_CHARS)
+ return pos, src[start_pos:pos]
+ if char == "'":
+ return parse_literal_str(src, pos)
+ if char == '"':
+ return parse_one_line_basic_str(src, pos)
+ raise suffixed_err(src, pos, "Invalid initial character for a key part")
+
+
+def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]:
+ pos += 1
+ return parse_basic_str(src, pos, multiline=False)
+
+
+def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]:
+ pos += 1
+ array: list = []
+
+ pos = skip_comments_and_array_ws(src, pos)
+ if src.startswith("]", pos):
+ return pos + 1, array
+ while True:
+ pos, val = parse_value(src, pos, parse_float)
+ array.append(val)
+ pos = skip_comments_and_array_ws(src, pos)
+
+ c = src[pos : pos + 1]
+ if c == "]":
+ return pos + 1, array
+ if c != ",":
+ raise suffixed_err(src, pos, "Unclosed array")
+ pos += 1
+
+ pos = skip_comments_and_array_ws(src, pos)
+ if src.startswith("]", pos):
+ return pos + 1, array
+
+
+def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, dict]:
+ pos += 1
+ nested_dict = NestedDict()
+ flags = Flags()
+
+ pos = skip_chars(src, pos, TOML_WS)
+ if src.startswith("}", pos):
+ return pos + 1, nested_dict.dict
+ while True:
+ pos, key, value = parse_key_value_pair(src, pos, parse_float)
+ key_parent, key_stem = key[:-1], key[-1]
+ if flags.is_(key, Flags.FROZEN):
+ raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}")
+ try:
+ nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
+ except KeyError:
+ raise suffixed_err(src, pos, "Can not overwrite a value") from None
+ if key_stem in nest:
+ raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}")
+ nest[key_stem] = value
+ pos = skip_chars(src, pos, TOML_WS)
+ c = src[pos : pos + 1]
+ if c == "}":
+ return pos + 1, nested_dict.dict
+ if c != ",":
+ raise suffixed_err(src, pos, "Unclosed inline table")
+ if isinstance(value, (dict, list)):
+ flags.set(key, Flags.FROZEN, recursive=True)
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS)
+
+
+def parse_basic_str_escape( # noqa: C901
+ src: str, pos: Pos, *, multiline: bool = False
+) -> Tuple[Pos, str]:
+ escape_id = src[pos : pos + 2]
+ pos += 2
+ if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}:
+ # Skip whitespace until next non-whitespace character or end of
+ # the doc. Error if non-whitespace is found before newline.
+ if escape_id != "\\\n":
+ pos = skip_chars(src, pos, TOML_WS)
+ try:
+ char = src[pos]
+ except IndexError:
+ return pos, ""
+ if char != "\n":
+ raise suffixed_err(src, pos, 'Unescaped "\\" in a string')
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
+ return pos, ""
+ if escape_id == "\\u":
+ return parse_hex_char(src, pos, 4)
+ if escape_id == "\\U":
+ return parse_hex_char(src, pos, 8)
+ try:
+ return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
+ except KeyError:
+ if len(escape_id) != 2:
+ raise suffixed_err(src, pos, "Unterminated string") from None
+ raise suffixed_err(src, pos, 'Unescaped "\\" in a string') from None
+
+
+def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]:
+ return parse_basic_str_escape(src, pos, multiline=True)
+
+
+def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]:
+ hex_str = src[pos : pos + hex_len]
+ if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str):
+ raise suffixed_err(src, pos, "Invalid hex value")
+ pos += hex_len
+ hex_int = int(hex_str, 16)
+ if not is_unicode_scalar_value(hex_int):
+ raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value")
+ return pos, chr(hex_int)
+
+
+def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]:
+ pos += 1 # Skip starting apostrophe
+ start_pos = pos
+ pos = skip_until(
+ src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True
+ )
+ return pos + 1, src[start_pos:pos] # Skip ending apostrophe
+
+
+def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]:
+ pos += 3
+ if src.startswith("\n", pos):
+ pos += 1
+
+ if literal:
+ delim = "'"
+ end_pos = skip_until(
+ src,
+ pos,
+ "'''",
+ error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
+ error_on_eof=True,
+ )
+ result = src[pos:end_pos]
+ pos = end_pos + 3
+ else:
+ delim = '"'
+ pos, result = parse_basic_str(src, pos, multiline=True)
+
+ # Add at maximum two extra apostrophes/quotes if the end sequence
+ # is 4 or 5 chars long instead of just 3.
+ if not src.startswith(delim, pos):
+ return pos, result
+ pos += 1
+ if not src.startswith(delim, pos):
+ return pos, result + delim
+ pos += 1
+ return pos, result + (delim * 2)
+
+
+def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]:
+ if multiline:
+ error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS
+ parse_escapes = parse_basic_str_escape_multiline
+ else:
+ error_on = ILLEGAL_BASIC_STR_CHARS
+ parse_escapes = parse_basic_str_escape
+ result = ""
+ start_pos = pos
+ while True:
+ try:
+ char = src[pos]
+ except IndexError:
+ raise suffixed_err(src, pos, "Unterminated string") from None
+ if char == '"':
+ if not multiline:
+ return pos + 1, result + src[start_pos:pos]
+ if src.startswith('"""', pos):
+ return pos + 3, result + src[start_pos:pos]
+ pos += 1
+ continue
+ if char == "\\":
+ result += src[start_pos:pos]
+ pos, parsed_escape = parse_escapes(src, pos)
+ result += parsed_escape
+ start_pos = pos
+ continue
+ if char in error_on:
+ raise suffixed_err(src, pos, f"Illegal character {char!r}")
+ pos += 1
+
+
+def parse_value( # noqa: C901
+ src: str, pos: Pos, parse_float: ParseFloat
+) -> Tuple[Pos, Any]:
+ try:
+ char: Optional[str] = src[pos]
+ except IndexError:
+ char = None
+
+ # Basic strings
+ if char == '"':
+ if src.startswith('"""', pos):
+ return parse_multiline_str(src, pos, literal=False)
+ return parse_one_line_basic_str(src, pos)
+
+ # Literal strings
+ if char == "'":
+ if src.startswith("'''", pos):
+ return parse_multiline_str(src, pos, literal=True)
+ return parse_literal_str(src, pos)
+
+ # Booleans
+ if char == "t":
+ if src.startswith("true", pos):
+ return pos + 4, True
+ if char == "f":
+ if src.startswith("false", pos):
+ return pos + 5, False
+
+ # Dates and times
+ datetime_match = RE_DATETIME.match(src, pos)
+ if datetime_match:
+ try:
+ datetime_obj = match_to_datetime(datetime_match)
+ except ValueError as e:
+ raise suffixed_err(src, pos, "Invalid date or datetime") from e
+ return datetime_match.end(), datetime_obj
+ localtime_match = RE_LOCALTIME.match(src, pos)
+ if localtime_match:
+ return localtime_match.end(), match_to_localtime(localtime_match)
+
+ # Integers and "normal" floats.
+ # The regex will greedily match any type starting with a decimal
+ # char, so needs to be located after handling of dates and times.
+ number_match = RE_NUMBER.match(src, pos)
+ if number_match:
+ return number_match.end(), match_to_number(number_match, parse_float)
+
+ # Arrays
+ if char == "[":
+ return parse_array(src, pos, parse_float)
+
+ # Inline tables
+ if char == "{":
+ return parse_inline_table(src, pos, parse_float)
+
+ # Special floats
+ first_three = src[pos : pos + 3]
+ if first_three in {"inf", "nan"}:
+ return pos + 3, parse_float(first_three)
+ first_four = src[pos : pos + 4]
+ if first_four in {"-inf", "+inf", "-nan", "+nan"}:
+ return pos + 4, parse_float(first_four)
+
+ raise suffixed_err(src, pos, "Invalid value")
+
+
+def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError:
+ """Return a `TOMLDecodeError` where error message is suffixed with
+ coordinates in source."""
+
+ def coord_repr(src: str, pos: Pos) -> str:
+ if pos >= len(src):
+ return "end of document"
+ line = src.count("\n", 0, pos) + 1
+ if line == 1:
+ column = pos + 1
+ else:
+ column = pos - src.rindex("\n", 0, pos)
+ return f"line {line}, column {column}"
+
+ return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})")
+
+
+def is_unicode_scalar_value(codepoint: int) -> bool:
+ return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
diff --git a/flit_core/flit_core/vendor/tomli/_re.py b/flit_core/flit_core/vendor/tomli/_re.py
new file mode 100644
index 0000000..45e17e2
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/_re.py
@@ -0,0 +1,101 @@
+from datetime import date, datetime, time, timedelta, timezone, tzinfo
+from functools import lru_cache
+import re
+from typing import Any, Optional, Union
+
+from ._types import ParseFloat
+
+# E.g.
+# - 00:32:00.999999
+# - 00:32:00
+_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?"
+
+RE_NUMBER = re.compile(
+ r"""
+0
+(?:
+ x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
+ |
+ b[01](?:_?[01])* # bin
+ |
+ o[0-7](?:_?[0-7])* # oct
+)
+|
+[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
+(?P<floatpart>
+ (?:\.[0-9](?:_?[0-9])*)? # optional fractional part
+ (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
+)
+""",
+ flags=re.VERBOSE,
+)
+RE_LOCALTIME = re.compile(_TIME_RE_STR)
+RE_DATETIME = re.compile(
+ fr"""
+([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
+(?:
+ [Tt ]
+ {_TIME_RE_STR}
+ (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
+)?
+""",
+ flags=re.VERBOSE,
+)
+
+
+def match_to_datetime(match: "re.Match") -> Union[datetime, date]:
+ """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
+
+ Raises ValueError if the match does not correspond to a valid date
+ or datetime.
+ """
+ (
+ year_str,
+ month_str,
+ day_str,
+ hour_str,
+ minute_str,
+ sec_str,
+ micros_str,
+ zulu_time,
+ offset_sign_str,
+ offset_hour_str,
+ offset_minute_str,
+ ) = match.groups()
+ year, month, day = int(year_str), int(month_str), int(day_str)
+ if hour_str is None:
+ return date(year, month, day)
+ hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
+ micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+ if offset_sign_str:
+ tz: Optional[tzinfo] = cached_tz(
+ offset_hour_str, offset_minute_str, offset_sign_str
+ )
+ elif zulu_time:
+ tz = timezone.utc
+ else: # local date-time
+ tz = None
+ return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
+
+
+@lru_cache(maxsize=None)
+def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone:
+ sign = 1 if sign_str == "+" else -1
+ return timezone(
+ timedelta(
+ hours=sign * int(hour_str),
+ minutes=sign * int(minute_str),
+ )
+ )
+
+
+def match_to_localtime(match: "re.Match") -> time:
+ hour_str, minute_str, sec_str, micros_str = match.groups()
+ micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+ return time(int(hour_str), int(minute_str), int(sec_str), micros)
+
+
+def match_to_number(match: "re.Match", parse_float: "ParseFloat") -> Any:
+ if match.group("floatpart"):
+ return parse_float(match.group())
+ return int(match.group(), 0)
diff --git a/flit_core/flit_core/vendor/tomli/_types.py b/flit_core/flit_core/vendor/tomli/_types.py
new file mode 100644
index 0000000..e37cc80
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/_types.py
@@ -0,0 +1,6 @@
+from typing import Any, Callable, Tuple
+
+# Type annotations
+ParseFloat = Callable[[str], Any]
+Key = Tuple[str, ...]
+Pos = int
diff --git a/flit_core/flit_core/vendor/tomli/py.typed b/flit_core/flit_core/vendor/tomli/py.typed
new file mode 100644
index 0000000..7632ecf
--- /dev/null
+++ b/flit_core/flit_core/vendor/tomli/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561
diff --git a/flit_core/flit_core/versionno.py b/flit_core/flit_core/versionno.py
new file mode 100644
index 0000000..eed1a5b
--- /dev/null
+++ b/flit_core/flit_core/versionno.py
@@ -0,0 +1,127 @@
+"""Normalise version number according to PEP 440"""
+import logging
+import os
+import re
+
+log = logging.getLogger(__name__)
+
+# Regex below from packaging, via PEP 440. BSD License:
+# Copyright (c) Donald Stufft and individual contributors.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+VERSION_PERMISSIVE = re.compile(r"""
+ \s*v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+\s*$""", re.VERBOSE)
+
+pre_spellings = {
+ 'a': 'a', 'alpha': 'a',
+ 'b': 'b', 'beta': 'b',
+ 'rc': 'rc', 'c': 'rc', 'pre': 'rc', 'preview': 'rc',
+}
+
+def normalise_version(orig_version):
+ """Normalise version number according to rules in PEP 440
+
+ Raises InvalidVersion if the version does not match PEP 440. This can be
+ overridden with the FLIT_ALLOW_INVALID environment variable.
+
+ https://www.python.org/dev/peps/pep-0440/#normalization
+ """
+ version = orig_version.lower()
+ m = VERSION_PERMISSIVE.match(version)
+ if not m:
+ if os.environ.get('FLIT_ALLOW_INVALID'):
+ log.warning("Invalid version number {!r} allowed by FLIT_ALLOW_INVALID"
+ .format(orig_version))
+ return version
+ else:
+ from .common import InvalidVersion
+ raise InvalidVersion("Version number {!r} does not match PEP 440 rules"
+ .format(orig_version))
+
+ components = []
+ add = components.append
+
+ epoch, release = m.group('epoch', 'release')
+ if epoch is not None:
+ add(str(int(epoch)) + '!')
+ add('.'.join(str(int(rp)) for rp in release.split('.')))
+
+ pre_l, pre_n = m.group('pre_l', 'pre_n')
+ if pre_l is not None:
+ pre_l = pre_spellings[pre_l]
+ pre_n = '0' if pre_n is None else str(int(pre_n))
+ add(pre_l + pre_n)
+
+ post_n1, post_l, post_n2 = m.group('post_n1', 'post_l', 'post_n2')
+ if post_n1 is not None:
+ add('.post' + str(int(post_n1)))
+ elif post_l is not None:
+ post_n = '0' if post_n2 is None else str(int(post_n2))
+ add('.post' + str(int(post_n)))
+
+ dev_l, dev_n = m.group('dev_l', 'dev_n')
+ if dev_l is not None:
+ dev_n = '0' if dev_n is None else str(int(dev_n))
+ add('.dev' + dev_n)
+
+ local = m.group('local')
+ if local is not None:
+ local = local.replace('-', '.').replace('_', '.')
+ l = [str(int(c)) if c.isdigit() else c
+ for c in local.split('.')]
+ add('+' + '.'.join(l))
+
+ version = ''.join(components)
+ if version != orig_version:
+ log.warning("Version number normalised: {!r} -> {!r} (see PEP 440)"
+ .format(orig_version, version))
+ return version
+
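To make the normalisation rules above concrete, here is a small usage sketch (assuming flit_core is importable from this checkout) showing how a few permissive spellings map onto their PEP 440 canonical forms:

```python
from flit_core.versionno import normalise_version

# Pre-release spellings collapse to a/b/rc, post and dev releases gain a dot,
# and a leading "v" is stripped; a warning is logged whenever the result differs.
assert normalise_version("1.0") == "1.0"
assert normalise_version("v1.0-ALPHA1") == "1.0a1"
assert normalise_version("1.0-post2") == "1.0.post2"
assert normalise_version("1.0dev") == "1.0.dev0"
```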
diff --git a/flit_core/flit_core/wheel.py b/flit_core/flit_core/wheel.py
new file mode 100644
index 0000000..08cb70a
--- /dev/null
+++ b/flit_core/flit_core/wheel.py
@@ -0,0 +1,259 @@
+import argparse
+from base64 import urlsafe_b64encode
+import contextlib
+from datetime import datetime
+import hashlib
+import io
+import logging
+import os
+import os.path as osp
+import stat
+import tempfile
+from pathlib import Path
+from types import SimpleNamespace
+from typing import Optional
+import zipfile
+
+from flit_core import __version__
+from . import common
+
+log = logging.getLogger(__name__)
+
+wheel_file_template = u"""\
+Wheel-Version: 1.0
+Generator: flit {version}
+Root-Is-Purelib: true
+""".format(version=__version__)
+
+def _write_wheel_file(f, supports_py2=False):
+ f.write(wheel_file_template)
+ if supports_py2:
+ f.write(u"Tag: py2-none-any\n")
+ f.write(u"Tag: py3-none-any\n")
+
+
+def _set_zinfo_mode(zinfo, mode):
+ # Set the bits for the mode and bit 0xFFFF for “regular file”
+ zinfo.external_attr = mode << 16
+
+
+def zip_timestamp_from_env() -> Optional[tuple]:
+ """Prepare a timestamp from $SOURCE_DATE_EPOCH, if set"""
+ try:
+ # If SOURCE_DATE_EPOCH is set (e.g. by Debian), it's used for
+ # timestamps inside the zip file.
+ d = datetime.utcfromtimestamp(int(os.environ['SOURCE_DATE_EPOCH']))
+ except (KeyError, ValueError):
+ # Otherwise, we'll use the mtime of files, and generated files will
+ # default to 2016-1-1 00:00:00
+ return None
+
+ if d.year >= 1980:
+ log.info("Zip timestamps will be from SOURCE_DATE_EPOCH: %s", d)
+ # zipfile expects a 6-tuple, not a datetime object
+ return d.year, d.month, d.day, d.hour, d.minute, d.second
+ else:
+ log.info("SOURCE_DATE_EPOCH is below the minimum for zip file timestamps")
+ log.info("Zip timestamps will be 1980-01-01 00:00:00")
+ return 1980, 1, 1, 0, 0, 0
+
+
+class WheelBuilder:
+ def __init__(
+ self, directory, module, metadata, entrypoints, target_fp, data_directory
+ ):
+ """Build a wheel from a module/package
+ """
+ self.directory = directory
+ self.module = module
+ self.metadata = metadata
+ self.entrypoints = entrypoints
+ self.data_directory = data_directory
+
+ self.records = []
+ self.source_time_stamp = zip_timestamp_from_env()
+
+ # Open the zip file ready to write
+ self.wheel_zip = zipfile.ZipFile(target_fp, 'w',
+ compression=zipfile.ZIP_DEFLATED)
+
+ @classmethod
+ def from_ini_path(cls, ini_path, target_fp):
+ from .config import read_flit_config
+ directory = ini_path.parent
+ ini_info = read_flit_config(ini_path)
+ entrypoints = ini_info.entrypoints
+ module = common.Module(ini_info.module, directory)
+ metadata = common.make_metadata(module, ini_info)
+ return cls(
+ directory, module, metadata, entrypoints, target_fp, ini_info.data_directory
+ )
+
+ @property
+ def dist_info(self):
+ return common.dist_info_name(self.metadata.name, self.metadata.version)
+
+ @property
+ def wheel_filename(self):
+ dist_name = common.normalize_dist_name(self.metadata.name, self.metadata.version)
+ tag = ('py2.' if self.metadata.supports_py2 else '') + 'py3-none-any'
+ return '{}-{}.whl'.format(dist_name, tag)
+
+ def _add_file(self, full_path, rel_path):
+ log.debug("Adding %s to zip file", full_path)
+ full_path, rel_path = str(full_path), str(rel_path)
+ if os.sep != '/':
+ # We always want to have /-separated paths in the zip file and in
+ # RECORD
+ rel_path = rel_path.replace(os.sep, '/')
+
+ if self.source_time_stamp is None:
+ zinfo = zipfile.ZipInfo.from_file(full_path, rel_path)
+ else:
+ # Set timestamps in zipfile for reproducible build
+ zinfo = zipfile.ZipInfo(rel_path, self.source_time_stamp)
+
+ # Normalize permission bits to either 755 (executable) or 644
+ st_mode = os.stat(full_path).st_mode
+ new_mode = common.normalize_file_permissions(st_mode)
+ _set_zinfo_mode(zinfo, new_mode & 0xFFFF) # Unix attributes
+
+ if stat.S_ISDIR(st_mode):
+ zinfo.external_attr |= 0x10 # MS-DOS directory flag
+
+ zinfo.compress_type = zipfile.ZIP_DEFLATED
+
+ hashsum = hashlib.sha256()
+ with open(full_path, 'rb') as src, self.wheel_zip.open(zinfo, 'w') as dst:
+ while True:
+ buf = src.read(1024 * 8)
+ if not buf:
+ break
+ hashsum.update(buf)
+ dst.write(buf)
+
+ size = os.stat(full_path).st_size
+ hash_digest = urlsafe_b64encode(hashsum.digest()).decode('ascii').rstrip('=')
+ self.records.append((rel_path, hash_digest, size))
+
+ @contextlib.contextmanager
+ def _write_to_zip(self, rel_path, mode=0o644):
+ sio = io.StringIO()
+ yield sio
+
+ log.debug("Writing data to %s in zip file", rel_path)
+ # The default is a fixed timestamp rather than the current time, so
+ # that building a wheel twice on the same computer can automatically
+ # give you the exact same result.
+ date_time = self.source_time_stamp or (2016, 1, 1, 0, 0, 0)
+ zi = zipfile.ZipInfo(rel_path, date_time)
+ _set_zinfo_mode(zi, mode)
+ b = sio.getvalue().encode('utf-8')
+ hashsum = hashlib.sha256(b)
+ hash_digest = urlsafe_b64encode(hashsum.digest()).decode('ascii').rstrip('=')
+ self.wheel_zip.writestr(zi, b, compress_type=zipfile.ZIP_DEFLATED)
+ self.records.append((rel_path, hash_digest, len(b)))
+
+ def copy_module(self):
+ log.info('Copying package file(s) from %s', self.module.path)
+ source_dir = str(self.module.source_dir)
+
+ for full_path in self.module.iter_files():
+ rel_path = osp.relpath(full_path, source_dir)
+ self._add_file(full_path, rel_path)
+
+ def add_pth(self):
+ with self._write_to_zip(self.module.name + ".pth") as f:
+ f.write(str(self.module.source_dir.resolve()))
+
+ def add_data_directory(self):
+ dir_in_whl = '{}.data/data/'.format(
+ common.normalize_dist_name(self.metadata.name, self.metadata.version)
+ )
+ for full_path in common.walk_data_dir(self.data_directory):
+ rel_path = os.path.relpath(full_path, self.data_directory)
+ self._add_file(full_path, dir_in_whl + rel_path)
+
+ def write_metadata(self):
+ log.info('Writing metadata files')
+
+ if self.entrypoints:
+ with self._write_to_zip(self.dist_info + '/entry_points.txt') as f:
+ common.write_entry_points(self.entrypoints, f)
+
+ for base in ('COPYING', 'LICENSE'):
+ for path in sorted(self.directory.glob(base + '*')):
+ if path.is_file():
+ self._add_file(path, '%s/%s' % (self.dist_info, path.name))
+
+ with self._write_to_zip(self.dist_info + '/WHEEL') as f:
+ _write_wheel_file(f, supports_py2=self.metadata.supports_py2)
+
+ with self._write_to_zip(self.dist_info + '/METADATA') as f:
+ self.metadata.write_metadata_file(f)
+
+ def write_record(self):
+ log.info('Writing the record of files')
+ # Write a record of the files in the wheel
+ with self._write_to_zip(self.dist_info + '/RECORD') as f:
+ for path, hash, size in self.records:
+ f.write(u'{},sha256={},{}\n'.format(path, hash, size))
+ # RECORD itself is recorded with no hash or size
+ f.write(self.dist_info + '/RECORD,,\n')
+
+ def build(self, editable=False):
+ try:
+ if editable:
+ self.add_pth()
+ else:
+ self.copy_module()
+ self.add_data_directory()
+ self.write_metadata()
+ self.write_record()
+ finally:
+ self.wheel_zip.close()
+
+def make_wheel_in(ini_path, wheel_directory, editable=False):
+ # We don't know the final filename until metadata is loaded, so write to
+    # a temporary file, and rename it afterwards.
+ (fd, temp_path) = tempfile.mkstemp(suffix='.whl', dir=str(wheel_directory))
+ try:
+ with io.open(fd, 'w+b') as fp:
+ wb = WheelBuilder.from_ini_path(ini_path, fp)
+ wb.build(editable)
+
+ wheel_path = wheel_directory / wb.wheel_filename
+ os.replace(temp_path, str(wheel_path))
+ except:
+ os.unlink(temp_path)
+ raise
+
+ log.info("Built wheel: %s", wheel_path)
+ return SimpleNamespace(builder=wb, file=wheel_path)
+
+
+def main(argv=None):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ 'srcdir',
+ type=Path,
+ nargs='?',
+ default=Path.cwd(),
+ help='source directory (defaults to current directory)',
+ )
+
+ parser.add_argument(
+ '--outdir',
+ '-o',
+ help='output directory (defaults to {srcdir}/dist)',
+ )
+ args = parser.parse_args(argv)
+ outdir = args.srcdir / 'dist' if args.outdir is None else Path(args.outdir)
+ print("Building wheel from", args.srcdir)
+ pyproj_toml = args.srcdir / 'pyproject.toml'
+ outdir.mkdir(parents=True, exist_ok=True)
+ info = make_wheel_in(pyproj_toml, outdir)
+ print("Wheel built", outdir / info.file.name)
+
+if __name__ == "__main__":
+ main()
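For context, the builder above can also be driven programmatically; a hedged sketch, roughly equivalent to running `python -m flit_core.wheel <srcdir> -o dist` from a checkout where flit_core is importable:

```python
from pathlib import Path
from flit_core.wheel import make_wheel_in

srcdir = Path(".")                         # assumed: a project directory containing pyproject.toml
outdir = srcdir / "dist"
outdir.mkdir(parents=True, exist_ok=True)  # mkstemp() needs the target directory to exist

info = make_wheel_in(srcdir / "pyproject.toml", outdir)
print("Built", info.file)                  # SimpleNamespace(builder=..., file=<path to the .whl>)
```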
diff --git a/flit_core/pyproject.toml b/flit_core/pyproject.toml
new file mode 100644
index 0000000..e11bf62
--- /dev/null
+++ b/flit_core/pyproject.toml
@@ -0,0 +1,25 @@
+[build-system]
+requires = []
+build-backend = "flit_core.buildapi"
+backend-path = ["."]
+
+[project]
+name="flit_core"
+authors=[
+ {name = "Thomas Kluyver & contributors", email = "thomas@kluyver.me.uk"},
+]
+description = "Distribution-building parts of Flit. See flit package for more information"
+dependencies = []
+requires-python = '>=3.6'
+license = {file = "LICENSE"}
+classifiers = [
+ "License :: OSI Approved :: BSD License",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+]
+dynamic = ["version"]
+
+[project.urls]
+Source = "https://github.com/pypa/flit"
+
+[tool.flit.sdist]
+include = ["bootstrap_install.py", "build_dists.py"]
diff --git a/flit_core/update-vendored-tomli.sh b/flit_core/update-vendored-tomli.sh
new file mode 100755
index 0000000..c10af1f
--- /dev/null
+++ b/flit_core/update-vendored-tomli.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Update the vendored copy of tomli
+set -euo pipefail
+
+version=$1
+echo "Bundling tomli version $version"
+
+rm -rf flit_core/vendor/tomli*
+pip install --target flit_core/vendor/ "tomli==$version"
+
+# Convert absolute imports to relative (from tomli.foo -> from .foo)
+for file in flit_core/vendor/tomli/*.py; do
+ sed -i -E 's/((from|import)[[:space:]]+)tomli\./\1\./' "$file"
+done
+
+# Delete some files that aren't useful in this context.
+# Leave LICENSE & METADATA present.
+rm flit_core/vendor/tomli*.dist-info/{INSTALLER,RECORD,REQUESTED,WHEEL}
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..caeefcd
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,46 @@
+[build-system]
+requires = ["flit_core >=3.8.0,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "flit"
+authors = [
+ {name = "Thomas Kluyver", email = "thomas@kluyver.me.uk"},
+]
+dependencies = [
+ "flit_core >=3.8.0",
+ "requests",
+ "docutils",
+ "tomli-w",
+]
+requires-python = ">=3.6"
+readme = "README.rst"
+license = {file = "LICENSE"}
+classifiers = ["Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Programming Language :: Python :: 3",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+]
+dynamic = ['version', 'description']
+
+[project.optional-dependencies]
+test = [
+ "testpath",
+ "responses",
+ "pytest>=2.7.3",
+ "pytest-cov",
+ "tomli",
+]
+doc = [
+ "sphinx",
+ "sphinxcontrib_github_alt",
+ "pygments-github-lexers", # TOML highlighting
+]
+
+[project.urls]
+Documentation = "https://flit.pypa.io"
+Source = "https://github.com/pypa/flit"
+Changelog = "https://flit.pypa.io/en/stable/history.html"
+
+[project.scripts]
+flit = "flit:main"
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/__init__.py
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..4c5ecef
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,15 @@
+from pathlib import Path
+import pytest
+from shutil import copytree
+
+samples_dir = Path(__file__).parent / 'samples'
+
+@pytest.fixture
+def copy_sample(tmp_path):
+ """Copy a subdirectory from the samples dir to a temp dir"""
+ def copy(dirname):
+ dst = tmp_path / dirname
+ copytree(str(samples_dir / dirname), str(dst))
+ return dst
+
+ return copy
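A hedged sketch of how a test might use the fixture above (the `module3` sample tree appears later in this diff):

```python
def test_copy_sample_copies_tree(copy_sample):
    td = copy_sample('module3')               # copies tests/samples/module3 into tmp_path
    assert (td / 'pyproject.toml').is_file()
    assert (td / 'src' / 'module3.py').is_file()
```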
diff --git a/tests/samples/EG_README.rst b/tests/samples/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/tests/samples/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/tests/samples/altdistname/package1/__init__.py b/tests/samples/altdistname/package1/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/tests/samples/altdistname/package1/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/tests/samples/altdistname/package1/data_dir/foo.sh b/tests/samples/altdistname/package1/data_dir/foo.sh
new file mode 100644
index 0000000..92abcfb
--- /dev/null
+++ b/tests/samples/altdistname/package1/data_dir/foo.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo "Example data file"
diff --git a/tests/samples/altdistname/package1/foo.py b/tests/samples/altdistname/package1/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/tests/samples/altdistname/package1/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/tests/samples/altdistname/package1/subpkg/__init__.py b/tests/samples/altdistname/package1/subpkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/altdistname/package1/subpkg/__init__.py
diff --git a/tests/samples/altdistname/package1/subpkg/sp_data_dir/test.json b/tests/samples/altdistname/package1/subpkg/sp_data_dir/test.json
new file mode 100644
index 0000000..f77d03c
--- /dev/null
+++ b/tests/samples/altdistname/package1/subpkg/sp_data_dir/test.json
@@ -0,0 +1 @@
+{"example": true}
diff --git a/tests/samples/altdistname/package1/subpkg2/__init__.py b/tests/samples/altdistname/package1/subpkg2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/altdistname/package1/subpkg2/__init__.py
diff --git a/tests/samples/altdistname/pyproject.toml b/tests/samples/altdistname/pyproject.toml
new file mode 100644
index 0000000..c2d08ca
--- /dev/null
+++ b/tests/samples/altdistname/pyproject.toml
@@ -0,0 +1,11 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/package1"
+dist-name = "package-Dist1"
+
diff --git a/tests/samples/bad-description-ext.toml b/tests/samples/bad-description-ext.toml
new file mode 100644
index 0000000..1062829
--- /dev/null
+++ b/tests/samples/bad-description-ext.toml
@@ -0,0 +1,9 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "module1.py" # WRONG
diff --git a/tests/samples/entrypoints_conflict/console_entry_points.txt b/tests/samples/entrypoints_conflict/console_entry_points.txt
new file mode 100644
index 0000000..eb47371
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/console_entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+foo=bar:baz \ No newline at end of file
diff --git a/tests/samples/entrypoints_conflict/package1/__init__.py b/tests/samples/entrypoints_conflict/package1/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/package1/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/tests/samples/entrypoints_conflict/package1/data_dir/foo.sh b/tests/samples/entrypoints_conflict/package1/data_dir/foo.sh
new file mode 100644
index 0000000..92abcfb
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/package1/data_dir/foo.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo "Example data file"
diff --git a/tests/samples/entrypoints_conflict/package1/foo.py b/tests/samples/entrypoints_conflict/package1/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/package1/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/tests/samples/entrypoints_conflict/package1/subpkg/__init__.py b/tests/samples/entrypoints_conflict/package1/subpkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/package1/subpkg/__init__.py
diff --git a/tests/samples/entrypoints_conflict/package1/subpkg/sp_data_dir/test.json b/tests/samples/entrypoints_conflict/package1/subpkg/sp_data_dir/test.json
new file mode 100644
index 0000000..f77d03c
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/package1/subpkg/sp_data_dir/test.json
@@ -0,0 +1 @@
+{"example": true}
diff --git a/tests/samples/entrypoints_conflict/package1/subpkg2/__init__.py b/tests/samples/entrypoints_conflict/package1/subpkg2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/package1/subpkg2/__init__.py
diff --git a/tests/samples/entrypoints_conflict/pyproject.toml b/tests/samples/entrypoints_conflict/pyproject.toml
new file mode 100644
index 0000000..506a4eb
--- /dev/null
+++ b/tests/samples/entrypoints_conflict/pyproject.toml
@@ -0,0 +1,16 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/package1"
+
+# The sections below conflict
+[tool.flit.scripts]
+pkg_script = "package1:main"
+
+[tool.flit.entrypoints.console_scripts]
+foo = "bar:baz"
diff --git a/tests/samples/entrypoints_valid/package1/__init__.py b/tests/samples/entrypoints_valid/package1/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/tests/samples/entrypoints_valid/package1/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/tests/samples/entrypoints_valid/package1/data_dir/foo.sh b/tests/samples/entrypoints_valid/package1/data_dir/foo.sh
new file mode 100644
index 0000000..92abcfb
--- /dev/null
+++ b/tests/samples/entrypoints_valid/package1/data_dir/foo.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo "Example data file"
diff --git a/tests/samples/entrypoints_valid/package1/foo.py b/tests/samples/entrypoints_valid/package1/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/tests/samples/entrypoints_valid/package1/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/tests/samples/entrypoints_valid/package1/subpkg/__init__.py b/tests/samples/entrypoints_valid/package1/subpkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/entrypoints_valid/package1/subpkg/__init__.py
diff --git a/tests/samples/entrypoints_valid/package1/subpkg/sp_data_dir/test.json b/tests/samples/entrypoints_valid/package1/subpkg/sp_data_dir/test.json
new file mode 100644
index 0000000..f77d03c
--- /dev/null
+++ b/tests/samples/entrypoints_valid/package1/subpkg/sp_data_dir/test.json
@@ -0,0 +1 @@
+{"example": true}
diff --git a/tests/samples/entrypoints_valid/package1/subpkg2/__init__.py b/tests/samples/entrypoints_valid/package1/subpkg2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/entrypoints_valid/package1/subpkg2/__init__.py
diff --git a/tests/samples/entrypoints_valid/pyproject.toml b/tests/samples/entrypoints_valid/pyproject.toml
new file mode 100644
index 0000000..d89da6a
--- /dev/null
+++ b/tests/samples/entrypoints_valid/pyproject.toml
@@ -0,0 +1,15 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/package1"
+
+[tool.flit.scripts]
+pkg_script = "package1:main"
+
+[tool.flit.entrypoints.myplugins]
+package1 = "package1:main"
diff --git a/tests/samples/extras-dev-conflict.toml b/tests/samples/extras-dev-conflict.toml
new file mode 100644
index 0000000..0fe249d
--- /dev/null
+++ b/tests/samples/extras-dev-conflict.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+dev-requires = ["apackage"]
+
+[tool.flit.metadata.requires-extra]
+dev = ["anotherpackage"]
diff --git a/tests/samples/extras/module1.py b/tests/samples/extras/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/extras/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/extras/pyproject.toml b/tests/samples/extras/pyproject.toml
new file mode 100644
index 0000000..557ba2a
--- /dev/null
+++ b/tests/samples/extras/pyproject.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = ["toml"]
+
+[tool.flit.metadata.requires-extra]
+test = ["pytest"]
+custom = ["requests"]
diff --git a/tests/samples/invalid_classifier.toml b/tests/samples/invalid_classifier.toml
new file mode 100644
index 0000000..931d72f
--- /dev/null
+++ b/tests/samples/invalid_classifier.toml
@@ -0,0 +1,14 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+description-file = "my-description.rst"
+home-page = "http://github.com/sirrobin/package1"
+classifiers = [
+ "License :: OSI Approved :: BSD License",
+ "Intended Audience :: Pacman",
+]
diff --git a/tests/samples/invalid_version1.py b/tests/samples/invalid_version1.py
new file mode 100644
index 0000000..dd3268a
--- /dev/null
+++ b/tests/samples/invalid_version1.py
@@ -0,0 +1,3 @@
+"""Sample module with invalid __version__ string"""
+
+__version__ = "not starting with a number" \ No newline at end of file
diff --git a/tests/samples/missing-description-file.toml b/tests/samples/missing-description-file.toml
new file mode 100644
index 0000000..00fae72
--- /dev/null
+++ b/tests/samples/missing-description-file.toml
@@ -0,0 +1,9 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "missingdescriptionfile"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/missingdescriptionfile"
+description-file = "definitely-missing.rst"
diff --git a/tests/samples/module1.py b/tests/samples/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/module1_ini/flit.ini b/tests/samples/module1_ini/flit.ini
new file mode 100644
index 0000000..9bbfc4e
--- /dev/null
+++ b/tests/samples/module1_ini/flit.ini
@@ -0,0 +1,5 @@
+[metadata]
+module=module1
+author=Sir Robin
+author-email=robin@camelot.uk
+home-page=http://github.com/sirrobin/module1
diff --git a/tests/samples/module1_ini/module1.py b/tests/samples/module1_ini/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/module1_ini/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/module1_toml/EG_README.rst b/tests/samples/module1_toml/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/tests/samples/module1_toml/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/tests/samples/module1_toml/module1.py b/tests/samples/module1_toml/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/module1_toml/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/module1_toml/pyproject.toml b/tests/samples/module1_toml/pyproject.toml
new file mode 100644
index 0000000..740ec87
--- /dev/null
+++ b/tests/samples/module1_toml/pyproject.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.urls]
+Documentation = "https://example.com/module1"
diff --git a/tests/samples/module2.py b/tests/samples/module2.py
new file mode 100644
index 0000000..cc83e39
--- /dev/null
+++ b/tests/samples/module2.py
@@ -0,0 +1,5 @@
+"""
+Docstring formatted like this.
+"""
+
+__version__ = '7.0'
diff --git a/tests/samples/module3/LICENSE b/tests/samples/module3/LICENSE
new file mode 100644
index 0000000..dfd033f
--- /dev/null
+++ b/tests/samples/module3/LICENSE
@@ -0,0 +1 @@
+Dummy license - check that it gets packaged
diff --git a/tests/samples/module3/pyproject.toml b/tests/samples/module3/pyproject.toml
new file mode 100644
index 0000000..95d8a80
--- /dev/null
+++ b/tests/samples/module3/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "module3"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module3"
+
diff --git a/tests/samples/module3/src/module3.py b/tests/samples/module3/src/module3.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/module3/src/module3.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/moduleunimportable.py b/tests/samples/moduleunimportable.py
new file mode 100644
index 0000000..147d26e
--- /dev/null
+++ b/tests/samples/moduleunimportable.py
@@ -0,0 +1,8 @@
+
+"""
+A sample unimportable module
+"""
+
+raise ImportError()
+
+__version__ = "0.1"
diff --git a/tests/samples/modulewithconstructedversion.py b/tests/samples/modulewithconstructedversion.py
new file mode 100644
index 0000000..5d9ec93
--- /dev/null
+++ b/tests/samples/modulewithconstructedversion.py
@@ -0,0 +1,4 @@
+
+"""This module has a __version__ that requires runtime interpretation"""
+
+__version__ = ".".join(["1", "2", "3"])
diff --git a/tests/samples/modulewithlocalversion/modulewithlocalversion.py b/tests/samples/modulewithlocalversion/modulewithlocalversion.py
new file mode 100644
index 0000000..4d11be9
--- /dev/null
+++ b/tests/samples/modulewithlocalversion/modulewithlocalversion.py
@@ -0,0 +1,5 @@
+"""
+A module with a local version specifier
+"""
+
+__version__ = "0.1.dev0+test"
diff --git a/tests/samples/modulewithlocalversion/pyproject.toml b/tests/samples/modulewithlocalversion/pyproject.toml
new file mode 100644
index 0000000..bb80669
--- /dev/null
+++ b/tests/samples/modulewithlocalversion/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "modulewithlocalversion"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/modulewithlocalversion"
+
diff --git a/tests/samples/my-description.rst b/tests/samples/my-description.rst
new file mode 100644
index 0000000..623cb1d
--- /dev/null
+++ b/tests/samples/my-description.rst
@@ -0,0 +1 @@
+Sample description for test.
diff --git a/tests/samples/no_docstring-pkg.toml b/tests/samples/no_docstring-pkg.toml
new file mode 100644
index 0000000..b68827f
--- /dev/null
+++ b/tests/samples/no_docstring-pkg.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "no_docstring"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/no_docstring"
+description-file = "EG_README.rst"
+
+[tool.flit.metadata.urls]
+Documentation = "https://example.com/no_docstring"
diff --git a/tests/samples/no_docstring.py b/tests/samples/no_docstring.py
new file mode 100644
index 0000000..29524eb
--- /dev/null
+++ b/tests/samples/no_docstring.py
@@ -0,0 +1 @@
+__version__ = '7.0'
diff --git a/tests/samples/ns1-pkg-mod/ns1/module.py b/tests/samples/ns1-pkg-mod/ns1/module.py
new file mode 100644
index 0000000..4e02147
--- /dev/null
+++ b/tests/samples/ns1-pkg-mod/ns1/module.py
@@ -0,0 +1,5 @@
+"""An example single file module in a namespace package
+"""
+
+__version__ = '0.1'
+
diff --git a/tests/samples/ns1-pkg-mod/pyproject.toml b/tests/samples/ns1-pkg-mod/pyproject.toml
new file mode 100644
index 0000000..215732a
--- /dev/null
+++ b/tests/samples/ns1-pkg-mod/pyproject.toml
@@ -0,0 +1,7 @@
+[build-system]
+requires = ["flit_core >=3.5,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "ns1.module"
+dynamic = ["version", "description"]
diff --git a/tests/samples/ns1-pkg/EG_README.rst b/tests/samples/ns1-pkg/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/tests/samples/ns1-pkg/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/tests/samples/ns1-pkg/ns1/pkg/__init__.py b/tests/samples/ns1-pkg/ns1/pkg/__init__.py
new file mode 100644
index 0000000..445afbb
--- /dev/null
+++ b/tests/samples/ns1-pkg/ns1/pkg/__init__.py
@@ -0,0 +1,8 @@
+"""
+==================
+ns1.pkg
+==================
+"""
+
+__version__ = '0.1'
+
diff --git a/tests/samples/ns1-pkg/pyproject.toml b/tests/samples/ns1-pkg/pyproject.toml
new file mode 100644
index 0000000..acbabb1
--- /dev/null
+++ b/tests/samples/ns1-pkg/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=3.5,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "ns1.pkg"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
diff --git a/tests/samples/ns1-pkg2/EG_README.rst b/tests/samples/ns1-pkg2/EG_README.rst
new file mode 100644
index 0000000..a742974
--- /dev/null
+++ b/tests/samples/ns1-pkg2/EG_README.rst
@@ -0,0 +1,4 @@
+This is an example long description for tests to load.
+
+This file is `valid reStructuredText
+<http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html>`_.
diff --git a/tests/samples/ns1-pkg2/ns1/pkg2/__init__.py b/tests/samples/ns1-pkg2/ns1/pkg2/__init__.py
new file mode 100644
index 0000000..dbe87a4
--- /dev/null
+++ b/tests/samples/ns1-pkg2/ns1/pkg2/__init__.py
@@ -0,0 +1,8 @@
+"""
+==================
+ns1.pkg2
+==================
+"""
+
+__version__ = '0.1'
+
diff --git a/tests/samples/ns1-pkg2/pyproject.toml b/tests/samples/ns1-pkg2/pyproject.toml
new file mode 100644
index 0000000..d792a97
--- /dev/null
+++ b/tests/samples/ns1-pkg2/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit_core >=3.5,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "ns1.pkg2"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
diff --git a/tests/samples/package1/my-description.rst b/tests/samples/package1/my-description.rst
new file mode 100644
index 0000000..623cb1d
--- /dev/null
+++ b/tests/samples/package1/my-description.rst
@@ -0,0 +1 @@
+Sample description for test.
diff --git a/tests/samples/package1/package1/__init__.py b/tests/samples/package1/package1/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/tests/samples/package1/package1/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/tests/samples/package1/package1/data_dir/foo.sh b/tests/samples/package1/package1/data_dir/foo.sh
new file mode 100644
index 0000000..92abcfb
--- /dev/null
+++ b/tests/samples/package1/package1/data_dir/foo.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo "Example data file"
diff --git a/tests/samples/package1/package1/foo.py b/tests/samples/package1/package1/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/tests/samples/package1/package1/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/tests/samples/package1/package1/subpkg/__init__.py b/tests/samples/package1/package1/subpkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/package1/package1/subpkg/__init__.py
diff --git a/tests/samples/package1/package1/subpkg/sp_data_dir/test.json b/tests/samples/package1/package1/subpkg/sp_data_dir/test.json
new file mode 100644
index 0000000..f77d03c
--- /dev/null
+++ b/tests/samples/package1/package1/subpkg/sp_data_dir/test.json
@@ -0,0 +1 @@
+{"example": true}
diff --git a/tests/samples/package1/package1/subpkg2/__init__.py b/tests/samples/package1/package1/subpkg2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/package1/package1/subpkg2/__init__.py
diff --git a/tests/samples/package1/pyproject.toml b/tests/samples/package1/pyproject.toml
new file mode 100644
index 0000000..c4c4130
--- /dev/null
+++ b/tests/samples/package1/pyproject.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+description-file = "my-description.rst"
+home-page = "http://github.com/sirrobin/package1"
+
+[tool.flit.scripts]
+pkg_script = "package1:main"
diff --git a/tests/samples/package2/package2-pkg.ini b/tests/samples/package2/package2-pkg.ini
new file mode 100644
index 0000000..3b0864d
--- /dev/null
+++ b/tests/samples/package2/package2-pkg.ini
@@ -0,0 +1,8 @@
+[metadata]
+module=package2
+author=Sir Robin
+author-email=robin@camelot.uk
+home-page=http://github.com/sirrobin/package2
+
+[scripts]
+pkg_script=package2:main
diff --git a/tests/samples/package2/pyproject.toml b/tests/samples/package2/pyproject.toml
new file mode 100644
index 0000000..6119bbb
--- /dev/null
+++ b/tests/samples/package2/pyproject.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "package2"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/package2"
+
+[tool.flit.scripts]
+pkg_script = "package2:main"
diff --git a/tests/samples/package2/src/package2/__init__.py b/tests/samples/package2/src/package2/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/tests/samples/package2/src/package2/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/tests/samples/package2/src/package2/foo.py b/tests/samples/package2/src/package2/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/tests/samples/package2/src/package2/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/tests/samples/packageinsrc/pyproject.toml b/tests/samples/packageinsrc/pyproject.toml
new file mode 100644
index 0000000..b70209f
--- /dev/null
+++ b/tests/samples/packageinsrc/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit"]
+build-backend = "flit.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = []
diff --git a/tests/samples/packageinsrc/src/module1.py b/tests/samples/packageinsrc/src/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/packageinsrc/src/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/pep517/module1.py b/tests/samples/pep517/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/pep517/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/pep517/pyproject.toml b/tests/samples/pep517/pyproject.toml
new file mode 100644
index 0000000..6b4fa15
--- /dev/null
+++ b/tests/samples/pep517/pyproject.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit"]
+build-backend = "flit.buildapi"
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = [
+ "requests >= 2.18",
+ "docutils",
+]
diff --git a/tests/samples/requires-dev.toml b/tests/samples/requires-dev.toml
new file mode 100644
index 0000000..46e3170
--- /dev/null
+++ b/tests/samples/requires-dev.toml
@@ -0,0 +1,11 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+# This should generate a warning telling you to use requires-extra.dev
+dev-requires = ["apackage"]
diff --git a/tests/samples/requires-envmark/module1.py b/tests/samples/requires-envmark/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/requires-envmark/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/requires-envmark/pyproject.toml b/tests/samples/requires-envmark/pyproject.toml
new file mode 100644
index 0000000..e97c5f0
--- /dev/null
+++ b/tests/samples/requires-envmark/pyproject.toml
@@ -0,0 +1,12 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+requires = [
+ "requests",
+ "pathlib2; python_version == '2.7'",
+]
diff --git a/tests/samples/requires-extra-envmark/module1.py b/tests/samples/requires-extra-envmark/module1.py
new file mode 100644
index 0000000..87f0370
--- /dev/null
+++ b/tests/samples/requires-extra-envmark/module1.py
@@ -0,0 +1,3 @@
+"""Example module"""
+
+__version__ = '0.1'
diff --git a/tests/samples/requires-extra-envmark/pyproject.toml b/tests/samples/requires-extra-envmark/pyproject.toml
new file mode 100644
index 0000000..fe6975e
--- /dev/null
+++ b/tests/samples/requires-extra-envmark/pyproject.toml
@@ -0,0 +1,11 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+
+[tool.flit.metadata.requires-extra]
+test = ["pathlib2; python_version == \"2.7\""]
diff --git a/tests/samples/requires-requests.toml b/tests/samples/requires-requests.toml
new file mode 100644
index 0000000..bf26ac5
--- /dev/null
+++ b/tests/samples/requires-requests.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["flit"]
+
+[tool.flit.metadata]
+module = "module1"
+author = "Sir Robin"
+author-email = "robin@camelot.uk"
+home-page = "http://github.com/sirrobin/module1"
+description-file = "EG_README.rst"
+requires = ["requests"]
diff --git a/tests/samples/with_flit_ini/flit.ini b/tests/samples/with_flit_ini/flit.ini
new file mode 100644
index 0000000..0637840
--- /dev/null
+++ b/tests/samples/with_flit_ini/flit.ini
@@ -0,0 +1,9 @@
+[metadata]
+module=package1
+author=Sir Robin
+author-email=robin@camelot.uk
+home-page=http://github.com/sirrobin/package1
+entry-points-file=some_entry_points.txt
+
+[scripts]
+pkg_script=package1:main
diff --git a/tests/samples/with_flit_ini/package1/__init__.py b/tests/samples/with_flit_ini/package1/__init__.py
new file mode 100644
index 0000000..07978d8
--- /dev/null
+++ b/tests/samples/with_flit_ini/package1/__init__.py
@@ -0,0 +1,6 @@
+"""A sample package"""
+
+__version__ = '0.1'
+
+def main():
+ print("package1 main")
diff --git a/tests/samples/with_flit_ini/package1/foo.py b/tests/samples/with_flit_ini/package1/foo.py
new file mode 100644
index 0000000..1337a53
--- /dev/null
+++ b/tests/samples/with_flit_ini/package1/foo.py
@@ -0,0 +1 @@
+a = 1
diff --git a/tests/samples/with_flit_ini/package1/subpkg/__init__.py b/tests/samples/with_flit_ini/package1/subpkg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/samples/with_flit_ini/package1/subpkg/__init__.py
diff --git a/tests/samples/with_flit_ini/some_entry_points.txt b/tests/samples/with_flit_ini/some_entry_points.txt
new file mode 100644
index 0000000..317be93
--- /dev/null
+++ b/tests/samples/with_flit_ini/some_entry_points.txt
@@ -0,0 +1,2 @@
+[myplugins]
+package1=package1:main
\ No newline at end of file
diff --git a/tests/test_build.py b/tests/test_build.py
new file mode 100644
index 0000000..59dd90e
--- /dev/null
+++ b/tests/test_build.py
@@ -0,0 +1,84 @@
+from pathlib import Path
+import pytest
+import shutil
+import sys
+from tempfile import TemporaryDirectory
+from testpath import assert_isdir, MockCommand
+
+from flit_core import common
+from flit import build
+
+samples_dir = Path(__file__).parent / 'samples'
+
+LIST_FILES_TEMPLATE = """\
+#!{python}
+import sys
+from os.path import join
+if '--deleted' not in sys.argv:
+ files = ['pyproject.toml', '{module}', 'EG_README.rst']
+ print('\\0'.join(files), end='\\0')
+"""
+
+def test_build_main(copy_sample):
+ td = copy_sample('module1_toml')
+ (td / '.git').mkdir() # Fake a git repo
+
+ with MockCommand('git', LIST_FILES_TEMPLATE.format(
+ python=sys.executable, module='module1.py')):
+ res = build.main(td / 'pyproject.toml')
+ assert res.wheel.file.suffix == '.whl'
+ assert res.sdist.file.name.endswith('.tar.gz')
+
+ assert_isdir(td / 'dist')
+
+def test_build_sdist_only(copy_sample):
+ td = copy_sample('module1_toml')
+ (td / '.git').mkdir() # Fake a git repo
+
+ with MockCommand('git', LIST_FILES_TEMPLATE.format(
+ python=sys.executable, module='module1.py')):
+ res = build.main(td / 'pyproject.toml', formats={'sdist'})
+ assert res.wheel is None
+
+ # Compare str path to work around pathlib/pathlib2 mismatch on Py 3.5
+ assert [str(p) for p in (td / 'dist').iterdir()] == [str(res.sdist.file)]
+
+def test_build_wheel_only(copy_sample):
+ td = copy_sample('module1_toml')
+ (td / '.git').mkdir() # Fake a git repo
+
+ with MockCommand('git', LIST_FILES_TEMPLATE.format(
+ python=sys.executable, module='module1.py')):
+ res = build.main(td / 'pyproject.toml', formats={'wheel'})
+ assert res.sdist is None
+
+ # Compare str path to work around pathlib/pathlib2 mismatch on Py 3.5
+ assert [str(p) for p in (td / 'dist').iterdir()] == [str(res.wheel.file)]
+
+def test_build_ns_main(copy_sample):
+ td = copy_sample('ns1-pkg')
+ (td / '.git').mkdir() # Fake a git repo
+
+ with MockCommand('git', LIST_FILES_TEMPLATE.format(
+ python=sys.executable, module='ns1/pkg/__init__.py')):
+ res = build.main(td / 'pyproject.toml')
+ assert res.wheel.file.suffix == '.whl'
+ assert res.sdist.file.name.endswith('.tar.gz')
+
+ assert_isdir(td / 'dist')
+
+
+def test_build_module_no_docstring():
+ with TemporaryDirectory() as td:
+ pyproject = Path(td, 'pyproject.toml')
+ shutil.copy(str(samples_dir / 'no_docstring-pkg.toml'), str(pyproject))
+ shutil.copy(str(samples_dir / 'no_docstring.py'), td)
+ shutil.copy(str(samples_dir / 'EG_README.rst'), td)
+ Path(td, '.git').mkdir() # Fake a git repo
+
+
+ with MockCommand('git', LIST_FILES_TEMPLATE.format(
+ python=sys.executable, module='no_docstring.py')):
+ with pytest.raises(common.NoDocstringError) as exc_info:
+ build.main(pyproject)
+ assert 'no_docstring.py' in str(exc_info.value)
diff --git a/tests/test_command.py b/tests/test_command.py
new file mode 100644
index 0000000..1cec17d
--- /dev/null
+++ b/tests/test_command.py
@@ -0,0 +1,13 @@
+from subprocess import Popen, PIPE, STDOUT
+import sys
+
+def test_flit_help():
+ p = Popen([sys.executable, '-m', 'flit', '--help'], stdout=PIPE, stderr=STDOUT)
+ out, _ = p.communicate()
+ assert 'Build wheel' in out.decode('utf-8', 'replace')
+
+def test_flit_usage():
+ p = Popen([sys.executable, '-m', 'flit'], stdout=PIPE, stderr=STDOUT)
+ out, _ = p.communicate()
+ assert 'Build wheel' in out.decode('utf-8', 'replace')
+ assert p.poll() == 1
diff --git a/tests/test_config.py b/tests/test_config.py
new file mode 100644
index 0000000..214cd17
--- /dev/null
+++ b/tests/test_config.py
@@ -0,0 +1,10 @@
+from pathlib import Path
+import pytest
+
+from flit.config import read_flit_config, ConfigError
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_invalid_classifier():
+ with pytest.raises(ConfigError):
+ read_flit_config(samples_dir / 'invalid_classifier.toml')
diff --git a/tests/test_find_python_executable.py b/tests/test_find_python_executable.py
new file mode 100644
index 0000000..161dc7a
--- /dev/null
+++ b/tests/test_find_python_executable.py
@@ -0,0 +1,30 @@
+import os
+import re
+import sys
+
+import pytest
+
+from flit import PythonNotFoundError, find_python_executable
+
+
+def test_default():
+ assert find_python_executable(None) == sys.executable
+
+
+def test_self():
+ assert find_python_executable(sys.executable) == sys.executable
+
+
+def test_abs():
+ assert find_python_executable("/usr/bin/python") == "/usr/bin/python"
+
+
+def test_find_in_path():
+ assert os.path.isabs(find_python_executable("python"))
+
+
+@pytest.mark.parametrize("bad_python_name", ["pyhton", "ls", "."])
+def test_exception(bad_python_name: str):
+ """Test that an appropriate exception (that contains the error string) is raised."""
+ with pytest.raises(PythonNotFoundError, match=re.escape(bad_python_name)):
+ find_python_executable(bad_python_name)
diff --git a/tests/test_init.py b/tests/test_init.py
new file mode 100644
index 0000000..832343f
--- /dev/null
+++ b/tests/test_init.py
@@ -0,0 +1,255 @@
+import builtins
+from contextlib import contextmanager
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from testpath import assert_isfile
+from unittest.mock import patch
+import pytest
+
+try:
+ import tomllib
+except ImportError:
+ import tomli as tomllib
+
+from flit import init
+
+
+@contextmanager
+def patch_data_dir():
+ with TemporaryDirectory() as td:
+ with patch.object(init, 'get_data_dir', lambda: Path(td)):
+ yield td
+
+def test_store_defaults():
+ with patch_data_dir():
+ assert init.get_defaults() == {}
+ d = {'author': 'Test'}
+ init.store_defaults(d)
+ assert init.get_defaults() == d
+
+def fake_input(entries):
+ it = iter(entries)
+ def inner(prompt):
+ try:
+ return next(it)
+ except StopIteration:
+ raise EOFError
+
+ return inner
+
+def faking_input(entries):
+ return patch.object(builtins, 'input', fake_input(entries))
+
+def test_prompt_options():
+ ti = init.TerminalIniter()
+ with faking_input(['4', '1']):
+ res = ti.prompt_options('Pick one', [('A', 'Apple'), ('B', 'Banana')])
+ assert res == 'A'
+
+ # Test with a default
+ with faking_input(['']):
+ res = ti.prompt_options('Pick one', [('A', 'Apple'), ('B', 'Banana')],
+ default='B')
+ assert res == 'B'
+
+@contextmanager
+def make_dir(files=(), dirs=()):
+ with TemporaryDirectory() as td:
+ tdp = Path(td)
+ for d in dirs:
+ (tdp / d).mkdir()
+ for f in files:
+ (tdp / f).touch()
+ yield td
+
+def test_guess_module_name():
+ with make_dir(['foo.py', 'foo-bar.py', 'test_foo.py', 'setup.py']) as td:
+ ib = init.IniterBase(td)
+ assert ib.guess_module_name() == 'foo'
+
+ with make_dir(['baz/__init__.py', 'tests/__init__.py'], ['baz', 'tests']) as td:
+ ib = init.IniterBase(td)
+ assert ib.guess_module_name() == 'baz'
+
+ with make_dir(['src/foo.py', 'src/foo-bar.py', 'test_foo.py', 'setup.py'],
+ ['src',]) as td:
+ ib = init.IniterBase(td)
+ assert ib.guess_module_name() == 'foo'
+
+ with make_dir(['src/baz/__init__.py', 'tests/__init__.py'], ['src', 'src/baz', 'tests']) as td:
+ ib = init.IniterBase(td)
+ assert ib.guess_module_name() == 'baz'
+
+ with make_dir(['foo.py', 'bar.py']) as td:
+ ib = init.IniterBase(td)
+ assert ib.guess_module_name() is None
+
+ with make_dir(['src/foo.py', 'src/bar.py'], ['src']) as td:
+ ib = init.IniterBase(td)
+ assert ib.guess_module_name() is None
+
+def test_write_license():
+ with TemporaryDirectory() as td:
+ ib = init.IniterBase(td)
+ ib.write_license('mit', 'Thomas Kluyver')
+ assert_isfile(Path(td, 'LICENSE'))
+
+def test_init():
+ responses = ['foo', # Module name
+ 'Test Author', # Author
+ 'test@example.com', # Author email
+ 'http://example.com/', # Home page
+ '1' # License (1 -> MIT)
+ ]
+ with TemporaryDirectory() as td, \
+ patch_data_dir(), \
+ faking_input(responses):
+ ti = init.TerminalIniter(td)
+ ti.initialise()
+
+ generated = Path(td) / 'pyproject.toml'
+ assert_isfile(generated)
+ with generated.open('rb') as f:
+ data = tomllib.load(f)
+ assert data['project']['authors'][0]['email'] == "test@example.com"
+ license = Path(td) / 'LICENSE'
+ assert data['project']['license']['file'] == 'LICENSE'
+ assert_isfile(license)
+ with license.open() as f:
+ license_text = f.read()
+ assert license_text.startswith("The MIT License (MIT)")
+ assert "{year}" not in license_text
+ assert "Test Author" in license_text
+
+def test_init_homepage_and_license_are_optional():
+ responses = ['test_module_name',
+ 'Test Author',
+ 'test_email@example.com',
+ '', # Home page omitted
+ '4', # Skip - choose a license later
+ ]
+ with TemporaryDirectory() as td, \
+ patch_data_dir(), \
+ faking_input(responses):
+ ti = init.TerminalIniter(td)
+ ti.initialise()
+ with Path(td, 'pyproject.toml').open('rb') as f:
+ data = tomllib.load(f)
+ assert not Path(td, 'LICENSE').exists()
+ assert data['project'] == {
+ 'authors': [{'name': 'Test Author', 'email': 'test_email@example.com'}],
+ 'name': 'test_module_name',
+ 'dynamic': ['version', 'description'],
+ }
+
+def test_init_homepage_validator():
+ responses = ['test_module_name',
+ 'Test Author',
+ 'test_email@example.com',
+ 'www.uh-oh-spagghetti-o.com', # fails validation
+ 'https://www.example.org', # passes
+ '4', # Skip - choose a license later
+ ]
+ with TemporaryDirectory() as td, \
+ patch_data_dir(), \
+ faking_input(responses):
+ ti = init.TerminalIniter(td)
+ ti.initialise()
+ with Path(td, 'pyproject.toml').open('rb') as f:
+ data = tomllib.load(f)
+ assert data['project'] == {
+ 'authors': [{'name': 'Test Author', 'email': 'test_email@example.com'}],
+ 'name': 'test_module_name',
+ 'urls': {'Home': 'https://www.example.org'},
+ 'dynamic': ['version', 'description'],
+ }
+
+def test_author_email_field_is_optional():
+ responses = ['test_module_name',
+ 'Test Author',
+ '', # Author-email field is skipped
+ 'https://www.example.org',
+ '4',
+ ]
+ with TemporaryDirectory() as td, \
+ patch_data_dir(), \
+ faking_input(responses):
+ ti = init.TerminalIniter(td)
+ ti.initialise()
+ with Path(td, 'pyproject.toml').open('rb') as f:
+ data = tomllib.load(f)
+ assert not Path(td, 'LICENSE').exists()
+
+ assert data['project'] == {
+ 'authors': [{'name': 'Test Author'}],
+ 'name': 'test_module_name',
+ 'urls': {'Home': 'https://www.example.org'},
+ 'dynamic': ['version', 'description'],
+ }
+
+
+@pytest.mark.parametrize(
+ "readme_file",
+ ["readme.md", "README.MD", "README.md",
+ "Readme.md", "readme.MD", "readme.rst",
+ "readme.txt"])
+def test_find_readme(readme_file):
+ with make_dir([readme_file]) as td:
+ ib = init.IniterBase(td)
+ assert ib.find_readme() == readme_file
+
+
+def test_find_readme_not_found():
+ with make_dir() as td:
+ ib = init.IniterBase(td)
+ assert ib.find_readme() is None
+
+
+def test_init_readme_found_yes_chosen():
+ responses = ['test_module_name',
+ 'Test Author',
+ 'test_email@example.com',
+ '', # Home page omitted
+ '4', # Skip - choose a license later
+ ]
+ with make_dir(["readme.md"]) as td, \
+ patch_data_dir(), \
+ faking_input(responses):
+ ti = init.TerminalIniter(td)
+ ti.initialise()
+ with Path(td, 'pyproject.toml').open('rb') as f:
+ data = tomllib.load(f)
+
+ assert data['project'] == {
+ 'authors': [{'name': 'Test Author', 'email': 'test_email@example.com'}],
+ 'name': 'test_module_name',
+ 'readme': 'readme.md',
+ 'dynamic': ['version', 'description'],
+ }
+
+
+def test_init_non_ascii_author_name():
+ responses = ['foo', # Module name
+ 'Test Authôr', # Author
+ '', # Author email omitted
+ '', # Home page omitted
+ '1' # License (1 -> MIT)
+ ]
+ with TemporaryDirectory() as td, \
+ patch_data_dir(), \
+ faking_input(responses):
+ ti = init.TerminalIniter(td)
+ ti.initialise()
+
+ generated = Path(td) / 'pyproject.toml'
+ assert_isfile(generated)
+ with generated.open('r', encoding='utf-8') as f:
+ raw_text = f.read()
+ print(raw_text)
+ assert "Test Authôr" in raw_text
+ assert "\\u00f4" not in raw_text
+ license = Path(td) / 'LICENSE'
+ assert_isfile(license)
+ with license.open(encoding='utf-8') as f:
+ license_text = f.read()
+ assert "Test Authôr" in license_text
diff --git a/tests/test_install.py b/tests/test_install.py
new file mode 100644
index 0000000..b4e9068
--- /dev/null
+++ b/tests/test_install.py
@@ -0,0 +1,365 @@
+import json
+import os
+import pathlib
+import sys
+import tempfile
+from unittest import TestCase, SkipTest
+from unittest.mock import patch
+
+import pytest
+from testpath import (
+ assert_isfile, assert_isdir, assert_islink, assert_not_path_exists, MockCommand
+)
+
+from flit import install
+from flit.install import Installer, _requires_dist_to_pip_requirement, DependencyError
+import flit_core.tests
+
+samples_dir = pathlib.Path(__file__).parent / 'samples'
+core_samples_dir = pathlib.Path(flit_core.tests.__file__).parent / 'samples'
+
+class InstallTests(TestCase):
+ def setUp(self):
+ td = tempfile.TemporaryDirectory()
+ self.addCleanup(td.cleanup)
+ self.get_dirs_patch = patch('flit.install.get_dirs',
+ return_value={
+ 'scripts': os.path.join(td.name, 'scripts'),
+ 'purelib': os.path.join(td.name, 'site-packages'),
+ 'data': os.path.join(td.name, 'data'),
+ })
+ self.get_dirs_patch.start()
+ self.tmpdir = pathlib.Path(td.name)
+
+ def tearDown(self):
+ self.get_dirs_patch.stop()
+
+ def _assert_direct_url(self, directory, package, version, expected_editable):
+ direct_url_file = (
+ self.tmpdir
+ / 'site-packages'
+ / '{}-{}.dist-info'.format(package, version)
+ / 'direct_url.json'
+ )
+ assert_isfile(direct_url_file)
+ with direct_url_file.open() as f:
+ direct_url = json.load(f)
+ assert direct_url['url'].startswith('file:///')
+ assert direct_url['url'] == directory.as_uri()
+ assert direct_url['dir_info'].get('editable') is expected_editable
+
+ def test_install_module(self):
+ Installer.from_ini_path(samples_dir / 'module1_toml' / 'pyproject.toml').install_directly()
+ assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
+ assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')
+ self._assert_direct_url(
+ samples_dir / 'module1_toml', 'module1', '0.1', expected_editable=False
+ )
+
+ def test_install_module_pep621(self):
+ Installer.from_ini_path(
+ core_samples_dir / 'pep621_nodynamic' / 'pyproject.toml',
+ ).install_directly()
+ assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
+ assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.3.dist-info')
+ self._assert_direct_url(
+ core_samples_dir / 'pep621_nodynamic', 'module1', '0.3',
+ expected_editable=False
+ )
+
+ def test_install_package(self):
+ oldcwd = os.getcwd()
+ os.chdir(str(samples_dir / 'package1'))
+ try:
+ Installer.from_ini_path(pathlib.Path('pyproject.toml')).install_directly()
+ finally:
+ os.chdir(oldcwd)
+ assert_isdir(self.tmpdir / 'site-packages' / 'package1')
+ assert_isdir(self.tmpdir / 'site-packages' / 'package1-0.1.dist-info')
+ assert_isfile(self.tmpdir / 'scripts' / 'pkg_script')
+ with (self.tmpdir / 'scripts' / 'pkg_script').open() as f:
+ assert f.readline().strip() == "#!" + sys.executable
+ self._assert_direct_url(
+ samples_dir / 'package1', 'package1', '0.1', expected_editable=False
+ )
+
+ def test_install_module_in_src(self):
+ oldcwd = os.getcwd()
+ os.chdir(samples_dir / 'packageinsrc')
+ try:
+ Installer.from_ini_path(pathlib.Path('pyproject.toml')).install_directly()
+ finally:
+ os.chdir(oldcwd)
+ assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
+ assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')
+
+ def test_install_ns_package_native(self):
+ Installer.from_ini_path(samples_dir / 'ns1-pkg' / 'pyproject.toml').install_directly()
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1')
+ assert_isfile(self.tmpdir / 'site-packages' / 'ns1' / 'pkg' / '__init__.py')
+ assert_not_path_exists(self.tmpdir / 'site-packages' / 'ns1' / '__init__.py')
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1_pkg-0.1.dist-info')
+
+ def test_install_ns_package_module_native(self):
+ Installer.from_ini_path(samples_dir / 'ns1-pkg-mod' / 'pyproject.toml').install_directly()
+ assert_isfile(self.tmpdir / 'site-packages' / 'ns1' / 'module.py')
+ assert_not_path_exists(self.tmpdir / 'site-packages' / 'ns1' / '__init__.py')
+
+ def test_install_ns_package_native_symlink(self):
+ if os.name == 'nt':
+ raise SkipTest('symlink')
+ Installer.from_ini_path(
+ samples_dir / 'ns1-pkg' / 'pyproject.toml', symlink=True
+ ).install_directly()
+ Installer.from_ini_path(
+ samples_dir / 'ns1-pkg2' / 'pyproject.toml', symlink=True
+ ).install_directly()
+ Installer.from_ini_path(
+ samples_dir / 'ns1-pkg-mod' / 'pyproject.toml', symlink=True
+ ).install_directly()
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1')
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1' / 'pkg')
+ assert_islink(self.tmpdir / 'site-packages' / 'ns1' / 'pkg',
+ to=str(samples_dir / 'ns1-pkg' / 'ns1' / 'pkg'))
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1_pkg-0.1.dist-info')
+
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1' / 'pkg2')
+ assert_islink(self.tmpdir / 'site-packages' / 'ns1' / 'pkg2',
+ to=str(samples_dir / 'ns1-pkg2' / 'ns1' / 'pkg2'))
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1_pkg2-0.1.dist-info')
+
+ assert_islink(self.tmpdir / 'site-packages' / 'ns1' / 'module.py',
+ to=samples_dir / 'ns1-pkg-mod' / 'ns1' / 'module.py')
+ assert_isdir(self.tmpdir / 'site-packages' / 'ns1_module-0.1.dist-info')
+
+ def test_install_ns_package_pth_file(self):
+ Installer.from_ini_path(
+ samples_dir / 'ns1-pkg' / 'pyproject.toml', pth=True
+ ).install_directly()
+
+ pth_file = self.tmpdir / 'site-packages' / 'ns1.pkg.pth'
+ assert_isfile(pth_file)
+ assert pth_file.read_text('utf-8').strip() == str(samples_dir / 'ns1-pkg')
+
+ def test_symlink_package(self):
+ if os.name == 'nt':
+ raise SkipTest("symlink")
+ Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', symlink=True).install()
+ assert_islink(self.tmpdir / 'site-packages' / 'package1',
+ to=samples_dir / 'package1' / 'package1')
+ assert_isfile(self.tmpdir / 'scripts' / 'pkg_script')
+ with (self.tmpdir / 'scripts' / 'pkg_script').open() as f:
+ assert f.readline().strip() == "#!" + sys.executable
+ self._assert_direct_url(
+ samples_dir / 'package1', 'package1', '0.1', expected_editable=True
+ )
+
+ def test_symlink_module_pep621(self):
+ if os.name == 'nt':
+ raise SkipTest("symlink")
+ Installer.from_ini_path(
+ core_samples_dir / 'pep621_nodynamic' / 'pyproject.toml', symlink=True
+ ).install_directly()
+ assert_islink(self.tmpdir / 'site-packages' / 'module1.py',
+ to=core_samples_dir / 'pep621_nodynamic' / 'module1.py')
+ assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.3.dist-info')
+ self._assert_direct_url(
+ core_samples_dir / 'pep621_nodynamic', 'module1', '0.3',
+ expected_editable=True
+ )
+
+ def test_symlink_module_in_src(self):
+ if os.name == 'nt':
+ raise SkipTest("symlink")
+ oldcwd = os.getcwd()
+ os.chdir(samples_dir / 'packageinsrc')
+ try:
+ Installer.from_ini_path(
+ pathlib.Path('pyproject.toml'), symlink=True
+ ).install_directly()
+ finally:
+ os.chdir(oldcwd)
+ assert_islink(self.tmpdir / 'site-packages' / 'module1.py',
+ to=(samples_dir / 'packageinsrc' / 'src' / 'module1.py'))
+ assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')
+
+ def test_pth_package(self):
+ Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', pth=True).install()
+ assert_isfile(self.tmpdir / 'site-packages' / 'package1.pth')
+ with open(str(self.tmpdir / 'site-packages' / 'package1.pth')) as f:
+ assert f.read() == str(samples_dir / 'package1')
+ assert_isfile(self.tmpdir / 'scripts' / 'pkg_script')
+ self._assert_direct_url(
+ samples_dir / 'package1', 'package1', '0.1', expected_editable=True
+ )
+
+ def test_pth_module_in_src(self):
+ oldcwd = os.getcwd()
+ os.chdir(samples_dir / 'packageinsrc')
+ try:
+ Installer.from_ini_path(
+ pathlib.Path('pyproject.toml'), pth=True
+ ).install_directly()
+ finally:
+ os.chdir(oldcwd)
+ pth_path = self.tmpdir / 'site-packages' / 'module1.pth'
+ assert_isfile(pth_path)
+ assert pth_path.read_text('utf-8').strip() == str(
+ samples_dir / 'packageinsrc' / 'src'
+ )
+ assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')
+
+ def test_dist_name(self):
+ Installer.from_ini_path(samples_dir / 'altdistname' / 'pyproject.toml').install_directly()
+ assert_isdir(self.tmpdir / 'site-packages' / 'package1')
+ assert_isdir(self.tmpdir / 'site-packages' / 'package_dist1-0.1.dist-info')
+
+ def test_entry_points(self):
+ Installer.from_ini_path(samples_dir / 'entrypoints_valid' / 'pyproject.toml').install_directly()
+ assert_isfile(self.tmpdir / 'site-packages' / 'package1-0.1.dist-info' / 'entry_points.txt')
+
+ def test_pip_install(self):
+ ins = Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', python='mock_python',
+ user=False)
+
+ with MockCommand('mock_python') as mock_py:
+ ins.install()
+
+ calls = mock_py.get_calls()
+ assert len(calls) == 1
+ cmd = calls[0]['argv']
+ assert cmd[1:4] == ['-m', 'pip', 'install']
+ assert cmd[4].endswith('package1')
+
+ def test_symlink_other_python(self):
+ if os.name == 'nt':
+ raise SkipTest('symlink')
+ (self.tmpdir / 'site-packages2').mkdir()
+ (self.tmpdir / 'scripts2').mkdir()
+
+ # Called by Installer._auto_user() :
+ script1 = ("#!{python}\n"
+ "import sysconfig\n"
+ "print(True)\n" # site.ENABLE_USER_SITE
+ "print({purelib!r})" # sysconfig.get_path('purelib')
+ ).format(python=sys.executable,
+ purelib=str(self.tmpdir / 'site-packages2'))
+
+ # Called by Installer._get_dirs() :
+ script2 = ("#!{python}\n"
+ "import json, sys\n"
+ "json.dump({{'purelib': {purelib!r}, 'scripts': {scripts!r}, 'data': {data!r} }}, "
+ "sys.stdout)"
+ ).format(python=sys.executable,
+ purelib=str(self.tmpdir / 'site-packages2'),
+ scripts=str(self.tmpdir / 'scripts2'),
+ data=str(self.tmpdir / 'data'),
+ )
+
+ with MockCommand('mock_python', content=script1):
+ ins = Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', python='mock_python',
+ symlink=True)
+ with MockCommand('mock_python', content=script2):
+ ins.install()
+
+ assert_islink(self.tmpdir / 'site-packages2' / 'package1',
+ to=samples_dir / 'package1' / 'package1')
+ assert_isfile(self.tmpdir / 'scripts2' / 'pkg_script')
+ with (self.tmpdir / 'scripts2' / 'pkg_script').open() as f:
+ assert f.readline().strip() == "#!mock_python"
+
+ def test_install_requires(self):
+ ins = Installer.from_ini_path(samples_dir / 'requires-requests.toml',
+ user=False, python='mock_python')
+
+ with MockCommand('mock_python') as mockpy:
+ ins.install_requirements()
+ calls = mockpy.get_calls()
+ assert len(calls) == 1
+ assert calls[0]['argv'][1:5] == ['-m', 'pip', 'install', '-r']
+
+ def test_install_reqs_my_python_if_needed_pep621(self):
+ ins = Installer.from_ini_path(
+ core_samples_dir / 'pep621_nodynamic' / 'pyproject.toml',
+ deps='none',
+ )
+
+ # This shouldn't try to get version & docstring from the module
+ ins.install_reqs_my_python_if_needed()
+
+ def test_extras_error(self):
+ with pytest.raises(DependencyError):
+ Installer.from_ini_path(samples_dir / 'requires-requests.toml',
+ user=False, deps='none', extras='dev')
+
+ def test_install_data_dir(self):
+ Installer.from_ini_path(
+ core_samples_dir / 'with_data_dir' / 'pyproject.toml',
+ ).install_directly()
+ assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
+ assert_isfile(self.tmpdir / 'data' / 'share' / 'man' / 'man1' / 'foo.1')
+
+ def test_symlink_data_dir(self):
+ if os.name == 'nt':
+ raise SkipTest("symlink")
+ Installer.from_ini_path(
+ core_samples_dir / 'with_data_dir' / 'pyproject.toml', symlink=True
+ ).install_directly()
+ assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
+ assert_islink(
+ self.tmpdir / 'data' / 'share' / 'man' / 'man1' / 'foo.1',
+ to=core_samples_dir / 'with_data_dir' / 'data' / 'share' / 'man' / 'man1' / 'foo.1'
+ )
+
+@pytest.mark.parametrize(('deps', 'extras', 'installed'), [
+ ('none', [], set()),
+ ('develop', [], {'pytest ;', 'toml ;'}),
+ ('production', [], {'toml ;'}),
+ ('all', [], {'toml ;', 'pytest ;', 'requests ;'}),
+])
+def test_install_requires_extra(deps, extras, installed):
+ it = InstallTests()
+ try:
+ it.setUp()
+ ins = Installer.from_ini_path(samples_dir / 'extras' / 'pyproject.toml', python='mock_python',
+ user=False, deps=deps, extras=extras)
+
+ cmd = MockCommand('mock_python')
+ get_reqs = (
+ "#!{python}\n"
+ "import sys\n"
+ "with open({recording_file!r}, 'wb') as w, open(sys.argv[-1], 'rb') as r:\n"
+ " w.write(r.read())"
+ ).format(python=sys.executable, recording_file=cmd.recording_file)
+ cmd.content = get_reqs
+
+ with cmd as mock_py:
+ ins.install_requirements()
+ with open(mock_py.recording_file) as f:
+ str_deps = f.read()
+ deps = str_deps.split('\n') if str_deps else []
+
+ assert set(deps) == installed
+ finally:
+ it.tearDown()
+
+def test_requires_dist_to_pip_requirement():
+ rd = 'pathlib2 (>=2.3); python_version == "2.7"'
+ assert _requires_dist_to_pip_requirement(rd) == \
+ 'pathlib2>=2.3 ; python_version == "2.7"'
+
+def test_test_writable_dir_win():
+ with tempfile.TemporaryDirectory() as td:
+ assert install._test_writable_dir_win(td) is True
+
+ # Ironically, I don't know how to make a non-writable dir on Windows,
+ # so although the functionality is for Windows, the test is for Posix
+ if os.name != 'posix':
+ return
+
+ # Remove write permissions from the directory
+ os.chmod(td, 0o444)
+ try:
+ assert install._test_writable_dir_win(td) is False
+ finally:
+ os.chmod(td, 0o644)
diff --git a/tests/test_sdist.py b/tests/test_sdist.py
new file mode 100644
index 0000000..0ddcb82
--- /dev/null
+++ b/tests/test_sdist.py
@@ -0,0 +1,152 @@
+import ast
+from os.path import join as pjoin
+from pathlib import Path
+import pytest
+from shutil import which, copy, copytree
+import sys
+import tarfile
+from tempfile import TemporaryDirectory
+from testpath import assert_isfile, MockCommand
+
+from flit import sdist, common
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_auto_packages():
+ module = common.Module('package1', samples_dir / 'package1')
+ packages, pkg_data = sdist.auto_packages(module)
+ assert packages == ['package1', 'package1.subpkg', 'package1.subpkg2']
+ assert pkg_data == {'': ['*'],
+ 'package1': ['data_dir/*'],
+ 'package1.subpkg': ['sp_data_dir/*'],
+ }
+
+def test_make_sdist():
+ # Smoke test of making a complete sdist
+ if not which('git'):
+ pytest.skip("requires git")
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'package1' / 'pyproject.toml')
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ builder.build(td)
+ sdist_file = td / 'package1-0.1.tar.gz'
+ assert_isfile(sdist_file)
+
+ with tarfile.open(str(sdist_file)) as tf:
+ assert 'package1-0.1/setup.py' in tf.getnames()
+
+
+def test_sdist_no_setup_py():
+    # Smoke test of making an sdist without generating setup.py
+ if not which('git'):
+ pytest.skip("requires git")
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'package1' / 'pyproject.toml')
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ builder.build(td, gen_setup_py=False)
+ sdist_file = td / 'package1-0.1.tar.gz'
+ assert_isfile(sdist_file)
+
+ with tarfile.open(str(sdist_file)) as tf:
+ assert 'package1-0.1/setup.py' not in tf.getnames()
+
+
+LIST_FILES = """\
+#!{python}
+import sys
+from os.path import join
+if '--deleted' not in sys.argv:
+ files = [
+ 'foo',
+ join('dir1', 'bar'),
+ join('dir1', 'subdir', 'qux'),
+ join('dir2', 'abc'),
+ join('dist', 'def'),
+ ]
+ mode = '{vcs}'
+ if mode == 'git':
+ print('\\0'.join(files), end='\\0')
+ elif mode == 'hg':
+ for f in files:
+ print(f)
+"""
+
+LIST_FILES_GIT = LIST_FILES.format(python=sys.executable, vcs='git')
+LIST_FILES_HG = LIST_FILES.format(python=sys.executable, vcs='hg')
+
+
+def test_get_files_list_git(copy_sample):
+ td = copy_sample('module1_toml')
+ (td / '.git').mkdir()
+
+ builder = sdist.SdistBuilder.from_ini_path(td / 'pyproject.toml')
+ with MockCommand('git', LIST_FILES_GIT):
+ files = builder.select_files()
+
+ assert set(files) == {
+ 'foo', pjoin('dir1', 'bar'), pjoin('dir1', 'subdir', 'qux'),
+ pjoin('dir2', 'abc')
+ }
+
+def test_get_files_list_hg(tmp_path):
+ dir1 = tmp_path / 'dir1'
+ copytree(str(samples_dir / 'module1_toml'), str(dir1))
+ (tmp_path / '.hg').mkdir()
+ builder = sdist.SdistBuilder.from_ini_path(dir1 / 'pyproject.toml')
+ with MockCommand('hg', LIST_FILES_HG):
+ files = builder.select_files()
+
+ assert set(files) == {
+ 'bar', pjoin('subdir', 'qux')
+ }
+
+def get_setup_assigns(setup):
+ """Parse setup.py, execute assignments, return the namespace"""
+ setup_ast = ast.parse(setup)
+ # Select only assignment statements
+ setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
+ ns = {}
+ exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
+ return ns
+
+def test_make_setup_py():
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'package1' / 'pyproject.toml')
+ ns = get_setup_assigns(builder.make_setup_py())
+ assert ns['packages'] == ['package1', 'package1.subpkg', 'package1.subpkg2']
+ assert 'install_requires' not in ns
+ assert ns['entry_points'] == \
+ {'console_scripts': ['pkg_script = package1:main']}
+
+def test_make_setup_py_reqs():
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'extras' / 'pyproject.toml')
+ ns = get_setup_assigns(builder.make_setup_py())
+ assert ns['install_requires'] == ['toml']
+ assert ns['extras_require'] == {'test': ['pytest'], 'custom': ['requests']}
+
+def test_make_setup_py_reqs_envmark():
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'requires-envmark' / 'pyproject.toml')
+ ns = get_setup_assigns(builder.make_setup_py())
+ assert ns['install_requires'] == ['requests']
+ assert ns['extras_require'] == {":python_version == '2.7'": ['pathlib2']}
+
+def test_make_setup_py_reqs_extra_envmark():
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'requires-extra-envmark' / 'pyproject.toml')
+ ns = get_setup_assigns(builder.make_setup_py())
+ assert ns['extras_require'] == {'test:python_version == "2.7"': ['pathlib2']}
+
+def test_make_setup_py_package_dir_src():
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'packageinsrc' / 'pyproject.toml')
+ ns = get_setup_assigns(builder.make_setup_py())
+ assert ns['package_dir'] == {'': 'src'}
+
+def test_make_setup_py_ns_pkg():
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'ns1-pkg' / 'pyproject.toml')
+ setup = builder.make_setup_py()
+ ns = get_setup_assigns(setup)
+ assert ns['packages'] == ['ns1', 'ns1.pkg']
+
+def test_make_setup_py_ns_pkg_mod():
+ builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'ns1-pkg-mod' / 'pyproject.toml')
+ setup = builder.make_setup_py()
+ ns = get_setup_assigns(setup)
+ assert ns['packages'] == ['ns1']
diff --git a/tests/test_tomlify.py b/tests/test_tomlify.py
new file mode 100644
index 0000000..65e6178
--- /dev/null
+++ b/tests/test_tomlify.py
@@ -0,0 +1,32 @@
+import os
+from pathlib import Path
+try:
+ import tomllib
+except ImportError:
+ import tomli as tomllib
+from shutil import copy
+from testpath import assert_isfile
+
+from flit import tomlify
+
+samples_dir = Path(__file__).parent / 'samples'
+
+def test_tomlify(copy_sample, monkeypatch):
+ td = copy_sample('with_flit_ini')
+ monkeypatch.chdir(td)
+
+ tomlify.main(argv=[])
+
+ pyproject_toml = (td / 'pyproject.toml')
+ assert_isfile(pyproject_toml)
+
+ with pyproject_toml.open('rb') as f:
+ content = tomllib.load(f)
+
+ assert 'build-system' in content
+ assert 'tool' in content
+ assert 'flit' in content['tool']
+ flit_table = content['tool']['flit']
+ assert 'metadata' in flit_table
+ assert 'scripts' in flit_table
+ assert 'entrypoints' in flit_table
diff --git a/tests/test_upload.py b/tests/test_upload.py
new file mode 100644
index 0000000..73ede36
--- /dev/null
+++ b/tests/test_upload.py
@@ -0,0 +1,168 @@
+from contextlib import contextmanager
+from tempfile import NamedTemporaryFile
+import os
+import io
+import pathlib
+import sys
+
+import pytest
+import responses
+from testpath import modified_env
+from unittest.mock import patch
+
+from flit import upload
+from flit.build import ALL_FORMATS
+
+samples_dir = pathlib.Path(__file__).parent / 'samples'
+
+repo_settings = {'url': upload.PYPI,
+ 'username': 'user',
+ 'password': 'pw',
+ 'is_warehouse': True,
+ }
+
+pypirc1 = """
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username: fred
+password: s3cret
+"""
+# That's not a real password. Well, hopefully not.
+
+@contextmanager
+def temp_pypirc(content):
+ try:
+ temp_file = NamedTemporaryFile("w+", delete=False)
+ temp_file.write(content)
+ temp_file.close()
+ yield temp_file.name
+ finally:
+ os.unlink(temp_file.name)
+
+
+@responses.activate
+def test_upload(copy_sample):
+ responses.add(responses.POST, upload.PYPI, status=200)
+ td = copy_sample('module1_toml')
+
+ with temp_pypirc(pypirc1) as pypirc, \
+ patch('flit.upload.get_repository', return_value=repo_settings):
+ upload.main(td / 'pyproject.toml', repo_name='pypi', pypirc_path=pypirc)
+
+ assert len(responses.calls) == 2
+
+def test_get_repository():
+ with temp_pypirc(pypirc1) as pypirc:
+ repo = upload.get_repository(pypirc_path=pypirc)
+ assert repo['url'] == upload.PYPI
+ assert repo['username'] == 'fred'
+ assert repo['password'] == 's3cret'
+
+def test_get_repository_env():
+ with temp_pypirc(pypirc1) as pypirc, \
+ modified_env({
+ 'FLIT_INDEX_URL': 'https://pypi.example.com',
+ 'FLIT_USERNAME': 'alice',
+ 'FLIT_PASSWORD': 'p4ssword', # Also not a real password
+ }):
+ repo = upload.get_repository(pypirc_path=pypirc)
+ # Because we haven't specified a repo name, environment variables should
+ # have higher priority than the config file.
+ assert repo['url'] == 'https://pypi.example.com'
+ assert repo['username'] == 'alice'
+ assert repo['password'] == 'p4ssword'
+
+@contextmanager
+def _fake_keyring(pw):
+ class FakeKeyring:
+ @staticmethod
+ def get_password(service_name, username):
+ return pw
+
+ class FakeKeyringErrMod:
+ class KeyringError(Exception):
+ pass
+
+ with patch.dict('sys.modules', {
+ 'keyring': FakeKeyring(), 'keyring.errors': FakeKeyringErrMod(),
+ }):
+ yield
+
+pypirc2 = """
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username: fred
+"""
+
+def test_get_repository_keyring():
+ with modified_env({'FLIT_PASSWORD': None}), \
+ _fake_keyring('tops3cret'):
+ repo = upload.get_repository(pypirc_path=io.StringIO(pypirc2))
+
+ assert repo['username'] == 'fred'
+ assert repo['password'] == 'tops3cret'
+
+
+pypirc3_repo = "https://invalid-repo.inv"
+pypirc3_user = "test"
+pypirc3_pass = "not_a_real_password"
+pypirc3 = f"""
+[distutils]
+index-servers =
+ test123
+
+[test123]
+repository: {pypirc3_repo}
+username: {pypirc3_user}
+password: {pypirc3_pass}
+"""
+
+
+def test_upload_pypirc_file(copy_sample):
+ with temp_pypirc(pypirc3) as pypirc, \
+ patch("flit.upload.upload_file") as upload_file:
+ td = copy_sample("module1_toml")
+ formats = list(ALL_FORMATS)[:1]
+ upload.main(
+ td / "pyproject.toml",
+ formats=set(formats),
+ repo_name="test123",
+ pypirc_path=pypirc,
+ )
+ _, _, repo = upload_file.call_args[0]
+
+ assert repo["url"] == pypirc3_repo
+ assert repo["username"] == pypirc3_user
+ assert repo["password"] == pypirc3_pass
+
+
+def test_upload_invalid_pypirc_file(copy_sample):
+ with patch("flit.upload.upload_file"):
+ td = copy_sample("module1_toml")
+ formats = list(ALL_FORMATS)[:1]
+ with pytest.raises(FileNotFoundError):
+ upload.main(
+ td / "pyproject.toml",
+ formats=set(formats),
+ repo_name="test123",
+ pypirc_path="./file.invalid",
+ )
+
+def test_upload_default_pypirc_file(copy_sample):
+ with patch("flit.upload.do_upload") as do_upload:
+ td = copy_sample("module1_toml")
+ formats = list(ALL_FORMATS)[:1]
+ upload.main(
+ td / "pyproject.toml",
+ formats=set(formats),
+ repo_name="test123",
+ )
+
+ file = do_upload.call_args[0][2]
+ assert file == "~/.pypirc"
diff --git a/tests/test_validate.py b/tests/test_validate.py
new file mode 100644
index 0000000..21b918c
--- /dev/null
+++ b/tests/test_validate.py
@@ -0,0 +1,243 @@
+import errno
+import pytest
+import responses
+
+from flit import validate as fv
+
+def test_validate_entrypoints():
+ assert fv.validate_entrypoints(
+ {'console_scripts': {'flit': 'flit:main'}}) == []
+ assert fv.validate_entrypoints(
+ {'some.group': {'flit': 'flit.buildapi'}}) == []
+
+ res = fv.validate_entrypoints({'some.group': {'flit': 'a:b:c'}})
+ assert len(res) == 1
+
+def test_validate_name():
+ def check(name):
+ return fv.validate_name({'name': name})
+
+ assert check('foo.bar_baz') == []
+ assert check('5minus6') == []
+
+ assert len(check('_foo')) == 1 # Must start with alphanumeric
+ assert len(check('foo.')) == 1 # Must end with alphanumeric
+ assert len(check('Bücher')) == 1 # ASCII only
+
+def test_validate_requires_python():
+ assert fv.validate_requires_python({}) == [] # Not required
+
+ def check(spec):
+ return fv.validate_requires_python({'requires_python': spec})
+
+ assert check('>=3') == []
+ assert check('>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*') == []
+
+ assert len(check('3')) == 1
+ assert len(check('@12')) == 1
+ assert len(check('>=2.7; !=3.0.*')) == 1 # Comma separated, not semicolon
+
+def test_validate_requires_dist():
+ assert fv.validate_requires_dist({}) == [] # Not required
+
+ def check(spec):
+ return fv.validate_requires_dist({'requires_dist': [spec]})
+
+ assert check('requests') == []
+ assert check('requests[extra-foo]') == []
+ assert check('requests (>=2.14)') == [] # parentheses allowed but not recommended
+ assert check('requests >=2.14') == []
+ assert check('pexpect; sys_platform == "win32"') == []
+ # Altogether now
+ assert check('requests[extra-foo] >=2.14; python_version < "3.0"') == []
+
+ # URL specifier
+ assert check('requests @ https://example.com/requests.tar.gz') == []
+ assert check(
+ 'requests @ https://example.com/requests.tar.gz ; python_version < "3.8"'
+ ) == []
+
+ # Problems
+ assert len(check('Bücher')) == 1
+ assert len(check('requests 2.14')) == 1
+ assert len(check('pexpect; sys.platform == "win32"')) == 1 # '.' -> '_'
+ assert len(check('requests >=2.14 @ https://example.com/requests.tar.gz')) == 1
+ # Several problems in one requirement
+ assert len(check('pexpect[_foo] =3; sys.platform == "win32"')) == 3
+
+def test_validate_environment_marker():
+ vem = fv.validate_environment_marker
+
+ assert vem('python_version >= "3" and os_name == \'posix\'') == []
+
+ res = vem('python_version >= "3') # Unclosed string
+ assert len(res) == 1
+ assert res[0].startswith("Invalid string")
+
+ res = vem('python_verson >= "3"') # Misspelled name
+ assert len(res) == 1
+ assert res[0].startswith("Invalid variable")
+
+ res = vem("os_name is 'posix'") # No 'is' comparisons
+ assert len(res) == 1
+ assert res[0].startswith("Invalid expression")
+
+ res = vem("'2' < python_version < '4'") # No chained comparisons
+ assert len(res) == 1
+ assert res[0].startswith("Invalid expression")
+
+ assert len(vem('os.name == "linux\'')) == 2
+
+def test_validate_url():
+ vurl = fv.validate_url
+ assert vurl("https://github.com/pypa/flit") == []
+
+ assert len(vurl("github.com/pypa/flit")) == 1
+ assert len(vurl("https://")) == 1
+
+
+def test_validate_project_urls():
+ vpu = fv.validate_project_urls
+
+ def check(prurl):
+ return vpu({'project_urls': [prurl]})
+ assert vpu({}) == [] # Not required
+ assert check('Documentation, https://flit.readthedocs.io/') == []
+
+ # Missing https://
+ assert len(check('Documentation, flit.readthedocs.io')) == 1
+ # Double comma
+ assert len(check('A, B, flit.readthedocs.io')) == 1
+ # No name
+ assert len(check(', https://flit.readthedocs.io/')) == 1
+ # Name longer than 32 chars
+ assert len(check('Supercalifragilisticexpialidocious, https://flit.readthedocs.io/')) == 1
+
+
+def test_read_classifiers_cached(monkeypatch, tmp_path):
+
+ def mock_get_cache_dir():
+ tmp_file = tmp_path / "classifiers.lst"
+ with tmp_file.open("w") as fh:
+ fh.write("A\nB\nC")
+ return tmp_path
+
+ monkeypatch.setattr(fv, "get_cache_dir", mock_get_cache_dir)
+
+ classifiers = fv._read_classifiers_cached()
+
+ assert classifiers == {'A', 'B', 'C'}
+
+
+@responses.activate
+def test_download_and_cache_classifiers(monkeypatch, tmp_path):
+ responses.add(
+ responses.GET,
+ 'https://pypi.org/pypi?%3Aaction=list_classifiers',
+ body="A\nB\nC")
+
+ def mock_get_cache_dir():
+ return tmp_path
+
+ monkeypatch.setattr(fv, "get_cache_dir", mock_get_cache_dir)
+
+ classifiers = fv._download_and_cache_classifiers()
+
+ assert classifiers == {"A", "B", "C"}
+
+
+def test_validate_classifiers_private(monkeypatch):
+ """
+ Test that `Private :: Do Not Upload` is considered a valid classifier.
+ This is a special case: it is not an official trove classifier, but it is
+ a way to make sure that a private package does not get uploaded to PyPI
+ by accident.
+
+ Implementation on PyPI side:
+ https://github.com/pypa/warehouse/pull/5440
+ Issue about officially documenting the trick:
+ https://github.com/pypa/packaging.python.org/issues/643
+ """
+ monkeypatch.setattr(fv, "_read_classifiers_cached", lambda: set())
+
+ actual = fv.validate_classifiers({'invalid'})
+ assert actual == ["Unrecognised classifier: 'invalid'"]
+
+ assert fv.validate_classifiers({'Private :: Do Not Upload'}) == []
+
+
+@responses.activate
+@pytest.mark.parametrize("error", [PermissionError, OSError(errno.EROFS, "")])
+def test_download_and_cache_classifiers_with_inaccessible_dir(monkeypatch, error):
+ responses.add(
+ responses.GET,
+ 'https://pypi.org/pypi?%3Aaction=list_classifiers',
+ body="A\nB\nC")
+
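+ # Simulate a cache directory that can be neither created nor written to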
+ class MockCacheDir:
+ def mkdir(self, parents):
+ raise error
+ def __truediv__(self, other):
+ raise error
+
+ monkeypatch.setattr(fv, "get_cache_dir", MockCacheDir)
+
+ classifiers = fv._download_and_cache_classifiers()
+
+ assert classifiers == {"A", "B", "C"}
+
+
+def test_verify_classifiers_valid_classifiers():
+ classifiers = {"A"}
+ valid_classifiers = {"A", "B"}
+
+ problems = fv._verify_classifiers(classifiers, valid_classifiers)
+
+ assert problems == []
+
+def test_verify_classifiers_invalid_classifiers():
+ classifiers = {"A", "B"}
+ valid_classifiers = {"A"}
+
+ problems = fv._verify_classifiers(classifiers, valid_classifiers)
+
+ assert problems == ["Unrecognised classifier: 'B'"]
+
+def test_validate_readme_rst():
+ metadata = {
+ 'description_content_type': 'text/x-rst',
+ 'description': "Invalid ``rst'",
+ }
+ problems = fv.validate_readme_rst(metadata)
+
+ assert len(problems) == 2 # 1 message that rst is invalid + 1 with details
+ assert "valid rst" in problems[0]
+
+ # Markdown should be ignored
+ metadata = {
+ 'description_content_type': 'text/markdown',
+ 'description': "Invalid `rst'",
+ }
+ problems = fv.validate_readme_rst(metadata)
+
+ assert problems == []
+
+RST_WITH_CODE = """
+Code snippet:
+
+.. code-block:: python
+
+ a = [i ** 2 for i in range(5)]
+"""
+
+def test_validate_readme_rst_code():
+ # Syntax highlighting shouldn't require pygments
+ metadata = {
+ 'description_content_type': 'text/x-rst',
+ 'description': RST_WITH_CODE,
+ }
+ problems = fv.validate_readme_rst(metadata)
+ for p in problems:
+ print(p)
+
+ assert problems == []
diff --git a/tests/test_vcs.py b/tests/test_vcs.py
new file mode 100644
index 0000000..4ed5ac3
--- /dev/null
+++ b/tests/test_vcs.py
@@ -0,0 +1,27 @@
+from contextlib import contextmanager
+import os
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+from flit import vcs
+
+@contextmanager
+def cwd(path):
+ if isinstance(path, Path):
+ path = str(path)
+ old_wd = os.getcwd()
+ os.chdir(path)
+ try:
+ yield
+ finally:
+ os.chdir(old_wd)
+
+def test_identify_git_parent():
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ (td / '.git').mkdir()
+ subdir = (td / 'subdir')
+ subdir.mkdir()
+ with cwd(subdir):
+ assert vcs.identify_vcs(Path('.')).name == 'git'
+
diff --git a/tests/test_wheel.py b/tests/test_wheel.py
new file mode 100644
index 0000000..3b88391
--- /dev/null
+++ b/tests/test_wheel.py
@@ -0,0 +1,221 @@
+import configparser
+import os
+from pathlib import Path
+import tempfile
+from unittest import skipIf
+import zipfile
+
+import pytest
+from testpath import assert_isfile, assert_isdir, assert_not_path_exists
+
+from flit.wheel import WheelBuilder, make_wheel_in
+from flit.config import EntryPointsConflict
+
+samples_dir = Path(__file__).parent / 'samples'
+
+
+def unpack(path):
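+ # Extract a wheel into a TemporaryDirectory; the returned object doubles as a
+ # context manager, so `with unpack(path) as unpacked:` cleans up afterwards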
+ z = zipfile.ZipFile(str(path))
+ t = tempfile.TemporaryDirectory()
+ z.extractall(t.name)
+ return t
+
+def test_wheel_module(copy_sample):
+ td = copy_sample('module1_toml')
+ make_wheel_in(td / 'pyproject.toml', td)
+ assert_isfile(td / 'module1-0.1-py2.py3-none-any.whl')
+
+def test_editable_wheel_module(copy_sample):
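+ # Editable wheels ship a .pth file pointing at the project directory
+ # instead of copying the module itself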
+ td = copy_sample('module1_toml')
+ make_wheel_in(td / 'pyproject.toml', td, editable=True)
+ whl_file = td / 'module1-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ pth_path = Path(unpacked, 'module1.pth')
+ assert_isfile(pth_path)
+ assert pth_path.read_text() == str(td)
+ assert_isdir(Path(unpacked, 'module1-0.1.dist-info'))
+
+def test_editable_wheel_has_absolute_pth(copy_sample):
+ td = copy_sample('module1_toml')
+ oldcwd = os.getcwd()
+ os.chdir(str(td))
+ try:
+ make_wheel_in(Path('pyproject.toml'), Path('.'), editable=True)
+ whl_file = 'module1-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ pth_path = Path(unpacked, 'module1.pth')
+ assert_isfile(pth_path)
+ assert Path(pth_path.read_text()).is_absolute()
+ assert pth_path.read_text() == str(td.resolve())
+ assert_isdir(Path(unpacked, 'module1-0.1.dist-info'))
+ finally:
+ os.chdir(oldcwd)
+
+def test_wheel_package(copy_sample):
+ td = copy_sample('package1')
+ make_wheel_in(td / 'pyproject.toml', td)
+ assert_isfile(td / 'package1-0.1-py2.py3-none-any.whl')
+
+def test_editable_wheel_package(copy_sample):
+ td = copy_sample('package1')
+ make_wheel_in(td / 'pyproject.toml', td, editable=True)
+ whl_file = td / 'package1-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ pth_path = Path(unpacked, 'package1.pth')
+ assert_isfile(pth_path)
+ assert pth_path.read_text() == str(td)
+ assert_isdir(Path(unpacked, 'package1-0.1.dist-info'))
+
+def test_editable_wheel_namespace_package(copy_sample):
+ td = copy_sample('ns1-pkg')
+ make_wheel_in(td / 'pyproject.toml', td, editable=True)
+ whl_file = td / 'ns1_pkg-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ pth_path = Path(unpacked, 'ns1.pkg.pth')
+ assert_isfile(pth_path)
+ assert pth_path.read_text() == str(td)
+ assert_isdir(Path(unpacked, 'ns1_pkg-0.1.dist-info'))
+
+def test_wheel_src_module(copy_sample):
+ td = copy_sample('module3')
+ make_wheel_in(td / 'pyproject.toml', td)
+
+ whl_file = td / 'module3-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ assert_isfile(Path(unpacked, 'module3.py'))
+ assert_isdir(Path(unpacked, 'module3-0.1.dist-info'))
+ assert_isfile(Path(unpacked, 'module3-0.1.dist-info', 'LICENSE'))
+
+def test_editable_wheel_src_module(copy_sample):
+ td = copy_sample('module3')
+ make_wheel_in(td / 'pyproject.toml', td, editable=True)
+ whl_file = td / 'module3-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ pth_path = Path(unpacked, 'module3.pth')
+ assert_isfile(pth_path)
+ assert pth_path.read_text() == str(td / "src")
+ assert_isdir(Path(unpacked, 'module3-0.1.dist-info'))
+
+def test_wheel_src_package(copy_sample):
+ td = copy_sample('package2')
+ make_wheel_in(td / 'pyproject.toml', td)
+
+ whl_file = td / 'package2-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ print(os.listdir(unpacked))
+ assert_isfile(Path(unpacked, 'package2', '__init__.py'))
+
+def test_editable_wheel_src_package(copy_sample):
+ td = copy_sample('package2')
+ make_wheel_in(td / 'pyproject.toml', td, editable=True)
+ whl_file = td / 'package2-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ pth_path = Path(unpacked, 'package2.pth')
+ assert_isfile(pth_path)
+ assert pth_path.read_text() == str(td / "src")
+ assert_isdir(Path(unpacked, 'package2-0.1.dist-info'))
+
+
+def test_wheel_ns_package(copy_sample):
+ td = copy_sample('ns1-pkg')
+ res = make_wheel_in(td / 'pyproject.toml', td)
+ assert res.file == td / 'ns1_pkg-0.1-py2.py3-none-any.whl'
+ assert_isfile(res.file)
+ with unpack(res.file) as td_unpack:
+ assert_isdir(Path(td_unpack, 'ns1_pkg-0.1.dist-info'))
+ assert_isfile(Path(td_unpack, 'ns1', 'pkg', '__init__.py'))
+ assert_not_path_exists(Path(td_unpack, 'ns1', '__init__.py'))
+
+def test_dist_name(copy_sample):
+ td = copy_sample('altdistname')
+ make_wheel_in(td / 'pyproject.toml', td)
+ res = td / 'package_dist1-0.1-py2.py3-none-any.whl'
+ assert_isfile(res)
+ with unpack(res) as td_unpack:
+ assert_isdir(Path(td_unpack, 'package_dist1-0.1.dist-info'))
+
+def test_entry_points(copy_sample):
+ td = copy_sample('entrypoints_valid')
+ make_wheel_in(td / 'pyproject.toml', td)
+ assert_isfile(td / 'package1-0.1-py2.py3-none-any.whl')
+ with unpack(td / 'package1-0.1-py2.py3-none-any.whl') as td_unpack:
+ entry_points = Path(td_unpack, 'package1-0.1.dist-info', 'entry_points.txt')
+ assert_isfile(entry_points)
+ cp = configparser.ConfigParser()
+ cp.read(str(entry_points))
+ assert 'console_scripts' in cp.sections()
+ assert 'myplugins' in cp.sections()
+
+def test_entry_points_conflict(copy_sample):
+ td = copy_sample('entrypoints_conflict')
+ with pytest.raises(EntryPointsConflict):
+ make_wheel_in(td / 'pyproject.toml', td)
+
+def test_wheel_builder():
+ # Slightly lower level interface
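+ # WheelBuilder.from_ini_path writes the wheel into an already-open binary file object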
+ with tempfile.TemporaryDirectory() as td:
+ target = Path(td, 'sample.whl')
+ with target.open('wb') as f:
+ wb = WheelBuilder.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', f)
+ wb.build()
+
+ assert zipfile.is_zipfile(str(target))
+ assert wb.wheel_filename == 'package1-0.1-py2.py3-none-any.whl'
+
+@skipIf(os.name == 'nt', 'Windows does not preserve necessary permissions')
+def test_permissions_normed(copy_sample):
+ td = copy_sample('module1_toml')
+
+ (td / 'module1.py').chmod(0o620)
+ make_wheel_in(td / 'pyproject.toml', td)
+
+ whl = td / 'module1-0.1-py2.py3-none-any.whl'
+ assert_isfile(whl)
+ with zipfile.ZipFile(str(whl)) as zf:
+ info = zf.getinfo('module1.py')
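+ # The Unix file mode lives in the top 16 bits of ZipInfo.external_attr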
+ perms = (info.external_attr >> 16) & 0o777
+ assert perms == 0o644, oct(perms)
+ whl.unlink()
+
+ # This time with executable bit set
+ (td / 'module1.py').chmod(0o720)
+ make_wheel_in(td / 'pyproject.toml', td)
+
+ assert_isfile(whl)
+ with zipfile.ZipFile(str(whl)) as zf:
+ info = zf.getinfo('module1.py')
+ perms = (info.external_attr >> 16) & 0o777
+ assert perms == 0o755, oct(perms)
+
+ info = zf.getinfo('module1-0.1.dist-info/METADATA')
+ perms = (info.external_attr >> 16) & 0o777
+ assert perms == 0o644, oct(perms)
+
+def test_compression(tmp_path):
+ info = make_wheel_in(samples_dir / 'module1_toml' / 'pyproject.toml', tmp_path)
+ assert_isfile(info.file)
+ with zipfile.ZipFile(str(info.file)) as zf:
+ for name in [
+ 'module1.py',
+ 'module1-0.1.dist-info/METADATA',
+ ]:
+ assert zf.getinfo(name).compress_type == zipfile.ZIP_DEFLATED
+
+def test_wheel_module_local_version(copy_sample):
+ """Test if a local version specifier is preserved in wheel filename and dist-info dir name"""
+ td = copy_sample('modulewithlocalversion')
+ make_wheel_in(td / 'pyproject.toml', td)
+
+ whl_file = td / 'modulewithlocalversion-0.1.dev0+test-py2.py3-none-any.whl'
+ assert_isfile(whl_file)
+ with unpack(whl_file) as unpacked:
+ assert_isfile(Path(unpacked, 'modulewithlocalversion.py'))
+ assert_isdir(Path(unpacked, 'modulewithlocalversion-0.1.dev0+test.dist-info'))
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..df1f24d
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,42 @@
+[tox]
+envlist = py{311,310,39,38,37,36},bootstrap
+skip_missing_interpreters = true
+
+[gh-actions]
+python =
+ 3.6: py36
+ 3.7: py37
+ 3.8: py38, bootstrap
+ 3.9: py39
+ 3.10: py310
+ 3.11: py311
+
+[testenv]
+deps =
+ requests
+ testpath
+ responses
+ docutils
+ tomli;python_version < "3.11"
+ tomli-w
+ pytest>=2.7.3
+ pytest-cov
+
+skip_install=true
+
+setenv =
+ PYTHONPATH = flit_core
+
+commands =
+ python -m pytest --cov=flit --cov=flit_core/flit_core
+
+[testenv:bootstrap]
+skip_install = true
+# Make the install step a no-op, so nothing gets installed in the env
+install_command = true {packages}
+whitelist_externals = true
+changedir = flit_core
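+# Build flit_core's own wheel with its bundled PEP 517 backend as a bootstrap sanity check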
+commands =
+ python -c "from flit_core.buildapi import build_wheel;\
+ from tempfile import mkdtemp;\
+ build_wheel(mkdtemp())"