-rw-r--r--  .gitignore | 8
-rw-r--r--  .pre-commit-config.yaml | 54
-rw-r--r--  .pre-commit-hooks.yaml | 6
-rw-r--r--  CHANGELOG.md | 1407
-rw-r--r--  CONTRIBUTING.md | 146
-rw-r--r--  LICENSE | 19
-rw-r--r--  README.md | 9
-rw-r--r--  azure-pipelines.yml | 50
-rw-r--r--  pre_commit/__init__.py | 0
-rw-r--r--  pre_commit/__main__.py | 5
-rw-r--r--  pre_commit/clientlib.py | 317
-rw-r--r--  pre_commit/color.py | 97
-rw-r--r--  pre_commit/commands/__init__.py | 0
-rw-r--r--  pre_commit/commands/autoupdate.py | 182
-rw-r--r--  pre_commit/commands/clean.py | 14
-rw-r--r--  pre_commit/commands/gc.py | 90
-rw-r--r--  pre_commit/commands/hook_impl.py | 187
-rw-r--r--  pre_commit/commands/init_templatedir.py | 33
-rw-r--r--  pre_commit/commands/install_uninstall.py | 175
-rw-r--r--  pre_commit/commands/migrate_config.py | 59
-rw-r--r--  pre_commit/commands/run.py | 360
-rw-r--r--  pre_commit/commands/sample_config.py | 21
-rw-r--r--  pre_commit/commands/try_repo.py | 77
-rw-r--r--  pre_commit/constants.py | 24
-rw-r--r--  pre_commit/envcontext.py | 67
-rw-r--r--  pre_commit/error_handler.py | 64
-rw-r--r--  pre_commit/file_lock.py | 76
-rw-r--r--  pre_commit/git.py | 196
-rw-r--r--  pre_commit/hook.py | 63
-rw-r--r--  pre_commit/languages/__init__.py | 0
-rw-r--r--  pre_commit/languages/all.py | 60
-rw-r--r--  pre_commit/languages/conda.py | 84
-rw-r--r--  pre_commit/languages/docker.py | 114
-rw-r--r--  pre_commit/languages/docker_image.py | 22
-rw-r--r--  pre_commit/languages/fail.py | 20
-rw-r--r--  pre_commit/languages/golang.py | 97
-rw-r--r--  pre_commit/languages/helpers.py | 109
-rw-r--r--  pre_commit/languages/node.py | 93
-rw-r--r--  pre_commit/languages/perl.py | 67
-rw-r--r--  pre_commit/languages/pygrep.py | 87
-rw-r--r--  pre_commit/languages/python.py | 210
-rw-r--r--  pre_commit/languages/python_venv.py | 46
-rw-r--r--  pre_commit/languages/ruby.py | 126
-rw-r--r--  pre_commit/languages/rust.py | 106
-rw-r--r--  pre_commit/languages/script.py | 19
-rw-r--r--  pre_commit/languages/swift.py | 64
-rw-r--r--  pre_commit/languages/system.py | 19
-rw-r--r--  pre_commit/logging_handler.py | 40
-rw-r--r--  pre_commit/main.py | 410
-rw-r--r--  pre_commit/make_archives.py | 65
-rw-r--r--  pre_commit/meta_hooks/__init__.py | 0
-rw-r--r--  pre_commit/meta_hooks/check_hooks_apply.py | 39
-rw-r--r--  pre_commit/meta_hooks/check_useless_excludes.py | 72
-rw-r--r--  pre_commit/meta_hooks/identity.py | 16
-rw-r--r--  pre_commit/output.py | 32
-rw-r--r--  pre_commit/parse_shebang.py | 84
-rw-r--r--  pre_commit/prefix.py | 17
-rw-r--r--  pre_commit/repository.py | 208
-rw-r--r--  pre_commit/resources/__init__.py | 0
-rw-r--r--  pre_commit/resources/empty_template_.npmignore | 1
-rw-r--r--  pre_commit/resources/empty_template_Cargo.toml | 7
-rw-r--r--  pre_commit/resources/empty_template_Makefile.PL | 6
-rw-r--r--  pre_commit/resources/empty_template_environment.yml | 9
-rw-r--r--  pre_commit/resources/empty_template_main.go | 3
-rw-r--r--  pre_commit/resources/empty_template_main.rs | 1
-rw-r--r--  pre_commit/resources/empty_template_package.json | 4
-rw-r--r--  pre_commit/resources/empty_template_pre_commit_dummy_package.gemspec | 6
-rw-r--r--  pre_commit/resources/empty_template_setup.py | 4
-rwxr-xr-x  pre_commit/resources/hook-tmpl | 44
-rw-r--r--  pre_commit/resources/rbenv.tar.gz | bin 0 -> 31781 bytes
-rw-r--r--  pre_commit/resources/ruby-build.tar.gz | bin 0 -> 62567 bytes
-rw-r--r--  pre_commit/resources/ruby-download.tar.gz | bin 0 -> 5343 bytes
-rw-r--r--  pre_commit/staged_files_only.py | 90
-rw-r--r--  pre_commit/store.py | 250
-rw-r--r--  pre_commit/util.py | 272
-rw-r--r--  pre_commit/xargs.py | 157
-rw-r--r--  requirements-dev.txt | 4
-rw-r--r--  setup.cfg | 70
-rw-r--r--  setup.py | 2
-rw-r--r--  testing/__init__.py | 0
-rw-r--r--  testing/auto_namedtuple.py | 11
-rw-r--r--  testing/fixtures.py | 146
-rwxr-xr-x  testing/gen-languages-all | 28
-rwxr-xr-x  testing/get-swift.sh | 27
-rw-r--r--  testing/resources/arbitrary_bytes_repo/.pre-commit-hooks.yaml | 5
-rwxr-xr-x  testing/resources/arbitrary_bytes_repo/hook.sh | 7
-rw-r--r--  testing/resources/arg_per_line_hooks_repo/.pre-commit-hooks.yaml | 6
-rwxr-xr-x  testing/resources/arg_per_line_hooks_repo/bin/hook.sh | 5
-rw-r--r--  testing/resources/conda_hooks_repo/.pre-commit-hooks.yaml | 10
-rw-r--r--  testing/resources/conda_hooks_repo/environment.yml | 6
-rw-r--r--  testing/resources/docker_hooks_repo/.pre-commit-hooks.yaml | 17
-rw-r--r--  testing/resources/docker_hooks_repo/Dockerfile | 3
-rw-r--r--  testing/resources/docker_image_hooks_repo/.pre-commit-hooks.yaml | 8
-rw-r--r--  testing/resources/exclude_types_repo/.pre-commit-hooks.yaml | 6
-rwxr-xr-x  testing/resources/exclude_types_repo/bin/hook.sh | 3
-rw-r--r--  testing/resources/failing_hook_repo/.pre-commit-hooks.yaml | 5
-rwxr-xr-x  testing/resources/failing_hook_repo/bin/hook.sh | 4
-rw-r--r--  testing/resources/golang_hooks_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/golang_hooks_repo/golang-hello-world/main.go | 17
-rw-r--r--  testing/resources/img1.jpg | bin 0 -> 843 bytes
-rw-r--r--  testing/resources/img2.jpg | bin 0 -> 891 bytes
-rw-r--r--  testing/resources/img3.jpg | bin 0 -> 859 bytes
-rw-r--r--  testing/resources/logfile_repo/.pre-commit-hooks.yaml | 6
-rwxr-xr-x  testing/resources/logfile_repo/bin/hook.sh | 5
-rw-r--r--  testing/resources/modified_file_returns_zero_repo/.pre-commit-hooks.yaml | 15
-rwxr-xr-x  testing/resources/modified_file_returns_zero_repo/bin/hook.sh | 7
-rwxr-xr-x  testing/resources/modified_file_returns_zero_repo/bin/hook2.sh | 2
-rwxr-xr-x  testing/resources/modified_file_returns_zero_repo/bin/hook3.sh | 6
-rw-r--r--  testing/resources/node_hooks_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/node_hooks_repo/bin/main.js | 3
-rw-r--r--  testing/resources/node_hooks_repo/package.json | 5
-rw-r--r--  testing/resources/node_versioned_hooks_repo/.pre-commit-hooks.yaml | 6
-rw-r--r--  testing/resources/node_versioned_hooks_repo/bin/main.js | 4
-rw-r--r--  testing/resources/node_versioned_hooks_repo/package.json | 5
-rw-r--r--  testing/resources/not_found_exe/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/perl_hooks_repo/.gitignore | 7
-rw-r--r--  testing/resources/perl_hooks_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/perl_hooks_repo/MANIFEST | 4
-rw-r--r--  testing/resources/perl_hooks_repo/Makefile.PL | 10
-rwxr-xr-x  testing/resources/perl_hooks_repo/bin/pre-commit-perl-hello | 7
-rw-r--r--  testing/resources/perl_hooks_repo/lib/PreCommitHello.pm | 12
-rw-r--r--  testing/resources/prints_cwd_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml | 6
-rw-r--r--  testing/resources/python3_hooks_repo/py3_hook.py | 8
-rw-r--r--  testing/resources/python3_hooks_repo/setup.py | 8
-rw-r--r--  testing/resources/python_hooks_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/python_hooks_repo/foo.py | 7
-rw-r--r--  testing/resources/python_hooks_repo/setup.py | 8
-rw-r--r--  testing/resources/python_venv_hooks_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/python_venv_hooks_repo/foo.py | 7
-rw-r--r--  testing/resources/python_venv_hooks_repo/setup.py | 8
-rw-r--r--  testing/resources/ruby_hooks_repo/.gitignore | 1
-rw-r--r--  testing/resources/ruby_hooks_repo/.pre-commit-hooks.yaml | 5
-rwxr-xr-x  testing/resources/ruby_hooks_repo/bin/ruby_hook | 3
-rw-r--r--  testing/resources/ruby_hooks_repo/lib/.gitignore | 0
-rw-r--r--  testing/resources/ruby_hooks_repo/ruby_hook.gemspec | 9
-rw-r--r--  testing/resources/ruby_versioned_hooks_repo/.gitignore | 1
-rw-r--r--  testing/resources/ruby_versioned_hooks_repo/.pre-commit-hooks.yaml | 6
-rwxr-xr-x  testing/resources/ruby_versioned_hooks_repo/bin/ruby_hook | 4
-rw-r--r--  testing/resources/ruby_versioned_hooks_repo/lib/.gitignore | 0
-rw-r--r--  testing/resources/ruby_versioned_hooks_repo/ruby_hook.gemspec | 9
-rw-r--r--  testing/resources/rust_hooks_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/rust_hooks_repo/Cargo.lock | 3
-rw-r--r--  testing/resources/rust_hooks_repo/Cargo.toml | 3
-rw-r--r--  testing/resources/rust_hooks_repo/src/main.rs | 3
-rw-r--r--  testing/resources/script_hooks_repo/.pre-commit-hooks.yaml | 5
-rwxr-xr-x  testing/resources/script_hooks_repo/bin/hook.sh | 4
-rw-r--r--  testing/resources/stdout_stderr_repo/.pre-commit-hooks.yaml | 8
-rwxr-xr-x  testing/resources/stdout_stderr_repo/stdout-stderr-entry | 7
-rwxr-xr-x  testing/resources/stdout_stderr_repo/tty-check-entry | 11
-rw-r--r--  testing/resources/swift_hooks_repo/.gitignore | 4
-rw-r--r--  testing/resources/swift_hooks_repo/.pre-commit-hooks.yaml | 6
-rw-r--r--  testing/resources/swift_hooks_repo/Package.swift | 7
-rw-r--r--  testing/resources/swift_hooks_repo/Sources/swift_hooks_repo/main.swift | 1
-rw-r--r--  testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml | 5
-rw-r--r--  testing/resources/types_repo/.pre-commit-hooks.yaml | 5
-rwxr-xr-x  testing/resources/types_repo/bin/hook.sh | 3
-rw-r--r--  testing/util.py | 113
-rw-r--r--  tests/__init__.py | 0
-rw-r--r--  tests/clientlib_test.py | 313
-rw-r--r--  tests/color_test.py | 59
-rw-r--r--  tests/commands/__init__.py | 0
-rw-r--r--  tests/commands/autoupdate_test.py | 437
-rw-r--r--  tests/commands/clean_test.py | 33
-rw-r--r--  tests/commands/gc_test.py | 161
-rw-r--r--  tests/commands/hook_impl_test.py | 235
-rw-r--r--  tests/commands/init_templatedir_test.py | 92
-rw-r--r--  tests/commands/install_uninstall_test.py | 901
-rw-r--r--  tests/commands/migrate_config_test.py | 156
-rw-r--r--  tests/commands/run_test.py | 1012
-rw-r--r--  tests/commands/sample_config_test.py | 19
-rw-r--r--  tests/commands/try_repo_test.py | 151
-rw-r--r--  tests/conftest.py | 277
-rw-r--r--  tests/envcontext_test.py | 101
-rw-r--r--  tests/error_handler_test.py | 170
-rw-r--r--  tests/git_test.py | 188
-rw-r--r--  tests/languages/__init__.py | 0
-rw-r--r--  tests/languages/docker_test.py | 23
-rw-r--r--  tests/languages/golang_test.py | 20
-rw-r--r--  tests/languages/helpers_test.py | 82
-rw-r--r--  tests/languages/pygrep_test.py | 65
-rw-r--r--  tests/languages/python_test.py | 75
-rw-r--r--  tests/languages/ruby_test.py | 28
-rw-r--r--  tests/logging_handler_test.py | 21
-rw-r--r--  tests/main_test.py | 189
-rw-r--r--  tests/make_archives_test.py | 46
-rw-r--r--  tests/meta_hooks/__init__.py | 0
-rw-r--r--  tests/meta_hooks/check_hooks_apply_test.py | 138
-rw-r--r--  tests/meta_hooks/check_useless_excludes_test.py | 115
-rw-r--r--  tests/meta_hooks/identity_test.py | 6
-rw-r--r--  tests/output_test.py | 9
-rw-r--r--  tests/parse_shebang_test.py | 152
-rw-r--r--  tests/prefix_test.py | 44
-rw-r--r--  tests/repository_test.py | 902
-rw-r--r--  tests/staged_files_only_test.py | 349
-rw-r--r--  tests/store_test.py | 216
-rw-r--r--  tests/util_test.py | 122
-rw-r--r--  tests/xargs_test.py | 197
-rw-r--r--  tox.ini | 28
199 files changed, 14930 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..5428b0a
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+*.egg-info
+*.py[co]
+/.coverage
+/.mypy_cache
+/.pytest_cache
+/.tox
+/dist
+/venv*
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..c2df486
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,54 @@
+repos:
+-   repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.5.0
+    hooks:
+    -   id: trailing-whitespace
+    -   id: end-of-file-fixer
+    -   id: check-docstring-first
+    -   id: check-json
+    -   id: check-yaml
+    -   id: debug-statements
+    -   id: name-tests-test
+    -   id: requirements-txt-fixer
+    -   id: double-quote-string-fixer
+-   repo: https://gitlab.com/pycqa/flake8
+    rev: 3.7.9
+    hooks:
+    -   id: flake8
+        additional_dependencies: [flake8-typing-imports==1.6.0]
+-   repo: https://github.com/pre-commit/mirrors-autopep8
+    rev: v1.5
+    hooks:
+    -   id: autopep8
+-   repo: https://github.com/pre-commit/pre-commit
+    rev: v2.1.1
+    hooks:
+    -   id: validate_manifest
+-   repo: https://github.com/asottile/pyupgrade
+    rev: v2.0.1
+    hooks:
+    -   id: pyupgrade
+        args: [--py36-plus]
+-   repo: https://github.com/asottile/reorder_python_imports
+    rev: v1.9.0
+    hooks:
+    -   id: reorder-python-imports
+        args: [--py3-plus]
+-   repo: https://github.com/asottile/add-trailing-comma
+    rev: v1.5.0
+    hooks:
+    -   id: add-trailing-comma
+        args: [--py36-plus]
+-   repo: https://github.com/asottile/setup-cfg-fmt
+    rev: v1.6.0
+    hooks:
+    -   id: setup-cfg-fmt
+-   repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v0.761
+    hooks:
+    -   id: mypy
+        exclude: ^testing/resources/
+-   repo: meta
+    hooks:
+    -   id: check-hooks-apply
+    -   id: check-useless-excludes
diff --git a/.pre-commit-hooks.yaml b/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..ef269d1
--- /dev/null
+++ b/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+-   id: validate_manifest
+    name: Validate Pre-Commit Manifest
+    description: This validator validates a pre-commit hooks manifest file
+    entry: pre-commit-validate-manifest
+    language: python
+    files: ^(\.pre-commit-hooks\.yaml|hooks\.yaml)$
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..9a6892c
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,1407 @@
+2.2.0 - 2020-03-12
+==================
+
+### Features
+- Add support for the `post-checkout` hook
+ - #1210 issue by @domenkozar.
+ - #1339 PR by @andrewhare.
+- Add more readable `--from-ref` / `--to-ref` aliases for `--source` /
+ `--origin`
+ - #1343 PR by @asottile.
+
+### Fixes
+- Make sure that `--commit-msg-filename` is passed for `commit-msg` /
+ `prepare-commit-msg`.
+ - #1336 PR by @particledecay.
+ - #1341 PR by @particledecay.
+- Fix crash when installation error is un-decodable bytes
+ - #1358 issue by @Guts.
+ - #1359 PR by @asottile.
+- Fix python `healthy()` check when `python` executable goes missing.
+ - #1363 PR by @asottile.
+- Fix crash when script executables are missing shebangs.
+ - #1350 issue by @chriselion.
+ - #1364 PR by @asottile.
+
+### Misc.
+- pre-commit now requires python>=3.6.1 (previously 3.6.0)
+ - #1346 PR by @asottile.
+
+2.1.1 - 2020-02-24
+==================
+
+### Fixes
+- Temporarily restore python 3.6.0 support (broken in 2.0.0)
+ - reported by @obestwalter.
+ - 081f3028 by @asottile.
+
+2.1.0 - 2020-02-18
+==================
+
+### Features
+- Replace `aspy.yaml` with `sort_keys=False`.
+ - #1306 PR by @asottile.
+- Add support for `perl`.
+ - #1303 PR by @scop.
+
+### Fixes
+- Improve `.git/hooks/*` shebang creation when pythons are in `/usr/local/bin`.
+ - #1312 issue by @kbsezginel.
+ - #1319 PR by @asottile.
+
+### Misc.
+- Add repository badge for pre-commit.
+ - [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit)
+ - #1334 PR by @ddelange.
+
+2.0.1 - 2020-01-29
+==================
+
+### Fixes
+- Fix `ImportError` in python 3.6.0 / 3.6.1 for `typing.NoReturn`.
+ - #1302 PR by @asottile.
+
+2.0.0 - 2020-01-28
+==================
+
+### Features
+- Expose `PRE_COMMIT_REMOTE_NAME` and `PRE_COMMIT_REMOTE_URL` as environment
+ variables during `pre-push` hooks.
+ - #1274 issue by @dmbarreiro.
+ - #1288 PR by @dmbarreiro.
+
+### Fixes
+- Fix `python -m pre_commit --version` to mention `pre-commit` instead of
+ `__main__.py`.
+ - #1273 issue by @ssbarnea.
+ - #1276 PR by @orcutt989.
+- Don't filter `GIT_SSL_NO_VERIFY` from environment when cloning.
+ - #1293 PR by @schiermike.
+- Allow `pre-commit init-templatedir` to succeed even if `core.hooksPath` is
+ set.
+ - #1298 issue by @damienrj.
+ - #1299 PR by @asottile.
+
+### Misc
+- Fix changelog date for 1.21.0.
+ - #1275 PR by @flaudisio.
+
+### Updating
+- Removed `pcre` language, use `pygrep` instead.
+ - #1268 PR by @asottile.
+- Removed `--tags-only` argument to `pre-commit autoupdate` (it has done
+ nothing since 0.14.0).
+ - #1269 by @asottile.
+- Remove python2 / python3.5 support. Note that pre-commit still supports
+ running hooks written in python2, but pre-commit itself requires python 3.6+.
+ - #1260 issue by @asottile.
+ - #1277 PR by @asottile.
+ - #1281 PR by @asottile.
+ - #1282 PR by @asottile.
+ - #1287 PR by @asottile.
+ - #1289 PR by @asottile.
+ - #1292 PR by @asottile.
+
+1.21.0 - 2020-01-02
+===================
+
+### Features
+- Add `conda` as a new `language`.
+ - #1204 issue by @xhochy.
+ - #1232 PR by @xhochy.
+- Add top-level configuration `files` for file selection.
+ - #1220 issue by @TheButlah.
+ - #1248 PR by @asottile.
+- Rework `--verbose` / `verbose` to be more consistent with normal runs.
+ - #1249 PR by @asottile.
+- Add support for the `pre-merge-commit` git hook.
+ - #1210 PR by @asottile.
+ - this requires git 2.24+.
+- Add `pre-commit autoupdate --freeze` which produces "frozen" revisions.
+ - #1068 issue by @SkypLabs.
+ - #1256 PR by @asottile.
+- Display hook runtime duration when run with `--verbose`.
+ - #1144 issue by @potiuk.
+ - #1257 PR by @asottile.
+
+### Fixes
+- Produce better error message when erroneously running inside of `.git`.
+ - #1219 issue by @Nusserdt.
+ - #1224 PR by @asottile.
+ - Note: `git` has since fixed this bug: git/git@36fd304d
+- Produce better error message when hook installation fails.
+ - #1250 issue by @asottile.
+ - #1251 PR by @asottile.
+- Fix cloning when `GIT_SSL_CAINFO` is necessary.
+ - #1253 issue by @igankevich.
+ - #1254 PR by @igankevich.
+- Fix `pre-commit try-repo` for bare, on-disk repositories.
+ - #1258 issue by @webknjaz.
+ - #1259 PR by @asottile.
+- Add some whitespace to `pre-commit autoupdate` to improve terminal autolink.
+ - #1261 issue by @yhoiseth.
+ - #1262 PR by @yhoiseth.
+
+### Misc.
+- Minor code documentation updates.
+ - #1200 PR by @ryanrhee.
+ - #1201 PR by @ryanrhee.
+
+1.20.0 - 2019-10-28
+===================
+
+### Features
+- Allow building newer versions of `ruby`.
+ - #1193 issue by @choffee.
+ - #1195 PR by @choffee.
+- Bump versions reported in `pre-commit sample-config`.
+ - #1197 PR by @asottile.
+
+### Fixes
+- Fix rare race condition with multiple concurrent first-time runs.
+ - #1192 issue by @raholler.
+ - #1196 PR by @asottile.
+
+1.19.0 - 2019-10-26
+===================
+
+### Features
+- Allow `--hook-type` to be specified multiple times.
+ - example: `pre-commit install --hook-type pre-commit --hook-type pre-push`
+ - #1139 issue by @MaxymVlasov.
+ - #1145 PR by @asottile.
+- Include more version information in crash logs.
+ - #1142 by @marqueewinq.
+- Hook colors are now passed through on platforms which support `pty`.
+ - #1169 by @asottile.
+- pre-commit now uses `importlib.metadata` directly when running in python 3.8
+ - #1176 by @asottile.
+- Normalize paths to forward slash separators on windows.
+ - makes it easier to match paths with `files:` regex
+ - avoids some quoting bugs in shell-based hooks
+ - #1173 issue by @steigenTI.
+ - #1179 PR by @asottile.
+
+### Fixes
+- Remove some extra newlines from error messages.
+ - #1148 by @asottile.
+- When a hook is not executable it now reports `not executable` instead of
+ `not found`.
+ - #1159 issue by @nixjdm.
+ - #1161 PR by @WillKoehrsen.
+- Fix interleaving of stdout / stderr in hooks.
+ - #1168 by @asottile.
+- Fix python environment `healthy()` check when current working directory
+ contains modules which shadow standard library names.
+ - issue by @vwhsu92.
+ - #1185 PR by @asottile.
+
+### Updating
+- Regexes handling both backslashes and forward slashes for directory
+ separators now only need to handle forward slashes.
+
+1.18.3 - 2019-08-27
+===================
+
+### Fixes
+- Fix `node_modules` plugin installation on windows
+ - #1123 issue by @henryykt.
+ - #1122 PR by @henryykt.
+
+1.18.2 - 2019-08-15
+===================
+
+### Fixes
+- Make default python lookup more deterministic to avoid redundant installs
+ - #1117 PR by @scop.
+
+1.18.1 - 2019-08-11
+===================
+
+### Fixes
+- Fix installation of `rust` hooks with new `cargo`
+ - #1112 issue by @zimbatm.
+ - #1113 PR by @zimbatm.
+
+1.18.0 - 2019-08-03
+===================
+
+### Features
+- Use the current running executable if it matches the requested
+ `language_version`
+ - #1062 PR by @asottile.
+- Print the stage when a hook is not found
+ - #1078 issue by @madkinsz.
+ - #1079 PR by @madkinsz.
+- `pre-commit autoupdate` now supports non-`master` default branches
+ - #1089 PR by @asottile.
+- Add `pre-commit init-templatedir` which makes it easier to automatically
+ enable `pre-commit` in cloned repositories.
+ - #1084 issue by @ssbarnea.
+ - #1090 PR by @asottile.
+ - #1107 PR by @asottile.
+- pre-commit's color can be controlled using
+ `PRE_COMMIT_COLOR={auto,always,never}`
+ - #1073 issue by @saper.
+ - #1092 PR by @geieredgar.
+ - #1098 PR by @geieredgar.
+- pre-commit's color can now be disabled using `TERM=dumb`
+ - #1073 issue by @saper.
+ - #1103 PR by @asottile.
+- pre-commit now supports `docker` based hooks on windows
+ - #1072 by @cz-fish.
+ - #1093 PR by @geieredgar.
+
+### Fixes
+- Fix shallow clone
+ - #1077 PR by @asottile.
+- Fix autoupdate version flip flop when using shallow cloning
+ - #1076 issue by @mxr.
+ - #1088 PR by @asottile.
+- Fix autoupdate when the current revision is invalid
+ - #1088 PR by @asottile.
+
+### Misc.
+- Replace development instructions with `tox --devenv ...`
+ - #1032 issue by @yoavcaspi.
+ - #1067 PR by @asottile.
+
+
+1.17.0 - 2019-06-06
+===================
+
+### Features
+- Produce better output on `^C`
+ - #1030 PR by @asottile.
+- Warn on unknown keys at the top level and repo level
+ - #1028 PR by @yoavcaspi.
+ - #1048 PR by @asottile.
+
+### Fixes
+- Fix handling of `^C` in wrapper script in python 3.x
+ - #1027 PR by @asottile.
+- Fix `rmtree` for non-writable directories
+ - #1042 issue by @detailyang.
+ - #1043 PR by @asottile.
+- Pass `--color` option to `git diff` in `--show-diff-on-failure`
+ - #1007 issue by @chadrik.
+ - #1051 PR by @mandarvaze.
+
+### Misc.
+- Fix test when `pre-commit` is installed globally
+ - #1032 issue by @yoavcaspi.
+ - #1045 PR by @asottile.
+
+
+1.16.1 - 2019-05-08
+===================
+
+### Fixes
+- Don't `UnicodeDecodeError` on unexpected non-UTF8 output in python health
+ check on windows.
+ - #1021 issue by @nicoddemus.
+ - #1022 PR by @asottile.
+
+1.16.0 - 2019-05-04
+===================
+
+### Features
+- Add support for `prepare-commit-msg` hook
+ - #1004 PR by @marcjay.
+
+### Fixes
+- Fix repeated legacy `pre-commit install` on windows
+ - #1010 issue by @AbhimanyuHK.
+ - #1011 PR by @asottile.
+- Whitespace fixup
+ - #1014 PR by @mxr.
+- Fix CI check for working pcre support
+ - #1015 PR by @Myrheimb.
+
+### Misc.
+- Switch CI from travis / appveyor to azure pipelines
+ - #1012 PR by @asottile.
+
+1.15.2 - 2019-04-16
+===================
+
+### Fixes
+- Fix cloning non-branch tag while in the fallback slow-clone strategy.
+ - #997 issue by @jpinner.
+ - #998 PR by @asottile.
+
+1.15.1 - 2019-04-01
+===================
+
+### Fixes
+- Fix command length calculation on posix when `SC_ARG_MAX` is not defined.
+ - #691 issue by @ushuz.
+ - #987 PR by @asottile.
+
+1.15.0 - 2019-03-30
+===================
+
+### Features
+- No longer require being in a `git` repo to run `pre-commit` `clean` / `gc` /
+ `sample-config`.
+ - #959 PR by @asottile.
+- Improve command line length limit detection.
+ - #691 issue by @antonbabenko.
+ - #966 PR by @asottile.
+- Use shallow cloning when possible.
+ - #958 PR by @DanielChabrowski.
+- Add `minimum_pre_commit_version` top level key to require a new-enough
+ version of `pre-commit`.
+ - #977 PR by @asottile.
+- Add helpful CI-friendly message when running
+ `pre-commit run --all-files --show-diff-on-failure`.
+ - #982 PR by @bnorquist.
+
+### Fixes
+- Fix `try-repo` for staged untracked changes.
+ - #973 PR by @DanielChabrowski.
+- Fix rpm build by explicitly using `#!/usr/bin/env python3` in hook template.
+ - #985 issue by @tim77.
+ - #986 PR by @tim77.
+- Guard against infinite recursion when executing legacy hook script.
+ - #981 PR by @tristan0x.
+
+### Misc
+- Add test for `git.no_git_env()`
+ - #972 PR by @javabrett.
+
+1.14.4 - 2019-02-18
+===================
+
+### Fixes
+- Don't filter `GIT_SSH_COMMAND` env variable from `git` commands
+ - #947 issue by @firba1.
+ - #948 PR by @firba1.
+- Install npm packages as if they were installed from `git`
+ - #943 issue by @ssbarnea.
+ - #949 PR by @asottile.
+- Don't filter `GIT_EXEC_PREFIX` env variable from `git` commands
+ - #664 issue by @revolter.
+ - #944 PR by @minrk.
+
+1.14.3 - 2019-02-04
+===================
+
+### Fixes
+- Improve performance of filename classification by 45% - 55%.
+ - #921 PR by @asottile.
+- Fix installing `go` hooks while `GOBIN` environment variable is set.
+ - #924 PR by @ashanbrown.
+- Fix crash while running `pre-commit migrate-config` / `pre-commit autoupdate`
+ with an empty configuration file.
+ - #929 issue by @ardakuyumcu.
+ - #933 PR by @jessebona.
+- Require a newer virtualenv to fix metadata-based setup.cfg installs.
+ - #936 PR by @asottile.
+
+1.14.2 - 2019-01-10
+===================
+
+### Fixes
+- Make the hook shebang detection more timid (1.14.0 regression)
+ - Homebrew/homebrew-core#35825.
+ - #915 PR by @asottile.
+
+1.14.1 - 2019-01-10
+===================
+
+### Fixes
+- Fix python executable lookup on windows when using conda
+ - #913 issue by @dawelter2.
+ - #914 PR by @asottile.
+
+1.14.0 - 2019-01-08
+===================
+
+### Features
+- Add an `alias` configuration value to allow repeated hooks to be
+ differentiated
+ - #882 issue by @s0undt3ch.
+ - #886 PR by @s0undt3ch.
+- Add `identity` meta hook which just prints filenames
+ - #865 issue by @asottile.
+ - #898 PR by @asottile.
+- Factor out `cached-property` and improve startup performance by ~10%
+ - #899 PR by @asottile.
+- Add a warning on unexpected keys in configuration
+ - #899 PR by @asottile.
+- Teach `pre-commit try-repo` to clone uncommitted changes on disk.
+ - #589 issue by @sverhagen.
+ - #703 issue by @asottile.
+ - #904 PR by @asottile.
+- Implement `pre-commit gc` which will clean up no-longer-referenced cache
+ repos.
+ - #283 issue by @jtwang.
+ - #906 PR by @asottile.
+- Add top level config `default_language_version` to streamline overriding the
+ `language_version` configuration in many places
+ - #647 issue by @asottile.
+ - #908 PR by @asottile.
+- Add top level config `default_stages` to streamline overriding the `stages`
+ configuration in many places
+ - #768 issue by @mattlqx.
+ - #909 PR by @asottile.
+
+### Fixes
+- More intelligently pick hook shebang (`#!/usr/bin/env python3`)
+ - #878 issue by @fristedt.
+ - #893 PR by @asottile.
+- Several fixes related to `--files` / `--config`:
+ - `pre-commit run --files x` outside of a git dir no longer stacktraces
+ - `pre-commit run --config ./relative` while in a sub directory of the git
+ repo is now able to find the configuration
+ - `pre-commit run --files ...` no longer runs a subprocess per file
+ (performance)
+ - #895 PR by @asottile.
+- `pre-commit try-repo ./relative` while in a sub directory of the git repo is
+ now able to clone properly
+ - #903 PR by @asottile.
+- Ensure `meta` repos cannot have a language other than `system`
+ - #905 issue by @asottile.
+ - #907 PR by @asottile.
+- Fix committing with unstaged files that were `git add --intent-to-add` added
+ - #881 issue by @henniss.
+ - #912 PR by @asottile.
+
+### Misc.
+- Use `--no-gpg-sign` when running tests
+ - #894 PR by @s0undt3ch.
+
+
+1.13.0 - 2018-12-20
+===================
+
+### Features
+- Run hooks in parallel
+ - individual hooks may opt out of parallel execution with `require_serial: true`
+ - #510 issue by @chriskuehl.
+ - #851 PR by @chriskuehl.
+
+### Fixes
+- Improve platform-specific `xargs` command length detection
+ - #691 issue by @antonbabenko.
+ - #839 PR by @georgeyk.
+- Fix `pre-commit autoupdate` when updating to a latest tag missing a
+ `.pre-commit-hooks.yaml`
+ - #856 issue by @asottile.
+ - #857 PR by @runz0rd.
+- Upgrade the `pre-commit-hooks` version in `pre-commit sample-config`
+ - #870 by @asottile.
+- Improve balancing of multiprocessing by deterministic shuffling of args
+ - #861 issue by @Dunedan.
+ - #874 PR by @chriskuehl.
+- `ruby` hooks work with latest `gem` by removing `--no-ri` / `--no-rdoc` and
+ instead using `--no-document`.
+ - #889 PR by @asottile.
+
+### Misc.
+- Use `--no-gpg-sign` when running tests
+ - #885 PR by @s0undt3ch.
+
+### Updating
+- If a hook requires serial execution, set `require_serial: true` to avoid the new
+ parallel execution.
+- `ruby` hooks now require `gem>=2.0.0`. If your platform doesn't support this
+ by default, select a newer version using
+ [`language_version`](https://pre-commit.com/#overriding-language-version).
+
+
+1.12.0 - 2018-10-23
+===================
+
+### Fixes
+- Install multi-hook repositories only once (performance)
+ - issue by @chriskuehl.
+ - #852 PR by @asottile.
+- Improve performance by factoring out pkg_resources (performance)
+ - #840 issue by @RonnyPfannschmidt.
+ - #846 PR by @asottile.
+
+1.11.2 - 2018-10-10
+===================
+
+### Fixes
+- `check-useless-excludes` now considers `types`
+ - #704 issue by @asottile.
+ - #837 PR by @georgeyk.
+- `pre-push` hook was not identifying all commits on push to new branch
+ - #843 issue by @prem-nuro.
+ - #844 PR by @asottile.
+
+1.11.1 - 2018-09-22
+===================
+
+### Fixes
+- Fix `.git` dir detection in `git<2.5` (regression introduced in
+ [1.10.5](#1105))
+ - #831 issue by @mmacpherson.
+ - #832 PR by @asottile.
+
+1.11.0 - 2018-09-02
+===================
+
+### Features
+- Add new `fail` language which always fails
+ - light-weight way to forbid files by name.
+ - #812 #821 PRs by @asottile.
+
+### Fixes
+- Fix `ResourceWarning`s for unclosed files
+ - #811 PR by @BoboTiG.
+- Don't write ANSI colors on windows when color enabling fails
+ - #819 PR by @jeffreyrack.
+
+1.10.5 - 2018-08-06
+===================
+
+### Fixes
+- Work around `PATH` issue with `brew` `python` on `macos`
+ - Homebrew/homebrew-core#30445 issue by @asottile.
+ - #805 PR by @asottile.
+- Support `pre-commit install` inside a worktree
+ - #808 issue by @s0undt3ch.
+ - #809 PR by @asottile.
+
+1.10.4 - 2018-07-22
+===================
+
+### Fixes
+- Replace `yaml.load` with safe alternative
+ - `yaml.load` can lead to arbitrary code execution, though not where it
+ was used
+ - issue by @tonybaloney.
+ - #779 PR by @asottile.
+- Improve not found error with script paths (`./exe`)
+ - #782 issue by @ssbarnea.
+ - #785 PR by @asottile.
+- Fix minor buffering issue during `--show-diff-on-failure`
+ - #796 PR by @asottile.
+- Default `language_version: python3` for `python_venv` when running in python2
+ - #794 issue by @ssbarnea.
+ - #797 PR by @asottile.
+- `pre-commit run X` only run `X` and not hooks with `stages: [...]`
+ - #772 issue by @asottile.
+ - #803 PR by @mblayman.
+
+### Misc.
+- Improve travis-ci build times by caching rust / swift artifacts
+ - #781 PR by @expobrain.
+- Test against python3.7
+ - #789 PR by @expobrain.
+
+1.10.3 - 2018-07-02
+===================
+
+### Fixes
+- Fix `pre-push` during a force push without a fetch
+ - #777 issue by @domenkozar.
+ - #778 PR by @asottile.
+
+1.10.2 - 2018-06-11
+===================
+
+### Fixes
+- pre-commit now invokes hooks with a consistent ordering of filenames
+ - issue by @mxr.
+ - #767 PR by @asottile.
+
+1.10.1 - 2018-05-28
+===================
+
+### Fixes
+- `python_venv` language would leak dependencies when pre-commit was installed
+ in a `-mvirtualenv` virtualenv
+ - #755 #756 issue and PR by @asottile.
+
+1.10.0 - 2018-05-26
+===================
+
+### Features
+- Add support for hooks written in `rust`
+ - #751 PR by @chriskuehl.
+
+1.9.0 - 2018-05-21
+==================
+
+### Features
+- Add new `python_venv` language which uses the `venv` module instead of
+ `virtualenv`
+ - #631 issue by @dongyuzheng.
+ - #739 PR by @ojii.
+- Include `LICENSE` in distribution
+ - #745 issue by @nicoddemus.
+ - #746 PR by @nicoddemus.
+
+### Fixes
+- Normalize relative paths for `pre-commit try-repo`
+ - #750 PR by @asottile.
+
+
+1.8.2 - 2018-03-17
+==================
+
+### Fixes
+- Fix cloning relative paths (regression in 1.7.0)
+ - #728 issue by @jdswensen.
+ - #729 PR by @asottile.
+
+
+1.8.1 - 2018-03-12
+==================
+
+### Fixes
+- Fix integration with go 1.10 and `pkg` directory
+ - #725 PR by @asottile
+- Restore support for `git<1.8.5` (inadvertently removed in 1.7.0)
+ - #723 issue by @JohnLyman.
+ - #724 PR by @asottile.
+
+
+1.8.0 - 2018-03-11
+==================
+
+### Features
+- Add a `manual` stage for cli-only interaction
+ - #719 issue by @hectorv.
+ - #720 PR by @asottile.
+- Add a `--multiline` option to `pygrep` hooks
+ - #716 PR by @tdeo.
+
+
+1.7.0 - 2018-03-03
+==================
+
+### Features
+- pre-commit config validation was split to a separate `cfgv` library
+ - #700 PR by @asottile.
+- Allow `--repo` to be specified multiple times to autoupdate
+ - #658 issue by @KevinHock.
+ - #713 PR by @asottile.
+- Enable `rev` as a preferred alternative to `sha` in `.pre-commit-config.yaml`
+ - #106 issue by @asottile.
+ - #715 PR by @asottile.
+- Use `--clean-src` option when invoking `nodeenv` to save ~70MB per node env
+ - #717 PR by @asottile.
+- Refuse to install with `core.hooksPath` set
+ - pre-commit/pre-commit-hooks#250 issue by @revolter.
+ - #663 issue by @asottile.
+ - #718 PR by @asottile.
+
+### Fixes
+- hooks with `additional_dependencies` now get isolated environments
+ - #590 issue by @coldnight.
+ - #711 PR by @asottile.
+
+### Misc.
+- test against swift 4.x
+ - #709 by @theresama.
+
+### Updating
+
+- Run `pre-commit migrate-config` to convert `sha` to `rev` in the
+ `.pre-commit-config.yaml` file.
+
+
+1.6.0 - 2018-02-04
+==================
+
+### Features
+- Hooks now may have a `verbose` option to produce output even without failure
+ - #689 issue by @bagerard.
+ - #695 PR by @bagerard.
+- Installed hook no longer requires `bash`
+ - #699 PR by @asottile.
+
+### Fixes
+- legacy pre-push / commit-msg hooks are now invoked as if `git` called them
+ - #693 issue by @samskiter.
+ - #694 PR by @asottile.
+ - #699 PR by @asottile.
+
+1.5.1 - 2018-01-24
+==================
+
+### Fixes
+- proper detection for root commit during pre-push
+ - #503 PR by @philipgian.
+ - #692 PR by @samskiter.
+
+1.5.0 - 2018-01-13
+==================
+
+### Features
+- pre-commit now supports node hooks on windows.
+ - for now, requires python3 due to https://bugs.python.org/issue32539
+ - huge thanks to @wenzowski for the tip!
+ - #200 issue by @asottile.
+ - #685 PR by @asottile.
+
+### Misc.
+- internal reorganization of `PrefixedCommandRunner` -> `Prefix`
+ - #684 PR by @asottile.
+- https-ify links.
+ - pre-commit.com is now served over https.
+ - #688 PR by @asottile.
+
+
+1.4.5 - 2018-01-09
+==================
+
+### Fixes
+- Fix `local` golang repositories with `additional_dependencies`.
+ - #679 #680 issue and PR by @asottile.
+
+### Misc.
+- Replace some string literals with constants
+ - #678 PR by @revolter.
+
+1.4.4 - 2018-01-07
+==================
+
+### Fixes
+- Invoke `git diff` without a pager during `--show-diff-on-failure`.
+ - #676 PR by @asottile.
+
+1.4.3 - 2018-01-02
+==================
+
+### Fixes
+- `pre-commit` on windows can find pythons at non-hardcoded paths.
+ - #674 PR by @asottile.
+
+1.4.2 - 2018-01-02
+==================
+
+### Fixes
+- `pre-commit` no longer clears `GIT_SSH` environment variable when cloning.
+ - #671 PR by @rp-tanium.
+
+1.4.1 - 2017-11-09
+==================
+
+### Fixes
+- `pre-commit autoupdate --repo ...` no longer deletes other repos.
+ - #660 issue by @KevinHock.
+ - #661 PR by @KevinHock.
+
+1.4.0 - 2017-11-08
+==================
+
+### Features
+- Lazily install repositories.
+ - When running `pre-commit run <hookid>`, pre-commit will only install
+ the necessary repositories.
+ - #637 issue by @webknjaz.
+ - #639 PR by @asottile.
+- Version defaulting now applies to local hooks as well.
+ - This extends #556 to apply to local hooks.
+ - #646 PR by @asottile.
+- Add new `repo: meta` hooks.
+ - `meta` hooks expose some linters of the pre-commit configuration itself.
+ - `id: check-useless-excludes`: ensures that `exclude` directives actually
+ apply to *any* file in the repository.
+ - `id: check-hooks-apply`: ensures that the configured hooks apply to
+ at least one file in the repository.
+ - pre-commit/pre-commit-hooks#63 issue by @asottile.
+ - #405 issue by @asottile.
+ - #643 PR by @hackedd.
+ - #653 PR by @asottile.
+ - #654 PR by @asottile.
+- Allow a specific repository to be autoupdated instead of all repositories.
+ - `pre-commit autoupdate --repo ...`
+ - #656 issue by @KevinHock.
+ - #657 PR by @KevinHock.
+
+### Fixes
+- Apply selinux labelling option to docker volumes
+ - #642 PR by @jimmidyson.
+
+
+1.3.0 - 2017-10-08
+==================
+
+### Features
+- Add `pre-commit try-repo` commands
+ - The new `try-repo` takes a repo and will run the hooks configured in
+ that hook repository.
+ - An example invocation:
+ `pre-commit try-repo https://github.com/pre-commit/pre-commit-hooks`
+ - `pre-commit try-repo` can also take all the same arguments as
+ `pre-commit run`.
+ - It can be used to try out a repository without needing to configure it.
+ - It can also be used to test a hook repository while developing it.
+ - #589 issue by @sverhagen.
+ - #633 PR by @asottile.
+
+1.2.0 - 2017-10-03
+==================
+
+### Features
+- Add `pygrep` language
+ - `pygrep` aims to be a more cross-platform alternative to `pcre` hooks.
+ - #630 PR by @asottile.
+
+### Fixes
+- Use `pipes.quote` for executable path in hook template
+ - Fixes bash syntax error when git dir contains spaces
+ - #626 PR by @asottile.
+- Clean up hook template
+ - Simplify code
+ - Fix `--config` not being respected in some situations
+ - #627 PR by @asottile.
+- Use `file://` protocol for cloning under test
+ - Fix `file://` clone paths being treated as urls for golang
+ - #629 PR by @asottile.
+- Add `ctypes` as an import for virtualenv healthchecks
+ - Fixes python3.6.2 <=> python3.6.3 virtualenv invalidation
+ - e70825ab by @asottile.
+
+1.1.2 - 2017-09-20
+==================
+
+### Fixes
+- pre-commit can successfully install commit-msg hooks
+ - Due to an oversight, the commit-msg-tmpl was missing from the packaging
+ - #623 issue by @sobolevn.
+ - #624 PR by @asottile.
+
+1.1.1 - 2017-09-17
+==================
+
+### Features
+- pre-commit also checks the `ssl` module for virtualenv health
+ - Suggestion by @merwok.
+ - #619 PR by @asottile.
+### Fixes
+- pre-commit no longer crashes with unstaged files when run for the first time
+ - #620 #621 issue by @Lucas-C.
+ - #622 PR by @asottile.
+
+1.1.0 - 2017-09-11
+==================
+
+### Features
+- pre-commit configuration gains a `fail_fast` option.
+ - You must be using the v2 configuration format introduced in 1.0.0.
+ - `fail_fast` defaults to `false`.
+ - #240 issue by @Lucas-C.
+ - #616 PR by @asottile.
+- pre-commit configuration gains a global `exclude` option.
+ - This option takes a python regular expression and can be used to exclude
+ files from _all_ hooks.
+ - You must be using the v2 configuration format introduced in 1.0.0.
+ - #281 issue by @asieira.
+ - #617 PR by @asottile.
+
+1.0.1 - 2017-09-07
+==================
+
+### Fixes
+- Fix a regression in the return code of `pre-commit autoupdate`
+ - `pre-commit migrate-config` and `pre-commit autoupdate` return 0 when
+ successful.
+ - #614 PR by @asottile.
+
+1.0.0 - 2017-09-07
+==================
+pre-commit will now be following [semver](https://semver.org/). Thanks to all
+of the [contributors](https://github.com/pre-commit/pre-commit/graphs/contributors)
+that have helped us get this far!
+
+### Features
+
+- pre-commit's cache directory has moved from `~/.pre-commit` to
+ `$XDG_CACHE_HOME/pre-commit` (usually `~/.cache/pre-commit`).
+ - `pre-commit clean` now cleans up both the old and new directory.
+ - If you were caching this directory in CI, you'll want to adjust the
+ location.
+ - #562 issue by @nagromc.
+ - #602 PR by @asottile.
+- A new configuration format for `.pre-commit-config.yaml` is introduced which
+ will enable future development.
+ - The new format has a top-level map instead of a top-level list. The
+ new format puts the hook repositories in a `repos` key.
+ - Old list-based configurations will continue to be supported.
+ - A command `pre-commit migrate-config` has been introduced to "upgrade"
+ the configuration format to the new map-based configuration.
+ - `pre-commit autoupdate` now automatically calls `migrate-config`.
+ - In a later release, list-based configurations will issue a deprecation
+ warning.
+ - An example diff for upgrading a configuration:
+
+ ```diff
+ +repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ sha: v0.9.2
+ hooks:
+ ```
+ - #414 issue by @asottile.
+ - #610 PR by @asottile.
+
+### Updating
+
+- Run `pre-commit migrate-config` to convert `.pre-commit-config.yaml` to the
+ new map format.
+- Update any references from `~/.pre-commit` to `~/.cache/pre-commit`.
+
+0.18.3 - 2017-09-06
+===================
+- Allow --config to affect `pre-commit install`
+- Tweak not found error message during `pre-push` / `commit-msg`
+- Improve node support when running under cygwin.
+
+0.18.2 - 2017-09-05
+===================
+- Fix `--all-files`, detection of staged files, detection of manually edited
+ files during merge conflict, and detection of files to push for non-ascii
+ filenames.
+
+0.18.1 - 2017-09-04
+===================
+- Only mention locking when waiting for a lock.
+- Fix `IOError` during locking in timeout situation on windows under python 2.
+
+0.18.0 - 2017-09-02
+===================
+- Add a new `docker_image` language type. `docker_image` is intended to be a
+ lightweight hook type similar to `system` / `script` which allows one to use
+ an existing docker image that provides a hook. `docker_image` hooks can
+ also be used as repository `local` hooks.
+
+0.17.0 - 2017-08-24
+===================
+- Fix typos in help
+- Allow `commit-msg` hook to be uninstalled
+- Upgrade the `sample-config`
+- Remove undocumented `--no-stash` and `--allow-unstaged-config`
+- Remove `validate_config` hook pre-commit hook.
+- Fix installation race condition when multiple `pre-commit` processes would
+ attempt to install the same repository.
+
+0.16.3 - 2017-08-10
+===================
+- autoupdate attempts to maintain config formatting.
+
+0.16.2 - 2017-08-06
+===================
+- Initialize submodules in hook repositories.
+
+0.16.1 - 2017-08-04
+===================
+- Improve node support when running under cygwin.
+
+0.16.0 - 2017-08-01
+===================
+- Remove backward compatibility with repositories providing metadata via
+ `hooks.yaml`. New repositories should provide `.pre-commit-hooks.yaml`.
+ Run `pre-commit autoupdate` to upgrade to the latest repositories.
+- Improve golang support when running under cygwin.
+- Fix crash with unstaged trailing whitespace additions while git was
+ configured with `apply.whitespace = error`.
+- Fix crash with unstaged end-of-file crlf additions and the file's lines
+ ended with crlf while git was configured with `core.autocrlf = true`.
+
+0.15.4 - 2017-07-23
+===================
+- Add support for the `commit-msg` git hook
+
+0.15.3 - 2017-07-20
+===================
+- Recover from invalid python virtualenvs
+
+
+0.15.2 - 2017-07-09
+===================
+- Work around a windows-specific virtualenv bug pypa/virtualenv#1062
+ This failure mode was introduced in 0.15.1
+
+0.15.1 - 2017-07-09
+===================
+- Use a more intelligent default language version for python
+
+0.15.0 - 2017-07-02
+===================
+- Add `types` and `exclude_types` for filtering files. These options take
+ an array of "tags" identified for each file. The tags are sourced from
+ [identify](https://github.com/chriskuehl/identify). One can list the tags
+ for a file by running `identify-cli filename`.
+- `files` is now optional (defaulting to `''`)
+- `always_run` + missing `files` also defaults to `files: ''` (previously it
+ defaulted to `'^$'`; this reverses e150921c).
+
+0.14.3 - 2017-06-28
+===================
+- Expose `--origin` and `--source` as `PRE_COMMIT_ORIGIN` and
+ `PRE_COMMIT_SOURCE` environment variables when running as `pre-push`.
+
+0.14.2 - 2017-06-09
+===================
+- Use `--no-ext-diff` when running `git diff`
+
+0.14.1 - 2017-06-02
+===================
+- Don't crash when `always_run` is `True` and `files` is not provided.
+- Set `VIRTUALENV_NO_DOWNLOAD` when making python virtualenvs.
+
+0.14.0 - 2017-05-16
+===================
+- Add a `pre-commit sample-config` command
+- Enable ansi color escapes on modern windows
+- `autoupdate` now defaults to `--tags-only`, use `--bleeding-edge` for the
+ old behavior
+- Add support for `log_file` in hook configuration to tee hook output to a
+ file for CI consumption, etc.
+- Fix crash with unicode commit messages during merges in python 2.
+- Add a `pass_filenames` option to allow disabling automatic filename
+ positional arguments to hooks.
+
+0.13.6 - 2017-03-27
+===================
+- Fix regression in 0.13.5: allow `always_run` and `files` together despite
+ doing nothing.
+
+0.13.5 - 2017-03-26
+===================
+- 0.13.4 contained incorrect files
+
+0.13.4 - 2017-03-26
+===================
+- Add `--show-diff-on-failure` option to `pre-commit run`
+- Replace `jsonschema` with better error messages
+
+0.13.3 - 2017-02-23
+===================
+- Add `--allow-missing-config` to install: allows `git commit` without a
+ configuration.
+
+0.13.2 - 2017-02-17
+===================
+- Version the local hooks repo
+- Allow `minimum_pre_commit_version` for local hooks
+
+0.13.1 - 2017-02-16
+===================
+- Fix dummy gem for ruby local hooks
+
+0.13.0 - 2017-02-16
+===================
+- Autoupdate now works even when the current state is broken.
+- Improve pre-push fileset on new branches
+- Allow "language local" hooks, hooks which install dependencies using
+ `additional_dependencies` and `language` are now allowed in `repo: local`.
+
+0.12.2 - 2017-01-27
+===================
+- Fix docker hooks on older (<1.12) docker
+
+0.12.1 - 2017-01-25
+===================
+- golang hooks now support additional_dependencies
+- Added a --tags-only option to pre-commit autoupdate
+
+0.12.0 - 2017-01-24
+===================
+- The new default file for implementing hooks in remote repositories is now
+ .pre-commit-hooks.yaml to encourage repositories to add the metadata. As
+ such, the previous hooks.yaml is now deprecated and generates a warning.
+- Fix bug with local configuration interfering with ruby hooks
+- Added support for hooks written in golang.
+
+0.11.0 - 2017-01-20
+===================
+- SwiftPM support.
+
+0.10.1 - 2017-01-05
+===================
+- shlex entry of docker based hooks.
+- Make shlex behaviour of entry more consistent.
+
+0.10.0 - 2017-01-04
+===================
+- Add an `install-hooks` command similar to `install --install-hooks` but
+ without the `install` side-effects.
+- Adds support for docker based hooks.
+
+0.9.4 - 2016-12-05
+==================
+- Warn when cygwin / python mismatch
+- Add --config for customizing configuration during run
+- Update rbenv + plugins to latest versions
+- pcre hooks now fail when grep / ggrep are not present
+
+0.9.3 - 2016-11-07
+==================
+- Fix python hook installation when a strange setup.cfg exists
+
+0.9.2 - 2016-10-25
+==================
+- Remove some python2.6 compatibility
+- UI is no longer sized to terminal width, instead 80 characters or longest
+ necessary width.
+- Fix inability to create python hook environments when using venv / pyvenv on
+ osx
+
+0.9.1 - 2016-09-10
+==================
+- Remove some python2.6 compatibility
+- Fix staged-files-only with external diff tools
+
+0.9.0 - 2016-08-31
+==================
+- Only consider forward diff in changed files
+- Don't run on staged deleted files that still exist
+- Autoupdate to tags when available
+- Stop supporting python2.6
+- Fix crash with staged files containing unstaged lines which have non-utf8
+ bytes and trailing whitespace
+
+0.8.2 - 2016-05-20
+==================
+- Fix a crash introduced in 0.8.0 when an executable was not found
+
+0.8.1 - 2016-05-17
+==================
+- Fix regression introduced in 0.8.0 when already using rbenv with no
+ configured ruby hook version
+
+0.8.0 - 2016-04-11
+==================
+- Fix --files when running in a subdir
+- Improve --help a bit
+- Switch to pyterminalsize for determining terminal size
+
+0.7.6 - 2016-01-19
+==================
+- Work under latest virtualenv
+- No longer create empty directories on windows with latest virtualenv
+
+0.7.5 - 2016-01-15
+==================
+- Consider dead symlinks as files when committing
+
+0.7.4 - 2016-01-12
+==================
+- Produce error message instead of crashing on non-utf8 installation failure
+
+0.7.3 - 2015-12-22
+==================
+- Fix regression introduced in 0.7.1 breaking `git commit -a`
+
+0.7.2 - 2015-12-22
+==================
+- Add `always_run` setting for hooks to run even without file changes.
+
+0.7.1 - 2015-12-19
+==================
+- Support running pre-commit inside submodules
+
+0.7.0 - 2015-12-13
+==================
+- Store state about additional_dependencies for rollforward/rollback compatibility
+
+0.6.8 - 2015-12-07
+==================
+- Build as a universal wheel
+- Allow '.format('-like strings in arguments
+- Add an option to require a minimum pre-commit version
+
+0.6.7 - 2015-12-02
+==================
+- Print a useful message when a hook id is not present
+- Fix printing of non-ascii with unexpected errors
+- Print a message when a hook modifies files but produces no output
+
+0.6.6 - 2015-11-25
+==================
+- Add `additional_dependencies` to hook configuration.
+- Fix pre-commit cloning under git 2.6
+- Small improvements for windows
+
+0.6.5 - 2015-11-19
+==================
+- Allow args for pcre hooks
+
+0.6.4 - 2015-11-13
+==================
+- Fix regression introduced in 0.6.3 regarding hooks which make non-utf8 diffs
+
+0.6.3 - 2015-11-12
+==================
+- Remove `expected_return_code`
+- Fail a hook if it makes modifications to the working directory
+
+0.6.2 - 2015-10-14
+==================
+- Use --no-ri --no-rdoc instead of --no-document for gem to fix old gem
+
+0.6.1 - 2015-10-08
+==================
+- Fix pre-push when pushing something that's already up to date
+
+0.6.0 - 2015-10-05
+==================
+- Filter hooks by stage (commit, push).
+
+0.5.5 - 2015-09-04
+==================
+- Change permissions of a few files
+- Rename the validate entrypoints
+- Add --version to some entrypoints
+- Add --no-document to gem installations
+- Use expanduser when finding the python binary
+- Suppress complaint about $TERM when no tty is attached
+- Support pcre hooks on osx through ggrep
+
+0.5.4 - 2015-07-24
+==================
+- Allow hooks to produce outputs with arbitrary bytes
+- Fix pre-commit install when .git/hooks/pre-commit is a dead symlink
+- Allow an unstaged config when using --files or --all-files
+
+0.5.3 - 2015-06-15
+==================
+- Fix autoupdate with "local" hooks - don't purge local hooks.
+
+0.5.2 - 2015-06-02
+==================
+- Fix autoupdate with "local" hooks
+
+0.5.1 - 2015-05-23
+==================
+- Fix bug with unknown non-ascii hook-id
+- Avoid crash when .git/hooks is not present in some git clients
+
+0.5.0 - 2015-05-19
+==================
+- Add a new "local" hook type for running hooks without remote configuration.
+- Complain loudly when .pre-commit-config.yaml is unstaged.
+- Better support for multiple language versions when running hooks.
+- Allow exclude to be defaulted in repository configuration.
+
+0.4.4 - 2015-03-29
+==================
+- Use sys.executable when executing virtualenv
+
+0.4.3 - 2015-03-25
+==================
+- Use reset instead of checkout when checking out the hook repo
+
+0.4.2 - 2015-02-27
+==================
+- Limit length of xargs arguments to workaround windows xargs bug
+
+0.4.1 - 2015-02-27
+==================
+- Don't rename across devices when creating sqlite database
+
+0.4.0 - 2015-02-27
+==================
+- Make ^C^C during installation not cause all subsequent runs to fail
+- Print while installing (instead of while cloning)
+- Use sqlite to manage repositories (instead of symlinks)
+- MVP Windows support
+
+0.3.6 - 2015-02-05
+==================
+- `args` in venv'd languages are now properly quoted.
+
+0.3.5 - 2015-01-15
+==================
+- Support running during `pre-push`. See https://pre-commit.com/#advanced 'pre-commit during push'.
+
+0.3.4 - 2015-01-13
+==================
+- Allow hook providers to default `args` in `hooks.yaml`
+
+0.3.3 - 2015-01-06
+==================
+- Improve message for `CalledProcessError`
+
+0.3.2 - 2014-10-07
+==================
+- Fix for `staged_files_only` with color.diff = always #176.
+
+0.3.1 - 2014-10-03
+==================
+- Fix error clobbering #174.
+- Remove dependency on `plumbum`.
+- Allow pre-commit to be run from anywhere in a repository #175.
+
+0.3.0 - 2014-09-18
+==================
+- Add `--files` option to `pre-commit run`
+
+0.2.11 - 2014-09-05
+===================
+- Fix terminal width detection (broken in 0.2.10)
+
+0.2.10 - 2014-09-04
+===================
+- Bump version of nodeenv to fix bug with ~/.npmrc
+- Choose `python` more intelligently when running.
+
+0.2.9 - 2014-09-02
+==================
+- Fix bug where sys.stdout.write must take `bytes` in python 2.6
+
+0.2.8 - 2014-08-13
+==================
+- Allow a client to have duplicates of hooks.
+- Use --prebuilt instead of system for node.
+- Improve some fatal error messages
+
+0.2.7 - 2014-07-28
+==================
+- Produce output when running pre-commit install --install-hooks
+
+0.2.6 - 2014-07-28
+==================
+- Print hookid on failure
+- Use sys.executable for running nodeenv
+- Allow running as `python -m pre_commit`
+
+0.2.5 - 2014-07-17
+==================
+- Default columns to 80 (for non-terminal execution).
+
+0.2.4 - 2014-07-07
+==================
+- Support --install-hooks as an argument to `pre-commit install`
+- Install hooks before attempting to run anything
+- Use `python -m nodeenv` instead of `nodeenv`
+
+0.2.3 - 2014-06-25
+==================
+- Freeze ruby building infrastructure
+- Fix bug that assumed diffs were utf-8
+
+0.2.2 - 2014-06-22
+==================
+- Fix filenames with spaces
+
+0.2.1 - 2014-06-18
+==================
+- Use either `pre-commit` or `python -m pre_commit.main` depending on which is
+ available
+- Don't use readlink -f
+
+0.2.0 - 2014-06-17
+==================
+- Fix for merge-conflict during cherry-picking.
+- Add -V / --version
+- Add migration install mode / install -f / --overwrite
+- Add `pcre` "language" for perl compatible regexes
+- Reorganize packages.
+
+0.1.1 - 2014-06-11
+==================
+- Fixed bug with autoupdate setting defaults on un-updated repos.
+
+0.1.0 - 2014-06-07
+==================
+- Initial Release
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..2b83c82
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,146 @@
+# Contributing
+
+## Local development
+
+- The complete test suite depends on having at least the following installed
+ (possibly not a complete list)
+ - git (a sufficiently new version is required to run pre-push tests)
+ - python2 (Required by a test which checks different python versions)
+ - python3 (Required by a test which checks different python versions)
+ - tox (or virtualenv)
+ - ruby + gem
+ - docker
+
+### Setting up an environment
+
+This is useful for running specific tests. The easiest way to set this up
+is to run:
+
+1. `tox --devenv venv` (note: requires tox>=3.13)
+2. `. venv/bin/activate`
+
+This will create and put you into a virtualenv which has an editable
+installation of pre-commit. Hack away! Running `pre-commit` will reflect
+your changes immediately.
+
+### Running a specific test
+
+Running a specific test with the environment activated is as easy as:
+`pytest tests -k test_the_name_of_your_test`
+
+### Running all the tests
+
+Running all the tests can be done by running `tox -e py37` (or your
+interpreter version of choice). These often take a long time and consume
+significant cpu while running the slower node / ruby integration tests.
+
+Alternatively, with the environment activated you can run all of the tests
+using:
+`pytest tests`
+
+### Setting up the hooks
+
+With the environment activated simply run `pre-commit install`.
+
+## Documentation
+
+Documentation is hosted at https://pre-commit.com
+
+This website is controlled through
+https://github.com/pre-commit/pre-commit.github.io
+
+## Adding support for a new hook language
+
+pre-commit already supports many [programming languages](https://pre-commit.com/#supported-languages)
+to write hook executables with.
+
+When adding support for a language, you must first decide what level of support
+to implement. The current implemented languages are at varying levels:
+
+- 0th class - pre-commit does not require any dependencies for these languages
+ as they're not actually languages (current examples: fail, pygrep)
+- 1st class - pre-commit will bootstrap a full interpreter requiring nothing to
+ be installed globally (current examples: node, ruby)
+- 2nd class - pre-commit requires the user to install the language globally but
+ will install tools in an isolated fashion (current examples: python, go, rust,
+ swift, docker).
+- 3rd class - pre-commit requires the user to install both the tool and the
+ language globally (current examples: script, system)
+
+"third class" is usually the easiest to implement first and is perfectly
+acceptable.
+
+Ideally the language works on all platforms supported by pre-commit (linux,
+windows, macos), but it's ok to skip one or more platforms (for example, swift
+doesn't run on windows).
+
+When writing your new language, it's often useful to look at other examples in
+the `pre_commit/languages` directory.
+
+It might also be useful to look at a recent pull request which added a
+language, for example:
+
+- [rust](https://github.com/pre-commit/pre-commit/pull/751)
+- [fail](https://github.com/pre-commit/pre-commit/pull/812)
+- [swift](https://github.com/pre-commit/pre-commit/pull/467)
+
+### `language` api
+
+Here are the APIs that should be implemented for a language.
+
+Note that these are also documented in [`pre_commit/languages/all.py`](https://github.com/pre-commit/pre-commit/blob/master/pre_commit/languages/all.py)
+
+#### `ENVIRONMENT_DIR`
+
+A short string used as the prefix of the directory where packages will be
+installed. For example, python uses `py_env` and installs a `virtualenv` at
+that location.
+
+This will be `None` for 0th / 3rd class languages as they don't have an install
+step.
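+
+For example (the python value is taken from the prose above; the `None` case is
+shown just to make the shape concrete):
+
+```python
+# pre_commit/languages/python.py
+ENVIRONMENT_DIR = 'py_env'
+
+# a 0th / 3rd class language simply declares
+# ENVIRONMENT_DIR = None
+```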
+
+#### `get_default_version`
+
+This is used to retrieve the default `language_version` for a language. If
+one cannot be determined, return `'default'`.
+
+You generally don't need to implement this on a first pass and can just use:
+
+```python
+get_default_version = helpers.basic_default_version
+```
+
+`python` is currently the only language which implements this API.
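+
+If you do want a smarter default later, a sketch along these lines could probe
+`PATH` (the executable names below are hypothetical and not pre-commit's actual
+logic; treat it as illustrative only):
+
+```python
+import shutil
+
+import pre_commit.constants as C
+
+
+def get_default_version() -> str:
+    # hypothetical: prefer a versioned interpreter found on PATH, otherwise
+    # fall back to the 'default' sentinel understood by pre-commit
+    for exe in ('mylang3', 'mylang'):
+        if shutil.which(exe):
+            return exe
+    return C.DEFAULT
+```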
+
+#### `healthy`
+
+This is used to check whether the installed environment is considered healthy.
+This function should return `True` or `False`.
+
+You generally don't need to implement this on a first pass and can just use:
+
+```python
+healthy = helpers.basic_healthy
+```
+
+`python` is currently the only language which implements this API; for python
+it checks whether some common DLLs are still available.
+
+#### `install_environment`
+
+This is the trickiest one to implement and is where most of the work happens.
+
+This API should do the following things (a sketch follows the list):
+
+- (0th / 3rd class): `install_environment = helpers.no_install`
+- (1st class): install a language runtime into the hook's directory
+- (2nd class): install the package at `.` into the `ENVIRONMENT_DIR`
+- (2nd class, optional): install packages listed in `additional_dependencies`
+ into `ENVIRONMENT_DIR` (not a required feature for a first pass)
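+
+As a rough sketch for a hypothetical 2nd class language (the package manager
+`mylang-pkg` and the module name are made up; the helpers shown are the ones
+other language modules use, but treat the exact calls as illustrative):
+
+```python
+from typing import Sequence
+
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import clean_path_on_failure
+
+ENVIRONMENT_DIR = 'mylang_env'
+get_default_version = helpers.basic_default_version
+healthy = helpers.basic_healthy
+
+
+def install_environment(
+        prefix: Prefix,
+        version: str,
+        additional_dependencies: Sequence[str],
+) -> None:
+    helpers.assert_version_default('mylang', version)
+    directory = prefix.path(
+        helpers.environment_dir(ENVIRONMENT_DIR, version),
+    )
+    with clean_path_on_failure(directory):
+        # install the hook repository itself (the package at `.`) into the
+        # isolated environment directory
+        helpers.run_setup_cmd(
+            prefix, ('mylang-pkg', 'install', '--target', directory, '.'),
+        )
+        # optionally install additional_dependencies the same way
+        if additional_dependencies:
+            helpers.run_setup_cmd(
+                prefix,
+                ('mylang-pkg', 'install', '--target', directory,
+                 *additional_dependencies),
+            )
+```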
+
+#### `run_hook`
+
+This is usually the easiest to implement; most implementations look the same as
+the `node` hook implementation:
+
+https://github.com/pre-commit/pre-commit/blob/160238220f022035c8ef869c9a8642f622c02118/pre_commit/languages/node.py#L72-L74
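+
+A minimal sketch for a language with no managed environment (this mirrors the
+simplest existing implementations; treat it as illustrative rather than a
+drop-in module):
+
+```python
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+
+
+def run_hook(
+        hook: Hook,
+        file_args: Sequence[str],
+        color: bool,
+) -> Tuple[int, bytes]:
+    # run the hook's entry + args against the filenames, letting the xargs
+    # helper batch the command to respect argument-length limits
+    return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
+```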
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..4a071fc
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2014 pre-commit dev team: Anthony Sottile, Ken Struys
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..98a6d00
--- /dev/null
+++ b/README.md
@@ -0,0 +1,9 @@
+[![Build Status](https://dev.azure.com/asottile/asottile/_apis/build/status/pre-commit.pre-commit?branchName=master)](https://dev.azure.com/asottile/asottile/_build/latest?definitionId=21&branchName=master)
+[![Azure DevOps coverage](https://img.shields.io/azure-devops/coverage/asottile/asottile/21/master.svg)](https://dev.azure.com/asottile/asottile/_build/latest?definitionId=21&branchName=master)
+[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit)
+
+## pre-commit
+
+A framework for managing and maintaining multi-language pre-commit hooks.
+
+For more information see: https://pre-commit.com/
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 0000000..9b385b4
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,50 @@
+trigger:
+ branches:
+ include: [master, test-me-*]
+ tags:
+ include: ['*']
+
+resources:
+ repositories:
+ - repository: asottile
+ type: github
+ endpoint: github
+ name: asottile/azure-pipeline-templates
+ ref: refs/tags/v1.0.0
+
+jobs:
+- template: job--pre-commit.yml@asottile
+- template: job--python-tox.yml@asottile
+ parameters:
+ toxenvs: [py37]
+ os: windows
+ pre_test:
+ - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
+ displayName: Add conda to PATH
+ - powershell: |
+ Write-Host "##vso[task.prependpath]C:\Strawberry\perl\bin"
+ Write-Host "##vso[task.prependpath]C:\Strawberry\perl\site\bin"
+ Write-Host "##vso[task.prependpath]C:\Strawberry\c\bin"
+ displayName: Add strawberry perl to PATH
+- template: job--python-tox.yml@asottile
+ parameters:
+ toxenvs: [py37]
+ os: linux
+ name_postfix: _latest_git
+ pre_test:
+ - task: UseRubyVersion@0
+ - template: step--git-install.yml
+ - bash: |
+ testing/get-swift.sh
+ echo '##vso[task.prependpath]/tmp/swift/usr/bin'
+ displayName: install swift
+- template: job--python-tox.yml@asottile
+ parameters:
+ toxenvs: [pypy3, py36, py37, py38]
+ os: linux
+ pre_test:
+ - task: UseRubyVersion@0
+ - bash: |
+ testing/get-swift.sh
+ echo '##vso[task.prependpath]/tmp/swift/usr/bin'
+ displayName: install swift
diff --git a/pre_commit/__init__.py b/pre_commit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pre_commit/__init__.py
diff --git a/pre_commit/__main__.py b/pre_commit/__main__.py
new file mode 100644
index 0000000..5414068
--- /dev/null
+++ b/pre_commit/__main__.py
@@ -0,0 +1,5 @@
+from pre_commit.main import main
+
+
+if __name__ == '__main__':
+ exit(main())
diff --git a/pre_commit/clientlib.py b/pre_commit/clientlib.py
new file mode 100644
index 0000000..56ec0dd
--- /dev/null
+++ b/pre_commit/clientlib.py
@@ -0,0 +1,317 @@
+import argparse
+import functools
+import logging
+import shlex
+import sys
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import Sequence
+
+import cfgv
+from identify.identify import ALL_TAGS
+
+import pre_commit.constants as C
+from pre_commit.error_handler import FatalError
+from pre_commit.languages.all import all_languages
+from pre_commit.util import parse_version
+from pre_commit.util import yaml_load
+
+logger = logging.getLogger('pre_commit')
+
+check_string_regex = cfgv.check_and(cfgv.check_string, cfgv.check_regex)
+
+
+def check_type_tag(tag: str) -> None:
+ if tag not in ALL_TAGS:
+ raise cfgv.ValidationError(
+ f'Type tag {tag!r} is not recognized. '
+ f'Try upgrading identify and pre-commit?',
+ )
+
+
+def check_min_version(version: str) -> None:
+ if parse_version(version) > parse_version(C.VERSION):
+ raise cfgv.ValidationError(
+ f'pre-commit version {version} is required but version '
+ f'{C.VERSION} is installed. '
+ f'Perhaps run `pip install --upgrade pre-commit`.',
+ )
+
+
+def _make_argparser(filenames_help: str) -> argparse.ArgumentParser:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('filenames', nargs='*', help=filenames_help)
+ parser.add_argument('-V', '--version', action='version', version=C.VERSION)
+ return parser
+
+
+MANIFEST_HOOK_DICT = cfgv.Map(
+ 'Hook', 'id',
+
+ cfgv.Required('id', cfgv.check_string),
+ cfgv.Required('name', cfgv.check_string),
+ cfgv.Required('entry', cfgv.check_string),
+ cfgv.Required('language', cfgv.check_one_of(all_languages)),
+ cfgv.Optional('alias', cfgv.check_string, ''),
+
+ cfgv.Optional('files', check_string_regex, ''),
+ cfgv.Optional('exclude', check_string_regex, '^$'),
+ cfgv.Optional('types', cfgv.check_array(check_type_tag), ['file']),
+ cfgv.Optional('exclude_types', cfgv.check_array(check_type_tag), []),
+
+ cfgv.Optional(
+ 'additional_dependencies', cfgv.check_array(cfgv.check_string), [],
+ ),
+ cfgv.Optional('args', cfgv.check_array(cfgv.check_string), []),
+ cfgv.Optional('always_run', cfgv.check_bool, False),
+ cfgv.Optional('pass_filenames', cfgv.check_bool, True),
+ cfgv.Optional('description', cfgv.check_string, ''),
+ cfgv.Optional('language_version', cfgv.check_string, C.DEFAULT),
+ cfgv.Optional('log_file', cfgv.check_string, ''),
+ cfgv.Optional('minimum_pre_commit_version', cfgv.check_string, '0'),
+ cfgv.Optional('require_serial', cfgv.check_bool, False),
+ cfgv.Optional('stages', cfgv.check_array(cfgv.check_one_of(C.STAGES)), []),
+ cfgv.Optional('verbose', cfgv.check_bool, False),
+)
+MANIFEST_SCHEMA = cfgv.Array(MANIFEST_HOOK_DICT)
+
+
+class InvalidManifestError(FatalError):
+ pass
+
+
+load_manifest = functools.partial(
+ cfgv.load_from_filename,
+ schema=MANIFEST_SCHEMA,
+ load_strategy=yaml_load,
+ exc_tp=InvalidManifestError,
+)
+
+
+def validate_manifest_main(argv: Optional[Sequence[str]] = None) -> int:
+ parser = _make_argparser('Manifest filenames.')
+ args = parser.parse_args(argv)
+ ret = 0
+ for filename in args.filenames:
+ try:
+ load_manifest(filename)
+ except InvalidManifestError as e:
+ print(e)
+ ret = 1
+ return ret
+
+
+LOCAL = 'local'
+META = 'meta'
+
+
+class MigrateShaToRev:
+ key = 'rev'
+
+ @staticmethod
+ def _cond(key: str) -> cfgv.Conditional:
+ return cfgv.Conditional(
+ key, cfgv.check_string,
+ condition_key='repo',
+ condition_value=cfgv.NotIn(LOCAL, META),
+ ensure_absent=True,
+ )
+
+ def check(self, dct: Dict[str, Any]) -> None:
+ if dct.get('repo') in {LOCAL, META}:
+ self._cond('rev').check(dct)
+ self._cond('sha').check(dct)
+ elif 'sha' in dct and 'rev' in dct:
+ raise cfgv.ValidationError('Cannot specify both sha and rev')
+ elif 'sha' in dct:
+ self._cond('sha').check(dct)
+ else:
+ self._cond('rev').check(dct)
+
+ def apply_default(self, dct: Dict[str, Any]) -> None:
+ if 'sha' in dct:
+ dct['rev'] = dct.pop('sha')
+
+ remove_default = cfgv.Required.remove_default
+
+
+def _entry(modname: str) -> str:
+ """the hook `entry` is passed through `shlex.split()` by the command
+ runner, so to prevent issues with spaces and backslashes (on Windows)
+ it must be quoted here.
+ """
+ return f'{shlex.quote(sys.executable)} -m pre_commit.meta_hooks.{modname}'
+
+
+def warn_unknown_keys_root(
+ extra: Sequence[str],
+ orig_keys: Sequence[str],
+ dct: Dict[str, str],
+) -> None:
+ logger.warning(f'Unexpected key(s) present at root: {", ".join(extra)}')
+
+
+def warn_unknown_keys_repo(
+ extra: Sequence[str],
+ orig_keys: Sequence[str],
+ dct: Dict[str, str],
+) -> None:
+ logger.warning(
+ f'Unexpected key(s) present on {dct["repo"]}: {", ".join(extra)}',
+ )
+
+
+_meta = (
+ (
+ 'check-hooks-apply', (
+ ('name', 'Check hooks apply to the repository'),
+ ('files', C.CONFIG_FILE),
+ ('entry', _entry('check_hooks_apply')),
+ ),
+ ),
+ (
+ 'check-useless-excludes', (
+ ('name', 'Check for useless excludes'),
+ ('files', C.CONFIG_FILE),
+ ('entry', _entry('check_useless_excludes')),
+ ),
+ ),
+ (
+ 'identity', (
+ ('name', 'identity'),
+ ('verbose', True),
+ ('entry', _entry('identity')),
+ ),
+ ),
+)
+
+META_HOOK_DICT = cfgv.Map(
+ 'Hook', 'id',
+ cfgv.Required('id', cfgv.check_string),
+ cfgv.Required('id', cfgv.check_one_of(tuple(k for k, _ in _meta))),
+ # language must be system
+ cfgv.Optional('language', cfgv.check_one_of({'system'}), 'system'),
+ *(
+ # default to the hook definition for the meta hooks
+ cfgv.ConditionalOptional(key, cfgv.check_any, value, 'id', hook_id)
+ for hook_id, values in _meta
+ for key, value in values
+ ),
+ *(
+ # default to the "manifest" parsing
+ cfgv.OptionalNoDefault(item.key, item.check_fn)
+ # these will always be defaulted above
+ if item.key in {'name', 'language', 'entry'} else
+ item
+ for item in MANIFEST_HOOK_DICT.items
+ ),
+)
+CONFIG_HOOK_DICT = cfgv.Map(
+ 'Hook', 'id',
+
+ cfgv.Required('id', cfgv.check_string),
+
+ # All keys in manifest hook dict are valid in a config hook dict, but
+ # are optional.
+ # No defaults are provided here as the config is merged on top of the
+ # manifest.
+ *(
+ cfgv.OptionalNoDefault(item.key, item.check_fn)
+ for item in MANIFEST_HOOK_DICT.items
+ if item.key != 'id'
+ ),
+)
+CONFIG_REPO_DICT = cfgv.Map(
+ 'Repository', 'repo',
+
+ cfgv.Required('repo', cfgv.check_string),
+
+ cfgv.ConditionalRecurse(
+ 'hooks', cfgv.Array(CONFIG_HOOK_DICT),
+ 'repo', cfgv.NotIn(LOCAL, META),
+ ),
+ cfgv.ConditionalRecurse(
+ 'hooks', cfgv.Array(MANIFEST_HOOK_DICT),
+ 'repo', LOCAL,
+ ),
+ cfgv.ConditionalRecurse(
+ 'hooks', cfgv.Array(META_HOOK_DICT),
+ 'repo', META,
+ ),
+
+ MigrateShaToRev(),
+ cfgv.WarnAdditionalKeys(('repo', 'rev', 'hooks'), warn_unknown_keys_repo),
+)
+DEFAULT_LANGUAGE_VERSION = cfgv.Map(
+ 'DefaultLanguageVersion', None,
+ cfgv.NoAdditionalKeys(all_languages),
+ *(cfgv.Optional(x, cfgv.check_string, C.DEFAULT) for x in all_languages),
+)
+CONFIG_SCHEMA = cfgv.Map(
+ 'Config', None,
+
+ cfgv.RequiredRecurse('repos', cfgv.Array(CONFIG_REPO_DICT)),
+ cfgv.OptionalRecurse(
+ 'default_language_version', DEFAULT_LANGUAGE_VERSION, {},
+ ),
+ cfgv.Optional(
+ 'default_stages',
+ cfgv.check_array(cfgv.check_one_of(C.STAGES)),
+ C.STAGES,
+ ),
+ cfgv.Optional('files', check_string_regex, ''),
+ cfgv.Optional('exclude', check_string_regex, '^$'),
+ cfgv.Optional('fail_fast', cfgv.check_bool, False),
+ cfgv.Optional(
+ 'minimum_pre_commit_version',
+ cfgv.check_and(cfgv.check_string, check_min_version),
+ '0',
+ ),
+ cfgv.WarnAdditionalKeys(
+ (
+ 'repos',
+ 'default_language_version',
+ 'default_stages',
+ 'files',
+ 'exclude',
+ 'fail_fast',
+ 'minimum_pre_commit_version',
+ ),
+ warn_unknown_keys_root,
+ ),
+)
+
+
+class InvalidConfigError(FatalError):
+ pass
+
+
+def ordered_load_normalize_legacy_config(contents: str) -> Dict[str, Any]:
+ data = yaml_load(contents)
+ if isinstance(data, list):
+ # TODO: Once happy, issue a deprecation warning and instructions
+ return {'repos': data}
+ else:
+ return data
+
+
+load_config = functools.partial(
+ cfgv.load_from_filename,
+ schema=CONFIG_SCHEMA,
+ load_strategy=ordered_load_normalize_legacy_config,
+ exc_tp=InvalidConfigError,
+)
+
+
+def validate_config_main(argv: Optional[Sequence[str]] = None) -> int:
+ parser = _make_argparser('Config filenames.')
+ args = parser.parse_args(argv)
+ ret = 0
+ for filename in args.filenames:
+ try:
+ load_config(filename)
+ except InvalidConfigError as e:
+ print(e)
+ ret = 1
+ return ret
diff --git a/pre_commit/color.py b/pre_commit/color.py
new file mode 100644
index 0000000..5fa7042
--- /dev/null
+++ b/pre_commit/color.py
@@ -0,0 +1,97 @@
+import os
+import sys
+
+if sys.platform == 'win32': # pragma: no cover (windows)
+ def _enable() -> None:
+ from ctypes import POINTER
+ from ctypes import windll
+ from ctypes import WinError
+ from ctypes import WINFUNCTYPE
+ from ctypes.wintypes import BOOL
+ from ctypes.wintypes import DWORD
+ from ctypes.wintypes import HANDLE
+
+ STD_OUTPUT_HANDLE = -11
+ ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
+
+ def bool_errcheck(result, func, args):
+ if not result:
+ raise WinError()
+ return args
+
+ GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(
+ ('GetStdHandle', windll.kernel32), ((1, 'nStdHandle'),),
+ )
+
+ GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(
+ ('GetConsoleMode', windll.kernel32),
+ ((1, 'hConsoleHandle'), (2, 'lpMode')),
+ )
+ GetConsoleMode.errcheck = bool_errcheck
+
+ SetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, DWORD)(
+ ('SetConsoleMode', windll.kernel32),
+ ((1, 'hConsoleHandle'), (1, 'dwMode')),
+ )
+ SetConsoleMode.errcheck = bool_errcheck
+
+ # As of Windows 10, the Windows console supports (some) ANSI escape
+ # sequences, but it needs to be enabled using `SetConsoleMode` first.
+ #
+ # More info on the escape sequences supported:
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx
+ stdout = GetStdHandle(STD_OUTPUT_HANDLE)
+ flags = GetConsoleMode(stdout)
+ SetConsoleMode(stdout, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+
+ try:
+ _enable()
+ except OSError:
+ terminal_supports_color = False
+ else:
+ terminal_supports_color = True
+else: # pragma: win32 no cover
+ terminal_supports_color = True
+
+RED = '\033[41m'
+GREEN = '\033[42m'
+YELLOW = '\033[43;30m'
+TURQUOISE = '\033[46;30m'
+SUBTLE = '\033[2m'
+NORMAL = '\033[m'
+
+
+def format_color(text: str, color: str, use_color_setting: bool) -> str:
+ """Format text with color.
+
+ Args:
+ text - Text to be formatted with color if `use_color`
+ color - The color start string
+ use_color_setting - Whether or not to color
+ """
+ if use_color_setting:
+ return f'{color}{text}{NORMAL}'
+ else:
+ return text
+
+
+COLOR_CHOICES = ('auto', 'always', 'never')
+
+
+def use_color(setting: str) -> bool:
+ """Choose whether to use color based on the command argument.
+
+ Args:
+ setting - Either `auto`, `always`, or `never`
+ """
+ if setting not in COLOR_CHOICES:
+ raise ValueError(setting)
+
+ return (
+ setting == 'always' or (
+ setting == 'auto' and
+ sys.stdout.isatty() and
+ terminal_supports_color and
+ os.getenv('TERM') != 'dumb'
+ )
+ )
diff --git a/pre_commit/commands/__init__.py b/pre_commit/commands/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pre_commit/commands/__init__.py
diff --git a/pre_commit/commands/autoupdate.py b/pre_commit/commands/autoupdate.py
new file mode 100644
index 0000000..5a9a988
--- /dev/null
+++ b/pre_commit/commands/autoupdate.py
@@ -0,0 +1,182 @@
+import os.path
+import re
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import NamedTuple
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit import output
+from pre_commit.clientlib import InvalidManifestError
+from pre_commit.clientlib import load_config
+from pre_commit.clientlib import load_manifest
+from pre_commit.clientlib import LOCAL
+from pre_commit.clientlib import META
+from pre_commit.commands.migrate_config import migrate_config
+from pre_commit.store import Store
+from pre_commit.util import CalledProcessError
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+from pre_commit.util import tmpdir
+from pre_commit.util import yaml_dump
+from pre_commit.util import yaml_load
+
+
+class RevInfo(NamedTuple):
+ repo: str
+ rev: str
+ frozen: Optional[str]
+
+ @classmethod
+ def from_config(cls, config: Dict[str, Any]) -> 'RevInfo':
+ return cls(config['repo'], config['rev'], None)
+
+ def update(self, tags_only: bool, freeze: bool) -> 'RevInfo':
+ if tags_only:
+ tag_cmd = ('git', 'describe', 'FETCH_HEAD', '--tags', '--abbrev=0')
+ else:
+ tag_cmd = ('git', 'describe', 'FETCH_HEAD', '--tags', '--exact')
+
+ with tmpdir() as tmp:
+ git.init_repo(tmp, self.repo)
+ cmd_output_b('git', 'fetch', 'origin', 'HEAD', '--tags', cwd=tmp)
+
+ try:
+ rev = cmd_output(*tag_cmd, cwd=tmp)[1].strip()
+ except CalledProcessError:
+ cmd = ('git', 'rev-parse', 'FETCH_HEAD')
+ rev = cmd_output(*cmd, cwd=tmp)[1].strip()
+
+ frozen = None
+ if freeze:
+ exact = cmd_output('git', 'rev-parse', rev, cwd=tmp)[1].strip()
+ if exact != rev:
+ rev, frozen = exact, rev
+ return self._replace(rev=rev, frozen=frozen)
+
+
+class RepositoryCannotBeUpdatedError(RuntimeError):
+ pass
+
+
+def _check_hooks_still_exist_at_rev(
+ repo_config: Dict[str, Any],
+ info: RevInfo,
+ store: Store,
+) -> None:
+ try:
+ path = store.clone(repo_config['repo'], info.rev)
+ manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE))
+ except InvalidManifestError as e:
+ raise RepositoryCannotBeUpdatedError(str(e))
+
+ # See if any of our hooks were deleted with the new commits
+ hooks = {hook['id'] for hook in repo_config['hooks']}
+ hooks_missing = hooks - {hook['id'] for hook in manifest}
+ if hooks_missing:
+ raise RepositoryCannotBeUpdatedError(
+ f'Cannot update because the tip of HEAD is missing these hooks:\n'
+ f'{", ".join(sorted(hooks_missing))}',
+ )
+
+
+REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([^\s#]+)(.*)(\r?\n)$', re.DOTALL)
+
+
+def _original_lines(
+ path: str,
+ rev_infos: List[Optional[RevInfo]],
+ retry: bool = False,
+) -> Tuple[List[str], List[int]]:
+ """detect `rev:` lines or reformat the file"""
+ with open(path) as f:
+ original = f.read()
+
+ lines = original.splitlines(True)
+ idxs = [i for i, line in enumerate(lines) if REV_LINE_RE.match(line)]
+ if len(idxs) == len(rev_infos):
+ return lines, idxs
+ elif retry:
+ raise AssertionError('could not find rev lines')
+ else:
+ with open(path, 'w') as f:
+ f.write(yaml_dump(yaml_load(original)))
+ return _original_lines(path, rev_infos, retry=True)
+
+
+def _write_new_config(path: str, rev_infos: List[Optional[RevInfo]]) -> None:
+ lines, idxs = _original_lines(path, rev_infos)
+
+ for idx, rev_info in zip(idxs, rev_infos):
+ if rev_info is None:
+ continue
+ match = REV_LINE_RE.match(lines[idx])
+ assert match is not None
+ new_rev_s = yaml_dump({'rev': rev_info.rev})
+ new_rev = new_rev_s.split(':', 1)[1].strip()
+ if rev_info.frozen is not None:
+ comment = f' # frozen: {rev_info.frozen}'
+ elif match[4].strip().startswith('# frozen:'):
+ comment = ''
+ else:
+ comment = match[4]
+ lines[idx] = f'{match[1]}rev:{match[2]}{new_rev}{comment}{match[5]}'
+
+ with open(path, 'w') as f:
+ f.write(''.join(lines))
+
+
+def autoupdate(
+ config_file: str,
+ store: Store,
+ tags_only: bool,
+ freeze: bool,
+ repos: Sequence[str] = (),
+) -> int:
+ """Auto-update the pre-commit config to the latest versions of repos."""
+ migrate_config(config_file, quiet=True)
+ retv = 0
+ rev_infos: List[Optional[RevInfo]] = []
+ changed = False
+
+ config = load_config(config_file)
+ for repo_config in config['repos']:
+ if repo_config['repo'] in {LOCAL, META}:
+ continue
+
+ info = RevInfo.from_config(repo_config)
+ if repos and info.repo not in repos:
+ rev_infos.append(None)
+ continue
+
+ output.write(f'Updating {info.repo} ... ')
+ new_info = info.update(tags_only=tags_only, freeze=freeze)
+ try:
+ _check_hooks_still_exist_at_rev(repo_config, new_info, store)
+ except RepositoryCannotBeUpdatedError as error:
+ output.write_line(error.args[0])
+ rev_infos.append(None)
+ retv = 1
+ continue
+
+ if new_info.rev != info.rev:
+ changed = True
+ if new_info.frozen:
+ updated_to = f'{new_info.frozen} (frozen)'
+ else:
+ updated_to = new_info.rev
+ msg = f'updating {info.rev} -> {updated_to}.'
+ output.write_line(msg)
+ rev_infos.append(new_info)
+ else:
+ output.write_line('already up to date.')
+ rev_infos.append(None)
+
+ if changed:
+ _write_new_config(config_file, rev_infos)
+
+ return retv
diff --git a/pre_commit/commands/clean.py b/pre_commit/commands/clean.py
new file mode 100644
index 0000000..2be6c16
--- /dev/null
+++ b/pre_commit/commands/clean.py
@@ -0,0 +1,14 @@
+import os.path
+
+from pre_commit import output
+from pre_commit.store import Store
+from pre_commit.util import rmtree
+
+
+def clean(store: Store) -> int:
+ legacy_path = os.path.expanduser('~/.pre-commit')
+ for directory in (store.directory, legacy_path):
+ if os.path.exists(directory):
+ rmtree(directory)
+ output.write_line(f'Cleaned {directory}.')
+ return 0
diff --git a/pre_commit/commands/gc.py b/pre_commit/commands/gc.py
new file mode 100644
index 0000000..7f6d311
--- /dev/null
+++ b/pre_commit/commands/gc.py
@@ -0,0 +1,90 @@
+import os.path
+from typing import Any
+from typing import Dict
+from typing import Set
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit import output
+from pre_commit.clientlib import InvalidConfigError
+from pre_commit.clientlib import InvalidManifestError
+from pre_commit.clientlib import load_config
+from pre_commit.clientlib import load_manifest
+from pre_commit.clientlib import LOCAL
+from pre_commit.clientlib import META
+from pre_commit.store import Store
+
+
+def _mark_used_repos(
+ store: Store,
+ all_repos: Dict[Tuple[str, str], str],
+ unused_repos: Set[Tuple[str, str]],
+ repo: Dict[str, Any],
+) -> None:
+ if repo['repo'] == META:
+ return
+ elif repo['repo'] == LOCAL:
+ for hook in repo['hooks']:
+ deps = hook.get('additional_dependencies')
+ unused_repos.discard((
+ store.db_repo_name(repo['repo'], deps), C.LOCAL_REPO_VERSION,
+ ))
+ else:
+ key = (repo['repo'], repo['rev'])
+ path = all_repos.get(key)
+ # can't inspect manifest if it isn't cloned
+ if path is None:
+ return
+
+ try:
+ manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE))
+ except InvalidManifestError:
+ return
+ else:
+ unused_repos.discard(key)
+ by_id = {hook['id']: hook for hook in manifest}
+
+ for hook in repo['hooks']:
+ if hook['id'] not in by_id:
+ continue
+
+ deps = hook.get(
+ 'additional_dependencies',
+ by_id[hook['id']]['additional_dependencies'],
+ )
+ unused_repos.discard((
+ store.db_repo_name(repo['repo'], deps), repo['rev'],
+ ))
+
+
+def _gc_repos(store: Store) -> int:
+ configs = store.select_all_configs()
+ repos = store.select_all_repos()
+
+ # delete config paths which do not exist
+ dead_configs = [p for p in configs if not os.path.exists(p)]
+ live_configs = [p for p in configs if os.path.exists(p)]
+
+ all_repos = {(repo, ref): path for repo, ref, path in repos}
+ unused_repos = set(all_repos)
+ for config_path in live_configs:
+ try:
+ config = load_config(config_path)
+ except InvalidConfigError:
+ dead_configs.append(config_path)
+ continue
+ else:
+ for repo in config['repos']:
+ _mark_used_repos(store, all_repos, unused_repos, repo)
+
+ store.delete_configs(dead_configs)
+ for db_repo_name, ref in unused_repos:
+ store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)])
+ return len(unused_repos)
+
+
+def gc(store: Store) -> int:
+ with store.exclusive_lock():
+ repos_removed = _gc_repos(store)
+ output.write_line(f'{repos_removed} repo(s) removed.')
+ return 0
diff --git a/pre_commit/commands/hook_impl.py b/pre_commit/commands/hook_impl.py
new file mode 100644
index 0000000..5ff4555
--- /dev/null
+++ b/pre_commit/commands/hook_impl.py
@@ -0,0 +1,187 @@
+import argparse
+import os.path
+import subprocess
+import sys
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.commands.run import run
+from pre_commit.envcontext import envcontext
+from pre_commit.parse_shebang import normalize_cmd
+from pre_commit.store import Store
+
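+# git's all-zero object id; it appears on pre-push stdin for refs that do not
+# exist on the other side (newly-created or deleted branches)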
+Z40 = '0' * 40
+
+
+def _run_legacy(
+ hook_type: str,
+ hook_dir: str,
+ args: Sequence[str],
+) -> Tuple[int, bytes]:
+ if os.environ.get('PRE_COMMIT_RUNNING_LEGACY'):
+ raise SystemExit(
+ f"bug: pre-commit's script is installed in migration mode\n"
+ f'run `pre-commit install -f --hook-type {hook_type}` to fix '
+ f'this\n\n'
+ f'Please report this bug at '
+ f'https://github.com/pre-commit/pre-commit/issues',
+ )
+
+ if hook_type == 'pre-push':
+ stdin = sys.stdin.buffer.read()
+ else:
+ stdin = b''
+
+ # not running in legacy mode
+ legacy_hook = os.path.join(hook_dir, f'{hook_type}.legacy')
+ if not os.access(legacy_hook, os.X_OK):
+ return 0, stdin
+
+ with envcontext((('PRE_COMMIT_RUNNING_LEGACY', '1'),)):
+ cmd = normalize_cmd((legacy_hook, *args))
+ return subprocess.run(cmd, input=stdin).returncode, stdin
+
+
+def _validate_config(
+ retv: int,
+ config: str,
+ skip_on_missing_config: bool,
+) -> None:
+ if not os.path.isfile(config):
+ if skip_on_missing_config or os.getenv('PRE_COMMIT_ALLOW_NO_CONFIG'):
+ print(f'`{config}` config file not found. Skipping `pre-commit`.')
+ raise SystemExit(retv)
+ else:
+ print(
+ f'No {config} file was found\n'
+ f'- To temporarily silence this, run '
+ f'`PRE_COMMIT_ALLOW_NO_CONFIG=1 git ...`\n'
+ f'- To permanently silence this, install pre-commit with the '
+ f'--allow-missing-config option\n'
+ f'- To uninstall pre-commit run `pre-commit uninstall`',
+ )
+ raise SystemExit(1)
+
+
+def _ns(
+ hook_type: str,
+ color: bool,
+ *,
+ all_files: bool = False,
+ from_ref: Optional[str] = None,
+ to_ref: Optional[str] = None,
+ remote_name: Optional[str] = None,
+ remote_url: Optional[str] = None,
+ commit_msg_filename: Optional[str] = None,
+ checkout_type: Optional[str] = None,
+) -> argparse.Namespace:
+ return argparse.Namespace(
+ color=color,
+ hook_stage=hook_type.replace('pre-', ''),
+ from_ref=from_ref,
+ to_ref=to_ref,
+ remote_name=remote_name,
+ remote_url=remote_url,
+ commit_msg_filename=commit_msg_filename,
+ all_files=all_files,
+ checkout_type=checkout_type,
+ files=(),
+ hook=None,
+ verbose=False,
+ show_diff_on_failure=False,
+ )
+
+
+def _rev_exists(rev: str) -> bool:
+ return not subprocess.call(('git', 'rev-list', '--quiet', rev))
+
+
+def _pre_push_ns(
+ color: bool,
+ args: Sequence[str],
+ stdin: bytes,
+) -> Optional[argparse.Namespace]:
+ remote_name = args[0]
+ remote_url = args[1]
+
+ for line in stdin.decode().splitlines():
+ _, local_sha, _, remote_sha = line.split()
+ if local_sha == Z40:
+ continue
+ elif remote_sha != Z40 and _rev_exists(remote_sha):
+ return _ns(
+ 'pre-push', color,
+ from_ref=remote_sha, to_ref=local_sha,
+ remote_name=remote_name, remote_url=remote_url,
+ )
+ else:
+ # ancestors not found in remote
+ ancestors = subprocess.check_output((
+ 'git', 'rev-list', local_sha, '--topo-order', '--reverse',
+ '--not', f'--remotes={remote_name}',
+ )).decode().strip()
+ if not ancestors:
+ continue
+ else:
+ first_ancestor = ancestors.splitlines()[0]
+ cmd = ('git', 'rev-list', '--max-parents=0', local_sha)
+ roots = set(subprocess.check_output(cmd).decode().splitlines())
+ if first_ancestor in roots:
+ # pushing the whole tree including root commit
+ return _ns(
+ 'pre-push', color,
+ all_files=True,
+ remote_name=remote_name, remote_url=remote_url,
+ )
+ else:
+ rev_cmd = ('git', 'rev-parse', f'{first_ancestor}^')
+ source = subprocess.check_output(rev_cmd).decode().strip()
+ return _ns(
+ 'pre-push', color,
+ from_ref=source, to_ref=local_sha,
+ remote_name=remote_name, remote_url=remote_url,
+ )
+
+ # nothing to push
+ return None
+
+
+def _run_ns(
+ hook_type: str,
+ color: bool,
+ args: Sequence[str],
+ stdin: bytes,
+) -> Optional[argparse.Namespace]:
+ if hook_type == 'pre-push':
+ return _pre_push_ns(color, args, stdin)
+ elif hook_type in {'prepare-commit-msg', 'commit-msg'}:
+ return _ns(hook_type, color, commit_msg_filename=args[0])
+ elif hook_type in {'pre-merge-commit', 'pre-commit'}:
+ return _ns(hook_type, color)
+ elif hook_type == 'post-checkout':
+ return _ns(
+ hook_type, color,
+ from_ref=args[0], to_ref=args[1], checkout_type=args[2],
+ )
+ else:
+ raise AssertionError(f'unexpected hook type: {hook_type}')
+
+
+def hook_impl(
+ store: Store,
+ *,
+ config: str,
+ color: bool,
+ hook_type: str,
+ hook_dir: str,
+ skip_on_missing_config: bool,
+ args: Sequence[str],
+) -> int:
+ retv, stdin = _run_legacy(hook_type, hook_dir, args)
+ _validate_config(retv, config, skip_on_missing_config)
+ ns = _run_ns(hook_type, color, args, stdin)
+ if ns is None:
+ return retv
+ else:
+ return retv | run(config, store, ns)
diff --git a/pre_commit/commands/init_templatedir.py b/pre_commit/commands/init_templatedir.py
new file mode 100644
index 0000000..f676fb1
--- /dev/null
+++ b/pre_commit/commands/init_templatedir.py
@@ -0,0 +1,33 @@
+import logging
+import os.path
+from typing import Sequence
+
+from pre_commit.commands.install_uninstall import install
+from pre_commit.store import Store
+from pre_commit.util import CalledProcessError
+from pre_commit.util import cmd_output
+
+logger = logging.getLogger('pre_commit')
+
+
+def init_templatedir(
+ config_file: str,
+ store: Store,
+ directory: str,
+ hook_types: Sequence[str],
+) -> int:
+ install(
+ config_file, store, hook_types=hook_types,
+ overwrite=True, skip_on_missing_config=True, git_dir=directory,
+ )
+ try:
+ _, out, _ = cmd_output('git', 'config', 'init.templateDir')
+ except CalledProcessError:
+ configured_path = None
+ else:
+ configured_path = os.path.realpath(os.path.expanduser(out.strip()))
+ dest = os.path.realpath(directory)
+ if configured_path != dest:
+ logger.warning('`init.templateDir` not set to the target directory')
+ logger.warning(f'maybe `git config --global init.templateDir {dest}`?')
+ return 0
diff --git a/pre_commit/commands/install_uninstall.py b/pre_commit/commands/install_uninstall.py
new file mode 100644
index 0000000..c8b7633
--- /dev/null
+++ b/pre_commit/commands/install_uninstall.py
@@ -0,0 +1,175 @@
+import itertools
+import logging
+import os.path
+import shutil
+import sys
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit import git
+from pre_commit import output
+from pre_commit.clientlib import load_config
+from pre_commit.repository import all_hooks
+from pre_commit.repository import install_hook_envs
+from pre_commit.store import Store
+from pre_commit.util import make_executable
+from pre_commit.util import resource_text
+
+
+logger = logging.getLogger(__name__)
+
+# This is used to identify the hook file we install
+PRIOR_HASHES = (
+ '4d9958c90bc262f47553e2c073f14cfe',
+ 'd8ee923c46731b42cd95cc869add4062',
+ '49fd668cb42069aa1b6048464be5d395',
+ '79f09a650522a87b0da915d0d983b2de',
+ 'e358c9dae00eac5d06b38dfdb1e33a8c',
+)
+CURRENT_HASH = '138fd403232d2ddd5efb44317e38bf03'
+TEMPLATE_START = '# start templated\n'
+TEMPLATE_END = '# end templated\n'
+# Homebrew/homebrew-core#35825: be more timid about appropriate `PATH`
+# #1312 os.defpath is too restrictive on BSD
+POSIX_SEARCH_PATH = ('/usr/local/bin', '/usr/bin', '/bin')
+SYS_EXE = os.path.basename(os.path.realpath(sys.executable))
+
+
+def _hook_paths(
+ hook_type: str,
+ git_dir: Optional[str] = None,
+) -> Tuple[str, str]:
+ git_dir = git_dir if git_dir is not None else git.get_git_dir()
+ pth = os.path.join(git_dir, 'hooks', hook_type)
+ return pth, f'{pth}.legacy'
+
+
+def is_our_script(filename: str) -> bool:
+ if not os.path.exists(filename): # pragma: win32 no cover (symlink)
+ return False
+ with open(filename) as f:
+ contents = f.read()
+ return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES)
+
+
+def shebang() -> str:
+ if sys.platform == 'win32':
+ py = SYS_EXE
+ else:
+ exe_choices = [
+ f'python{sys.version_info[0]}.{sys.version_info[1]}',
+ f'python{sys.version_info[0]}',
+ ]
+ # avoid searching for bare `python` as it's likely to be python 2
+ if SYS_EXE != 'python':
+ exe_choices.append(SYS_EXE)
+ for path, exe in itertools.product(POSIX_SEARCH_PATH, exe_choices):
+ if os.access(os.path.join(path, exe), os.X_OK):
+ py = exe
+ break
+ else:
+ py = SYS_EXE
+ return f'#!/usr/bin/env {py}'
+
+
+def _install_hook_script(
+ config_file: str,
+ hook_type: str,
+ overwrite: bool = False,
+ skip_on_missing_config: bool = False,
+ git_dir: Optional[str] = None,
+) -> None:
+ hook_path, legacy_path = _hook_paths(hook_type, git_dir=git_dir)
+
+ os.makedirs(os.path.dirname(hook_path), exist_ok=True)
+
+ # If we have an existing hook, move it to pre-commit.legacy
+ if os.path.lexists(hook_path) and not is_our_script(hook_path):
+ shutil.move(hook_path, legacy_path)
+
+ # If we specify overwrite, we simply delete the legacy file
+ if overwrite and os.path.exists(legacy_path):
+ os.remove(legacy_path)
+ elif os.path.exists(legacy_path):
+ output.write_line(
+ f'Running in migration mode with existing hooks at {legacy_path}\n'
+ f'Use -f to use only pre-commit.',
+ )
+
+ args = ['hook-impl', f'--config={config_file}', f'--hook-type={hook_type}']
+ if skip_on_missing_config:
+ args.append('--skip-on-missing-config')
+ params = {'INSTALL_PYTHON': sys.executable, 'ARGS': args}
+
+ with open(hook_path, 'w') as hook_file:
+ contents = resource_text('hook-tmpl')
+ before, rest = contents.split(TEMPLATE_START)
+ to_template, after = rest.split(TEMPLATE_END)
+
+ before = before.replace('#!/usr/bin/env python3', shebang())
+
+ hook_file.write(before + TEMPLATE_START)
+ for line in to_template.splitlines():
+ var = line.split()[0]
+ hook_file.write(f'{var} = {params[var]!r}\n')
+ hook_file.write(TEMPLATE_END + after)
+ make_executable(hook_path)
+
+ output.write_line(f'pre-commit installed at {hook_path}')
+
+
+def install(
+ config_file: str,
+ store: Store,
+ hook_types: Sequence[str],
+ overwrite: bool = False,
+ hooks: bool = False,
+ skip_on_missing_config: bool = False,
+ git_dir: Optional[str] = None,
+) -> int:
+ if git_dir is None and git.has_core_hookpaths_set():
+ logger.error(
+ 'Cowardly refusing to install hooks with `core.hooksPath` set.\n'
+ 'hint: `git config --unset-all core.hooksPath`',
+ )
+ return 1
+
+ for hook_type in hook_types:
+ _install_hook_script(
+ config_file, hook_type,
+ overwrite=overwrite,
+ skip_on_missing_config=skip_on_missing_config,
+ git_dir=git_dir,
+ )
+
+ if hooks:
+ install_hooks(config_file, store)
+
+ return 0
+
+
+def install_hooks(config_file: str, store: Store) -> int:
+ install_hook_envs(all_hooks(load_config(config_file), store), store)
+ return 0
+
+
+def _uninstall_hook_script(hook_type: str) -> None:
+ hook_path, legacy_path = _hook_paths(hook_type)
+
+ # If our file doesn't exist or it isn't ours, gtfo.
+ if not os.path.exists(hook_path) or not is_our_script(hook_path):
+ return
+
+ os.remove(hook_path)
+ output.write_line(f'{hook_type} uninstalled')
+
+ if os.path.exists(legacy_path):
+ os.rename(legacy_path, hook_path)
+ output.write_line(f'Restored previous hooks to {hook_path}')
+
+
+def uninstall(hook_types: Sequence[str]) -> int:
+ for hook_type in hook_types:
+ _uninstall_hook_script(hook_type)
+ return 0
diff --git a/pre_commit/commands/migrate_config.py b/pre_commit/commands/migrate_config.py
new file mode 100644
index 0000000..d83b8e9
--- /dev/null
+++ b/pre_commit/commands/migrate_config.py
@@ -0,0 +1,59 @@
+import re
+
+import yaml
+
+from pre_commit.util import yaml_load
+
+
+def _indent(s: str) -> str:
+ lines = s.splitlines(True)
+ return ''.join(' ' * 4 + line if line.strip() else line for line in lines)
+
+
+def _is_header_line(line: str) -> bool:
+ return line.startswith(('#', '---')) or not line.strip()
+
+
+def _migrate_map(contents: str) -> str:
+ # Find the first non-header line
+ lines = contents.splitlines(True)
+ i = 0
+ # Only loop on non empty configuration file
+ while i < len(lines) and _is_header_line(lines[i]):
+ i += 1
+
+ header = ''.join(lines[:i])
+ rest = ''.join(lines[i:])
+
+ if isinstance(yaml_load(contents), list):
+ # If they are using the "default" flow style of yaml, this operation
+ # will yield a valid configuration
+ try:
+ trial_contents = f'{header}repos:\n{rest}'
+ yaml_load(trial_contents)
+ contents = trial_contents
+ except yaml.YAMLError:
+ contents = f'{header}repos:\n{_indent(rest)}'
+
+ return contents
+
+
+def _migrate_sha_to_rev(contents: str) -> str:
+ return re.sub(r'(\n\s+)sha:', r'\1rev:', contents)
+
+
+def migrate_config(config_file: str, quiet: bool = False) -> int:
+ with open(config_file) as f:
+ orig_contents = contents = f.read()
+
+ contents = _migrate_map(contents)
+ contents = _migrate_sha_to_rev(contents)
+
+ if contents != orig_contents:
+ with open(config_file, 'w') as f:
+ f.write(contents)
+
+ print('Configuration has been migrated.')
+ elif not quiet:
+ print('Configuration is already migrated.')
+ return 0
diff --git a/pre_commit/commands/run.py b/pre_commit/commands/run.py
new file mode 100644
index 0000000..2f74578
--- /dev/null
+++ b/pre_commit/commands/run.py
@@ -0,0 +1,360 @@
+import argparse
+import contextlib
+import functools
+import logging
+import os
+import re
+import subprocess
+import time
+from typing import Any
+from typing import Collection
+from typing import Dict
+from typing import List
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+
+from identify.identify import tags_from_path
+
+from pre_commit import color
+from pre_commit import git
+from pre_commit import output
+from pre_commit.clientlib import load_config
+from pre_commit.hook import Hook
+from pre_commit.languages.all import languages
+from pre_commit.repository import all_hooks
+from pre_commit.repository import install_hook_envs
+from pre_commit.staged_files_only import staged_files_only
+from pre_commit.store import Store
+from pre_commit.util import cmd_output_b
+from pre_commit.util import EnvironT
+
+
+logger = logging.getLogger('pre_commit')
+
+
+def _start_msg(*, start: str, cols: int, end_len: int) -> str:
+ dots = '.' * (cols - len(start) - end_len - 1)
+ return f'{start}{dots}'
+
+
+def _full_msg(
+ *,
+ start: str,
+ cols: int,
+ end_msg: str,
+ end_color: str,
+ use_color: bool,
+ postfix: str = '',
+) -> str:
+ dots = '.' * (cols - len(start) - len(postfix) - len(end_msg) - 1)
+ end = color.format_color(end_msg, end_color, use_color)
+ return f'{start}{dots}{postfix}{end}\n'
+
+
+def filter_by_include_exclude(
+ names: Collection[str],
+ include: str,
+ exclude: str,
+) -> List[str]:
+ include_re, exclude_re = re.compile(include), re.compile(exclude)
+ return [
+ filename for filename in names
+ if include_re.search(filename)
+ if not exclude_re.search(filename)
+ ]
+
+
+class Classifier:
+ def __init__(self, filenames: Sequence[str]) -> None:
+ # on windows we normalize all filenames to use forward slashes
+ # this makes it easier to filter using the `files:` regex
+ # this also makes improperly quoted shell-based hooks work better
+ # see #1173
+ if os.altsep == '/' and os.sep == '\\':
+ filenames = [f.replace(os.sep, os.altsep) for f in filenames]
+ self.filenames = [f for f in filenames if os.path.lexists(f)]
+
+ @functools.lru_cache(maxsize=None)
+ def _types_for_file(self, filename: str) -> Set[str]:
+ return tags_from_path(filename)
+
+ def by_types(
+ self,
+ names: Sequence[str],
+ types: Collection[str],
+ exclude_types: Collection[str],
+ ) -> List[str]:
+ types, exclude_types = frozenset(types), frozenset(exclude_types)
+ ret = []
+ for filename in names:
+ tags = self._types_for_file(filename)
+ if tags >= types and not tags & exclude_types:
+ ret.append(filename)
+ return ret
+
+ def filenames_for_hook(self, hook: Hook) -> Tuple[str, ...]:
+ names = self.filenames
+ names = filter_by_include_exclude(names, hook.files, hook.exclude)
+ names = self.by_types(names, hook.types, hook.exclude_types)
+ return tuple(names)
+
+
+def _get_skips(environ: EnvironT) -> Set[str]:
+ skips = environ.get('SKIP', '')
+ return {skip.strip() for skip in skips.split(',') if skip.strip()}
+
+
+SKIPPED = 'Skipped'
+NO_FILES = '(no files to check)'
+
+
+def _subtle_line(s: str, use_color: bool) -> None:
+ output.write_line(color.format_color(s, color.SUBTLE, use_color))
+
+
+def _run_single_hook(
+ classifier: Classifier,
+ hook: Hook,
+ skips: Set[str],
+ cols: int,
+ verbose: bool,
+ use_color: bool,
+) -> bool:
+ filenames = classifier.filenames_for_hook(hook)
+
+ if hook.id in skips or hook.alias in skips:
+ output.write(
+ _full_msg(
+ start=hook.name,
+ end_msg=SKIPPED,
+ end_color=color.YELLOW,
+ use_color=use_color,
+ cols=cols,
+ ),
+ )
+ duration = None
+ retcode = 0
+ files_modified = False
+ out = b''
+ elif not filenames and not hook.always_run:
+ output.write(
+ _full_msg(
+ start=hook.name,
+ postfix=NO_FILES,
+ end_msg=SKIPPED,
+ end_color=color.TURQUOISE,
+ use_color=use_color,
+ cols=cols,
+ ),
+ )
+ duration = None
+ retcode = 0
+ files_modified = False
+ out = b''
+ else:
+ # print hook and dots first in case the hook takes a while to run
+ output.write(_start_msg(start=hook.name, end_len=6, cols=cols))
+
+ diff_cmd = ('git', 'diff', '--no-ext-diff')
+ diff_before = cmd_output_b(*diff_cmd, retcode=None)
+ if not hook.pass_filenames:
+ filenames = ()
+ time_before = time.time()
+ language = languages[hook.language]
+ retcode, out = language.run_hook(hook, filenames, use_color)
+ duration = round(time.time() - time_before, 2) or 0
+ diff_after = cmd_output_b(*diff_cmd, retcode=None)
+
+ # if the hook makes changes, fail the commit
+ files_modified = diff_before != diff_after
+
+ if retcode or files_modified:
+ print_color = color.RED
+ status = 'Failed'
+ else:
+ print_color = color.GREEN
+ status = 'Passed'
+
+ output.write_line(color.format_color(status, print_color, use_color))
+
+ if verbose or hook.verbose or retcode or files_modified:
+ _subtle_line(f'- hook id: {hook.id}', use_color)
+
+ if (verbose or hook.verbose) and duration is not None:
+ _subtle_line(f'- duration: {duration}s', use_color)
+
+ if retcode:
+ _subtle_line(f'- exit code: {retcode}', use_color)
+
+ # Print a message if failing due to file modifications
+ if files_modified:
+ _subtle_line('- files were modified by this hook', use_color)
+
+ if out.strip():
+ output.write_line()
+ output.write_line_b(out.strip(), logfile_name=hook.log_file)
+ output.write_line()
+
+ return files_modified or bool(retcode)
+
+
+def _compute_cols(hooks: Sequence[Hook]) -> int:
+ """Compute the number of columns to display hook messages. The widest
+ that will be displayed is in the no files skipped case:
+
+ Hook name...(no files to check) Skipped
+ """
+ if hooks:
+ name_len = max(len(hook.name) for hook in hooks)
+ else:
+ name_len = 0
+
+ cols = name_len + 3 + len(NO_FILES) + 1 + len(SKIPPED)
+ return max(cols, 80)
+
+
+def _all_filenames(args: argparse.Namespace) -> Collection[str]:
+ if args.hook_stage == 'post-checkout': # no files for post-checkout
+ return ()
+ elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}:
+ return (args.commit_msg_filename,)
+ elif args.from_ref and args.to_ref:
+ return git.get_changed_files(args.from_ref, args.to_ref)
+ elif args.files:
+ return args.files
+ elif args.all_files:
+ return git.get_all_files()
+ elif git.is_in_merge_conflict():
+ return git.get_conflicted_files()
+ else:
+ return git.get_staged_files()
+
+
+def _run_hooks(
+ config: Dict[str, Any],
+ hooks: Sequence[Hook],
+ args: argparse.Namespace,
+ environ: EnvironT,
+) -> int:
+ """Actually run the hooks."""
+ skips = _get_skips(environ)
+ cols = _compute_cols(hooks)
+ filenames = filter_by_include_exclude(
+ _all_filenames(args), config['files'], config['exclude'],
+ )
+ classifier = Classifier(filenames)
+ retval = 0
+ for hook in hooks:
+ retval |= _run_single_hook(
+ classifier, hook, skips, cols,
+ verbose=args.verbose, use_color=args.color,
+ )
+ if retval and config['fail_fast']:
+ break
+ if retval and args.show_diff_on_failure and git.has_diff():
+ if args.all_files:
+ output.write_line(
+ 'pre-commit hook(s) made changes.\n'
+ 'If you are seeing this message in CI, '
+ 'reproduce locally with: `pre-commit run --all-files`.\n'
+ 'To run `pre-commit` as part of git workflow, use '
+ '`pre-commit install`.',
+ )
+ output.write_line('All changes made by hooks:')
+ # args.color is a boolean.
+ # See user_color function in color.py
+ git_color_opt = 'always' if args.color else 'never'
+ subprocess.call((
+ 'git', '--no-pager', 'diff', '--no-ext-diff',
+ f'--color={git_color_opt}',
+ ))
+
+ return retval
+
+
+def _has_unmerged_paths() -> bool:
+ _, stdout, _ = cmd_output_b('git', 'ls-files', '--unmerged')
+ return bool(stdout.strip())
+
+
+def _has_unstaged_config(config_file: str) -> bool:
+ retcode, _, _ = cmd_output_b(
+ 'git', 'diff', '--no-ext-diff', '--exit-code', config_file,
+ retcode=None,
+ )
+ # be explicit, other git errors don't mean it has an unstaged config.
+ return retcode == 1
+
+
+def run(
+ config_file: str,
+ store: Store,
+ args: argparse.Namespace,
+ environ: EnvironT = os.environ,
+) -> int:
+ stash = not args.all_files and not args.files
+
+ # Check if we have unresolved merge conflict files and fail fast.
+ if _has_unmerged_paths():
+ logger.error('Unmerged files. Resolve before committing.')
+ return 1
+ if bool(args.from_ref) != bool(args.to_ref):
+ logger.error('Specify both --from-ref and --to-ref.')
+ return 1
+ if stash and _has_unstaged_config(config_file):
+ logger.error(
+ f'Your pre-commit configuration is unstaged.\n'
+ f'`git add {config_file}` to fix this.',
+ )
+ return 1
+ if (
+ args.hook_stage in {'prepare-commit-msg', 'commit-msg'} and
+ not args.commit_msg_filename
+ ):
+ logger.error(
+ f'`--commit-msg-filename` is required for '
+ f'`--hook-stage {args.hook_stage}`',
+ )
+ return 1
+
+ # Expose from-ref / to-ref as environment variables for hooks to consume
+ if args.from_ref and args.to_ref:
+ # legacy names
+ environ['PRE_COMMIT_ORIGIN'] = args.from_ref
+ environ['PRE_COMMIT_SOURCE'] = args.to_ref
+ # new names
+ environ['PRE_COMMIT_FROM_REF'] = args.from_ref
+ environ['PRE_COMMIT_TO_REF'] = args.to_ref
+
+ if args.remote_name and args.remote_url:
+ environ['PRE_COMMIT_REMOTE_NAME'] = args.remote_name
+ environ['PRE_COMMIT_REMOTE_URL'] = args.remote_url
+
+ if args.checkout_type:
+ environ['PRE_COMMIT_CHECKOUT_TYPE'] = args.checkout_type
+
+ with contextlib.ExitStack() as exit_stack:
+ if stash:
+ exit_stack.enter_context(staged_files_only(store.directory))
+
+ config = load_config(config_file)
+ hooks = [
+ hook
+ for hook in all_hooks(config, store)
+ if not args.hook or hook.id == args.hook or hook.alias == args.hook
+ if args.hook_stage in hook.stages
+ ]
+
+ if args.hook and not hooks:
+ output.write_line(
+ f'No hook with id `{args.hook}` in stage `{args.hook_stage}`',
+ )
+ return 1
+
+ install_hook_envs(hooks, store)
+
+ return _run_hooks(config, hooks, args, environ)
+
+ # https://github.com/python/mypy/issues/7726
+ raise AssertionError('unreachable')
diff --git a/pre_commit/commands/sample_config.py b/pre_commit/commands/sample_config.py
new file mode 100644
index 0000000..d435faa
--- /dev/null
+++ b/pre_commit/commands/sample_config.py
@@ -0,0 +1,21 @@
+# TODO: maybe `git ls-remote git://github.com/pre-commit/pre-commit-hooks` to
+# determine the latest revision? This adds ~200ms from my tests (and is
+# significantly faster than https:// or http://). For now, periodically
+# manually updating the revision is fine.
+SAMPLE_CONFIG = '''\
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v2.4.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ - id: check-added-large-files
+'''
+
+
+def sample_config() -> int:
+ print(SAMPLE_CONFIG, end='')
+ return 0
diff --git a/pre_commit/commands/try_repo.py b/pre_commit/commands/try_repo.py
new file mode 100644
index 0000000..4aee209
--- /dev/null
+++ b/pre_commit/commands/try_repo.py
@@ -0,0 +1,77 @@
+import argparse
+import logging
+import os.path
+from typing import Optional
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit import output
+from pre_commit.clientlib import load_manifest
+from pre_commit.commands.run import run
+from pre_commit.store import Store
+from pre_commit.util import cmd_output_b
+from pre_commit.util import tmpdir
+from pre_commit.util import yaml_dump
+from pre_commit.xargs import xargs
+
+logger = logging.getLogger(__name__)
+
+
+def _repo_ref(tmpdir: str, repo: str, ref: Optional[str]) -> Tuple[str, str]:
+ # if `ref` is explicitly passed, use it
+ if ref is not None:
+ return repo, ref
+
+ ref = git.head_rev(repo)
+ # if it exists on disk, we'll try and clone it with the local changes
+ if os.path.exists(repo) and git.has_diff('HEAD', repo=repo):
+ logger.warning('Creating temporary repo with uncommitted changes...')
+
+ shadow = os.path.join(tmpdir, 'shadow-repo')
+ cmd_output_b('git', 'clone', repo, shadow)
+ cmd_output_b('git', 'checkout', ref, '-b', '_pc_tmp', cwd=shadow)
+
+ idx = git.git_path('index', repo=shadow)
+ objs = git.git_path('objects', repo=shadow)
+ env = dict(os.environ, GIT_INDEX_FILE=idx, GIT_OBJECT_DIRECTORY=objs)
+
+ staged_files = git.get_staged_files(cwd=repo)
+ if staged_files:
+ xargs(('git', 'add', '--'), staged_files, cwd=repo, env=env)
+
+ cmd_output_b('git', 'add', '-u', cwd=repo, env=env)
+ git.commit(repo=shadow)
+
+ return shadow, git.head_rev(shadow)
+ else:
+ return repo, ref
+
+
+def try_repo(args: argparse.Namespace) -> int:
+ with tmpdir() as tempdir:
+ repo, ref = _repo_ref(tempdir, args.repo, args.ref)
+
+ store = Store(tempdir)
+ if args.hook:
+ hooks = [{'id': args.hook}]
+ else:
+ repo_path = store.clone(repo, ref)
+ manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
+ manifest = sorted(manifest, key=lambda hook: hook['id'])
+ hooks = [{'id': hook['id']} for hook in manifest]
+
+ config = {'repos': [{'repo': repo, 'rev': ref, 'hooks': hooks}]}
+ config_s = yaml_dump(config)
+
+ config_filename = os.path.join(tempdir, C.CONFIG_FILE)
+ with open(config_filename, 'w') as cfg:
+ cfg.write(config_s)
+
+ output.write_line('=' * 79)
+ output.write_line('Using config:')
+ output.write_line('=' * 79)
+ output.write(config_s)
+ output.write_line('=' * 79)
+
+ return run(config_filename, store, args)
diff --git a/pre_commit/constants.py b/pre_commit/constants.py
new file mode 100644
index 0000000..e2b8e3a
--- /dev/null
+++ b/pre_commit/constants.py
@@ -0,0 +1,24 @@
+import sys
+
+if sys.version_info < (3, 8): # pragma: no cover (<PY38)
+ import importlib_metadata
+else: # pragma: no cover (PY38+)
+ import importlib.metadata as importlib_metadata
+
+CONFIG_FILE = '.pre-commit-config.yaml'
+MANIFEST_FILE = '.pre-commit-hooks.yaml'
+
+# Bump when installation changes in a backwards / forwards incompatible way
+INSTALLED_STATE_VERSION = '1'
+# Bump when modifying `empty_template`
+LOCAL_REPO_VERSION = '1'
+
+VERSION = importlib_metadata.version('pre_commit')
+
+# `manual` is not invoked by any installed git hook. See #719
+STAGES = (
+ 'commit', 'merge-commit', 'prepare-commit-msg', 'commit-msg', 'manual',
+ 'post-checkout', 'push',
+)
+
+DEFAULT = 'default'
diff --git a/pre_commit/envcontext.py b/pre_commit/envcontext.py
new file mode 100644
index 0000000..16d3d15
--- /dev/null
+++ b/pre_commit/envcontext.py
@@ -0,0 +1,67 @@
+import contextlib
+import enum
+import os
+from typing import Generator
+from typing import NamedTuple
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+from pre_commit.util import EnvironT
+
+
+class _Unset(enum.Enum):
+ UNSET = 1
+
+
+UNSET = _Unset.UNSET
+
+
+class Var(NamedTuple):
+ name: str
+ default: str = ''
+
+
+SubstitutionT = Tuple[Union[str, Var], ...]
+ValueT = Union[str, _Unset, SubstitutionT]
+PatchesT = Tuple[Tuple[str, ValueT], ...]
+
+
+def format_env(parts: SubstitutionT, env: EnvironT) -> str:
+ return ''.join(
+ env.get(part.name, part.default) if isinstance(part, Var) else part
+ for part in parts
+ )
+
+
+@contextlib.contextmanager
+def envcontext(
+ patch: PatchesT,
+ _env: Optional[EnvironT] = None,
+) -> Generator[None, None, None]:
+ """In this context, `os.environ` is modified according to `patch`.
+
+ `patch` is an iterable of 2-tuples (key, value):
+ `key`: string
+ `value`:
+ - string: `environ[key] == value` inside the context.
+ - UNSET: `key not in environ` inside the context.
+ - template: A template is a tuple of strings and Var which will be
+ replaced with the previous environment
+ """
+ env = os.environ if _env is None else _env
+ before = env.copy()
+
+ for k, v in patch:
+ if v is UNSET:
+ env.pop(k, None)
+ elif isinstance(v, tuple):
+ env[k] = format_env(v, before)
+ else:
+ env[k] = v
+
+ try:
+ yield
+ finally:
+ env.clear()
+ env.update(before)
diff --git a/pre_commit/error_handler.py b/pre_commit/error_handler.py
new file mode 100644
index 0000000..b2321ae
--- /dev/null
+++ b/pre_commit/error_handler.py
@@ -0,0 +1,64 @@
+import contextlib
+import functools
+import os.path
+import sys
+import traceback
+from typing import Generator
+
+import pre_commit.constants as C
+from pre_commit import output
+from pre_commit.store import Store
+from pre_commit.util import force_bytes
+
+
+class FatalError(RuntimeError):
+ pass
+
+
+def _log_and_exit(msg: str, exc: BaseException, formatted: str) -> None:
+ error_msg = f'{msg}: {type(exc).__name__}: '.encode() + force_bytes(exc)
+ output.write_line_b(error_msg)
+ log_path = os.path.join(Store().directory, 'pre-commit.log')
+ output.write_line(f'Check the log at {log_path}')
+
+ with open(log_path, 'wb') as log:
+ _log_line = functools.partial(output.write_line, stream=log)
+ _log_line_b = functools.partial(output.write_line_b, stream=log)
+
+ _log_line('### version information')
+ _log_line()
+ _log_line('```')
+ _log_line(f'pre-commit version: {C.VERSION}')
+ _log_line('sys.version:')
+ for line in sys.version.splitlines():
+ _log_line(f' {line}')
+ _log_line(f'sys.executable: {sys.executable}')
+ _log_line(f'os.name: {os.name}')
+ _log_line(f'sys.platform: {sys.platform}')
+ _log_line('```')
+ _log_line()
+
+ _log_line('### error information')
+ _log_line()
+ _log_line('```')
+ _log_line_b(error_msg)
+ _log_line('```')
+ _log_line()
+ _log_line('```')
+ _log_line(formatted)
+ _log_line('```')
+ raise SystemExit(1)
+
+
+@contextlib.contextmanager
+def error_handler() -> Generator[None, None, None]:
+ try:
+ yield
+ except (Exception, KeyboardInterrupt) as e:
+ if isinstance(e, FatalError):
+ msg = 'An error has occurred'
+ elif isinstance(e, KeyboardInterrupt):
+ msg = 'Interrupted (^C)'
+ else:
+ msg = 'An unexpected error has occurred'
+ _log_and_exit(msg, e, traceback.format_exc())
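A hedged usage sketch (the message text is illustrative): any exception raised inside the block is reported, appended to pre-commit.log in the store directory, and converted into SystemExit(1).

    from pre_commit.error_handler import error_handler, FatalError

    with error_handler():
        raise FatalError('no .pre-commit-config.yaml found')
    # prints: An error has occurred: FatalError: no .pre-commit-config.yaml found
    #         Check the log at <store>/pre-commit.log
    # then exits with status 1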
diff --git a/pre_commit/file_lock.py b/pre_commit/file_lock.py
new file mode 100644
index 0000000..ff0dc5e
--- /dev/null
+++ b/pre_commit/file_lock.py
@@ -0,0 +1,76 @@
+import contextlib
+import errno
+import os
+from typing import Callable
+from typing import Generator
+
+
+if os.name == 'nt': # pragma: no cover (windows)
+ import msvcrt
+
+ # https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/locking
+
+ # on windows we lock "regions" of files; we don't care about the actual
+ # byte region, so we just pick *some* number here.
+ _region = 0xffff
+
+ @contextlib.contextmanager
+ def _locked(
+ fileno: int,
+ blocked_cb: Callable[[], None],
+ ) -> Generator[None, None, None]:
+ try:
+ # TODO: https://github.com/python/typeshed/pull/3607
+ msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region) # type: ignore
+ except OSError:
+ blocked_cb()
+ while True:
+ try:
+ # TODO: https://github.com/python/typeshed/pull/3607
+ msvcrt.locking(fileno, msvcrt.LK_LOCK, _region) # type: ignore # noqa: E501
+ except OSError as e:
+ # Locking violation. Returned when the _LK_LOCK or _LK_RLCK
+ # flag is specified and the file cannot be locked after 10
+ # attempts.
+ if e.errno != errno.EDEADLOCK:
+ raise
+ else:
+ break
+
+ try:
+ yield
+ finally:
+ # From cursory testing, it seems to get unlocked when the file is
+ # closed, so this may not be necessary.
+ # The documentation, however, states:
+ # "Regions should be locked only briefly and should be unlocked
+ # before closing a file or exiting the program."
+ # TODO: https://github.com/python/typeshed/pull/3607
+ msvcrt.locking(fileno, msvcrt.LK_UNLCK, _region) # type: ignore
+else: # pragma: win32 no cover
+ import fcntl
+
+ @contextlib.contextmanager
+ def _locked(
+ fileno: int,
+ blocked_cb: Callable[[], None],
+ ) -> Generator[None, None, None]:
+ try:
+ fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except OSError: # pragma: no cover (tests are single-threaded)
+ blocked_cb()
+ fcntl.flock(fileno, fcntl.LOCK_EX)
+ try:
+ yield
+ finally:
+ fcntl.flock(fileno, fcntl.LOCK_UN)
+
+
+@contextlib.contextmanager
+def lock(
+ path: str,
+ blocked_cb: Callable[[], None],
+) -> Generator[None, None, None]:
+ with open(path, 'a+') as f:
+ with _locked(f.fileno(), blocked_cb):
+ yield
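A minimal usage sketch, assuming a writable lock path (the path and callback are hypothetical); `blocked_cb` only runs if the initial non-blocking attempt fails.

    from pre_commit import file_lock

    def _blocked() -> None:
        print('another process holds the lock, waiting...')

    with file_lock.lock('/tmp/example.lock', _blocked):
        ...  # exclusive access while the lock is held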
diff --git a/pre_commit/git.py b/pre_commit/git.py
new file mode 100644
index 0000000..7e757f2
--- /dev/null
+++ b/pre_commit/git.py
@@ -0,0 +1,196 @@
+import logging
+import os.path
+import sys
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Set
+
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+from pre_commit.util import EnvironT
+
+
+logger = logging.getLogger(__name__)
+
+
+def zsplit(s: str) -> List[str]:
+ s = s.strip('\0')
+ if s:
+ return s.split('\0')
+ else:
+ return []
+
+
+def no_git_env(_env: Optional[EnvironT] = None) -> Dict[str, str]:
+ # Too many bugs dealing with environment variables and GIT:
+ # https://github.com/pre-commit/pre-commit/issues/300
+ # In git 2.6.3 (maybe others), git exports GIT_WORK_TREE while running
+ # pre-commit hooks
+ # In git 1.9.1 (maybe others), git exports GIT_DIR and GIT_INDEX_FILE
+ # while running pre-commit hooks in submodules.
+ # GIT_DIR: Causes git clone to clone wrong thing
+ # GIT_INDEX_FILE: Causes 'error invalid object ...' during commit
+ _env = _env if _env is not None else os.environ
+ return {
+ k: v for k, v in _env.items()
+ if not k.startswith('GIT_') or
+ k in {
+ 'GIT_EXEC_PATH', 'GIT_SSH', 'GIT_SSH_COMMAND', 'GIT_SSL_CAINFO',
+ 'GIT_SSL_NO_VERIFY',
+ }
+ }
+
+
+def get_root() -> str:
+ return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()
+
+
+def get_git_dir(git_root: str = '.') -> str:
+ opts = ('--git-common-dir', '--git-dir')
+ _, out, _ = cmd_output('git', 'rev-parse', *opts, cwd=git_root)
+ for line, opt in zip(out.splitlines(), opts):
+ if line != opt: # pragma: no branch (git < 2.5)
+ return os.path.normpath(os.path.join(git_root, line))
+ else:
+ raise AssertionError('unreachable: no git dir')
+
+
+def get_remote_url(git_root: str) -> str:
+ _, out, _ = cmd_output('git', 'config', 'remote.origin.url', cwd=git_root)
+ return out.strip()
+
+
+def is_in_merge_conflict() -> bool:
+ git_dir = get_git_dir('.')
+ return (
+ os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and
+ os.path.exists(os.path.join(git_dir, 'MERGE_HEAD'))
+ )
+
+
+def parse_merge_msg_for_conflicts(merge_msg: bytes) -> List[str]:
+ # Conflicted files start with tabs
+ return [
+ line.lstrip(b'#').strip().decode()
+ for line in merge_msg.splitlines()
+ # '#\t' for git 2.4.1
+ if line.startswith((b'\t', b'#\t'))
+ ]
+
+
+def get_conflicted_files() -> Set[str]:
+ logger.info('Checking merge-conflict files only.')
+ # Need to get the conflicted files from the MERGE_MSG because the user
+ # could have resolved the conflict by choosing one side or the other.
+ with open(os.path.join(get_git_dir('.'), 'MERGE_MSG'), 'rb') as f:
+ merge_msg = f.read()
+ merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)
+
+ # This will get the rest of the changes made after the merge.
+ # If the user resolved the merge conflict by mixing both sides, this
+ # will also include the conflicted files.
+ tree_hash = cmd_output('git', 'write-tree')[1].strip()
+ merge_diff_filenames = zsplit(
+ cmd_output(
+ 'git', 'diff', '--name-only', '--no-ext-diff', '-z',
+ '-m', tree_hash, 'HEAD', 'MERGE_HEAD',
+ )[1],
+ )
+ return set(merge_conflict_filenames) | set(merge_diff_filenames)
+
+
+def get_staged_files(cwd: Optional[str] = None) -> List[str]:
+ return zsplit(
+ cmd_output(
+ 'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z',
+ # Everything except for D (deleted files)
+ '--diff-filter=ACMRTUXB',
+ cwd=cwd,
+ )[1],
+ )
+
+
+def intent_to_add_files() -> List[str]:
+ _, stdout, _ = cmd_output('git', 'status', '--porcelain', '-z')
+ parts = list(reversed(zsplit(stdout)))
+ intent_to_add = []
+ while parts:
+ line = parts.pop()
+ status, filename = line[:3], line[3:]
+ if status[0] in {'C', 'R'}: # renames / moves have an additional arg
+ parts.pop()
+ if status[1] == 'A':
+ intent_to_add.append(filename)
+ return intent_to_add
+
+
+def get_all_files() -> List[str]:
+ return zsplit(cmd_output('git', 'ls-files', '-z')[1])
+
+
+def get_changed_files(old: str, new: str) -> List[str]:
+ return zsplit(
+ cmd_output(
+ 'git', 'diff', '--name-only', '--no-ext-diff', '-z',
+ f'{old}...{new}',
+ )[1],
+ )
+
+
+def head_rev(remote: str) -> str:
+ _, out, _ = cmd_output('git', 'ls-remote', '--exit-code', remote, 'HEAD')
+ return out.split()[0]
+
+
+def has_diff(*args: str, repo: str = '.') -> bool:
+ cmd = ('git', 'diff', '--quiet', '--no-ext-diff', *args)
+ return cmd_output_b(*cmd, cwd=repo, retcode=None)[0] == 1
+
+
+def has_core_hookpaths_set() -> bool:
+ _, out, _ = cmd_output_b('git', 'config', 'core.hooksPath', retcode=None)
+ return bool(out.strip())
+
+
+def init_repo(path: str, remote: str) -> None:
+ if os.path.isdir(remote):
+ remote = os.path.abspath(remote)
+
+ env = no_git_env()
+ cmd_output_b('git', 'init', path, env=env)
+ cmd_output_b('git', 'remote', 'add', 'origin', remote, cwd=path, env=env)
+
+
+def commit(repo: str = '.') -> None:
+ env = no_git_env()
+ name, email = 'pre-commit', 'asottile+pre-commit@umich.edu'
+ env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = name
+ env['GIT_AUTHOR_EMAIL'] = env['GIT_COMMITTER_EMAIL'] = email
+ cmd = ('git', 'commit', '--no-edit', '--no-gpg-sign', '-n', '-minit')
+ cmd_output_b(*cmd, cwd=repo, env=env)
+
+
+def git_path(name: str, repo: str = '.') -> str:
+ _, out, _ = cmd_output('git', 'rev-parse', '--git-path', name, cwd=repo)
+ return os.path.join(repo, out.strip())
+
+
+def check_for_cygwin_mismatch() -> None:
+ """See https://github.com/pre-commit/pre-commit/issues/354"""
+ if sys.platform in ('cygwin', 'win32'): # pragma: no cover (windows)
+ is_cygwin_python = sys.platform == 'cygwin'
+ toplevel = cmd_output('git', 'rev-parse', '--show-toplevel')[1]
+ is_cygwin_git = toplevel.startswith('/')
+
+ if is_cygwin_python ^ is_cygwin_git:
+ exe_type = {True: '(cygwin)', False: '(windows)'}
+ logger.warning(
+ f'pre-commit has detected a mix of cygwin python / git\n'
+ f'This combination is not supported, it is likely you will '
+ f'receive an error later in the program.\n'
+ f'Make sure to use cygwin git+python while using cygwin\n'
+ f'These can be installed through the cygwin installer.\n'
+ f' - python {exe_type[is_cygwin_python]}\n'
+ f' - git {exe_type[is_cygwin_git]}\n',
+ )
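Two small illustrations of the helpers above (the values are examples, not output from a real repository): `zsplit` on NUL-delimited git output, and `no_git_env` filtering a hypothetical environment.

    from pre_commit import git

    git.zsplit('a.py\x00b.py\x00')   # -> ['a.py', 'b.py']
    git.zsplit('')                   # -> []

    env = git.no_git_env({'GIT_DIR': '.git', 'GIT_SSH': '/usr/bin/ssh', 'HOME': '/home/x'})
    # GIT_DIR is dropped; GIT_SSH (allow-listed) and HOME (not GIT_*) are kept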
diff --git a/pre_commit/hook.py b/pre_commit/hook.py
new file mode 100644
index 0000000..b65ac42
--- /dev/null
+++ b/pre_commit/hook.py
@@ -0,0 +1,63 @@
+import logging
+import shlex
+from typing import Any
+from typing import Dict
+from typing import NamedTuple
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.prefix import Prefix
+
+logger = logging.getLogger('pre_commit')
+
+
+class Hook(NamedTuple):
+ src: str
+ prefix: Prefix
+ id: str
+ name: str
+ entry: str
+ language: str
+ alias: str
+ files: str
+ exclude: str
+ types: Sequence[str]
+ exclude_types: Sequence[str]
+ additional_dependencies: Sequence[str]
+ args: Sequence[str]
+ always_run: bool
+ pass_filenames: bool
+ description: str
+ language_version: str
+ log_file: str
+ minimum_pre_commit_version: str
+ require_serial: bool
+ stages: Sequence[str]
+ verbose: bool
+
+ @property
+ def cmd(self) -> Tuple[str, ...]:
+ return (*shlex.split(self.entry), *self.args)
+
+ @property
+ def install_key(self) -> Tuple[Prefix, str, str, Tuple[str, ...]]:
+ return (
+ self.prefix,
+ self.language,
+ self.language_version,
+ tuple(self.additional_dependencies),
+ )
+
+ @classmethod
+ def create(cls, src: str, prefix: Prefix, dct: Dict[str, Any]) -> 'Hook':
+ # TODO: have cfgv do this (?)
+ extra_keys = set(dct) - _KEYS
+ if extra_keys:
+ logger.warning(
+ f'Unexpected key(s) present on {src} => {dct["id"]}: '
+ f'{", ".join(sorted(extra_keys))}',
+ )
+ return cls(src=src, prefix=prefix, **{k: dct[k] for k in _KEYS})
+
+
+_KEYS = frozenset(set(Hook._fields) - {'src', 'prefix'})
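A comment-only sketch of how the `cmd` property composes `entry` and `args` (the field values are hypothetical):

    # hook.entry == 'flake8 --max-line-length=100'
    # hook.args  == ['--select=E501']
    # hook.cmd   == ('flake8', '--max-line-length=100', '--select=E501')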
diff --git a/pre_commit/languages/__init__.py b/pre_commit/languages/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pre_commit/languages/__init__.py
diff --git a/pre_commit/languages/all.py b/pre_commit/languages/all.py
new file mode 100644
index 0000000..8f4ffa8
--- /dev/null
+++ b/pre_commit/languages/all.py
@@ -0,0 +1,60 @@
+from typing import Callable
+from typing import NamedTuple
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.hook import Hook
+from pre_commit.languages import conda
+from pre_commit.languages import docker
+from pre_commit.languages import docker_image
+from pre_commit.languages import fail
+from pre_commit.languages import golang
+from pre_commit.languages import node
+from pre_commit.languages import perl
+from pre_commit.languages import pygrep
+from pre_commit.languages import python
+from pre_commit.languages import python_venv
+from pre_commit.languages import ruby
+from pre_commit.languages import rust
+from pre_commit.languages import script
+from pre_commit.languages import swift
+from pre_commit.languages import system
+from pre_commit.prefix import Prefix
+
+
+class Language(NamedTuple):
+ name: str
+ # Use `None` for no installation / environment
+ ENVIRONMENT_DIR: Optional[str]
+ # return a value to replace 'default' for `language_version`
+ get_default_version: Callable[[], str]
+ # return whether the environment is healthy (or should be rebuilt)
+ healthy: Callable[[Prefix, str], bool]
+ # install a repository for the given language and language_version
+ install_environment: Callable[[Prefix, str, Sequence[str]], None]
+ # execute a hook and return the exit code and output
+ run_hook: 'Callable[[Hook, Sequence[str], bool], Tuple[int, bytes]]'
+
+
+# TODO: back to modules + Protocol: https://github.com/python/mypy/issues/5018
+languages = {
+ # BEGIN GENERATED (testing/gen-languages-all)
+ 'conda': Language(name='conda', ENVIRONMENT_DIR=conda.ENVIRONMENT_DIR, get_default_version=conda.get_default_version, healthy=conda.healthy, install_environment=conda.install_environment, run_hook=conda.run_hook), # noqa: E501
+ 'docker': Language(name='docker', ENVIRONMENT_DIR=docker.ENVIRONMENT_DIR, get_default_version=docker.get_default_version, healthy=docker.healthy, install_environment=docker.install_environment, run_hook=docker.run_hook), # noqa: E501
+ 'docker_image': Language(name='docker_image', ENVIRONMENT_DIR=docker_image.ENVIRONMENT_DIR, get_default_version=docker_image.get_default_version, healthy=docker_image.healthy, install_environment=docker_image.install_environment, run_hook=docker_image.run_hook), # noqa: E501
+ 'fail': Language(name='fail', ENVIRONMENT_DIR=fail.ENVIRONMENT_DIR, get_default_version=fail.get_default_version, healthy=fail.healthy, install_environment=fail.install_environment, run_hook=fail.run_hook), # noqa: E501
+ 'golang': Language(name='golang', ENVIRONMENT_DIR=golang.ENVIRONMENT_DIR, get_default_version=golang.get_default_version, healthy=golang.healthy, install_environment=golang.install_environment, run_hook=golang.run_hook), # noqa: E501
+ 'node': Language(name='node', ENVIRONMENT_DIR=node.ENVIRONMENT_DIR, get_default_version=node.get_default_version, healthy=node.healthy, install_environment=node.install_environment, run_hook=node.run_hook), # noqa: E501
+ 'perl': Language(name='perl', ENVIRONMENT_DIR=perl.ENVIRONMENT_DIR, get_default_version=perl.get_default_version, healthy=perl.healthy, install_environment=perl.install_environment, run_hook=perl.run_hook), # noqa: E501
+ 'pygrep': Language(name='pygrep', ENVIRONMENT_DIR=pygrep.ENVIRONMENT_DIR, get_default_version=pygrep.get_default_version, healthy=pygrep.healthy, install_environment=pygrep.install_environment, run_hook=pygrep.run_hook), # noqa: E501
+ 'python': Language(name='python', ENVIRONMENT_DIR=python.ENVIRONMENT_DIR, get_default_version=python.get_default_version, healthy=python.healthy, install_environment=python.install_environment, run_hook=python.run_hook), # noqa: E501
+ 'python_venv': Language(name='python_venv', ENVIRONMENT_DIR=python_venv.ENVIRONMENT_DIR, get_default_version=python_venv.get_default_version, healthy=python_venv.healthy, install_environment=python_venv.install_environment, run_hook=python_venv.run_hook), # noqa: E501
+ 'ruby': Language(name='ruby', ENVIRONMENT_DIR=ruby.ENVIRONMENT_DIR, get_default_version=ruby.get_default_version, healthy=ruby.healthy, install_environment=ruby.install_environment, run_hook=ruby.run_hook), # noqa: E501
+ 'rust': Language(name='rust', ENVIRONMENT_DIR=rust.ENVIRONMENT_DIR, get_default_version=rust.get_default_version, healthy=rust.healthy, install_environment=rust.install_environment, run_hook=rust.run_hook), # noqa: E501
+ 'script': Language(name='script', ENVIRONMENT_DIR=script.ENVIRONMENT_DIR, get_default_version=script.get_default_version, healthy=script.healthy, install_environment=script.install_environment, run_hook=script.run_hook), # noqa: E501
+ 'swift': Language(name='swift', ENVIRONMENT_DIR=swift.ENVIRONMENT_DIR, get_default_version=swift.get_default_version, healthy=swift.healthy, install_environment=swift.install_environment, run_hook=swift.run_hook), # noqa: E501
+ 'system': Language(name='system', ENVIRONMENT_DIR=system.ENVIRONMENT_DIR, get_default_version=system.get_default_version, healthy=system.healthy, install_environment=system.install_environment, run_hook=system.run_hook), # noqa: E501
+ # END GENERATED
+}
+all_languages = sorted(languages)
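A hedged dispatch sketch, assuming a `hook: Hook`, a `file_args` list, and a `color` flag are already in scope; it only exercises the table defined above.

    from pre_commit.languages.all import languages

    lang = languages[hook.language]
    if lang.ENVIRONMENT_DIR is not None:
        lang.install_environment(
            hook.prefix, hook.language_version, hook.additional_dependencies,
        )
    retcode, out = lang.run_hook(hook, file_args, color)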
diff --git a/pre_commit/languages/conda.py b/pre_commit/languages/conda.py
new file mode 100644
index 0000000..071757a
--- /dev/null
+++ b/pre_commit/languages/conda.py
@@ -0,0 +1,84 @@
+import contextlib
+import os
+from typing import Generator
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import SubstitutionT
+from pre_commit.envcontext import UNSET
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output_b
+
+ENVIRONMENT_DIR = 'conda'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+
+
+def get_env_patch(env: str) -> PatchesT:
+ # On non-Windows systems executables live in $CONDA_PREFIX/bin; on Windows
+ # they can be in $CONDA_PREFIX/bin, $CONDA_PREFIX/Library/bin,
+ # $CONDA_PREFIX/Scripts, or $CONDA_PREFIX itself, though the latter only
+ # seems to be used for python.exe.
+ path: SubstitutionT = (os.path.join(env, 'bin'), os.pathsep, Var('PATH'))
+ if os.name == 'nt': # pragma: no cover (platform specific)
+ path = (env, os.pathsep, *path)
+ path = (os.path.join(env, 'Scripts'), os.pathsep, *path)
+ path = (os.path.join(env, 'Library', 'bin'), os.pathsep, *path)
+
+ return (
+ ('PYTHONHOME', UNSET),
+ ('VIRTUAL_ENV', UNSET),
+ ('CONDA_PREFIX', env),
+ ('PATH', path),
+ )
+
+
+@contextlib.contextmanager
+def in_env(
+ prefix: Prefix,
+ language_version: str,
+) -> Generator[None, None, None]:
+ directory = helpers.environment_dir(ENVIRONMENT_DIR, language_version)
+ envdir = prefix.path(directory)
+ with envcontext(get_env_patch(envdir)):
+ yield
+
+
+def install_environment(
+ prefix: Prefix,
+ version: str,
+ additional_dependencies: Sequence[str],
+) -> None:
+ helpers.assert_version_default('conda', version)
+ directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
+
+ env_dir = prefix.path(directory)
+ with clean_path_on_failure(env_dir):
+ cmd_output_b(
+ 'conda', 'env', 'create', '-p', env_dir, '--file',
+ 'environment.yml', cwd=prefix.prefix_dir,
+ )
+ if additional_dependencies:
+ cmd_output_b(
+ 'conda', 'install', '-p', env_dir, *additional_dependencies,
+ cwd=prefix.prefix_dir,
+ )
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ # TODO: Some rare commands need to be run using `conda run`, but mostly we
+ # can run them without it, which is much quicker and produces better
+ # output.
+ # cmd = ('conda', 'run', '-p', env_dir) + hook.cmd
+ with in_env(hook.prefix, hook.language_version):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
diff --git a/pre_commit/languages/docker.py b/pre_commit/languages/docker.py
new file mode 100644
index 0000000..f449584
--- /dev/null
+++ b/pre_commit/languages/docker.py
@@ -0,0 +1,114 @@
+import hashlib
+import os
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import CalledProcessError
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output_b
+
+ENVIRONMENT_DIR = 'docker'
+PRE_COMMIT_LABEL = 'PRE_COMMIT'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+
+
+def md5(s: str) -> str: # pragma: win32 no cover
+ return hashlib.md5(s.encode()).hexdigest()
+
+
+def docker_tag(prefix: Prefix) -> str: # pragma: win32 no cover
+ md5sum = md5(os.path.basename(prefix.prefix_dir)).lower()
+ return f'pre-commit-{md5sum}'
+
+
+def docker_is_running() -> bool: # pragma: win32 no cover
+ try:
+ cmd_output_b('docker', 'ps')
+ except CalledProcessError:
+ return False
+ else:
+ return True
+
+
+def assert_docker_available() -> None: # pragma: win32 no cover
+ assert docker_is_running(), (
+ 'Docker is either not running or not configured in this environment'
+ )
+
+
+def build_docker_image(
+ prefix: Prefix,
+ *,
+ pull: bool,
+) -> None: # pragma: win32 no cover
+ cmd: Tuple[str, ...] = (
+ 'docker', 'build',
+ '--tag', docker_tag(prefix),
+ '--label', PRE_COMMIT_LABEL,
+ )
+ if pull:
+ cmd += ('--pull',)
+ # This must come last for old versions of docker. See #477
+ cmd += ('.',)
+ helpers.run_setup_cmd(prefix, cmd)
+
+
+def install_environment(
+ prefix: Prefix, version: str, additional_dependencies: Sequence[str],
+) -> None: # pragma: win32 no cover
+ helpers.assert_version_default('docker', version)
+ helpers.assert_no_additional_deps('docker', additional_dependencies)
+ assert_docker_available()
+
+ directory = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
+ )
+
+ # Docker doesn't really have a relevant on-disk environment, but pre-commit
+ # still needs to clean up its state files on failure.
+ with clean_path_on_failure(directory):
+ build_docker_image(prefix, pull=True)
+ os.mkdir(directory)
+
+
+def get_docker_user() -> str: # pragma: win32 no cover
+ try:
+ return f'{os.getuid()}:{os.getgid()}'
+ except AttributeError:
+ return '1000:1000'
+
+
+def docker_cmd() -> Tuple[str, ...]: # pragma: win32 no cover
+ return (
+ 'docker', 'run',
+ '--rm',
+ '-u', get_docker_user(),
+ # https://docs.docker.com/engine/reference/commandline/run/#mount-volumes-from-container-volumes-from
+ # The `Z` option tells Docker to label the content with a private
+ # unshared label. Only the current container can use a private volume.
+ '-v', f'{os.getcwd()}:/src:rw,Z',
+ '--workdir', '/src',
+ )
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]: # pragma: win32 no cover
+ assert_docker_available()
+ # Rebuild the docker image in case it has gone missing, as many people do
+ # automated cleanup of docker images.
+ build_docker_image(hook.prefix, pull=False)
+
+ hook_cmd = hook.cmd
+ entry_exe, cmd_rest = hook.cmd[0], hook_cmd[1:]
+
+ entry_tag = ('--entrypoint', entry_exe, docker_tag(hook.prefix))
+ cmd = docker_cmd() + entry_tag + cmd_rest
+ return helpers.run_xargs(hook, cmd, file_args, color=color)
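For orientation, a comment-only sketch of the command `run_hook` assembles for a hook whose entry is `flake8` (the uid and tag suffix are illustrative):

    # docker run --rm -u 1000:1000 -v "$PWD:/src:rw,Z" --workdir /src \
    #     --entrypoint flake8 pre-commit-<md5-of-repo-dir> <hook args> <filenames>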
diff --git a/pre_commit/languages/docker_image.py b/pre_commit/languages/docker_image.py
new file mode 100644
index 0000000..0c51df6
--- /dev/null
+++ b/pre_commit/languages/docker_image.py
@@ -0,0 +1,22 @@
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.languages.docker import assert_docker_available
+from pre_commit.languages.docker import docker_cmd
+
+ENVIRONMENT_DIR = None
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+install_environment = helpers.no_install
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]: # pragma: win32 no cover
+ assert_docker_available()
+ cmd = docker_cmd() + hook.cmd
+ return helpers.run_xargs(hook, cmd, file_args, color=color)
diff --git a/pre_commit/languages/fail.py b/pre_commit/languages/fail.py
new file mode 100644
index 0000000..d2b02d2
--- /dev/null
+++ b/pre_commit/languages/fail.py
@@ -0,0 +1,20 @@
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+
+ENVIRONMENT_DIR = None
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+install_environment = helpers.no_install
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ out = f'{hook.entry}\n\n'.encode()
+ out += b'\n'.join(f.encode() for f in file_args) + b'\n'
+ return 1, out
diff --git a/pre_commit/languages/golang.py b/pre_commit/languages/golang.py
new file mode 100644
index 0000000..91ade1e
--- /dev/null
+++ b/pre_commit/languages/golang.py
@@ -0,0 +1,97 @@
+import contextlib
+import os.path
+import sys
+from typing import Generator
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+from pre_commit.util import rmtree
+
+ENVIRONMENT_DIR = 'golangenv'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+
+
+def get_env_patch(venv: str) -> PatchesT:
+ return (
+ ('PATH', (os.path.join(venv, 'bin'), os.pathsep, Var('PATH'))),
+ )
+
+
+@contextlib.contextmanager
+def in_env(prefix: Prefix) -> Generator[None, None, None]:
+ envdir = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
+ )
+ with envcontext(get_env_patch(envdir)):
+ yield
+
+
+def guess_go_dir(remote_url: str) -> str:
+ if remote_url.endswith('.git'):
+ remote_url = remote_url[:-1 * len('.git')]
+ looks_like_url = (
+ not remote_url.startswith('file://') and
+ ('//' in remote_url or '@' in remote_url)
+ )
+ remote_url = remote_url.replace(':', '/')
+ if looks_like_url:
+ _, _, remote_url = remote_url.rpartition('//')
+ _, _, remote_url = remote_url.rpartition('@')
+ return remote_url
+ else:
+ return 'unknown_src_dir'
+
+
+def install_environment(
+ prefix: Prefix,
+ version: str,
+ additional_dependencies: Sequence[str],
+) -> None:
+ helpers.assert_version_default('golang', version)
+ directory = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
+ )
+
+ with clean_path_on_failure(directory):
+ remote = git.get_remote_url(prefix.prefix_dir)
+ repo_src_dir = os.path.join(directory, 'src', guess_go_dir(remote))
+
+ # Clone into the goenv we'll create
+ helpers.run_setup_cmd(prefix, ('git', 'clone', '.', repo_src_dir))
+
+ if sys.platform == 'cygwin': # pragma: no cover
+ _, gopath, _ = cmd_output('cygpath', '-w', directory)
+ gopath = gopath.strip()
+ else:
+ gopath = directory
+ env = dict(os.environ, GOPATH=gopath)
+ env.pop('GOBIN', None)
+ cmd_output_b('go', 'get', './...', cwd=repo_src_dir, env=env)
+ for dependency in additional_dependencies:
+ cmd_output_b('go', 'get', dependency, cwd=repo_src_dir, env=env)
+ # Save some disk space; we don't need these after installation.
+ rmtree(prefix.path(directory, 'src'))
+ pkgdir = prefix.path(directory, 'pkg')
+ if os.path.exists(pkgdir): # pragma: no cover (go<1.10)
+ rmtree(pkgdir)
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ with in_env(hook.prefix):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
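Example outputs of `guess_go_dir` for a few remote shapes (the URLs are illustrative):

    from pre_commit.languages.golang import guess_go_dir

    guess_go_dir('https://github.com/pre-commit/pre-commit.git')
    # -> 'github.com/pre-commit/pre-commit'
    guess_go_dir('git@github.com:pre-commit/pre-commit.git')
    # -> 'github.com/pre-commit/pre-commit'
    guess_go_dir('/some/local/checkout')
    # -> 'unknown_src_dir'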
diff --git a/pre_commit/languages/helpers.py b/pre_commit/languages/helpers.py
new file mode 100644
index 0000000..b5c95e5
--- /dev/null
+++ b/pre_commit/languages/helpers.py
@@ -0,0 +1,109 @@
+import multiprocessing
+import os
+import random
+from typing import Any
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+
+import pre_commit.constants as C
+from pre_commit.hook import Hook
+from pre_commit.prefix import Prefix
+from pre_commit.util import cmd_output_b
+from pre_commit.xargs import xargs
+
+if TYPE_CHECKING:
+ from typing import NoReturn
+
+FIXED_RANDOM_SEED = 1542676186
+
+
+def run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None:
+ cmd_output_b(*cmd, cwd=prefix.prefix_dir)
+
+
+@overload
+def environment_dir(d: None, language_version: str) -> None: ...
+@overload
+def environment_dir(d: str, language_version: str) -> str: ...
+
+
+def environment_dir(d: Optional[str], language_version: str) -> Optional[str]:
+ if d is None:
+ return None
+ else:
+ return f'{d}-{language_version}'
+
+
+def assert_version_default(binary: str, version: str) -> None:
+ if version != C.DEFAULT:
+ raise AssertionError(
+ f'For now, pre-commit requires system-installed {binary}',
+ )
+
+
+def assert_no_additional_deps(
+ lang: str,
+ additional_deps: Sequence[str],
+) -> None:
+ if additional_deps:
+ raise AssertionError(
+ f'For now, pre-commit does not support '
+ f'additional_dependencies for {lang}',
+ )
+
+
+def basic_get_default_version() -> str:
+ return C.DEFAULT
+
+
+def basic_healthy(prefix: Prefix, language_version: str) -> bool:
+ return True
+
+
+def no_install(
+ prefix: Prefix,
+ version: str,
+ additional_dependencies: Sequence[str],
+) -> 'NoReturn':
+ raise AssertionError('This type is not installable')
+
+
+def target_concurrency(hook: Hook) -> int:
+ if hook.require_serial or 'PRE_COMMIT_NO_CONCURRENCY' in os.environ:
+ return 1
+ else:
+ # Travis appears to have a bunch of CPUs, but we can't use them all.
+ if 'TRAVIS' in os.environ:
+ return 2
+ else:
+ try:
+ return multiprocessing.cpu_count()
+ except NotImplementedError:
+ return 1
+
+
+def _shuffled(seq: Sequence[str]) -> List[str]:
+ """Deterministically shuffle"""
+ fixed_random = random.Random()
+ fixed_random.seed(FIXED_RANDOM_SEED, version=1)
+
+ seq = list(seq)
+ random.shuffle(seq, random=fixed_random.random)
+ return seq
+
+
+def run_xargs(
+ hook: Hook,
+ cmd: Tuple[str, ...],
+ file_args: Sequence[str],
+ **kwargs: Any,
+) -> Tuple[int, bytes]:
+ # Shuffle the files so that they more evenly fill out the xargs partitions,
+ # but do it deterministically in case a hook cares about ordering.
+ file_args = _shuffled(file_args)
+ kwargs['target_concurrency'] = target_concurrency(hook)
+ return xargs(cmd, file_args, **kwargs)
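Two properties worth noting, shown as a small sketch: the shuffle is deterministic across runs (fixed seed), and serial hooks or PRE_COMMIT_NO_CONCURRENCY pin concurrency to 1.

    from pre_commit.languages import helpers

    files = ['a.py', 'b.py', 'c.py']
    assert helpers._shuffled(files) == helpers._shuffled(files)  # same seed, same order

    # target_concurrency(hook) returns 1 when hook.require_serial is set or
    # PRE_COMMIT_NO_CONCURRENCY is in the environment; otherwise roughly cpu_count()
    # (capped at 2 on Travis).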
diff --git a/pre_commit/languages/node.py b/pre_commit/languages/node.py
new file mode 100644
index 0000000..79ff807
--- /dev/null
+++ b/pre_commit/languages/node.py
@@ -0,0 +1,93 @@
+import contextlib
+import os
+import sys
+from typing import Generator
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.languages.python import bin_dir
+from pre_commit.prefix import Prefix
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+
+ENVIRONMENT_DIR = 'node_env'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+
+
+def _envdir(prefix: Prefix, version: str) -> str:
+ directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
+ return prefix.path(directory)
+
+
+def get_env_patch(venv: str) -> PatchesT:
+ if sys.platform == 'cygwin': # pragma: no cover
+ _, win_venv, _ = cmd_output('cygpath', '-w', venv)
+ install_prefix = fr'{win_venv.strip()}\bin'
+ lib_dir = 'lib'
+ elif sys.platform == 'win32': # pragma: no cover
+ install_prefix = bin_dir(venv)
+ lib_dir = 'Scripts'
+ else: # pragma: win32 no cover
+ install_prefix = venv
+ lib_dir = 'lib'
+ return (
+ ('NODE_VIRTUAL_ENV', venv),
+ ('NPM_CONFIG_PREFIX', install_prefix),
+ ('npm_config_prefix', install_prefix),
+ ('NODE_PATH', os.path.join(venv, lib_dir, 'node_modules')),
+ ('PATH', (bin_dir(venv), os.pathsep, Var('PATH'))),
+ )
+
+
+@contextlib.contextmanager
+def in_env(
+ prefix: Prefix,
+ language_version: str,
+) -> Generator[None, None, None]:
+ with envcontext(get_env_patch(_envdir(prefix, language_version))):
+ yield
+
+
+def install_environment(
+ prefix: Prefix, version: str, additional_dependencies: Sequence[str],
+) -> None:
+ additional_dependencies = tuple(additional_dependencies)
+ assert prefix.exists('package.json')
+ envdir = _envdir(prefix, version)
+
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx?f=255&MSPPError=-2147217396#maxpath
+ if sys.platform == 'win32': # pragma: no cover
+ envdir = f'\\\\?\\{os.path.normpath(envdir)}'
+ with clean_path_on_failure(envdir):
+ cmd = [
+ sys.executable, '-mnodeenv', '--prebuilt', '--clean-src', envdir,
+ ]
+ if version != C.DEFAULT:
+ cmd.extend(['-n', version])
+ cmd_output_b(*cmd)
+
+ with in_env(prefix, version):
+ # https://npm.community/t/npm-install-g-git-vs-git-clone-cd-npm-install-g/5449
+ # install as if we installed from git
+ helpers.run_setup_cmd(prefix, ('npm', 'install'))
+ helpers.run_setup_cmd(
+ prefix,
+ ('npm', 'install', '-g', '.', *additional_dependencies),
+ )
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ with in_env(hook.prefix, hook.language_version):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
diff --git a/pre_commit/languages/perl.py b/pre_commit/languages/perl.py
new file mode 100644
index 0000000..bbf5504
--- /dev/null
+++ b/pre_commit/languages/perl.py
@@ -0,0 +1,67 @@
+import contextlib
+import os
+import shlex
+from typing import Generator
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import clean_path_on_failure
+
+ENVIRONMENT_DIR = 'perl_env'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+
+
+def _envdir(prefix: Prefix, version: str) -> str:
+ directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
+ return prefix.path(directory)
+
+
+def get_env_patch(venv: str) -> PatchesT:
+ return (
+ ('PATH', (os.path.join(venv, 'bin'), os.pathsep, Var('PATH'))),
+ ('PERL5LIB', os.path.join(venv, 'lib', 'perl5')),
+ ('PERL_MB_OPT', f'--install_base {shlex.quote(venv)}'),
+ (
+ 'PERL_MM_OPT', (
+ f'INSTALL_BASE={shlex.quote(venv)} '
+ f'INSTALLSITEMAN1DIR=none INSTALLSITEMAN3DIR=none'
+ ),
+ ),
+ )
+
+
+@contextlib.contextmanager
+def in_env(
+ prefix: Prefix,
+ language_version: str,
+) -> Generator[None, None, None]:
+ with envcontext(get_env_patch(_envdir(prefix, language_version))):
+ yield
+
+
+def install_environment(
+ prefix: Prefix, version: str, additional_dependencies: Sequence[str],
+) -> None:
+ helpers.assert_version_default('perl', version)
+
+ with clean_path_on_failure(_envdir(prefix, version)):
+ with in_env(prefix, version):
+ helpers.run_setup_cmd(
+ prefix, ('cpan', '-T', '.', *additional_dependencies),
+ )
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ with in_env(hook.prefix, hook.language_version):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
diff --git a/pre_commit/languages/pygrep.py b/pre_commit/languages/pygrep.py
new file mode 100644
index 0000000..40adba0
--- /dev/null
+++ b/pre_commit/languages/pygrep.py
@@ -0,0 +1,87 @@
+import argparse
+import re
+import sys
+from typing import Optional
+from typing import Pattern
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit import output
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.xargs import xargs
+
+ENVIRONMENT_DIR = None
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+install_environment = helpers.no_install
+
+
+def _process_filename_by_line(pattern: Pattern[bytes], filename: str) -> int:
+ retv = 0
+ with open(filename, 'rb') as f:
+ for line_no, line in enumerate(f, start=1):
+ if pattern.search(line):
+ retv = 1
+ output.write(f'{filename}:{line_no}:')
+ output.write_line_b(line.rstrip(b'\r\n'))
+ return retv
+
+
+def _process_filename_at_once(pattern: Pattern[bytes], filename: str) -> int:
+ retv = 0
+ with open(filename, 'rb') as f:
+ contents = f.read()
+ match = pattern.search(contents)
+ if match:
+ retv = 1
+ line_no = contents[:match.start()].count(b'\n')
+ output.write(f'{filename}:{line_no + 1}:')
+
+ matched_lines = match[0].split(b'\n')
+ matched_lines[0] = contents.split(b'\n')[line_no]
+
+ output.write_line_b(b'\n'.join(matched_lines))
+ return retv
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ exe = (sys.executable, '-m', __name__) + tuple(hook.args) + (hook.entry,)
+ return xargs(exe, file_args, color=color)
+
+
+def main(argv: Optional[Sequence[str]] = None) -> int:
+ parser = argparse.ArgumentParser(
+ description=(
+ 'grep-like finder using python regexes. Unlike grep, this tool '
+ 'returns nonzero when it finds a match and zero otherwise. The '
+ 'idea is that matches are "problems".'
+ ),
+ )
+ parser.add_argument('-i', '--ignore-case', action='store_true')
+ parser.add_argument('--multiline', action='store_true')
+ parser.add_argument('pattern', help='python regex pattern.')
+ parser.add_argument('filenames', nargs='*')
+ args = parser.parse_args(argv)
+
+ flags = re.IGNORECASE if args.ignore_case else 0
+ if args.multiline:
+ flags |= re.MULTILINE | re.DOTALL
+
+ pattern = re.compile(args.pattern.encode(), flags)
+
+ retv = 0
+ for filename in args.filenames:
+ if args.multiline:
+ retv |= _process_filename_at_once(pattern, filename)
+ else:
+ retv |= _process_filename_by_line(pattern, filename)
+ return retv
+
+
+if __name__ == '__main__':
+ exit(main())
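An illustrative invocation of the CLI entry point (the filenames are hypothetical); the module is also runnable as `python -m pre_commit.languages.pygrep`.

    from pre_commit.languages import pygrep

    # returns 1 and prints "path:line:" plus the offending line for each match,
    # 0 when nothing matches
    ret = pygrep.main(['--ignore-case', r'pdb\.set_trace', 'a.py', 'b.py'])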
diff --git a/pre_commit/languages/python.py b/pre_commit/languages/python.py
new file mode 100644
index 0000000..5073a8b
--- /dev/null
+++ b/pre_commit/languages/python.py
@@ -0,0 +1,210 @@
+import contextlib
+import functools
+import os
+import sys
+from typing import Callable
+from typing import ContextManager
+from typing import Generator
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import UNSET
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.parse_shebang import find_executable
+from pre_commit.prefix import Prefix
+from pre_commit.util import CalledProcessError
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+
+ENVIRONMENT_DIR = 'py_env'
+
+
+def bin_dir(venv: str) -> str:
+ """On windows there's a different directory for the virtualenv"""
+ bin_part = 'Scripts' if os.name == 'nt' else 'bin'
+ return os.path.join(venv, bin_part)
+
+
+def get_env_patch(venv: str) -> PatchesT:
+ return (
+ ('PYTHONHOME', UNSET),
+ ('VIRTUAL_ENV', venv),
+ ('PATH', (bin_dir(venv), os.pathsep, Var('PATH'))),
+ )
+
+
+def _find_by_py_launcher(
+ version: str,
+) -> Optional[str]: # pragma: no cover (windows only)
+ if version.startswith('python'):
+ num = version[len('python'):]
+ try:
+ cmd = ('py', f'-{num}', '-c', 'import sys; print(sys.executable)')
+ return cmd_output(*cmd)[1].strip()
+ except CalledProcessError:
+ pass
+ return None
+
+
+def _find_by_sys_executable() -> Optional[str]:
+ def _norm(path: str) -> Optional[str]:
+ _, exe = os.path.split(path.lower())
+ exe, _, _ = exe.partition('.exe')
+ if exe not in {'python', 'pythonw'} and find_executable(exe):
+ return exe
+ return None
+
+ # On linux, I see these common sys.executables:
+ #
+ # system `python`: /usr/bin/python -> python2.7
+ # system `python2`: /usr/bin/python2 -> python2.7
+ # virtualenv v: v/bin/python (will not return from this loop)
+ # virtualenv v -ppython2: v/bin/python -> python2
+ # virtualenv v -ppython2.7: v/bin/python -> python2.7
+ # virtualenv v -ppypy: v/bin/python -> v/bin/pypy
+ for path in (sys.executable, os.path.realpath(sys.executable)):
+ exe = _norm(path)
+ if exe:
+ return exe
+ return None
+
+
+@functools.lru_cache(maxsize=1)
+def get_default_version() -> str: # pragma: no cover (platform dependent)
+ # First attempt from `sys.executable` (or the realpath)
+ exe = _find_by_sys_executable()
+ if exe:
+ return exe
+
+ # Next try the `pythonX.X` executable
+ exe = f'python{sys.version_info[0]}.{sys.version_info[1]}'
+ if find_executable(exe):
+ return exe
+
+ if _find_by_py_launcher(exe):
+ return exe
+
+ # Give a best-effort try for windows
+ default_folder_name = exe.replace('.', '')
+ if os.path.exists(fr'C:\{default_folder_name}\python.exe'):
+ return exe
+
+ # We tried!
+ return C.DEFAULT
+
+
+def _sys_executable_matches(version: str) -> bool:
+ if version == 'python':
+ return True
+ elif not version.startswith('python'):
+ return False
+
+ try:
+ info = tuple(int(p) for p in version[len('python'):].split('.'))
+ except ValueError:
+ return False
+
+ return sys.version_info[:len(info)] == info
+
+
+def norm_version(version: str) -> str:
+ # first see if our current executable is appropriate
+ if _sys_executable_matches(version):
+ return sys.executable
+
+ if os.name == 'nt': # pragma: no cover (windows)
+ version_exec = _find_by_py_launcher(version)
+ if version_exec:
+ return version_exec
+
+ # Try looking up by name
+ version_exec = find_executable(version)
+ if version_exec and version_exec != version:
+ return version_exec
+
+ # If it is in the form pythonX.Y, search in the default
+ # install location on windows
+ if version.startswith('python'):
+ default_folder_name = version.replace('.', '')
+ return fr'C:\{default_folder_name}\python.exe'
+
+ # Otherwise assume it is a path
+ return os.path.expanduser(version)
+
+
+def py_interface(
+ _dir: str,
+ _make_venv: Callable[[str, str], None],
+) -> Tuple[
+ Callable[[Prefix, str], ContextManager[None]],
+ Callable[[Prefix, str], bool],
+ Callable[[Hook, Sequence[str], bool], Tuple[int, bytes]],
+ Callable[[Prefix, str, Sequence[str]], None],
+]:
+ @contextlib.contextmanager
+ def in_env(
+ prefix: Prefix,
+ language_version: str,
+ ) -> Generator[None, None, None]:
+ envdir = prefix.path(helpers.environment_dir(_dir, language_version))
+ with envcontext(get_env_patch(envdir)):
+ yield
+
+ def healthy(prefix: Prefix, language_version: str) -> bool:
+ envdir = helpers.environment_dir(_dir, language_version)
+ exe_name = 'python.exe' if sys.platform == 'win32' else 'python'
+ py_exe = prefix.path(bin_dir(envdir), exe_name)
+ with in_env(prefix, language_version):
+ retcode, _, _ = cmd_output_b(
+ py_exe, '-c', 'import ctypes, datetime, io, os, ssl, weakref',
+ cwd='/',
+ retcode=None,
+ )
+ return retcode == 0
+
+ def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+ ) -> Tuple[int, bytes]:
+ with in_env(hook.prefix, hook.language_version):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
+
+ def install_environment(
+ prefix: Prefix,
+ version: str,
+ additional_dependencies: Sequence[str],
+ ) -> None:
+ additional_dependencies = tuple(additional_dependencies)
+ directory = helpers.environment_dir(_dir, version)
+
+ env_dir = prefix.path(directory)
+ with clean_path_on_failure(env_dir):
+ if version != C.DEFAULT:
+ python = norm_version(version)
+ else:
+ python = os.path.realpath(sys.executable)
+ _make_venv(env_dir, python)
+ with in_env(prefix, version):
+ helpers.run_setup_cmd(
+ prefix, ('pip', 'install', '.') + additional_dependencies,
+ )
+
+ return in_env, healthy, run_hook, install_environment
+
+
+def make_venv(envdir: str, python: str) -> None:
+ env = dict(os.environ, VIRTUALENV_NO_DOWNLOAD='1')
+ cmd = (sys.executable, '-mvirtualenv', envdir, '-p', python)
+ cmd_output_b(*cmd, env=env, cwd='/')
+
+
+_interface = py_interface(ENVIRONMENT_DIR, make_venv)
+in_env, healthy, run_hook, install_environment = _interface
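A small sketch of `_sys_executable_matches`, the first resolution step in `norm_version` (the results assume the current interpreter is a CPython 3.x):

    from pre_commit.languages.python import _sys_executable_matches

    _sys_executable_matches('python')        # True
    _sys_executable_matches('python3')       # True on any python 3.x
    _sys_executable_matches('python3.1000')  # False
    _sys_executable_matches('notpython')     # False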
diff --git a/pre_commit/languages/python_venv.py b/pre_commit/languages/python_venv.py
new file mode 100644
index 0000000..5404c8b
--- /dev/null
+++ b/pre_commit/languages/python_venv.py
@@ -0,0 +1,46 @@
+import os.path
+
+from pre_commit.languages import python
+from pre_commit.util import CalledProcessError
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+
+ENVIRONMENT_DIR = 'py_venv'
+get_default_version = python.get_default_version
+
+
+def orig_py_exe(exe: str) -> str: # pragma: no cover (platform specific)
+ """A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
+ packages to the incorrect location. Attempt to find the _original_ exe
+ and invoke `-mvenv` from there.
+
+ See:
+ - https://github.com/pre-commit/pre-commit/issues/755
+ - https://github.com/pypa/virtualenv/issues/1095
+ - https://bugs.python.org/issue30811
+ """
+ try:
+ prefix_script = 'import sys; print(sys.real_prefix)'
+ _, prefix, _ = cmd_output(exe, '-c', prefix_script)
+ prefix = prefix.strip()
+ except CalledProcessError:
+ # not created from -mvirtualenv
+ return exe
+
+ if os.name == 'nt':
+ expected = os.path.join(prefix, 'python.exe')
+ else:
+ expected = os.path.join(prefix, 'bin', os.path.basename(exe))
+
+ if os.path.exists(expected):
+ return expected
+ else:
+ return exe
+
+
+def make_venv(envdir: str, python: str) -> None:
+ cmd_output_b(orig_py_exe(python), '-mvenv', envdir, cwd='/')
+
+
+_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)
+in_env, healthy, run_hook, install_environment = _interface
diff --git a/pre_commit/languages/ruby.py b/pre_commit/languages/ruby.py
new file mode 100644
index 0000000..61241f8
--- /dev/null
+++ b/pre_commit/languages/ruby.py
@@ -0,0 +1,126 @@
+import contextlib
+import os.path
+import shutil
+import tarfile
+from typing import Generator
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import CalledProcessError
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import resource_bytesio
+
+ENVIRONMENT_DIR = 'rbenv'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+
+
+def get_env_patch(
+ venv: str,
+ language_version: str,
+) -> PatchesT: # pragma: win32 no cover
+ patches: PatchesT = (
+ ('GEM_HOME', os.path.join(venv, 'gems')),
+ ('RBENV_ROOT', venv),
+ ('BUNDLE_IGNORE_CONFIG', '1'),
+ (
+ 'PATH', (
+ os.path.join(venv, 'gems', 'bin'), os.pathsep,
+ os.path.join(venv, 'shims'), os.pathsep,
+ os.path.join(venv, 'bin'), os.pathsep, Var('PATH'),
+ ),
+ ),
+ )
+ if language_version != C.DEFAULT:
+ patches += (('RBENV_VERSION', language_version),)
+ return patches
+
+
+@contextlib.contextmanager # pragma: win32 no cover
+def in_env(
+ prefix: Prefix,
+ language_version: str,
+) -> Generator[None, None, None]:
+ envdir = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, language_version),
+ )
+ with envcontext(get_env_patch(envdir, language_version)):
+ yield
+
+
+def _extract_resource(filename: str, dest: str) -> None:
+ with resource_bytesio(filename) as bio:
+ with tarfile.open(fileobj=bio) as tf:
+ tf.extractall(dest)
+
+
+def _install_rbenv(
+ prefix: Prefix,
+ version: str = C.DEFAULT,
+) -> None: # pragma: win32 no cover
+ directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
+
+ _extract_resource('rbenv.tar.gz', prefix.path('.'))
+ shutil.move(prefix.path('rbenv'), prefix.path(directory))
+
+ # Only install ruby-build if the version is specified
+ if version != C.DEFAULT:
+ plugins_dir = prefix.path(directory, 'plugins')
+ _extract_resource('ruby-download.tar.gz', plugins_dir)
+ _extract_resource('ruby-build.tar.gz', plugins_dir)
+
+
+def _install_ruby(
+ prefix: Prefix,
+ version: str,
+) -> None: # pragma: win32 no cover
+ try:
+ helpers.run_setup_cmd(prefix, ('rbenv', 'download', version))
+ except CalledProcessError: # pragma: no cover (usually found via download)
+ # Failed to download from mirror for some reason, build it instead
+ helpers.run_setup_cmd(prefix, ('rbenv', 'install', version))
+
+
+def install_environment(
+ prefix: Prefix, version: str, additional_dependencies: Sequence[str],
+) -> None: # pragma: win32 no cover
+ additional_dependencies = tuple(additional_dependencies)
+ directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
+ with clean_path_on_failure(prefix.path(directory)):
+ # TODO: this currently will fail if there's no version specified and
+ # there's no system ruby installed. Is this ok?
+ _install_rbenv(prefix, version=version)
+ with in_env(prefix, version):
+ # Need to call this before installing so rbenv's directories are
+ # set up
+ helpers.run_setup_cmd(prefix, ('rbenv', 'init', '-'))
+ if version != C.DEFAULT:
+ _install_ruby(prefix, version)
+ # Need to call this after installing to set up the shims
+ helpers.run_setup_cmd(prefix, ('rbenv', 'rehash'))
+ helpers.run_setup_cmd(
+ prefix, ('gem', 'build', *prefix.star('.gemspec')),
+ )
+ helpers.run_setup_cmd(
+ prefix,
+ (
+ 'gem', 'install', '--no-document',
+ *prefix.star('.gem'), *additional_dependencies,
+ ),
+ )
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]: # pragma: win32 no cover
+ with in_env(hook.prefix, hook.language_version):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
diff --git a/pre_commit/languages/rust.py b/pre_commit/languages/rust.py
new file mode 100644
index 0000000..7ea3f54
--- /dev/null
+++ b/pre_commit/languages/rust.py
@@ -0,0 +1,106 @@
+import contextlib
+import os.path
+from typing import Generator
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+
+import toml
+
+import pre_commit.constants as C
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output_b
+
+ENVIRONMENT_DIR = 'rustenv'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+
+
+def get_env_patch(target_dir: str) -> PatchesT:
+ return (
+ ('PATH', (os.path.join(target_dir, 'bin'), os.pathsep, Var('PATH'))),
+ )
+
+
+@contextlib.contextmanager
+def in_env(prefix: Prefix) -> Generator[None, None, None]:
+ target_dir = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
+ )
+ with envcontext(get_env_patch(target_dir)):
+ yield
+
+
+def _add_dependencies(
+ cargo_toml_path: str,
+ additional_dependencies: Set[str],
+) -> None:
+ with open(cargo_toml_path, 'r+') as f:
+ cargo_toml = toml.load(f)
+ cargo_toml.setdefault('dependencies', {})
+ for dep in additional_dependencies:
+ name, _, spec = dep.partition(':')
+ cargo_toml['dependencies'][name] = spec or '*'
+ f.seek(0)
+ toml.dump(cargo_toml, f)
+ f.truncate()
+
+
+def install_environment(
+ prefix: Prefix,
+ version: str,
+ additional_dependencies: Sequence[str],
+) -> None:
+ helpers.assert_version_default('rust', version)
+ directory = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
+ )
+
+ # There are two cases where we might want to specify more dependencies:
+ # as dependencies for the library being built, and as binary packages
+ # to be `cargo install`'d.
+ #
+ # Unlike e.g. Python, if we just `cargo install` a library, it won't be
+ # used for compilation. And if we add a crate providing a binary to the
+ # `Cargo.toml`, the binary won't be built.
+ #
+ # Because of this, we allow specifying "cli" dependencies by prefixing
+ # with 'cli:'.
+ cli_deps = {
+ dep for dep in additional_dependencies if dep.startswith('cli:')
+ }
+ lib_deps = set(additional_dependencies) - cli_deps
+
+ if len(lib_deps) > 0:
+ _add_dependencies(prefix.path('Cargo.toml'), lib_deps)
+
+ with clean_path_on_failure(directory):
+ packages_to_install: Set[Tuple[str, ...]] = {('--path', '.')}
+ for cli_dep in cli_deps:
+ cli_dep = cli_dep[len('cli:'):]
+ package, _, version = cli_dep.partition(':')
+ if version != '':
+ packages_to_install.add((package, '--version', version))
+ else:
+ packages_to_install.add((package,))
+
+ for args in packages_to_install:
+ cmd_output_b(
+ 'cargo', 'install', '--bins', '--root', directory, *args,
+ cwd=prefix.prefix_dir,
+ )
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ with in_env(hook.prefix):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
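A sketch of how `additional_dependencies` are partitioned into library and cli packages (the dependency names are illustrative):

    deps = ['cli:shellharden:3.1.0', 'serde']
    cli_deps = {d for d in deps if d.startswith('cli:')}  # {'cli:shellharden:3.1.0'}
    lib_deps = set(deps) - cli_deps                       # {'serde'} -> written into Cargo.toml
    package, _, version = 'cli:shellharden:3.1.0'[len('cli:'):].partition(':')
    # package == 'shellharden', version == '3.1.0'
    # -> cargo install --bins --root <env dir> shellharden --version 3.1.0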
diff --git a/pre_commit/languages/script.py b/pre_commit/languages/script.py
new file mode 100644
index 0000000..a5e1365
--- /dev/null
+++ b/pre_commit/languages/script.py
@@ -0,0 +1,19 @@
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+
+ENVIRONMENT_DIR = None
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+install_environment = helpers.no_install
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ cmd = (hook.prefix.path(hook.cmd[0]), *hook.cmd[1:])
+ return helpers.run_xargs(hook, cmd, file_args, color=color)
diff --git a/pre_commit/languages/swift.py b/pre_commit/languages/swift.py
new file mode 100644
index 0000000..66aadc8
--- /dev/null
+++ b/pre_commit/languages/swift.py
@@ -0,0 +1,64 @@
+import contextlib
+import os
+from typing import Generator
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import Var
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output_b
+
+ENVIRONMENT_DIR = 'swift_env'
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+BUILD_DIR = '.build'
+BUILD_CONFIG = 'release'
+
+
+def get_env_patch(venv: str) -> PatchesT: # pragma: win32 no cover
+ bin_path = os.path.join(venv, BUILD_DIR, BUILD_CONFIG)
+ return (('PATH', (bin_path, os.pathsep, Var('PATH'))),)
+
+
+@contextlib.contextmanager # pragma: win32 no cover
+def in_env(prefix: Prefix) -> Generator[None, None, None]:
+ envdir = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
+ )
+ with envcontext(get_env_patch(envdir)):
+ yield
+
+
+def install_environment(
+ prefix: Prefix, version: str, additional_dependencies: Sequence[str],
+) -> None: # pragma: win32 no cover
+ helpers.assert_version_default('swift', version)
+ helpers.assert_no_additional_deps('swift', additional_dependencies)
+ directory = prefix.path(
+ helpers.environment_dir(ENVIRONMENT_DIR, C.DEFAULT),
+ )
+
+ # Build the swift package
+ with clean_path_on_failure(directory):
+ os.mkdir(directory)
+ cmd_output_b(
+ 'swift', 'build',
+ '-C', prefix.prefix_dir,
+ '-c', BUILD_CONFIG,
+ '--build-path', os.path.join(directory, BUILD_DIR),
+ )
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]: # pragma: win32 no cover
+ with in_env(hook.prefix):
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
diff --git a/pre_commit/languages/system.py b/pre_commit/languages/system.py
new file mode 100644
index 0000000..139f45d
--- /dev/null
+++ b/pre_commit/languages/system.py
@@ -0,0 +1,19 @@
+from typing import Sequence
+from typing import Tuple
+
+from pre_commit.hook import Hook
+from pre_commit.languages import helpers
+
+
+ENVIRONMENT_DIR = None
+get_default_version = helpers.basic_get_default_version
+healthy = helpers.basic_healthy
+install_environment = helpers.no_install
+
+
+def run_hook(
+ hook: Hook,
+ file_args: Sequence[str],
+ color: bool,
+) -> Tuple[int, bytes]:
+ return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
diff --git a/pre_commit/logging_handler.py b/pre_commit/logging_handler.py
new file mode 100644
index 0000000..ba05295
--- /dev/null
+++ b/pre_commit/logging_handler.py
@@ -0,0 +1,40 @@
+import contextlib
+import logging
+from typing import Generator
+
+from pre_commit import color
+from pre_commit import output
+
+logger = logging.getLogger('pre_commit')
+
+LOG_LEVEL_COLORS = {
+ 'DEBUG': '',
+ 'INFO': '',
+ 'WARNING': color.YELLOW,
+ 'ERROR': color.RED,
+}
+
+
+class LoggingHandler(logging.Handler):
+ def __init__(self, use_color: bool) -> None:
+ super().__init__()
+ self.use_color = use_color
+
+ def emit(self, record: logging.LogRecord) -> None:
+ level_msg = color.format_color(
+ f'[{record.levelname}]',
+ LOG_LEVEL_COLORS[record.levelname],
+ self.use_color,
+ )
+ output.write_line(f'{level_msg} {record.getMessage()}')
+
+
+@contextlib.contextmanager
+def logging_handler(use_color: bool) -> Generator[None, None, None]:
+ handler = LoggingHandler(use_color)
+ logger.addHandler(handler)
+ logger.setLevel(logging.INFO)
+ try:
+ yield
+ finally:
+ logger.removeHandler(handler)
diff --git a/pre_commit/main.py b/pre_commit/main.py
new file mode 100644
index 0000000..790b347
--- /dev/null
+++ b/pre_commit/main.py
@@ -0,0 +1,410 @@
+import argparse
+import logging
+import os
+import sys
+from typing import Any
+from typing import Optional
+from typing import Sequence
+from typing import Union
+
+import pre_commit.constants as C
+from pre_commit import color
+from pre_commit import git
+from pre_commit.commands.autoupdate import autoupdate
+from pre_commit.commands.clean import clean
+from pre_commit.commands.gc import gc
+from pre_commit.commands.hook_impl import hook_impl
+from pre_commit.commands.init_templatedir import init_templatedir
+from pre_commit.commands.install_uninstall import install
+from pre_commit.commands.install_uninstall import install_hooks
+from pre_commit.commands.install_uninstall import uninstall
+from pre_commit.commands.migrate_config import migrate_config
+from pre_commit.commands.run import run
+from pre_commit.commands.sample_config import sample_config
+from pre_commit.commands.try_repo import try_repo
+from pre_commit.error_handler import error_handler
+from pre_commit.error_handler import FatalError
+from pre_commit.logging_handler import logging_handler
+from pre_commit.store import Store
+from pre_commit.util import CalledProcessError
+
+
+logger = logging.getLogger('pre_commit')
+
+# https://github.com/pre-commit/pre-commit/issues/217
+# On OSX, making a virtualenv using pyvenv at . causes `virtualenv` and `pip`
+# to install packages to the wrong place. We don't want anything to deal with
+# pyvenv
+os.environ.pop('__PYVENV_LAUNCHER__', None)
+
+
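+# these commands don't need a git checkout, so we skip resolving and
+# chdir-ing to the repository root for them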
+COMMANDS_NO_GIT = {'clean', 'gc', 'init-templatedir', 'sample-config'}
+
+
+def _add_color_option(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument(
+ '--color', default=os.environ.get('PRE_COMMIT_COLOR', 'auto'),
+ type=color.use_color,
+ metavar='{' + ','.join(color.COLOR_CHOICES) + '}',
+ help='Whether to use color in output. Defaults to `%(default)s`.',
+ )
+
+
+def _add_config_option(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument(
+ '-c', '--config', default=C.CONFIG_FILE,
+ help='Path to alternate config file',
+ )
+
+
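+# like `action='append'`, except the configured default is discarded as soon
+# as a value is supplied on the command line (otherwise an explicit
+# `--hook-type` would also keep the implicit `pre-commit` default)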
+class AppendReplaceDefault(argparse.Action):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.appended = False
+
+ def __call__(
+ self,
+ parser: argparse.ArgumentParser,
+ namespace: argparse.Namespace,
+ values: Union[str, Sequence[str], None],
+ option_string: Optional[str] = None,
+ ) -> None:
+ if not self.appended:
+ setattr(namespace, self.dest, [])
+ self.appended = True
+ getattr(namespace, self.dest).append(values)
+
+
+def _add_hook_type_option(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument(
+ '-t', '--hook-type', choices=(
+ 'pre-commit', 'pre-merge-commit', 'pre-push',
+ 'prepare-commit-msg', 'commit-msg', 'post-checkout',
+ ),
+ action=AppendReplaceDefault,
+ default=['pre-commit'],
+ dest='hook_types',
+ )
+
+
+def _add_run_options(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument('hook', nargs='?', help='A single hook-id to run')
+ parser.add_argument('--verbose', '-v', action='store_true', default=False)
+ mutex_group = parser.add_mutually_exclusive_group(required=False)
+ mutex_group.add_argument(
+ '--all-files', '-a', action='store_true', default=False,
+ help='Run on all the files in the repo.',
+ )
+ mutex_group.add_argument(
+ '--files', nargs='*', default=[],
+ help='Specific filenames to run hooks on.',
+ )
+ parser.add_argument(
+ '--show-diff-on-failure', action='store_true',
+ help='When hooks fail, run `git diff` directly afterward.',
+ )
+ parser.add_argument(
+ '--hook-stage', choices=C.STAGES, default='commit',
+ help='The stage during which the hook is fired. One of %(choices)s',
+ )
+ parser.add_argument(
+ '--from-ref', '--source', '-s',
+ help=(
+ '(for usage with `--to-ref`) -- this option represents the '
+ 'original ref in a `from_ref...to_ref` diff expression. '
+ 'For `pre-push` hooks, this represents the branch you are pushing '
+ 'to. '
+ 'For `post-checkout` hooks, this represents the branch that was '
+ 'previously checked out.'
+ ),
+ )
+ parser.add_argument(
+ '--to-ref', '--origin', '-o',
+ help=(
+ '(for usage with `--from-ref`) -- this option represents the '
+ 'destination ref in a `from_ref...to_ref` diff expression. '
+ 'For `pre-push` hooks, this represents the branch being pushed. '
+ 'For `post-checkout` hooks, this represents the branch that is '
+ 'now checked out.'
+ ),
+ )
+ parser.add_argument(
+ '--commit-msg-filename',
+ help='Filename to check when running during `commit-msg`',
+ )
+ parser.add_argument(
+ '--remote-name', help='Remote name used by `git push`.',
+ )
+ parser.add_argument('--remote-url', help='Remote url used by `git push`.')
+ parser.add_argument(
+ '--checkout-type',
+ help=(
+ 'Indicates whether the checkout was a branch checkout '
+ '(changing branches, flag=1) or a file checkout (retrieving a '
+ 'file from the index, flag=0).'
+ ),
+ )
+
+
+def _adjust_args_and_chdir(args: argparse.Namespace) -> None:
+ # `--config` was specified relative to the non-root working directory
+ if os.path.exists(args.config):
+ args.config = os.path.abspath(args.config)
+ if args.command in {'run', 'try-repo'}:
+ args.files = [os.path.abspath(filename) for filename in args.files]
+ if args.command == 'try-repo' and os.path.exists(args.repo):
+ args.repo = os.path.abspath(args.repo)
+
+ try:
+ toplevel = git.get_root()
+ except CalledProcessError:
+ raise FatalError(
+ 'git failed. Is it installed, and are you in a Git repository '
+ 'directory?',
+ )
+ else:
+ if toplevel == '': # pragma: no cover (old git)
+ raise FatalError(
+ 'git toplevel unexpectedly empty! make sure you are not '
+ 'inside the `.git` directory of your repository.',
+ )
+ else:
+ os.chdir(toplevel)
+
+ args.config = os.path.relpath(args.config)
+ if args.command in {'run', 'try-repo'}:
+ args.files = [os.path.relpath(filename) for filename in args.files]
+ if args.command == 'try-repo' and os.path.exists(args.repo):
+ args.repo = os.path.relpath(args.repo)
+
+
+def main(argv: Optional[Sequence[str]] = None) -> int:
+ argv = argv if argv is not None else sys.argv[1:]
+ parser = argparse.ArgumentParser(prog='pre-commit')
+
+ # https://stackoverflow.com/a/8521644/812183
+ parser.add_argument(
+ '-V', '--version',
+ action='version',
+ version=f'%(prog)s {C.VERSION}',
+ )
+
+ subparsers = parser.add_subparsers(dest='command')
+
+ autoupdate_parser = subparsers.add_parser(
+ 'autoupdate',
+ help="Auto-update pre-commit config to the latest repos' versions.",
+ )
+ _add_color_option(autoupdate_parser)
+ _add_config_option(autoupdate_parser)
+ autoupdate_parser.add_argument(
+ '--bleeding-edge', action='store_true',
+ help=(
+ 'Update to the bleeding edge of `master` instead of the latest '
+ 'tagged version (the default behavior).'
+ ),
+ )
+ autoupdate_parser.add_argument(
+ '--freeze', action='store_true',
+ help='Store "frozen" hashes in `rev` instead of tag names',
+ )
+ autoupdate_parser.add_argument(
+ '--repo', dest='repos', action='append', metavar='REPO',
+ help='Only update this repository -- may be specified multiple times.',
+ )
+
+ clean_parser = subparsers.add_parser(
+ 'clean', help='Clean out pre-commit files.',
+ )
+ _add_color_option(clean_parser)
+ _add_config_option(clean_parser)
+
+ hook_impl_parser = subparsers.add_parser('hook-impl')
+ _add_color_option(hook_impl_parser)
+ _add_config_option(hook_impl_parser)
+ hook_impl_parser.add_argument('--hook-type')
+ hook_impl_parser.add_argument('--hook-dir')
+ hook_impl_parser.add_argument(
+ '--skip-on-missing-config', action='store_true',
+ )
+ hook_impl_parser.add_argument(dest='rest', nargs=argparse.REMAINDER)
+
+ gc_parser = subparsers.add_parser('gc', help='Clean unused cached repos.')
+ _add_color_option(gc_parser)
+ _add_config_option(gc_parser)
+
+ init_templatedir_parser = subparsers.add_parser(
+ 'init-templatedir',
+ help=(
+ 'Install hook script in a directory intended for use with '
+ '`git config init.templateDir`.'
+ ),
+ )
+ _add_color_option(init_templatedir_parser)
+ _add_config_option(init_templatedir_parser)
+ init_templatedir_parser.add_argument(
+ 'directory', help='The directory in which to write the hook script.',
+ )
+ _add_hook_type_option(init_templatedir_parser)
+
+ install_parser = subparsers.add_parser(
+ 'install', help='Install the pre-commit script.',
+ )
+ _add_color_option(install_parser)
+ _add_config_option(install_parser)
+ install_parser.add_argument(
+ '-f', '--overwrite', action='store_true',
+ help='Overwrite existing hooks / remove migration mode.',
+ )
+ install_parser.add_argument(
+ '--install-hooks', action='store_true',
+ help=(
+ 'Whether to install hook environments for all environments '
+ 'in the config file.'
+ ),
+ )
+ _add_hook_type_option(install_parser)
+ install_parser.add_argument(
+ '--allow-missing-config', action='store_true', default=False,
+ help=(
+ 'Whether to allow a missing `pre-commit` configuration file '
+ 'or exit with a failure code.'
+ ),
+ )
+
+ install_hooks_parser = subparsers.add_parser(
+ 'install-hooks',
+ help=(
+ 'Install hook environments for all environments in the config '
+ 'file. You may find `pre-commit install --install-hooks` more '
+ 'useful.'
+ ),
+ )
+ _add_color_option(install_hooks_parser)
+ _add_config_option(install_hooks_parser)
+
+ migrate_config_parser = subparsers.add_parser(
+ 'migrate-config',
+ help='Migrate list configuration to new map configuration.',
+ )
+ _add_color_option(migrate_config_parser)
+ _add_config_option(migrate_config_parser)
+
+ run_parser = subparsers.add_parser('run', help='Run hooks.')
+ _add_color_option(run_parser)
+ _add_config_option(run_parser)
+ _add_run_options(run_parser)
+
+ sample_config_parser = subparsers.add_parser(
+ 'sample-config', help=f'Produce a sample {C.CONFIG_FILE} file',
+ )
+ _add_color_option(sample_config_parser)
+ _add_config_option(sample_config_parser)
+
+ try_repo_parser = subparsers.add_parser(
+ 'try-repo',
+ help='Try the hooks in a repository, useful for developing new hooks.',
+ )
+ _add_color_option(try_repo_parser)
+ _add_config_option(try_repo_parser)
+ try_repo_parser.add_argument(
+ 'repo', help='Repository to source hooks from.',
+ )
+ try_repo_parser.add_argument(
+ '--ref', '--rev',
+ help=(
+ 'Manually select a rev to run against, otherwise the `HEAD` '
+ 'revision will be used.'
+ ),
+ )
+ _add_run_options(try_repo_parser)
+
+ uninstall_parser = subparsers.add_parser(
+ 'uninstall', help='Uninstall the pre-commit script.',
+ )
+ _add_color_option(uninstall_parser)
+ _add_config_option(uninstall_parser)
+ _add_hook_type_option(uninstall_parser)
+
+ help = subparsers.add_parser(
+ 'help', help='Show help for a specific command.',
+ )
+ help.add_argument('help_cmd', nargs='?', help='Command to show help for.')
+
+ # argparse doesn't really provide a way to use a `default` subparser
+ if len(argv) == 0:
+ argv = ['run']
+ args = parser.parse_args(argv)
+
+ if args.command == 'help' and args.help_cmd:
+ parser.parse_args([args.help_cmd, '--help'])
+ elif args.command == 'help':
+ parser.parse_args(['--help'])
+
+ with error_handler(), logging_handler(args.color):
+ if args.command not in COMMANDS_NO_GIT:
+ _adjust_args_and_chdir(args)
+
+ git.check_for_cygwin_mismatch()
+
+ store = Store()
+ store.mark_config_used(args.config)
+
+ if args.command == 'autoupdate':
+ return autoupdate(
+ args.config, store,
+ tags_only=not args.bleeding_edge,
+ freeze=args.freeze,
+ repos=args.repos,
+ )
+ elif args.command == 'clean':
+ return clean(store)
+ elif args.command == 'gc':
+ return gc(store)
+ elif args.command == 'hook-impl':
+ return hook_impl(
+ store,
+ config=args.config,
+ color=args.color,
+ hook_type=args.hook_type,
+ hook_dir=args.hook_dir,
+ skip_on_missing_config=args.skip_on_missing_config,
+ args=args.rest[1:],
+ )
+ elif args.command == 'install':
+ return install(
+ args.config, store,
+ hook_types=args.hook_types,
+ overwrite=args.overwrite,
+ hooks=args.install_hooks,
+ skip_on_missing_config=args.allow_missing_config,
+ )
+ elif args.command == 'init-templatedir':
+ return init_templatedir(
+ args.config, store, args.directory,
+ hook_types=args.hook_types,
+ )
+ elif args.command == 'install-hooks':
+ return install_hooks(args.config, store)
+ elif args.command == 'migrate-config':
+ return migrate_config(args.config)
+ elif args.command == 'run':
+ return run(args.config, store, args)
+ elif args.command == 'sample-config':
+ return sample_config()
+ elif args.command == 'try-repo':
+ return try_repo(args)
+ elif args.command == 'uninstall':
+ return uninstall(hook_types=args.hook_types)
+ else:
+ raise NotImplementedError(
+ f'Command {args.command} not implemented.',
+ )
+
+ raise AssertionError(
+ f'Command {args.command} failed to exit with a returncode',
+ )
+
+
+if __name__ == '__main__':
+ exit(main())
diff --git a/pre_commit/make_archives.py b/pre_commit/make_archives.py
new file mode 100644
index 0000000..c31bcd7
--- /dev/null
+++ b/pre_commit/make_archives.py
@@ -0,0 +1,65 @@
+import argparse
+import os.path
+import tarfile
+from typing import Optional
+from typing import Sequence
+
+from pre_commit import output
+from pre_commit.util import cmd_output_b
+from pre_commit.util import rmtree
+from pre_commit.util import tmpdir
+
+
+# This is a script for generating the tarred resources for git repo
+# dependencies. Currently it's just for "vendoring" ruby support packages.
+
+
+REPOS = (
+ ('rbenv', 'git://github.com/rbenv/rbenv', 'a3fa9b7'),
+ ('ruby-build', 'git://github.com/rbenv/ruby-build', '1a902f3'),
+ (
+ 'ruby-download',
+ 'git://github.com/garnieretienne/rvm-download',
+ '09bd7c6',
+ ),
+)
+
+
+def make_archive(name: str, repo: str, ref: str, destdir: str) -> str:
+ """Makes an archive of a repository in the given destdir.
+
+ :param text name: Name to give the archive, for instance foo. The file
+ that is created will be called foo.tar.gz.
+ :param text repo: Repository to clone.
+ :param text ref: Tag/SHA/branch to check out.
+ :param text destdir: Directory to place archives in.
+ """
+ output_path = os.path.join(destdir, f'{name}.tar.gz')
+ with tmpdir() as tempdir:
+ # Clone the repository to the temporary directory
+ cmd_output_b('git', 'clone', repo, tempdir)
+ cmd_output_b('git', 'checkout', ref, cwd=tempdir)
+
+ # We don't want the '.git' directory
+ # It adds a bunch of size to the archive and we don't use it at
+ # runtime
+ rmtree(os.path.join(tempdir, '.git'))
+
+ with tarfile.open(output_path, 'w|gz') as tf:
+ tf.add(tempdir, name)
+
+ return output_path
+
+
+def main(argv: Optional[Sequence[str]] = None) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--dest', default='pre_commit/resources')
+ args = parser.parse_args(argv)
+ for archive_name, repo, ref in REPOS:
+ output.write_line(f'Making {archive_name}.tar.gz for {repo}@{ref}')
+ make_archive(archive_name, repo, ref, args.dest)
+ return 0
+
+
+if __name__ == '__main__':
+ exit(main())
diff --git a/pre_commit/meta_hooks/__init__.py b/pre_commit/meta_hooks/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pre_commit/meta_hooks/__init__.py
diff --git a/pre_commit/meta_hooks/check_hooks_apply.py b/pre_commit/meta_hooks/check_hooks_apply.py
new file mode 100644
index 0000000..d0244a9
--- /dev/null
+++ b/pre_commit/meta_hooks/check_hooks_apply.py
@@ -0,0 +1,39 @@
+import argparse
+from typing import Optional
+from typing import Sequence
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.clientlib import load_config
+from pre_commit.commands.run import Classifier
+from pre_commit.repository import all_hooks
+from pre_commit.store import Store
+
+
+def check_all_hooks_match_files(config_file: str) -> int:
+ classifier = Classifier(git.get_all_files())
+ retv = 0
+
+ for hook in all_hooks(load_config(config_file), Store()):
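+ # always-run hooks and `fail` hooks don't depend on matching files, so
+ # they are never reported here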
+ if hook.always_run or hook.language == 'fail':
+ continue
+ elif not classifier.filenames_for_hook(hook):
+ print(f'{hook.id} does not apply to this repository')
+ retv = 1
+
+ return retv
+
+
+def main(argv: Optional[Sequence[str]] = None) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE])
+ args = parser.parse_args(argv)
+
+ retv = 0
+ for filename in args.filenames:
+ retv |= check_all_hooks_match_files(filename)
+ return retv
+
+
+if __name__ == '__main__':
+ exit(main())
diff --git a/pre_commit/meta_hooks/check_useless_excludes.py b/pre_commit/meta_hooks/check_useless_excludes.py
new file mode 100644
index 0000000..30b8d81
--- /dev/null
+++ b/pre_commit/meta_hooks/check_useless_excludes.py
@@ -0,0 +1,72 @@
+import argparse
+import re
+from typing import Optional
+from typing import Sequence
+
+from cfgv import apply_defaults
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.clientlib import load_config
+from pre_commit.clientlib import MANIFEST_HOOK_DICT
+from pre_commit.commands.run import Classifier
+
+
+def exclude_matches_any(
+ filenames: Sequence[str],
+ include: str,
+ exclude: str,
+) -> bool:
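+ # '^$' is the default exclude pattern and matches nothing on purpose --
+ # never flag it as useless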
+ if exclude == '^$':
+ return True
+ include_re, exclude_re = re.compile(include), re.compile(exclude)
+ for filename in filenames:
+ if include_re.search(filename) and exclude_re.search(filename):
+ return True
+ return False
+
+
+def check_useless_excludes(config_file: str) -> int:
+ config = load_config(config_file)
+ classifier = Classifier(git.get_all_files())
+ retv = 0
+
+ exclude = config['exclude']
+ if not exclude_matches_any(classifier.filenames, '', exclude):
+ print(
+ f'The global exclude pattern {exclude!r} does not match any files',
+ )
+ retv = 1
+
+ for repo in config['repos']:
+ for hook in repo['hooks']:
+ # Not actually a manifest dict, but this more accurately reflects
+ # the defaults applied during runtime
+ hook = apply_defaults(hook, MANIFEST_HOOK_DICT)
+ names = classifier.filenames
+ types, exclude_types = hook['types'], hook['exclude_types']
+ names = classifier.by_types(names, types, exclude_types)
+ include, exclude = hook['files'], hook['exclude']
+ if not exclude_matches_any(names, include, exclude):
+ print(
+ f'The exclude pattern {exclude!r} for {hook["id"]} does '
+ f'not match any files',
+ )
+ retv = 1
+
+ return retv
+
+
+def main(argv: Optional[Sequence[str]] = None) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('filenames', nargs='*', default=[C.CONFIG_FILE])
+ args = parser.parse_args(argv)
+
+ retv = 0
+ for filename in args.filenames:
+ retv |= check_useless_excludes(filename)
+ return retv
+
+
+if __name__ == '__main__':
+ exit(main())
diff --git a/pre_commit/meta_hooks/identity.py b/pre_commit/meta_hooks/identity.py
new file mode 100644
index 0000000..730d0ec
--- /dev/null
+++ b/pre_commit/meta_hooks/identity.py
@@ -0,0 +1,16 @@
+import sys
+from typing import Optional
+from typing import Sequence
+
+from pre_commit import output
+
+
+def main(argv: Optional[Sequence[str]] = None) -> int:
+ argv = argv if argv is not None else sys.argv[1:]
+ for arg in argv:
+ output.write_line(arg)
+ return 0
+
+
+if __name__ == '__main__':
+ exit(main())
diff --git a/pre_commit/output.py b/pre_commit/output.py
new file mode 100644
index 0000000..24f9d84
--- /dev/null
+++ b/pre_commit/output.py
@@ -0,0 +1,32 @@
+import contextlib
+import sys
+from typing import Any
+from typing import IO
+from typing import Optional
+
+
+def write(s: str, stream: IO[bytes] = sys.stdout.buffer) -> None:
+ stream.write(s.encode())
+ stream.flush()
+
+
+def write_line_b(
+ s: Optional[bytes] = None,
+ stream: IO[bytes] = sys.stdout.buffer,
+ logfile_name: Optional[str] = None,
+) -> None:
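+ # write the line to the given stream and, when requested, also tee it
+ # to a logfile opened in append mode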
+ with contextlib.ExitStack() as exit_stack:
+ output_streams = [stream]
+ if logfile_name:
+ stream = exit_stack.enter_context(open(logfile_name, 'ab'))
+ output_streams.append(stream)
+
+ for output_stream in output_streams:
+ if s is not None:
+ output_stream.write(s)
+ output_stream.write(b'\n')
+ output_stream.flush()
+
+
+def write_line(s: Optional[str] = None, **kwargs: Any) -> None:
+ write_line_b(s.encode() if s is not None else s, **kwargs)
diff --git a/pre_commit/parse_shebang.py b/pre_commit/parse_shebang.py
new file mode 100644
index 0000000..d344a1d
--- /dev/null
+++ b/pre_commit/parse_shebang.py
@@ -0,0 +1,84 @@
+import os.path
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import TYPE_CHECKING
+
+from identify.identify import parse_shebang_from_file
+
+if TYPE_CHECKING:
+ from typing import NoReturn
+
+
+class ExecutableNotFoundError(OSError):
+ def to_output(self) -> Tuple[int, bytes, None]:
+ return (1, self.args[0].encode(), None)
+
+
+def parse_filename(filename: str) -> Tuple[str, ...]:
+ if not os.path.exists(filename):
+ return ()
+ else:
+ return parse_shebang_from_file(filename)
+
+
+def find_executable(
+ exe: str, _environ: Optional[Mapping[str, str]] = None,
+) -> Optional[str]:
+ exe = os.path.normpath(exe)
+ if os.sep in exe:
+ return exe
+
+ environ = _environ if _environ is not None else os.environ
+
+ if 'PATHEXT' in environ:
+ exts = environ['PATHEXT'].split(os.pathsep)
+ possible_exe_names = tuple(f'{exe}{ext}' for ext in exts) + (exe,)
+ else:
+ possible_exe_names = (exe,)
+
+ for path in environ.get('PATH', '').split(os.pathsep):
+ for possible_exe_name in possible_exe_names:
+ joined = os.path.join(path, possible_exe_name)
+ if os.path.isfile(joined) and os.access(joined, os.X_OK):
+ return joined
+ else:
+ return None
+
+
+def normexe(orig: str) -> str:
+ def _error(msg: str) -> 'NoReturn':
+ raise ExecutableNotFoundError(f'Executable `{orig}` {msg}')
+
+ if os.sep not in orig and (not os.altsep or os.altsep not in orig):
+ exe = find_executable(orig)
+ if exe is None:
+ _error('not found')
+ return exe
+ elif os.path.isdir(orig):
+ _error('is a directory')
+ elif not os.path.isfile(orig):
+ _error('not found')
+ elif not os.access(orig, os.X_OK): # pragma: win32 no cover
+ _error('is not executable')
+ else:
+ return orig
+
+
+def normalize_cmd(cmd: Tuple[str, ...]) -> Tuple[str, ...]:
+ """Fixes for the following issues on windows
+ - https://bugs.python.org/issue8557
+ - windows does not parse shebangs
+
+ This function also makes deep-path shebangs work just fine.
+ """
+ # Use PATH to determine the executable
+ exe = normexe(cmd[0])
+
+ # Figure out the shebang from the resulting command
+ cmd = parse_filename(exe) + (exe,) + cmd[1:]
+
+ # This could have given us back another bare executable
+ exe = normexe(cmd[0])
+
+ return (exe,) + cmd[1:]
diff --git a/pre_commit/prefix.py b/pre_commit/prefix.py
new file mode 100644
index 0000000..0e3ebbd
--- /dev/null
+++ b/pre_commit/prefix.py
@@ -0,0 +1,17 @@
+import os.path
+from typing import NamedTuple
+from typing import Tuple
+
+
+class Prefix(NamedTuple):
+ prefix_dir: str
+
+ def path(self, *parts: str) -> str:
+ return os.path.normpath(os.path.join(self.prefix_dir, *parts))
+
+ def exists(self, *parts: str) -> bool:
+ return os.path.exists(self.path(*parts))
+
+ def star(self, end: str) -> Tuple[str, ...]:
+ paths = os.listdir(self.prefix_dir)
+ return tuple(path for path in paths if path.endswith(end))
diff --git a/pre_commit/repository.py b/pre_commit/repository.py
new file mode 100644
index 0000000..77734ee
--- /dev/null
+++ b/pre_commit/repository.py
@@ -0,0 +1,208 @@
+import json
+import logging
+import os
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit.clientlib import load_manifest
+from pre_commit.clientlib import LOCAL
+from pre_commit.clientlib import META
+from pre_commit.hook import Hook
+from pre_commit.languages.all import languages
+from pre_commit.languages.helpers import environment_dir
+from pre_commit.prefix import Prefix
+from pre_commit.store import Store
+from pre_commit.util import parse_version
+from pre_commit.util import rmtree
+
+
+logger = logging.getLogger('pre_commit')
+
+
+def _state(additional_deps: Sequence[str]) -> object:
+ return {'additional_dependencies': sorted(additional_deps)}
+
+
+def _state_filename(prefix: Prefix, venv: str) -> str:
+ return prefix.path(venv, f'.install_state_v{C.INSTALLED_STATE_VERSION}')
+
+
+def _read_state(prefix: Prefix, venv: str) -> Optional[object]:
+ filename = _state_filename(prefix, venv)
+ if not os.path.exists(filename):
+ return None
+ else:
+ with open(filename) as f:
+ return json.load(f)
+
+
+def _write_state(prefix: Prefix, venv: str, state: object) -> None:
+ state_filename = _state_filename(prefix, venv)
+ staging = f'{state_filename}staging'
+ with open(staging, 'w') as state_file:
+ state_file.write(json.dumps(state))
+ # Move the file into place atomically to indicate we've installed
+ os.rename(staging, state_filename)
+
+
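+# an environment counts as installed when its recorded install state (the
+# additional dependencies it was built with) matches and the language
+# reports it healthy; languages without an environment are trivially installed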
+def _hook_installed(hook: Hook) -> bool:
+ lang = languages[hook.language]
+ venv = environment_dir(lang.ENVIRONMENT_DIR, hook.language_version)
+ return (
+ venv is None or (
+ (
+ _read_state(hook.prefix, venv) ==
+ _state(hook.additional_dependencies)
+ ) and
+ lang.healthy(hook.prefix, hook.language_version)
+ )
+ )
+
+
+def _hook_install(hook: Hook) -> None:
+ logger.info(f'Installing environment for {hook.src}.')
+ logger.info('Once installed this environment will be reused.')
+ logger.info('This may take a few minutes...')
+
+ lang = languages[hook.language]
+ assert lang.ENVIRONMENT_DIR is not None
+ venv = environment_dir(lang.ENVIRONMENT_DIR, hook.language_version)
+
+ # There's potentially incomplete cleanup from previous runs
+ # Clean it up!
+ if hook.prefix.exists(venv):
+ rmtree(hook.prefix.path(venv))
+
+ lang.install_environment(
+ hook.prefix, hook.language_version, hook.additional_dependencies,
+ )
+ # Write our state to indicate we're installed
+ _write_state(hook.prefix, venv, _state(hook.additional_dependencies))
+
+
+def _hook(
+ *hook_dicts: Dict[str, Any],
+ root_config: Dict[str, Any],
+) -> Dict[str, Any]:
+ ret, rest = dict(hook_dicts[0]), hook_dicts[1:]
+ for dct in rest:
+ ret.update(dct)
+
+ version = ret['minimum_pre_commit_version']
+ if parse_version(version) > parse_version(C.VERSION):
+ logger.error(
+ f'The hook `{ret["id"]}` requires pre-commit version {version} '
+ f'but version {C.VERSION} is installed. '
+ f'Perhaps run `pip install --upgrade pre-commit`.',
+ )
+ exit(1)
+
+ lang = ret['language']
+ if ret['language_version'] == C.DEFAULT:
+ ret['language_version'] = root_config['default_language_version'][lang]
+ if ret['language_version'] == C.DEFAULT:
+ ret['language_version'] = languages[lang].get_default_version()
+
+ if not ret['stages']:
+ ret['stages'] = root_config['default_stages']
+
+ return ret
+
+
+def _non_cloned_repository_hooks(
+ repo_config: Dict[str, Any],
+ store: Store,
+ root_config: Dict[str, Any],
+) -> Tuple[Hook, ...]:
+ def _prefix(language_name: str, deps: Sequence[str]) -> Prefix:
+ language = languages[language_name]
+ # pygrep / script / system / docker_image do not have
+ # environments so they work out of the current directory
+ if language.ENVIRONMENT_DIR is None:
+ return Prefix(os.getcwd())
+ else:
+ return Prefix(store.make_local(deps))
+
+ return tuple(
+ Hook.create(
+ repo_config['repo'],
+ _prefix(hook['language'], hook['additional_dependencies']),
+ _hook(hook, root_config=root_config),
+ )
+ for hook in repo_config['hooks']
+ )
+
+
+def _cloned_repository_hooks(
+ repo_config: Dict[str, Any],
+ store: Store,
+ root_config: Dict[str, Any],
+) -> Tuple[Hook, ...]:
+ repo, rev = repo_config['repo'], repo_config['rev']
+ manifest_path = os.path.join(store.clone(repo, rev), C.MANIFEST_FILE)
+ by_id = {hook['id']: hook for hook in load_manifest(manifest_path)}
+
+ for hook in repo_config['hooks']:
+ if hook['id'] not in by_id:
+ logger.error(
+ f'`{hook["id"]}` is not present in repository {repo}. '
+ f'Typo? Perhaps it is introduced in a newer version? '
+ f'Often `pre-commit autoupdate` fixes this.',
+ )
+ exit(1)
+
+ hook_dcts = [
+ _hook(by_id[hook['id']], hook, root_config=root_config)
+ for hook in repo_config['hooks']
+ ]
+ return tuple(
+ Hook.create(
+ repo_config['repo'],
+ Prefix(store.clone(repo, rev, hook['additional_dependencies'])),
+ hook,
+ )
+ for hook in hook_dcts
+ )
+
+
+def _repository_hooks(
+ repo_config: Dict[str, Any],
+ store: Store,
+ root_config: Dict[str, Any],
+) -> Tuple[Hook, ...]:
+ if repo_config['repo'] in {LOCAL, META}:
+ return _non_cloned_repository_hooks(repo_config, store, root_config)
+ else:
+ return _cloned_repository_hooks(repo_config, store, root_config)
+
+
+def install_hook_envs(hooks: Sequence[Hook], store: Store) -> None:
+ def _need_installed() -> List[Hook]:
+ seen: Set[Tuple[Prefix, str, str, Tuple[str, ...]]] = set()
+ ret = []
+ for hook in hooks:
+ if hook.install_key not in seen and not _hook_installed(hook):
+ ret.append(hook)
+ seen.add(hook.install_key)
+ return ret
+
+ if not _need_installed():
+ return
+ with store.exclusive_lock():
+ # Another process may have already completed this work
+ for hook in _need_installed():
+ _hook_install(hook)
+
+
+def all_hooks(root_config: Dict[str, Any], store: Store) -> Tuple[Hook, ...]:
+ return tuple(
+ hook
+ for repo in root_config['repos']
+ for hook in _repository_hooks(repo, store, root_config)
+ )
diff --git a/pre_commit/resources/__init__.py b/pre_commit/resources/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pre_commit/resources/__init__.py
diff --git a/pre_commit/resources/empty_template_.npmignore b/pre_commit/resources/empty_template_.npmignore
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/pre_commit/resources/empty_template_.npmignore
@@ -0,0 +1 @@
+*
diff --git a/pre_commit/resources/empty_template_Cargo.toml b/pre_commit/resources/empty_template_Cargo.toml
new file mode 100644
index 0000000..3dfeffa
--- /dev/null
+++ b/pre_commit/resources/empty_template_Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "__fake_crate"
+version = "0.0.0"
+
+[[bin]]
+name = "__fake_cmd"
+path = "main.rs"
diff --git a/pre_commit/resources/empty_template_Makefile.PL b/pre_commit/resources/empty_template_Makefile.PL
new file mode 100644
index 0000000..ac75fe5
--- /dev/null
+++ b/pre_commit/resources/empty_template_Makefile.PL
@@ -0,0 +1,6 @@
+use ExtUtils::MakeMaker;
+
+WriteMakefile(
+ NAME => "PreCommitDummy",
+ VERSION => "0.0.1",
+);
diff --git a/pre_commit/resources/empty_template_environment.yml b/pre_commit/resources/empty_template_environment.yml
new file mode 100644
index 0000000..0f29f0c
--- /dev/null
+++ b/pre_commit/resources/empty_template_environment.yml
@@ -0,0 +1,9 @@
+channels:
+ - conda-forge
+ - defaults
+dependencies:
+ # This cannot be empty as otherwise no environment will be created.
+ # We're using openssl here as it is available on all systems and will
+ # most likely already be installed anyway.
+ # See https://github.com/conda/conda/issues/9487
+ - openssl
diff --git a/pre_commit/resources/empty_template_main.go b/pre_commit/resources/empty_template_main.go
new file mode 100644
index 0000000..38dd16d
--- /dev/null
+++ b/pre_commit/resources/empty_template_main.go
@@ -0,0 +1,3 @@
+package main
+
+func main() {}
diff --git a/pre_commit/resources/empty_template_main.rs b/pre_commit/resources/empty_template_main.rs
new file mode 100644
index 0000000..f328e4d
--- /dev/null
+++ b/pre_commit/resources/empty_template_main.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/pre_commit/resources/empty_template_package.json b/pre_commit/resources/empty_template_package.json
new file mode 100644
index 0000000..ac7b725
--- /dev/null
+++ b/pre_commit/resources/empty_template_package.json
@@ -0,0 +1,4 @@
+{
+ "name": "pre_commit_dummy_package",
+ "version": "0.0.0"
+}
diff --git a/pre_commit/resources/empty_template_pre_commit_dummy_package.gemspec b/pre_commit/resources/empty_template_pre_commit_dummy_package.gemspec
new file mode 100644
index 0000000..8bfb40c
--- /dev/null
+++ b/pre_commit/resources/empty_template_pre_commit_dummy_package.gemspec
@@ -0,0 +1,6 @@
+Gem::Specification.new do |s|
+ s.name = 'pre_commit_dummy_package'
+ s.version = '0.0.0'
+ s.summary = 'dummy gem for pre-commit hooks'
+ s.authors = ['Anthony Sottile']
+end
diff --git a/pre_commit/resources/empty_template_setup.py b/pre_commit/resources/empty_template_setup.py
new file mode 100644
index 0000000..6886064
--- /dev/null
+++ b/pre_commit/resources/empty_template_setup.py
@@ -0,0 +1,4 @@
+from setuptools import setup
+
+
+setup(name='pre-commit-dummy-package', version='0.0.0')
diff --git a/pre_commit/resources/hook-tmpl b/pre_commit/resources/hook-tmpl
new file mode 100755
index 0000000..299144e
--- /dev/null
+++ b/pre_commit/resources/hook-tmpl
@@ -0,0 +1,44 @@
+#!/usr/bin/env python3
+# File generated by pre-commit: https://pre-commit.com
+# ID: 138fd403232d2ddd5efb44317e38bf03
+import os
+import sys
+
+# we try our best, but the shebang of this script is difficult to determine:
+# - macos doesn't ship with python3
+# - windows executables are almost always `python.exe`
+# therefore we continue to support python2 for this small script
+if sys.version_info < (3, 3):
+ from distutils.spawn import find_executable as which
+else:
+ from shutil import which
+
+# work around https://github.com/Homebrew/homebrew-core/issues/30445
+os.environ.pop('__PYVENV_LAUNCHER__', None)
+
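+# the values between the templated markers below are rewritten when the hook
+# script is installed into a repository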
+# start templated
+INSTALL_PYTHON = ''
+ARGS = ['hook-impl']
+# end templated
+ARGS.extend(('--hook-dir', os.path.realpath(os.path.dirname(__file__))))
+ARGS.append('--')
+ARGS.extend(sys.argv[1:])
+
+DNE = '`pre-commit` not found. Did you forget to activate your virtualenv?'
+if os.access(INSTALL_PYTHON, os.X_OK):
+ CMD = [INSTALL_PYTHON, '-mpre_commit']
+elif which('pre-commit'):
+ CMD = ['pre-commit']
+else:
+ raise SystemExit(DNE)
+
+CMD.extend(ARGS)
+if sys.platform == 'win32': # https://bugs.python.org/issue19124
+ import subprocess
+
+ if sys.version_info < (3, 7): # https://bugs.python.org/issue25942
+ raise SystemExit(subprocess.Popen(CMD).wait())
+ else:
+ raise SystemExit(subprocess.call(CMD))
+else:
+ os.execvp(CMD[0], CMD)
diff --git a/pre_commit/resources/rbenv.tar.gz b/pre_commit/resources/rbenv.tar.gz
new file mode 100644
index 0000000..5307b19
--- /dev/null
+++ b/pre_commit/resources/rbenv.tar.gz
Binary files differ
diff --git a/pre_commit/resources/ruby-build.tar.gz b/pre_commit/resources/ruby-build.tar.gz
new file mode 100644
index 0000000..4a69a09
--- /dev/null
+++ b/pre_commit/resources/ruby-build.tar.gz
Binary files differ
diff --git a/pre_commit/resources/ruby-download.tar.gz b/pre_commit/resources/ruby-download.tar.gz
new file mode 100644
index 0000000..7ccfb6c
--- /dev/null
+++ b/pre_commit/resources/ruby-download.tar.gz
Binary files differ
diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py
new file mode 100644
index 0000000..09d323d
--- /dev/null
+++ b/pre_commit/staged_files_only.py
@@ -0,0 +1,90 @@
+import contextlib
+import logging
+import os.path
+import time
+from typing import Generator
+
+from pre_commit import git
+from pre_commit.util import CalledProcessError
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+from pre_commit.xargs import xargs
+
+
+logger = logging.getLogger('pre_commit')
+
+
+def _git_apply(patch: str) -> None:
+ args = ('apply', '--whitespace=nowarn', patch)
+ try:
+ cmd_output_b('git', *args)
+ except CalledProcessError:
+ # Retry with autocrlf=false -- see #570
+ cmd_output_b('git', '-c', 'core.autocrlf=false', *args)
+
+
+@contextlib.contextmanager
+def _intent_to_add_cleared() -> Generator[None, None, None]:
+ intent_to_add = git.intent_to_add_files()
+ if intent_to_add:
+ logger.warning('Unstaged intent-to-add files detected.')
+
+ xargs(('git', 'rm', '--cached', '--'), intent_to_add)
+ try:
+ yield
+ finally:
+ xargs(('git', 'add', '--intent-to-add', '--'), intent_to_add)
+ else:
+ yield
+
+
+@contextlib.contextmanager
+def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]:
+ tree = cmd_output('git', 'write-tree')[1].strip()
+ retcode, diff_stdout_binary, _ = cmd_output_b(
+ 'git', 'diff-index', '--ignore-submodules', '--binary',
+ '--exit-code', '--no-color', '--no-ext-diff', tree, '--',
+ retcode=None,
+ )
+ if retcode and diff_stdout_binary.strip():
+ patch_filename = f'patch{int(time.time())}'
+ patch_filename = os.path.join(patch_dir, patch_filename)
+ logger.warning('Unstaged files detected.')
+ logger.info(f'Stashing unstaged files to {patch_filename}.')
+ # Save the current unstaged changes as a patch
+ os.makedirs(patch_dir, exist_ok=True)
+ with open(patch_filename, 'wb') as patch_file:
+ patch_file.write(diff_stdout_binary)
+
+ # Clear the working directory of unstaged changes
+ cmd_output_b('git', 'checkout', '--', '.')
+ try:
+ yield
+ finally:
+ # Try to apply the patch we saved
+ try:
+ _git_apply(patch_filename)
+ except CalledProcessError:
+ logger.warning(
+ 'Stashed changes conflicted with hook auto-fixes... '
+ 'Rolling back fixes...',
+ )
+ # We failed to apply the patch, presumably due to fixes made
+ # by hooks.
+ # Roll back the changes made by hooks.
+ cmd_output_b('git', 'checkout', '--', '.')
+ _git_apply(patch_filename)
+ logger.info(f'Restored changes from {patch_filename}.')
+ else:
+ # There weren't any staged files so we don't need to do anything
+ # special
+ yield
+
+
+@contextlib.contextmanager
+def staged_files_only(patch_dir: str) -> Generator[None, None, None]:
+ """Clear any unstaged changes from the git working directory inside this
+ context.
+ """
+ with _intent_to_add_cleared(), _unstaged_changes_cleared(patch_dir):
+ yield
diff --git a/pre_commit/store.py b/pre_commit/store.py
new file mode 100644
index 0000000..760b37a
--- /dev/null
+++ b/pre_commit/store.py
@@ -0,0 +1,250 @@
+import contextlib
+import logging
+import os.path
+import sqlite3
+import tempfile
+from typing import Callable
+from typing import Generator
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+
+import pre_commit.constants as C
+from pre_commit import file_lock
+from pre_commit import git
+from pre_commit.util import CalledProcessError
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output_b
+from pre_commit.util import resource_text
+from pre_commit.util import rmtree
+
+
+logger = logging.getLogger('pre_commit')
+
+
+def _get_default_directory() -> str:
+ """Returns the default directory for the Store. This is intentionally
+ underscored to indicate that `Store.get_default_directory` is the intended
+ way to get this information. This is also done so
+ `Store.get_default_directory` can be mocked in tests and
+ `_get_default_directory` can be tested.
+ """
+ return os.environ.get('PRE_COMMIT_HOME') or os.path.join(
+ os.environ.get('XDG_CACHE_HOME') or os.path.expanduser('~/.cache'),
+ 'pre-commit',
+ )
+
+
+class Store:
+ get_default_directory = staticmethod(_get_default_directory)
+
+ def __init__(self, directory: Optional[str] = None) -> None:
+ self.directory = directory or Store.get_default_directory()
+ self.db_path = os.path.join(self.directory, 'db.db')
+
+ if not os.path.exists(self.directory):
+ os.makedirs(self.directory, exist_ok=True)
+ with open(os.path.join(self.directory, 'README'), 'w') as f:
+ f.write(
+ 'This directory is maintained by the pre-commit project.\n'
+ 'Learn more: https://github.com/pre-commit/pre-commit\n',
+ )
+
+ if os.path.exists(self.db_path):
+ return
+ with self.exclusive_lock():
+ # Another process may have already completed this work
+ if os.path.exists(self.db_path): # pragma: no cover (race)
+ return
+ # To avoid a race where someone ^Cs between db creation and
+ # execution of the CREATE TABLE statement
+ fd, tmpfile = tempfile.mkstemp(dir=self.directory)
+ # We'll be managing this file ourselves
+ os.close(fd)
+ with self.connect(db_path=tmpfile) as db:
+ db.executescript(
+ 'CREATE TABLE repos ('
+ ' repo TEXT NOT NULL,'
+ ' ref TEXT NOT NULL,'
+ ' path TEXT NOT NULL,'
+ ' PRIMARY KEY (repo, ref)'
+ ');',
+ )
+ self._create_config_table(db)
+
+ # Atomic file move
+ os.rename(tmpfile, self.db_path)
+
+ @contextlib.contextmanager
+ def exclusive_lock(self) -> Generator[None, None, None]:
+ def blocked_cb() -> None: # pragma: no cover (tests are in-process)
+ logger.info('Locking pre-commit directory')
+
+ with file_lock.lock(os.path.join(self.directory, '.lock'), blocked_cb):
+ yield
+
+ @contextlib.contextmanager
+ def connect(
+ self,
+ db_path: Optional[str] = None,
+ ) -> Generator[sqlite3.Connection, None, None]:
+ db_path = db_path or self.db_path
+ # sqlite doesn't close its fd with its contextmanager >.<
+ # contextlib.closing fixes this.
+ # See: https://stackoverflow.com/a/28032829/812183
+ with contextlib.closing(sqlite3.connect(db_path)) as db:
+ # this creates a transaction
+ with db:
+ yield db
+
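+ # a repo requested with additional dependencies is keyed (and therefore
+ # cloned and installed) separately from the bare repo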
+ @classmethod
+ def db_repo_name(cls, repo: str, deps: Sequence[str]) -> str:
+ if deps:
+ return f'{repo}:{",".join(sorted(deps))}'
+ else:
+ return repo
+
+ def _new_repo(
+ self,
+ repo: str,
+ ref: str,
+ deps: Sequence[str],
+ make_strategy: Callable[[str], None],
+ ) -> str:
+ repo = self.db_repo_name(repo, deps)
+
+ def _get_result() -> Optional[str]:
+ # Check if we already exist
+ with self.connect() as db:
+ result = db.execute(
+ 'SELECT path FROM repos WHERE repo = ? AND ref = ?',
+ (repo, ref),
+ ).fetchone()
+ return result[0] if result else None
+
+ result = _get_result()
+ if result:
+ return result
+ with self.exclusive_lock():
+ # Another process may have already completed this work
+ result = _get_result()
+ if result: # pragma: no cover (race)
+ return result
+
+ logger.info(f'Initializing environment for {repo}.')
+
+ directory = tempfile.mkdtemp(prefix='repo', dir=self.directory)
+ with clean_path_on_failure(directory):
+ make_strategy(directory)
+
+ # Update our db with the created repo
+ with self.connect() as db:
+ db.execute(
+ 'INSERT INTO repos (repo, ref, path) VALUES (?, ?, ?)',
+ [repo, ref, directory],
+ )
+ return directory
+
+ def _complete_clone(self, ref: str, git_cmd: Callable[..., None]) -> None:
+ """Perform a complete clone of a repository and its submodules """
+
+ git_cmd('fetch', 'origin', '--tags')
+ git_cmd('checkout', ref)
+ git_cmd('submodule', 'update', '--init', '--recursive')
+
+ def _shallow_clone(self, ref: str, git_cmd: Callable[..., None]) -> None:
+ """Perform a shallow clone of a repository and its submodules """
+
+ git_config = 'protocol.version=2'
+ git_cmd('-c', git_config, 'fetch', 'origin', ref, '--depth=1')
+ git_cmd('checkout', 'FETCH_HEAD')
+ git_cmd(
+ '-c', git_config, 'submodule', 'update', '--init', '--recursive',
+ '--depth=1',
+ )
+
+ def clone(self, repo: str, ref: str, deps: Sequence[str] = ()) -> str:
+ """Clone the given url and checkout the specific ref."""
+
+ def clone_strategy(directory: str) -> None:
+ git.init_repo(directory, repo)
+ env = git.no_git_env()
+
+ def _git_cmd(*args: str) -> None:
+ cmd_output_b('git', *args, cwd=directory, env=env)
+
+ try:
+ self._shallow_clone(ref, _git_cmd)
+ except CalledProcessError:
+ self._complete_clone(ref, _git_cmd)
+
+ return self._new_repo(repo, ref, deps, clone_strategy)
+
+ LOCAL_RESOURCES = (
+ 'Cargo.toml', 'main.go', 'main.rs', '.npmignore', 'package.json',
+ 'pre_commit_dummy_package.gemspec', 'setup.py', 'environment.yml',
+ 'Makefile.PL',
+ )
+
+ def make_local(self, deps: Sequence[str]) -> str:
+ def make_local_strategy(directory: str) -> None:
+ for resource in self.LOCAL_RESOURCES:
+ contents = resource_text(f'empty_template_{resource}')
+ with open(os.path.join(directory, resource), 'w') as f:
+ f.write(contents)
+
+ env = git.no_git_env()
+
+ # initialize the git repository so it looks more like cloned repos
+ def _git_cmd(*args: str) -> None:
+ cmd_output_b('git', *args, cwd=directory, env=env)
+
+ git.init_repo(directory, '<<unknown>>')
+ _git_cmd('add', '.')
+ git.commit(repo=directory)
+
+ return self._new_repo(
+ 'local', C.LOCAL_REPO_VERSION, deps, make_local_strategy,
+ )
+
+ def _create_config_table(self, db: sqlite3.Connection) -> None:
+ db.executescript(
+ 'CREATE TABLE IF NOT EXISTS configs ('
+ ' path TEXT NOT NULL,'
+ ' PRIMARY KEY (path)'
+ ');',
+ )
+
+ def mark_config_used(self, path: str) -> None:
+ path = os.path.realpath(path)
+ # don't insert config files that do not exist
+ if not os.path.exists(path):
+ return
+ with self.connect() as db:
+ # TODO: eventually remove this and only create in _create
+ self._create_config_table(db)
+ db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,))
+
+ def select_all_configs(self) -> List[str]:
+ with self.connect() as db:
+ self._create_config_table(db)
+ rows = db.execute('SELECT path FROM configs').fetchall()
+ return [path for path, in rows]
+
+ def delete_configs(self, configs: List[str]) -> None:
+ with self.connect() as db:
+ rows = [(path,) for path in configs]
+ db.executemany('DELETE FROM configs WHERE path = ?', rows)
+
+ def select_all_repos(self) -> List[Tuple[str, str, str]]:
+ with self.connect() as db:
+ return db.execute('SELECT repo, ref, path from repos').fetchall()
+
+ def delete_repo(self, db_repo_name: str, ref: str, path: str) -> None:
+ with self.connect() as db:
+ db.execute(
+ 'DELETE FROM repos WHERE repo = ? and ref = ?',
+ (db_repo_name, ref),
+ )
+ rmtree(path)
diff --git a/pre_commit/util.py b/pre_commit/util.py
new file mode 100644
index 0000000..2db579a
--- /dev/null
+++ b/pre_commit/util.py
@@ -0,0 +1,272 @@
+import contextlib
+import errno
+import functools
+import os.path
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generator
+from typing import IO
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import Union
+
+import yaml
+
+from pre_commit import parse_shebang
+
+if sys.version_info >= (3, 7): # pragma: no cover (PY37+)
+ from importlib.resources import open_binary
+ from importlib.resources import read_text
+else: # pragma: no cover (<PY37)
+ from importlib_resources import open_binary
+ from importlib_resources import read_text
+
+EnvironT = Union[Dict[str, str], 'os._Environ']
+
+Loader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader)
+yaml_load = functools.partial(yaml.load, Loader=Loader)
+Dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper)
+
+
+def yaml_dump(o: Any) -> str:
+ # when python/mypy#1484 is solved, this can be `functools.partial`
+ return yaml.dump(
+ o, Dumper=Dumper, default_flow_style=False, indent=4, sort_keys=False,
+ )
+
+
+def force_bytes(exc: Any) -> bytes:
+ with contextlib.suppress(TypeError):
+ return bytes(exc)
+ with contextlib.suppress(Exception):
+ return str(exc).encode()
+ return f'<unprintable {type(exc).__name__} object>'.encode()
+
+
+@contextlib.contextmanager
+def clean_path_on_failure(path: str) -> Generator[None, None, None]:
+ """Cleans up the directory on an exceptional failure."""
+ try:
+ yield
+ except BaseException:
+ if os.path.exists(path):
+ rmtree(path)
+ raise
+
+
+@contextlib.contextmanager
+def tmpdir() -> Generator[str, None, None]:
+ """Contextmanager to create a temporary directory. It will be cleaned up
+ afterwards.
+ """
+ tempdir = tempfile.mkdtemp()
+ try:
+ yield tempdir
+ finally:
+ rmtree(tempdir)
+
+
+def resource_bytesio(filename: str) -> IO[bytes]:
+ return open_binary('pre_commit.resources', filename)
+
+
+def resource_text(filename: str) -> str:
+ return read_text('pre_commit.resources', filename)
+
+
+def make_executable(filename: str) -> None:
+ original_mode = os.stat(filename).st_mode
+ new_mode = original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+ os.chmod(filename, new_mode)
+
+
+class CalledProcessError(RuntimeError):
+ def __init__(
+ self,
+ returncode: int,
+ cmd: Tuple[str, ...],
+ expected_returncode: int,
+ stdout: bytes,
+ stderr: Optional[bytes],
+ ) -> None:
+ super().__init__(returncode, cmd, expected_returncode, stdout, stderr)
+ self.returncode = returncode
+ self.cmd = cmd
+ self.expected_returncode = expected_returncode
+ self.stdout = stdout
+ self.stderr = stderr
+
+ def __bytes__(self) -> bytes:
+ def _indent_or_none(part: Optional[bytes]) -> bytes:
+ if part:
+ return b'\n ' + part.replace(b'\n', b'\n ')
+ else:
+ return b' (none)'
+
+ return b''.join((
+ f'command: {self.cmd!r}\n'.encode(),
+ f'return code: {self.returncode}\n'.encode(),
+ f'expected return code: {self.expected_returncode}\n'.encode(),
+ b'stdout:', _indent_or_none(self.stdout), b'\n',
+ b'stderr:', _indent_or_none(self.stderr),
+ ))
+
+ def __str__(self) -> str:
+ return self.__bytes__().decode()
+
+
+def _setdefault_kwargs(kwargs: Dict[str, Any]) -> None:
+ for arg in ('stdin', 'stdout', 'stderr'):
+ kwargs.setdefault(arg, subprocess.PIPE)
+
+
+def _oserror_to_output(e: OSError) -> Tuple[int, bytes, None]:
+ return 1, force_bytes(e).rstrip(b'\n') + b'\n', None
+
+
+def cmd_output_b(
+ *cmd: str,
+ retcode: Optional[int] = 0,
+ **kwargs: Any,
+) -> Tuple[int, bytes, Optional[bytes]]:
+ _setdefault_kwargs(kwargs)
+
+ try:
+ cmd = parse_shebang.normalize_cmd(cmd)
+ except parse_shebang.ExecutableNotFoundError as e:
+ returncode, stdout_b, stderr_b = e.to_output()
+ else:
+ try:
+ proc = subprocess.Popen(cmd, **kwargs)
+ except OSError as e:
+ returncode, stdout_b, stderr_b = _oserror_to_output(e)
+ else:
+ stdout_b, stderr_b = proc.communicate()
+ returncode = proc.returncode
+
+ if retcode is not None and retcode != returncode:
+ raise CalledProcessError(returncode, cmd, retcode, stdout_b, stderr_b)
+
+ return returncode, stdout_b, stderr_b
+
+
+def cmd_output(*cmd: str, **kwargs: Any) -> Tuple[int, str, Optional[str]]:
+ returncode, stdout_b, stderr_b = cmd_output_b(*cmd, **kwargs)
+ stdout = stdout_b.decode() if stdout_b is not None else None
+ stderr = stderr_b.decode() if stderr_b is not None else None
+ return returncode, stdout, stderr
+
+
+if os.name != 'nt': # pragma: win32 no cover
+ from os import openpty
+ import termios
+
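+ # a pseudo-terminal is used (see cmd_output_p below) so subprocesses
+ # believe they are attached to a terminal and keep emitting color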
+ class Pty:
+ def __init__(self) -> None:
+ self.r: Optional[int] = None
+ self.w: Optional[int] = None
+
+ def __enter__(self) -> 'Pty':
+ self.r, self.w = openpty()
+
+ # tty flags normally change \n to \r\n
+ attrs = termios.tcgetattr(self.r)
+ assert isinstance(attrs[1], int)
+ attrs[1] &= ~(termios.ONLCR | termios.OPOST)
+ termios.tcsetattr(self.r, termios.TCSANOW, attrs)
+
+ return self
+
+ def close_w(self) -> None:
+ if self.w is not None:
+ os.close(self.w)
+ self.w = None
+
+ def close_r(self) -> None:
+ assert self.r is not None
+ os.close(self.r)
+ self.r = None
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ self.close_w()
+ self.close_r()
+
+ def cmd_output_p(
+ *cmd: str,
+ retcode: Optional[int] = 0,
+ **kwargs: Any,
+ ) -> Tuple[int, bytes, Optional[bytes]]:
+ assert retcode is None
+ assert kwargs['stderr'] == subprocess.STDOUT, kwargs['stderr']
+ _setdefault_kwargs(kwargs)
+
+ try:
+ cmd = parse_shebang.normalize_cmd(cmd)
+ except parse_shebang.ExecutableNotFoundError as e:
+ return e.to_output()
+
+ with open(os.devnull) as devnull, Pty() as pty:
+ assert pty.r is not None
+ kwargs.update({'stdin': devnull, 'stdout': pty.w, 'stderr': pty.w})
+ try:
+ proc = subprocess.Popen(cmd, **kwargs)
+ except OSError as e:
+ return _oserror_to_output(e)
+
+ pty.close_w()
+
+ buf = b''
+ while True:
+ try:
+ bts = os.read(pty.r, 4096)
+ except OSError as e:
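+ # on Linux, reading from a pty whose other end has closed raises
+ # EIO instead of returning EOF -- treat it the same way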
+ if e.errno == errno.EIO:
+ bts = b''
+ else:
+ raise
+ else:
+ buf += bts
+ if not bts:
+ break
+
+ return proc.wait(), buf, None
+else: # pragma: no cover
+ cmd_output_p = cmd_output_b
+
+
+def rmtree(path: str) -> None:
+ """On windows, rmtree fails for readonly dirs."""
+ def handle_remove_readonly(
+ func: Callable[..., Any],
+ path: str,
+ exc: Tuple[Type[OSError], OSError, TracebackType],
+ ) -> None:
+ excvalue = exc[1]
+ if (
+ func in (os.rmdir, os.remove, os.unlink) and
+ excvalue.errno == errno.EACCES
+ ):
+ for p in (path, os.path.dirname(path)):
+ os.chmod(p, os.stat(p).st_mode | stat.S_IWUSR)
+ func(path)
+ else:
+ raise
+ shutil.rmtree(path, ignore_errors=False, onerror=handle_remove_readonly)
+
+
+def parse_version(s: str) -> Tuple[int, ...]:
+ """poor man's version comparison"""
+ return tuple(int(p) for p in s.split('.'))
diff --git a/pre_commit/xargs.py b/pre_commit/xargs.py
new file mode 100644
index 0000000..5235dc6
--- /dev/null
+++ b/pre_commit/xargs.py
@@ -0,0 +1,157 @@
+import concurrent.futures
+import contextlib
+import math
+import os
+import subprocess
+import sys
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TypeVar
+
+from pre_commit import parse_shebang
+from pre_commit.util import cmd_output_b
+from pre_commit.util import cmd_output_p
+from pre_commit.util import EnvironT
+
+TArg = TypeVar('TArg')
+TRet = TypeVar('TRet')
+
+
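+# on posix the environment shares the ARG_MAX budget with argv, so its size
+# is subtracted from the usable command-line length below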
+def _environ_size(_env: Optional[EnvironT] = None) -> int:
+ environ = _env if _env is not None else getattr(os, 'environb', os.environ)
+ size = 8 * len(environ) # number of pointers in `envp`
+ for k, v in environ.items():
+ size += len(k) + len(v) + 2 # c strings in `envp`
+ return size
+
+
+def _get_platform_max_length() -> int: # pragma: no cover (platform specific)
+ if os.name == 'posix':
+ maximum = os.sysconf('SC_ARG_MAX') - 2048 - _environ_size()
+ maximum = max(min(maximum, 2 ** 17), 2 ** 12)
+ return maximum
+ elif os.name == 'nt':
+ return 2 ** 15 - 2048 # UNICODE_STRING max - headroom
+ else:
+ # posix minimum
+ return 2 ** 12
+
+
+def _command_length(*cmd: str) -> int:
+ full_cmd = ' '.join(cmd)
+
+ # win32 uses the amount of characters, more details at:
+ # https://github.com/pre-commit/pre-commit/pull/839
+ if sys.platform == 'win32':
+ return len(full_cmd.encode('utf-16le')) // 2
+ else:
+ return len(full_cmd.encode(sys.getfilesystemencoding()))
+
+
+class ArgumentTooLongError(RuntimeError):
+ pass
+
+
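+# split `varargs` into chunks, each forming a full command line that stays
+# under the platform maximum length, aiming for roughly `target_concurrency`
+# evenly sized partitions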
+def partition(
+ cmd: Sequence[str],
+ varargs: Sequence[str],
+ target_concurrency: int,
+ _max_length: Optional[int] = None,
+) -> Tuple[Tuple[str, ...], ...]:
+ _max_length = _max_length or _get_platform_max_length()
+
+ # Generally, we try to partition evenly into at least `target_concurrency`
+ # partitions, but we don't want a bunch of tiny partitions.
+ max_args = max(4, math.ceil(len(varargs) / target_concurrency))
+
+ cmd = tuple(cmd)
+ ret = []
+
+ ret_cmd: List[str] = []
+ # Reversed so arguments are in order
+ varargs = list(reversed(varargs))
+
+ total_length = _command_length(*cmd) + 1
+ while varargs:
+ arg = varargs.pop()
+
+ arg_length = _command_length(arg) + 1
+ if (
+ total_length + arg_length <= _max_length and
+ len(ret_cmd) < max_args
+ ):
+ ret_cmd.append(arg)
+ total_length += arg_length
+ elif not ret_cmd:
+ raise ArgumentTooLongError(arg)
+ else:
+ # We've exceeded the length, yield a command
+ ret.append(cmd + tuple(ret_cmd))
+ ret_cmd = []
+ total_length = _command_length(*cmd) + 1
+ varargs.append(arg)
+
+ ret.append(cmd + tuple(ret_cmd))
+
+ return tuple(ret)
+
+
+@contextlib.contextmanager
+def _thread_mapper(maxsize: int) -> Generator[
+ Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]],
+ None, None,
+]:
+ if maxsize == 1:
+ yield map
+ else:
+ with concurrent.futures.ThreadPoolExecutor(maxsize) as ex:
+ yield ex.map
+
+
+def xargs(
+ cmd: Tuple[str, ...],
+ varargs: Sequence[str],
+ *,
+ color: bool = False,
+ target_concurrency: int = 1,
+ _max_length: int = _get_platform_max_length(),
+ **kwargs: Any,
+) -> Tuple[int, bytes]:
+ """A simplified implementation of xargs.
+
+ color: Make a pty if on a platform that supports it
+ target_concurrency: Target number of partitions to run concurrently
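+
+ Example (illustrative, with a hypothetical `flake8` command)::
+
+ retcode, out = xargs(('flake8',), filenames, target_concurrency=4)
+
+ stdout and stderr are combined into `out`, and the highest return code
+ across partitions is returned.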
+ """
+ cmd_fn = cmd_output_p if color else cmd_output_b
+ retcode = 0
+ stdout = b''
+
+ try:
+ cmd = parse_shebang.normalize_cmd(cmd)
+ except parse_shebang.ExecutableNotFoundError as e:
+ return e.to_output()[:2]
+
+ partitions = partition(cmd, varargs, target_concurrency, _max_length)
+
+ def run_cmd_partition(
+ run_cmd: Tuple[str, ...],
+ ) -> Tuple[int, bytes, Optional[bytes]]:
+ return cmd_fn(
+ *run_cmd, retcode=None, stderr=subprocess.STDOUT, **kwargs,
+ )
+
+ threads = min(len(partitions), target_concurrency)
+ with _thread_mapper(threads) as thread_map:
+ results = thread_map(run_cmd_partition, partitions)
+
+ for proc_retcode, proc_out, _ in results:
+ retcode = max(retcode, proc_retcode)
+ stdout += proc_out
+
+ return retcode, stdout
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 0000000..d6a13dc
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,4 @@
+covdefaults
+coverage
+pytest
+pytest-env
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..a02fab1
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,70 @@
+[metadata]
+name = pre_commit
+version = 2.2.0
+description = A framework for managing and maintaining multi-language pre-commit hooks.
+long_description = file: README.md
+long_description_content_type = text/markdown
+url = https://github.com/pre-commit/pre-commit
+author = Anthony Sottile
+author_email = asottile@umich.edu
+license = MIT
+license_file = LICENSE
+classifiers =
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: Implementation :: CPython
+ Programming Language :: Python :: Implementation :: PyPy
+
+[options]
+packages = find:
+install_requires =
+ cfgv>=2.0.0
+ identify>=1.0.0
+ nodeenv>=0.11.1
+ pyyaml>=5.1
+ toml
+ virtualenv>=15.2
+ importlib-metadata;python_version<"3.8"
+ importlib-resources;python_version<"3.7"
+python_requires = >=3.6.1
+
+[options.entry_points]
+console_scripts =
+ pre-commit = pre_commit.main:main
+ pre-commit-validate-config = pre_commit.clientlib:validate_config_main
+ pre-commit-validate-manifest = pre_commit.clientlib:validate_manifest_main
+
+[options.package_data]
+pre_commit.resources =
+ *.tar.gz
+ empty_template_*
+ hook-tmpl
+
+[options.packages.find]
+exclude =
+ tests*
+ testing*
+
+[bdist_wheel]
+universal = True
+
+[coverage:run]
+plugins = covdefaults
+omit = pre_commit/resources/*
+
+[mypy]
+check_untyped_defs = true
+disallow_any_generics = true
+disallow_incomplete_defs = true
+disallow_untyped_defs = true
+no_implicit_optional = true
+
+[mypy-testing.*]
+disallow_untyped_defs = false
+
+[mypy-tests.*]
+disallow_untyped_defs = false
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..8bf1ba9
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,2 @@
+from setuptools import setup
+setup()
diff --git a/testing/__init__.py b/testing/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/__init__.py
diff --git a/testing/auto_namedtuple.py b/testing/auto_namedtuple.py
new file mode 100644
index 0000000..0841094
--- /dev/null
+++ b/testing/auto_namedtuple.py
@@ -0,0 +1,11 @@
+import collections
+
+
+def auto_namedtuple(classname='auto_namedtuple', **kwargs):
+ """Returns an automatic namedtuple object.
+
+ Args:
+ classname - The class name for the returned object.
+ **kwargs - Properties to give the returned object.
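+
+ Example (illustrative):
+ nt = auto_namedtuple(foo=1, bar='baz')
+ assert nt.foo == 1 and nt.bar == 'baz'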
+ """
+ return collections.namedtuple(classname, kwargs.keys())(**kwargs)
diff --git a/testing/fixtures.py b/testing/fixtures.py
new file mode 100644
index 0000000..f7def08
--- /dev/null
+++ b/testing/fixtures.py
@@ -0,0 +1,146 @@
+import contextlib
+import os.path
+import shutil
+
+from cfgv import apply_defaults
+from cfgv import validate
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.clientlib import CONFIG_SCHEMA
+from pre_commit.clientlib import load_manifest
+from pre_commit.util import cmd_output
+from pre_commit.util import yaml_dump
+from pre_commit.util import yaml_load
+from testing.util import get_resource_path
+from testing.util import git_commit
+
+
+def copy_tree_to_path(src_dir, dest_dir):
+ """Copies all of the things inside src_dir to an already existing dest_dir.
+
+ This looks eerily similar to shutil.copytree, but copytree has no option
+ for not creating dest_dir.
+ """
+ names = os.listdir(src_dir)
+
+ for name in names:
+ srcname = os.path.join(src_dir, name)
+ destname = os.path.join(dest_dir, name)
+
+ if os.path.isdir(srcname):
+ shutil.copytree(srcname, destname)
+ else:
+ shutil.copy(srcname, destname)
+
+
+def git_dir(tempdir_factory):
+ path = tempdir_factory.get()
+ cmd_output('git', 'init', path)
+ return path
+
+
+def make_repo(tempdir_factory, repo_source):
+ path = git_dir(tempdir_factory)
+ copy_tree_to_path(get_resource_path(repo_source), path)
+ cmd_output('git', 'add', '.', cwd=path)
+ git_commit(msg=make_repo.__name__, cwd=path)
+ return path
+
+
+@contextlib.contextmanager
+def modify_manifest(path, commit=True):
+ """Modify the manifest yielded by this context to write to
+ .pre-commit-hooks.yaml.
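+
+ Example (illustrative):
+ with modify_manifest(repo_path) as manifest:
+ manifest[0]['id'] = 'renamed-hook'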
+ """
+ manifest_path = os.path.join(path, C.MANIFEST_FILE)
+ with open(manifest_path) as f:
+ manifest = yaml_load(f.read())
+ yield manifest
+ with open(manifest_path, 'w') as manifest_file:
+ manifest_file.write(yaml_dump(manifest))
+ if commit:
+ git_commit(msg=modify_manifest.__name__, cwd=path)
+
+
+@contextlib.contextmanager
+def modify_config(path='.', commit=True):
+ """Modify the config yielded by this context to write to
+ .pre-commit-config.yaml
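+
+ Example (illustrative):
+ with modify_config(repo_path) as config:
+ config['repos'][0]['rev'] = 'v1.2.3'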
+ """
+ config_path = os.path.join(path, C.CONFIG_FILE)
+ with open(config_path) as f:
+ config = yaml_load(f.read())
+ yield config
+ with open(config_path, 'w', encoding='UTF-8') as config_file:
+ config_file.write(yaml_dump(config))
+ if commit:
+ git_commit(msg=modify_config.__name__, cwd=path)
+
+
+def sample_local_config():
+ return {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'do_not_commit',
+ 'name': 'Block if "DO NOT COMMIT" is found',
+ 'entry': 'DO NOT COMMIT',
+ 'language': 'pygrep',
+ }],
+ }
+
+
+def sample_meta_config():
+ return {'repo': 'meta', 'hooks': [{'id': 'check-useless-excludes'}]}
+
+
+def make_config_from_repo(repo_path, rev=None, hooks=None, check=True):
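+ # Build a single-repo config entry: rev defaults to the repo's current HEAD,
+ # hooks default to every hook in its manifest, and check=True validates the
+ # result against CONFIG_SCHEMA and applies schema defaults.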
+ manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
+ config = {
+ 'repo': f'file://{repo_path}',
+ 'rev': rev or git.head_rev(repo_path),
+ 'hooks': hooks or [{'id': hook['id']} for hook in manifest],
+ }
+
+ if check:
+ wrapped = validate({'repos': [config]}, CONFIG_SCHEMA)
+ wrapped = apply_defaults(wrapped, CONFIG_SCHEMA)
+ config, = wrapped['repos']
+ return config
+ else:
+ return config
+
+
+def read_config(directory, config_file=C.CONFIG_FILE):
+ config_path = os.path.join(directory, config_file)
+ with open(config_path) as f:
+ config = yaml_load(f.read())
+ return config
+
+
+def write_config(directory, config, config_file=C.CONFIG_FILE):
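+ # A bare repo dict is wrapped into the top-level {'repos': [...]} form;
+ # lists are written unchanged (the legacy list-style config).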
+ if type(config) is not list and 'repos' not in config:
+ assert isinstance(config, dict), config
+ config = {'repos': [config]}
+ with open(os.path.join(directory, config_file), 'w') as outfile:
+ outfile.write(yaml_dump(config))
+
+
+def add_config_to_repo(git_path, config, config_file=C.CONFIG_FILE):
+ write_config(git_path, config, config_file=config_file)
+ cmd_output('git', 'add', config_file, cwd=git_path)
+ git_commit(msg=add_config_to_repo.__name__, cwd=git_path)
+ return git_path
+
+
+def remove_config_from_repo(git_path, config_file=C.CONFIG_FILE):
+ cmd_output('git', 'rm', config_file, cwd=git_path)
+ git_commit(msg=remove_config_from_repo.__name__, cwd=git_path)
+ return git_path
+
+
+def make_consuming_repo(tempdir_factory, repo_source):
+ path = make_repo(tempdir_factory, repo_source)
+ config = make_config_from_repo(path)
+ git_path = git_dir(tempdir_factory)
+ return add_config_to_repo(git_path, config)
diff --git a/testing/gen-languages-all b/testing/gen-languages-all
new file mode 100755
index 0000000..6d0b26f
--- /dev/null
+++ b/testing/gen-languages-all
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+import sys
+
+LANGUAGES = [
+ 'conda', 'docker', 'docker_image', 'fail', 'golang', 'node', 'perl',
+ 'pygrep', 'python', 'python_venv', 'ruby', 'rust', 'script', 'swift',
+ 'system',
+]
+FIELDS = [
+ 'ENVIRONMENT_DIR', 'get_default_version', 'healthy', 'install_environment',
+ 'run_hook',
+]
+
+
+def main() -> int:
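+ # Prints one Language(...) entry per language between BEGIN/END GENERATED
+ # markers, presumably for pasting into pre_commit/languages/all.py.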
+ print(f' # BEGIN GENERATED ({sys.argv[0]})')
+ for lang in LANGUAGES:
+ parts = [f' {lang!r}: Language(name={lang!r}']
+ for k in FIELDS:
+ parts.append(f', {k}={lang}.{k}')
+ parts.append('), # noqa: E501')
+ print(''.join(parts))
+ print(' # END GENERATED')
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/testing/get-swift.sh b/testing/get-swift.sh
new file mode 100755
index 0000000..e205d44
--- /dev/null
+++ b/testing/get-swift.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# This is a script used in CI to install swift
+set -euxo pipefail
+
+. /etc/lsb-release
+if [ "$DISTRIB_CODENAME" = "bionic" ]; then
+ SWIFT_URL='https://swift.org/builds/swift-5.1.3-release/ubuntu1804/swift-5.1.3-RELEASE/swift-5.1.3-RELEASE-ubuntu18.04.tar.gz'
+ SWIFT_HASH='ac82ccd773fe3d586fc340814e31e120da1ff695c6a712f6634e9cc720769610'
+else
+ echo "unknown dist: ${DISTRIB_CODENAME}" 1>&2
+ exit 1
+fi
+
+check() {
+ echo "$SWIFT_HASH $TGZ" | sha256sum --check
+}
+
+TGZ="$HOME/.swift/swift.tar.gz"
+mkdir -p "$(dirname "$TGZ")"
+if ! check >& /dev/null; then
+ rm -f "$TGZ"
+ curl --location --silent --output "$TGZ" "$SWIFT_URL"
+ check
+fi
+
+mkdir -p /tmp/swift
+tar -xf "$TGZ" --strip 1 --directory /tmp/swift
diff --git a/testing/resources/arbitrary_bytes_repo/.pre-commit-hooks.yaml b/testing/resources/arbitrary_bytes_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..c2aec9b
--- /dev/null
+++ b/testing/resources/arbitrary_bytes_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: hook
+ name: hook
+ entry: ./hook.sh
+ language: script
+ files: \.py$
diff --git a/testing/resources/arbitrary_bytes_repo/hook.sh b/testing/resources/arbitrary_bytes_repo/hook.sh
new file mode 100755
index 0000000..9df0c5a
--- /dev/null
+++ b/testing/resources/arbitrary_bytes_repo/hook.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+# Intentionally write mixed encoding to the output. This should not crash
+# pre-commit and should write bytes to the output.
+# 'β˜ƒ'.encode() + 'Β²'.encode('latin1')
+echo -e '\xe2\x98\x83\xb2'
+# exit 1 to trigger printing
+exit 1
diff --git a/testing/resources/arg_per_line_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/arg_per_line_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..4c101db
--- /dev/null
+++ b/testing/resources/arg_per_line_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+- id: arg-per-line
+ name: Args per line hook
+ entry: bin/hook.sh
+ language: script
+ files: ''
+ args: [hello, world]
diff --git a/testing/resources/arg_per_line_hooks_repo/bin/hook.sh b/testing/resources/arg_per_line_hooks_repo/bin/hook.sh
new file mode 100755
index 0000000..47fd21d
--- /dev/null
+++ b/testing/resources/arg_per_line_hooks_repo/bin/hook.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+for i in "$@"; do
+ echo "arg: $i"
+done
diff --git a/testing/resources/conda_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/conda_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..a0d274c
--- /dev/null
+++ b/testing/resources/conda_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,10 @@
+- id: sys-exec
+ name: sys-exec
+ entry: python -c 'import os; import sys; print(sys.executable.split(os.path.sep)[-2]) if os.name == "nt" else print(sys.executable.split(os.path.sep)[-3])'
+ language: conda
+ files: \.py$
+- id: additional-deps
+ name: additional-deps
+ entry: python
+ language: conda
+ files: \.py$
diff --git a/testing/resources/conda_hooks_repo/environment.yml b/testing/resources/conda_hooks_repo/environment.yml
new file mode 100644
index 0000000..e23c079
--- /dev/null
+++ b/testing/resources/conda_hooks_repo/environment.yml
@@ -0,0 +1,6 @@
+channels:
+ - conda-forge
+ - defaults
+dependencies:
+ - python
+ - pip
diff --git a/testing/resources/docker_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/docker_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..5295739
--- /dev/null
+++ b/testing/resources/docker_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,17 @@
+- id: docker-hook
+ name: Docker test hook
+ entry: echo
+ language: docker
+ files: \.txt$
+
+- id: docker-hook-arg
+ name: Docker test hook
+ entry: echo -n
+ language: docker
+ files: \.txt$
+
+- id: docker-hook-failing
+ name: Docker test hook with nonzero exit code
+ entry: bork
+ language: docker
+ files: \.txt$
diff --git a/testing/resources/docker_hooks_repo/Dockerfile b/testing/resources/docker_hooks_repo/Dockerfile
new file mode 100644
index 0000000..841b151
--- /dev/null
+++ b/testing/resources/docker_hooks_repo/Dockerfile
@@ -0,0 +1,3 @@
+FROM cogniteev/echo
+
+CMD ["echo", "This is overwritten by the .pre-commit-hooks.yaml 'entry'"]
diff --git a/testing/resources/docker_image_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/docker_image_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..1b385aa
--- /dev/null
+++ b/testing/resources/docker_image_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,8 @@
+- id: echo-entrypoint
+ name: echo (via --entrypoint)
+ language: docker_image
+ entry: --entrypoint echo cogniteev/echo
+- id: echo-cmd
+ name: echo (via cmd)
+ language: docker_image
+ entry: cogniteev/echo echo
diff --git a/testing/resources/exclude_types_repo/.pre-commit-hooks.yaml b/testing/resources/exclude_types_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..ed8794f
--- /dev/null
+++ b/testing/resources/exclude_types_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+- id: python-files
+ name: Python files
+ entry: bin/hook.sh
+ language: script
+ types: [python]
+ exclude_types: [python3]
diff --git a/testing/resources/exclude_types_repo/bin/hook.sh b/testing/resources/exclude_types_repo/bin/hook.sh
new file mode 100755
index 0000000..bdade51
--- /dev/null
+++ b/testing/resources/exclude_types_repo/bin/hook.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+echo $@
+exit 1
diff --git a/testing/resources/failing_hook_repo/.pre-commit-hooks.yaml b/testing/resources/failing_hook_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..118cc8b
--- /dev/null
+++ b/testing/resources/failing_hook_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: failing_hook
+ name: Failing hook
+ entry: bin/hook.sh
+ language: script
+ files: .
diff --git a/testing/resources/failing_hook_repo/bin/hook.sh b/testing/resources/failing_hook_repo/bin/hook.sh
new file mode 100755
index 0000000..229ccaf
--- /dev/null
+++ b/testing/resources/failing_hook_repo/bin/hook.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+echo 'Fail'
+echo $@
+exit 1
diff --git a/testing/resources/golang_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/golang_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..206733b
--- /dev/null
+++ b/testing/resources/golang_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: golang-hook
+ name: golang example hook
+ entry: golang-hello-world
+ language: golang
+ files: ''
diff --git a/testing/resources/golang_hooks_repo/golang-hello-world/main.go b/testing/resources/golang_hooks_repo/golang-hello-world/main.go
new file mode 100644
index 0000000..1e3c591
--- /dev/null
+++ b/testing/resources/golang_hooks_repo/golang-hello-world/main.go
@@ -0,0 +1,17 @@
+package main
+
+
+import (
+ "fmt"
+ "github.com/BurntSushi/toml"
+)
+
+type Config struct {
+ What string
+}
+
+func main() {
+ var conf Config
+ toml.Decode("What = 'world'\n", &conf)
+ fmt.Printf("hello %v\n", conf.What)
+}
diff --git a/testing/resources/img1.jpg b/testing/resources/img1.jpg
new file mode 100644
index 0000000..dea4262
--- /dev/null
+++ b/testing/resources/img1.jpg
Binary files differ
diff --git a/testing/resources/img2.jpg b/testing/resources/img2.jpg
new file mode 100644
index 0000000..68568e5
--- /dev/null
+++ b/testing/resources/img2.jpg
Binary files differ
diff --git a/testing/resources/img3.jpg b/testing/resources/img3.jpg
new file mode 100644
index 0000000..392d2cf
--- /dev/null
+++ b/testing/resources/img3.jpg
Binary files differ
diff --git a/testing/resources/logfile_repo/.pre-commit-hooks.yaml b/testing/resources/logfile_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..dcaba2e
--- /dev/null
+++ b/testing/resources/logfile_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+- id: logfile test hook
+ name: Logfile test hook
+ entry: bin/hook.sh
+ language: script
+ files: .
+ log_file: test.log
diff --git a/testing/resources/logfile_repo/bin/hook.sh b/testing/resources/logfile_repo/bin/hook.sh
new file mode 100755
index 0000000..890d941
--- /dev/null
+++ b/testing/resources/logfile_repo/bin/hook.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+echo "This is STDOUT output"
+echo "This is STDERR output" 1>&2
+
+exit 1
diff --git a/testing/resources/modified_file_returns_zero_repo/.pre-commit-hooks.yaml b/testing/resources/modified_file_returns_zero_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..8d79ef3
--- /dev/null
+++ b/testing/resources/modified_file_returns_zero_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,15 @@
+- id: bash_hook
+ name: Bash hook
+ entry: bin/hook.sh
+ language: script
+ files: 'foo.py'
+- id: bash_hook2
+ name: Bash hook
+ entry: bin/hook2.sh
+ language: script
+ files: ''
+- id: bash_hook3
+ name: Bash hook
+ entry: bin/hook3.sh
+ language: script
+ files: 'bar.py'
diff --git a/testing/resources/modified_file_returns_zero_repo/bin/hook.sh b/testing/resources/modified_file_returns_zero_repo/bin/hook.sh
new file mode 100755
index 0000000..98b05f9
--- /dev/null
+++ b/testing/resources/modified_file_returns_zero_repo/bin/hook.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+for f in $@; do
+ # Non UTF-8 bytes
+ echo -e '\x01\x97' > "$f"
+ echo "Modified: $f!"
+done
diff --git a/testing/resources/modified_file_returns_zero_repo/bin/hook2.sh b/testing/resources/modified_file_returns_zero_repo/bin/hook2.sh
new file mode 100755
index 0000000..5af177a
--- /dev/null
+++ b/testing/resources/modified_file_returns_zero_repo/bin/hook2.sh
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+echo $@
diff --git a/testing/resources/modified_file_returns_zero_repo/bin/hook3.sh b/testing/resources/modified_file_returns_zero_repo/bin/hook3.sh
new file mode 100755
index 0000000..3180eb3
--- /dev/null
+++ b/testing/resources/modified_file_returns_zero_repo/bin/hook3.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+for f in $@; do
+ # Non UTF-8 bytes
+ echo -e '\x01\x97' > "$f"
+done
diff --git a/testing/resources/node_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/node_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..257698a
--- /dev/null
+++ b/testing/resources/node_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: foo
+ name: Foo
+ entry: foo
+ language: node
+ files: \.js$
diff --git a/testing/resources/node_hooks_repo/bin/main.js b/testing/resources/node_hooks_repo/bin/main.js
new file mode 100644
index 0000000..8e0f025
--- /dev/null
+++ b/testing/resources/node_hooks_repo/bin/main.js
@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+
+console.log('Hello World');
diff --git a/testing/resources/node_hooks_repo/package.json b/testing/resources/node_hooks_repo/package.json
new file mode 100644
index 0000000..050b630
--- /dev/null
+++ b/testing/resources/node_hooks_repo/package.json
@@ -0,0 +1,5 @@
+{
+ "name": "foo",
+ "version": "0.0.1",
+ "bin": {"foo": "./bin/main.js"}
+}
diff --git a/testing/resources/node_versioned_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/node_versioned_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..e7ad5ea
--- /dev/null
+++ b/testing/resources/node_versioned_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+- id: versioned-node-hook
+ name: Versioned node hook
+ entry: versioned-node-hook
+ language: node
+ language_version: 9.3.0
+ files: \.js$
diff --git a/testing/resources/node_versioned_hooks_repo/bin/main.js b/testing/resources/node_versioned_hooks_repo/bin/main.js
new file mode 100644
index 0000000..df12cbe
--- /dev/null
+++ b/testing/resources/node_versioned_hooks_repo/bin/main.js
@@ -0,0 +1,4 @@
+#!/usr/bin/env node
+
+console.log(process.version);
+console.log('Hello World');
diff --git a/testing/resources/node_versioned_hooks_repo/package.json b/testing/resources/node_versioned_hooks_repo/package.json
new file mode 100644
index 0000000..18c7787
--- /dev/null
+++ b/testing/resources/node_versioned_hooks_repo/package.json
@@ -0,0 +1,5 @@
+{
+ "name": "versioned-node-hook",
+ "version": "0.0.1",
+ "bin": {"versioned-node-hook": "./bin/main.js"}
+}
diff --git a/testing/resources/not_found_exe/.pre-commit-hooks.yaml b/testing/resources/not_found_exe/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..566f3c1
--- /dev/null
+++ b/testing/resources/not_found_exe/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: not-found-exe
+ name: Not found exe
+ entry: i-dont-exist-lol
+ language: system
+ files: ''
diff --git a/testing/resources/perl_hooks_repo/.gitignore b/testing/resources/perl_hooks_repo/.gitignore
new file mode 100644
index 0000000..7af9940
--- /dev/null
+++ b/testing/resources/perl_hooks_repo/.gitignore
@@ -0,0 +1,7 @@
+/MYMETA.json
+/MYMETA.yml
+/Makefile
+/PreCommitHello-*.tar.*
+/PreCommitHello-*/
+/blib/
+/pm_to_blib
diff --git a/testing/resources/perl_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/perl_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..11e6f6c
--- /dev/null
+++ b/testing/resources/perl_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: perl-hook
+ name: perl example hook
+ entry: pre-commit-perl-hello
+ language: perl
+ files: ''
diff --git a/testing/resources/perl_hooks_repo/MANIFEST b/testing/resources/perl_hooks_repo/MANIFEST
new file mode 100644
index 0000000..4a20084
--- /dev/null
+++ b/testing/resources/perl_hooks_repo/MANIFEST
@@ -0,0 +1,4 @@
+MANIFEST
+Makefile.PL
+bin/pre-commit-perl-hello
+lib/PreCommitHello.pm
diff --git a/testing/resources/perl_hooks_repo/Makefile.PL b/testing/resources/perl_hooks_repo/Makefile.PL
new file mode 100644
index 0000000..6c70e10
--- /dev/null
+++ b/testing/resources/perl_hooks_repo/Makefile.PL
@@ -0,0 +1,10 @@
+use strict;
+use warnings;
+
+use ExtUtils::MakeMaker;
+
+WriteMakefile(
+ NAME => "PreCommitHello",
+ VERSION_FROM => "lib/PreCommitHello.pm",
+ EXE_FILES => [qw(bin/pre-commit-perl-hello)],
+);
diff --git a/testing/resources/perl_hooks_repo/bin/pre-commit-perl-hello b/testing/resources/perl_hooks_repo/bin/pre-commit-perl-hello
new file mode 100755
index 0000000..9474009
--- /dev/null
+++ b/testing/resources/perl_hooks_repo/bin/pre-commit-perl-hello
@@ -0,0 +1,7 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+use PreCommitHello;
+
+PreCommitHello::hello();
diff --git a/testing/resources/perl_hooks_repo/lib/PreCommitHello.pm b/testing/resources/perl_hooks_repo/lib/PreCommitHello.pm
new file mode 100644
index 0000000..c76521c
--- /dev/null
+++ b/testing/resources/perl_hooks_repo/lib/PreCommitHello.pm
@@ -0,0 +1,12 @@
+package PreCommitHello;
+
+use strict;
+use warnings;
+
+our $VERSION = "0.1.0";
+
+sub hello {
+ print "Hello from perl-commit Perl!\n";
+}
+
+1;
diff --git a/testing/resources/prints_cwd_repo/.pre-commit-hooks.yaml b/testing/resources/prints_cwd_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..7092379
--- /dev/null
+++ b/testing/resources/prints_cwd_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: prints_cwd
+ name: Prints Cwd
+ entry: pwd
+ language: system
+ files: \.sh$
diff --git a/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..2c23700
--- /dev/null
+++ b/testing/resources/python3_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+- id: python3-hook
+ name: Python 3 Hook
+ entry: python3-hook
+ language: python
+ language_version: python3
+ files: \.py$
diff --git a/testing/resources/python3_hooks_repo/py3_hook.py b/testing/resources/python3_hooks_repo/py3_hook.py
new file mode 100644
index 0000000..8c9cda4
--- /dev/null
+++ b/testing/resources/python3_hooks_repo/py3_hook.py
@@ -0,0 +1,8 @@
+import sys
+
+
+def main():
+ print(sys.version_info[0])
+ print(repr(sys.argv[1:]))
+ print('Hello World')
+ return 0
diff --git a/testing/resources/python3_hooks_repo/setup.py b/testing/resources/python3_hooks_repo/setup.py
new file mode 100644
index 0000000..9125dc1
--- /dev/null
+++ b/testing/resources/python3_hooks_repo/setup.py
@@ -0,0 +1,8 @@
+from setuptools import setup
+
+setup(
+ name='python3_hook',
+ version='0.0.0',
+ py_modules=['py3_hook'],
+ entry_points={'console_scripts': ['python3-hook = py3_hook:main']},
+)
diff --git a/testing/resources/python_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/python_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..e10ad50
--- /dev/null
+++ b/testing/resources/python_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: foo
+ name: Foo
+ entry: foo
+ language: python
+ files: \.py$
diff --git a/testing/resources/python_hooks_repo/foo.py b/testing/resources/python_hooks_repo/foo.py
new file mode 100644
index 0000000..9c4368e
--- /dev/null
+++ b/testing/resources/python_hooks_repo/foo.py
@@ -0,0 +1,7 @@
+import sys
+
+
+def main():
+ print(repr(sys.argv[1:]))
+ print('Hello World')
+ return 0
diff --git a/testing/resources/python_hooks_repo/setup.py b/testing/resources/python_hooks_repo/setup.py
new file mode 100644
index 0000000..0559271
--- /dev/null
+++ b/testing/resources/python_hooks_repo/setup.py
@@ -0,0 +1,8 @@
+from setuptools import setup
+
+setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['foo'],
+ entry_points={'console_scripts': ['foo = foo:main']},
+)
diff --git a/testing/resources/python_venv_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/python_venv_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..a666ed8
--- /dev/null
+++ b/testing/resources/python_venv_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: foo
+ name: Foo
+ entry: foo
+ language: python_venv
+ files: \.py$
diff --git a/testing/resources/python_venv_hooks_repo/foo.py b/testing/resources/python_venv_hooks_repo/foo.py
new file mode 100644
index 0000000..9c4368e
--- /dev/null
+++ b/testing/resources/python_venv_hooks_repo/foo.py
@@ -0,0 +1,7 @@
+import sys
+
+
+def main():
+ print(repr(sys.argv[1:]))
+ print('Hello World')
+ return 0
diff --git a/testing/resources/python_venv_hooks_repo/setup.py b/testing/resources/python_venv_hooks_repo/setup.py
new file mode 100644
index 0000000..0559271
--- /dev/null
+++ b/testing/resources/python_venv_hooks_repo/setup.py
@@ -0,0 +1,8 @@
+from setuptools import setup
+
+setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['foo'],
+ entry_points={'console_scripts': ['foo = foo:main']},
+)
diff --git a/testing/resources/ruby_hooks_repo/.gitignore b/testing/resources/ruby_hooks_repo/.gitignore
new file mode 100644
index 0000000..c111b33
--- /dev/null
+++ b/testing/resources/ruby_hooks_repo/.gitignore
@@ -0,0 +1 @@
+*.gem
diff --git a/testing/resources/ruby_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/ruby_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..aa15872
--- /dev/null
+++ b/testing/resources/ruby_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: ruby_hook
+ name: Ruby Hook
+ entry: ruby_hook
+ language: ruby
+ files: \.rb$
diff --git a/testing/resources/ruby_hooks_repo/bin/ruby_hook b/testing/resources/ruby_hooks_repo/bin/ruby_hook
new file mode 100755
index 0000000..5a7e5ed
--- /dev/null
+++ b/testing/resources/ruby_hooks_repo/bin/ruby_hook
@@ -0,0 +1,3 @@
+#!/usr/bin/env ruby
+
+puts 'Hello world from a ruby hook'
diff --git a/testing/resources/ruby_hooks_repo/lib/.gitignore b/testing/resources/ruby_hooks_repo/lib/.gitignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/resources/ruby_hooks_repo/lib/.gitignore
diff --git a/testing/resources/ruby_hooks_repo/ruby_hook.gemspec b/testing/resources/ruby_hooks_repo/ruby_hook.gemspec
new file mode 100644
index 0000000..75f4e8f
--- /dev/null
+++ b/testing/resources/ruby_hooks_repo/ruby_hook.gemspec
@@ -0,0 +1,9 @@
+Gem::Specification.new do |s|
+ s.name = 'ruby_hook'
+ s.version = '0.1.0'
+ s.authors = ['Anthony Sottile']
+ s.summary = 'A ruby hook!'
+ s.description = 'A ruby hook!'
+ s.files = ['bin/ruby_hook']
+ s.executables = ['ruby_hook']
+end
diff --git a/testing/resources/ruby_versioned_hooks_repo/.gitignore b/testing/resources/ruby_versioned_hooks_repo/.gitignore
new file mode 100644
index 0000000..c111b33
--- /dev/null
+++ b/testing/resources/ruby_versioned_hooks_repo/.gitignore
@@ -0,0 +1 @@
+*.gem
diff --git a/testing/resources/ruby_versioned_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/ruby_versioned_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..63e1dd4
--- /dev/null
+++ b/testing/resources/ruby_versioned_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+- id: ruby_hook
+ name: Ruby Hook
+ entry: ruby_hook
+ language: ruby
+ language_version: 2.5.1
+ files: \.rb$
diff --git a/testing/resources/ruby_versioned_hooks_repo/bin/ruby_hook b/testing/resources/ruby_versioned_hooks_repo/bin/ruby_hook
new file mode 100755
index 0000000..2406f04
--- /dev/null
+++ b/testing/resources/ruby_versioned_hooks_repo/bin/ruby_hook
@@ -0,0 +1,4 @@
+#!/usr/bin/env ruby
+
+puts RUBY_VERSION
+puts 'Hello world from a ruby hook'
diff --git a/testing/resources/ruby_versioned_hooks_repo/lib/.gitignore b/testing/resources/ruby_versioned_hooks_repo/lib/.gitignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/resources/ruby_versioned_hooks_repo/lib/.gitignore
diff --git a/testing/resources/ruby_versioned_hooks_repo/ruby_hook.gemspec b/testing/resources/ruby_versioned_hooks_repo/ruby_hook.gemspec
new file mode 100644
index 0000000..75f4e8f
--- /dev/null
+++ b/testing/resources/ruby_versioned_hooks_repo/ruby_hook.gemspec
@@ -0,0 +1,9 @@
+Gem::Specification.new do |s|
+ s.name = 'ruby_hook'
+ s.version = '0.1.0'
+ s.authors = ['Anthony Sottile']
+ s.summary = 'A ruby hook!'
+ s.description = 'A ruby hook!'
+ s.files = ['bin/ruby_hook']
+ s.executables = ['ruby_hook']
+end
diff --git a/testing/resources/rust_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/rust_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..df1269f
--- /dev/null
+++ b/testing/resources/rust_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: rust-hook
+ name: rust example hook
+ entry: rust-hello-world
+ language: rust
+ files: ''
diff --git a/testing/resources/rust_hooks_repo/Cargo.lock b/testing/resources/rust_hooks_repo/Cargo.lock
new file mode 100644
index 0000000..36fbfda
--- /dev/null
+++ b/testing/resources/rust_hooks_repo/Cargo.lock
@@ -0,0 +1,3 @@
+[[package]]
+name = "rust-hello-world"
+version = "0.1.0"
diff --git a/testing/resources/rust_hooks_repo/Cargo.toml b/testing/resources/rust_hooks_repo/Cargo.toml
new file mode 100644
index 0000000..cd83b43
--- /dev/null
+++ b/testing/resources/rust_hooks_repo/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "rust-hello-world"
+version = "0.1.0"
diff --git a/testing/resources/rust_hooks_repo/src/main.rs b/testing/resources/rust_hooks_repo/src/main.rs
new file mode 100644
index 0000000..ad379d6
--- /dev/null
+++ b/testing/resources/rust_hooks_repo/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("hello world");
+}
diff --git a/testing/resources/script_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/script_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..21cad4a
--- /dev/null
+++ b/testing/resources/script_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: bash_hook
+ name: Bash hook
+ entry: bin/hook.sh
+ language: script
+ files: ''
diff --git a/testing/resources/script_hooks_repo/bin/hook.sh b/testing/resources/script_hooks_repo/bin/hook.sh
new file mode 100755
index 0000000..6565ee4
--- /dev/null
+++ b/testing/resources/script_hooks_repo/bin/hook.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+echo $@
+echo 'Hello World'
diff --git a/testing/resources/stdout_stderr_repo/.pre-commit-hooks.yaml b/testing/resources/stdout_stderr_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..6800d25
--- /dev/null
+++ b/testing/resources/stdout_stderr_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,8 @@
+- id: stdout-stderr
+ name: stdout-stderr
+ language: script
+ entry: ./stdout-stderr-entry
+- id: tty-check
+ name: tty-check
+ language: script
+ entry: ./tty-check-entry
diff --git a/testing/resources/stdout_stderr_repo/stdout-stderr-entry b/testing/resources/stdout_stderr_repo/stdout-stderr-entry
new file mode 100755
index 0000000..7563df5
--- /dev/null
+++ b/testing/resources/stdout_stderr_repo/stdout-stderr-entry
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+echo 0
+echo 1 1>&2
+echo 2
+echo 3 1>&2
+echo 4
+echo 5 1>&2
diff --git a/testing/resources/stdout_stderr_repo/tty-check-entry b/testing/resources/stdout_stderr_repo/tty-check-entry
new file mode 100755
index 0000000..01a9d38
--- /dev/null
+++ b/testing/resources/stdout_stderr_repo/tty-check-entry
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+t() {
+ if [ -t "$1" ]; then
+ echo "$2: True"
+ else
+ echo "$2: False"
+ fi
+}
+t 0 stdin
+t 1 stdout
+t 2 stderr
diff --git a/testing/resources/swift_hooks_repo/.gitignore b/testing/resources/swift_hooks_repo/.gitignore
new file mode 100644
index 0000000..02c0875
--- /dev/null
+++ b/testing/resources/swift_hooks_repo/.gitignore
@@ -0,0 +1,4 @@
+.DS_Store
+/.build
+/Packages
+/*.xcodeproj
diff --git a/testing/resources/swift_hooks_repo/.pre-commit-hooks.yaml b/testing/resources/swift_hooks_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..c08df87
--- /dev/null
+++ b/testing/resources/swift_hooks_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,6 @@
+- id: swift-hooks-repo
+ name: Swift hooks repo example
+ description: Runs the hello world app generated by swift package init --type executable (binary called swift_hooks_repo here)
+ entry: swift_hooks_repo
+ language: swift
+ files: \.(swift)$
diff --git a/testing/resources/swift_hooks_repo/Package.swift b/testing/resources/swift_hooks_repo/Package.swift
new file mode 100644
index 0000000..04976d3
--- /dev/null
+++ b/testing/resources/swift_hooks_repo/Package.swift
@@ -0,0 +1,7 @@
+// swift-tools-version:5.0
+import PackageDescription
+
+let package = Package(
+ name: "swift_hooks_repo",
+ targets: [.target(name: "swift_hooks_repo")]
+)
diff --git a/testing/resources/swift_hooks_repo/Sources/swift_hooks_repo/main.swift b/testing/resources/swift_hooks_repo/Sources/swift_hooks_repo/main.swift
new file mode 100644
index 0000000..f7cf60e
--- /dev/null
+++ b/testing/resources/swift_hooks_repo/Sources/swift_hooks_repo/main.swift
@@ -0,0 +1 @@
+print("Hello, world!")
diff --git a/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml b/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..b2c347c
--- /dev/null
+++ b/testing/resources/system_hook_with_spaces_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: system-hook-with-spaces
+ name: System hook with spaces
+ entry: bash -c 'echo "Hello World"'
+ language: system
+ files: \.sh$
diff --git a/testing/resources/types_repo/.pre-commit-hooks.yaml b/testing/resources/types_repo/.pre-commit-hooks.yaml
new file mode 100644
index 0000000..2e5e4a6
--- /dev/null
+++ b/testing/resources/types_repo/.pre-commit-hooks.yaml
@@ -0,0 +1,5 @@
+- id: python-files
+ name: Python files
+ entry: bin/hook.sh
+ language: script
+ types: [python]
diff --git a/testing/resources/types_repo/bin/hook.sh b/testing/resources/types_repo/bin/hook.sh
new file mode 100755
index 0000000..bdade51
--- /dev/null
+++ b/testing/resources/types_repo/bin/hook.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+echo $@
+exit 1
diff --git a/testing/util.py b/testing/util.py
new file mode 100644
index 0000000..439bee7
--- /dev/null
+++ b/testing/util.py
@@ -0,0 +1,113 @@
+import contextlib
+import os.path
+import subprocess
+
+import pytest
+
+from pre_commit import parse_shebang
+from pre_commit.languages.docker import docker_is_running
+from pre_commit.util import cmd_output
+from testing.auto_namedtuple import auto_namedtuple
+
+
+TESTING_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+def get_resource_path(path):
+ return os.path.join(TESTING_DIR, 'resources', path)
+
+
+def cmd_output_mocked_pre_commit_home(
+ *args, tempdir_factory, pre_commit_home=None, env=None, **kwargs,
+):
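+ # Runs a command with PRE_COMMIT_HOME pointed at a throwaway directory and
+ # normalizes Windows line endings in the captured output.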
+ if pre_commit_home is None:
+ pre_commit_home = tempdir_factory.get()
+ env = env if env is not None else os.environ
+ kwargs.setdefault('stderr', subprocess.STDOUT)
+ # Don't want to write to the home directory
+ env = dict(env, PRE_COMMIT_HOME=pre_commit_home)
+ ret, out, _ = cmd_output(*args, env=env, **kwargs)
+ return ret, out.replace('\r\n', '\n'), None
+
+
+skipif_cant_run_docker = pytest.mark.skipif(
+ os.name == 'nt' or not docker_is_running(),
+ reason="Docker isn't running or can't be accessed",
+)
+skipif_cant_run_swift = pytest.mark.skipif(
+ parse_shebang.find_executable('swift') is None,
+ reason="swift isn't installed or can't be found",
+)
+xfailif_windows_no_ruby = pytest.mark.xfail(
+ os.name == 'nt',
+ reason='Ruby support not yet implemented on windows.',
+)
+xfailif_windows = pytest.mark.xfail(os.name == 'nt', reason='windows')
+
+
+def supports_venv(): # pragma: no cover (platform specific)
+ try:
+ __import__('ensurepip')
+ __import__('venv')
+ return True
+ except ImportError:
+ return False
+
+
+xfailif_no_venv = pytest.mark.xfail(
+ not supports_venv(), reason='Does not support venv module',
+)
+
+
+def run_opts(
+ all_files=False,
+ files=(),
+ color=False,
+ verbose=False,
+ hook=None,
+ from_ref='',
+ to_ref='',
+ remote_name='',
+ remote_url='',
+ hook_stage='commit',
+ show_diff_on_failure=False,
+ commit_msg_filename='',
+ checkout_type='',
+):
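+ # Builds an options object mirroring the CLI flags of `pre-commit run`, so
+ # tests can construct run arguments without going through argparse.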
+ # These are mutually exclusive
+ assert not (all_files and files)
+ return auto_namedtuple(
+ all_files=all_files,
+ files=files,
+ color=color,
+ verbose=verbose,
+ hook=hook,
+ from_ref=from_ref,
+ to_ref=to_ref,
+ remote_name=remote_name,
+ remote_url=remote_url,
+ hook_stage=hook_stage,
+ show_diff_on_failure=show_diff_on_failure,
+ commit_msg_filename=commit_msg_filename,
+ checkout_type=checkout_type,
+ )
+
+
+@contextlib.contextmanager
+def cwd(path):
+ original_cwd = os.getcwd()
+ os.chdir(path)
+ try:
+ yield
+ finally:
+ os.chdir(original_cwd)
+
+
+def git_commit(*args, fn=cmd_output, msg='commit!', **kwargs):
+ kwargs.setdefault('stderr', subprocess.STDOUT)
+
+ cmd = ('git', 'commit', '--allow-empty', '--no-gpg-sign', '-a') + args
+ if msg is not None: # allow skipping `-m` with `msg=None`
+ cmd += ('-m', msg)
+ ret, out, _ = fn(*cmd, **kwargs)
+ return ret, out.replace('\r\n', '\n')
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/__init__.py
diff --git a/tests/clientlib_test.py b/tests/clientlib_test.py
new file mode 100644
index 0000000..c48adbd
--- /dev/null
+++ b/tests/clientlib_test.py
@@ -0,0 +1,313 @@
+import logging
+
+import cfgv
+import pytest
+
+import pre_commit.constants as C
+from pre_commit.clientlib import check_type_tag
+from pre_commit.clientlib import CONFIG_HOOK_DICT
+from pre_commit.clientlib import CONFIG_REPO_DICT
+from pre_commit.clientlib import CONFIG_SCHEMA
+from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION
+from pre_commit.clientlib import MANIFEST_SCHEMA
+from pre_commit.clientlib import MigrateShaToRev
+from pre_commit.clientlib import validate_config_main
+from pre_commit.clientlib import validate_manifest_main
+from testing.fixtures import sample_local_config
+
+
+def is_valid_according_to_schema(obj, obj_schema):
+ try:
+ cfgv.validate(obj, obj_schema)
+ return True
+ except cfgv.ValidationError:
+ return False
+
+
+@pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel'))
+def test_check_type_tag_failures(value):
+ with pytest.raises(cfgv.ValidationError):
+ check_type_tag(value)
+
+
+@pytest.mark.parametrize(
+ ('config_obj', 'expected'), (
+ (
+ {
+ 'repos': [{
+ 'repo': 'git@github.com:pre-commit/pre-commit-hooks',
+ 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
+ 'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
+ }],
+ },
+ True,
+ ),
+ (
+ {
+ 'repos': [{
+ 'repo': 'git@github.com:pre-commit/pre-commit-hooks',
+ 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
+ 'hooks': [
+ {
+ 'id': 'pyflakes',
+ 'files': '\\.py$',
+ 'args': ['foo', 'bar', 'baz'],
+ },
+ ],
+ }],
+ },
+ True,
+ ),
+ (
+ {
+ 'repos': [{
+ 'repo': 'git@github.com:pre-commit/pre-commit-hooks',
+ 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
+ 'hooks': [
+ {
+ 'id': 'pyflakes',
+ 'files': '\\.py$',
+ # Exclude pattern must be a string
+ 'exclude': 0,
+ 'args': ['foo', 'bar', 'baz'],
+ },
+ ],
+ }],
+ },
+ False,
+ ),
+ ),
+)
+def test_config_valid(config_obj, expected):
+ ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA)
+ assert ret is expected
+
+
+def test_local_hooks_with_rev_fails():
+ config_obj = {'repos': [dict(sample_local_config(), rev='foo')]}
+ with pytest.raises(cfgv.ValidationError):
+ cfgv.validate(config_obj, CONFIG_SCHEMA)
+
+
+def test_config_with_local_hooks_definition_passes():
+ config_obj = {'repos': [sample_local_config()]}
+ cfgv.validate(config_obj, CONFIG_SCHEMA)
+
+
+def test_config_schema_does_not_contain_defaults():
+ """Due to the way our merging works, if this schema has any defaults they
+ will clobber potentially useful values in the backing manifest. #227
+ """
+ for item in CONFIG_HOOK_DICT.items:
+ assert not isinstance(item, cfgv.Optional)
+
+
+def test_validate_manifest_main_ok():
+ assert not validate_manifest_main(('.pre-commit-hooks.yaml',))
+
+
+def test_validate_config_main_ok():
+ assert not validate_config_main(('.pre-commit-config.yaml',))
+
+
+def test_validate_config_old_list_format_ok(tmpdir):
+ f = tmpdir.join('cfg.yaml')
+ f.write('- {repo: meta, hooks: [{id: identity}]}')
+ assert not validate_config_main((f.strpath,))
+
+
+def test_validate_warn_on_unknown_keys_at_repo_level(tmpdir, caplog):
+ f = tmpdir.join('cfg.yaml')
+ f.write(
+ '- repo: https://gitlab.com/pycqa/flake8\n'
+ ' rev: 3.7.7\n'
+ ' hooks:\n'
+ ' - id: flake8\n'
+ ' args: [--some-args]\n',
+ )
+ ret_val = validate_config_main((f.strpath,))
+ assert not ret_val
+ assert caplog.record_tuples == [
+ (
+ 'pre_commit',
+ logging.WARNING,
+ 'Unexpected key(s) present on https://gitlab.com/pycqa/flake8: '
+ 'args',
+ ),
+ ]
+
+
+def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog):
+ f = tmpdir.join('cfg.yaml')
+ f.write(
+ 'repos:\n'
+ '- repo: https://gitlab.com/pycqa/flake8\n'
+ ' rev: 3.7.7\n'
+ ' hooks:\n'
+ ' - id: flake8\n'
+ 'foo:\n'
+ ' id: 1.0.0\n',
+ )
+ ret_val = validate_config_main((f.strpath,))
+ assert not ret_val
+ assert caplog.record_tuples == [
+ (
+ 'pre_commit',
+ logging.WARNING,
+ 'Unexpected key(s) present at root: foo',
+ ),
+ ]
+
+
+@pytest.mark.parametrize('fn', (validate_config_main, validate_manifest_main))
+def test_mains_not_ok(tmpdir, fn):
+ not_yaml = tmpdir.join('f.notyaml')
+ not_yaml.write('{')
+ not_schema = tmpdir.join('notconfig.yaml')
+ not_schema.write('{}')
+
+ assert fn(('does-not-exist',))
+ assert fn((not_yaml.strpath,))
+ assert fn((not_schema.strpath,))
+
+
+@pytest.mark.parametrize(
+ ('manifest_obj', 'expected'),
+ (
+ (
+ [{
+ 'id': 'a',
+ 'name': 'b',
+ 'entry': 'c',
+ 'language': 'python',
+ 'files': r'\.py$',
+ }],
+ True,
+ ),
+ (
+ [{
+ 'id': 'a',
+ 'name': 'b',
+ 'entry': 'c',
+ 'language': 'python',
+ 'language_version': 'python3.4',
+ 'files': r'\.py$',
+ }],
+ True,
+ ),
+ (
+ # A regression in 0.13.5: always_run and files are permissible
+ [{
+ 'id': 'a',
+ 'name': 'b',
+ 'entry': 'c',
+ 'language': 'python',
+ 'files': '',
+ 'always_run': True,
+ }],
+ True,
+ ),
+ ),
+)
+def test_valid_manifests(manifest_obj, expected):
+ ret = is_valid_according_to_schema(manifest_obj, MANIFEST_SCHEMA)
+ assert ret is expected
+
+
+@pytest.mark.parametrize(
+ 'dct',
+ (
+ {'repo': 'local'}, {'repo': 'meta'},
+ {'repo': 'wat', 'sha': 'wat'}, {'repo': 'wat', 'rev': 'wat'},
+ ),
+)
+def test_migrate_sha_to_rev_ok(dct):
+ MigrateShaToRev().check(dct)
+
+
+def test_migrate_sha_to_rev_dont_specify_both():
+ with pytest.raises(cfgv.ValidationError) as excinfo:
+ MigrateShaToRev().check({'repo': 'a', 'sha': 'b', 'rev': 'c'})
+ msg, = excinfo.value.args
+ assert msg == 'Cannot specify both sha and rev'
+
+
+@pytest.mark.parametrize(
+ 'dct',
+ (
+ {'repo': 'a'},
+ {'repo': 'meta', 'sha': 'a'}, {'repo': 'meta', 'rev': 'a'},
+ ),
+)
+def test_migrate_sha_to_rev_conditional_check_failures(dct):
+ with pytest.raises(cfgv.ValidationError):
+ MigrateShaToRev().check(dct)
+
+
+def test_migrate_to_sha_apply_default():
+ dct = {'repo': 'a', 'sha': 'b'}
+ MigrateShaToRev().apply_default(dct)
+ assert dct == {'repo': 'a', 'rev': 'b'}
+
+
+def test_migrate_to_sha_ok():
+ dct = {'repo': 'a', 'rev': 'b'}
+ MigrateShaToRev().apply_default(dct)
+ assert dct == {'repo': 'a', 'rev': 'b'}
+
+
+@pytest.mark.parametrize(
+ 'config_repo',
+ (
+ # i-dont-exist isn't a valid hook
+ {'repo': 'meta', 'hooks': [{'id': 'i-dont-exist'}]},
+ # invalid to set a language for a meta hook
+ {'repo': 'meta', 'hooks': [{'id': 'identity', 'language': 'python'}]},
+ # name override must be string
+ {'repo': 'meta', 'hooks': [{'id': 'identity', 'name': False}]},
+ ),
+)
+def test_meta_hook_invalid(config_repo):
+ with pytest.raises(cfgv.ValidationError):
+ cfgv.validate(config_repo, CONFIG_REPO_DICT)
+
+
+@pytest.mark.parametrize(
+ 'mapping',
+ (
+ # invalid language key
+ {'pony': '1.0'},
+ # not a string for version
+ {'python': 3},
+ ),
+)
+def test_default_language_version_invalid(mapping):
+ with pytest.raises(cfgv.ValidationError):
+ cfgv.validate(mapping, DEFAULT_LANGUAGE_VERSION)
+
+
+def test_minimum_pre_commit_version_failing():
+ with pytest.raises(cfgv.ValidationError) as excinfo:
+ cfg = {'repos': [], 'minimum_pre_commit_version': '999'}
+ cfgv.validate(cfg, CONFIG_SCHEMA)
+ assert str(excinfo.value) == (
+ f'\n'
+ f'==> At Config()\n'
+ f'==> At key: minimum_pre_commit_version\n'
+ f'=====> pre-commit version 999 is required but version {C.VERSION} '
+ f'is installed. Perhaps run `pip install --upgrade pre-commit`.'
+ )
+
+
+def test_minimum_pre_commit_version_passing():
+ cfg = {'repos': [], 'minimum_pre_commit_version': '0'}
+ cfgv.validate(cfg, CONFIG_SCHEMA)
+
+
+@pytest.mark.parametrize('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT))
+def test_warn_additional(schema):
+ allowed_keys = {item.key for item in schema.items if hasattr(item, 'key')}
+ warn_additional, = [
+ x for x in schema.items if isinstance(x, cfgv.WarnAdditionalKeys)
+ ]
+ assert allowed_keys == set(warn_additional.keys)
diff --git a/tests/color_test.py b/tests/color_test.py
new file mode 100644
index 0000000..98b39c1
--- /dev/null
+++ b/tests/color_test.py
@@ -0,0 +1,59 @@
+import sys
+from unittest import mock
+
+import pytest
+
+from pre_commit import envcontext
+from pre_commit.color import format_color
+from pre_commit.color import GREEN
+from pre_commit.color import use_color
+
+
+@pytest.mark.parametrize(
+ ('in_text', 'in_color', 'in_use_color', 'expected'), (
+ ('foo', GREEN, True, f'{GREEN}foo\033[m'),
+ ('foo', GREEN, False, 'foo'),
+ ),
+)
+def test_format_color(in_text, in_color, in_use_color, expected):
+ ret = format_color(in_text, in_color, in_use_color)
+ assert ret == expected
+
+
+def test_use_color_never():
+ assert use_color('never') is False
+
+
+def test_use_color_always():
+ assert use_color('always') is True
+
+
+def test_use_color_no_tty():
+ with mock.patch.object(sys.stdout, 'isatty', return_value=False):
+ assert use_color('auto') is False
+
+
+def test_use_color_tty_with_color_support():
+ with mock.patch.object(sys.stdout, 'isatty', return_value=True):
+ with mock.patch('pre_commit.color.terminal_supports_color', True):
+ with envcontext.envcontext((('TERM', envcontext.UNSET),)):
+ assert use_color('auto') is True
+
+
+def test_use_color_tty_without_color_support():
+ with mock.patch.object(sys.stdout, 'isatty', return_value=True):
+ with mock.patch('pre_commit.color.terminal_supports_color', False):
+ with envcontext.envcontext((('TERM', envcontext.UNSET),)):
+ assert use_color('auto') is False
+
+
+def test_use_color_dumb_term():
+ with mock.patch.object(sys.stdout, 'isatty', return_value=True):
+ with mock.patch('pre_commit.color.terminal_supports_color', True):
+ with envcontext.envcontext((('TERM', 'dumb'),)):
+ assert use_color('auto') is False
+
+
+def test_use_color_raises_if_given_shenanigans():
+ with pytest.raises(ValueError):
+ use_color('herpaderp')
diff --git a/tests/commands/__init__.py b/tests/commands/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/commands/__init__.py
diff --git a/tests/commands/autoupdate_test.py b/tests/commands/autoupdate_test.py
new file mode 100644
index 0000000..2c7b2f1
--- /dev/null
+++ b/tests/commands/autoupdate_test.py
@@ -0,0 +1,437 @@
+import shlex
+
+import pytest
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.commands.autoupdate import _check_hooks_still_exist_at_rev
+from pre_commit.commands.autoupdate import autoupdate
+from pre_commit.commands.autoupdate import RepositoryCannotBeUpdatedError
+from pre_commit.commands.autoupdate import RevInfo
+from pre_commit.util import cmd_output
+from testing.auto_namedtuple import auto_namedtuple
+from testing.fixtures import add_config_to_repo
+from testing.fixtures import make_config_from_repo
+from testing.fixtures import make_repo
+from testing.fixtures import modify_manifest
+from testing.fixtures import read_config
+from testing.fixtures import sample_local_config
+from testing.fixtures import write_config
+from testing.util import git_commit
+
+
+@pytest.fixture
+def up_to_date(tempdir_factory):
+ yield make_repo(tempdir_factory, 'python_hooks_repo')
+
+
+@pytest.fixture
+def out_of_date(tempdir_factory):
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+ original_rev = git.head_rev(path)
+
+ git_commit(cwd=path)
+ head_rev = git.head_rev(path)
+
+ yield auto_namedtuple(
+ path=path, original_rev=original_rev, head_rev=head_rev,
+ )
+
+
+@pytest.fixture
+def tagged(out_of_date):
+ cmd_output('git', 'tag', 'v1.2.3', cwd=out_of_date.path)
+ yield out_of_date
+
+
+@pytest.fixture
+def hook_disappearing(tempdir_factory):
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+ original_rev = git.head_rev(path)
+
+ with modify_manifest(path) as manifest:
+ manifest[0]['id'] = 'bar'
+
+ yield auto_namedtuple(path=path, original_rev=original_rev)
+
+
+def test_rev_info_from_config():
+ info = RevInfo.from_config({'repo': 'repo/path', 'rev': 'v1.2.3'})
+ assert info == RevInfo('repo/path', 'v1.2.3', None)
+
+
+def test_rev_info_update_up_to_date_repo(up_to_date):
+ config = make_config_from_repo(up_to_date)
+ info = RevInfo.from_config(config)
+ new_info = info.update(tags_only=False, freeze=False)
+ assert info == new_info
+
+
+def test_rev_info_update_out_of_date_repo(out_of_date):
+ config = make_config_from_repo(
+ out_of_date.path, rev=out_of_date.original_rev,
+ )
+ info = RevInfo.from_config(config)
+ new_info = info.update(tags_only=False, freeze=False)
+ assert new_info.rev == out_of_date.head_rev
+
+
+def test_rev_info_update_non_master_default_branch(out_of_date):
+ # change the default branch to be not-master
+ cmd_output('git', '-C', out_of_date.path, 'branch', '-m', 'dev')
+ test_rev_info_update_out_of_date_repo(out_of_date)
+
+
+def test_rev_info_update_tags_even_if_not_tags_only(tagged):
+ config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
+ info = RevInfo.from_config(config)
+ new_info = info.update(tags_only=False, freeze=False)
+ assert new_info.rev == 'v1.2.3'
+
+
+def test_rev_info_update_tags_only_does_not_pick_tip(tagged):
+ git_commit(cwd=tagged.path)
+ config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
+ info = RevInfo.from_config(config)
+ new_info = info.update(tags_only=True, freeze=False)
+ assert new_info.rev == 'v1.2.3'
+
+
+def test_rev_info_update_freeze_tag(tagged):
+ git_commit(cwd=tagged.path)
+ config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
+ info = RevInfo.from_config(config)
+ new_info = info.update(tags_only=True, freeze=True)
+ assert new_info.rev == tagged.head_rev
+ assert new_info.frozen == 'v1.2.3'
+
+
+def test_rev_info_update_does_not_freeze_if_already_sha(out_of_date):
+ config = make_config_from_repo(
+ out_of_date.path, rev=out_of_date.original_rev,
+ )
+ info = RevInfo.from_config(config)
+ new_info = info.update(tags_only=True, freeze=True)
+ assert new_info.rev == out_of_date.head_rev
+ assert new_info.frozen is None
+
+
+def test_autoupdate_up_to_date_repo(up_to_date, tmpdir, store):
+ contents = (
+ f'repos:\n'
+ f'- repo: {up_to_date}\n'
+ f' rev: {git.head_rev(up_to_date)}\n'
+ f' hooks:\n'
+ f' - id: foo\n'
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(contents)
+
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
+ assert cfg.read() == contents
+
+
+def test_autoupdate_old_revision_broken(tempdir_factory, in_tmpdir, store):
+ """In $FUTURE_VERSION, hooks.yaml will no longer be supported. This
+ asserts that when that day comes, pre-commit will be able to autoupdate
+ despite not being able to read hooks.yaml in that repository.
+ """
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+ config = make_config_from_repo(path, check=False)
+
+ cmd_output('git', 'mv', C.MANIFEST_FILE, 'nope.yaml', cwd=path)
+ git_commit(cwd=path)
+ # Assume this is the revision the user's old repository was at
+ rev = git.head_rev(path)
+ cmd_output('git', 'mv', 'nope.yaml', C.MANIFEST_FILE, cwd=path)
+ git_commit(cwd=path)
+ update_rev = git.head_rev(path)
+
+ config['rev'] = rev
+ write_config('.', config)
+ with open(C.CONFIG_FILE) as f:
+ before = f.read()
+ assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
+ with open(C.CONFIG_FILE) as f:
+ after = f.read()
+ assert before != after
+ assert update_rev in after
+
+
+def test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store):
+ fmt = (
+ 'repos:\n'
+ '- repo: {}\n'
+ ' rev: {}\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(fmt.format(out_of_date.path, out_of_date.original_rev))
+
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
+ assert cfg.read() == fmt.format(out_of_date.path, out_of_date.head_rev)
+
+
+def test_autoupdate_only_one_to_update(up_to_date, out_of_date, tmpdir, store):
+ fmt = (
+ 'repos:\n'
+ '- repo: {}\n'
+ ' rev: {}\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ '- repo: {}\n'
+ ' rev: {}\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ before = fmt.format(
+ up_to_date, git.head_rev(up_to_date),
+ out_of_date.path, out_of_date.original_rev,
+ )
+ cfg.write(before)
+
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
+ assert cfg.read() == fmt.format(
+ up_to_date, git.head_rev(up_to_date),
+ out_of_date.path, out_of_date.head_rev,
+ )
+
+
+def test_autoupdate_out_of_date_repo_with_correct_repo_name(
+ out_of_date, in_tmpdir, store,
+):
+ stale_config = make_config_from_repo(
+ out_of_date.path, rev=out_of_date.original_rev, check=False,
+ )
+ local_config = sample_local_config()
+ config = {'repos': [stale_config, local_config]}
+ write_config('.', config)
+
+ with open(C.CONFIG_FILE) as f:
+ before = f.read()
+ repo_name = f'file://{out_of_date.path}'
+ ret = autoupdate(
+ C.CONFIG_FILE, store, freeze=False, tags_only=False,
+ repos=(repo_name,),
+ )
+ with open(C.CONFIG_FILE) as f:
+ after = f.read()
+ assert ret == 0
+ assert before != after
+ assert out_of_date.head_rev in after
+ assert 'local' in after
+
+
+def test_autoupdate_out_of_date_repo_with_wrong_repo_name(
+ out_of_date, in_tmpdir, store,
+):
+ config = make_config_from_repo(
+ out_of_date.path, rev=out_of_date.original_rev, check=False,
+ )
+ write_config('.', config)
+
+ with open(C.CONFIG_FILE) as f:
+ before = f.read()
+ # It will not update it, because the name doesn't match
+ ret = autoupdate(
+ C.CONFIG_FILE, store, freeze=False, tags_only=False,
+ repos=('dne',),
+ )
+ with open(C.CONFIG_FILE) as f:
+ after = f.read()
+ assert ret == 0
+ assert before == after
+
+
+def test_does_not_reformat(tmpdir, out_of_date, store):
+ fmt = (
+ 'repos:\n'
+ '- repo: {}\n'
+ ' rev: {} # definitely the version I want!\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' # These args are because reasons!\n'
+ ' args: [foo, bar, baz]\n'
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(fmt.format(out_of_date.path, out_of_date.original_rev))
+
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
+ expected = fmt.format(out_of_date.path, out_of_date.head_rev)
+ assert cfg.read() == expected
+
+
+def test_loses_formatting_when_not_detectable(out_of_date, store, tmpdir):
+ """A best-effort attempt is made at updating rev without rewriting
+ formatting. When the original formatting cannot be detected, this
+ is abandoned.
+ """
+ config = (
+ 'repos: [\n'
+ ' {{\n'
+ ' repo: {}, rev: {},\n'
+ ' hooks: [\n'
+ ' # A comment!\n'
+ ' {{id: foo}},\n'
+ ' ],\n'
+ ' }}\n'
+ ']\n'.format(
+ shlex.quote(out_of_date.path), out_of_date.original_rev,
+ )
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(config)
+
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
+ expected = (
+ f'repos:\n'
+ f'- repo: {out_of_date.path}\n'
+ f' rev: {out_of_date.head_rev}\n'
+ f' hooks:\n'
+ f' - id: foo\n'
+ )
+ assert cfg.read() == expected
+
+
+def test_autoupdate_tagged_repo(tagged, in_tmpdir, store):
+ config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
+ write_config('.', config)
+
+ assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
+ with open(C.CONFIG_FILE) as f:
+ assert 'v1.2.3' in f.read()
+
+
+def test_autoupdate_freeze(tagged, in_tmpdir, store):
+ config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
+ write_config('.', config)
+
+ assert autoupdate(C.CONFIG_FILE, store, freeze=True, tags_only=False) == 0
+ with open(C.CONFIG_FILE) as f:
+ expected = f'rev: {tagged.head_rev} # frozen: v1.2.3'
+ assert expected in f.read()
+
+ # if we un-freeze, it should remove the frozen comment
+ assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
+ with open(C.CONFIG_FILE) as f:
+ assert 'rev: v1.2.3\n' in f.read()
+
+
+def test_autoupdate_tags_only(tagged, in_tmpdir, store):
+ # add some commits after the tag
+ git_commit(cwd=tagged.path)
+
+ config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
+ write_config('.', config)
+
+ assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=True) == 0
+ with open(C.CONFIG_FILE) as f:
+ assert 'v1.2.3' in f.read()
+
+
+def test_autoupdate_latest_no_config(out_of_date, in_tmpdir, store):
+ config = make_config_from_repo(
+ out_of_date.path, rev=out_of_date.original_rev,
+ )
+ write_config('.', config)
+
+ cmd_output('git', 'rm', '-r', ':/', cwd=out_of_date.path)
+ git_commit(cwd=out_of_date.path)
+
+ assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 1
+ with open(C.CONFIG_FILE) as f:
+ assert out_of_date.original_rev in f.read()
+
+
+def test_hook_disappearing_repo_raises(hook_disappearing, store):
+ config = make_config_from_repo(
+ hook_disappearing.path,
+ rev=hook_disappearing.original_rev,
+ hooks=[{'id': 'foo'}],
+ )
+ info = RevInfo.from_config(config).update(tags_only=False, freeze=False)
+ with pytest.raises(RepositoryCannotBeUpdatedError):
+ _check_hooks_still_exist_at_rev(config, info, store)
+
+
+def test_autoupdate_hook_disappearing_repo(hook_disappearing, tmpdir, store):
+ contents = (
+ f'repos:\n'
+ f'- repo: {hook_disappearing.path}\n'
+ f' rev: {hook_disappearing.original_rev}\n'
+ f' hooks:\n'
+ f' - id: foo\n'
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(contents)
+
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 1
+ assert cfg.read() == contents
+
+
+def test_autoupdate_local_hooks(in_git_dir, store):
+ config = sample_local_config()
+ add_config_to_repo('.', config)
+ assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
+ new_config_written = read_config('.')
+ assert len(new_config_written['repos']) == 1
+ assert new_config_written['repos'][0] == config
+
+
+def test_autoupdate_local_hooks_with_out_of_date_repo(
+ out_of_date, in_tmpdir, store,
+):
+ stale_config = make_config_from_repo(
+ out_of_date.path, rev=out_of_date.original_rev, check=False,
+ )
+ local_config = sample_local_config()
+ config = {'repos': [local_config, stale_config]}
+ write_config('.', config)
+ assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
+ new_config_written = read_config('.')
+ assert len(new_config_written['repos']) == 2
+ assert new_config_written['repos'][0] == local_config
+
+
+def test_autoupdate_meta_hooks(tmpdir, store):
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(
+ 'repos:\n'
+ '- repo: meta\n'
+ ' hooks:\n'
+ ' - id: check-useless-excludes\n',
+ )
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=True) == 0
+ assert cfg.read() == (
+ 'repos:\n'
+ '- repo: meta\n'
+ ' hooks:\n'
+ ' - id: check-useless-excludes\n'
+ )
+
+
+def test_updates_old_format_to_new_format(tmpdir, capsys, store):
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(
+ '- repo: local\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' name: foo\n'
+ ' entry: ./bin/foo.sh\n'
+ ' language: script\n',
+ )
+ assert autoupdate(str(cfg), store, freeze=False, tags_only=True) == 0
+ contents = cfg.read()
+ assert contents == (
+ 'repos:\n'
+ '- repo: local\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' name: foo\n'
+ ' entry: ./bin/foo.sh\n'
+ ' language: script\n'
+ )
+ out, _ = capsys.readouterr()
+ assert out == 'Configuration has been migrated.\n'
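The autoupdate tests above all drive the same flow: read the pinned `rev` for each remote repo, resolve the newest tag or head revision, then rewrite only the `rev:` value, keeping comments and layout where the original formatting can be detected. A minimal sketch of that flow follows; it is illustrative only, `repo_url` is a placeholder, and the imports mirror the ones at the top of this test module, which fall outside the quoted hunks.

    from pre_commit.commands.autoupdate import RevInfo, autoupdate

    repo_url = 'https://example.com/hooks'  # placeholder remote
    config = {'repo': repo_url, 'rev': 'v1.2.2', 'hooks': [{'id': 'foo'}]}
    info = RevInfo.from_config(config)                    # rev as currently pinned
    new_info = info.update(tags_only=True, freeze=False)  # resolve the newest tag / head rev
    # autoupdate() then writes new_info.rev back into the YAML, preserving
    # comments and layout when the original formatting can be detected; with
    # freeze=True a `# frozen: <tag>` comment is appended to the rev line.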
diff --git a/tests/commands/clean_test.py b/tests/commands/clean_test.py
new file mode 100644
index 0000000..955a6bc
--- /dev/null
+++ b/tests/commands/clean_test.py
@@ -0,0 +1,33 @@
+import os.path
+from unittest import mock
+
+import pytest
+
+from pre_commit.commands.clean import clean
+
+
+@pytest.fixture(autouse=True)
+def fake_old_dir(tempdir_factory):
+ fake_old_dir = tempdir_factory.get()
+
+ def _expanduser(path, *args, **kwargs):
+ assert path == '~/.pre-commit'
+ return fake_old_dir
+
+ with mock.patch.object(os.path, 'expanduser', side_effect=_expanduser):
+ yield fake_old_dir
+
+
+def test_clean(store, fake_old_dir):
+ assert os.path.exists(fake_old_dir)
+ assert os.path.exists(store.directory)
+ clean(store)
+ assert not os.path.exists(fake_old_dir)
+ assert not os.path.exists(store.directory)
+
+
+def test_clean_idempotent(store):
+ clean(store)
+ assert not os.path.exists(store.directory)
+ clean(store)
+ assert not os.path.exists(store.directory)
diff --git a/tests/commands/gc_test.py b/tests/commands/gc_test.py
new file mode 100644
index 0000000..02b3694
--- /dev/null
+++ b/tests/commands/gc_test.py
@@ -0,0 +1,161 @@
+import os
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.clientlib import load_config
+from pre_commit.commands.autoupdate import autoupdate
+from pre_commit.commands.gc import gc
+from pre_commit.commands.install_uninstall import install_hooks
+from pre_commit.repository import all_hooks
+from testing.fixtures import make_config_from_repo
+from testing.fixtures import make_repo
+from testing.fixtures import modify_config
+from testing.fixtures import sample_local_config
+from testing.fixtures import sample_meta_config
+from testing.fixtures import write_config
+from testing.util import git_commit
+
+
+def _repo_count(store):
+ return len(store.select_all_repos())
+
+
+def _config_count(store):
+ return len(store.select_all_configs())
+
+
+def _remove_config_assert_cleared(store, cap_out):
+ os.remove(C.CONFIG_FILE)
+ assert not gc(store)
+ assert _config_count(store) == 0
+ assert _repo_count(store) == 0
+ assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'
+
+
+def test_gc(tempdir_factory, store, in_git_dir, cap_out):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ old_rev = git.head_rev(path)
+ git_commit(cwd=path)
+
+ write_config('.', make_config_from_repo(path, rev=old_rev))
+ store.mark_config_used(C.CONFIG_FILE)
+
+ # update will clone both the old and new repo, making the old one gc-able
+ install_hooks(C.CONFIG_FILE, store)
+ assert not autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False)
+
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 2
+ assert not gc(store)
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 1
+ assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'
+
+ _remove_config_assert_cleared(store, cap_out)
+
+
+def test_gc_repo_not_cloned(tempdir_factory, store, in_git_dir, cap_out):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ write_config('.', make_config_from_repo(path))
+ store.mark_config_used(C.CONFIG_FILE)
+
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 0
+ assert not gc(store)
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 0
+ assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'
+
+
+def test_gc_meta_repo_does_not_crash(store, in_git_dir, cap_out):
+ write_config('.', sample_meta_config())
+ store.mark_config_used(C.CONFIG_FILE)
+ assert not gc(store)
+ assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'
+
+
+def test_gc_local_repo_does_not_crash(store, in_git_dir, cap_out):
+ write_config('.', sample_local_config())
+ store.mark_config_used(C.CONFIG_FILE)
+ assert not gc(store)
+ assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'
+
+
+def test_gc_unused_local_repo_with_env(store, in_git_dir, cap_out):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'flake8', 'name': 'flake8', 'entry': 'flake8',
+ # a `language: python` local hook will create an environment
+ 'types': ['python'], 'language': 'python',
+ }],
+ }
+ write_config('.', config)
+ store.mark_config_used(C.CONFIG_FILE)
+
+ # this causes the repositories to be created
+ all_hooks(load_config(C.CONFIG_FILE), store)
+
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 1
+ assert not gc(store)
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 1
+ assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'
+
+ _remove_config_assert_cleared(store, cap_out)
+
+
+def test_gc_config_with_missing_hook(
+ tempdir_factory, store, in_git_dir, cap_out,
+):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ write_config('.', make_config_from_repo(path))
+ store.mark_config_used(C.CONFIG_FILE)
+ # to trigger a clone
+ all_hooks(load_config(C.CONFIG_FILE), store)
+
+ with modify_config() as config:
+ # add a hook which does not exist; make sure we don't crash
+ config['repos'][0]['hooks'].append({'id': 'does-not-exist'})
+
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 1
+ assert not gc(store)
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 1
+ assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'
+
+ _remove_config_assert_cleared(store, cap_out)
+
+
+def test_gc_deletes_invalid_configs(store, in_git_dir, cap_out):
+ config = {'i am': 'invalid'}
+ write_config('.', config)
+ store.mark_config_used(C.CONFIG_FILE)
+
+ assert _config_count(store) == 1
+ assert not gc(store)
+ assert _config_count(store) == 0
+ assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'
+
+
+def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out):
+ # clean up repos from old pre-commit versions
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ write_config('.', make_config_from_repo(path))
+ store.mark_config_used(C.CONFIG_FILE)
+
+ # trigger a clone
+ install_hooks(C.CONFIG_FILE, store)
+
+ # we'll "break" the manifest to simulate an old version clone
+ (_, _, path), = store.select_all_repos()
+ os.remove(os.path.join(path, C.MANIFEST_FILE))
+
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 1
+ assert not gc(store)
+ assert _config_count(store) == 1
+ assert _repo_count(store) == 0
+ assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'
diff --git a/tests/commands/hook_impl_test.py b/tests/commands/hook_impl_test.py
new file mode 100644
index 0000000..032fa8f
--- /dev/null
+++ b/tests/commands/hook_impl_test.py
@@ -0,0 +1,235 @@
+import subprocess
+import sys
+from unittest import mock
+
+import pytest
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.commands import hook_impl
+from pre_commit.envcontext import envcontext
+from pre_commit.util import cmd_output
+from pre_commit.util import make_executable
+from testing.fixtures import git_dir
+from testing.fixtures import sample_local_config
+from testing.fixtures import write_config
+from testing.util import cwd
+from testing.util import git_commit
+
+
+def test_validate_config_file_exists(tmpdir):
+ cfg = tmpdir.join(C.CONFIG_FILE).ensure()
+ hook_impl._validate_config(0, cfg, True)
+
+
+def test_validate_config_missing(capsys):
+ with pytest.raises(SystemExit) as excinfo:
+ hook_impl._validate_config(123, 'DNE.yaml', False)
+ ret, = excinfo.value.args
+ assert ret == 1
+ assert capsys.readouterr().out == (
+ 'No DNE.yaml file was found\n'
+ '- To temporarily silence this, run '
+ '`PRE_COMMIT_ALLOW_NO_CONFIG=1 git ...`\n'
+ '- To permanently silence this, install pre-commit with the '
+ '--allow-missing-config option\n'
+ '- To uninstall pre-commit run `pre-commit uninstall`\n'
+ )
+
+
+def test_validate_config_skip_missing_config(capsys):
+ with pytest.raises(SystemExit) as excinfo:
+ hook_impl._validate_config(123, 'DNE.yaml', True)
+ ret, = excinfo.value.args
+ assert ret == 123
+ expected = '`DNE.yaml` config file not found. Skipping `pre-commit`.\n'
+ assert capsys.readouterr().out == expected
+
+
+def test_validate_config_skip_via_env_variable(capsys):
+ with pytest.raises(SystemExit) as excinfo:
+ with envcontext((('PRE_COMMIT_ALLOW_NO_CONFIG', '1'),)):
+ hook_impl._validate_config(0, 'DNE.yaml', False)
+ ret, = excinfo.value.args
+ assert ret == 0
+ expected = '`DNE.yaml` config file not found. Skipping `pre-commit`.\n'
+ assert capsys.readouterr().out == expected
+
+
+def test_run_legacy_does_not_exist(tmpdir):
+ retv, stdin = hook_impl._run_legacy('pre-commit', tmpdir, ())
+ assert (retv, stdin) == (0, b'')
+
+
+def test_run_legacy_executes_legacy_script(tmpdir, capfd):
+ hook = tmpdir.join('pre-commit.legacy')
+ hook.write('#!/usr/bin/env bash\necho hi "$@"\nexit 1\n')
+ make_executable(hook)
+ retv, stdin = hook_impl._run_legacy('pre-commit', tmpdir, ('arg1', 'arg2'))
+ assert capfd.readouterr().out.strip() == 'hi arg1 arg2'
+ assert (retv, stdin) == (1, b'')
+
+
+def test_run_legacy_pre_push_returns_stdin(tmpdir):
+ with mock.patch.object(sys.stdin.buffer, 'read', return_value=b'stdin'):
+ retv, stdin = hook_impl._run_legacy('pre-push', tmpdir, ())
+ assert (retv, stdin) == (0, b'stdin')
+
+
+def test_run_legacy_recursive(tmpdir):
+ hook = tmpdir.join('pre-commit.legacy').ensure()
+ make_executable(hook)
+
+ # simulate a call being recursive
+ def call(*_, **__):
+ return hook_impl._run_legacy('pre-commit', tmpdir, ())
+
+ with mock.patch.object(subprocess, 'run', call):
+ with pytest.raises(SystemExit):
+ call()
+
+
+def test_run_ns_pre_commit():
+ ns = hook_impl._run_ns('pre-commit', True, (), b'')
+ assert ns is not None
+ assert ns.hook_stage == 'commit'
+ assert ns.color is True
+
+
+def test_run_ns_commit_msg():
+ ns = hook_impl._run_ns('commit-msg', False, ('.git/COMMIT_MSG',), b'')
+ assert ns is not None
+ assert ns.hook_stage == 'commit-msg'
+ assert ns.color is False
+ assert ns.commit_msg_filename == '.git/COMMIT_MSG'
+
+
+def test_run_ns_post_checkout():
+ ns = hook_impl._run_ns('post-checkout', True, ('a', 'b', 'c'), b'')
+ assert ns is not None
+ assert ns.hook_stage == 'post-checkout'
+ assert ns.color is True
+ assert ns.from_ref == 'a'
+ assert ns.to_ref == 'b'
+ assert ns.checkout_type == 'c'
+
+
+@pytest.fixture
+def push_example(tempdir_factory):
+ src = git_dir(tempdir_factory)
+ git_commit(cwd=src)
+ src_head = git.head_rev(src)
+
+ clone = tempdir_factory.get()
+ cmd_output('git', 'clone', src, clone)
+ git_commit(cwd=clone)
+ clone_head = git.head_rev(clone)
+ return (src, src_head, clone, clone_head)
+
+
+def test_run_ns_pre_push_updating_branch(push_example):
+ src, src_head, clone, clone_head = push_example
+
+ with cwd(clone):
+ args = ('origin', src)
+ stdin = f'HEAD {clone_head} refs/heads/b {src_head}\n'.encode()
+ ns = hook_impl._run_ns('pre-push', False, args, stdin)
+
+ assert ns is not None
+ assert ns.hook_stage == 'push'
+ assert ns.color is False
+ assert ns.remote_name == 'origin'
+ assert ns.remote_url == src
+ assert ns.from_ref == src_head
+ assert ns.to_ref == clone_head
+ assert ns.all_files is False
+
+
+def test_run_ns_pre_push_new_branch(push_example):
+ src, src_head, clone, clone_head = push_example
+
+ with cwd(clone):
+ args = ('origin', src)
+ stdin = f'HEAD {clone_head} refs/heads/b {hook_impl.Z40}\n'.encode()
+ ns = hook_impl._run_ns('pre-push', False, args, stdin)
+
+ assert ns is not None
+ assert ns.from_ref == src_head
+ assert ns.to_ref == clone_head
+
+
+def test_run_ns_pre_push_new_branch_existing_rev(push_example):
+ src, src_head, clone, _ = push_example
+
+ with cwd(clone):
+ args = ('origin', src)
+ stdin = f'HEAD {src_head} refs/heads/b2 {hook_impl.Z40}\n'.encode()
+ ns = hook_impl._run_ns('pre-push', False, args, stdin)
+
+ assert ns is None
+
+
+def test_pushing_orphan_branch(push_example):
+ src, src_head, clone, _ = push_example
+
+ cmd_output('git', 'checkout', '--orphan', 'b2', cwd=clone)
+ git_commit(cwd=clone, msg='something else to get unique hash')
+ clone_rev = git.head_rev(clone)
+
+ with cwd(clone):
+ args = ('origin', src)
+ stdin = f'HEAD {clone_rev} refs/heads/b2 {hook_impl.Z40}\n'.encode()
+ ns = hook_impl._run_ns('pre-push', False, args, stdin)
+
+ assert ns is not None
+ assert ns.all_files is True
+
+
+def test_run_ns_pre_push_deleting_branch(push_example):
+ src, src_head, clone, _ = push_example
+
+ with cwd(clone):
+ args = ('origin', src)
+ stdin = f'(delete) {hook_impl.Z40} refs/heads/b {src_head}'.encode()
+ ns = hook_impl._run_ns('pre-push', False, args, stdin)
+
+ assert ns is None
+
+
+def test_hook_impl_main_noop_pre_push(cap_out, store, push_example):
+ src, src_head, clone, _ = push_example
+
+ stdin = f'(delete) {hook_impl.Z40} refs/heads/b {src_head}'.encode()
+ with mock.patch.object(sys.stdin.buffer, 'read', return_value=stdin):
+ with cwd(clone):
+ write_config('.', sample_local_config())
+ ret = hook_impl.hook_impl(
+ store,
+ config=C.CONFIG_FILE,
+ color=False,
+ hook_type='pre-push',
+ hook_dir='.git/hooks',
+ skip_on_missing_config=False,
+ args=('origin', src),
+ )
+ assert ret == 0
+ assert cap_out.get() == ''
+
+
+def test_hook_impl_main_runs_hooks(cap_out, tempdir_factory, store):
+ with cwd(git_dir(tempdir_factory)):
+ write_config('.', sample_local_config())
+ ret = hook_impl.hook_impl(
+ store,
+ config=C.CONFIG_FILE,
+ color=False,
+ hook_type='pre-commit',
+ hook_dir='.git/hooks',
+ skip_on_missing_config=False,
+ args=(),
+ )
+ assert ret == 0
+ expected = '''\
+Block if "DO NOT COMMIT" is found....................(no files to check)Skipped
+'''
+ assert cap_out.get() == expected
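The pre-push cases above feed `hook_impl._run_ns` the same data git hands the installed hook: the remote name and URL as positional arguments, plus one `<local ref> <local sha> <remote ref> <remote sha>` line per ref on stdin, where an all-zero sha stands in for a ref that does not exist on one side (a new or deleted branch). A rough sketch with placeholder values:

    # Placeholder shapes only -- mirrors the stdin built in the tests above.
    local_sha = 'a' * 40    # placeholder commit hashes
    remote_sha = 'b' * 40
    args = ('origin', 'https://example.com/repo')  # remote name, remote URL (placeholder)
    stdin = f'HEAD {local_sha} refs/heads/b {remote_sha}\n'.encode()
    ns = hook_impl._run_ns('pre-push', False, args, stdin)
    # ns carries from_ref/to_ref for the subsequent run; it is None when there
    # is nothing to check (e.g. a branch deletion, or a push with no new commits).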
diff --git a/tests/commands/init_templatedir_test.py b/tests/commands/init_templatedir_test.py
new file mode 100644
index 0000000..d14a171
--- /dev/null
+++ b/tests/commands/init_templatedir_test.py
@@ -0,0 +1,92 @@
+import os.path
+from unittest import mock
+
+import pre_commit.constants as C
+from pre_commit.commands.init_templatedir import init_templatedir
+from pre_commit.envcontext import envcontext
+from pre_commit.util import cmd_output
+from testing.fixtures import git_dir
+from testing.fixtures import make_consuming_repo
+from testing.util import cmd_output_mocked_pre_commit_home
+from testing.util import cwd
+from testing.util import git_commit
+
+
+def test_init_templatedir(tmpdir, tempdir_factory, store, cap_out):
+ target = str(tmpdir.join('tmpl'))
+ init_templatedir(C.CONFIG_FILE, store, target, hook_types=['pre-commit'])
+ lines = cap_out.get().splitlines()
+ assert lines[0].startswith('pre-commit installed at ')
+ assert lines[1] == (
+ '[WARNING] `init.templateDir` not set to the target directory'
+ )
+ assert lines[2].startswith(
+ '[WARNING] maybe `git config --global init.templateDir',
+ )
+
+ with envcontext((('GIT_TEMPLATE_DIR', target),)):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+
+ with cwd(path):
+ retcode, output = git_commit(
+ fn=cmd_output_mocked_pre_commit_home,
+ tempdir_factory=tempdir_factory,
+ )
+ assert retcode == 0
+ assert 'Bash hook....' in output
+
+
+def test_init_templatedir_already_set(tmpdir, tempdir_factory, store, cap_out):
+ target = str(tmpdir.join('tmpl'))
+ tmp_git_dir = git_dir(tempdir_factory)
+ with cwd(tmp_git_dir):
+ cmd_output('git', 'config', 'init.templateDir', target)
+ init_templatedir(
+ C.CONFIG_FILE, store, target, hook_types=['pre-commit'],
+ )
+
+ lines = cap_out.get().splitlines()
+ assert len(lines) == 1
+ assert lines[0].startswith('pre-commit installed at')
+
+
+def test_init_templatedir_not_set(tmpdir, store, cap_out):
+ # set HOME to ignore the current `.gitconfig`
+ with envcontext((('HOME', str(tmpdir)),)):
+ with tmpdir.join('tmpl').ensure_dir().as_cwd():
+ # we have not set init.templateDir so this should produce a warning
+ init_templatedir(
+ C.CONFIG_FILE, store, '.', hook_types=['pre-commit'],
+ )
+
+ lines = cap_out.get().splitlines()
+ assert len(lines) == 3
+ assert lines[1] == (
+ '[WARNING] `init.templateDir` not set to the target directory'
+ )
+
+
+def test_init_templatedir_expanduser(tmpdir, tempdir_factory, store, cap_out):
+ target = str(tmpdir.join('tmpl'))
+ tmp_git_dir = git_dir(tempdir_factory)
+ with cwd(tmp_git_dir):
+ cmd_output('git', 'config', 'init.templateDir', '~/templatedir')
+ with mock.patch.object(os.path, 'expanduser', return_value=target):
+ init_templatedir(
+ C.CONFIG_FILE, store, target, hook_types=['pre-commit'],
+ )
+
+ lines = cap_out.get().splitlines()
+ assert len(lines) == 1
+ assert lines[0].startswith('pre-commit installed at')
+
+
+def test_init_templatedir_hookspath_set(tmpdir, tempdir_factory, store):
+ target = tmpdir.join('tmpl')
+ tmp_git_dir = git_dir(tempdir_factory)
+ with cwd(tmp_git_dir):
+ cmd_output('git', 'config', '--local', 'core.hooksPath', 'hooks')
+ init_templatedir(
+ C.CONFIG_FILE, store, target, hook_types=['pre-commit'],
+ )
+ assert target.join('hooks/pre-commit').exists()
diff --git a/tests/commands/install_uninstall_test.py b/tests/commands/install_uninstall_test.py
new file mode 100644
index 0000000..66b9190
--- /dev/null
+++ b/tests/commands/install_uninstall_test.py
@@ -0,0 +1,901 @@
+import os.path
+import re
+import sys
+from unittest import mock
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit.commands import install_uninstall
+from pre_commit.commands.install_uninstall import CURRENT_HASH
+from pre_commit.commands.install_uninstall import install
+from pre_commit.commands.install_uninstall import install_hooks
+from pre_commit.commands.install_uninstall import is_our_script
+from pre_commit.commands.install_uninstall import PRIOR_HASHES
+from pre_commit.commands.install_uninstall import shebang
+from pre_commit.commands.install_uninstall import uninstall
+from pre_commit.parse_shebang import find_executable
+from pre_commit.util import cmd_output
+from pre_commit.util import make_executable
+from pre_commit.util import resource_text
+from testing.fixtures import add_config_to_repo
+from testing.fixtures import git_dir
+from testing.fixtures import make_consuming_repo
+from testing.fixtures import remove_config_from_repo
+from testing.fixtures import write_config
+from testing.util import cmd_output_mocked_pre_commit_home
+from testing.util import cwd
+from testing.util import git_commit
+
+
+def test_is_not_script():
+ assert is_our_script('setup.py') is False
+
+
+def test_is_script():
+ assert is_our_script('pre_commit/resources/hook-tmpl')
+
+
+def test_is_previous_pre_commit(tmpdir):
+ f = tmpdir.join('foo')
+ f.write(f'{PRIOR_HASHES[0]}\n')
+ assert is_our_script(f.strpath)
+
+
+def patch_platform(platform):
+ return mock.patch.object(sys, 'platform', platform)
+
+
+def patch_lookup_path(path):
+ return mock.patch.object(install_uninstall, 'POSIX_SEARCH_PATH', path)
+
+
+def patch_sys_exe(exe):
+ return mock.patch.object(install_uninstall, 'SYS_EXE', exe)
+
+
+def test_shebang_windows():
+ with patch_platform('win32'), patch_sys_exe('python.exe'):
+ assert shebang() == '#!/usr/bin/env python.exe'
+
+
+def test_shebang_posix_not_on_path():
+ with patch_platform('posix'), patch_lookup_path(()):
+ with patch_sys_exe('python3.6'):
+ assert shebang() == '#!/usr/bin/env python3.6'
+
+
+def test_shebang_posix_on_path(tmpdir):
+ exe = tmpdir.join(f'python{sys.version_info[0]}').ensure()
+ make_executable(exe)
+
+ with patch_platform('posix'), patch_lookup_path((tmpdir.strpath,)):
+ with patch_sys_exe('python'):
+ assert shebang() == f'#!/usr/bin/env python{sys.version_info[0]}'
+
+
+def test_install_pre_commit(in_git_dir, store):
+ assert not install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+ assert os.access(in_git_dir.join('.git/hooks/pre-commit').strpath, os.X_OK)
+
+ assert not install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+ assert os.access(in_git_dir.join('.git/hooks/pre-push').strpath, os.X_OK)
+
+
+def test_install_hooks_directory_not_present(in_git_dir, store):
+ # Simulate some git clients which don't make .git/hooks #234
+ if in_git_dir.join('.git/hooks').exists(): # pragma: no cover (odd git)
+ in_git_dir.join('.git/hooks').remove()
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+ assert in_git_dir.join('.git/hooks/pre-commit').exists()
+
+
+def test_install_multiple_hooks_at_once(in_git_dir, store):
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit', 'pre-push'])
+ assert in_git_dir.join('.git/hooks/pre-commit').exists()
+ assert in_git_dir.join('.git/hooks/pre-push').exists()
+ uninstall(hook_types=['pre-commit', 'pre-push'])
+ assert not in_git_dir.join('.git/hooks/pre-commit').exists()
+ assert not in_git_dir.join('.git/hooks/pre-push').exists()
+
+
+def test_install_refuses_core_hookspath(in_git_dir, store):
+ cmd_output('git', 'config', '--local', 'core.hooksPath', 'hooks')
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+
+
+def test_install_hooks_dead_symlink(in_git_dir, store):
+ hook = in_git_dir.join('.git/hooks').ensure_dir().join('pre-commit')
+ os.symlink('/fake/baz', hook.strpath)
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+ assert hook.exists()
+
+
+def test_uninstall_does_not_blow_up_when_not_there(in_git_dir):
+ assert uninstall(hook_types=['pre-commit']) == 0
+
+
+def test_uninstall(in_git_dir, store):
+ assert not in_git_dir.join('.git/hooks/pre-commit').exists()
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+ assert in_git_dir.join('.git/hooks/pre-commit').exists()
+ uninstall(hook_types=['pre-commit'])
+ assert not in_git_dir.join('.git/hooks/pre-commit').exists()
+
+
+def _get_commit_output(tempdir_factory, touch_file='foo', **kwargs):
+ open(touch_file, 'a').close()
+ cmd_output('git', 'add', touch_file)
+ return git_commit(
+ fn=cmd_output_mocked_pre_commit_home,
+ retcode=None,
+ tempdir_factory=tempdir_factory,
+ **kwargs,
+ )
+
+
+# osx does this differently :(
+FILES_CHANGED = (
+ r'('
+ r' 1 file changed, 0 insertions\(\+\), 0 deletions\(-\)\n'
+ r'|'
+ r' 0 files changed\n'
+ r')'
+)
+
+
+NORMAL_PRE_COMMIT_RUN = re.compile(
+ fr'^\[INFO\] Initializing environment for .+\.\n'
+ fr'Bash hook\.+Passed\n'
+ fr'\[master [a-f0-9]{{7}}\] commit!\n'
+ fr'{FILES_CHANGED}'
+ fr' create mode 100644 foo\n$',
+)
+
+
+def test_install_pre_commit_and_run(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def test_install_pre_commit_and_run_custom_path(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ cmd_output('git', 'mv', C.CONFIG_FILE, 'custom.yaml')
+ git_commit(cwd=path)
+ assert install('custom.yaml', store, hook_types=['pre-commit']) == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def test_install_in_submodule_and_run(tempdir_factory, store):
+ src_path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ parent_path = git_dir(tempdir_factory)
+ cmd_output('git', 'submodule', 'add', src_path, 'sub', cwd=parent_path)
+ git_commit(cwd=parent_path)
+
+ sub_pth = os.path.join(parent_path, 'sub')
+ with cwd(sub_pth):
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def test_install_in_worktree_and_run(tempdir_factory, store):
+ src_path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ path = tempdir_factory.get()
+ cmd_output('git', '-C', src_path, 'branch', '-m', 'notmaster')
+ cmd_output('git', '-C', src_path, 'worktree', 'add', path, '-b', 'master')
+
+ with cwd(path):
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def test_commit_am(tempdir_factory, store):
+ """Regression test for #322."""
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ # Make an unstaged change
+ open('unstaged', 'w').close()
+ cmd_output('git', 'add', '.')
+ git_commit(cwd=path)
+ with open('unstaged', 'w') as foo_file:
+ foo_file.write('Oh hai')
+
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+
+
+def test_unicode_merge_commit_message(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+ cmd_output('git', 'checkout', 'master', '-b', 'foo')
+ git_commit('-n', cwd=path)
+ cmd_output('git', 'checkout', 'master')
+ cmd_output('git', 'merge', 'foo', '--no-ff', '--no-commit', '-m', '☃')
+ # Used to crash
+ git_commit(
+ '--no-edit',
+ msg=None,
+ fn=cmd_output_mocked_pre_commit_home,
+ tempdir_factory=tempdir_factory,
+ )
+
+
+def test_install_idempotent(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def _path_without_us():
+ # Choose a path which *probably* doesn't include us
+ env = dict(os.environ)
+ exe = find_executable('pre-commit', _environ=env)
+ while exe:
+ parts = env['PATH'].split(os.pathsep)
+ after = [x for x in parts if x.lower() != os.path.dirname(exe).lower()]
+ if parts == after:
+ raise AssertionError(exe, parts)
+ env['PATH'] = os.pathsep.join(after)
+ exe = find_executable('pre-commit', _environ=env)
+ return env['PATH']
+
+
+def test_environment_not_sourced(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ assert not install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+ # simulate deleting the virtualenv by rewriting the exe
+ hook = os.path.join(path, '.git/hooks/pre-commit')
+ with open(hook) as f:
+ src = f.read()
+ src = re.sub(
+ '\nINSTALL_PYTHON =.*\n',
+ '\nINSTALL_PYTHON = "/dne"\n',
+ src,
+ )
+ with open(hook, 'w') as f:
+ f.write(src)
+
+ # Use a specific homedir to ignore --user installs
+ homedir = tempdir_factory.get()
+ ret, out = git_commit(
+ env={
+ 'HOME': homedir,
+ 'PATH': _path_without_us(),
+ # Git needs this to make a commit
+ 'GIT_AUTHOR_NAME': os.environ['GIT_AUTHOR_NAME'],
+ 'GIT_COMMITTER_NAME': os.environ['GIT_COMMITTER_NAME'],
+ 'GIT_AUTHOR_EMAIL': os.environ['GIT_AUTHOR_EMAIL'],
+ 'GIT_COMMITTER_EMAIL': os.environ['GIT_COMMITTER_EMAIL'],
+ },
+ retcode=None,
+ )
+ assert ret == 1
+ assert out == (
+ '`pre-commit` not found. '
+ 'Did you forget to activate your virtualenv?\n'
+ )
+
+
+FAILING_PRE_COMMIT_RUN = re.compile(
+ r'^\[INFO\] Initializing environment for .+\.\n'
+ r'Failing hook\.+Failed\n'
+ r'- hook id: failing_hook\n'
+ r'- exit code: 1\n'
+ r'\n'
+ r'Fail\n'
+ r'foo\n'
+ r'\n$',
+)
+
+
+def test_failing_hooks_returns_nonzero(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'failing_hook_repo')
+ with cwd(path):
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 1
+ assert FAILING_PRE_COMMIT_RUN.match(output)
+
+
+EXISTING_COMMIT_RUN = re.compile(
+ fr'^legacy hook\n'
+ fr'\[master [a-f0-9]{{7}}\] commit!\n'
+ fr'{FILES_CHANGED}'
+ fr' create mode 100644 baz\n$',
+)
+
+
+def _write_legacy_hook(path):
+ os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
+ with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
+ f.write(f'{shebang()}\nprint("legacy hook")\n')
+ make_executable(f.name)
+
+
+def test_install_existing_hooks_no_overwrite(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ _write_legacy_hook(path)
+
+ # Make sure we installed the "old" hook correctly
+ ret, output = _get_commit_output(tempdir_factory, touch_file='baz')
+ assert ret == 0
+ assert EXISTING_COMMIT_RUN.match(output)
+
+ # Now install pre-commit (no-overwrite)
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ # We should run both the legacy and pre-commit hooks
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert output.startswith('legacy hook\n')
+ assert NORMAL_PRE_COMMIT_RUN.match(output[len('legacy hook\n'):])
+
+
+def test_legacy_overwriting_legacy_hook(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ _write_legacy_hook(path)
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+ _write_legacy_hook(path)
+ # this previously crashed on windows. See #1010
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+
+def test_install_existing_hook_no_overwrite_idempotent(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ _write_legacy_hook(path)
+
+ # Install twice
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ # We should run both the legacy and pre-commit hooks
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert output.startswith('legacy hook\n')
+ assert NORMAL_PRE_COMMIT_RUN.match(output[len('legacy hook\n'):])
+
+
+FAIL_OLD_HOOK = re.compile(
+ r'fail!\n'
+ r'\[INFO\] Initializing environment for .+\.\n'
+ r'Bash hook\.+Passed\n',
+)
+
+
+def test_failing_existing_hook_returns_1(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ # Write out a failing "old" hook
+ os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
+ with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
+ f.write('#!/usr/bin/env bash\necho "fail!"\nexit 1\n')
+ make_executable(f.name)
+
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ # We should get a failure from the legacy hook
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 1
+ assert FAIL_OLD_HOOK.match(output)
+
+
+def test_install_overwrite_no_existing_hooks(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ assert not install(
+ C.CONFIG_FILE, store, hook_types=['pre-commit'], overwrite=True,
+ )
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def test_install_overwrite(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ _write_legacy_hook(path)
+ assert not install(
+ C.CONFIG_FILE, store, hook_types=['pre-commit'], overwrite=True,
+ )
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def test_uninstall_restores_legacy_hooks(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ _write_legacy_hook(path)
+
+ # Now install and uninstall pre-commit
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+ assert uninstall(hook_types=['pre-commit']) == 0
+
+ # Make sure we installed the "old" hook correctly
+ ret, output = _get_commit_output(tempdir_factory, touch_file='baz')
+ assert ret == 0
+ assert EXISTING_COMMIT_RUN.match(output)
+
+
+def test_replace_old_commit_script(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ # Install a script that looks like our old script
+ pre_commit_contents = resource_text('hook-tmpl')
+ new_contents = pre_commit_contents.replace(
+ CURRENT_HASH, PRIOR_HASHES[-1],
+ )
+
+ os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
+ with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
+ f.write(new_contents)
+ make_executable(f.name)
+
+ # Install normally
+ assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def test_uninstall_doesnt_remove_not_our_hooks(in_git_dir):
+ pre_commit = in_git_dir.join('.git/hooks').ensure_dir().join('pre-commit')
+ pre_commit.write('#!/usr/bin/env bash\necho 1\n')
+ make_executable(pre_commit.strpath)
+
+ assert uninstall(hook_types=['pre-commit']) == 0
+
+ assert pre_commit.exists()
+
+
+PRE_INSTALLED = re.compile(
+ fr'Bash hook\.+Passed\n'
+ fr'\[master [a-f0-9]{{7}}\] commit!\n'
+ fr'{FILES_CHANGED}'
+ fr' create mode 100644 foo\n$',
+)
+
+
+def test_installs_hooks_with_hooks_True(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'], hooks=True)
+ ret, output = _get_commit_output(
+ tempdir_factory, pre_commit_home=store.directory,
+ )
+
+ assert ret == 0
+ assert PRE_INSTALLED.match(output)
+
+
+def test_install_hooks_command(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+ install_hooks(C.CONFIG_FILE, store)
+ ret, output = _get_commit_output(
+ tempdir_factory, pre_commit_home=store.directory,
+ )
+
+ assert ret == 0
+ assert PRE_INSTALLED.match(output)
+
+
+def test_installed_from_venv(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+ # No environment so pre-commit is not on the path when running!
+ # Should still pick up the python from when we installed
+ ret, output = _get_commit_output(
+ tempdir_factory,
+ env={
+ 'HOME': os.path.expanduser('~'),
+ 'PATH': _path_without_us(),
+ 'TERM': os.environ.get('TERM', ''),
+ # Windows needs this to import `random`
+ 'SYSTEMROOT': os.environ.get('SYSTEMROOT', ''),
+ # Windows needs this to resolve executables
+ 'PATHEXT': os.environ.get('PATHEXT', ''),
+ # Git needs this to make a commit
+ 'GIT_AUTHOR_NAME': os.environ['GIT_AUTHOR_NAME'],
+ 'GIT_COMMITTER_NAME': os.environ['GIT_COMMITTER_NAME'],
+ 'GIT_AUTHOR_EMAIL': os.environ['GIT_AUTHOR_EMAIL'],
+ 'GIT_COMMITTER_EMAIL': os.environ['GIT_COMMITTER_EMAIL'],
+ },
+ )
+ assert ret == 0
+ assert NORMAL_PRE_COMMIT_RUN.match(output)
+
+
+def _get_push_output(tempdir_factory, remote='origin', opts=()):
+ return cmd_output_mocked_pre_commit_home(
+ 'git', 'push', remote, 'HEAD:new_branch', *opts,
+ tempdir_factory=tempdir_factory,
+ retcode=None,
+ )[:2]
+
+
+def test_pre_push_integration_failing(tempdir_factory, store):
+ upstream = make_consuming_repo(tempdir_factory, 'failing_hook_repo')
+ path = tempdir_factory.get()
+ cmd_output('git', 'clone', upstream, path)
+ with cwd(path):
+ install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+ # commit succeeds because pre-commit is only installed for pre-push
+ assert _get_commit_output(tempdir_factory)[0] == 0
+ assert _get_commit_output(tempdir_factory, touch_file='zzz')[0] == 0
+
+ retc, output = _get_push_output(tempdir_factory)
+ assert retc == 1
+ assert 'Failing hook' in output
+ assert 'Failed' in output
+ assert 'foo zzz' in output # both filenames should be printed
+ assert 'hook id: failing_hook' in output
+
+
+def test_pre_push_integration_accepted(tempdir_factory, store):
+ upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ path = tempdir_factory.get()
+ cmd_output('git', 'clone', upstream, path)
+ with cwd(path):
+ install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+ assert _get_commit_output(tempdir_factory)[0] == 0
+
+ retc, output = _get_push_output(tempdir_factory)
+ assert retc == 0
+ assert 'Bash hook' in output
+ assert 'Passed' in output
+
+
+def test_pre_push_force_push_without_fetch(tempdir_factory, store):
+ upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ path1 = tempdir_factory.get()
+ path2 = tempdir_factory.get()
+ cmd_output('git', 'clone', upstream, path1)
+ cmd_output('git', 'clone', upstream, path2)
+ with cwd(path1):
+ assert _get_commit_output(tempdir_factory)[0] == 0
+ assert _get_push_output(tempdir_factory)[0] == 0
+
+ with cwd(path2):
+ install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+ assert _get_commit_output(tempdir_factory, msg='force!')[0] == 0
+
+ retc, output = _get_push_output(tempdir_factory, opts=('--force',))
+ assert retc == 0
+ assert 'Bash hook' in output
+ assert 'Passed' in output
+
+
+def test_pre_push_new_upstream(tempdir_factory, store):
+ upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ upstream2 = git_dir(tempdir_factory)
+ path = tempdir_factory.get()
+ cmd_output('git', 'clone', upstream, path)
+ with cwd(path):
+ install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+ assert _get_commit_output(tempdir_factory)[0] == 0
+
+ cmd_output('git', 'remote', 'rename', 'origin', 'upstream')
+ cmd_output('git', 'remote', 'add', 'origin', upstream2)
+ retc, output = _get_push_output(tempdir_factory)
+ assert retc == 0
+ assert 'Bash hook' in output
+ assert 'Passed' in output
+
+
+def test_pre_push_environment_variables(tempdir_factory, store):
+ config = {
+ 'repo': 'local',
+ 'hooks': [
+ {
+ 'id': 'print-remote-info',
+ 'name': 'print remote info',
+ 'entry': 'bash -c "echo remote: $PRE_COMMIT_REMOTE_NAME"',
+ 'language': 'system',
+ 'verbose': True,
+ },
+ ],
+ }
+
+ upstream = git_dir(tempdir_factory)
+ clone = tempdir_factory.get()
+ cmd_output('git', 'clone', upstream, clone)
+ add_config_to_repo(clone, config)
+ with cwd(clone):
+ install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+
+ cmd_output('git', 'remote', 'rename', 'origin', 'origin2')
+ retc, output = _get_push_output(tempdir_factory, remote='origin2')
+ assert retc == 0
+ assert '\nremote: origin2\n' in output
+
+
+def test_pre_push_integration_empty_push(tempdir_factory, store):
+ upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ path = tempdir_factory.get()
+ cmd_output('git', 'clone', upstream, path)
+ with cwd(path):
+ install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+ _get_push_output(tempdir_factory)
+ retc, output = _get_push_output(tempdir_factory)
+ assert output == 'Everything up-to-date\n'
+ assert retc == 0
+
+
+def test_pre_push_legacy(tempdir_factory, store):
+ upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ path = tempdir_factory.get()
+ cmd_output('git', 'clone', upstream, path)
+ with cwd(path):
+ os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
+ with open(os.path.join(path, '.git/hooks/pre-push'), 'w') as f:
+ f.write(
+ '#!/usr/bin/env bash\n'
+ 'set -eu\n'
+ 'read lr ls rr rs\n'
+ 'test -n "$lr" -a -n "$ls" -a -n "$rr" -a -n "$rs"\n'
+ 'echo legacy\n',
+ )
+ make_executable(f.name)
+
+ install(C.CONFIG_FILE, store, hook_types=['pre-push'])
+ assert _get_commit_output(tempdir_factory)[0] == 0
+
+ retc, output = _get_push_output(tempdir_factory)
+ assert retc == 0
+ first_line, _, third_line = output.splitlines()[:3]
+ assert first_line == 'legacy'
+ assert third_line.startswith('Bash hook')
+ assert third_line.endswith('Passed')
+
+
+def test_commit_msg_integration_failing(
+ commit_msg_repo, tempdir_factory, store,
+):
+ install(C.CONFIG_FILE, store, hook_types=['commit-msg'])
+ retc, out = _get_commit_output(tempdir_factory)
+ assert retc == 1
+ assert out == '''\
+Must have "Signed off by:"...............................................Failed
+- hook id: must-have-signoff
+- exit code: 1
+'''
+
+
+def test_commit_msg_integration_passing(
+ commit_msg_repo, tempdir_factory, store,
+):
+ install(C.CONFIG_FILE, store, hook_types=['commit-msg'])
+ msg = 'Hi\nSigned off by: me, lol'
+ retc, out = _get_commit_output(tempdir_factory, msg=msg)
+ assert retc == 0
+ first_line = out.splitlines()[0]
+ assert first_line.startswith('Must have "Signed off by:"...')
+ assert first_line.endswith('...Passed')
+
+
+def test_commit_msg_legacy(commit_msg_repo, tempdir_factory, store):
+ hook_path = os.path.join(commit_msg_repo, '.git/hooks/commit-msg')
+ os.makedirs(os.path.dirname(hook_path), exist_ok=True)
+ with open(hook_path, 'w') as hook_file:
+ hook_file.write(
+ '#!/usr/bin/env bash\n'
+ 'set -eu\n'
+ 'test -e "$1"\n'
+ 'echo legacy\n',
+ )
+ make_executable(hook_path)
+
+ install(C.CONFIG_FILE, store, hook_types=['commit-msg'])
+
+ msg = 'Hi\nSigned off by: asottile'
+ retc, out = _get_commit_output(tempdir_factory, msg=msg)
+ assert retc == 0
+ first_line, second_line = out.splitlines()[:2]
+ assert first_line == 'legacy'
+ assert second_line.startswith('Must have "Signed off by:"...')
+
+
+def test_post_checkout_integration(tempdir_factory, store):
+ path = git_dir(tempdir_factory)
+ config = [
+ {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'post-checkout',
+ 'name': 'Post checkout',
+ 'entry': 'bash -c "echo ${PRE_COMMIT_TO_REF}"',
+ 'language': 'system',
+ 'always_run': True,
+ 'verbose': True,
+ 'stages': ['post-checkout'],
+ }],
+ },
+ {'repo': 'meta', 'hooks': [{'id': 'identity'}]},
+ ]
+ write_config(path, config)
+ with cwd(path):
+ cmd_output('git', 'add', '.')
+ git_commit()
+
+ # add a file only on `feature`; it should not be passed to hooks
+ cmd_output('git', 'checkout', '-b', 'feature')
+ open('some_file', 'a').close()
+ cmd_output('git', 'add', '.')
+ git_commit()
+ cmd_output('git', 'checkout', 'master')
+
+ install(C.CONFIG_FILE, store, hook_types=['post-checkout'])
+ retc, _, stderr = cmd_output('git', 'checkout', 'feature')
+ assert stderr is not None
+ assert retc == 0
+ assert git.head_rev(path) in stderr
+ assert 'some_file' not in stderr
+
+
+def test_prepare_commit_msg_integration_failing(
+ failing_prepare_commit_msg_repo, tempdir_factory, store,
+):
+ install(C.CONFIG_FILE, store, hook_types=['prepare-commit-msg'])
+ retc, out = _get_commit_output(tempdir_factory)
+ assert retc == 1
+ assert out == '''\
+Add "Signed off by:".....................................................Failed
+- hook id: add-signoff
+- exit code: 1
+'''
+
+
+def test_prepare_commit_msg_integration_passing(
+ prepare_commit_msg_repo, tempdir_factory, store,
+):
+ install(C.CONFIG_FILE, store, hook_types=['prepare-commit-msg'])
+ retc, out = _get_commit_output(tempdir_factory, msg='Hi')
+ assert retc == 0
+ first_line = out.splitlines()[0]
+ assert first_line.startswith('Add "Signed off by:"...')
+ assert first_line.endswith('...Passed')
+ commit_msg_path = os.path.join(
+ prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
+ )
+ with open(commit_msg_path) as f:
+ assert 'Signed off by: ' in f.read()
+
+
+def test_prepare_commit_msg_legacy(
+ prepare_commit_msg_repo, tempdir_factory, store,
+):
+ hook_path = os.path.join(
+ prepare_commit_msg_repo, '.git/hooks/prepare-commit-msg',
+ )
+ os.makedirs(os.path.dirname(hook_path), exist_ok=True)
+ with open(hook_path, 'w') as hook_file:
+ hook_file.write(
+ '#!/usr/bin/env bash\n'
+ 'set -eu\n'
+ 'test -e "$1"\n'
+ 'echo legacy\n',
+ )
+ make_executable(hook_path)
+
+ install(C.CONFIG_FILE, store, hook_types=['prepare-commit-msg'])
+
+ retc, out = _get_commit_output(tempdir_factory, msg='Hi')
+ assert retc == 0
+ first_line, second_line = out.splitlines()[:2]
+ assert first_line == 'legacy'
+ assert second_line.startswith('Add "Signed off by:"...')
+ commit_msg_path = os.path.join(
+ prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
+ )
+ with open(commit_msg_path) as f:
+ assert 'Signed off by: ' in f.read()
+
+
+def test_pre_merge_commit_integration(tempdir_factory, store):
+ expected = re.compile(
+ r'^\[INFO\] Initializing environment for .+\n'
+ r'Bash hook\.+Passed\n'
+ r"Merge made by the 'recursive' strategy.\n"
+ r' foo \| 0\n'
+ r' 1 file changed, 0 insertions\(\+\), 0 deletions\(-\)\n'
+ r' create mode 100644 foo\n$',
+ )
+
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ ret = install(C.CONFIG_FILE, store, hook_types=['pre-merge-commit'])
+ assert ret == 0
+
+ cmd_output('git', 'checkout', 'master', '-b', 'feature')
+ _get_commit_output(tempdir_factory)
+ cmd_output('git', 'checkout', 'master')
+ ret, output, _ = cmd_output_mocked_pre_commit_home(
+ 'git', 'merge', '--no-ff', '--no-edit', 'feature',
+ tempdir_factory=tempdir_factory,
+ )
+ assert ret == 0
+ assert expected.match(output)
+
+
+def test_install_disallow_missing_config(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ remove_config_from_repo(path)
+ ret = install(
+ C.CONFIG_FILE, store, hook_types=['pre-commit'],
+ overwrite=True, skip_on_missing_config=False,
+ )
+ assert ret == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 1
+
+
+def test_install_allow_missing_config(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ remove_config_from_repo(path)
+ ret = install(
+ C.CONFIG_FILE, store, hook_types=['pre-commit'],
+ overwrite=True, skip_on_missing_config=True,
+ )
+ assert ret == 0
+
+ ret, output = _get_commit_output(tempdir_factory)
+ assert ret == 0
+ expected = (
+ '`.pre-commit-config.yaml` config file not found. '
+ 'Skipping `pre-commit`.'
+ )
+ assert expected in output
+
+
+def test_install_temporarily_allow_missing_config(tempdir_factory, store):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(path):
+ remove_config_from_repo(path)
+ ret = install(
+ C.CONFIG_FILE, store, hook_types=['pre-commit'],
+ overwrite=True, skip_on_missing_config=False,
+ )
+ assert ret == 0
+
+ env = dict(os.environ, PRE_COMMIT_ALLOW_NO_CONFIG='1')
+ ret, output = _get_commit_output(tempdir_factory, env=env)
+ assert ret == 0
+ expected = (
+ '`.pre-commit-config.yaml` config file not found. '
+ 'Skipping `pre-commit`.'
+ )
+ assert expected in output
diff --git a/tests/commands/migrate_config_test.py b/tests/commands/migrate_config_test.py
new file mode 100644
index 0000000..efc0d1c
--- /dev/null
+++ b/tests/commands/migrate_config_test.py
@@ -0,0 +1,156 @@
+import pytest
+
+import pre_commit.constants as C
+from pre_commit.commands.migrate_config import _indent
+from pre_commit.commands.migrate_config import migrate_config
+
+
+@pytest.mark.parametrize(
+ ('s', 'expected'),
+ (
+ ('', ''),
+ ('a', ' a'),
+ ('foo\nbar', ' foo\n bar'),
+ ('foo\n\nbar\n', ' foo\n\n bar\n'),
+ ('\n\n\n', '\n\n\n'),
+ ),
+)
+def test_indent(s, expected):
+ assert _indent(s) == expected
+
+
+def test_migrate_config_normal_format(tmpdir, capsys):
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(
+ '- repo: local\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' name: foo\n'
+ ' entry: ./bin/foo.sh\n'
+ ' language: script\n',
+ )
+ with tmpdir.as_cwd():
+ assert not migrate_config(C.CONFIG_FILE)
+ out, _ = capsys.readouterr()
+ assert out == 'Configuration has been migrated.\n'
+ contents = cfg.read()
+ assert contents == (
+ 'repos:\n'
+ '- repo: local\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' name: foo\n'
+ ' entry: ./bin/foo.sh\n'
+ ' language: script\n'
+ )
+
+
+def test_migrate_config_document_marker(tmpdir):
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(
+ '# comment\n'
+ '\n'
+ '---\n'
+ '- repo: local\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' name: foo\n'
+ ' entry: ./bin/foo.sh\n'
+ ' language: script\n',
+ )
+ with tmpdir.as_cwd():
+ assert not migrate_config(C.CONFIG_FILE)
+ contents = cfg.read()
+ assert contents == (
+ '# comment\n'
+ '\n'
+ '---\n'
+ 'repos:\n'
+ '- repo: local\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' name: foo\n'
+ ' entry: ./bin/foo.sh\n'
+ ' language: script\n'
+ )
+
+
+def test_migrate_config_list_literal(tmpdir):
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(
+ '[{\n'
+ ' repo: local,\n'
+ ' hooks: [{\n'
+ ' id: foo, name: foo, entry: ./bin/foo.sh,\n'
+ ' language: script,\n'
+ ' }]\n'
+ '}]',
+ )
+ with tmpdir.as_cwd():
+ assert not migrate_config(C.CONFIG_FILE)
+ contents = cfg.read()
+ assert contents == (
+ 'repos:\n'
+ ' [{\n'
+ ' repo: local,\n'
+ ' hooks: [{\n'
+ ' id: foo, name: foo, entry: ./bin/foo.sh,\n'
+ ' language: script,\n'
+ ' }]\n'
+ ' }]'
+ )
+
+
+def test_already_migrated_configuration_noop(tmpdir, capsys):
+ contents = (
+ 'repos:\n'
+ '- repo: local\n'
+ ' hooks:\n'
+ ' - id: foo\n'
+ ' name: foo\n'
+ ' entry: ./bin/foo.sh\n'
+ ' language: script\n'
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(contents)
+ with tmpdir.as_cwd():
+ assert not migrate_config(C.CONFIG_FILE)
+ out, _ = capsys.readouterr()
+ assert out == 'Configuration is already migrated.\n'
+ assert cfg.read() == contents
+
+
+def test_migrate_config_sha_to_rev(tmpdir):
+ contents = (
+ 'repos:\n'
+ '- repo: https://github.com/pre-commit/pre-commit-hooks\n'
+ ' sha: v1.2.0\n'
+ ' hooks: []\n'
+ '- repo: https://github.com/pre-commit/pre-commit-hooks\n'
+ ' sha: v1.2.0\n'
+ ' hooks: []\n'
+ )
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(contents)
+ with tmpdir.as_cwd():
+ assert not migrate_config(C.CONFIG_FILE)
+ contents = cfg.read()
+ assert contents == (
+ 'repos:\n'
+ '- repo: https://github.com/pre-commit/pre-commit-hooks\n'
+ ' rev: v1.2.0\n'
+ ' hooks: []\n'
+ '- repo: https://github.com/pre-commit/pre-commit-hooks\n'
+ ' rev: v1.2.0\n'
+ ' hooks: []\n'
+ )
+
+
+@pytest.mark.parametrize('contents', ('', '\n'))
+def test_empty_configuration_file_user_error(tmpdir, contents):
+ cfg = tmpdir.join(C.CONFIG_FILE)
+ cfg.write(contents)
+ with tmpdir.as_cwd():
+ assert not migrate_config(C.CONFIG_FILE)
+ # even though the config is invalid, this should be a noop
+ assert cfg.read() == contents
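For reference, the migration asserted above only wraps the pre-1.0 top-level list under a `repos:` key, indents flow-style (`[{...}]`) configs, and renames `sha:` keys to `rev:`; it never touches hook contents. A sketch of the two shapes, with placeholder hook values:

    # Sketch only (placeholder hook values); mirrors the assertions above.
    OLD = (
        '- repo: local\n'
        '  hooks:\n'
        '  - id: foo\n'
    )
    NEW = 'repos:\n' + OLD
    # migrate_config(C.CONFIG_FILE) rewrites OLD into NEW in place and prints
    # 'Configuration is already migrated.' when the file already starts from
    # the `repos:` mapping.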
diff --git a/tests/commands/run_test.py b/tests/commands/run_test.py
new file mode 100644
index 0000000..f8e8823
--- /dev/null
+++ b/tests/commands/run_test.py
@@ -0,0 +1,1012 @@
+import os.path
+import shlex
+import sys
+import time
+from unittest import mock
+
+import pytest
+
+import pre_commit.constants as C
+from pre_commit import color
+from pre_commit.commands.install_uninstall import install
+from pre_commit.commands.run import _compute_cols
+from pre_commit.commands.run import _full_msg
+from pre_commit.commands.run import _get_skips
+from pre_commit.commands.run import _has_unmerged_paths
+from pre_commit.commands.run import _start_msg
+from pre_commit.commands.run import Classifier
+from pre_commit.commands.run import filter_by_include_exclude
+from pre_commit.commands.run import run
+from pre_commit.util import cmd_output
+from pre_commit.util import EnvironT
+from pre_commit.util import make_executable
+from testing.auto_namedtuple import auto_namedtuple
+from testing.fixtures import add_config_to_repo
+from testing.fixtures import git_dir
+from testing.fixtures import make_consuming_repo
+from testing.fixtures import modify_config
+from testing.fixtures import read_config
+from testing.fixtures import sample_meta_config
+from testing.fixtures import write_config
+from testing.util import cmd_output_mocked_pre_commit_home
+from testing.util import cwd
+from testing.util import git_commit
+from testing.util import run_opts
+
+
+def test_start_msg():
+ ret = _start_msg(start='start', end_len=5, cols=15)
+ # 4 dots: 15 - 5 - 5 - 1
+ assert ret == 'start....'
+
+
+def test_full_msg():
+ ret = _full_msg(
+ start='start',
+ end_msg='end',
+ end_color='',
+ use_color=False,
+ cols=15,
+ )
+ # 6 dots: 15 - 5 - 3 - 1
+ assert ret == 'start......end\n'
+
+
+def test_full_msg_with_color():
+ ret = _full_msg(
+ start='start',
+ end_msg='end',
+ end_color=color.RED,
+ use_color=True,
+ cols=15,
+ )
+ # 6 dots: 15 - 5 - 3 - 1
+ assert ret == f'start......{color.RED}end{color.NORMAL}\n'
+
+
+def test_full_msg_with_postfix():
+ ret = _full_msg(
+ start='start',
+ postfix='post ',
+ end_msg='end',
+ end_color='',
+ use_color=False,
+ cols=20,
+ )
+ # 6 dots: 20 - 5 - 5 - 3 - 1
+ assert ret == 'start......post end\n'
+
+
+def test_full_msg_postfix_not_colored():
+ ret = _full_msg(
+ start='start',
+ postfix='post ',
+ end_msg='end',
+ end_color=color.RED,
+ use_color=True,
+ cols=20,
+ )
+ # 6 dots: 20 - 5 - 5 - 3 - 1
+ assert ret == f'start......post {color.RED}end{color.NORMAL}\n'
+
+
+@pytest.fixture
+def repo_with_passing_hook(tempdir_factory):
+ git_path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(git_path):
+ yield git_path
+
+
+@pytest.fixture
+def repo_with_failing_hook(tempdir_factory):
+ git_path = make_consuming_repo(tempdir_factory, 'failing_hook_repo')
+ with cwd(git_path):
+ yield git_path
+
+
+@pytest.fixture
+def aliased_repo(tempdir_factory):
+ git_path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(git_path):
+ with modify_config() as config:
+ config['repos'][0]['hooks'].append(
+ {'id': 'bash_hook', 'alias': 'foo_bash'},
+ )
+ stage_a_file()
+ yield git_path
+
+
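+# Create an empty file and stage it so the hooks have something to run on.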
+def stage_a_file(filename='foo.py'):
+ open(filename, 'a').close()
+ cmd_output('git', 'add', filename)
+
+
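+# Shared helpers: `_do_run` invokes `run` from inside the repository and
+# returns the exit code plus the captured output; `_test_run` wraps it with
+# the assertions most tests need.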
+def _do_run(cap_out, store, repo, args, environ={}, config_file=C.CONFIG_FILE):
+ with cwd(repo): # replicates `main._adjust_args_and_chdir` behaviour
+ ret = run(config_file, store, args, environ=environ)
+ printed = cap_out.get_bytes()
+ return ret, printed
+
+
+def _test_run(
+ cap_out, store, repo, opts, expected_outputs, expected_ret, stage,
+ config_file=C.CONFIG_FILE,
+):
+ if stage:
+ stage_a_file()
+ args = run_opts(**opts)
+ ret, printed = _do_run(cap_out, store, repo, args, config_file=config_file)
+
+ assert ret == expected_ret, (ret, expected_ret, printed)
+ for expected_output_part in expected_outputs:
+ assert expected_output_part in printed
+
+
+def test_run_all_hooks_failing(cap_out, store, repo_with_failing_hook):
+ _test_run(
+ cap_out,
+ store,
+ repo_with_failing_hook,
+ {},
+ (
+ b'Failing hook',
+ b'Failed',
+ b'hook id: failing_hook',
+ b'Fail\nfoo.py\n',
+ ),
+ expected_ret=1,
+ stage=True,
+ )
+
+
+def test_arbitrary_bytes_hook(cap_out, store, tempdir_factory):
+ git_path = make_consuming_repo(tempdir_factory, 'arbitrary_bytes_repo')
+ with cwd(git_path):
+ _test_run(
+ cap_out, store, git_path, {}, (b'\xe2\x98\x83\xb2\n',), 1, True,
+ )
+
+
+def test_hook_that_modifies_but_returns_zero(cap_out, store, tempdir_factory):
+ git_path = make_consuming_repo(
+ tempdir_factory, 'modified_file_returns_zero_repo',
+ )
+ with cwd(git_path):
+ stage_a_file('bar.py')
+ _test_run(
+ cap_out,
+ store,
+ git_path,
+ {},
+ (
+ # The first should fail
+ b'Failed',
+ # With a modified file (default message + the hook's output)
+ b'- files were modified by this hook\n\n'
+ b'Modified: foo.py',
+ # The next hook should pass despite the first modifying
+ b'Passed',
+ # The next hook should fail
+ b'Failed',
+ # bar.py was modified, but provides no additional output
+ b'- files were modified by this hook\n',
+ ),
+ 1,
+ True,
+ )
+
+
+def test_types_hook_repository(cap_out, store, tempdir_factory):
+ git_path = make_consuming_repo(tempdir_factory, 'types_repo')
+ with cwd(git_path):
+ stage_a_file('bar.py')
+ stage_a_file('bar.notpy')
+ ret, printed = _do_run(cap_out, store, git_path, run_opts())
+ assert ret == 1
+ assert b'bar.py' in printed
+ assert b'bar.notpy' not in printed
+
+
+def test_exclude_types_hook_repository(cap_out, store, tempdir_factory):
+ git_path = make_consuming_repo(tempdir_factory, 'exclude_types_repo')
+ with cwd(git_path):
+ with open('exe', 'w') as exe:
+ exe.write('#!/usr/bin/env python3\n')
+ make_executable('exe')
+ cmd_output('git', 'add', 'exe')
+ stage_a_file('bar.py')
+ ret, printed = _do_run(cap_out, store, git_path, run_opts())
+ assert ret == 1
+ assert b'bar.py' in printed
+ assert b'exe' not in printed
+
+
+def test_global_exclude(cap_out, store, in_git_dir):
+ config = {
+ 'exclude': r'^foo\.py$',
+ 'repos': [{'repo': 'meta', 'hooks': [{'id': 'identity'}]}],
+ }
+ write_config('.', config)
+ open('foo.py', 'a').close()
+ open('bar.py', 'a').close()
+ cmd_output('git', 'add', '.')
+ opts = run_opts(verbose=True)
+ ret, printed = _do_run(cap_out, store, str(in_git_dir), opts)
+ assert ret == 0
+ # Does not contain foo.py since it was excluded
+ assert printed.startswith(f'identity{"." * 65}Passed\n'.encode())
+ assert printed.endswith(b'\n\n.pre-commit-config.yaml\nbar.py\n\n')
+
+
+def test_global_files(cap_out, store, in_git_dir):
+ config = {
+ 'files': r'^bar\.py$',
+ 'repos': [{'repo': 'meta', 'hooks': [{'id': 'identity'}]}],
+ }
+ write_config('.', config)
+ open('foo.py', 'a').close()
+ open('bar.py', 'a').close()
+ cmd_output('git', 'add', '.')
+ opts = run_opts(verbose=True)
+ ret, printed = _do_run(cap_out, store, str(in_git_dir), opts)
+ assert ret == 0
+    # Does not contain foo.py since it does not match `files`
+ assert printed.startswith(f'identity{"." * 65}Passed\n'.encode())
+ assert printed.endswith(b'\n\nbar.py\n\n')
+
+
+@pytest.mark.parametrize(
+ ('t1', 't2', 'expected'),
+ (
+ (1.234, 2., b'\n- duration: 0.77s\n'),
+ (1., 1., b'\n- duration: 0s\n'),
+ ),
+)
+def test_verbose_duration(cap_out, store, in_git_dir, t1, t2, expected):
+ write_config('.', {'repo': 'meta', 'hooks': [{'id': 'identity'}]})
+ cmd_output('git', 'add', '.')
+ opts = run_opts(verbose=True)
+ with mock.patch.object(time, 'time', side_effect=(t1, t2)):
+ ret, printed = _do_run(cap_out, store, str(in_git_dir), opts)
+ assert ret == 0
+ assert expected in printed
+
+
+@pytest.mark.parametrize(
+ ('args', 'expected_out'),
+ [
+ (
+ {
+ 'show_diff_on_failure': True,
+ },
+ b'All changes made by hooks:',
+ ),
+ (
+ {
+ 'show_diff_on_failure': True,
+ 'color': True,
+ },
+ b'All changes made by hooks:',
+ ),
+ (
+ {
+ 'show_diff_on_failure': True,
+ 'all_files': True,
+ },
+ b'reproduce locally with: pre-commit run --all-files',
+ ),
+ ],
+)
+def test_show_diff_on_failure(
+ args,
+ expected_out,
+ capfd,
+ cap_out,
+ store,
+ tempdir_factory,
+):
+ git_path = make_consuming_repo(
+ tempdir_factory, 'modified_file_returns_zero_repo',
+ )
+ with cwd(git_path):
+ stage_a_file('bar.py')
+ _test_run(
+ cap_out, store, git_path, args,
+ # we're only testing the output after running
+ expected_out, 1, True,
+ )
+ out, _ = capfd.readouterr()
+ assert 'diff --git' in out
+
+
+@pytest.mark.parametrize(
+ ('options', 'outputs', 'expected_ret', 'stage'),
+ (
+ ({}, (b'Bash hook', b'Passed'), 0, True),
+ ({'verbose': True}, (b'foo.py\nHello World',), 0, True),
+ ({'hook': 'bash_hook'}, (b'Bash hook', b'Passed'), 0, True),
+ (
+ {'hook': 'nope'},
+ (b'No hook with id `nope` in stage `commit`',),
+ 1,
+ True,
+ ),
+ (
+ {'hook': 'nope', 'hook_stage': 'push'},
+ (b'No hook with id `nope` in stage `push`',),
+ 1,
+ True,
+ ),
+ (
+ {'all_files': True, 'verbose': True},
+ (b'foo.py',),
+ 0,
+ True,
+ ),
+ (
+ {'files': ('foo.py',), 'verbose': True},
+ (b'foo.py',),
+ 0,
+ True,
+ ),
+ ({}, (b'Bash hook', b'(no files to check)', b'Skipped'), 0, False),
+ ),
+)
+def test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ options,
+ outputs,
+ expected_ret,
+ stage,
+):
+ _test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ options,
+ outputs,
+ expected_ret,
+ stage,
+ )
+
+
+def test_run_output_logfile(cap_out, store, tempdir_factory):
+ expected_output = (
+ b'This is STDOUT output\n',
+ b'This is STDERR output\n',
+ )
+
+ git_path = make_consuming_repo(tempdir_factory, 'logfile_repo')
+ with cwd(git_path):
+ _test_run(
+ cap_out,
+ store,
+ git_path, {},
+ expected_output,
+ expected_ret=1,
+ stage=True,
+ )
+ logfile_path = os.path.join(git_path, 'test.log')
+ assert os.path.exists(logfile_path)
+ with open(logfile_path, 'rb') as logfile:
+ logfile_content = logfile.readlines()
+
+ for expected_output_part in expected_output:
+ assert expected_output_part in logfile_content
+
+
+def test_always_run(cap_out, store, repo_with_passing_hook):
+ with modify_config() as config:
+ config['repos'][0]['hooks'][0]['always_run'] = True
+ _test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ {},
+ (b'Bash hook', b'Passed'),
+ 0,
+ stage=False,
+ )
+
+
+def test_always_run_alt_config(cap_out, store, repo_with_passing_hook):
+ repo_root = '.'
+ config = read_config(repo_root)
+ config['repos'][0]['hooks'][0]['always_run'] = True
+ alt_config_file = 'alternate_config.yaml'
+ add_config_to_repo(repo_root, config, config_file=alt_config_file)
+
+ _test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ {},
+ (b'Bash hook', b'Passed'),
+ 0,
+ stage=False,
+ config_file=alt_config_file,
+ )
+
+
+def test_hook_verbose_enabled(cap_out, store, repo_with_passing_hook):
+ with modify_config() as config:
+ config['repos'][0]['hooks'][0]['always_run'] = True
+ config['repos'][0]['hooks'][0]['verbose'] = True
+
+ _test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ {},
+ (b'Hello World',),
+ 0,
+ stage=False,
+ )
+
+
+@pytest.mark.parametrize(
+ ('from_ref', 'to_ref'), (('master', ''), ('', 'master')),
+)
+def test_from_ref_to_ref_error_msg_error(
+ cap_out, store, repo_with_passing_hook, from_ref, to_ref,
+):
+ args = run_opts(from_ref=from_ref, to_ref=to_ref)
+ ret, printed = _do_run(cap_out, store, repo_with_passing_hook, args)
+ assert ret == 1
+ assert b'Specify both --from-ref and --to-ref.' in printed
+
+
+def test_all_push_options_ok(cap_out, store, repo_with_passing_hook):
+ args = run_opts(
+ from_ref='master', to_ref='master',
+ remote_name='origin', remote_url='https://example.com/repo',
+ )
+ ret, printed = _do_run(cap_out, store, repo_with_passing_hook, args)
+ assert ret == 0
+ assert b'Specify both --from-ref and --to-ref.' not in printed
+
+
+def test_checkout_type(cap_out, store, repo_with_passing_hook):
+ args = run_opts(from_ref='', to_ref='', checkout_type='1')
+ environ: EnvironT = {}
+ ret, printed = _do_run(
+ cap_out, store, repo_with_passing_hook, args, environ,
+ )
+ assert environ['PRE_COMMIT_CHECKOUT_TYPE'] == '1'
+
+
+def test_has_unmerged_paths(in_merge_conflict):
+ assert _has_unmerged_paths() is True
+ cmd_output('git', 'add', '.')
+ assert _has_unmerged_paths() is False
+
+
+def test_merge_conflict(cap_out, store, in_merge_conflict):
+ ret, printed = _do_run(cap_out, store, in_merge_conflict, run_opts())
+ assert ret == 1
+ assert b'Unmerged files. Resolve before committing.' in printed
+
+
+def test_merge_conflict_modified(cap_out, store, in_merge_conflict):
+ # Touch another file so we have unstaged non-conflicting things
+ assert os.path.exists('dummy')
+ with open('dummy', 'w') as dummy_file:
+ dummy_file.write('bar\nbaz\n')
+
+ ret, printed = _do_run(cap_out, store, in_merge_conflict, run_opts())
+ assert ret == 1
+ assert b'Unmerged files. Resolve before committing.' in printed
+
+
+def test_merge_conflict_resolved(cap_out, store, in_merge_conflict):
+ cmd_output('git', 'add', '.')
+ ret, printed = _do_run(cap_out, store, in_merge_conflict, run_opts())
+ for msg in (
+ b'Checking merge-conflict files only.', b'Bash hook', b'Passed',
+ ):
+ assert msg in printed
+
+
+@pytest.mark.parametrize(
+ ('hooks', 'expected'),
+ (
+ ([], 80),
+ ([auto_namedtuple(id='a', name='a' * 51)], 81),
+ (
+ [
+ auto_namedtuple(id='a', name='a' * 51),
+ auto_namedtuple(id='b', name='b' * 52),
+ ],
+ 82,
+ ),
+ ),
+)
+def test_compute_cols(hooks, expected):
+ assert _compute_cols(hooks) == expected
+
+
+@pytest.mark.parametrize(
+ ('environ', 'expected_output'),
+ (
+ ({}, set()),
+ ({'SKIP': ''}, set()),
+ ({'SKIP': ','}, set()),
+ ({'SKIP': ',foo'}, {'foo'}),
+ ({'SKIP': 'foo'}, {'foo'}),
+ ({'SKIP': 'foo,bar'}, {'foo', 'bar'}),
+ ({'SKIP': ' foo , bar'}, {'foo', 'bar'}),
+ ),
+)
+def test_get_skips(environ, expected_output):
+ ret = _get_skips(environ)
+ assert ret == expected_output
+
+
+def test_skip_hook(cap_out, store, repo_with_passing_hook):
+ ret, printed = _do_run(
+ cap_out, store, repo_with_passing_hook, run_opts(),
+ {'SKIP': 'bash_hook'},
+ )
+ for msg in (b'Bash hook', b'Skipped'):
+ assert msg in printed
+
+
+def test_skip_aliased_hook(cap_out, store, aliased_repo):
+ ret, printed = _do_run(
+ cap_out, store, aliased_repo,
+ run_opts(hook='foo_bash'),
+ {'SKIP': 'foo_bash'},
+ )
+ assert ret == 0
+ # Only the aliased hook runs and is skipped
+ for msg in (b'Bash hook', b'Skipped'):
+ assert printed.count(msg) == 1
+
+
+def test_hook_id_not_in_non_verbose_output(
+ cap_out, store, repo_with_passing_hook,
+):
+ ret, printed = _do_run(
+ cap_out, store, repo_with_passing_hook, run_opts(verbose=False),
+ )
+ assert b'[bash_hook]' not in printed
+
+
+def test_hook_id_in_verbose_output(cap_out, store, repo_with_passing_hook):
+ ret, printed = _do_run(
+ cap_out, store, repo_with_passing_hook, run_opts(verbose=True),
+ )
+ assert b'- hook id: bash_hook' in printed
+
+
+def test_multiple_hooks_same_id(cap_out, store, repo_with_passing_hook):
+ with cwd(repo_with_passing_hook):
+        # Add the bash hook a second time
+ with modify_config() as config:
+ config['repos'][0]['hooks'].append({'id': 'bash_hook'})
+ stage_a_file()
+
+ ret, output = _do_run(cap_out, store, repo_with_passing_hook, run_opts())
+ assert ret == 0
+ assert output.count(b'Bash hook') == 2
+
+
+def test_aliased_hook_run(cap_out, store, aliased_repo):
+ ret, output = _do_run(
+ cap_out, store, aliased_repo,
+ run_opts(verbose=True, hook='bash_hook'),
+ )
+ assert ret == 0
+ # Both hooks will run since they share the same ID
+ assert output.count(b'Bash hook') == 2
+
+ ret, output = _do_run(
+ cap_out, store, aliased_repo,
+ run_opts(verbose=True, hook='foo_bash'),
+ )
+ assert ret == 0
+ # Only the aliased hook runs
+ assert output.count(b'Bash hook') == 1
+
+
+def test_non_ascii_hook_id(repo_with_passing_hook, tempdir_factory):
+ with cwd(repo_with_passing_hook):
+ _, stdout, _ = cmd_output_mocked_pre_commit_home(
+ sys.executable, '-m', 'pre_commit.main', 'run', 'β˜ƒ',
+ retcode=None, tempdir_factory=tempdir_factory,
+ )
+ assert 'UnicodeDecodeError' not in stdout
+ # Doesn't actually happen, but a reasonable assertion
+ assert 'UnicodeEncodeError' not in stdout
+
+
+def test_stdout_write_bug_py26(repo_with_failing_hook, store, tempdir_factory):
+ with cwd(repo_with_failing_hook):
+ with modify_config() as config:
+ config['repos'][0]['hooks'][0]['args'] = ['β˜ƒ']
+ stage_a_file()
+
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+
+ # Have to use subprocess because pytest monkeypatches sys.stdout
+ _, out = git_commit(
+ fn=cmd_output_mocked_pre_commit_home,
+ tempdir_factory=tempdir_factory,
+ retcode=None,
+ )
+ assert 'UnicodeEncodeError' not in out
+ # Doesn't actually happen, but a reasonable assertion
+ assert 'UnicodeDecodeError' not in out
+
+
+def test_lots_of_files(store, tempdir_factory):
+    # Windows xargs seems to have a bug; this is a regression test for
+    # our workaround.
+ git_path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ with cwd(git_path):
+ # Override files so we run against them
+ with modify_config() as config:
+ config['repos'][0]['hooks'][0]['files'] = ''
+
+ # Write a crap ton of files
+ for i in range(400):
+ open(f'{"a" * 100}{i}', 'w').close()
+
+ cmd_output('git', 'add', '.')
+ install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+
+ git_commit(
+ fn=cmd_output_mocked_pre_commit_home,
+ tempdir_factory=tempdir_factory,
+ )
+
+
+def test_stages(cap_out, store, repo_with_passing_hook):
+ config = {
+ 'repo': 'local',
+ 'hooks': [
+ {
+ 'id': f'do-not-commit-{i}',
+ 'name': f'hook {i}',
+ 'entry': 'DO NOT COMMIT',
+ 'language': 'pygrep',
+ 'stages': [stage],
+ }
+ for i, stage in enumerate(('commit', 'push', 'manual'), 1)
+ ],
+ }
+ add_config_to_repo(repo_with_passing_hook, config)
+
+ stage_a_file()
+
+ def _run_for_stage(stage):
+ args = run_opts(hook_stage=stage)
+ ret, printed = _do_run(cap_out, store, repo_with_passing_hook, args)
+ assert not ret, (ret, printed)
+ # this test should only run one hook
+ assert printed.count(b'hook ') == 1
+ return printed
+
+ assert _run_for_stage('commit').startswith(b'hook 1...')
+ assert _run_for_stage('push').startswith(b'hook 2...')
+ assert _run_for_stage('manual').startswith(b'hook 3...')
+
+
+def test_commit_msg_hook(cap_out, store, commit_msg_repo):
+ filename = '.git/COMMIT_EDITMSG'
+ with open(filename, 'w') as f:
+ f.write('This is the commit message')
+
+ _test_run(
+ cap_out,
+ store,
+ commit_msg_repo,
+ {'hook_stage': 'commit-msg', 'commit_msg_filename': filename},
+ expected_outputs=[b'Must have "Signed off by:"', b'Failed'],
+ expected_ret=1,
+ stage=False,
+ )
+
+
+def test_post_checkout_hook(cap_out, store, tempdir_factory):
+ path = git_dir(tempdir_factory)
+ config = {
+ 'repo': 'meta', 'hooks': [
+ {'id': 'identity', 'stages': ['post-checkout']},
+ ],
+ }
+ add_config_to_repo(path, config)
+
+ with cwd(path):
+ _test_run(
+ cap_out,
+ store,
+ path,
+ {'hook_stage': 'post-checkout'},
+ expected_outputs=[b'identity...'],
+ expected_ret=0,
+ stage=False,
+ )
+
+
+def test_prepare_commit_msg_hook(cap_out, store, prepare_commit_msg_repo):
+ filename = '.git/COMMIT_EDITMSG'
+ with open(filename, 'w') as f:
+ f.write('This is the commit message')
+
+ _test_run(
+ cap_out,
+ store,
+ prepare_commit_msg_repo,
+ {'hook_stage': 'prepare-commit-msg', 'commit_msg_filename': filename},
+ expected_outputs=[b'Add "Signed off by:"', b'Passed'],
+ expected_ret=0,
+ stage=False,
+ )
+
+ with open(filename) as f:
+ assert 'Signed off by: ' in f.read()
+
+
+def test_local_hook_passes(cap_out, store, repo_with_passing_hook):
+ config = {
+ 'repo': 'local',
+ 'hooks': [
+ {
+ 'id': 'identity-copy',
+ 'name': 'identity-copy',
+ 'entry': '{} -m pre_commit.meta_hooks.identity'.format(
+ shlex.quote(sys.executable),
+ ),
+ 'language': 'system',
+ 'files': r'\.py$',
+ },
+ {
+ 'id': 'do_not_commit',
+ 'name': 'Block if "DO NOT COMMIT" is found',
+ 'entry': 'DO NOT COMMIT',
+ 'language': 'pygrep',
+ },
+ ],
+ }
+ add_config_to_repo(repo_with_passing_hook, config)
+
+ with open('dummy.py', 'w') as staged_file:
+ staged_file.write('"""TODO: something"""\n')
+ cmd_output('git', 'add', 'dummy.py')
+
+ _test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ opts={},
+ expected_outputs=[b''],
+ expected_ret=0,
+ stage=False,
+ )
+
+
+def test_local_hook_fails(cap_out, store, repo_with_passing_hook):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'no-todo',
+ 'name': 'No TODO',
+ 'entry': 'sh -c "! grep -iI todo $@" --',
+ 'language': 'system',
+ }],
+ }
+ add_config_to_repo(repo_with_passing_hook, config)
+
+ with open('dummy.py', 'w') as staged_file:
+ staged_file.write('"""TODO: something"""\n')
+ cmd_output('git', 'add', 'dummy.py')
+
+ _test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ opts={},
+ expected_outputs=[b''],
+ expected_ret=1,
+ stage=False,
+ )
+
+
+def test_meta_hook_passes(cap_out, store, repo_with_passing_hook):
+ add_config_to_repo(repo_with_passing_hook, sample_meta_config())
+
+ _test_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ opts={},
+ expected_outputs=[b'Check for useless excludes'],
+ expected_ret=0,
+ stage=False,
+ )
+
+
+@pytest.fixture
+def modified_config_repo(repo_with_passing_hook):
+ with modify_config(repo_with_passing_hook, commit=False) as config:
+ # Some minor modification
+ config['repos'][0]['hooks'][0]['files'] = ''
+ yield repo_with_passing_hook
+
+
+def test_error_with_unstaged_config(cap_out, store, modified_config_repo):
+ args = run_opts()
+ ret, printed = _do_run(cap_out, store, modified_config_repo, args)
+ assert b'Your pre-commit configuration is unstaged.' in printed
+ assert ret == 1
+
+
+def test_commit_msg_missing_filename(cap_out, store, repo_with_passing_hook):
+ args = run_opts(hook_stage='commit-msg')
+ ret, printed = _do_run(cap_out, store, repo_with_passing_hook, args)
+ assert ret == 1
+ assert printed == (
+ b'[ERROR] `--commit-msg-filename` is required for '
+ b'`--hook-stage commit-msg`\n'
+ )
+
+
+@pytest.mark.parametrize(
+ 'opts', (run_opts(all_files=True), run_opts(files=[C.CONFIG_FILE])),
+)
+def test_no_unstaged_error_with_all_files_or_files(
+ cap_out, store, modified_config_repo, opts,
+):
+ ret, printed = _do_run(cap_out, store, modified_config_repo, opts)
+ assert b'Your pre-commit configuration is unstaged.' not in printed
+
+
+def test_files_running_subdir(repo_with_passing_hook, tempdir_factory):
+ with cwd(repo_with_passing_hook):
+ os.mkdir('subdir')
+ open('subdir/foo.py', 'w').close()
+ cmd_output('git', 'add', 'subdir/foo.py')
+
+ with cwd('subdir'):
+ # Use subprocess to demonstrate behaviour in main
+ _, stdout, _ = cmd_output_mocked_pre_commit_home(
+ sys.executable, '-m', 'pre_commit.main', 'run', '-v',
+ # Files relative to where we are (#339)
+ '--files', 'foo.py',
+ tempdir_factory=tempdir_factory,
+ )
+ assert 'subdir/foo.py' in stdout
+
+
+@pytest.mark.parametrize(
+ ('pass_filenames', 'hook_args', 'expected_out'),
+ (
+ (True, [], b'foo.py'),
+ (False, [], b''),
+ (True, ['some', 'args'], b'some args foo.py'),
+ (False, ['some', 'args'], b'some args'),
+ ),
+)
+def test_pass_filenames(
+ cap_out, store, repo_with_passing_hook,
+ pass_filenames, hook_args, expected_out,
+):
+ with modify_config() as config:
+ config['repos'][0]['hooks'][0]['pass_filenames'] = pass_filenames
+ config['repos'][0]['hooks'][0]['args'] = hook_args
+ stage_a_file()
+ ret, printed = _do_run(
+ cap_out, store, repo_with_passing_hook, run_opts(verbose=True),
+ )
+ assert expected_out + b'\nHello World' in printed
+ assert (b'foo.py' in printed) == pass_filenames
+
+
+def test_fail_fast(cap_out, store, repo_with_failing_hook):
+ with modify_config() as config:
+ # More than one hook
+ config['fail_fast'] = True
+ config['repos'][0]['hooks'] *= 2
+ stage_a_file()
+
+ ret, printed = _do_run(cap_out, store, repo_with_failing_hook, run_opts())
+ # it should have only run one hook
+ assert printed.count(b'Failing hook') == 1
+
+
+def test_classifier_removes_dne():
+ classifier = Classifier(('this_file_does_not_exist',))
+ assert classifier.filenames == []
+
+
+def test_classifier_normalizes_filenames_on_windows_to_forward_slashes(tmpdir):
+ with tmpdir.as_cwd():
+ tmpdir.join('a/b/c').ensure()
+ with mock.patch.object(os, 'altsep', '/'):
+ with mock.patch.object(os, 'sep', '\\'):
+ classifier = Classifier((r'a\b\c',))
+ assert classifier.filenames == ['a/b/c']
+
+
+def test_classifier_does_not_normalize_backslashes_non_windows(tmpdir):
+ with mock.patch.object(os.path, 'lexists', return_value=True):
+ with mock.patch.object(os, 'altsep', None):
+ with mock.patch.object(os, 'sep', '/'):
+ classifier = Classifier((r'a/b\c',))
+ assert classifier.filenames == [r'a/b\c']
+
+
+@pytest.fixture
+def some_filenames():
+ return (
+ '.pre-commit-hooks.yaml',
+ 'pre_commit/git.py',
+ 'pre_commit/main.py',
+ )
+
+
+def test_include_exclude_base_case(some_filenames):
+ ret = filter_by_include_exclude(some_filenames, '', '^$')
+ assert ret == [
+ '.pre-commit-hooks.yaml',
+ 'pre_commit/git.py',
+ 'pre_commit/main.py',
+ ]
+
+
+def test_matches_broken_symlink(tmpdir):
+ with tmpdir.as_cwd():
+ os.symlink('does-not-exist', 'link')
+ ret = filter_by_include_exclude({'link'}, '', '^$')
+ assert ret == ['link']
+
+
+def test_include_exclude_total_match(some_filenames):
+ ret = filter_by_include_exclude(some_filenames, r'^.*\.py$', '^$')
+ assert ret == ['pre_commit/git.py', 'pre_commit/main.py']
+
+
+def test_include_exclude_does_search_instead_of_match(some_filenames):
+ ret = filter_by_include_exclude(some_filenames, r'\.yaml$', '^$')
+ assert ret == ['.pre-commit-hooks.yaml']
+
+
+def test_include_exclude_exclude_removes_files(some_filenames):
+ ret = filter_by_include_exclude(some_filenames, '', r'\.py$')
+ assert ret == ['.pre-commit-hooks.yaml']
+
+
+def test_args_hook_only(cap_out, store, repo_with_passing_hook):
+ config = {
+ 'repo': 'local',
+ 'hooks': [
+ {
+ 'id': 'identity-copy',
+ 'name': 'identity-copy',
+ 'entry': '{} -m pre_commit.meta_hooks.identity'.format(
+ shlex.quote(sys.executable),
+ ),
+ 'language': 'system',
+ 'files': r'\.py$',
+ 'stages': ['commit'],
+ },
+ {
+ 'id': 'do_not_commit',
+ 'name': 'Block if "DO NOT COMMIT" is found',
+ 'entry': 'DO NOT COMMIT',
+ 'language': 'pygrep',
+ },
+ ],
+ }
+ add_config_to_repo(repo_with_passing_hook, config)
+ stage_a_file()
+ ret, printed = _do_run(
+ cap_out,
+ store,
+ repo_with_passing_hook,
+ run_opts(hook='do_not_commit'),
+ )
+ assert b'identity-copy' not in printed
diff --git a/tests/commands/sample_config_test.py b/tests/commands/sample_config_test.py
new file mode 100644
index 0000000..11c0876
--- /dev/null
+++ b/tests/commands/sample_config_test.py
@@ -0,0 +1,19 @@
+from pre_commit.commands.sample_config import sample_config
+
+
+def test_sample_config(capsys):
+ ret = sample_config()
+ assert ret == 0
+ out, _ = capsys.readouterr()
+ assert out == '''\
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v2.4.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ - id: check-added-large-files
+'''
diff --git a/tests/commands/try_repo_test.py b/tests/commands/try_repo_test.py
new file mode 100644
index 0000000..d3ec3fd
--- /dev/null
+++ b/tests/commands/try_repo_test.py
@@ -0,0 +1,151 @@
+import os.path
+import re
+import time
+from unittest import mock
+
+from pre_commit import git
+from pre_commit.commands.try_repo import try_repo
+from pre_commit.util import cmd_output
+from testing.auto_namedtuple import auto_namedtuple
+from testing.fixtures import git_dir
+from testing.fixtures import make_repo
+from testing.fixtures import modify_manifest
+from testing.util import cwd
+from testing.util import git_commit
+from testing.util import run_opts
+
+
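+# `try_repo_opts` builds the argument namespace `try_repo` expects.
+# `_get_out` drops [INFO] log lines and splits the captured output on the
+# 79-character separator into the preamble, generated config, and hook output.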
+def try_repo_opts(repo, ref=None, **kwargs):
+ return auto_namedtuple(repo=repo, ref=ref, **run_opts(**kwargs)._asdict())
+
+
+def _get_out(cap_out):
+ out = re.sub(r'\[INFO\].+\n', '', cap_out.get())
+ start, using_config, config, rest = out.split(f'{"=" * 79}\n')
+ assert using_config == 'Using config:\n'
+ return start, config, rest
+
+
+def _add_test_file():
+ open('test-file', 'a').close()
+ cmd_output('git', 'add', '.')
+
+
+def _run_try_repo(tempdir_factory, **kwargs):
+ repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')
+ with cwd(git_dir(tempdir_factory)):
+ _add_test_file()
+ assert not try_repo(try_repo_opts(repo, **kwargs))
+
+
+def test_try_repo_repo_only(cap_out, tempdir_factory):
+ with mock.patch.object(time, 'time', return_value=0.0):
+ _run_try_repo(tempdir_factory, verbose=True)
+ start, config, rest = _get_out(cap_out)
+ assert start == ''
+ assert re.match(
+ '^repos:\n'
+ '- repo: .+\n'
+ ' rev: .+\n'
+ ' hooks:\n'
+ ' - id: bash_hook\n'
+ ' - id: bash_hook2\n'
+ ' - id: bash_hook3\n$',
+ config,
+ )
+ assert rest == '''\
+Bash hook............................................(no files to check)Skipped
+- hook id: bash_hook
+Bash hook................................................................Passed
+- hook id: bash_hook2
+- duration: 0s
+
+test-file
+
+Bash hook............................................(no files to check)Skipped
+- hook id: bash_hook3
+'''
+
+
+def test_try_repo_with_specific_hook(cap_out, tempdir_factory):
+ _run_try_repo(tempdir_factory, hook='bash_hook', verbose=True)
+ start, config, rest = _get_out(cap_out)
+ assert start == ''
+ assert re.match(
+ '^repos:\n'
+ '- repo: .+\n'
+ ' rev: .+\n'
+ ' hooks:\n'
+ ' - id: bash_hook\n$',
+ config,
+ )
+ assert rest == '''\
+Bash hook............................................(no files to check)Skipped
+- hook id: bash_hook
+'''
+
+
+def test_try_repo_relative_path(cap_out, tempdir_factory):
+ repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')
+ with cwd(git_dir(tempdir_factory)):
+ _add_test_file()
+ relative_repo = os.path.relpath(repo, '.')
+ # previously crashed on cloning a relative path
+ assert not try_repo(try_repo_opts(relative_repo, hook='bash_hook'))
+
+
+def test_try_repo_bare_repo(cap_out, tempdir_factory):
+ repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')
+ with cwd(git_dir(tempdir_factory)):
+ _add_test_file()
+ bare_repo = os.path.join(repo, '.git')
+        # previously crashed while checking the repo for uncommitted changes
+ assert not try_repo(try_repo_opts(bare_repo, hook='bash_hook'))
+
+
+def test_try_repo_specific_revision(cap_out, tempdir_factory):
+ repo = make_repo(tempdir_factory, 'script_hooks_repo')
+ ref = git.head_rev(repo)
+ git_commit(cwd=repo)
+ with cwd(git_dir(tempdir_factory)):
+ _add_test_file()
+ assert not try_repo(try_repo_opts(repo, ref=ref))
+
+ _, config, _ = _get_out(cap_out)
+ assert ref in config
+
+
+def test_try_repo_uncommitted_changes(cap_out, tempdir_factory):
+ repo = make_repo(tempdir_factory, 'script_hooks_repo')
+ # make an uncommitted change
+ with modify_manifest(repo, commit=False) as manifest:
+ manifest[0]['name'] = 'modified name!'
+
+ with cwd(git_dir(tempdir_factory)):
+        open('test-file', 'a').close()
+ cmd_output('git', 'add', '.')
+ assert not try_repo(try_repo_opts(repo))
+
+ start, config, rest = _get_out(cap_out)
+ assert start == '[WARNING] Creating temporary repo with uncommitted changes...\n' # noqa: E501
+ assert re.match(
+ '^repos:\n'
+ '- repo: .+shadow-repo\n'
+ ' rev: .+\n'
+ ' hooks:\n'
+ ' - id: bash_hook\n$',
+ config,
+ )
+ assert rest == 'modified name!...........................................................Passed\n' # noqa: E501
+
+
+def test_try_repo_staged_changes(tempdir_factory):
+ repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')
+
+ with cwd(repo):
+ open('staged-file', 'a').close()
+ open('second-staged-file', 'a').close()
+ cmd_output('git', 'add', '.')
+
+ with cwd(git_dir(tempdir_factory)):
+ assert not try_repo(try_repo_opts(repo, hook='bash_hook'))
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..335d261
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,277 @@
+import functools
+import io
+import logging
+import os.path
+from unittest import mock
+
+import pytest
+
+from pre_commit import output
+from pre_commit.envcontext import envcontext
+from pre_commit.logging_handler import logging_handler
+from pre_commit.store import Store
+from pre_commit.util import cmd_output
+from pre_commit.util import make_executable
+from testing.fixtures import git_dir
+from testing.fixtures import make_consuming_repo
+from testing.fixtures import write_config
+from testing.util import cwd
+from testing.util import git_commit
+
+
+@pytest.fixture(autouse=True)
+def no_warnings(recwarn):
+ yield
+ warnings = []
+ for warning in recwarn: # pragma: no cover
+ message = str(warning.message)
+ # ImportWarning: Not importing directory '...' missing __init__(.py)
+ if not (
+ isinstance(warning.message, ImportWarning) and
+ message.startswith('Not importing directory ') and
+ ' missing __init__' in message
+ ):
+ warnings.append(
+ f'{warning.filename}:{warning.lineno} {message}',
+ )
+ assert not warnings
+
+
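+# Hands out numbered sub-directories of pytest's `tmpdir`, one per `get()`
+# call, so a test can create several independent repositories.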
+@pytest.fixture
+def tempdir_factory(tmpdir):
+ class TmpdirFactory:
+ def __init__(self):
+ self.tmpdir_count = 0
+
+ def get(self):
+ path = tmpdir.join(str(self.tmpdir_count)).strpath
+ self.tmpdir_count += 1
+ os.mkdir(path)
+ return path
+
+ yield TmpdirFactory()
+
+
+@pytest.fixture
+def in_tmpdir(tempdir_factory):
+ path = tempdir_factory.get()
+ with cwd(path):
+ yield path
+
+
+@pytest.fixture
+def in_git_dir(tmpdir):
+ repo = tmpdir.join('repo').ensure_dir()
+ with repo.as_cwd():
+ cmd_output('git', 'init')
+ yield repo
+
+
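+# Create two branches off origin/master with conflicting edits to
+# `conflict_file` and merge them, leaving the checkout in a merge conflict.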
+def _make_conflict():
+ cmd_output('git', 'checkout', 'origin/master', '-b', 'foo')
+ with open('conflict_file', 'w') as conflict_file:
+ conflict_file.write('herp\nderp\n')
+ cmd_output('git', 'add', 'conflict_file')
+ with open('foo_only_file', 'w') as foo_only_file:
+ foo_only_file.write('foo')
+ cmd_output('git', 'add', 'foo_only_file')
+ git_commit(msg=_make_conflict.__name__)
+ cmd_output('git', 'checkout', 'origin/master', '-b', 'bar')
+ with open('conflict_file', 'w') as conflict_file:
+ conflict_file.write('harp\nddrp\n')
+ cmd_output('git', 'add', 'conflict_file')
+ with open('bar_only_file', 'w') as bar_only_file:
+ bar_only_file.write('bar')
+ cmd_output('git', 'add', 'bar_only_file')
+ git_commit(msg=_make_conflict.__name__)
+ cmd_output('git', 'merge', 'foo', retcode=None)
+
+
+@pytest.fixture
+def in_merge_conflict(tempdir_factory):
+ path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
+ open(os.path.join(path, 'dummy'), 'a').close()
+ cmd_output('git', 'add', 'dummy', cwd=path)
+ git_commit(msg=in_merge_conflict.__name__, cwd=path)
+
+ conflict_path = tempdir_factory.get()
+ cmd_output('git', 'clone', path, conflict_path)
+ with cwd(conflict_path):
+ _make_conflict()
+ yield os.path.join(conflict_path)
+
+
+@pytest.fixture
+def in_conflicting_submodule(tempdir_factory):
+ git_dir_1 = git_dir(tempdir_factory)
+ git_dir_2 = git_dir(tempdir_factory)
+ git_commit(msg=in_conflicting_submodule.__name__, cwd=git_dir_2)
+ cmd_output('git', 'submodule', 'add', git_dir_2, 'sub', cwd=git_dir_1)
+ with cwd(os.path.join(git_dir_1, 'sub')):
+ _make_conflict()
+ yield
+
+
+@pytest.fixture
+def commit_msg_repo(tempdir_factory):
+ path = git_dir(tempdir_factory)
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'must-have-signoff',
+ 'name': 'Must have "Signed off by:"',
+ 'entry': 'grep -q "Signed off by:"',
+ 'language': 'system',
+ 'stages': ['commit-msg'],
+ }],
+ }
+ write_config(path, config)
+ with cwd(path):
+ cmd_output('git', 'add', '.')
+ git_commit(msg=commit_msg_repo.__name__)
+ yield path
+
+
+@pytest.fixture
+def prepare_commit_msg_repo(tempdir_factory):
+ path = git_dir(tempdir_factory)
+ script_name = 'add_sign_off.sh'
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'add-signoff',
+ 'name': 'Add "Signed off by:"',
+ 'entry': f'./{script_name}',
+ 'language': 'script',
+ 'stages': ['prepare-commit-msg'],
+ }],
+ }
+ write_config(path, config)
+ with cwd(path):
+ with open(script_name, 'w') as script_file:
+ script_file.write(
+ '#!/usr/bin/env bash\n'
+ 'set -eu\n'
+ 'echo "\nSigned off by: " >> "$1"\n',
+ )
+ make_executable(script_name)
+ cmd_output('git', 'add', '.')
+ git_commit(msg=prepare_commit_msg_repo.__name__)
+ yield path
+
+
+@pytest.fixture
+def failing_prepare_commit_msg_repo(tempdir_factory):
+ path = git_dir(tempdir_factory)
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'add-signoff',
+ 'name': 'Add "Signed off by:"',
+ 'entry': 'bash -c "exit 1"',
+ 'language': 'system',
+ 'stages': ['prepare-commit-msg'],
+ }],
+ }
+ write_config(path, config)
+ with cwd(path):
+ cmd_output('git', 'add', '.')
+ git_commit(msg=failing_prepare_commit_msg_repo.__name__)
+ yield path
+
+
+@pytest.fixture(autouse=True, scope='session')
+def dont_write_to_home_directory():
+    """pre_commit.store.Store writes to the home directory by default.
+    We mock out `Store.get_default_directory` to always raise so that we
+    never construct a `Store` object that writes to the real home directory.
+    """
+ class YouForgotToExplicitlyChooseAStoreDirectory(AssertionError):
+ pass
+
+ with mock.patch.object(
+ Store,
+ 'get_default_directory',
+ side_effect=YouForgotToExplicitlyChooseAStoreDirectory,
+ ):
+ yield
+
+
+@pytest.fixture(autouse=True, scope='session')
+def configure_logging():
+ with logging_handler(use_color=False):
+ yield
+
+
+@pytest.fixture
+def mock_store_dir(tempdir_factory):
+ tmpdir = tempdir_factory.get()
+ with mock.patch.object(
+ Store,
+ 'get_default_directory',
+ return_value=tmpdir,
+ ):
+ yield tmpdir
+
+
+@pytest.fixture
+def store(tempdir_factory):
+ yield Store(os.path.join(tempdir_factory.get(), '.pre-commit'))
+
+
+@pytest.fixture
+def log_info_mock():
+ with mock.patch.object(logging.getLogger('pre_commit'), 'info') as mck:
+ yield mck
+
+
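+# `FakeStream` and `Fixture` back the `cap_out` fixture below: they capture
+# everything written through `pre_commit.output` so tests can assert on it.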
+class FakeStream:
+ def __init__(self):
+ self.data = io.BytesIO()
+
+ def write(self, s):
+ self.data.write(s)
+
+ def flush(self):
+ pass
+
+
+class Fixture:
+ def __init__(self, stream):
+ self._stream = stream
+
+ def get_bytes(self):
+        """Get the output as if no encoding had occurred."""
+ data = self._stream.data.getvalue()
+ self._stream.data.seek(0)
+ self._stream.data.truncate()
+ return data.replace(b'\r\n', b'\n')
+
+ def get(self):
+ """Get the output assuming it was written as UTF-8 bytes"""
+ return self.get_bytes().decode()
+
+
+@pytest.fixture
+def cap_out():
+ stream = FakeStream()
+ write = functools.partial(output.write, stream=stream)
+ write_line_b = functools.partial(output.write_line_b, stream=stream)
+ with mock.patch.multiple(output, write=write, write_line_b=write_line_b):
+ yield Fixture(stream)
+
+
+@pytest.fixture
+def fake_log_handler():
+ handler = mock.Mock(level=logging.INFO)
+ logger = logging.getLogger('pre_commit')
+ logger.addHandler(handler)
+ yield handler
+ logger.removeHandler(handler)
+
+
+@pytest.fixture(scope='session', autouse=True)
+def set_git_templatedir(tmpdir_factory):
+ tdir = str(tmpdir_factory.mktemp('git_template_dir'))
+ with envcontext((('GIT_TEMPLATE_DIR', tdir),)):
+ yield
diff --git a/tests/envcontext_test.py b/tests/envcontext_test.py
new file mode 100644
index 0000000..f9d4dce
--- /dev/null
+++ b/tests/envcontext_test.py
@@ -0,0 +1,101 @@
+import os
+from unittest import mock
+
+import pytest
+
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import UNSET
+from pre_commit.envcontext import Var
+
+
+def _test(*, before, patch, expected):
+ env = before.copy()
+ with envcontext(patch, _env=env):
+ assert env == expected
+ assert env == before
+
+
+def test_trivial():
+ _test(before={}, patch={}, expected={})
+
+
+def test_noop():
+ _test(before={'foo': 'bar'}, patch=(), expected={'foo': 'bar'})
+
+
+def test_adds():
+ _test(before={}, patch=[('foo', 'bar')], expected={'foo': 'bar'})
+
+
+def test_overrides():
+ _test(
+ before={'foo': 'baz'},
+ patch=[('foo', 'bar')],
+ expected={'foo': 'bar'},
+ )
+
+
+def test_unset_but_nothing_to_unset():
+ _test(before={}, patch=[('foo', UNSET)], expected={})
+
+
+def test_unset_things_to_remove():
+ _test(
+ before={'PYTHONHOME': ''},
+ patch=[('PYTHONHOME', UNSET)],
+ expected={},
+ )
+
+
+def test_templated_environment_variable_missing():
+ _test(
+ before={},
+ patch=[('PATH', ('~/bin:', Var('PATH')))],
+ expected={'PATH': '~/bin:'},
+ )
+
+
+def test_templated_environment_variable_defaults():
+ _test(
+ before={},
+ patch=[('PATH', ('~/bin:', Var('PATH', default='/bin')))],
+ expected={'PATH': '~/bin:/bin'},
+ )
+
+
+def test_templated_environment_variable_there():
+ _test(
+ before={'PATH': '/usr/local/bin:/usr/bin'},
+ patch=[('PATH', ('~/bin:', Var('PATH')))],
+ expected={'PATH': '~/bin:/usr/local/bin:/usr/bin'},
+ )
+
+
+def test_templated_environ_sources_from_previous():
+ _test(
+ before={'foo': 'bar'},
+ patch=(
+ ('foo', 'baz'),
+ ('herp', ('foo: ', Var('foo'))),
+ ),
+ expected={'foo': 'baz', 'herp': 'foo: bar'},
+ )
+
+
+def test_exception_safety():
+ class MyError(RuntimeError):
+ pass
+
+ env = {'hello': 'world'}
+ with pytest.raises(MyError):
+ with envcontext((('foo', 'bar'),), _env=env):
+ raise MyError()
+ assert env == {'hello': 'world'}
+
+
+def test_integration_os_environ():
+ with mock.patch.dict(os.environ, {'FOO': 'bar'}, clear=True):
+ assert os.environ == {'FOO': 'bar'}
+ with envcontext((('HERP', 'derp'),)):
+ assert os.environ == {'FOO': 'bar', 'HERP': 'derp'}
+ assert os.environ == {'FOO': 'bar'}
diff --git a/tests/error_handler_test.py b/tests/error_handler_test.py
new file mode 100644
index 0000000..833bb8f
--- /dev/null
+++ b/tests/error_handler_test.py
@@ -0,0 +1,170 @@
+import os.path
+import re
+import sys
+from unittest import mock
+
+import pytest
+
+from pre_commit import error_handler
+from pre_commit.util import CalledProcessError
+from testing.util import cmd_output_mocked_pre_commit_home
+
+
+@pytest.fixture
+def mocked_log_and_exit():
+ with mock.patch.object(error_handler, '_log_and_exit') as log_and_exit:
+ yield log_and_exit
+
+
+def test_error_handler_no_exception(mocked_log_and_exit):
+ with error_handler.error_handler():
+ pass
+ assert mocked_log_and_exit.call_count == 0
+
+
+def test_error_handler_fatal_error(mocked_log_and_exit):
+ exc = error_handler.FatalError('just a test')
+ with error_handler.error_handler():
+ raise exc
+
+ mocked_log_and_exit.assert_called_once_with(
+ 'An error has occurred',
+ exc,
+ # Tested below
+ mock.ANY,
+ )
+
+ assert re.match(
+ r'Traceback \(most recent call last\):\n'
+ r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
+ r' yield\n'
+ r' File ".+tests.error_handler_test.py", line \d+, '
+ r'in test_error_handler_fatal_error\n'
+ r' raise exc\n'
+ r'(pre_commit\.error_handler\.)?FatalError: just a test\n',
+ mocked_log_and_exit.call_args[0][2],
+ )
+
+
+def test_error_handler_uncaught_error(mocked_log_and_exit):
+ exc = ValueError('another test')
+ with error_handler.error_handler():
+ raise exc
+
+ mocked_log_and_exit.assert_called_once_with(
+ 'An unexpected error has occurred',
+ exc,
+ # Tested below
+ mock.ANY,
+ )
+ assert re.match(
+ r'Traceback \(most recent call last\):\n'
+ r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
+ r' yield\n'
+ r' File ".+tests.error_handler_test.py", line \d+, '
+ r'in test_error_handler_uncaught_error\n'
+ r' raise exc\n'
+ r'ValueError: another test\n',
+ mocked_log_and_exit.call_args[0][2],
+ )
+
+
+def test_error_handler_keyboardinterrupt(mocked_log_and_exit):
+ exc = KeyboardInterrupt()
+ with error_handler.error_handler():
+ raise exc
+
+ mocked_log_and_exit.assert_called_once_with(
+ 'Interrupted (^C)',
+ exc,
+ # Tested below
+ mock.ANY,
+ )
+ assert re.match(
+ r'Traceback \(most recent call last\):\n'
+ r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
+ r' yield\n'
+ r' File ".+tests.error_handler_test.py", line \d+, '
+ r'in test_error_handler_keyboardinterrupt\n'
+ r' raise exc\n'
+ r'KeyboardInterrupt\n',
+ mocked_log_and_exit.call_args[0][2],
+ )
+
+
+def test_log_and_exit(cap_out, mock_store_dir):
+ with pytest.raises(SystemExit):
+ error_handler._log_and_exit(
+ 'msg', error_handler.FatalError('hai'), "I'm a stacktrace",
+ )
+
+ printed = cap_out.get()
+ log_file = os.path.join(mock_store_dir, 'pre-commit.log')
+ assert printed == f'msg: FatalError: hai\nCheck the log at {log_file}\n'
+
+ assert os.path.exists(log_file)
+ with open(log_file) as f:
+ logged = f.read()
+ expected = (
+ r'^### version information\n'
+ r'\n'
+ r'```\n'
+ r'pre-commit version: \d+\.\d+\.\d+\n'
+ r'sys.version:\n'
+ r'( .*\n)*'
+ r'sys.executable: .*\n'
+ r'os.name: .*\n'
+ r'sys.platform: .*\n'
+ r'```\n'
+ r'\n'
+ r'### error information\n'
+ r'\n'
+ r'```\n'
+ r'msg: FatalError: hai\n'
+ r'```\n'
+ r'\n'
+ r'```\n'
+ r"I'm a stacktrace\n"
+ r'```\n'
+ )
+ assert re.match(expected, logged)
+
+
+def test_error_handler_non_ascii_exception(mock_store_dir):
+ with pytest.raises(SystemExit):
+ with error_handler.error_handler():
+ raise ValueError('β˜ƒ')
+
+
+def test_error_handler_non_utf8_exception(mock_store_dir):
+ with pytest.raises(SystemExit):
+ with error_handler.error_handler():
+ raise CalledProcessError(1, ('exe',), 0, b'error: \xa0\xe1', b'')
+
+
+def test_error_handler_non_stringable_exception(mock_store_dir):
+ class C(Exception):
+ def __str__(self):
+ raise RuntimeError('not today!')
+
+ with pytest.raises(SystemExit):
+ with error_handler.error_handler():
+ raise C()
+
+
+def test_error_handler_no_tty(tempdir_factory):
+ pre_commit_home = tempdir_factory.get()
+ ret, out, _ = cmd_output_mocked_pre_commit_home(
+ sys.executable,
+ '-c',
+ 'from pre_commit.error_handler import error_handler\n'
+ 'with error_handler():\n'
+ ' raise ValueError("\\u2603")\n',
+ retcode=1,
+ tempdir_factory=tempdir_factory,
+ pre_commit_home=pre_commit_home,
+ )
+ log_file = os.path.join(pre_commit_home, 'pre-commit.log')
+ out_lines = out.splitlines()
+ assert out_lines[-2] == 'An unexpected error has occurred: ValueError: β˜ƒ'
+ assert out_lines[-1] == f'Check the log at {log_file}'
diff --git a/tests/git_test.py b/tests/git_test.py
new file mode 100644
index 0000000..e73a6f2
--- /dev/null
+++ b/tests/git_test.py
@@ -0,0 +1,188 @@
+import os.path
+
+import pytest
+
+from pre_commit import git
+from pre_commit.util import cmd_output
+from testing.util import git_commit
+
+
+def test_get_root_at_root(in_git_dir):
+ expected = os.path.normcase(in_git_dir.strpath)
+ assert os.path.normcase(git.get_root()) == expected
+
+
+def test_get_root_deeper(in_git_dir):
+ expected = os.path.normcase(in_git_dir.strpath)
+ with in_git_dir.join('foo').ensure_dir().as_cwd():
+ assert os.path.normcase(git.get_root()) == expected
+
+
+def test_get_staged_files_deleted(in_git_dir):
+ in_git_dir.join('test').ensure()
+ cmd_output('git', 'add', 'test')
+ git_commit()
+ cmd_output('git', 'rm', '--cached', 'test')
+ assert git.get_staged_files() == []
+
+
+def test_is_not_in_merge_conflict(in_git_dir):
+ assert git.is_in_merge_conflict() is False
+
+
+def test_is_in_merge_conflict(in_merge_conflict):
+ assert git.is_in_merge_conflict() is True
+
+
+def test_is_in_merge_conflict_submodule(in_conflicting_submodule):
+ assert git.is_in_merge_conflict() is True
+
+
+def test_cherry_pick_conflict(in_merge_conflict):
+ cmd_output('git', 'merge', '--abort')
+ foo_ref = cmd_output('git', 'rev-parse', 'foo')[1].strip()
+ cmd_output('git', 'cherry-pick', foo_ref, retcode=None)
+ assert git.is_in_merge_conflict() is False
+
+
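+# Resolve the conflict left by the merge-conflict fixtures by rewriting
+# `conflict_file` and staging it.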
+def resolve_conflict():
+ with open('conflict_file', 'w') as conflicted_file:
+ conflicted_file.write('herp\nderp\n')
+ cmd_output('git', 'add', 'conflict_file')
+
+
+def test_get_conflicted_files(in_merge_conflict):
+ resolve_conflict()
+ with open('other_file', 'w') as other_file:
+ other_file.write('oh hai')
+ cmd_output('git', 'add', 'other_file')
+
+ ret = set(git.get_conflicted_files())
+ assert ret == {'conflict_file', 'other_file'}
+
+
+def test_get_conflicted_files_in_submodule(in_conflicting_submodule):
+ resolve_conflict()
+ assert set(git.get_conflicted_files()) == {'conflict_file'}
+
+
+def test_get_conflicted_files_unstaged_files(in_merge_conflict):
+ """This case no longer occurs, but it is a useful test nonetheless"""
+ resolve_conflict()
+
+ # Make unstaged file.
+ with open('bar_only_file', 'w') as bar_only_file:
+ bar_only_file.write('new contents!\n')
+
+ ret = set(git.get_conflicted_files())
+ assert ret == {'conflict_file'}
+
+
+MERGE_MSG = b"Merge branch 'foo' into bar\n\nConflicts:\n\tconflict_file\n"
+OTHER_MERGE_MSG = MERGE_MSG + b'\tother_conflict_file\n'
+
+
+@pytest.mark.parametrize(
+ ('input', 'expected_output'),
+ (
+ (MERGE_MSG, ['conflict_file']),
+ (OTHER_MERGE_MSG, ['conflict_file', 'other_conflict_file']),
+ ),
+)
+def test_parse_merge_msg_for_conflicts(input, expected_output):
+ ret = git.parse_merge_msg_for_conflicts(input)
+ assert ret == expected_output
+
+
+def test_get_changed_files(in_git_dir):
+ git_commit()
+ in_git_dir.join('a.txt').ensure()
+ in_git_dir.join('b.txt').ensure()
+ cmd_output('git', 'add', '.')
+ git_commit()
+ files = git.get_changed_files('HEAD^', 'HEAD')
+ assert files == ['a.txt', 'b.txt']
+
+ # files changed in source but not in origin should not be returned
+ files = git.get_changed_files('HEAD', 'HEAD^')
+ assert files == []
+
+
+@pytest.mark.parametrize(
+ ('s', 'expected'),
+ (
+ ('foo\0bar\0', ['foo', 'bar']),
+ ('foo\0', ['foo']),
+ ('', []),
+ ('foo', ['foo']),
+ ),
+)
+def test_zsplit(s, expected):
+ assert git.zsplit(s) == expected
+
+
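+# A repository containing a committed file with a non-ASCII (Cyrillic) name.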
+@pytest.fixture
+def non_ascii_repo(in_git_dir):
+ git_commit()
+ in_git_dir.join('ΠΈΠ½Ρ‚Π΅Ρ€Π²ΡŒΡŽ').ensure()
+ cmd_output('git', 'add', '.')
+ git_commit()
+ yield in_git_dir
+
+
+def test_all_files_non_ascii(non_ascii_repo):
+ ret = git.get_all_files()
+ assert ret == ['ΠΈΠ½Ρ‚Π΅Ρ€Π²ΡŒΡŽ']
+
+
+def test_staged_files_non_ascii(non_ascii_repo):
+ non_ascii_repo.join('ΠΈΠ½Ρ‚Π΅Ρ€Π²ΡŒΡŽ').write('hi')
+ cmd_output('git', 'add', '.')
+ assert git.get_staged_files() == ['ΠΈΠ½Ρ‚Π΅Ρ€Π²ΡŒΡŽ']
+
+
+def test_changed_files_non_ascii(non_ascii_repo):
+ ret = git.get_changed_files('HEAD^', 'HEAD')
+ assert ret == ['ΠΈΠ½Ρ‚Π΅Ρ€Π²ΡŒΡŽ']
+
+
+def test_get_conflicted_files_non_ascii(in_merge_conflict):
+ open('ΠΈΠ½Ρ‚Π΅Ρ€Π²ΡŒΡŽ', 'a').close()
+ cmd_output('git', 'add', '.')
+ ret = git.get_conflicted_files()
+ assert ret == {'conflict_file', 'ΠΈΠ½Ρ‚Π΅Ρ€Π²ΡŒΡŽ'}
+
+
+def test_intent_to_add(in_git_dir):
+ in_git_dir.join('a').ensure()
+ cmd_output('git', 'add', '--intent-to-add', 'a')
+
+ assert git.intent_to_add_files() == ['a']
+
+
+def test_status_output_with_rename(in_git_dir):
+ in_git_dir.join('a').write('1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n')
+ cmd_output('git', 'add', 'a')
+ git_commit()
+ cmd_output('git', 'mv', 'a', 'b')
+ in_git_dir.join('c').ensure()
+ cmd_output('git', 'add', '--intent-to-add', 'c')
+
+ assert git.intent_to_add_files() == ['c']
+
+
+def test_no_git_env():
+ env = {
+ 'http_proxy': 'http://myproxy:80',
+ 'GIT_EXEC_PATH': '/some/git/exec/path',
+ 'GIT_SSH': '/usr/bin/ssh',
+ 'GIT_SSH_COMMAND': 'ssh -o',
+ 'GIT_DIR': '/none/shall/pass',
+ }
+ no_git_env = git.no_git_env(env)
+ assert no_git_env == {
+ 'http_proxy': 'http://myproxy:80',
+ 'GIT_EXEC_PATH': '/some/git/exec/path',
+ 'GIT_SSH': '/usr/bin/ssh',
+ 'GIT_SSH_COMMAND': 'ssh -o',
+ }
diff --git a/tests/languages/__init__.py b/tests/languages/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/languages/__init__.py
diff --git a/tests/languages/docker_test.py b/tests/languages/docker_test.py
new file mode 100644
index 0000000..171a3f7
--- /dev/null
+++ b/tests/languages/docker_test.py
@@ -0,0 +1,23 @@
+from unittest import mock
+
+from pre_commit.languages import docker
+from pre_commit.util import CalledProcessError
+
+
+def test_docker_is_running_process_error():
+ with mock.patch(
+ 'pre_commit.languages.docker.cmd_output_b',
+ side_effect=CalledProcessError(1, (), 0, b'', None),
+ ):
+ assert docker.docker_is_running() is False
+
+
+def test_docker_fallback_user():
+ def invalid_attribute():
+ raise AttributeError
+ with mock.patch.multiple(
+ 'os', create=True,
+ getuid=invalid_attribute,
+ getgid=invalid_attribute,
+ ):
+ assert docker.get_docker_user() == '1000:1000'
diff --git a/tests/languages/golang_test.py b/tests/languages/golang_test.py
new file mode 100644
index 0000000..9a64ed1
--- /dev/null
+++ b/tests/languages/golang_test.py
@@ -0,0 +1,20 @@
+import pytest
+
+from pre_commit.languages.golang import guess_go_dir
+
+
+@pytest.mark.parametrize(
+ ('url', 'expected'),
+ (
+ ('/im/a/path/on/disk', 'unknown_src_dir'),
+ ('file:///im/a/path/on/disk', 'unknown_src_dir'),
+ ('git@github.com:golang/lint', 'github.com/golang/lint'),
+ ('git://github.com/golang/lint', 'github.com/golang/lint'),
+ ('http://github.com/golang/lint', 'github.com/golang/lint'),
+ ('https://github.com/golang/lint', 'github.com/golang/lint'),
+ ('ssh://git@github.com/golang/lint', 'github.com/golang/lint'),
+ ('git@github.com:golang/lint.git', 'github.com/golang/lint'),
+ ),
+)
+def test_guess_go_dir(url, expected):
+ assert guess_go_dir(url) == expected
diff --git a/tests/languages/helpers_test.py b/tests/languages/helpers_test.py
new file mode 100644
index 0000000..c52e947
--- /dev/null
+++ b/tests/languages/helpers_test.py
@@ -0,0 +1,82 @@
+import multiprocessing
+import os
+import sys
+from unittest import mock
+
+import pytest
+
+import pre_commit.constants as C
+from pre_commit.languages import helpers
+from pre_commit.prefix import Prefix
+from pre_commit.util import CalledProcessError
+from testing.auto_namedtuple import auto_namedtuple
+
+
+def test_basic_get_default_version():
+ assert helpers.basic_get_default_version() == C.DEFAULT
+
+
+def test_basic_healthy():
+ assert helpers.basic_healthy(Prefix('.'), 'default') is True
+
+
+def test_failed_setup_command_does_not_unicode_error():
+ script = (
+ 'import sys\n'
+ "getattr(sys.stderr, 'buffer', sys.stderr).write(b'\\x81\\xfe')\n"
+ 'exit(1)\n'
+ )
+
+ # an assertion that this does not raise `UnicodeError`
+ with pytest.raises(CalledProcessError):
+ helpers.run_setup_cmd(Prefix('.'), (sys.executable, '-c', script))
+
+
+def test_assert_no_additional_deps():
+ with pytest.raises(AssertionError) as excinfo:
+ helpers.assert_no_additional_deps('lang', ['hmmm'])
+ msg, = excinfo.value.args
+ assert msg == (
+ 'For now, pre-commit does not support additional_dependencies for lang'
+ )
+
+
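+# Minimal hook stand-ins providing only the `require_serial` attribute that
+# `target_concurrency` inspects.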
+SERIAL_FALSE = auto_namedtuple(require_serial=False)
+SERIAL_TRUE = auto_namedtuple(require_serial=True)
+
+
+def test_target_concurrency_normal():
+ with mock.patch.object(multiprocessing, 'cpu_count', return_value=123):
+ with mock.patch.dict(os.environ, {}, clear=True):
+ assert helpers.target_concurrency(SERIAL_FALSE) == 123
+
+
+def test_target_concurrency_cpu_count_require_serial_true():
+ with mock.patch.dict(os.environ, {}, clear=True):
+ assert helpers.target_concurrency(SERIAL_TRUE) == 1
+
+
+def test_target_concurrency_testing_env_var():
+ with mock.patch.dict(
+ os.environ, {'PRE_COMMIT_NO_CONCURRENCY': '1'}, clear=True,
+ ):
+ assert helpers.target_concurrency(SERIAL_FALSE) == 1
+
+
+def test_target_concurrency_on_travis():
+ with mock.patch.dict(os.environ, {'TRAVIS': '1'}, clear=True):
+ assert helpers.target_concurrency(SERIAL_FALSE) == 2
+
+
+def test_target_concurrency_cpu_count_not_implemented():
+ with mock.patch.object(
+ multiprocessing, 'cpu_count', side_effect=NotImplementedError,
+ ):
+ with mock.patch.dict(os.environ, {}, clear=True):
+ assert helpers.target_concurrency(SERIAL_FALSE) == 1
+
+
+def test_shuffled_is_deterministic():
+ seq = [str(i) for i in range(10)]
+ expected = ['3', '7', '8', '2', '4', '6', '5', '1', '0', '9']
+ assert helpers._shuffled(seq) == expected
diff --git a/tests/languages/pygrep_test.py b/tests/languages/pygrep_test.py
new file mode 100644
index 0000000..cabea22
--- /dev/null
+++ b/tests/languages/pygrep_test.py
@@ -0,0 +1,65 @@
+import pytest
+
+from pre_commit.languages import pygrep
+
+
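+# Three sample files covering plain matches, regex flags, and quoting; the
+# fixture chdirs into the tmpdir so pygrep receives relative paths.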
+@pytest.fixture
+def some_files(tmpdir):
+ tmpdir.join('f1').write_binary(b'foo\nbar\n')
+ tmpdir.join('f2').write_binary(b'[INFO] hi\n')
+ tmpdir.join('f3').write_binary(b"with'quotes\n")
+ with tmpdir.as_cwd():
+ yield
+
+
+@pytest.mark.usefixtures('some_files')
+@pytest.mark.parametrize(
+ ('pattern', 'expected_retcode', 'expected_out'),
+ (
+ ('baz', 0, ''),
+ ('foo', 1, 'f1:1:foo\n'),
+ ('bar', 1, 'f1:2:bar\n'),
+ (r'(?i)\[info\]', 1, 'f2:1:[INFO] hi\n'),
+ ("h'q", 1, "f3:1:with'quotes\n"),
+ ),
+)
+def test_main(some_files, cap_out, pattern, expected_retcode, expected_out):
+ ret = pygrep.main((pattern, 'f1', 'f2', 'f3'))
+ out = cap_out.get()
+ assert ret == expected_retcode
+ assert out == expected_out
+
+
+def test_ignore_case(some_files, cap_out):
+ ret = pygrep.main(('--ignore-case', 'info', 'f1', 'f2', 'f3'))
+ out = cap_out.get()
+ assert ret == 1
+ assert out == 'f2:1:[INFO] hi\n'
+
+
+def test_multiline(some_files, cap_out):
+ ret = pygrep.main(('--multiline', r'foo\nbar', 'f1', 'f2', 'f3'))
+ out = cap_out.get()
+ assert ret == 1
+ assert out == 'f1:1:foo\nbar\n'
+
+
+def test_multiline_line_number(some_files, cap_out):
+ ret = pygrep.main(('--multiline', r'ar', 'f1', 'f2', 'f3'))
+ out = cap_out.get()
+ assert ret == 1
+ assert out == 'f1:2:bar\n'
+
+
+def test_multiline_dotall_flag_is_enabled(some_files, cap_out):
+ ret = pygrep.main(('--multiline', r'o.*bar', 'f1', 'f2', 'f3'))
+ out = cap_out.get()
+ assert ret == 1
+ assert out == 'f1:1:foo\nbar\n'
+
+
+def test_multiline_multiline_flag_is_enabled(some_files, cap_out):
+ ret = pygrep.main(('--multiline', r'foo$.*bar', 'f1', 'f2', 'f3'))
+ out = cap_out.get()
+ assert ret == 1
+ assert out == 'f1:1:foo\nbar\n'
diff --git a/tests/languages/python_test.py b/tests/languages/python_test.py
new file mode 100644
index 0000000..34c6c7f
--- /dev/null
+++ b/tests/languages/python_test.py
@@ -0,0 +1,75 @@
+import os.path
+import sys
+from unittest import mock
+
+import pytest
+
+import pre_commit.constants as C
+from pre_commit.languages import python
+from pre_commit.prefix import Prefix
+
+
+def test_norm_version_expanduser():
+ home = os.path.expanduser('~')
+ if os.name == 'nt': # pragma: nt cover
+ path = r'~\python343'
+ expected_path = fr'{home}\python343'
+ else: # pragma: nt no cover
+ path = '~/.pyenv/versions/3.4.3/bin/python'
+ expected_path = f'{home}/.pyenv/versions/3.4.3/bin/python'
+ result = python.norm_version(path)
+ assert result == expected_path
+
+
+@pytest.mark.parametrize('v', ('python3.6', 'python3', 'python'))
+def test_sys_executable_matches(v):
+ with mock.patch.object(sys, 'version_info', (3, 6, 7)):
+ assert python._sys_executable_matches(v)
+
+
+@pytest.mark.parametrize('v', ('notpython', 'python3.x'))
+def test_sys_executable_matches_does_not_match(v):
+ with mock.patch.object(sys, 'version_info', (3, 6, 7)):
+ assert not python._sys_executable_matches(v)
+
+
+@pytest.mark.parametrize(
+ ('exe', 'realpath', 'expected'), (
+ ('/usr/bin/python3', '/usr/bin/python3.7', 'python3'),
+ ('/usr/bin/python', '/usr/bin/python3.7', 'python3.7'),
+ ('/usr/bin/python', '/usr/bin/python', None),
+ ('/usr/bin/python3.6m', '/usr/bin/python3.6m', 'python3.6m'),
+ ('v/bin/python', 'v/bin/pypy', 'pypy'),
+ ),
+)
+def test_find_by_sys_executable(exe, realpath, expected):
+ with mock.patch.object(sys, 'executable', exe):
+ with mock.patch.object(os.path, 'realpath', return_value=realpath):
+ with mock.patch.object(python, 'find_executable', lambda x: x):
+ assert python._find_by_sys_executable() == expected
+
+
+def test_healthy_types_py_in_cwd(tmpdir):
+ with tmpdir.as_cwd():
+ prefix = tmpdir.join('prefix').ensure_dir()
+ prefix.join('setup.py').write('import setuptools; setuptools.setup()')
+ prefix = Prefix(str(prefix))
+ python.install_environment(prefix, C.DEFAULT, ())
+
+ # even if a `types.py` file exists, should still be healthy
+ tmpdir.join('types.py').ensure()
+ assert python.healthy(prefix, C.DEFAULT) is True
+
+
+def test_healthy_python_goes_missing(tmpdir):
+ with tmpdir.as_cwd():
+ prefix = tmpdir.join('prefix').ensure_dir()
+ prefix.join('setup.py').write('import setuptools; setuptools.setup()')
+ prefix = Prefix(str(prefix))
+ python.install_environment(prefix, C.DEFAULT, ())
+
+ exe_name = 'python' if sys.platform != 'win32' else 'python.exe'
+ py_exe = prefix.path(python.bin_dir('py_env-default'), exe_name)
+ os.remove(py_exe)
+
+ assert python.healthy(prefix, C.DEFAULT) is False
diff --git a/tests/languages/ruby_test.py b/tests/languages/ruby_test.py
new file mode 100644
index 0000000..36a029d
--- /dev/null
+++ b/tests/languages/ruby_test.py
@@ -0,0 +1,28 @@
+import os.path
+
+from pre_commit.languages import ruby
+from pre_commit.prefix import Prefix
+from pre_commit.util import cmd_output
+from testing.util import xfailif_windows_no_ruby
+
+
+@xfailif_windows_no_ruby
+def test_install_rbenv(tempdir_factory):
+ prefix = Prefix(tempdir_factory.get())
+ ruby._install_rbenv(prefix)
+ # Should have created rbenv directory
+ assert os.path.exists(prefix.path('rbenv-default'))
+
+ # Should be able to activate using our script and access rbenv
+ with ruby.in_env(prefix, 'default'):
+ cmd_output('rbenv', '--help')
+
+
+@xfailif_windows_no_ruby
+def test_install_rbenv_with_version(tempdir_factory):
+ prefix = Prefix(tempdir_factory.get())
+ ruby._install_rbenv(prefix, version='1.9.3p547')
+
+ # Should be able to activate and use rbenv install
+ with ruby.in_env(prefix, '1.9.3p547'):
+ cmd_output('rbenv', 'install', '--help')
diff --git a/tests/logging_handler_test.py b/tests/logging_handler_test.py
new file mode 100644
index 0000000..fe68593
--- /dev/null
+++ b/tests/logging_handler_test.py
@@ -0,0 +1,21 @@
+import logging
+
+from pre_commit import color
+from pre_commit.logging_handler import LoggingHandler
+
+
+def _log_record(message, level):
+ return logging.LogRecord('name', level, '', 1, message, {}, None)
+
+
+def test_logging_handler_color(cap_out):
+ handler = LoggingHandler(True)
+ handler.emit(_log_record('hi', logging.WARNING))
+ ret = cap_out.get()
+ assert ret == f'{color.YELLOW}[WARNING]{color.NORMAL} hi\n'
+
+
+def test_logging_handler_no_color(cap_out):
+ handler = LoggingHandler(False)
+ handler.emit(_log_record('hi', logging.WARNING))
+ assert cap_out.get() == '[WARNING] hi\n'
diff --git a/tests/main_test.py b/tests/main_test.py
new file mode 100644
index 0000000..c472476
--- /dev/null
+++ b/tests/main_test.py
@@ -0,0 +1,189 @@
+import argparse
+import os.path
+from unittest import mock
+
+import pytest
+
+import pre_commit.constants as C
+from pre_commit import main
+from pre_commit.error_handler import FatalError
+from testing.auto_namedtuple import auto_namedtuple
+
+
+@pytest.mark.parametrize(
+ ('argv', 'expected'),
+ (
+ ((), ['f']),
+ (('--f', 'x'), ['x']),
+ (('--f', 'x', '--f', 'y'), ['x', 'y']),
+ ),
+)
+def test_append_replace_default(argv, expected):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--f', action=main.AppendReplaceDefault, default=['f'])
+ assert parser.parse_args(argv).f == expected
+
+
+def _args(**kwargs):
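+ # build a minimal argparse.Namespace resembling parsed command-line arguments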
+ kwargs.setdefault('command', 'help')
+ kwargs.setdefault('config', C.CONFIG_FILE)
+ return argparse.Namespace(**kwargs)
+
+
+def test_adjust_args_and_chdir_not_in_git_dir(in_tmpdir):
+ with pytest.raises(FatalError):
+ main._adjust_args_and_chdir(_args())
+
+
+def test_adjust_args_and_chdir_in_dot_git_dir(in_git_dir):
+ with in_git_dir.join('.git').as_cwd(), pytest.raises(FatalError):
+ main._adjust_args_and_chdir(_args())
+
+
+def test_adjust_args_and_chdir_noop(in_git_dir):
+ args = _args(command='run', files=['f1', 'f2'])
+ main._adjust_args_and_chdir(args)
+ assert os.getcwd() == in_git_dir
+ assert args.config == C.CONFIG_FILE
+ assert args.files == ['f1', 'f2']
+
+
+def test_adjust_args_and_chdir_relative_things(in_git_dir):
+ in_git_dir.join('foo/cfg.yaml').ensure()
+ in_git_dir.join('foo').chdir()
+
+ args = _args(command='run', files=['f1', 'f2'], config='cfg.yaml')
+ main._adjust_args_and_chdir(args)
+ assert os.getcwd() == in_git_dir
+ assert args.config == os.path.join('foo', 'cfg.yaml')
+ assert args.files == [os.path.join('foo', 'f1'), os.path.join('foo', 'f2')]
+
+
+def test_adjust_args_and_chdir_non_relative_config(in_git_dir):
+ in_git_dir.join('foo').ensure_dir().chdir()
+
+ args = _args()
+ main._adjust_args_and_chdir(args)
+ assert os.getcwd() == in_git_dir
+ assert args.config == C.CONFIG_FILE
+
+
+def test_adjust_args_try_repo_repo_relative(in_git_dir):
+ in_git_dir.join('foo').ensure_dir().chdir()
+
+ args = _args(command='try-repo', repo='../foo', files=[])
+ assert args.repo is not None
+ assert os.path.exists(args.repo)
+ main._adjust_args_and_chdir(args)
+ assert os.getcwd() == in_git_dir
+ assert os.path.exists(args.repo)
+ assert args.repo == 'foo'
+
+
+FNS = (
+ 'autoupdate', 'clean', 'gc', 'hook_impl', 'install', 'install_hooks',
+ 'migrate_config', 'run', 'sample_config', 'uninstall',
+)
+CMDS = tuple(fn.replace('_', '-') for fn in FNS)
+
+
+@pytest.fixture
+def mock_commands():
+ mcks = {fn: mock.patch.object(main, fn).start() for fn in FNS}
+ ret = auto_namedtuple(**mcks)
+ yield ret
+ for mck in ret:
+ mck.stop()
+
+
+@pytest.fixture
+def argparse_parse_args_spy():
+ parse_args_mock = mock.Mock()
+
+ original_parse_args = argparse.ArgumentParser.parse_args
+
+ def fake_parse_args(self, args):
+ # call our spy object
+ parse_args_mock(args)
+ return original_parse_args(self, args)
+
+ with mock.patch.object(
+ argparse.ArgumentParser, 'parse_args', fake_parse_args,
+ ):
+ yield parse_args_mock
+
+
+def assert_only_one_mock_called(mock_objs):
+ total_call_count = sum(mock_obj.call_count for mock_obj in mock_objs)
+ assert total_call_count == 1
+
+
+def test_overall_help(mock_commands):
+ with pytest.raises(SystemExit):
+ main.main(['--help'])
+
+
+def test_help_command(mock_commands, argparse_parse_args_spy):
+ with pytest.raises(SystemExit):
+ main.main(['help'])
+
+ argparse_parse_args_spy.assert_has_calls([
+ mock.call(['help']),
+ mock.call(['--help']),
+ ])
+
+
+def test_help_other_command(mock_commands, argparse_parse_args_spy):
+ with pytest.raises(SystemExit):
+ main.main(['help', 'run'])
+
+ argparse_parse_args_spy.assert_has_calls([
+ mock.call(['help', 'run']),
+ mock.call(['run', '--help']),
+ ])
+
+
+@pytest.mark.parametrize('command', CMDS)
+def test_all_cmds(command, mock_commands, mock_store_dir):
+ main.main((command,))
+ assert getattr(mock_commands, command.replace('-', '_')).call_count == 1
+ assert_only_one_mock_called(mock_commands)
+
+
+def test_try_repo(mock_store_dir):
+ with mock.patch.object(main, 'try_repo') as patch:
+ main.main(('try-repo', '.'))
+ assert patch.call_count == 1
+
+
+def test_init_templatedir(mock_store_dir):
+ with mock.patch.object(main, 'init_templatedir') as patch:
+ main.main(('init-templatedir', 'tdir'))
+ assert patch.call_count == 1
+
+
+def test_help_cmd_in_empty_directory(
+ in_tmpdir,
+ mock_commands,
+ argparse_parse_args_spy,
+):
+ with pytest.raises(SystemExit):
+ main.main(['help', 'run'])
+
+ argparse_parse_args_spy.assert_has_calls([
+ mock.call(['help', 'run']),
+ mock.call(['run', '--help']),
+ ])
+
+
+def test_expected_fatal_error_no_git_repo(in_tmpdir, cap_out, mock_store_dir):
+ with pytest.raises(SystemExit):
+ main.main([])
+ log_file = os.path.join(mock_store_dir, 'pre-commit.log')
+ cap_out_lines = cap_out.get().splitlines()
+ assert (
+ cap_out_lines[-2] ==
+ 'An error has occurred: FatalError: git failed. '
+ 'Is it installed, and are you in a Git repository directory?'
+ )
+ assert cap_out_lines[-1] == f'Check the log at {log_file}'
diff --git a/tests/make_archives_test.py b/tests/make_archives_test.py
new file mode 100644
index 0000000..6ae2f8e
--- /dev/null
+++ b/tests/make_archives_test.py
@@ -0,0 +1,46 @@
+import tarfile
+
+from pre_commit import git
+from pre_commit import make_archives
+from pre_commit.util import cmd_output
+from testing.util import git_commit
+
+
+def test_make_archive(in_git_dir, tmpdir):
+ output_dir = tmpdir.join('output').ensure_dir()
+ # Add a file to the git directory
+ in_git_dir.join('foo').ensure()
+ cmd_output('git', 'add', '.')
+ git_commit()
+ # We'll use this rev
+ head_rev = git.head_rev('.')
+ # And add a file after that rev; it should not end up in the archive (checked below)
+ in_git_dir.join('bar').ensure()
+ cmd_output('git', 'add', '.')
+ git_commit()
+
+ # Create the archive at head_rev
+ archive_path = make_archives.make_archive(
+ 'foo', in_git_dir.strpath, head_rev, output_dir.strpath,
+ )
+
+ expected = output_dir.join('foo.tar.gz')
+ assert archive_path == expected.strpath
+ assert expected.exists()
+
+ extract_dir = tmpdir.join('extract').ensure_dir()
+ with tarfile.open(archive_path) as tf:
+ tf.extractall(extract_dir.strpath)
+
+ # Verify the contents of the tar
+ assert extract_dir.join('foo').isdir()
+ assert extract_dir.join('foo/foo').exists()
+ assert not extract_dir.join('foo/.git').exists()
+ assert not extract_dir.join('foo/bar').exists()
+
+
+def test_main(tmpdir):
+ make_archives.main(('--dest', tmpdir.strpath))
+
+ for archive, _, _ in make_archives.REPOS:
+ assert tmpdir.join(f'{archive}.tar.gz').exists()
diff --git a/tests/meta_hooks/__init__.py b/tests/meta_hooks/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/meta_hooks/__init__.py
diff --git a/tests/meta_hooks/check_hooks_apply_test.py b/tests/meta_hooks/check_hooks_apply_test.py
new file mode 100644
index 0000000..06bdd04
--- /dev/null
+++ b/tests/meta_hooks/check_hooks_apply_test.py
@@ -0,0 +1,138 @@
+from pre_commit.meta_hooks import check_hooks_apply
+from testing.fixtures import add_config_to_repo
+
+
+def test_hook_excludes_everything(capsys, in_git_dir, mock_store_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [
+ {
+ 'id': 'check-useless-excludes',
+ 'exclude': '.pre-commit-config.yaml',
+ },
+ ],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_hooks_apply.main(()) == 1
+
+ out, _ = capsys.readouterr()
+ assert 'check-useless-excludes does not apply to this repository' in out
+
+
+def test_hook_includes_nothing(capsys, in_git_dir, mock_store_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [
+ {
+ 'id': 'check-useless-excludes',
+ 'files': 'foo',
+ },
+ ],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_hooks_apply.main(()) == 1
+
+ out, _ = capsys.readouterr()
+ assert 'check-useless-excludes does not apply to this repository' in out
+
+
+def test_hook_types_not_matched(capsys, in_git_dir, mock_store_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [
+ {
+ 'id': 'check-useless-excludes',
+ 'types': ['python'],
+ },
+ ],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_hooks_apply.main(()) == 1
+
+ out, _ = capsys.readouterr()
+ assert 'check-useless-excludes does not apply to this repository' in out
+
+
+def test_hook_types_excludes_everything(capsys, in_git_dir, mock_store_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [
+ {
+ 'id': 'check-useless-excludes',
+ 'exclude_types': ['yaml'],
+ },
+ ],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_hooks_apply.main(()) == 1
+
+ out, _ = capsys.readouterr()
+ assert 'check-useless-excludes does not apply to this repository' in out
+
+
+def test_valid_exceptions(capsys, in_git_dir, mock_store_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'local',
+ 'hooks': [
+ # applies to a file
+ {
+ 'id': 'check-yaml',
+ 'name': 'check yaml',
+ 'entry': './check-yaml',
+ 'language': 'script',
+ 'files': r'\.yaml$',
+ },
+ # Should not be reported as an error due to language: fail
+ {
+ 'id': 'changelogs-rst',
+ 'name': 'changelogs must be rst',
+ 'entry': 'changelog filenames must end in .rst',
+ 'language': 'fail',
+ 'files': r'changelog/.*(?<!\.rst)$',
+ },
+ # Should not be reported as an error due to always_run
+ {
+ 'id': 'i-always-run',
+ 'name': 'make check',
+ 'entry': 'make check',
+ 'language': 'system',
+ 'files': '^$',
+ 'always_run': True,
+ },
+ ],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_hooks_apply.main(()) == 0
+
+ out, _ = capsys.readouterr()
+ assert out == ''
diff --git a/tests/meta_hooks/check_useless_excludes_test.py b/tests/meta_hooks/check_useless_excludes_test.py
new file mode 100644
index 0000000..d261e81
--- /dev/null
+++ b/tests/meta_hooks/check_useless_excludes_test.py
@@ -0,0 +1,115 @@
+from pre_commit.meta_hooks import check_useless_excludes
+from testing.fixtures import add_config_to_repo
+
+
+def test_useless_exclude_global(capsys, in_git_dir):
+ config = {
+ 'exclude': 'foo',
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [{'id': 'check-useless-excludes'}],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_useless_excludes.main(()) == 1
+
+ out, _ = capsys.readouterr()
+ out = out.strip()
+ assert "The global exclude pattern 'foo' does not match any files" == out
+
+
+def test_useless_exclude_for_hook(capsys, in_git_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [{'id': 'check-useless-excludes', 'exclude': 'foo'}],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_useless_excludes.main(()) == 1
+
+ out, _ = capsys.readouterr()
+ out = out.strip()
+ expected = (
+ "The exclude pattern 'foo' for check-useless-excludes "
+ 'does not match any files'
+ )
+ assert expected == out
+
+
+def test_useless_exclude_with_types_filter(capsys, in_git_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [
+ {
+ 'id': 'check-useless-excludes',
+ 'exclude': '.pre-commit-config.yaml',
+ 'types': ['python'],
+ },
+ ],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_useless_excludes.main(()) == 1
+
+ out, _ = capsys.readouterr()
+ out = out.strip()
+ expected = (
+ "The exclude pattern '.pre-commit-config.yaml' for "
+ 'check-useless-excludes does not match any files'
+ )
+ assert expected == out
+
+
+def test_no_excludes(capsys, in_git_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [{'id': 'check-useless-excludes'}],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_useless_excludes.main(()) == 0
+
+ out, _ = capsys.readouterr()
+ assert out == ''
+
+
+def test_valid_exclude(capsys, in_git_dir):
+ config = {
+ 'repos': [
+ {
+ 'repo': 'meta',
+ 'hooks': [
+ {
+ 'id': 'check-useless-excludes',
+ 'exclude': '.pre-commit-config.yaml',
+ },
+ ],
+ },
+ ],
+ }
+
+ add_config_to_repo(in_git_dir.strpath, config)
+
+ assert check_useless_excludes.main(()) == 0
+
+ out, _ = capsys.readouterr()
+ assert out == ''
diff --git a/tests/meta_hooks/identity_test.py b/tests/meta_hooks/identity_test.py
new file mode 100644
index 0000000..3eff00b
--- /dev/null
+++ b/tests/meta_hooks/identity_test.py
@@ -0,0 +1,6 @@
+from pre_commit.meta_hooks import identity
+
+
+def test_identity(cap_out):
+ assert not identity.main(('a', 'b', 'c'))
+ assert cap_out.get() == 'a\nb\nc\n'
diff --git a/tests/output_test.py b/tests/output_test.py
new file mode 100644
index 0000000..1cdacbb
--- /dev/null
+++ b/tests/output_test.py
@@ -0,0 +1,9 @@
+import io
+
+from pre_commit import output
+
+
+def test_output_write_writes():
+ stream = io.BytesIO()
+ output.write('hello world', stream)
+ assert stream.getvalue() == b'hello world'
diff --git a/tests/parse_shebang_test.py b/tests/parse_shebang_test.py
new file mode 100644
index 0000000..0bb19c7
--- /dev/null
+++ b/tests/parse_shebang_test.py
@@ -0,0 +1,152 @@
+import contextlib
+import os.path
+import shutil
+import sys
+
+import pytest
+
+from pre_commit import parse_shebang
+from pre_commit.envcontext import envcontext
+from pre_commit.envcontext import Var
+from pre_commit.util import make_executable
+
+
+def _echo_exe() -> str:
+ exe = shutil.which('echo')
+ assert exe is not None
+ return exe
+
+
+def test_file_doesnt_exist():
+ assert parse_shebang.parse_filename('herp derp derp') == ()
+
+
+def test_simple_case(tmpdir):
+ x = tmpdir.join('f')
+ x.write('#!/usr/bin/env echo')
+ make_executable(x.strpath)
+ assert parse_shebang.parse_filename(x.strpath) == ('echo',)
+
+
+def test_find_executable_full_path():
+ assert parse_shebang.find_executable(sys.executable) == sys.executable
+
+
+def test_find_executable_on_path():
+ assert parse_shebang.find_executable('echo') == _echo_exe()
+
+
+def test_find_executable_not_found_none():
+ assert parse_shebang.find_executable('not-a-real-executable') is None
+
+
+def write_executable(shebang, filename='run'):
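+ # write bin/<filename> with the given shebang line and mark it executable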
+ os.mkdir('bin')
+ path = os.path.join('bin', filename)
+ with open(path, 'w') as f:
+ f.write(f'#!{shebang}')
+ make_executable(path)
+ return path
+
+
+@contextlib.contextmanager
+def bin_on_path():
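+ # temporarily prepend the local ./bin directory to PATH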
+ bindir = os.path.join(os.getcwd(), 'bin')
+ with envcontext((('PATH', (bindir, os.pathsep, Var('PATH'))),)):
+ yield
+
+
+def test_find_executable_path_added(in_tmpdir):
+ path = os.path.abspath(write_executable('/usr/bin/env sh'))
+ assert parse_shebang.find_executable('run') is None
+ with bin_on_path():
+ assert parse_shebang.find_executable('run') == path
+
+
+def test_find_executable_path_ext(in_tmpdir):
+ """Windows exports PATHEXT as a list of extensions to automatically add
+ to executables when doing PATH searching.
+ """
+ exe_path = os.path.abspath(
+ write_executable('/usr/bin/env sh', filename='run.myext'),
+ )
+ env_path = {'PATH': os.path.dirname(exe_path)}
+ env_path_ext = dict(env_path, PATHEXT=os.pathsep.join(('.exe', '.myext')))
+ assert parse_shebang.find_executable('run') is None
+ assert parse_shebang.find_executable('run', _environ=env_path) is None
+ ret = parse_shebang.find_executable('run.myext', _environ=env_path)
+ assert ret == exe_path
+ ret = parse_shebang.find_executable('run', _environ=env_path_ext)
+ assert ret == exe_path
+
+
+def test_normexe_does_not_exist():
+ with pytest.raises(OSError) as excinfo:
+ parse_shebang.normexe('i-dont-exist-lol')
+ assert excinfo.value.args == ('Executable `i-dont-exist-lol` not found',)
+
+
+def test_normexe_does_not_exist_sep():
+ with pytest.raises(OSError) as excinfo:
+ parse_shebang.normexe('./i-dont-exist-lol')
+ assert excinfo.value.args == ('Executable `./i-dont-exist-lol` not found',)
+
+
+@pytest.mark.xfail(os.name == 'nt', reason='posix only')
+def test_normexe_not_executable(tmpdir): # pragma: win32 no cover
+ tmpdir.join('exe').ensure()
+ with tmpdir.as_cwd(), pytest.raises(OSError) as excinfo:
+ parse_shebang.normexe('./exe')
+ assert excinfo.value.args == ('Executable `./exe` is not executable',)
+
+
+def test_normexe_is_a_directory(tmpdir):
+ with tmpdir.as_cwd():
+ tmpdir.join('exe').ensure_dir()
+ exe = os.path.join('.', 'exe')
+ with pytest.raises(OSError) as excinfo:
+ parse_shebang.normexe(exe)
+ msg, = excinfo.value.args
+ assert msg == f'Executable `{exe}` is a directory'
+
+
+def test_normexe_already_full_path():
+ assert parse_shebang.normexe(sys.executable) == sys.executable
+
+
+def test_normexe_gives_full_path():
+ assert parse_shebang.normexe('echo') == _echo_exe()
+ assert os.sep in _echo_exe()
+
+
+def test_normalize_cmd_trivial():
+ cmd = (_echo_exe(), 'hi')
+ assert parse_shebang.normalize_cmd(cmd) == cmd
+
+
+def test_normalize_cmd_PATH():
+ cmd = ('echo', '--version')
+ expected = (_echo_exe(), '--version')
+ assert parse_shebang.normalize_cmd(cmd) == expected
+
+
+def test_normalize_cmd_shebang(in_tmpdir):
+ echo = _echo_exe().replace(os.sep, '/')
+ path = write_executable(echo)
+ assert parse_shebang.normalize_cmd((path,)) == (echo, path)
+
+
+def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir):
+ echo = _echo_exe().replace(os.sep, '/')
+ path = write_executable(echo)
+ with bin_on_path():
+ ret = parse_shebang.normalize_cmd(('run',))
+ assert ret == (echo, os.path.abspath(path))
+
+
+def test_normalize_cmd_PATH_shebang_PATH(in_tmpdir):
+ echo = _echo_exe()
+ path = write_executable('/usr/bin/env echo')
+ with bin_on_path():
+ ret = parse_shebang.normalize_cmd(('run',))
+ assert ret == (echo, os.path.abspath(path))
diff --git a/tests/prefix_test.py b/tests/prefix_test.py
new file mode 100644
index 0000000..6ce8be1
--- /dev/null
+++ b/tests/prefix_test.py
@@ -0,0 +1,44 @@
+import os.path
+
+import pytest
+
+from pre_commit.prefix import Prefix
+
+
+def norm_slash(*args):
+ return tuple(x.replace('/', os.sep) for x in args)
+
+
+@pytest.mark.parametrize(
+ ('prefix', 'path_end', 'expected_output'),
+ (
+ norm_slash('foo', '', 'foo'),
+ norm_slash('foo', 'bar', 'foo/bar'),
+ norm_slash('foo/bar', '../baz', 'foo/baz'),
+ norm_slash('./', 'bar', 'bar'),
+ norm_slash('./', '', '.'),
+ norm_slash('/tmp/foo', '/tmp/bar', '/tmp/bar'),
+ ),
+)
+def test_path(prefix, path_end, expected_output):
+ instance = Prefix(prefix)
+ ret = instance.path(path_end)
+ assert ret == expected_output
+
+
+def test_path_multiple_args():
+ instance = Prefix('foo')
+ ret = instance.path('bar', 'baz')
+ assert ret == os.path.join('foo', 'bar', 'baz')
+
+
+def test_exists(tmpdir):
+ assert not Prefix(str(tmpdir)).exists('foo')
+ tmpdir.ensure('foo')
+ assert Prefix(str(tmpdir)).exists('foo')
+
+
+def test_star(tmpdir):
+ for f in ('a.txt', 'b.txt', 'c.py'):
+ tmpdir.join(f).ensure()
+ assert set(Prefix(str(tmpdir)).star('.txt')) == {'a.txt', 'b.txt'}
diff --git a/tests/repository_test.py b/tests/repository_test.py
new file mode 100644
index 0000000..df7e7d1
--- /dev/null
+++ b/tests/repository_test.py
@@ -0,0 +1,902 @@
+import os.path
+import re
+import shutil
+import sys
+from typing import Any
+from typing import Dict
+from unittest import mock
+
+import cfgv
+import pytest
+
+import pre_commit.constants as C
+from pre_commit.clientlib import CONFIG_SCHEMA
+from pre_commit.clientlib import load_manifest
+from pre_commit.envcontext import envcontext
+from pre_commit.hook import Hook
+from pre_commit.languages import golang
+from pre_commit.languages import helpers
+from pre_commit.languages import node
+from pre_commit.languages import python
+from pre_commit.languages import ruby
+from pre_commit.languages import rust
+from pre_commit.languages.all import languages
+from pre_commit.prefix import Prefix
+from pre_commit.repository import all_hooks
+from pre_commit.repository import install_hook_envs
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+from testing.fixtures import make_config_from_repo
+from testing.fixtures import make_repo
+from testing.fixtures import modify_manifest
+from testing.util import cwd
+from testing.util import get_resource_path
+from testing.util import skipif_cant_run_docker
+from testing.util import skipif_cant_run_swift
+from testing.util import xfailif_no_venv
+from testing.util import xfailif_windows_no_ruby
+
+
+def _norm_out(b):
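+ # normalize Windows CRLF line endings for byte comparisons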
+ return b.replace(b'\r\n', b'\n')
+
+
+def _hook_run(hook, filenames, color):
+ return languages[hook.language].run_hook(hook, filenames, color)
+
+
+def _get_hook_no_install(repo_config, store, hook_id):
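+ # validate the repo config, apply defaults, and return the requested hook without installing its environment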
+ config = {'repos': [repo_config]}
+ config = cfgv.validate(config, CONFIG_SCHEMA)
+ config = cfgv.apply_defaults(config, CONFIG_SCHEMA)
+ hooks = all_hooks(config, store)
+ hook, = [hook for hook in hooks if hook.id == hook_id]
+ return hook
+
+
+def _get_hook(repo_config, store, hook_id):
+ hook = _get_hook_no_install(repo_config, store, hook_id)
+ install_hook_envs([hook], store)
+ return hook
+
+
+def _test_hook_repo(
+ tempdir_factory,
+ store,
+ repo_path,
+ hook_id,
+ args,
+ expected,
+ expected_return_code=0,
+ config_kwargs=None,
+ color=False,
+):
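+ # build a config from the named fixture repo, install the hook, run it, and compare output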
+ path = make_repo(tempdir_factory, repo_path)
+ config = make_config_from_repo(path, **(config_kwargs or {}))
+ hook = _get_hook(config, store, hook_id)
+ ret, out = _hook_run(hook, args, color=color)
+ assert ret == expected_return_code
+ assert _norm_out(out) == expected
+
+
+def test_conda_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'conda_hooks_repo',
+ 'sys-exec', [os.devnull],
+ b'conda-default\n',
+ )
+
+
+def test_conda_with_additional_dependencies_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'conda_hooks_repo',
+ 'additional-deps', [os.devnull],
+ b'OK\n',
+ config_kwargs={
+ 'hooks': [{
+ 'id': 'additional-deps',
+ 'args': ['-c', 'import mccabe; print("OK")'],
+ 'additional_dependencies': ['mccabe'],
+ }],
+ },
+ )
+
+
+def test_local_conda_additional_dependencies(store):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'local-conda',
+ 'name': 'local-conda',
+ 'entry': 'python',
+ 'language': 'conda',
+ 'args': ['-c', 'import mccabe; print("OK")'],
+ 'additional_dependencies': ['mccabe'],
+ }],
+ }
+ hook = _get_hook(config, store, 'local-conda')
+ ret, out = _hook_run(hook, (), color=False)
+ assert ret == 0
+ assert _norm_out(out) == b'OK\n'
+
+
+def test_python_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'python_hooks_repo',
+ 'foo', [os.devnull],
+ f'[{os.devnull!r}]\nHello World\n'.encode(),
+ )
+
+
+def test_python_hook_default_version(tempdir_factory, store):
+ # make sure that this continues to work for platforms where default
+ # language detection does not work
+ with mock.patch.object(
+ python, 'get_default_version', return_value=C.DEFAULT,
+ ):
+ test_python_hook(tempdir_factory, store)
+
+
+def test_python_hook_args_with_spaces(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'python_hooks_repo',
+ 'foo',
+ [],
+ b"['i have spaces', 'and\"\\'quotes', '$and !this']\n"
+ b'Hello World\n',
+ config_kwargs={
+ 'hooks': [{
+ 'id': 'foo',
+ 'args': ['i have spaces', 'and"\'quotes', '$and !this'],
+ }],
+ },
+ )
+
+
+def test_python_hook_weird_setup_cfg(in_git_dir, tempdir_factory, store):
+ in_git_dir.join('setup.cfg').write('[install]\ninstall_scripts=/usr/sbin')
+
+ _test_hook_repo(
+ tempdir_factory, store, 'python_hooks_repo',
+ 'foo', [os.devnull],
+ f'[{os.devnull!r}]\nHello World\n'.encode(),
+ )
+
+
+@xfailif_no_venv
+def test_python_venv(tempdir_factory, store): # pragma: no cover (no venv)
+ _test_hook_repo(
+ tempdir_factory, store, 'python_venv_hooks_repo',
+ 'foo', [os.devnull],
+ f'[{os.devnull!r}]\nHello World\n'.encode(),
+ )
+
+
+def test_switch_language_versions_doesnt_clobber(tempdir_factory, store):
+ # We're using the python3 repo because it prints the python version
+ path = make_repo(tempdir_factory, 'python3_hooks_repo')
+
+ def run_on_version(version, expected_output):
+ config = make_config_from_repo(path)
+ config['hooks'][0]['language_version'] = version
+ hook = _get_hook(config, store, 'python3-hook')
+ ret, out = _hook_run(hook, [], color=False)
+ assert ret == 0
+ assert _norm_out(out) == expected_output
+
+ run_on_version('python2', b'2\n[]\nHello World\n')
+ run_on_version('python3', b'3\n[]\nHello World\n')
+
+
+def test_versioned_python_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'python3_hooks_repo',
+ 'python3-hook',
+ [os.devnull],
+ f'3\n[{os.devnull!r}]\nHello World\n'.encode(),
+ )
+
+
+@skipif_cant_run_docker # pragma: win32 no cover
+def test_run_a_docker_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'docker_hooks_repo',
+ 'docker-hook',
+ ['Hello World from docker'], b'Hello World from docker\n',
+ )
+
+
+@skipif_cant_run_docker # pragma: win32 no cover
+def test_run_a_docker_hook_with_entry_args(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'docker_hooks_repo',
+ 'docker-hook-arg',
+ ['Hello World from docker'], b'Hello World from docker',
+ )
+
+
+@skipif_cant_run_docker # pragma: win32 no cover
+def test_run_a_failing_docker_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'docker_hooks_repo',
+ 'docker-hook-failing',
+ ['Hello World from docker'],
+ mock.ANY, # an error message about `bork` not existing
+ expected_return_code=127,
+ )
+
+
+@skipif_cant_run_docker # pragma: win32 no cover
+@pytest.mark.parametrize('hook_id', ('echo-entrypoint', 'echo-cmd'))
+def test_run_a_docker_image_hook(tempdir_factory, store, hook_id):
+ _test_hook_repo(
+ tempdir_factory, store, 'docker_image_hooks_repo',
+ hook_id,
+ ['Hello World from docker'], b'Hello World from docker\n',
+ )
+
+
+def test_run_a_node_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'node_hooks_repo',
+ 'foo', [os.devnull], b'Hello World\n',
+ )
+
+
+def test_run_versioned_node_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'node_versioned_hooks_repo',
+ 'versioned-node-hook', [os.devnull], b'v9.3.0\nHello World\n',
+ )
+
+
+@xfailif_windows_no_ruby
+def test_run_a_ruby_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'ruby_hooks_repo',
+ 'ruby_hook', [os.devnull], b'Hello world from a ruby hook\n',
+ )
+
+
+@xfailif_windows_no_ruby
+def test_run_versioned_ruby_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'ruby_versioned_hooks_repo',
+ 'ruby_hook',
+ [os.devnull],
+ b'2.5.1\nHello world from a ruby hook\n',
+ )
+
+
+@xfailif_windows_no_ruby
+def test_run_ruby_hook_with_disable_shared_gems(
+ tempdir_factory,
+ store,
+ tmpdir,
+):
+ """Make sure a Gemfile in the project doesn't interfere."""
+ tmpdir.join('Gemfile').write('gem "lol_hai"')
+ tmpdir.join('.bundle').mkdir()
+ tmpdir.join('.bundle', 'config').write(
+ 'BUNDLE_DISABLE_SHARED_GEMS: true\n'
+ 'BUNDLE_PATH: vendor/gem\n',
+ )
+ with cwd(tmpdir.strpath):
+ _test_hook_repo(
+ tempdir_factory, store, 'ruby_versioned_hooks_repo',
+ 'ruby_hook',
+ [os.devnull],
+ b'2.5.1\nHello world from a ruby hook\n',
+ )
+
+
+def test_system_hook_with_spaces(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'system_hook_with_spaces_repo',
+ 'system-hook-with-spaces', [os.devnull], b'Hello World\n',
+ )
+
+
+@skipif_cant_run_swift # pragma: win32 no cover
+def test_swift_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'swift_hooks_repo',
+ 'swift-hooks-repo', [], b'Hello, world!\n',
+ )
+
+
+def test_golang_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'golang_hooks_repo',
+ 'golang-hook', [], b'hello world\n',
+ )
+
+
+def test_golang_hook_still_works_when_gobin_is_set(tempdir_factory, store):
+ gobin_dir = tempdir_factory.get()
+ with envcontext((('GOBIN', gobin_dir),)):
+ test_golang_hook(tempdir_factory, store)
+ assert os.listdir(gobin_dir) == []
+
+
+def test_rust_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'rust_hooks_repo',
+ 'rust-hook', [], b'hello world\n',
+ )
+
+
+@pytest.mark.parametrize('dep', ('cli:shellharden:3.1.0', 'cli:shellharden'))
+def test_additional_rust_cli_dependencies_installed(
+ tempdir_factory, store, dep,
+):
+ path = make_repo(tempdir_factory, 'rust_hooks_repo')
+ config = make_config_from_repo(path)
+ # A small rust package with no dependencies.
+ config['hooks'][0]['additional_dependencies'] = [dep]
+ hook = _get_hook(config, store, 'rust-hook')
+ binaries = os.listdir(
+ hook.prefix.path(
+ helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
+ ),
+ )
+ # normalize for windows
+ binaries = [os.path.splitext(binary)[0] for binary in binaries]
+ assert 'shellharden' in binaries
+
+
+def test_additional_rust_lib_dependencies_installed(
+ tempdir_factory, store,
+):
+ path = make_repo(tempdir_factory, 'rust_hooks_repo')
+ config = make_config_from_repo(path)
+ # A small rust package with no dependencies.
+ deps = ['shellharden:3.1.0']
+ config['hooks'][0]['additional_dependencies'] = deps
+ hook = _get_hook(config, store, 'rust-hook')
+ binaries = os.listdir(
+ hook.prefix.path(
+ helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
+ ),
+ )
+ # normalize for windows
+ binaries = [os.path.splitext(binary)[0] for binary in binaries]
+ assert 'rust-hello-world' in binaries
+ assert 'shellharden' not in binaries
+
+
+def test_missing_executable(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'not_found_exe',
+ 'not-found-exe', [os.devnull],
+ b'Executable `i-dont-exist-lol` not found',
+ expected_return_code=1,
+ )
+
+
+def test_run_a_script_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'script_hooks_repo',
+ 'bash_hook', ['bar'], b'bar\nHello World\n',
+ )
+
+
+def test_run_hook_with_spaced_args(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'arg_per_line_hooks_repo',
+ 'arg-per-line',
+ ['foo bar', 'baz'],
+ b'arg: hello\narg: world\narg: foo bar\narg: baz\n',
+ )
+
+
+def test_run_hook_with_curly_braced_arguments(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'arg_per_line_hooks_repo',
+ 'arg-per-line',
+ [],
+ b"arg: hi {1}\narg: I'm {a} problem\n",
+ config_kwargs={
+ 'hooks': [{
+ 'id': 'arg-per-line',
+ 'args': ['hi {1}', "I'm {a} problem"],
+ }],
+ },
+ )
+
+
+def test_intermixed_stdout_stderr(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'stdout_stderr_repo',
+ 'stdout-stderr',
+ [],
+ b'0\n1\n2\n3\n4\n5\n',
+ )
+
+
+@pytest.mark.xfail(os.name == 'nt', reason='ptys are posix-only')
+def test_output_isatty(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'stdout_stderr_repo',
+ 'tty-check',
+ [],
+ b'stdin: False\nstdout: True\nstderr: True\n',
+ color=True,
+ )
+
+
+def _make_grep_repo(entry, store, args=()):
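+ # construct a local pygrep hook with the given pattern and args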
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'grep-hook',
+ 'name': 'grep-hook',
+ 'language': 'pygrep',
+ 'entry': entry,
+ 'args': args,
+ 'types': ['text'],
+ }],
+ }
+ return _get_hook(config, store, 'grep-hook')
+
+
+@pytest.fixture
+def greppable_files(tmpdir):
+ with tmpdir.as_cwd():
+ cmd_output_b('git', 'init', '.')
+ tmpdir.join('f1').write_binary(b"hello'hi\nworld\n")
+ tmpdir.join('f2').write_binary(b'foo\nbar\nbaz\n')
+ tmpdir.join('f3').write_binary(b'[WARN] hi\n')
+ yield tmpdir
+
+
+def test_grep_hook_matching(greppable_files, store):
+ hook = _make_grep_repo('ello', store)
+ ret, out = _hook_run(hook, ('f1', 'f2', 'f3'), color=False)
+ assert ret == 1
+ assert _norm_out(out) == b"f1:1:hello'hi\n"
+
+
+def test_grep_hook_case_insensitive(greppable_files, store):
+ hook = _make_grep_repo('ELLO', store, args=['-i'])
+ ret, out = _hook_run(hook, ('f1', 'f2', 'f3'), color=False)
+ assert ret == 1
+ assert _norm_out(out) == b"f1:1:hello'hi\n"
+
+
+@pytest.mark.parametrize('regex', ('nope', "foo'bar", r'^\[INFO\]'))
+def test_grep_hook_not_matching(regex, greppable_files, store):
+ hook = _make_grep_repo(regex, store)
+ ret, out = _hook_run(hook, ('f1', 'f2', 'f3'), color=False)
+ assert (ret, out) == (0, b'')
+
+
+def _norm_pwd(path):
+ # Under Windows, bash's temp directory and the Windows temp directory differ.
+ # This normalizes the path to bash's /tmp
+ return cmd_output_b(
+ 'bash', '-c', f"cd '{path}' && pwd",
+ )[1].strip()
+
+
+def test_cwd_of_hook(in_git_dir, tempdir_factory, store):
+ # Note: this doubles as a test for `system` hooks
+ _test_hook_repo(
+ tempdir_factory, store, 'prints_cwd_repo',
+ 'prints_cwd', ['-L'], _norm_pwd(in_git_dir.strpath) + b'\n',
+ )
+
+
+def test_lots_of_files(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'script_hooks_repo',
+ 'bash_hook', [os.devnull] * 15000, mock.ANY,
+ )
+
+
+def test_additional_dependencies_roll_forward(tempdir_factory, store):
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+
+ config1 = make_config_from_repo(path)
+ hook1 = _get_hook(config1, store, 'foo')
+ with python.in_env(hook1.prefix, hook1.language_version):
+ assert 'mccabe' not in cmd_output('pip', 'freeze', '-l')[1]
+
+ # Make another repo with additional dependencies
+ config2 = make_config_from_repo(path)
+ config2['hooks'][0]['additional_dependencies'] = ['mccabe']
+ hook2 = _get_hook(config2, store, 'foo')
+ with python.in_env(hook2.prefix, hook2.language_version):
+ assert 'mccabe' in cmd_output('pip', 'freeze', '-l')[1]
+
+ # should not have affected original
+ with python.in_env(hook1.prefix, hook1.language_version):
+ assert 'mccabe' not in cmd_output('pip', 'freeze', '-l')[1]
+
+
+@xfailif_windows_no_ruby # pragma: win32 no cover
+def test_additional_ruby_dependencies_installed(tempdir_factory, store):
+ path = make_repo(tempdir_factory, 'ruby_hooks_repo')
+ config = make_config_from_repo(path)
+ config['hooks'][0]['additional_dependencies'] = ['tins']
+ hook = _get_hook(config, store, 'ruby_hook')
+ with ruby.in_env(hook.prefix, hook.language_version):
+ output = cmd_output('gem', 'list', '--local')[1]
+ assert 'tins' in output
+
+
+def test_additional_node_dependencies_installed(tempdir_factory, store):
+ path = make_repo(tempdir_factory, 'node_hooks_repo')
+ config = make_config_from_repo(path)
+ # Carefully choose a small package that npm itself does not depend on
+ config['hooks'][0]['additional_dependencies'] = ['lodash']
+ hook = _get_hook(config, store, 'foo')
+ with node.in_env(hook.prefix, hook.language_version):
+ output = cmd_output('npm', 'ls', '-g')[1]
+ assert 'lodash' in output
+
+
+def test_additional_golang_dependencies_installed(
+ tempdir_factory, store,
+):
+ path = make_repo(tempdir_factory, 'golang_hooks_repo')
+ config = make_config_from_repo(path)
+ # A small go package
+ deps = ['github.com/golang/example/hello']
+ config['hooks'][0]['additional_dependencies'] = deps
+ hook = _get_hook(config, store, 'golang-hook')
+ binaries = os.listdir(
+ hook.prefix.path(
+ helpers.environment_dir(golang.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
+ ),
+ )
+ # normalize for windows
+ binaries = [os.path.splitext(binary)[0] for binary in binaries]
+ assert 'hello' in binaries
+
+
+def test_local_golang_additional_dependencies(store):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'hello',
+ 'name': 'hello',
+ 'entry': 'hello',
+ 'language': 'golang',
+ 'additional_dependencies': ['github.com/golang/example/hello'],
+ }],
+ }
+ hook = _get_hook(config, store, 'hello')
+ ret, out = _hook_run(hook, (), color=False)
+ assert ret == 0
+ assert _norm_out(out) == b'Hello, Go examples!\n'
+
+
+def test_local_rust_additional_dependencies(store):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'hello',
+ 'name': 'hello',
+ 'entry': 'hello',
+ 'language': 'rust',
+ 'additional_dependencies': ['cli:hello-cli:0.2.2'],
+ }],
+ }
+ hook = _get_hook(config, store, 'hello')
+ ret, out = _hook_run(hook, (), color=False)
+ assert ret == 0
+ assert _norm_out(out) == b'Hello World!\n'
+
+
+def test_fail_hooks(store):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'fail',
+ 'name': 'fail',
+ 'language': 'fail',
+ 'entry': 'make sure to name changelogs as .rst!',
+ 'files': r'changelog/.*(?<!\.rst)$',
+ }],
+ }
+ hook = _get_hook(config, store, 'fail')
+ ret, out = _hook_run(
+ hook, ('changelog/123.bugfix', 'changelog/wat'), color=False,
+ )
+ assert ret == 1
+ assert out == (
+ b'make sure to name changelogs as .rst!\n'
+ b'\n'
+ b'changelog/123.bugfix\n'
+ b'changelog/wat\n'
+ )
+
+
+def test_unknown_keys(store, fake_log_handler):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'too-much',
+ 'name': 'too much',
+ 'hello': 'world',
+ 'foo': 'bar',
+ 'language': 'system',
+ 'entry': 'true',
+ }],
+ }
+ _get_hook(config, store, 'too-much')
+ expected = 'Unexpected key(s) present on local => too-much: foo, hello'
+ assert fake_log_handler.handle.call_args[0][0].msg == expected
+
+
+def test_reinstall(tempdir_factory, store, log_info_mock):
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+ config = make_config_from_repo(path)
+ _get_hook(config, store, 'foo')
+ # We print some logging during clone (1) + install (3)
+ assert log_info_mock.call_count == 4
+ log_info_mock.reset_mock()
+ # Reinstall on another run should not trigger another install
+ _get_hook(config, store, 'foo')
+ assert log_info_mock.call_count == 0
+
+
+def test_control_c_control_c_on_install(tempdir_factory, store):
+ """Regression test for #186."""
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+ config = make_config_from_repo(path)
+ hooks = [_get_hook_no_install(config, store, 'foo')]
+
+ class MyKeyboardInterrupt(KeyboardInterrupt):
+ pass
+
+ # To simulate a killed install, we'll make the setup command raise ^C
+ # and then to simulate a second ^C during cleanup, we'll make shutil.rmtree
+ # raise as well.
+ with pytest.raises(MyKeyboardInterrupt):
+ with mock.patch.object(
+ helpers, 'run_setup_cmd', side_effect=MyKeyboardInterrupt,
+ ):
+ with mock.patch.object(
+ shutil, 'rmtree', side_effect=MyKeyboardInterrupt,
+ ):
+ install_hook_envs(hooks, store)
+
+ # Should have made an environment; however, this environment is broken!
+ hook, = hooks
+ assert hook.prefix.exists(
+ helpers.environment_dir(python.ENVIRONMENT_DIR, hook.language_version),
+ )
+
+ # However, it should be perfectly runnable (reinstall after botched
+ # install)
+ install_hook_envs(hooks, store)
+ ret, out = _hook_run(hook, (), color=False)
+ assert ret == 0
+
+
+def test_invalidated_virtualenv(tempdir_factory, store):
+ # A cached virtualenv may become invalidated if the system python upgrades.
+ # This should not cause every hook in that virtualenv to fail.
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+ config = make_config_from_repo(path)
+ hook = _get_hook(config, store, 'foo')
+
+ # Simulate breaking of the virtualenv
+ libdir = hook.prefix.path(
+ helpers.environment_dir(python.ENVIRONMENT_DIR, hook.language_version),
+ 'lib', hook.language_version,
+ )
+ paths = [
+ os.path.join(libdir, p) for p in ('site.py', 'site.pyc', '__pycache__')
+ ]
+ cmd_output_b('rm', '-rf', *paths)
+
+ # pre-commit should rebuild the virtualenv and it should be runnable
+ hook = _get_hook(config, store, 'foo')
+ ret, out = _hook_run(hook, (), color=False)
+ assert ret == 0
+
+
+def test_really_long_file_paths(tempdir_factory, store):
+ base_path = tempdir_factory.get()
+ really_long_path = os.path.join(base_path, 'really_long' * 10)
+ cmd_output_b('git', 'init', really_long_path)
+
+ path = make_repo(tempdir_factory, 'python_hooks_repo')
+ config = make_config_from_repo(path)
+
+ with cwd(really_long_path):
+ _get_hook(config, store, 'foo')
+
+
+def test_config_overrides_repo_specifics(tempdir_factory, store):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ config = make_config_from_repo(path)
+
+ hook = _get_hook(config, store, 'bash_hook')
+ assert hook.files == ''
+ # Set the file regex to something else
+ config['hooks'][0]['files'] = '\\.sh$'
+ hook = _get_hook(config, store, 'bash_hook')
+ assert hook.files == '\\.sh$'
+
+
+def _create_repo_with_tags(tempdir_factory, src, tag):
+ path = make_repo(tempdir_factory, src)
+ cmd_output_b('git', 'tag', tag, cwd=path)
+ return path
+
+
+def test_tags_on_repositories(in_tmpdir, tempdir_factory, store):
+ tag = 'v1.1'
+ git1 = _create_repo_with_tags(tempdir_factory, 'prints_cwd_repo', tag)
+ git2 = _create_repo_with_tags(tempdir_factory, 'script_hooks_repo', tag)
+
+ config1 = make_config_from_repo(git1, rev=tag)
+ hook1 = _get_hook(config1, store, 'prints_cwd')
+ ret1, out1 = _hook_run(hook1, ('-L',), color=False)
+ assert ret1 == 0
+ assert out1.strip() == _norm_pwd(in_tmpdir)
+
+ config2 = make_config_from_repo(git2, rev=tag)
+ hook2 = _get_hook(config2, store, 'bash_hook')
+ ret2, out2 = _hook_run(hook2, ('bar',), color=False)
+ assert ret2 == 0
+ assert out2 == b'bar\nHello World\n'
+
+
+@pytest.fixture
+def local_python_config():
+ # Make a "local" hooks repo that just installs our other hooks repo
+ repo_path = get_resource_path('python_hooks_repo')
+ manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
+ hooks = [
+ dict(hook, additional_dependencies=[repo_path]) for hook in manifest
+ ]
+ return {'repo': 'local', 'hooks': hooks}
+
+
+@pytest.mark.xfail( # pragma: win32 no cover
+ sys.platform == 'win32',
+ reason='microsoft/azure-pipelines-image-generation#989',
+)
+def test_local_python_repo(store, local_python_config):
+ hook = _get_hook(local_python_config, store, 'foo')
+ # language_version should have been adjusted to the interpreter version
+ assert hook.language_version != C.DEFAULT
+ ret, out = _hook_run(hook, ('filename',), color=False)
+ assert ret == 0
+ assert _norm_out(out) == b"['filename']\nHello World\n"
+
+
+def test_default_language_version(store, local_python_config):
+ config: Dict[str, Any] = {
+ 'default_language_version': {'python': 'fake'},
+ 'default_stages': ['commit'],
+ 'repos': [local_python_config],
+ }
+
+ # `language_version` was not set, should default
+ hook, = all_hooks(config, store)
+ assert hook.language_version == 'fake'
+
+ # `language_version` is set, should not default
+ config['repos'][0]['hooks'][0]['language_version'] = 'fake2'
+ hook, = all_hooks(config, store)
+ assert hook.language_version == 'fake2'
+
+
+def test_default_stages(store, local_python_config):
+ config: Dict[str, Any] = {
+ 'default_language_version': {'python': C.DEFAULT},
+ 'default_stages': ['commit'],
+ 'repos': [local_python_config],
+ }
+
+ # `stages` was not set, should default
+ hook, = all_hooks(config, store)
+ assert hook.stages == ['commit']
+
+ # `stages` is set, should not default
+ config['repos'][0]['hooks'][0]['stages'] = ['push']
+ hook, = all_hooks(config, store)
+ assert hook.stages == ['push']
+
+
+def test_hook_id_not_present(tempdir_factory, store, fake_log_handler):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ config = make_config_from_repo(path)
+ config['hooks'][0]['id'] = 'i-dont-exist'
+ with pytest.raises(SystemExit):
+ _get_hook(config, store, 'i-dont-exist')
+ assert fake_log_handler.handle.call_args[0][0].msg == (
+ f'`i-dont-exist` is not present in repository file://{path}. '
+ f'Typo? Perhaps it is introduced in a newer version? '
+ f'Often `pre-commit autoupdate` fixes this.'
+ )
+
+
+def test_too_new_version(tempdir_factory, store, fake_log_handler):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ with modify_manifest(path) as manifest:
+ manifest[0]['minimum_pre_commit_version'] = '999.0.0'
+ config = make_config_from_repo(path)
+ with pytest.raises(SystemExit):
+ _get_hook(config, store, 'bash_hook')
+ msg = fake_log_handler.handle.call_args[0][0].msg
+ assert re.match(
+ r'^The hook `bash_hook` requires pre-commit version 999\.0\.0 but '
+ r'version \d+\.\d+\.\d+ is installed. '
+ r'Perhaps run `pip install --upgrade pre-commit`\.$',
+ msg,
+ )
+
+
+@pytest.mark.parametrize('version', ('0.1.0', C.VERSION))
+def test_versions_ok(tempdir_factory, store, version):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ with modify_manifest(path) as manifest:
+ manifest[0]['minimum_pre_commit_version'] = version
+ config = make_config_from_repo(path)
+ # Should succeed
+ _get_hook(config, store, 'bash_hook')
+
+
+def test_manifest_hooks(tempdir_factory, store):
+ path = make_repo(tempdir_factory, 'script_hooks_repo')
+ config = make_config_from_repo(path)
+ hook = _get_hook(config, store, 'bash_hook')
+
+ assert hook == Hook(
+ src=f'file://{path}',
+ prefix=Prefix(mock.ANY),
+ additional_dependencies=[],
+ alias='',
+ always_run=False,
+ args=[],
+ description='',
+ entry='bin/hook.sh',
+ exclude='^$',
+ exclude_types=[],
+ files='',
+ id='bash_hook',
+ language='script',
+ language_version='default',
+ log_file='',
+ minimum_pre_commit_version='0',
+ name='Bash hook',
+ pass_filenames=True,
+ require_serial=False,
+ stages=(
+ 'commit', 'merge-commit', 'prepare-commit-msg', 'commit-msg',
+ 'manual', 'post-checkout', 'push',
+ ),
+ types=['file'],
+ verbose=False,
+ )
+
+
+def test_perl_hook(tempdir_factory, store):
+ _test_hook_repo(
+ tempdir_factory, store, 'perl_hooks_repo',
+ 'perl-hook', [], b'Hello from perl-commit Perl!\n',
+ )
+
+
+def test_local_perl_additional_dependencies(store):
+ config = {
+ 'repo': 'local',
+ 'hooks': [{
+ 'id': 'hello',
+ 'name': 'hello',
+ 'entry': 'perltidy --version',
+ 'language': 'perl',
+ 'additional_dependencies': ['SHANCOCK/Perl-Tidy-20200110.tar.gz'],
+ }],
+ }
+ hook = _get_hook(config, store, 'hello')
+ ret, out = _hook_run(hook, (), color=False)
+ assert ret == 0
+ assert _norm_out(out).startswith(b'This is perltidy, v20200110')
diff --git a/tests/staged_files_only_test.py b/tests/staged_files_only_test.py
new file mode 100644
index 0000000..ddb9574
--- /dev/null
+++ b/tests/staged_files_only_test.py
@@ -0,0 +1,349 @@
+import itertools
+import os.path
+import shutil
+
+import pytest
+
+from pre_commit import git
+from pre_commit.staged_files_only import staged_files_only
+from pre_commit.util import cmd_output
+from testing.auto_namedtuple import auto_namedtuple
+from testing.fixtures import git_dir
+from testing.util import cwd
+from testing.util import get_resource_path
+from testing.util import git_commit
+
+
+FOO_CONTENTS = '\n'.join(('1', '2', '3', '4', '5', '6', '7', '8', ''))
+
+
+@pytest.fixture
+def patch_dir(tempdir_factory):
+ return tempdir_factory.get()
+
+
+def get_short_git_status():
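+ # map filename -> short status code parsed from `git status -s`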
+ git_status = cmd_output('git', 'status', '-s')[1]
+ line_parts = [line.split() for line in git_status.splitlines()]
+ return {v: k for k, v in line_parts}
+
+
+@pytest.fixture
+def foo_staged(in_git_dir):
+ foo = in_git_dir.join('foo')
+ foo.write(FOO_CONTENTS)
+ cmd_output('git', 'add', 'foo')
+ yield auto_namedtuple(path=in_git_dir.strpath, foo_filename=foo.strpath)
+
+
+def _test_foo_state(
+ path,
+ foo_contents=FOO_CONTENTS,
+ status='A',
+ encoding='UTF-8',
+):
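+ # assert foo exists with the expected contents, encoding, and git status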
+ assert os.path.exists(path.foo_filename)
+ with open(path.foo_filename, encoding=encoding) as f:
+ assert f.read() == foo_contents
+ actual_status = get_short_git_status()['foo']
+ assert status == actual_status
+
+
+def test_foo_staged(foo_staged):
+ _test_foo_state(foo_staged)
+
+
+def test_foo_nothing_unstaged(foo_staged, patch_dir):
+ with staged_files_only(patch_dir):
+ _test_foo_state(foo_staged)
+ _test_foo_state(foo_staged)
+
+
+def test_foo_something_unstaged(foo_staged, patch_dir):
+ with open(foo_staged.foo_filename, 'w') as foo_file:
+ foo_file.write('herp\nderp\n')
+
+ _test_foo_state(foo_staged, 'herp\nderp\n', 'AM')
+
+ with staged_files_only(patch_dir):
+ _test_foo_state(foo_staged)
+
+ _test_foo_state(foo_staged, 'herp\nderp\n', 'AM')
+
+
+def test_does_not_crash_patch_dir_does_not_exist(foo_staged, patch_dir):
+ with open(foo_staged.foo_filename, 'w') as foo_file:
+ foo_file.write('hello\nworld\n')
+
+ shutil.rmtree(patch_dir)
+ with staged_files_only(patch_dir):
+ pass
+
+
+def test_something_unstaged_ext_diff_tool(foo_staged, patch_dir, tmpdir):
+ diff_tool = tmpdir.join('diff-tool.sh')
+ diff_tool.write('#!/usr/bin/env bash\necho "$@"\n')
+ cmd_output('git', 'config', 'diff.external', diff_tool.strpath)
+ test_foo_something_unstaged(foo_staged, patch_dir)
+
+
+def test_foo_something_unstaged_diff_color_always(foo_staged, patch_dir):
+ cmd_output('git', 'config', '--local', 'color.diff', 'always')
+ test_foo_something_unstaged(foo_staged, patch_dir)
+
+
+def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):
+ with open(foo_staged.foo_filename, 'w') as foo_file:
+ foo_file.write(f'{FOO_CONTENTS}9\n')
+
+ _test_foo_state(foo_staged, f'{FOO_CONTENTS}9\n', 'AM')
+
+ with staged_files_only(patch_dir):
+ _test_foo_state(foo_staged)
+
+ # Modify the file as part of the "pre-commit"
+ with open(foo_staged.foo_filename, 'w') as foo_file:
+ foo_file.write(FOO_CONTENTS.replace('1', 'a'))
+
+ _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
+
+ _test_foo_state(foo_staged, f'{FOO_CONTENTS.replace("1", "a")}9\n', 'AM')
+
+
+def test_foo_both_modify_conflicting(foo_staged, patch_dir):
+ with open(foo_staged.foo_filename, 'w') as foo_file:
+ foo_file.write(FOO_CONTENTS.replace('1', 'a'))
+
+ _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
+
+ with staged_files_only(patch_dir):
+ _test_foo_state(foo_staged)
+
+ # Modify in the same place as the stashed diff
+ with open(foo_staged.foo_filename, 'w') as foo_file:
+ foo_file.write(FOO_CONTENTS.replace('1', 'b'))
+
+ _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'b'), 'AM')
+
+ _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
+
+
+@pytest.fixture
+def img_staged(in_git_dir):
+ img = in_git_dir.join('img.jpg')
+ shutil.copy(get_resource_path('img1.jpg'), img.strpath)
+ cmd_output('git', 'add', 'img.jpg')
+ yield auto_namedtuple(path=in_git_dir.strpath, img_filename=img.strpath)
+
+
+def _test_img_state(path, expected_file='img1.jpg', status='A'):
+ assert os.path.exists(path.img_filename)
+ with open(path.img_filename, 'rb') as f1:
+ with open(get_resource_path(expected_file), 'rb') as f2:
+ assert f1.read() == f2.read()
+ actual_status = get_short_git_status()['img.jpg']
+ assert status == actual_status
+
+
+def test_img_staged(img_staged):
+ _test_img_state(img_staged)
+
+
+def test_img_nothing_unstaged(img_staged, patch_dir):
+ with staged_files_only(patch_dir):
+ _test_img_state(img_staged)
+ _test_img_state(img_staged)
+
+
+def test_img_something_unstaged(img_staged, patch_dir):
+ shutil.copy(get_resource_path('img2.jpg'), img_staged.img_filename)
+
+ _test_img_state(img_staged, 'img2.jpg', 'AM')
+
+ with staged_files_only(patch_dir):
+ _test_img_state(img_staged)
+
+ _test_img_state(img_staged, 'img2.jpg', 'AM')
+
+
+def test_img_conflict(img_staged, patch_dir):
+ """Admittedly, this shouldn't happen, but just in case."""
+ shutil.copy(get_resource_path('img2.jpg'), img_staged.img_filename)
+
+ _test_img_state(img_staged, 'img2.jpg', 'AM')
+
+ with staged_files_only(patch_dir):
+ _test_img_state(img_staged)
+ shutil.copy(get_resource_path('img3.jpg'), img_staged.img_filename)
+ _test_img_state(img_staged, 'img3.jpg', 'AM')
+
+ _test_img_state(img_staged, 'img2.jpg', 'AM')
+
+
+@pytest.fixture
+def submodule_with_commits(tempdir_factory):
+ path = git_dir(tempdir_factory)
+ with cwd(path):
+ git_commit()
+ rev1 = cmd_output('git', 'rev-parse', 'HEAD')[1].strip()
+ git_commit()
+ rev2 = cmd_output('git', 'rev-parse', 'HEAD')[1].strip()
+ yield auto_namedtuple(path=path, rev1=rev1, rev2=rev2)
+
+
+def checkout_submodule(rev):
+ cmd_output('git', 'checkout', rev, cwd='sub')
+
+
+@pytest.fixture
+def sub_staged(submodule_with_commits, tempdir_factory):
+ path = git_dir(tempdir_factory)
+ with cwd(path):
+ cmd_output(
+ 'git', 'submodule', 'add', submodule_with_commits.path, 'sub',
+ )
+ checkout_submodule(submodule_with_commits.rev1)
+ cmd_output('git', 'add', 'sub')
+ yield auto_namedtuple(
+ path=path,
+ sub_path=os.path.join(path, 'sub'),
+ submodule=submodule_with_commits,
+ )
+
+
+def _test_sub_state(path, rev='rev1', status='A'):
+ assert os.path.exists(path.sub_path)
+ with cwd(path.sub_path):
+ actual_rev = cmd_output('git', 'rev-parse', 'HEAD')[1].strip()
+ assert actual_rev == getattr(path.submodule, rev)
+ actual_status = get_short_git_status()['sub']
+ assert actual_status == status
+
+
+def test_sub_staged(sub_staged):
+ _test_sub_state(sub_staged)
+
+
+def test_sub_nothing_unstaged(sub_staged, patch_dir):
+ with staged_files_only(patch_dir):
+ _test_sub_state(sub_staged)
+ _test_sub_state(sub_staged)
+
+
+def test_sub_something_unstaged(sub_staged, patch_dir):
+ checkout_submodule(sub_staged.submodule.rev2)
+
+ _test_sub_state(sub_staged, 'rev2', 'AM')
+
+ with staged_files_only(patch_dir):
+ # This is different from the others: we don't want to touch submodules
+ _test_sub_state(sub_staged, 'rev2', 'AM')
+
+ _test_sub_state(sub_staged, 'rev2', 'AM')
+
+
+def test_stage_utf8_changes(foo_staged, patch_dir):
+ contents = '\u2603'
+ with open('foo', 'w', encoding='UTF-8') as foo_file:
+ foo_file.write(contents)
+
+ _test_foo_state(foo_staged, contents, 'AM')
+ with staged_files_only(patch_dir):
+ _test_foo_state(foo_staged)
+ _test_foo_state(foo_staged, contents, 'AM')
+
+
+def test_stage_non_utf8_changes(foo_staged, patch_dir):
+ contents = 'ΓΊ'
+ # Produce a latin-1 diff
+ with open('foo', 'w', encoding='latin-1') as foo_file:
+ foo_file.write(contents)
+
+ _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
+ with staged_files_only(patch_dir):
+ _test_foo_state(foo_staged)
+ _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
+
+
+def test_non_utf8_conflicting_diff(foo_staged, patch_dir):
+ """Regression test for #397"""
+ # The trailing whitespace is important here: it triggers git to produce
+ # an error message which looks like:
+ #
+ # ...patch1471530032:14: trailing whitespace.
+ # [[unprintable character]][[space character]]
+ # error: patch failed: foo:1
+ # error: foo: patch does not apply
+ #
+ # Previously, the error message (though discarded immediately) was being
+ # decoded with the UTF-8 codec (causing a crash)
+ contents = 'ΓΊ \n'
+ with open('foo', 'w', encoding='latin-1') as foo_file:
+ foo_file.write(contents)
+
+ _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
+ with staged_files_only(patch_dir):
+ _test_foo_state(foo_staged)
+ # Create a conflicting diff that will need to be rolled back
+ with open('foo', 'w') as foo_file:
+ foo_file.write('')
+ _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
+
+
+def _write(b):
+ with open('foo', 'wb') as f:
+ f.write(b)
+
+
+def assert_no_diff():
+ tree = cmd_output('git', 'write-tree')[1].strip()
+ cmd_output('git', 'diff-index', tree, '--exit-code')
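+
+ # `assert_no_diff` uses two git plumbing commands: `git write-tree` snapshots
+ # the current index as a tree object, and `git diff-index --exit-code <tree>`
+ # compares that tree against the working tree, exiting non-zero on any
+ # difference (which makes `cmd_output` raise). The tests below call it inside
+ # `staged_files_only` to assert that only the staged contents are present.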
+
+
+bool_product = tuple(itertools.product((True, False), repeat=2))
+
+
+@pytest.mark.parametrize(('crlf_before', 'crlf_after'), bool_product)
+@pytest.mark.parametrize('autocrlf', ('true', 'false', 'input'))
+def test_crlf(in_git_dir, patch_dir, crlf_before, crlf_after, autocrlf):
+ cmd_output('git', 'config', '--local', 'core.autocrlf', autocrlf)
+
+ before, after = b'1\n2\n', b'3\n4\n\n'
+ before = before.replace(b'\n', b'\r\n') if crlf_before else before
+ after = after.replace(b'\n', b'\r\n') if crlf_after else after
+
+ _write(before)
+ cmd_output('git', 'add', 'foo')
+ _write(after)
+ with staged_files_only(patch_dir):
+ assert_no_diff()
+
+
+def test_whitespace_errors(in_git_dir, patch_dir):
+ cmd_output('git', 'config', '--local', 'apply.whitespace', 'error')
+ test_crlf(in_git_dir, patch_dir, True, True, 'true')
+
+
+def test_autocrlf_committed_crlf(in_git_dir, patch_dir):
+ """Regression test for #570"""
+ cmd_output('git', 'config', '--local', 'core.autocrlf', 'false')
+ _write(b'1\r\n2\r\n')
+ cmd_output('git', 'add', 'foo')
+ git_commit()
+
+ cmd_output('git', 'config', '--local', 'core.autocrlf', 'true')
+ _write(b'1\r\n2\r\n\r\n\r\n\r\n')
+
+ with staged_files_only(patch_dir):
+ assert_no_diff()
+
+
+def test_intent_to_add(in_git_dir, patch_dir):
+ """Regression test for #881"""
+ _write(b'hello\nworld\n')
+ cmd_output('git', 'add', '--intent-to-add', 'foo')
+
+ assert git.intent_to_add_files() == ['foo']
+ with staged_files_only(patch_dir):
+ assert_no_diff()
+ assert git.intent_to_add_files() == ['foo']
diff --git a/tests/store_test.py b/tests/store_test.py
new file mode 100644
index 0000000..5866616
--- /dev/null
+++ b/tests/store_test.py
@@ -0,0 +1,216 @@
+import os.path
+import sqlite3
+from unittest import mock
+
+import pytest
+
+from pre_commit import git
+from pre_commit.store import _get_default_directory
+from pre_commit.store import Store
+from pre_commit.util import CalledProcessError
+from pre_commit.util import cmd_output
+from testing.fixtures import git_dir
+from testing.util import cwd
+from testing.util import git_commit
+
+
+def test_our_session_fixture_works():
+ """There's a session fixture which makes `Store` invariantly raise to
+ prevent writing to the home directory.
+ """
+ with pytest.raises(AssertionError):
+ Store()
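+
+ # A minimal sketch (an assumption -- the real fixture lives in the test
+ # suite's conftest, which is not part of this hunk) of what such an autouse
+ # session fixture could look like:
+ #
+ #     @pytest.fixture(autouse=True, scope='session')
+ #     def _no_store_in_home():
+ #         def fail_init(self, *args, **kwargs):
+ #             raise AssertionError('Store() used without an explicit directory')
+ #         with mock.patch.object(Store, '__init__', fail_init):
+ #             yield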
+
+
+def test_get_default_directory_defaults_to_home():
+ # Note we use the module-level one, which is not mocked
+ ret = _get_default_directory()
+ assert ret == os.path.join(os.path.expanduser('~/.cache'), 'pre-commit')
+
+
+def test_adheres_to_xdg_specification():
+ with mock.patch.dict(
+ os.environ, {'XDG_CACHE_HOME': '/tmp/fakehome'},
+ ):
+ ret = _get_default_directory()
+ assert ret == os.path.join('/tmp/fakehome', 'pre-commit')
+
+
+def test_uses_environment_variable_when_present():
+ with mock.patch.dict(
+ os.environ, {'PRE_COMMIT_HOME': '/tmp/pre_commit_home'},
+ ):
+ ret = _get_default_directory()
+ assert ret == '/tmp/pre_commit_home'
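+
+ # Taken together, the three tests above pin down the lookup order for the
+ # cache directory. A sketch of that order (illustration only, not necessarily
+ # the exact body of `_get_default_directory`):
+ #
+ #     def _default_directory_sketch():
+ #         return os.environ.get('PRE_COMMIT_HOME') or os.path.join(
+ #             os.environ.get('XDG_CACHE_HOME') or os.path.expanduser('~/.cache'),
+ #             'pre-commit',
+ #         )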
+
+
+def test_store_init(store):
+ # Should create the store directory
+ assert os.path.exists(store.directory)
+ # Should create a README file indicating what the directory is about
+ with open(os.path.join(store.directory, 'README')) as readme_file:
+ readme_contents = readme_file.read()
+ for text_line in (
+ 'This directory is maintained by the pre-commit project.',
+ 'Learn more: https://github.com/pre-commit/pre-commit',
+ ):
+ assert text_line in readme_contents
+
+
+def test_clone(store, tempdir_factory, log_info_mock):
+ path = git_dir(tempdir_factory)
+ with cwd(path):
+ git_commit()
+ rev = git.head_rev(path)
+ git_commit()
+
+ ret = store.clone(path, rev)
+ # Should have printed some stuff
+ assert log_info_mock.call_args_list[0][0][0].startswith(
+ 'Initializing environment for ',
+ )
+
+ # Should return a directory inside of the store
+ assert os.path.exists(ret)
+ assert ret.startswith(store.directory)
+ # Directory should start with `repo`
+ _, dirname = os.path.split(ret)
+ assert dirname.startswith('repo')
+ # Should be checked out to the rev we specified
+ assert git.head_rev(ret) == rev
+
+ # Assert there's an entry in the sqlite db for this
+ assert store.select_all_repos() == [(path, rev, ret)]
+
+
+def test_clone_cleans_up_on_checkout_failure(store):
+ with pytest.raises(Exception) as excinfo:
+ # This raises an exception because you can't clone something that
+ # doesn't exist!
+ store.clone('/i_dont_exist_lol', 'fake_rev')
+ assert '/i_dont_exist_lol' in str(excinfo.value)
+
+ repo_dirs = [
+ d for d in os.listdir(store.directory) if d.startswith('repo')
+ ]
+ assert repo_dirs == []
+
+
+def test_clone_when_repo_already_exists(store):
+ # Create an entry in the sqlite db that makes it look like the repo has
+ # been cloned.
+ with sqlite3.connect(store.db_path) as db:
+ db.execute(
+ 'INSERT INTO repos (repo, ref, path) '
+ 'VALUES ("fake_repo", "fake_ref", "fake_path")',
+ )
+
+ assert store.clone('fake_repo', 'fake_ref') == 'fake_path'
+
+
+def test_clone_shallow_failure_fallback_to_complete(
+ store, tempdir_factory,
+ log_info_mock,
+):
+ path = git_dir(tempdir_factory)
+ with cwd(path):
+ git_commit()
+ rev = git.head_rev(path)
+ git_commit()
+
+ # Force shallow clone failure
+ def fake_shallow_clone(self, *args, **kwargs):
+ raise CalledProcessError(1, (), 0, b'', None)
+ store._shallow_clone = fake_shallow_clone
+
+ ret = store.clone(path, rev)
+
+ # Should have printed some stuff
+ assert log_info_mock.call_args_list[0][0][0].startswith(
+ 'Initializing environment for ',
+ )
+
+ # Should return a directory inside of the store
+ assert os.path.exists(ret)
+ assert ret.startswith(store.directory)
+ # Directory should start with `repo`
+ _, dirname = os.path.split(ret)
+ assert dirname.startswith('repo')
+ # Should be checked out to the rev we specified
+ assert git.head_rev(ret) == rev
+
+ # Assert there's an entry in the sqlite db for this
+ assert store.select_all_repos() == [(path, rev, ret)]
+
+
+def test_clone_tag_not_on_mainline(store, tempdir_factory):
+ path = git_dir(tempdir_factory)
+ with cwd(path):
+ git_commit()
+ cmd_output('git', 'checkout', 'master', '-b', 'branch')
+ git_commit()
+ cmd_output('git', 'tag', 'v1')
+ cmd_output('git', 'checkout', 'master')
+ cmd_output('git', 'branch', '-D', 'branch')
+
+ # previously crashed on unreachable refs
+ store.clone(path, 'v1')
+
+
+def test_create_when_directory_exists_but_not_db(store):
+ # In versions <= 0.3.5 there was no sqlite db, so we need to remain
+ # backward compatible
+ os.remove(store.db_path)
+ store = Store(store.directory)
+ assert os.path.exists(store.db_path)
+
+
+def test_create_when_store_already_exists(store):
+ # an assertion that this is idempotent and does not crash
+ Store(store.directory)
+
+
+def test_db_repo_name(store):
+ assert store.db_repo_name('repo', ()) == 'repo'
+ assert store.db_repo_name('repo', ('b', 'a', 'c')) == 'repo:a,b,c'
+
+
+def test_local_resources_reflects_reality():
+ on_disk = {
+ res[len('empty_template_'):]
+ for res in os.listdir('pre_commit/resources')
+ if res.startswith('empty_template_')
+ }
+ assert on_disk == set(Store.LOCAL_RESOURCES)
+
+
+def test_mark_config_as_used(store, tmpdir):
+ with tmpdir.as_cwd():
+ f = tmpdir.join('f').ensure()
+ store.mark_config_used('f')
+ assert store.select_all_configs() == [f.strpath]
+
+
+def test_mark_config_as_used_idempotent(store, tmpdir):
+ test_mark_config_as_used(store, tmpdir)
+ test_mark_config_as_used(store, tmpdir)
+
+
+def test_mark_config_as_used_does_not_exist(store):
+ store.mark_config_used('f')
+ assert store.select_all_configs() == []
+
+
+def _simulate_pre_1_14_0(store):
+ with store.connect() as db:
+ db.executescript('DROP TABLE configs')
+
+
+def test_select_all_configs_roll_forward(store):
+ _simulate_pre_1_14_0(store)
+ assert store.select_all_configs() == []
+
+
+def test_mark_config_as_used_roll_forward(store, tmpdir):
+ _simulate_pre_1_14_0(store)
+ test_mark_config_as_used(store, tmpdir)
diff --git a/tests/util_test.py b/tests/util_test.py
new file mode 100644
index 0000000..01afbd4
--- /dev/null
+++ b/tests/util_test.py
@@ -0,0 +1,122 @@
+import os.path
+import stat
+import subprocess
+
+import pytest
+
+from pre_commit.util import CalledProcessError
+from pre_commit.util import clean_path_on_failure
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+from pre_commit.util import cmd_output_p
+from pre_commit.util import make_executable
+from pre_commit.util import parse_version
+from pre_commit.util import rmtree
+from pre_commit.util import tmpdir
+
+
+def test_CalledProcessError_str():
+ error = CalledProcessError(1, ('exe',), 0, b'output', b'errors')
+ assert str(error) == (
+ "command: ('exe',)\n"
+ 'return code: 1\n'
+ 'expected return code: 0\n'
+ 'stdout:\n'
+ ' output\n'
+ 'stderr:\n'
+ ' errors'
+ )
+
+
+def test_CalledProcessError_str_nooutput():
+ error = CalledProcessError(1, ('exe',), 0, b'', b'')
+ assert str(error) == (
+ "command: ('exe',)\n"
+ 'return code: 1\n'
+ 'expected return code: 0\n'
+ 'stdout: (none)\n'
+ 'stderr: (none)'
+ )
+
+
+def test_clean_on_failure_noop(in_tmpdir):
+ with clean_path_on_failure('foo'):
+ pass
+
+
+def test_clean_path_on_failure_does_nothing_when_not_raising(in_tmpdir):
+ with clean_path_on_failure('foo'):
+ os.mkdir('foo')
+ assert os.path.exists('foo')
+
+
+def test_clean_path_on_failure_cleans_for_normal_exception(in_tmpdir):
+ class MyException(Exception):
+ pass
+
+ with pytest.raises(MyException):
+ with clean_path_on_failure('foo'):
+ os.mkdir('foo')
+ raise MyException
+
+ assert not os.path.exists('foo')
+
+
+def test_clean_path_on_failure_cleans_for_system_exit(in_tmpdir):
+ class MySystemExit(SystemExit):
+ pass
+
+ with pytest.raises(MySystemExit):
+ with clean_path_on_failure('foo'):
+ os.mkdir('foo')
+ raise MySystemExit
+
+ assert not os.path.exists('foo')
+
+
+def test_tmpdir():
+ with tmpdir() as tempdir:
+ assert os.path.exists(tempdir)
+ assert not os.path.exists(tempdir)
+
+
+def test_cmd_output_exe_not_found():
+ ret, out, _ = cmd_output('dne', retcode=None)
+ assert ret == 1
+ assert out == 'Executable `dne` not found'
+
+
+@pytest.mark.parametrize('fn', (cmd_output_b, cmd_output_p))
+def test_cmd_output_exe_not_found_bytes(fn):
+ ret, out, _ = fn('dne', retcode=None, stderr=subprocess.STDOUT)
+ assert ret == 1
+ assert out == b'Executable `dne` not found'
+
+
+@pytest.mark.parametrize('fn', (cmd_output_b, cmd_output_p))
+def test_cmd_output_no_shebang(tmpdir, fn):
+ f = tmpdir.join('f').ensure()
+ make_executable(f)
+
+ # previously this raised `OSError` -- the output is platform specific
+ ret, out, _ = fn(str(f), retcode=None, stderr=subprocess.STDOUT)
+ assert ret == 1
+ assert isinstance(out, bytes)
+ assert out.endswith(b'\n')
+
+
+def test_parse_version():
+ assert parse_version('0.0') == parse_version('0.0')
+ assert parse_version('0.1') > parse_version('0.0')
+ assert parse_version('2.1') >= parse_version('2')
+
+
+def test_rmtree_read_only_directories(tmpdir):
+ """Simulates the go module tree. See #1042"""
+ tmpdir.join('x/y/z').ensure_dir().join('a').ensure()
+ mode = os.stat(str(tmpdir.join('x'))).st_mode
+ mode_no_w = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
+ tmpdir.join('x/y/z').chmod(mode_no_w)
+ tmpdir.join('x/y').chmod(mode_no_w)
+ tmpdir.join('x').chmod(mode_no_w)
+ rmtree(str(tmpdir.join('x')))
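+
+ # `rmtree` is expected to cope with the read-only directories by restoring
+ # write permission before retrying. A sketch of the usual pattern (an
+ # assumption, not necessarily the exact pre_commit.util implementation):
+ #
+ #     def _handle_readonly(func, path, exc_info):
+ #         os.chmod(path, os.stat(path).st_mode | stat.S_IWUSR)
+ #         func(path)
+ #
+ #     shutil.rmtree(path, onerror=_handle_readonly)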
diff --git a/tests/xargs_test.py b/tests/xargs_test.py
new file mode 100644
index 0000000..1fc9207
--- /dev/null
+++ b/tests/xargs_test.py
@@ -0,0 +1,197 @@
+import concurrent.futures
+import os
+import sys
+import time
+from typing import Tuple
+from unittest import mock
+
+import pytest
+
+from pre_commit import parse_shebang
+from pre_commit import xargs
+
+
+@pytest.mark.parametrize(
+ ('env', 'expected'),
+ (
+ ({}, 0),
+ ({b'x': b'1'}, 12),
+ ({b'x': b'12'}, 13),
+ ({b'x': b'1', b'y': b'2'}, 24),
+ ),
+)
+def test_environ_size(env, expected):
+ # normalize integer sizing
+ assert xargs._environ_size(_env=env) == expected
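+
+ # The expected values are consistent with a simple sizing model (inferred
+ # from these cases; an assumption about `_environ_size` itself): 8 bytes per
+ # `envp` pointer plus len(key) + len(value) + 2 for each "key=value\0" string:
+ #
+ #     {b'x': b'1'}              ->  8 + (1 + 1 + 2)     == 12
+ #     {b'x': b'12'}             ->  8 + (1 + 2 + 2)     == 13
+ #     {b'x': b'1', b'y': b'2'}  -> 16 + 2 * (1 + 1 + 2) == 24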
+
+
+@pytest.fixture
+def win32_mock():
+ with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
+ with mock.patch.object(sys, 'platform', 'win32'):
+ yield
+
+
+@pytest.fixture
+def linux_mock():
+ with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
+ with mock.patch.object(sys, 'platform', 'linux'):
+ yield
+
+
+def test_partition_trivial():
+ assert xargs.partition(('cmd',), (), 1) == (('cmd',),)
+
+
+def test_partition_simple():
+ assert xargs.partition(('cmd',), ('foo',), 1) == (('cmd', 'foo'),)
+
+
+def test_partition_limits():
+ ret = xargs.partition(
+ ('ninechars',), (
+ # Just match the end (with spaces)
+ '.' * 5, '.' * 4,
+ # Just match the end (single arg)
+ '.' * 10,
+ # Goes over the end
+ '.' * 5,
+ '.' * 6,
+ ),
+ 1,
+ _max_length=21,
+ )
+ assert ret == (
+ ('ninechars', '.' * 5, '.' * 4),
+ ('ninechars', '.' * 10),
+ ('ninechars', '.' * 5),
+ ('ninechars', '.' * 6),
+ )
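+
+ # One way to read the expected groups, assuming each token costs
+ # len(token) + 1 (for the separating space) against `_max_length=21`:
+ #
+ #     'ninechars'        -> 10
+ #     first partition:   10 + 6 + 5 == 21  (exactly at the limit)
+ #     second partition:  10 + 11    == 21  (the single 10-char arg)
+ #     then 10 + 6 == 16, and adding the 6-dot arg (7) would give 23 > 21,
+ #     so the last two args land in separate partitions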
+
+
+def test_partition_limit_win32(win32_mock):
+ cmd = ('ninechars',)
+ # each emoji counts as half as much: 2 UTF-16 code units vs 4 UTF-8 bytes
+ varargs = ('πŸ˜‘' * 5,)
+ ret = xargs.partition(cmd, varargs, 1, _max_length=21)
+ assert ret == (cmd + varargs,)
+
+
+def test_partition_limit_linux(linux_mock):
+ cmd = ('ninechars',)
+ varargs = ('πŸ˜‘' * 5,)
+ ret = xargs.partition(cmd, varargs, 1, _max_length=31)
+ assert ret == (cmd + varargs,)
+
+
+def test_argument_too_long_with_large_unicode(linux_mock):
+ cmd = ('ninechars',)
+ varargs = ('πŸ˜‘' * 10,) # 4 bytes * 10
+ with pytest.raises(xargs.ArgumentTooLongError):
+ xargs.partition(cmd, varargs, 1, _max_length=20)
+
+
+def test_partition_target_concurrency():
+ ret = xargs.partition(
+ ('foo',), ('A',) * 22,
+ 4,
+ _max_length=50,
+ )
+ assert ret == (
+ ('foo',) + ('A',) * 6,
+ ('foo',) + ('A',) * 6,
+ ('foo',) + ('A',) * 6,
+ ('foo',) + ('A',) * 4,
+ )
+
+
+def test_partition_target_concurrency_wont_make_tiny_partitions():
+ ret = xargs.partition(
+ ('foo',), ('A',) * 10,
+ 4,
+ _max_length=50,
+ )
+ assert ret == (
+ ('foo',) + ('A',) * 4,
+ ('foo',) + ('A',) * 4,
+ ('foo',) + ('A',) * 2,
+ )
+
+
+def test_argument_too_long():
+ with pytest.raises(xargs.ArgumentTooLongError):
+ xargs.partition(('a' * 5,), ('a' * 5,), 1, _max_length=10)
+
+
+def test_xargs_smoke():
+ ret, out = xargs.xargs(('echo',), ('hello', 'world'))
+ assert ret == 0
+ assert out.replace(b'\r\n', b'\n') == b'hello world\n'
+
+
+exit_cmd = parse_shebang.normalize_cmd(('bash', '-c', 'exit $1', '--'))
+# Abuse max_length to control the exit code
+max_length = len(' '.join(exit_cmd)) + 3
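+
+ # With the same "len(token) + 1" reading as in test_partition_limits, the
+ # command itself costs len(' '.join(exit_cmd)) + 1, so a budget of +3 leaves
+ # room for exactly one single-character exit code per partition: each code
+ # runs in its own `bash -c 'exit $1' --` invocation, and (as the asserts
+ # below suggest) xargs reports the maximum return code across partitions.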
+
+
+def test_xargs_retcode_normal():
+ ret, _ = xargs.xargs(exit_cmd, ('0',), _max_length=max_length)
+ assert ret == 0
+
+ ret, _ = xargs.xargs(exit_cmd, ('0', '1'), _max_length=max_length)
+ assert ret == 1
+
+ # takes the maximum return code
+ ret, _ = xargs.xargs(exit_cmd, ('0', '5', '1'), _max_length=max_length)
+ assert ret == 5
+
+
+def test_xargs_concurrency():
+ bash_cmd = parse_shebang.normalize_cmd(('bash', '-c'))
+ print_pid = ('sleep 0.5 && echo $$',)
+
+ start = time.time()
+ ret, stdout = xargs.xargs(
+ bash_cmd, print_pid * 5,
+ target_concurrency=5,
+ _max_length=len(' '.join(bash_cmd + print_pid)) + 1,
+ )
+ elapsed = time.time() - start
+ assert ret == 0
+ pids = stdout.splitlines()
+ assert len(pids) == 5
+ # It would take 0.5*5=2.5 seconds to run all of these in serial, so if it
+ # takes less, they must have run concurrently.
+ assert elapsed < 2.5
+
+
+def test_thread_mapper_concurrency_uses_threadpoolexecutor_map():
+ with xargs._thread_mapper(10) as thread_map:
+ _self = thread_map.__self__ # type: ignore
+ assert isinstance(_self, concurrent.futures.ThreadPoolExecutor)
+
+
+def test_thread_mapper_concurrency_uses_regular_map():
+ with xargs._thread_mapper(1) as thread_map:
+ assert thread_map is map
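+
+ # A sketch of a context manager consistent with the two tests above (an
+ # assumption about `_thread_mapper`, for illustration only):
+ #
+ #     @contextlib.contextmanager
+ #     def _thread_mapper(maxsize):
+ #         if maxsize == 1:
+ #             yield map
+ #         else:
+ #             with concurrent.futures.ThreadPoolExecutor(maxsize) as ex:
+ #                 yield ex.map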
+
+
+def test_xargs_propagate_kwargs_to_cmd():
+ env = {'PRE_COMMIT_TEST_VAR': 'Pre commit is awesome'}
+ cmd: Tuple[str, ...] = ('bash', '-c', 'echo $PRE_COMMIT_TEST_VAR', '--')
+ cmd = parse_shebang.normalize_cmd(cmd)
+
+ ret, stdout = xargs.xargs(cmd, ('1',), env=env)
+ assert ret == 0
+ assert b'Pre commit is awesome' in stdout
+
+
+@pytest.mark.xfail(os.name == 'nt', reason='posix only')
+def test_xargs_color_true_makes_tty():
+ retcode, out = xargs.xargs(
+ (sys.executable, '-c', 'import sys; print(sys.stdout.isatty())'),
+ ('1',),
+ color=True,
+ )
+ assert retcode == 0
+ assert out == b'True\n'
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..d9f9420
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,28 @@
+[tox]
+envlist = py36,py37,py38,pypy3,pre-commit
+
+[testenv]
+deps = -rrequirements-dev.txt
+passenv = HOME LOCALAPPDATA RUSTUP_HOME
+commands =
+ coverage erase
+ coverage run -m pytest {posargs:tests}
+ coverage report
+
+[testenv:pre-commit]
+skip_install = true
+deps = pre-commit
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[pep8]
+ignore = E265,E501,W504
+
+[pytest]
+env =
+ GIT_AUTHOR_NAME=test
+ GIT_COMMITTER_NAME=test
+ GIT_AUTHOR_EMAIL=test@example.com
+ GIT_COMMITTER_EMAIL=test@example.com
+ VIRTUALENV_NO_DOWNLOAD=1
+ PRE_COMMIT_NO_CONCURRENCY=1