path: root/pre_commit/commands
Diffstat (limited to 'pre_commit/commands')
-rw-r--r--  pre_commit/commands/__init__.py             0
-rw-r--r--  pre_commit/commands/autoupdate.py         215
-rw-r--r--  pre_commit/commands/clean.py               16
-rw-r--r--  pre_commit/commands/gc.py                  89
-rw-r--r--  pre_commit/commands/hook_impl.py          271
-rw-r--r--  pre_commit/commands/init_templatedir.py    39
-rw-r--r--  pre_commit/commands/install_uninstall.py  167
-rw-r--r--  pre_commit/commands/migrate_config.py      75
-rw-r--r--  pre_commit/commands/run.py                447
-rw-r--r--  pre_commit/commands/sample_config.py       18
-rw-r--r--  pre_commit/commands/try_repo.py            77
-rw-r--r--  pre_commit/commands/validate_config.py     18
-rw-r--r--  pre_commit/commands/validate_manifest.py   18
13 files changed, 1450 insertions, 0 deletions
diff --git a/pre_commit/commands/__init__.py b/pre_commit/commands/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pre_commit/commands/__init__.py
diff --git a/pre_commit/commands/autoupdate.py b/pre_commit/commands/autoupdate.py
new file mode 100644
index 0000000..aa0c5e2
--- /dev/null
+++ b/pre_commit/commands/autoupdate.py
@@ -0,0 +1,215 @@
+from __future__ import annotations
+
+import concurrent.futures
+import os.path
+import re
+import tempfile
+from collections.abc import Sequence
+from typing import Any
+from typing import NamedTuple
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit import output
+from pre_commit import xargs
+from pre_commit.clientlib import InvalidManifestError
+from pre_commit.clientlib import load_config
+from pre_commit.clientlib import load_manifest
+from pre_commit.clientlib import LOCAL
+from pre_commit.clientlib import META
+from pre_commit.commands.migrate_config import migrate_config
+from pre_commit.util import CalledProcessError
+from pre_commit.util import cmd_output
+from pre_commit.util import cmd_output_b
+from pre_commit.yaml import yaml_dump
+from pre_commit.yaml import yaml_load
+
+
+class RevInfo(NamedTuple):
+ repo: str
+ rev: str
+ frozen: str | None = None
+ hook_ids: frozenset[str] = frozenset()
+
+ @classmethod
+ def from_config(cls, config: dict[str, Any]) -> RevInfo:
+ return cls(config['repo'], config['rev'])
+
+ def update(self, tags_only: bool, freeze: bool) -> RevInfo:
+ with tempfile.TemporaryDirectory() as tmp:
+ _git = ('git', *git.NO_FS_MONITOR, '-C', tmp)
+
+ if tags_only:
+ tag_opt = '--abbrev=0'
+ else:
+ tag_opt = '--exact'
+ tag_cmd = (*_git, 'describe', 'FETCH_HEAD', '--tags', tag_opt)
+
+ git.init_repo(tmp, self.repo)
+ cmd_output_b(*_git, 'config', 'extensions.partialClone', 'true')
+ cmd_output_b(
+ *_git, 'fetch', 'origin', 'HEAD',
+ '--quiet', '--filter=blob:none', '--tags',
+ )
+
+ try:
+ rev = cmd_output(*tag_cmd)[1].strip()
+ except CalledProcessError:
+ rev = cmd_output(*_git, 'rev-parse', 'FETCH_HEAD')[1].strip()
+ else:
+ if tags_only:
+ rev = git.get_best_candidate_tag(rev, tmp)
+
+ frozen = None
+ if freeze:
+ exact = cmd_output(*_git, 'rev-parse', rev)[1].strip()
+ if exact != rev:
+ rev, frozen = exact, rev
+
+ try:
+ # workaround for windows -- see #2865
+ cmd_output_b(*_git, 'show', f'{rev}:{C.MANIFEST_FILE}')
+ cmd_output(*_git, 'checkout', rev, '--', C.MANIFEST_FILE)
+ except CalledProcessError:
+ pass # this will be caught by manifest validating code
+ try:
+ manifest = load_manifest(os.path.join(tmp, C.MANIFEST_FILE))
+ except InvalidManifestError as e:
+ raise RepositoryCannotBeUpdatedError(f'[{self.repo}] {e}')
+ else:
+ hook_ids = frozenset(hook['id'] for hook in manifest)
+
+ return self._replace(rev=rev, frozen=frozen, hook_ids=hook_ids)
+
+
+class RepositoryCannotBeUpdatedError(RuntimeError):
+ pass
+
+
+def _check_hooks_still_exist_at_rev(
+ repo_config: dict[str, Any],
+ info: RevInfo,
+) -> None:
+ # See if any of our hooks were deleted with the new commits
+ hooks = {hook['id'] for hook in repo_config['hooks']}
+ hooks_missing = hooks - info.hook_ids
+ if hooks_missing:
+ raise RepositoryCannotBeUpdatedError(
+ f'[{info.repo}] Cannot update because the update target is '
+ f'missing these hooks: {", ".join(sorted(hooks_missing))}',
+ )
+
+
+def _update_one(
+ i: int,
+ repo: dict[str, Any],
+ *,
+ tags_only: bool,
+ freeze: bool,
+) -> tuple[int, RevInfo, RevInfo]:
+ old = RevInfo.from_config(repo)
+ new = old.update(tags_only=tags_only, freeze=freeze)
+ _check_hooks_still_exist_at_rev(repo, new)
+ return i, old, new
+
+
+REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$')
+
+
+def _original_lines(
+ path: str,
+ rev_infos: list[RevInfo | None],
+ retry: bool = False,
+) -> tuple[list[str], list[int]]:
+ """detect `rev:` lines or reformat the file"""
+ with open(path, newline='') as f:
+ original = f.read()
+
+ lines = original.splitlines(True)
+ idxs = [i for i, line in enumerate(lines) if REV_LINE_RE.match(line)]
+ if len(idxs) == len(rev_infos):
+ return lines, idxs
+ elif retry:
+ raise AssertionError('could not find rev lines')
+ else:
+ with open(path, 'w') as f:
+ f.write(yaml_dump(yaml_load(original)))
+ return _original_lines(path, rev_infos, retry=True)
+
+
+def _write_new_config(path: str, rev_infos: list[RevInfo | None]) -> None:
+ lines, idxs = _original_lines(path, rev_infos)
+
+ for idx, rev_info in zip(idxs, rev_infos):
+ if rev_info is None:
+ continue
+ match = REV_LINE_RE.match(lines[idx])
+ assert match is not None
+ new_rev_s = yaml_dump({'rev': rev_info.rev}, default_style=match[3])
+ new_rev = new_rev_s.split(':', 1)[1].strip()
+ if rev_info.frozen is not None:
+ comment = f' # frozen: {rev_info.frozen}'
+ elif match[5].strip().startswith('# frozen:'):
+ comment = ''
+ else:
+ comment = match[5]
+ lines[idx] = f'{match[1]}rev:{match[2]}{new_rev}{comment}{match[6]}'
+
+ with open(path, 'w', newline='') as f:
+ f.write(''.join(lines))
+
+
+def autoupdate(
+ config_file: str,
+ tags_only: bool,
+ freeze: bool,
+ repos: Sequence[str] = (),
+ jobs: int = 1,
+) -> int:
+ """Auto-update the pre-commit config to the latest versions of repos."""
+ migrate_config(config_file, quiet=True)
+ changed = False
+ retv = 0
+
+ config_repos = [
+ repo for repo in load_config(config_file)['repos']
+ if repo['repo'] not in {LOCAL, META}
+ ]
+
+ rev_infos: list[RevInfo | None] = [None] * len(config_repos)
+ jobs = jobs or xargs.cpu_count() # 0 => number of cpus
+ jobs = min(jobs, len(repos) or len(config_repos)) # max 1-per-thread
+ jobs = max(jobs, 1) # at least one thread
+ with concurrent.futures.ThreadPoolExecutor(jobs) as exe:
+ futures = [
+ exe.submit(
+ _update_one,
+ i, repo, tags_only=tags_only, freeze=freeze,
+ )
+ for i, repo in enumerate(config_repos)
+ if not repos or repo['repo'] in repos
+ ]
+ for future in concurrent.futures.as_completed(futures):
+ try:
+ i, old, new = future.result()
+ except RepositoryCannotBeUpdatedError as e:
+ output.write_line(str(e))
+ retv = 1
+ else:
+ if new.rev != old.rev:
+ changed = True
+ if new.frozen:
+ new_s = f'{new.frozen} (frozen)'
+ else:
+ new_s = new.rev
+ msg = f'updating {old.rev} -> {new_s}'
+ rev_infos[i] = new
+ else:
+ msg = 'already up to date!'
+
+ output.write_line(f'[{old.repo}] {msg}')
+
+ if changed:
+ _write_new_config(config_file, rev_infos)
+
+ return retv
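
The rev-rewriting above hinges on REV_LINE_RE capturing the indentation, spacing, quote style, and any trailing comment of each `rev:` line, so _write_new_config can swap in the new revision without reformatting the rest of the file. A minimal sketch of what the regex captures, using a hypothetical config line (not taken from this change):

import re

REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$')

line = '    rev: v3.2.0\r\n'  # hypothetical .pre-commit-config.yaml line
m = REV_LINE_RE.match(line)
assert m is not None
# groups: indentation, spacing after the colon, quote character, the rev
# itself, anything before the line ending (e.g. a comment), the line ending
print(m.groups())  # ('    ', ' ', '', 'v3.2.0', '', '\r\n')
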
diff --git a/pre_commit/commands/clean.py b/pre_commit/commands/clean.py
new file mode 100644
index 0000000..5119f64
--- /dev/null
+++ b/pre_commit/commands/clean.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+import os.path
+
+from pre_commit import output
+from pre_commit.store import Store
+from pre_commit.util import rmtree
+
+
+def clean(store: Store) -> int:
+ legacy_path = os.path.expanduser('~/.pre-commit')
+ for directory in (store.directory, legacy_path):
+ if os.path.exists(directory):
+ rmtree(directory)
+ output.write_line(f'Cleaned {directory}.')
+ return 0
diff --git a/pre_commit/commands/gc.py b/pre_commit/commands/gc.py
new file mode 100644
index 0000000..6892e09
--- /dev/null
+++ b/pre_commit/commands/gc.py
@@ -0,0 +1,89 @@
+from __future__ import annotations
+
+import os.path
+from typing import Any
+
+import pre_commit.constants as C
+from pre_commit import output
+from pre_commit.clientlib import InvalidConfigError
+from pre_commit.clientlib import InvalidManifestError
+from pre_commit.clientlib import load_config
+from pre_commit.clientlib import load_manifest
+from pre_commit.clientlib import LOCAL
+from pre_commit.clientlib import META
+from pre_commit.store import Store
+
+
+def _mark_used_repos(
+ store: Store,
+ all_repos: dict[tuple[str, str], str],
+ unused_repos: set[tuple[str, str]],
+ repo: dict[str, Any],
+) -> None:
+ if repo['repo'] == META:
+ return
+ elif repo['repo'] == LOCAL:
+ for hook in repo['hooks']:
+ deps = hook.get('additional_dependencies')
+ unused_repos.discard((
+ store.db_repo_name(repo['repo'], deps), C.LOCAL_REPO_VERSION,
+ ))
+ else:
+ key = (repo['repo'], repo['rev'])
+ path = all_repos.get(key)
+ # can't inspect manifest if it isn't cloned
+ if path is None:
+ return
+
+ try:
+ manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE))
+ except InvalidManifestError:
+ return
+ else:
+ unused_repos.discard(key)
+ by_id = {hook['id']: hook for hook in manifest}
+
+ for hook in repo['hooks']:
+ if hook['id'] not in by_id:
+ continue
+
+ deps = hook.get(
+ 'additional_dependencies',
+ by_id[hook['id']]['additional_dependencies'],
+ )
+ unused_repos.discard((
+ store.db_repo_name(repo['repo'], deps), repo['rev'],
+ ))
+
+
+def _gc_repos(store: Store) -> int:
+ configs = store.select_all_configs()
+ repos = store.select_all_repos()
+
+ # delete config paths which do not exist
+ dead_configs = [p for p in configs if not os.path.exists(p)]
+ live_configs = [p for p in configs if os.path.exists(p)]
+
+ all_repos = {(repo, ref): path for repo, ref, path in repos}
+ unused_repos = set(all_repos)
+ for config_path in live_configs:
+ try:
+ config = load_config(config_path)
+ except InvalidConfigError:
+ dead_configs.append(config_path)
+ continue
+ else:
+ for repo in config['repos']:
+ _mark_used_repos(store, all_repos, unused_repos, repo)
+
+ store.delete_configs(dead_configs)
+ for db_repo_name, ref in unused_repos:
+ store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)])
+ return len(unused_repos)
+
+
+def gc(store: Store) -> int:
+ with store.exclusive_lock():
+ repos_removed = _gc_repos(store)
+ output.write_line(f'{repos_removed} repo(s) removed.')
+ return 0
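
_gc_repos starts by treating every cloned (repo, ref) pair as unused and then discards pairs that a live config still references; whatever is left gets deleted. A toy sketch of that bookkeeping with made-up repository data and cache paths:

# hypothetical (repo, ref) -> clone path mapping as read from the store database
all_repos = {
    ('https://github.com/psf/black', '23.1.0'): '/cache/repo-a',
    ('https://github.com/psf/black', '22.3.0'): '/cache/repo-b',
}
unused_repos = set(all_repos)

# a live config still pins 23.1.0, so only the stale clone stays unused
unused_repos.discard(('https://github.com/psf/black', '23.1.0'))
print(unused_repos)  # {('https://github.com/psf/black', '22.3.0')}
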
diff --git a/pre_commit/commands/hook_impl.py b/pre_commit/commands/hook_impl.py
new file mode 100644
index 0000000..49a80b7
--- /dev/null
+++ b/pre_commit/commands/hook_impl.py
@@ -0,0 +1,271 @@
+from __future__ import annotations
+
+import argparse
+import os.path
+import subprocess
+import sys
+from collections.abc import Sequence
+
+from pre_commit.commands.run import run
+from pre_commit.envcontext import envcontext
+from pre_commit.parse_shebang import normalize_cmd
+from pre_commit.store import Store
+
+Z40 = '0' * 40
+
+
+def _run_legacy(
+ hook_type: str,
+ hook_dir: str,
+ args: Sequence[str],
+) -> tuple[int, bytes]:
+ if os.environ.get('PRE_COMMIT_RUNNING_LEGACY'):
+ raise SystemExit(
+ f"bug: pre-commit's script is installed in migration mode\n"
+ f'run `pre-commit install -f --hook-type {hook_type}` to fix '
+ f'this\n\n'
+ f'Please report this bug at '
+ f'https://github.com/pre-commit/pre-commit/issues',
+ )
+
+ if hook_type == 'pre-push':
+ stdin = sys.stdin.buffer.read()
+ else:
+ stdin = b''
+
+ # not running in legacy mode
+ legacy_hook = os.path.join(hook_dir, f'{hook_type}.legacy')
+ if not os.access(legacy_hook, os.X_OK):
+ return 0, stdin
+
+ with envcontext((('PRE_COMMIT_RUNNING_LEGACY', '1'),)):
+ cmd = normalize_cmd((legacy_hook, *args))
+ return subprocess.run(cmd, input=stdin).returncode, stdin
+
+
+def _validate_config(
+ retv: int,
+ config: str,
+ skip_on_missing_config: bool,
+) -> None:
+ if not os.path.isfile(config):
+ if skip_on_missing_config or os.getenv('PRE_COMMIT_ALLOW_NO_CONFIG'):
+ print(f'`{config}` config file not found. Skipping `pre-commit`.')
+ raise SystemExit(retv)
+ else:
+ print(
+ f'No {config} file was found\n'
+ f'- To temporarily silence this, run '
+ f'`PRE_COMMIT_ALLOW_NO_CONFIG=1 git ...`\n'
+ f'- To permanently silence this, install pre-commit with the '
+ f'--allow-missing-config option\n'
+ f'- To uninstall pre-commit run `pre-commit uninstall`',
+ )
+ raise SystemExit(1)
+
+
+def _ns(
+ hook_type: str,
+ color: bool,
+ *,
+ all_files: bool = False,
+ remote_branch: str | None = None,
+ local_branch: str | None = None,
+ from_ref: str | None = None,
+ to_ref: str | None = None,
+ pre_rebase_upstream: str | None = None,
+ pre_rebase_branch: str | None = None,
+ remote_name: str | None = None,
+ remote_url: str | None = None,
+ commit_msg_filename: str | None = None,
+ prepare_commit_message_source: str | None = None,
+ commit_object_name: str | None = None,
+ checkout_type: str | None = None,
+ is_squash_merge: str | None = None,
+ rewrite_command: str | None = None,
+) -> argparse.Namespace:
+ return argparse.Namespace(
+ color=color,
+ hook_stage=hook_type,
+ remote_branch=remote_branch,
+ local_branch=local_branch,
+ from_ref=from_ref,
+ to_ref=to_ref,
+ pre_rebase_upstream=pre_rebase_upstream,
+ pre_rebase_branch=pre_rebase_branch,
+ remote_name=remote_name,
+ remote_url=remote_url,
+ commit_msg_filename=commit_msg_filename,
+ prepare_commit_message_source=prepare_commit_message_source,
+ commit_object_name=commit_object_name,
+ all_files=all_files,
+ checkout_type=checkout_type,
+ is_squash_merge=is_squash_merge,
+ rewrite_command=rewrite_command,
+ files=(),
+ hook=None,
+ verbose=False,
+ show_diff_on_failure=False,
+ )
+
+
+def _rev_exists(rev: str) -> bool:
+ return not subprocess.call(('git', 'rev-list', '--quiet', rev))
+
+
+def _pre_push_ns(
+ color: bool,
+ args: Sequence[str],
+ stdin: bytes,
+) -> argparse.Namespace | None:
+ remote_name = args[0]
+ remote_url = args[1]
+
+ for line in stdin.decode().splitlines():
+ parts = line.rsplit(maxsplit=3)
+ local_branch, local_sha, remote_branch, remote_sha = parts
+ if local_sha == Z40:
+ continue
+ elif remote_sha != Z40 and _rev_exists(remote_sha):
+ return _ns(
+ 'pre-push', color,
+ from_ref=remote_sha, to_ref=local_sha,
+ remote_branch=remote_branch,
+ local_branch=local_branch,
+ remote_name=remote_name, remote_url=remote_url,
+ )
+ else:
+ # ancestors not found in remote
+ ancestors = subprocess.check_output((
+ 'git', 'rev-list', local_sha, '--topo-order', '--reverse',
+ '--not', f'--remotes={remote_name}',
+ )).decode().strip()
+ if not ancestors:
+ continue
+ else:
+ first_ancestor = ancestors.splitlines()[0]
+ cmd = ('git', 'rev-list', '--max-parents=0', local_sha)
+ roots = set(subprocess.check_output(cmd).decode().splitlines())
+ if first_ancestor in roots:
+ # pushing the whole tree including root commit
+ return _ns(
+ 'pre-push', color,
+ all_files=True,
+ remote_name=remote_name, remote_url=remote_url,
+ remote_branch=remote_branch,
+ local_branch=local_branch,
+ )
+ else:
+ rev_cmd = ('git', 'rev-parse', f'{first_ancestor}^')
+ source = subprocess.check_output(rev_cmd).decode().strip()
+ return _ns(
+ 'pre-push', color,
+ from_ref=source, to_ref=local_sha,
+ remote_name=remote_name, remote_url=remote_url,
+ remote_branch=remote_branch,
+ local_branch=local_branch,
+ )
+
+ # nothing to push
+ return None
+
+
+_EXPECTED_ARG_LENGTH_BY_HOOK = {
+ 'commit-msg': 1,
+ 'post-checkout': 3,
+ 'post-commit': 0,
+ 'pre-commit': 0,
+ 'pre-merge-commit': 0,
+ 'post-merge': 1,
+ 'post-rewrite': 1,
+ 'pre-push': 2,
+}
+
+
+def _check_args_length(hook_type: str, args: Sequence[str]) -> None:
+ if hook_type == 'prepare-commit-msg':
+ if len(args) < 1 or len(args) > 3:
+ raise SystemExit(
+ f'hook-impl for {hook_type} expected 1, 2, or 3 arguments '
+ f'but got {len(args)}: {args}',
+ )
+ elif hook_type == 'pre-rebase':
+ if len(args) < 1 or len(args) > 2:
+ raise SystemExit(
+ f'hook-impl for {hook_type} expected 1 or 2 arguments '
+ f'but got {len(args)}: {args}',
+ )
+ elif hook_type in _EXPECTED_ARG_LENGTH_BY_HOOK:
+ expected = _EXPECTED_ARG_LENGTH_BY_HOOK[hook_type]
+ if len(args) != expected:
+ arguments_s = 'argument' if expected == 1 else 'arguments'
+ raise SystemExit(
+ f'hook-impl for {hook_type} expected {expected} {arguments_s} '
+ f'but got {len(args)}: {args}',
+ )
+ else:
+ raise AssertionError(f'unexpected hook type: {hook_type}')
+
+
+def _run_ns(
+ hook_type: str,
+ color: bool,
+ args: Sequence[str],
+ stdin: bytes,
+) -> argparse.Namespace | None:
+ _check_args_length(hook_type, args)
+ if hook_type == 'pre-push':
+ return _pre_push_ns(color, args, stdin)
+    elif hook_type == 'commit-msg':
+ return _ns(hook_type, color, commit_msg_filename=args[0])
+ elif hook_type == 'prepare-commit-msg' and len(args) == 1:
+ return _ns(hook_type, color, commit_msg_filename=args[0])
+ elif hook_type == 'prepare-commit-msg' and len(args) == 2:
+ return _ns(
+ hook_type, color, commit_msg_filename=args[0],
+ prepare_commit_message_source=args[1],
+ )
+ elif hook_type == 'prepare-commit-msg' and len(args) == 3:
+ return _ns(
+ hook_type, color, commit_msg_filename=args[0],
+ prepare_commit_message_source=args[1], commit_object_name=args[2],
+ )
+ elif hook_type in {'post-commit', 'pre-merge-commit', 'pre-commit'}:
+ return _ns(hook_type, color)
+ elif hook_type == 'post-checkout':
+ return _ns(
+ hook_type, color,
+ from_ref=args[0], to_ref=args[1], checkout_type=args[2],
+ )
+ elif hook_type == 'post-merge':
+ return _ns(hook_type, color, is_squash_merge=args[0])
+ elif hook_type == 'post-rewrite':
+ return _ns(hook_type, color, rewrite_command=args[0])
+ elif hook_type == 'pre-rebase' and len(args) == 1:
+ return _ns(hook_type, color, pre_rebase_upstream=args[0])
+ elif hook_type == 'pre-rebase' and len(args) == 2:
+ return _ns(
+ hook_type, color, pre_rebase_upstream=args[0],
+ pre_rebase_branch=args[1],
+ )
+ else:
+ raise AssertionError(f'unexpected hook type: {hook_type}')
+
+
+def hook_impl(
+ store: Store,
+ *,
+ config: str,
+ color: bool,
+ hook_type: str,
+ hook_dir: str,
+ skip_on_missing_config: bool,
+ args: Sequence[str],
+) -> int:
+ retv, stdin = _run_legacy(hook_type, hook_dir, args)
+ _validate_config(retv, config, skip_on_missing_config)
+ ns = _run_ns(hook_type, color, args, stdin)
+ if ns is None:
+ return retv
+ else:
+ return retv | run(config, store, ns)
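
For pre-push, git writes one line per ref to the hook's stdin in the form `<local ref> <local sha> <remote ref> <remote sha>`, which _pre_push_ns splits with rsplit and compares against the all-zero object id. A rough, self-contained sketch of that classification using fabricated SHAs and branch names:

Z40 = '0' * 40  # all-zero object id: the ref does not exist / is being deleted

# hypothetical stdin from `git push`, one line per ref being pushed
stdin = (
    'refs/heads/main ' + 'a' * 40 + ' refs/heads/main ' + 'b' * 40 + '\n' +
    'refs/heads/gone ' + Z40 + ' refs/heads/gone ' + 'c' * 40 + '\n'
)

for line in stdin.splitlines():
    local_branch, local_sha, remote_branch, remote_sha = line.rsplit(maxsplit=3)
    if local_sha == Z40:
        print(f'{local_branch}: ref deletion, nothing to run')
    else:
        print(f'{local_branch}: hooks would cover {remote_sha[:7]}..{local_sha[:7]}')
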
diff --git a/pre_commit/commands/init_templatedir.py b/pre_commit/commands/init_templatedir.py
new file mode 100644
index 0000000..08af656
--- /dev/null
+++ b/pre_commit/commands/init_templatedir.py
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+import logging
+import os.path
+
+from pre_commit.commands.install_uninstall import install
+from pre_commit.store import Store
+from pre_commit.util import CalledProcessError
+from pre_commit.util import cmd_output
+
+logger = logging.getLogger('pre_commit')
+
+
+def init_templatedir(
+ config_file: str,
+ store: Store,
+ directory: str,
+ hook_types: list[str] | None,
+ skip_on_missing_config: bool = True,
+) -> int:
+ install(
+ config_file,
+ store,
+ hook_types=hook_types,
+ overwrite=True,
+ skip_on_missing_config=skip_on_missing_config,
+ git_dir=directory,
+ )
+ try:
+ _, out, _ = cmd_output('git', 'config', 'init.templateDir')
+ except CalledProcessError:
+ configured_path = None
+ else:
+ configured_path = os.path.realpath(os.path.expanduser(out.strip()))
+ dest = os.path.realpath(directory)
+ if configured_path != dest:
+ logger.warning('`init.templateDir` not set to the target directory')
+ logger.warning(f'maybe `git config --global init.templateDir {dest}`?')
+ return 0
diff --git a/pre_commit/commands/install_uninstall.py b/pre_commit/commands/install_uninstall.py
new file mode 100644
index 0000000..d19e0d4
--- /dev/null
+++ b/pre_commit/commands/install_uninstall.py
@@ -0,0 +1,167 @@
+from __future__ import annotations
+
+import logging
+import os.path
+import shlex
+import shutil
+import sys
+
+from pre_commit import git
+from pre_commit import output
+from pre_commit.clientlib import InvalidConfigError
+from pre_commit.clientlib import load_config
+from pre_commit.repository import all_hooks
+from pre_commit.repository import install_hook_envs
+from pre_commit.store import Store
+from pre_commit.util import make_executable
+from pre_commit.util import resource_text
+
+
+logger = logging.getLogger(__name__)
+
+# This is used to identify the hook file we install
+PRIOR_HASHES = (
+ b'4d9958c90bc262f47553e2c073f14cfe',
+ b'd8ee923c46731b42cd95cc869add4062',
+ b'49fd668cb42069aa1b6048464be5d395',
+ b'79f09a650522a87b0da915d0d983b2de',
+ b'e358c9dae00eac5d06b38dfdb1e33a8c',
+)
+CURRENT_HASH = b'138fd403232d2ddd5efb44317e38bf03'
+TEMPLATE_START = '# start templated\n'
+TEMPLATE_END = '# end templated\n'
+
+
+def _hook_types(cfg_filename: str, hook_types: list[str] | None) -> list[str]:
+ if hook_types is not None:
+ return hook_types
+ else:
+ try:
+ cfg = load_config(cfg_filename)
+ except InvalidConfigError:
+ return ['pre-commit']
+ else:
+ return cfg['default_install_hook_types']
+
+
+def _hook_paths(
+ hook_type: str,
+ git_dir: str | None = None,
+) -> tuple[str, str]:
+ git_dir = git_dir if git_dir is not None else git.get_git_common_dir()
+ pth = os.path.join(git_dir, 'hooks', hook_type)
+ return pth, f'{pth}.legacy'
+
+
+def is_our_script(filename: str) -> bool:
+ if not os.path.exists(filename): # pragma: win32 no cover (symlink)
+ return False
+ with open(filename, 'rb') as f:
+ contents = f.read()
+ return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES)
+
+
+def _install_hook_script(
+ config_file: str,
+ hook_type: str,
+ overwrite: bool = False,
+ skip_on_missing_config: bool = False,
+ git_dir: str | None = None,
+) -> None:
+ hook_path, legacy_path = _hook_paths(hook_type, git_dir=git_dir)
+
+ os.makedirs(os.path.dirname(hook_path), exist_ok=True)
+
+ # If we have an existing hook, move it to pre-commit.legacy
+ if os.path.lexists(hook_path) and not is_our_script(hook_path):
+ shutil.move(hook_path, legacy_path)
+
+ # If we specify overwrite, we simply delete the legacy file
+ if overwrite and os.path.exists(legacy_path):
+ os.remove(legacy_path)
+ elif os.path.exists(legacy_path):
+ output.write_line(
+ f'Running in migration mode with existing hooks at {legacy_path}\n'
+ f'Use -f to use only pre-commit.',
+ )
+
+ args = ['hook-impl', f'--config={config_file}', f'--hook-type={hook_type}']
+ if skip_on_missing_config:
+ args.append('--skip-on-missing-config')
+
+ with open(hook_path, 'w') as hook_file:
+ contents = resource_text('hook-tmpl')
+ before, rest = contents.split(TEMPLATE_START)
+ _, after = rest.split(TEMPLATE_END)
+
+ # on windows always use `/bin/sh` since `bash` might not be on PATH
+ # though we use bash-specific features `sh` on windows is actually
+ # bash in "POSIXLY_CORRECT" mode which still supports the features we
+ # use: subshells / arrays
+ if sys.platform == 'win32': # pragma: win32 cover
+ hook_file.write('#!/bin/sh\n')
+
+ hook_file.write(before + TEMPLATE_START)
+ hook_file.write(f'INSTALL_PYTHON={shlex.quote(sys.executable)}\n')
+ args_s = shlex.join(args)
+ hook_file.write(f'ARGS=({args_s})\n')
+ hook_file.write(TEMPLATE_END + after)
+ make_executable(hook_path)
+
+ output.write_line(f'pre-commit installed at {hook_path}')
+
+
+def install(
+ config_file: str,
+ store: Store,
+ hook_types: list[str] | None,
+ overwrite: bool = False,
+ hooks: bool = False,
+ skip_on_missing_config: bool = False,
+ git_dir: str | None = None,
+) -> int:
+ if git_dir is None and git.has_core_hookpaths_set():
+ logger.error(
+ 'Cowardly refusing to install hooks with `core.hooksPath` set.\n'
+ 'hint: `git config --unset-all core.hooksPath`',
+ )
+ return 1
+
+ for hook_type in _hook_types(config_file, hook_types):
+ _install_hook_script(
+ config_file, hook_type,
+ overwrite=overwrite,
+ skip_on_missing_config=skip_on_missing_config,
+ git_dir=git_dir,
+ )
+
+ if hooks:
+ install_hooks(config_file, store)
+
+ return 0
+
+
+def install_hooks(config_file: str, store: Store) -> int:
+ install_hook_envs(all_hooks(load_config(config_file), store), store)
+ return 0
+
+
+def _uninstall_hook_script(hook_type: str) -> None:
+ hook_path, legacy_path = _hook_paths(hook_type)
+
+ # If our file doesn't exist or it isn't ours, gtfo.
+ if not os.path.exists(hook_path) or not is_our_script(hook_path):
+ return
+
+ os.remove(hook_path)
+ output.write_line(f'{hook_type} uninstalled')
+
+ if os.path.exists(legacy_path):
+ os.replace(legacy_path, hook_path)
+ output.write_line(f'Restored previous hooks to {hook_path}')
+
+
+def uninstall(config_file: str, hook_types: list[str] | None) -> int:
+ for hook_type in _hook_types(config_file, hook_types):
+ _uninstall_hook_script(hook_type)
+ return 0
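
_install_hook_script only rewrites the region between the TEMPLATE_START and TEMPLATE_END markers of the bundled hook template, filling in the interpreter path and the hook-impl arguments. A sketch of roughly what that templated region ends up containing; the config path and hook type below are assumed example values:

import shlex
import sys

# assumed example arguments; the real ones come from the install invocation
args = ['hook-impl', '--config=.pre-commit-config.yaml', '--hook-type=pre-commit']
print('# start templated')
print(f'INSTALL_PYTHON={shlex.quote(sys.executable)}')
print(f'ARGS=({shlex.join(args)})')
print('# end templated')
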
diff --git a/pre_commit/commands/migrate_config.py b/pre_commit/commands/migrate_config.py
new file mode 100644
index 0000000..842fb3a
--- /dev/null
+++ b/pre_commit/commands/migrate_config.py
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+import re
+import textwrap
+
+import cfgv
+import yaml
+
+from pre_commit.clientlib import InvalidConfigError
+from pre_commit.yaml import yaml_load
+
+
+def _is_header_line(line: str) -> bool:
+ return line.startswith(('#', '---')) or not line.strip()
+
+
+def _migrate_map(contents: str) -> str:
+ if isinstance(yaml_load(contents), list):
+ # Find the first non-header line
+ lines = contents.splitlines(True)
+ i = 0
+ # Only loop on non empty configuration file
+ while i < len(lines) and _is_header_line(lines[i]):
+ i += 1
+
+ header = ''.join(lines[:i])
+ rest = ''.join(lines[i:])
+
+ # If they are using the "default" flow style of yaml, this operation
+ # will yield a valid configuration
+ try:
+ trial_contents = f'{header}repos:\n{rest}'
+ yaml_load(trial_contents)
+ contents = trial_contents
+ except yaml.YAMLError:
+ contents = f'{header}repos:\n{textwrap.indent(rest, " " * 4)}'
+
+ return contents
+
+
+def _migrate_sha_to_rev(contents: str) -> str:
+ return re.sub(r'(\n\s+)sha:', r'\1rev:', contents)
+
+
+def _migrate_python_venv(contents: str) -> str:
+ return re.sub(
+ r'(\n\s+)language: python_venv\b',
+ r'\1language: python',
+ contents,
+ )
+
+
+def migrate_config(config_file: str, quiet: bool = False) -> int:
+ with open(config_file) as f:
+ orig_contents = contents = f.read()
+
+ with cfgv.reraise_as(InvalidConfigError):
+ with cfgv.validate_context(f'File {config_file}'):
+ try:
+ yaml_load(orig_contents)
+ except Exception as e:
+ raise cfgv.ValidationError(str(e))
+
+ contents = _migrate_map(contents)
+ contents = _migrate_sha_to_rev(contents)
+ contents = _migrate_python_venv(contents)
+
+ if contents != orig_contents:
+ with open(config_file, 'w') as f:
+ f.write(contents)
+
+ print('Configuration has been migrated.')
+ elif not quiet:
+ print('Configuration is already migrated.')
+ return 0
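
The migration path takes a legacy top-level-list configuration, nests it under a `repos:` key (indenting only when the flow style demands it), and renames `sha:` keys to `rev:`. A small illustration with an invented old-style entry, exercising only the sha-to-rev substitution used by _migrate_sha_to_rev:

import re

# invented legacy entry: a bare top-level list that still uses `sha:`
old = (
    '-   repo: https://github.com/pre-commit/pre-commit-hooks\n'
    '    sha: v1.0.0\n'
    '    hooks:\n'
    '    -   id: check-yaml\n'
)

# nest under `repos:` (block style needs no extra indentation) and rename sha -> rev
migrated = 'repos:\n' + re.sub(r'(\n\s+)sha:', r'\1rev:', old)
print(migrated)
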
diff --git a/pre_commit/commands/run.py b/pre_commit/commands/run.py
new file mode 100644
index 0000000..076f16d
--- /dev/null
+++ b/pre_commit/commands/run.py
@@ -0,0 +1,447 @@
+from __future__ import annotations
+
+import argparse
+import contextlib
+import functools
+import logging
+import os
+import re
+import subprocess
+import time
+import unicodedata
+from collections.abc import Generator
+from collections.abc import Iterable
+from collections.abc import MutableMapping
+from collections.abc import Sequence
+from typing import Any
+
+from identify.identify import tags_from_path
+
+from pre_commit import color
+from pre_commit import git
+from pre_commit import output
+from pre_commit.all_languages import languages
+from pre_commit.clientlib import load_config
+from pre_commit.hook import Hook
+from pre_commit.repository import all_hooks
+from pre_commit.repository import install_hook_envs
+from pre_commit.staged_files_only import staged_files_only
+from pre_commit.store import Store
+from pre_commit.util import cmd_output_b
+
+
+logger = logging.getLogger('pre_commit')
+
+
+def _len_cjk(msg: str) -> int:
+ widths = {'A': 1, 'F': 2, 'H': 1, 'N': 1, 'Na': 1, 'W': 2}
+ return sum(widths[unicodedata.east_asian_width(c)] for c in msg)
+
+
+def _start_msg(*, start: str, cols: int, end_len: int) -> str:
+ dots = '.' * (cols - _len_cjk(start) - end_len - 1)
+ return f'{start}{dots}'
+
+
+def _full_msg(
+ *,
+ start: str,
+ cols: int,
+ end_msg: str,
+ end_color: str,
+ use_color: bool,
+ postfix: str = '',
+) -> str:
+ dots = '.' * (cols - _len_cjk(start) - len(postfix) - len(end_msg) - 1)
+ end = color.format_color(end_msg, end_color, use_color)
+ return f'{start}{dots}{postfix}{end}\n'
+
+
+def filter_by_include_exclude(
+ names: Iterable[str],
+ include: str,
+ exclude: str,
+) -> Generator[str, None, None]:
+ include_re, exclude_re = re.compile(include), re.compile(exclude)
+ return (
+ filename for filename in names
+ if include_re.search(filename)
+ if not exclude_re.search(filename)
+ )
+
+
+class Classifier:
+ def __init__(self, filenames: Iterable[str]) -> None:
+ self.filenames = [f for f in filenames if os.path.lexists(f)]
+
+ @functools.cache
+ def _types_for_file(self, filename: str) -> set[str]:
+ return tags_from_path(filename)
+
+ def by_types(
+ self,
+ names: Iterable[str],
+ types: Iterable[str],
+ types_or: Iterable[str],
+ exclude_types: Iterable[str],
+ ) -> Generator[str, None, None]:
+ types = frozenset(types)
+ types_or = frozenset(types_or)
+ exclude_types = frozenset(exclude_types)
+ for filename in names:
+ tags = self._types_for_file(filename)
+ if (
+ tags >= types and
+ (not types_or or tags & types_or) and
+ not tags & exclude_types
+ ):
+ yield filename
+
+ def filenames_for_hook(self, hook: Hook) -> Generator[str, None, None]:
+ return self.by_types(
+ filter_by_include_exclude(
+ self.filenames,
+ hook.files,
+ hook.exclude,
+ ),
+ hook.types,
+ hook.types_or,
+ hook.exclude_types,
+ )
+
+ @classmethod
+ def from_config(
+ cls,
+ filenames: Iterable[str],
+ include: str,
+ exclude: str,
+ ) -> Classifier:
+ # on windows we normalize all filenames to use forward slashes
+ # this makes it easier to filter using the `files:` regex
+ # this also makes improperly quoted shell-based hooks work better
+ # see #1173
+ if os.altsep == '/' and os.sep == '\\':
+ filenames = (f.replace(os.sep, os.altsep) for f in filenames)
+ filenames = filter_by_include_exclude(filenames, include, exclude)
+ return Classifier(filenames)
+
+
+def _get_skips(environ: MutableMapping[str, str]) -> set[str]:
+ skips = environ.get('SKIP', '')
+ return {skip.strip() for skip in skips.split(',') if skip.strip()}
+
+
+SKIPPED = 'Skipped'
+NO_FILES = '(no files to check)'
+
+
+def _subtle_line(s: str, use_color: bool) -> None:
+ output.write_line(color.format_color(s, color.SUBTLE, use_color))
+
+
+def _run_single_hook(
+ classifier: Classifier,
+ hook: Hook,
+ skips: set[str],
+ cols: int,
+ diff_before: bytes,
+ verbose: bool,
+ use_color: bool,
+) -> tuple[bool, bytes]:
+ filenames = tuple(classifier.filenames_for_hook(hook))
+
+ if hook.id in skips or hook.alias in skips:
+ output.write(
+ _full_msg(
+ start=hook.name,
+ end_msg=SKIPPED,
+ end_color=color.YELLOW,
+ use_color=use_color,
+ cols=cols,
+ ),
+ )
+ duration = None
+ retcode = 0
+ diff_after = diff_before
+ files_modified = False
+ out = b''
+ elif not filenames and not hook.always_run:
+ output.write(
+ _full_msg(
+ start=hook.name,
+ postfix=NO_FILES,
+ end_msg=SKIPPED,
+ end_color=color.TURQUOISE,
+ use_color=use_color,
+ cols=cols,
+ ),
+ )
+ duration = None
+ retcode = 0
+ diff_after = diff_before
+ files_modified = False
+ out = b''
+ else:
+ # print hook and dots first in case the hook takes a while to run
+ output.write(_start_msg(start=hook.name, end_len=6, cols=cols))
+
+ if not hook.pass_filenames:
+ filenames = ()
+ time_before = time.monotonic()
+ language = languages[hook.language]
+ with language.in_env(hook.prefix, hook.language_version):
+ retcode, out = language.run_hook(
+ hook.prefix,
+ hook.entry,
+ hook.args,
+ filenames,
+ is_local=hook.src == 'local',
+ require_serial=hook.require_serial,
+ color=use_color,
+ )
+ duration = round(time.monotonic() - time_before, 2) or 0
+ diff_after = _get_diff()
+
+ # if the hook makes changes, fail the commit
+ files_modified = diff_before != diff_after
+
+ if retcode or files_modified:
+ print_color = color.RED
+ status = 'Failed'
+ else:
+ print_color = color.GREEN
+ status = 'Passed'
+
+ output.write_line(color.format_color(status, print_color, use_color))
+
+ if verbose or hook.verbose or retcode or files_modified:
+ _subtle_line(f'- hook id: {hook.id}', use_color)
+
+ if (verbose or hook.verbose) and duration is not None:
+ _subtle_line(f'- duration: {duration}s', use_color)
+
+ if retcode:
+ _subtle_line(f'- exit code: {retcode}', use_color)
+
+ # Print a message if failing due to file modifications
+ if files_modified:
+ _subtle_line('- files were modified by this hook', use_color)
+
+ if out.strip():
+ output.write_line()
+ output.write_line_b(out.strip(), logfile_name=hook.log_file)
+ output.write_line()
+
+ return files_modified or bool(retcode), diff_after
+
+
+def _compute_cols(hooks: Sequence[Hook]) -> int:
+ """Compute the number of columns to display hook messages. The widest
+ that will be displayed is in the no files skipped case:
+
+ Hook name...(no files to check) Skipped
+ """
+ if hooks:
+ name_len = max(_len_cjk(hook.name) for hook in hooks)
+ else:
+ name_len = 0
+
+ cols = name_len + 3 + len(NO_FILES) + 1 + len(SKIPPED)
+ return max(cols, 80)
+
+
+def _all_filenames(args: argparse.Namespace) -> Iterable[str]:
+ # these hooks do not operate on files
+ if args.hook_stage in {
+ 'post-checkout', 'post-commit', 'post-merge', 'post-rewrite',
+ 'pre-rebase',
+ }:
+ return ()
+ elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}:
+ return (args.commit_msg_filename,)
+ elif args.from_ref and args.to_ref:
+ return git.get_changed_files(args.from_ref, args.to_ref)
+ elif args.files:
+ return args.files
+ elif args.all_files:
+ return git.get_all_files()
+ elif git.is_in_merge_conflict():
+ return git.get_conflicted_files()
+ else:
+ return git.get_staged_files()
+
+
+def _get_diff() -> bytes:
+ _, out, _ = cmd_output_b(
+ 'git', 'diff', '--no-ext-diff', '--no-textconv', '--ignore-submodules',
+ check=False,
+ )
+ return out
+
+
+def _run_hooks(
+ config: dict[str, Any],
+ hooks: Sequence[Hook],
+ skips: set[str],
+ args: argparse.Namespace,
+) -> int:
+ """Actually run the hooks."""
+ cols = _compute_cols(hooks)
+ classifier = Classifier.from_config(
+ _all_filenames(args), config['files'], config['exclude'],
+ )
+ retval = 0
+ prior_diff = _get_diff()
+ for hook in hooks:
+ current_retval, prior_diff = _run_single_hook(
+ classifier, hook, skips, cols, prior_diff,
+ verbose=args.verbose, use_color=args.color,
+ )
+ retval |= current_retval
+ if retval and (config['fail_fast'] or hook.fail_fast):
+ break
+ if retval and args.show_diff_on_failure and prior_diff:
+ if args.all_files:
+ output.write_line(
+ 'pre-commit hook(s) made changes.\n'
+ 'If you are seeing this message in CI, '
+ 'reproduce locally with: `pre-commit run --all-files`.\n'
+ 'To run `pre-commit` as part of git workflow, use '
+ '`pre-commit install`.',
+ )
+ output.write_line('All changes made by hooks:')
+ # args.color is a boolean.
+ # See user_color function in color.py
+ git_color_opt = 'always' if args.color else 'never'
+ subprocess.call((
+ 'git', '--no-pager', 'diff', '--no-ext-diff',
+ f'--color={git_color_opt}',
+ ))
+
+ return retval
+
+
+def _has_unmerged_paths() -> bool:
+ _, stdout, _ = cmd_output_b('git', 'ls-files', '--unmerged')
+ return bool(stdout.strip())
+
+
+def _has_unstaged_config(config_file: str) -> bool:
+ retcode, _, _ = cmd_output_b(
+ 'git', 'diff', '--quiet', '--no-ext-diff', config_file, check=False,
+ )
+ # be explicit, other git errors don't mean it has an unstaged config.
+ return retcode == 1
+
+
+def run(
+ config_file: str,
+ store: Store,
+ args: argparse.Namespace,
+ environ: MutableMapping[str, str] = os.environ,
+) -> int:
+ stash = not args.all_files and not args.files
+
+ # Check if we have unresolved merge conflict files and fail fast.
+ if stash and _has_unmerged_paths():
+ logger.error('Unmerged files. Resolve before committing.')
+ return 1
+ if bool(args.from_ref) != bool(args.to_ref):
+ logger.error('Specify both --from-ref and --to-ref.')
+ return 1
+ if stash and _has_unstaged_config(config_file):
+ logger.error(
+ f'Your pre-commit configuration is unstaged.\n'
+ f'`git add {config_file}` to fix this.',
+ )
+ return 1
+ if (
+ args.hook_stage in {'prepare-commit-msg', 'commit-msg'} and
+ not args.commit_msg_filename
+ ):
+ logger.error(
+ f'`--commit-msg-filename` is required for '
+ f'`--hook-stage {args.hook_stage}`',
+ )
+ return 1
+ # prevent recursive post-checkout hooks (#1418)
+ if (
+ args.hook_stage == 'post-checkout' and
+ environ.get('_PRE_COMMIT_SKIP_POST_CHECKOUT')
+ ):
+ return 0
+
+ # Expose prepare_commit_message_source / commit_object_name
+ # as environment variables for the hooks
+ if args.prepare_commit_message_source:
+ environ['PRE_COMMIT_COMMIT_MSG_SOURCE'] = (
+ args.prepare_commit_message_source
+ )
+
+ if args.commit_object_name:
+ environ['PRE_COMMIT_COMMIT_OBJECT_NAME'] = args.commit_object_name
+
+ # Expose from-ref / to-ref as environment variables for hooks to consume
+ if args.from_ref and args.to_ref:
+ # legacy names
+ environ['PRE_COMMIT_ORIGIN'] = args.from_ref
+ environ['PRE_COMMIT_SOURCE'] = args.to_ref
+ # new names
+ environ['PRE_COMMIT_FROM_REF'] = args.from_ref
+ environ['PRE_COMMIT_TO_REF'] = args.to_ref
+
+ if args.pre_rebase_upstream and args.pre_rebase_branch:
+ environ['PRE_COMMIT_PRE_REBASE_UPSTREAM'] = args.pre_rebase_upstream
+ environ['PRE_COMMIT_PRE_REBASE_BRANCH'] = args.pre_rebase_branch
+
+ if (
+ args.remote_name and args.remote_url and
+ args.remote_branch and args.local_branch
+ ):
+ environ['PRE_COMMIT_LOCAL_BRANCH'] = args.local_branch
+ environ['PRE_COMMIT_REMOTE_BRANCH'] = args.remote_branch
+ environ['PRE_COMMIT_REMOTE_NAME'] = args.remote_name
+ environ['PRE_COMMIT_REMOTE_URL'] = args.remote_url
+
+ if args.checkout_type:
+ environ['PRE_COMMIT_CHECKOUT_TYPE'] = args.checkout_type
+
+ if args.is_squash_merge:
+ environ['PRE_COMMIT_IS_SQUASH_MERGE'] = args.is_squash_merge
+
+ if args.rewrite_command:
+ environ['PRE_COMMIT_REWRITE_COMMAND'] = args.rewrite_command
+
+ # Set pre_commit flag
+ environ['PRE_COMMIT'] = '1'
+
+ with contextlib.ExitStack() as exit_stack:
+ if stash:
+ exit_stack.enter_context(staged_files_only(store.directory))
+
+ config = load_config(config_file)
+ hooks = [
+ hook
+ for hook in all_hooks(config, store)
+ if not args.hook or hook.id == args.hook or hook.alias == args.hook
+ if args.hook_stage in hook.stages
+ ]
+
+ if args.hook and not hooks:
+ output.write_line(
+ f'No hook with id `{args.hook}` in stage `{args.hook_stage}`',
+ )
+ return 1
+
+ skips = _get_skips(environ)
+ to_install = [
+ hook
+ for hook in hooks
+ if hook.id not in skips and hook.alias not in skips
+ ]
+ install_hook_envs(to_install, store)
+
+ return _run_hooks(config, hooks, skips, args)
+
+ # https://github.com/python/mypy/issues/7726
+ raise AssertionError('unreachable')
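
File selection for each hook is a two-stage generator pipeline: the `files`/`exclude` regexes first, then the identify tag sets (`types`, `types_or`, `exclude_types`). A standalone sketch of the regex stage with hypothetical filenames and patterns (in an actual config the default `exclude` is `^$`, which matches nothing):

import re
from collections.abc import Generator, Iterable


def filter_by_include_exclude(
        names: Iterable[str], include: str, exclude: str,
) -> Generator[str, None, None]:
    include_re, exclude_re = re.compile(include), re.compile(exclude)
    return (
        filename for filename in names
        if include_re.search(filename)
        if not exclude_re.search(filename)
    )


# hypothetical staged files and hook-level patterns
files = ['src/app.py', 'src/app_test.py', 'docs/readme.md']
print(list(filter_by_include_exclude(files, r'\.py$', r'_test\.py$')))
# -> ['src/app.py']
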
diff --git a/pre_commit/commands/sample_config.py b/pre_commit/commands/sample_config.py
new file mode 100644
index 0000000..ce22f65
--- /dev/null
+++ b/pre_commit/commands/sample_config.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+SAMPLE_CONFIG = '''\
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v3.2.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ - id: check-added-large-files
+'''
+
+
+def sample_config() -> int:
+ print(SAMPLE_CONFIG, end='')
+ return 0
diff --git a/pre_commit/commands/try_repo.py b/pre_commit/commands/try_repo.py
new file mode 100644
index 0000000..539ed3c
--- /dev/null
+++ b/pre_commit/commands/try_repo.py
@@ -0,0 +1,77 @@
+from __future__ import annotations
+
+import argparse
+import logging
+import os.path
+import tempfile
+
+import pre_commit.constants as C
+from pre_commit import git
+from pre_commit import output
+from pre_commit.clientlib import load_manifest
+from pre_commit.commands.run import run
+from pre_commit.store import Store
+from pre_commit.util import cmd_output_b
+from pre_commit.xargs import xargs
+from pre_commit.yaml import yaml_dump
+
+logger = logging.getLogger(__name__)
+
+
+def _repo_ref(tmpdir: str, repo: str, ref: str | None) -> tuple[str, str]:
+ # if `ref` is explicitly passed, use it
+ if ref is not None:
+ return repo, ref
+
+ ref = git.head_rev(repo)
+ # if it exists on disk, we'll try and clone it with the local changes
+ if os.path.exists(repo) and git.has_diff('HEAD', repo=repo):
+ logger.warning('Creating temporary repo with uncommitted changes...')
+
+ shadow = os.path.join(tmpdir, 'shadow-repo')
+ cmd_output_b('git', 'clone', repo, shadow)
+ cmd_output_b('git', 'checkout', ref, '-b', '_pc_tmp', cwd=shadow)
+
+ idx = git.git_path('index', repo=shadow)
+ objs = git.git_path('objects', repo=shadow)
+ env = dict(os.environ, GIT_INDEX_FILE=idx, GIT_OBJECT_DIRECTORY=objs)
+
+ staged_files = git.get_staged_files(cwd=repo)
+ if staged_files:
+ xargs(('git', 'add', '--'), staged_files, cwd=repo, env=env)
+
+ cmd_output_b('git', 'add', '-u', cwd=repo, env=env)
+ git.commit(repo=shadow)
+
+ return shadow, git.head_rev(shadow)
+ else:
+ return repo, ref
+
+
+def try_repo(args: argparse.Namespace) -> int:
+ with tempfile.TemporaryDirectory() as tempdir:
+ repo, ref = _repo_ref(tempdir, args.repo, args.ref)
+
+ store = Store(tempdir)
+ if args.hook:
+ hooks = [{'id': args.hook}]
+ else:
+ repo_path = store.clone(repo, ref)
+ manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
+ manifest = sorted(manifest, key=lambda hook: hook['id'])
+ hooks = [{'id': hook['id']} for hook in manifest]
+
+ config = {'repos': [{'repo': repo, 'rev': ref, 'hooks': hooks}]}
+ config_s = yaml_dump(config)
+
+ config_filename = os.path.join(tempdir, C.CONFIG_FILE)
+ with open(config_filename, 'w') as cfg:
+ cfg.write(config_s)
+
+ output.write_line('=' * 79)
+ output.write_line('Using config:')
+ output.write_line('=' * 79)
+ output.write(config_s)
+ output.write_line('=' * 79)
+
+ return run(config_filename, store, args)
diff --git a/pre_commit/commands/validate_config.py b/pre_commit/commands/validate_config.py
new file mode 100644
index 0000000..b3de635
--- /dev/null
+++ b/pre_commit/commands/validate_config.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from collections.abc import Sequence
+
+from pre_commit import clientlib
+
+
+def validate_config(filenames: Sequence[str]) -> int:
+ ret = 0
+
+ for filename in filenames:
+ try:
+ clientlib.load_config(filename)
+ except clientlib.InvalidConfigError as e:
+ print(e)
+ ret = 1
+
+ return ret
diff --git a/pre_commit/commands/validate_manifest.py b/pre_commit/commands/validate_manifest.py
new file mode 100644
index 0000000..8493c6e
--- /dev/null
+++ b/pre_commit/commands/validate_manifest.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from collections.abc import Sequence
+
+from pre_commit import clientlib
+
+
+def validate_manifest(filenames: Sequence[str]) -> int:
+ ret = 0
+
+ for filename in filenames:
+ try:
+ clientlib.load_manifest(filename)
+ except clientlib.InvalidManifestError as e:
+ print(e)
+ ret = 1
+
+ return ret