
Merging upstream version 2.5.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-09 21:20:28 +01:00
parent 344ec6ad68
commit 46a56c0856
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
37 changed files with 457 additions and 213 deletions

@@ -84,7 +84,9 @@ def _check_hooks_still_exist_at_rev(
         )
 
 
-REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([^\s#]+)(.*)(\r?\n)$', re.DOTALL)
+REV_LINE_RE = re.compile(
+    r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$', re.DOTALL,
+)
 
 
 def _original_lines(
@@ -116,15 +118,15 @@ def _write_new_config(path: str, rev_infos: List[Optional[RevInfo]]) -> None:
             continue
         match = REV_LINE_RE.match(lines[idx])
         assert match is not None
-        new_rev_s = yaml_dump({'rev': rev_info.rev})
+        new_rev_s = yaml_dump({'rev': rev_info.rev}, default_style=match[3])
         new_rev = new_rev_s.split(':', 1)[1].strip()
         if rev_info.frozen is not None:
             comment = f'  # frozen: {rev_info.frozen}'
-        elif match[4].strip().startswith('# frozen:'):
+        elif match[5].strip().startswith('# frozen:'):
             comment = ''
         else:
-            comment = match[4]
-        lines[idx] = f'{match[1]}rev:{match[2]}{new_rev}{comment}{match[5]}'
+            comment = match[5]
+        lines[idx] = f'{match[1]}rev:{match[2]}{new_rev}{comment}{match[6]}'
 
     with open(path, 'w', newline='') as f:
         f.write(''.join(lines))
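
Taken together, the two hunks above make `pre-commit autoupdate` preserve the quoting style of the `rev:` value: the new capture group 3 grabs an optional opening quote, and passing it back to `yaml_dump` as `default_style` re-emits the bumped rev quoted the same way. A minimal standalone sketch of the idea, using PyYAML directly instead of `pre_commit.util.yaml_dump` and an invented config line:

import re
import yaml

# same pattern as the new REV_LINE_RE: group 3 captures an optional opening quote
REV_LINE_RE = re.compile(
    r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$', re.DOTALL,
)

line = "    rev: 'v2.4.0'\n"
match = REV_LINE_RE.match(line)
assert match is not None
# feeding the captured quote back in as default_style keeps the original quoting
new_rev_s = yaml.dump(
    {'rev': 'v2.5.1'}, default_flow_style=False, default_style=match[3],
)
new_rev = new_rev_s.split(':', 1)[1].strip()
print(f'{match[1]}rev:{match[2]}{new_rev}{match[5]}{match[6]}', end='')
# prints:     rev: 'v2.5.1'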

@@ -150,6 +150,7 @@ def _pre_push_ns(
 _EXPECTED_ARG_LENGTH_BY_HOOK = {
     'commit-msg': 1,
     'post-checkout': 3,
+    'post-commit': 0,
     'pre-commit': 0,
     'pre-merge-commit': 0,
     'pre-push': 2,
@@ -186,7 +187,7 @@ def _run_ns(
         return _pre_push_ns(color, args, stdin)
     elif hook_type in {'commit-msg', 'prepare-commit-msg'}:
         return _ns(hook_type, color, commit_msg_filename=args[0])
-    elif hook_type in {'pre-merge-commit', 'pre-commit'}:
+    elif hook_type in {'post-commit', 'pre-merge-commit', 'pre-commit'}:
         return _ns(hook_type, color)
     elif hook_type == 'post-checkout':
         return _ns(
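
The two hunks directly above teach the hook entry point about `post-commit`: git passes it no positional arguments, so it is validated against an expected length of 0 and dispatched exactly like `pre-commit`/`pre-merge-commit`. A simplified illustration of how such a table drives early validation (the helper name and message below are invented, not pre-commit's internals):

from typing import Sequence

# how many positional arguments git hands to each hook type
EXPECTED_ARG_LENGTH = {
    'commit-msg': 1,        # path to the commit message file
    'post-checkout': 3,     # previous HEAD, new HEAD, checkout flag
    'post-commit': 0,       # nothing: the commit already happened
    'pre-commit': 0,
    'pre-merge-commit': 0,
    'pre-push': 2,          # remote name, remote URL (refs arrive on stdin)
}


def check_args(hook_type: str, args: Sequence[str]) -> None:
    """Fail fast if the installed hook script passed the wrong argument count."""
    expected = EXPECTED_ARG_LENGTH[hook_type]
    if len(args) != expected:
        raise SystemExit(
            f'{hook_type}: expected {expected} argument(s), got {len(args)}: {args}',
        )


check_args('post-commit', [])            # ok: no arguments
# check_args('post-checkout', ['HEAD'])  # would exit: 3 arguments expected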

@@ -2,6 +2,7 @@ import re
 
 import yaml
 
+from pre_commit.clientlib import load_config
 from pre_commit.util import yaml_load
 
 
@@ -43,6 +44,9 @@ def _migrate_sha_to_rev(contents: str) -> str:
 
 
 def migrate_config(config_file: str, quiet: bool = False) -> int:
+    # ensure that the configuration is a valid pre-commit configuration
+    load_config(config_file)
+
     with open(config_file) as f:
         orig_contents = contents = f.read()
 
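
The two migrate-config hunks add an up-front `load_config()` call so that an invalid configuration is rejected before the file is rewritten. For context, the `_migrate_sha_to_rev` step named in the hunk header renames the legacy `sha:` key to `rev:`; a rough, self-contained sketch of that kind of rewrite (not the upstream implementation):

import re


def migrate_sha_to_rev(contents: str) -> str:
    # rename the legacy `sha:` key to `rev:` without otherwise touching the file
    return re.sub(r'(\n\s+)sha:', r'\1rev:', contents)


old = (
    'repos:\n'
    '-   repo: https://github.com/pre-commit/pre-commit-hooks\n'
    '    sha: v3.1.0\n'
)
print(migrate_sha_to_rev(old), end='')
# repos:
# -   repo: https://github.com/pre-commit/pre-commit-hooks
#     rev: v3.1.0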

@@ -72,13 +72,7 @@ def filter_by_include_exclude(
 
 
 class Classifier:
-    def __init__(self, filenames: Sequence[str]) -> None:
-        # on windows we normalize all filenames to use forward slashes
-        # this makes it easier to filter using the `files:` regex
-        # this also makes improperly quoted shell-based hooks work better
-        # see #1173
-        if os.altsep == '/' and os.sep == '\\':
-            filenames = [f.replace(os.sep, os.altsep) for f in filenames]
+    def __init__(self, filenames: Collection[str]) -> None:
         self.filenames = [f for f in filenames if os.path.lexists(f)]
 
     @functools.lru_cache(maxsize=None)
@@ -105,6 +99,22 @@ class Classifier:
         names = self.by_types(names, hook.types, hook.exclude_types)
         return tuple(names)
 
+    @classmethod
+    def from_config(
+            cls,
+            filenames: Collection[str],
+            include: str,
+            exclude: str,
+    ) -> 'Classifier':
+        # on windows we normalize all filenames to use forward slashes
+        # this makes it easier to filter using the `files:` regex
+        # this also makes improperly quoted shell-based hooks work better
+        # see #1173
+        if os.altsep == '/' and os.sep == '\\':
+            filenames = [f.replace(os.sep, os.altsep) for f in filenames]
+        filenames = filter_by_include_exclude(filenames, include, exclude)
+        return Classifier(filenames)
+
 
 def _get_skips(environ: EnvironT) -> Set[str]:
     skips = environ.get('SKIP', '')
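
The Classifier hunks move the Windows path normalization and the repo-level `files:`/`exclude:` filtering out of `__init__` and into a `from_config` constructor, so every caller gets both steps in one place. A self-contained approximation of what that constructor does with its inputs (`filter_by_include_exclude` here is a stand-in with the same include/exclude-regex behaviour, not the upstream function):

import os
import re
from typing import Collection, List


def filter_by_include_exclude(
        names: Collection[str], include: str, exclude: str,
) -> List[str]:
    include_re, exclude_re = re.compile(include), re.compile(exclude)
    return [
        name for name in names
        if include_re.search(name) and not exclude_re.search(name)
    ]


filenames = ['docs\\index.md', 'src\\app.py']
# step 1: on windows, normalize to forward slashes so `files:` regexes match
if os.altsep == '/' and os.sep == '\\':
    filenames = [f.replace(os.sep, os.altsep) for f in filenames]
# step 2: apply the top-level files/exclude settings before per-hook filtering
print(filter_by_include_exclude(filenames, include=r'\.py$', exclude=r'^$'))
# on windows: ['src/app.py']; elsewhere the backslashes are left untouched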
@@ -221,7 +231,8 @@ def _compute_cols(hooks: Sequence[Hook]) -> int:
 
 
 def _all_filenames(args: argparse.Namespace) -> Collection[str]:
-    if args.hook_stage == 'post-checkout':  # no files for post-checkout
+    # these hooks do not operate on files
+    if args.hook_stage in {'post-checkout', 'post-commit'}:
         return ()
     elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}:
         return (args.commit_msg_filename,)
@@ -246,10 +257,9 @@ def _run_hooks(
     """Actually run the hooks."""
     skips = _get_skips(environ)
     cols = _compute_cols(hooks)
-    filenames = filter_by_include_exclude(
+    classifier = Classifier.from_config(
         _all_filenames(args), config['files'], config['exclude'],
     )
-    classifier = Classifier(filenames)
     retval = 0
     for hook in hooks:
         retval |= _run_single_hook(
@@ -323,6 +333,12 @@ def run(
             f'`--hook-stage {args.hook_stage}`',
         )
         return 1
+    # prevent recursive post-checkout hooks (#1418)
+    if (
+            args.hook_stage == 'post-checkout' and
+            environ.get('_PRE_COMMIT_SKIP_POST_CHECKOUT')
+    ):
+        return 0
 
     # Expose from-ref / to-ref as environment variables for hooks to consume
     if args.from_ref and args.to_ref:
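
The guard above addresses #1418: pre-commit performs its own `git checkout`/`git clone` operations while preparing hook repositories, and with a post-checkout hook installed those would re-enter pre-commit recursively. The other half of the mechanism (not shown in this diff) is that pre-commit's own git calls export `_PRE_COMMIT_SKIP_POST_CHECKOUT`; a hypothetical sketch of that calling side:

import os
import subprocess


def checkout_without_reentry(rev: str) -> None:
    # any non-empty value works: the short-circuit above only checks that the
    # variable is set, so the installed post-checkout hook exits immediately
    env = dict(os.environ, _PRE_COMMIT_SKIP_POST_CHECKOUT='1')
    subprocess.check_call(('git', 'checkout', rev), env=env)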
@@ -340,6 +356,9 @@ def run(
     if args.checkout_type:
         environ['PRE_COMMIT_CHECKOUT_TYPE'] = args.checkout_type
 
+    # Set pre_commit flag
+    environ['PRE_COMMIT'] = '1'
+
     with contextlib.ExitStack() as exit_stack:
         if stash:
             exit_stack.enter_context(staged_files_only(store.directory))
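
The last hunk exports `PRE_COMMIT=1` before any hook runs, so hook processes can detect that they were launched by pre-commit rather than invoked by hand. A small example of a hook script making use of that flag (the script itself is invented):

import os
import sys


def main() -> int:
    # inherited from the `environ['PRE_COMMIT'] = '1'` line set by pre-commit
    if os.environ.get('PRE_COMMIT'):
        print('running under pre-commit; skipping interactive prompts')
    return 0


if __name__ == '__main__':
    sys.exit(main())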