
Adding upstream version 4.0.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-09 21:51:25 +01:00
parent 0de0fde28c
commit c4faf5b6cb
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
44 changed files with 596 additions and 105 deletions


@ -1,6 +1,6 @@
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0 rev: v5.0.0
hooks: hooks:
- id: trailing-whitespace - id: trailing-whitespace
- id: end-of-file-fixer - id: end-of-file-fixer
@ -24,7 +24,7 @@ repos:
hooks: hooks:
- id: add-trailing-comma - id: add-trailing-comma
- repo: https://github.com/asottile/pyupgrade - repo: https://github.com/asottile/pyupgrade
rev: v3.16.0 rev: v3.17.0
hooks: hooks:
- id: pyupgrade - id: pyupgrade
args: [--py39-plus] args: [--py39-plus]
@ -33,11 +33,11 @@ repos:
hooks: hooks:
- id: autopep8 - id: autopep8
- repo: https://github.com/PyCQA/flake8 - repo: https://github.com/PyCQA/flake8
rev: 7.1.0 rev: 7.1.1
hooks: hooks:
- id: flake8 - id: flake8
- repo: https://github.com/pre-commit/mirrors-mypy - repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.11.0 rev: v1.11.2
hooks: hooks:
- id: mypy - id: mypy
additional_dependencies: [types-pyyaml] additional_dependencies: [types-pyyaml]


@ -1,3 +1,37 @@
4.0.1 - 2024-10-08
==================
### Fixes
- Fix `pre-commit migrate-config` for unquoted deprecated stage names with
purelib `pyyaml`.
- #3324 PR by @asottile.
- pre-commit-ci/issues#234 issue by @lorenzwalthert.
4.0.0 - 2024-10-05
==================
### Features
- Improve `pre-commit migrate-config` to handle more yaml formats.
- #3301 PR by @asottile.
- Handle `stages` deprecation in `pre-commit migrate-config`.
- #3302 PR by @asottile.
- #2732 issue by @asottile.
- Upgrade `ruby-build`.
- #3199 PR by @ThisGuyCodes.
- Add "sensible regex" warnings to `repo: meta`.
- #3311 PR by @asottile.
- Add warnings for deprecated `stages` (`commit` -> `pre-commit`, `push` ->
`pre-push`, `merge-commit` -> `pre-merge-commit`).
- #3312 PR by @asottile.
- #3313 PR by @asottile.
- #3315 PR by @asottile.
- #2732 issue by @asottile.
### Migrating
- `language: python_venv` has been removed -- use `language: python` instead.
- #3320 PR by @asottile.
- #2734 issue by @asottile.
3.8.0 - 2024-07-28 3.8.0 - 2024-07-28
================== ==================
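
A hedged sketch (not part of this diff) of the migrate-config behaviour the 4.0.x entries above describe. The temporary file and its contents are invented for illustration; `migrate_config` is the function changed further down, and the expected result follows the tests added in this commit.

import tempfile

from pre_commit.commands.migrate_config import migrate_config

OLD = '''\
default_stages: [commit, push]
repos:
-   repo: https://github.com/pre-commit/pre-commit-hooks
    sha: v5.0.0
    hooks:
    -   id: trailing-whitespace
        language: python_venv
        stages: ["commit", "merge-commit"]
'''

with tempfile.NamedTemporaryFile('w', suffix='.yaml', delete=False) as tmp:
    tmp.write(OLD)

assert migrate_config(tmp.name) == 0  # rewrites the file in place

with open(tmp.name) as f:
    print(f.read())
# default_stages: [pre-commit, pre-push]
# repos:
# -   repo: https://github.com/pre-commit/pre-commit-hooks
#     rev: v5.0.0
#     hooks:
#     -   id: trailing-whitespace
#         language: python
#         stages: ["pre-commit", "pre-merge-commit"]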


@ -44,7 +44,5 @@ languages: dict[str, Language] = {
'script': script, 'script': script,
'swift': swift, 'swift': swift,
'system': system, 'system': system,
# TODO: fully deprecate `python_venv`
'python_venv': python,
} }
language_names = sorted(languages) language_names = sorted(languages)


@ -2,6 +2,7 @@ from __future__ import annotations
import functools import functools
import logging import logging
import os.path
import re import re
import shlex import shlex
import sys import sys
@ -70,6 +71,43 @@ def transform_stage(stage: str) -> str:
return _STAGES.get(stage, stage) return _STAGES.get(stage, stage)
MINIMAL_MANIFEST_SCHEMA = cfgv.Array(
cfgv.Map(
'Hook', 'id',
cfgv.Required('id', cfgv.check_string),
cfgv.Optional('stages', cfgv.check_array(cfgv.check_string), []),
),
)
def warn_for_stages_on_repo_init(repo: str, directory: str) -> None:
try:
manifest = cfgv.load_from_filename(
os.path.join(directory, C.MANIFEST_FILE),
schema=MINIMAL_MANIFEST_SCHEMA,
load_strategy=yaml_load,
exc_tp=InvalidManifestError,
)
except InvalidManifestError:
return # they'll get a better error message when it actually loads!
legacy_stages = {} # sorted set
for hook in manifest:
for stage in hook.get('stages', ()):
if stage in _STAGES:
legacy_stages[stage] = True
if legacy_stages:
logger.warning(
f'repo `{repo}` uses deprecated stage names '
f'({", ".join(legacy_stages)}) which will be removed in a '
f'future version. '
f'Hint: often `pre-commit autoupdate --repo {shlex.quote(repo)}` '
f'will fix this. '
f'if it does not -- consider reporting an issue to that repo.',
)
class StagesMigrationNoDefault(NamedTuple): class StagesMigrationNoDefault(NamedTuple):
key: str key: str
default: Sequence[str] default: Sequence[str]
@ -99,6 +137,58 @@ class StagesMigration(StagesMigrationNoDefault):
super().apply_default(dct) super().apply_default(dct)
class DeprecatedStagesWarning(NamedTuple):
key: str
def check(self, dct: dict[str, Any]) -> None:
if self.key not in dct:
return
val = dct[self.key]
cfgv.check_array(cfgv.check_any)(val)
legacy_stages = [stage for stage in val if stage in _STAGES]
if legacy_stages:
logger.warning(
f'hook id `{dct["id"]}` uses deprecated stage names '
f'({", ".join(legacy_stages)}) which will be removed in a '
f'future version. '
f'run: `pre-commit migrate-config` to automatically fix this.',
)
def apply_default(self, dct: dict[str, Any]) -> None:
pass
def remove_default(self, dct: dict[str, Any]) -> None:
raise NotImplementedError
class DeprecatedDefaultStagesWarning(NamedTuple):
key: str
def check(self, dct: dict[str, Any]) -> None:
if self.key not in dct:
return
val = dct[self.key]
cfgv.check_array(cfgv.check_any)(val)
legacy_stages = [stage for stage in val if stage in _STAGES]
if legacy_stages:
logger.warning(
f'top-level `default_stages` uses deprecated stage names '
f'({", ".join(legacy_stages)}) which will be removed in a '
f'future version. '
f'run: `pre-commit migrate-config` to automatically fix this.',
)
def apply_default(self, dct: dict[str, Any]) -> None:
pass
def remove_default(self, dct: dict[str, Any]) -> None:
raise NotImplementedError
MANIFEST_HOOK_DICT = cfgv.Map( MANIFEST_HOOK_DICT = cfgv.Map(
'Hook', 'id', 'Hook', 'id',
@ -267,6 +357,12 @@ class NotAllowed(cfgv.OptionalNoDefault):
raise cfgv.ValidationError(f'{self.key!r} cannot be overridden') raise cfgv.ValidationError(f'{self.key!r} cannot be overridden')
_COMMON_HOOK_WARNINGS = (
OptionalSensibleRegexAtHook('files', cfgv.check_string),
OptionalSensibleRegexAtHook('exclude', cfgv.check_string),
DeprecatedStagesWarning('stages'),
)
META_HOOK_DICT = cfgv.Map( META_HOOK_DICT = cfgv.Map(
'Hook', 'id', 'Hook', 'id',
cfgv.Required('id', cfgv.check_string), cfgv.Required('id', cfgv.check_string),
@ -289,6 +385,7 @@ META_HOOK_DICT = cfgv.Map(
item item
for item in MANIFEST_HOOK_DICT.items for item in MANIFEST_HOOK_DICT.items
), ),
*_COMMON_HOOK_WARNINGS,
) )
CONFIG_HOOK_DICT = cfgv.Map( CONFIG_HOOK_DICT = cfgv.Map(
'Hook', 'id', 'Hook', 'id',
@ -306,16 +403,13 @@ CONFIG_HOOK_DICT = cfgv.Map(
if item.key != 'stages' if item.key != 'stages'
), ),
StagesMigrationNoDefault('stages', []), StagesMigrationNoDefault('stages', []),
OptionalSensibleRegexAtHook('files', cfgv.check_string), *_COMMON_HOOK_WARNINGS,
OptionalSensibleRegexAtHook('exclude', cfgv.check_string),
) )
LOCAL_HOOK_DICT = cfgv.Map( LOCAL_HOOK_DICT = cfgv.Map(
'Hook', 'id', 'Hook', 'id',
*MANIFEST_HOOK_DICT.items, *MANIFEST_HOOK_DICT.items,
*_COMMON_HOOK_WARNINGS,
OptionalSensibleRegexAtHook('files', cfgv.check_string),
OptionalSensibleRegexAtHook('exclude', cfgv.check_string),
) )
CONFIG_REPO_DICT = cfgv.Map( CONFIG_REPO_DICT = cfgv.Map(
'Repository', 'repo', 'Repository', 'repo',
@ -368,6 +462,7 @@ CONFIG_SCHEMA = cfgv.Map(
'default_language_version', DEFAULT_LANGUAGE_VERSION, {}, 'default_language_version', DEFAULT_LANGUAGE_VERSION, {},
), ),
StagesMigration('default_stages', STAGES), StagesMigration('default_stages', STAGES),
DeprecatedDefaultStagesWarning('default_stages'),
cfgv.Optional('files', check_string_regex, ''), cfgv.Optional('files', check_string_regex, ''),
cfgv.Optional('exclude', check_string_regex, '^$'), cfgv.Optional('exclude', check_string_regex, '^$'),
cfgv.Optional('fail_fast', cfgv.check_bool, False), cfgv.Optional('fail_fast', cfgv.check_bool, False),


@ -1,13 +1,21 @@
from __future__ import annotations from __future__ import annotations
import re import functools
import itertools
import textwrap import textwrap
from typing import Callable
import cfgv import cfgv
import yaml import yaml
from yaml.nodes import ScalarNode
from pre_commit.clientlib import InvalidConfigError from pre_commit.clientlib import InvalidConfigError
from pre_commit.yaml import yaml_compose
from pre_commit.yaml import yaml_load from pre_commit.yaml import yaml_load
from pre_commit.yaml_rewrite import MappingKey
from pre_commit.yaml_rewrite import MappingValue
from pre_commit.yaml_rewrite import match
from pre_commit.yaml_rewrite import SequenceItem
def _is_header_line(line: str) -> bool: def _is_header_line(line: str) -> bool:
@ -38,16 +46,69 @@ def _migrate_map(contents: str) -> str:
return contents return contents
def _migrate_sha_to_rev(contents: str) -> str: def _preserve_style(n: ScalarNode, *, s: str) -> str:
return re.sub(r'(\n\s+)sha:', r'\1rev:', contents) style = n.style or ''
return f'{style}{s}{style}'
def _migrate_python_venv(contents: str) -> str: def _fix_stage(n: ScalarNode) -> str:
return re.sub( return _preserve_style(n, s=f'pre-{n.value}')
r'(\n\s+)language: python_venv\b',
r'\1language: python',
contents, def _migrate_composed(contents: str) -> str:
tree = yaml_compose(contents)
rewrites: list[tuple[ScalarNode, Callable[[ScalarNode], str]]] = []
# sha -> rev
sha_to_rev_replace = functools.partial(_preserve_style, s='rev')
sha_to_rev_matcher = (
MappingValue('repos'),
SequenceItem(),
MappingKey('sha'),
) )
for node in match(tree, sha_to_rev_matcher):
rewrites.append((node, sha_to_rev_replace))
# python_venv -> python
language_matcher = (
MappingValue('repos'),
SequenceItem(),
MappingValue('hooks'),
SequenceItem(),
MappingValue('language'),
)
python_venv_replace = functools.partial(_preserve_style, s='python')
for node in match(tree, language_matcher):
if node.value == 'python_venv':
rewrites.append((node, python_venv_replace))
# stages rewrites
default_stages_matcher = (MappingValue('default_stages'), SequenceItem())
default_stages_match = match(tree, default_stages_matcher)
hook_stages_matcher = (
MappingValue('repos'),
SequenceItem(),
MappingValue('hooks'),
SequenceItem(),
MappingValue('stages'),
SequenceItem(),
)
hook_stages_match = match(tree, hook_stages_matcher)
for node in itertools.chain(default_stages_match, hook_stages_match):
if node.value in {'commit', 'push', 'merge-commit'}:
rewrites.append((node, _fix_stage))
rewrites.sort(reverse=True, key=lambda nf: nf[0].start_mark.index)
src_parts = []
end: int | None = None
for node, func in rewrites:
src_parts.append(contents[node.end_mark.index:end])
src_parts.append(func(node))
end = node.start_mark.index
src_parts.append(contents[:end])
src_parts.reverse()
return ''.join(src_parts)
def migrate_config(config_file: str, quiet: bool = False) -> int: def migrate_config(config_file: str, quiet: bool = False) -> int:
@ -62,8 +123,7 @@ def migrate_config(config_file: str, quiet: bool = False) -> int:
raise cfgv.ValidationError(str(e)) raise cfgv.ValidationError(str(e))
contents = _migrate_map(contents) contents = _migrate_map(contents)
contents = _migrate_sha_to_rev(contents) contents = _migrate_composed(contents)
contents = _migrate_python_venv(contents)
if contents != orig_contents: if contents != orig_contents:
with open(config_file, 'w') as f: with open(config_file, 'w') as f:
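
In `_migrate_composed` above, every rewrite is recorded as a (scalar node, replacement function) pair and then applied by splicing the original text around each node's `start_mark.index`/`end_mark.index`, working from the end of the file backwards so earlier offsets stay valid. A standalone sketch of that splice pattern -- the `splice` helper and its toy input are invented for illustration:

def splice(src: str, rewrites: list[tuple[int, int, str]]) -> str:
    # rewrites: (start_offset, end_offset, replacement), offsets into src
    rewrites = sorted(rewrites, reverse=True)  # highest offset first
    parts = []
    end = None
    for start, stop, new in rewrites:
        parts.append(src[stop:end])  # untouched text after this span
        parts.append(new)            # the replacement text
        end = start
    parts.append(src[:end])          # everything before the first span
    parts.reverse()
    return ''.join(parts)


s = 'sha: v1\nstages: [commit]\n'
print(splice(s, [(0, 3, 'rev'), (17, 23, 'pre-commit')]), end='')
# rev: v1
# stages: [pre-commit]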


@ -61,7 +61,7 @@ def filter_by_include_exclude(
names: Iterable[str], names: Iterable[str],
include: str, include: str,
exclude: str, exclude: str,
) -> Generator[str, None, None]: ) -> Generator[str]:
include_re, exclude_re = re.compile(include), re.compile(exclude) include_re, exclude_re = re.compile(include), re.compile(exclude)
return ( return (
filename for filename in names filename for filename in names
@ -84,7 +84,7 @@ class Classifier:
types: Iterable[str], types: Iterable[str],
types_or: Iterable[str], types_or: Iterable[str],
exclude_types: Iterable[str], exclude_types: Iterable[str],
) -> Generator[str, None, None]: ) -> Generator[str]:
types = frozenset(types) types = frozenset(types)
types_or = frozenset(types_or) types_or = frozenset(types_or)
exclude_types = frozenset(exclude_types) exclude_types = frozenset(exclude_types)
@ -97,7 +97,7 @@ class Classifier:
): ):
yield filename yield filename
def filenames_for_hook(self, hook: Hook) -> Generator[str, None, None]: def filenames_for_hook(self, hook: Hook) -> Generator[str]:
return self.by_types( return self.by_types(
filter_by_include_exclude( filter_by_include_exclude(
self.filenames, self.filenames,
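
The `Generator[..., None, None]` -> `Generator[...]` annotation changes in this and the following files rely on typeshed now giving `Generator`'s send and return type parameters a default of `None` (PEP 696-style type parameter defaults that recent type checkers understand); since these modules use `from __future__ import annotations`, the shorter spelling is also harmless at runtime on every Python version pre-commit supports. A minimal sketch of the equivalence -- the function names are made up:

from __future__ import annotations

from collections.abc import Generator


def spelled_out() -> Generator[str, None, None]:  # old spelling
    yield 'example'


def with_defaults() -> Generator[str]:  # send/return types default to None
    yield 'example'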


@ -33,7 +33,7 @@ def format_env(parts: SubstitutionT, env: MutableMapping[str, str]) -> str:
def envcontext( def envcontext(
patch: PatchesT, patch: PatchesT,
_env: MutableMapping[str, str] | None = None, _env: MutableMapping[str, str] | None = None,
) -> Generator[None, None, None]: ) -> Generator[None]:
"""In this context, `os.environ` is modified according to `patch`. """In this context, `os.environ` is modified according to `patch`.
`patch` is an iterable of 2-tuples (key, value): `patch` is an iterable of 2-tuples (key, value):


@ -68,7 +68,7 @@ def _log_and_exit(
@contextlib.contextmanager @contextlib.contextmanager
def error_handler() -> Generator[None, None, None]: def error_handler() -> Generator[None]:
try: try:
yield yield
except (Exception, KeyboardInterrupt) as e: except (Exception, KeyboardInterrupt) as e:


@ -20,7 +20,7 @@ if sys.platform == 'win32': # pragma: no cover (windows)
def _locked( def _locked(
fileno: int, fileno: int,
blocked_cb: Callable[[], None], blocked_cb: Callable[[], None],
) -> Generator[None, None, None]: ) -> Generator[None]:
try: try:
msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region) msvcrt.locking(fileno, msvcrt.LK_NBLCK, _region)
except OSError: except OSError:
@ -53,7 +53,7 @@ else: # pragma: win32 no cover
def _locked( def _locked(
fileno: int, fileno: int,
blocked_cb: Callable[[], None], blocked_cb: Callable[[], None],
) -> Generator[None, None, None]: ) -> Generator[None]:
try: try:
fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB) fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB)
except OSError: # pragma: no cover (tests are single-threaded) except OSError: # pragma: no cover (tests are single-threaded)
@ -69,7 +69,7 @@ else: # pragma: win32 no cover
def lock( def lock(
path: str, path: str,
blocked_cb: Callable[[], None], blocked_cb: Callable[[], None],
) -> Generator[None, None, None]: ) -> Generator[None]:
with open(path, 'a+') as f: with open(path, 'a+') as f:
with _locked(f.fileno(), blocked_cb): with _locked(f.fileno(), blocked_cb):
yield yield


@ -127,7 +127,7 @@ def no_install(
@contextlib.contextmanager @contextlib.contextmanager
def no_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def no_env(prefix: Prefix, version: str) -> Generator[None]:
yield yield


@ -41,7 +41,7 @@ def get_env_patch(env: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -70,7 +70,7 @@ def get_env_patch(target_dir: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -29,7 +29,7 @@ def get_env_patch(venv: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -30,14 +30,14 @@ def get_env_patch(venv: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield
@contextlib.contextmanager @contextlib.contextmanager
def _nuget_config_no_sources() -> Generator[str, None, None]: def _nuget_config_no_sources() -> Generator[str]:
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
nuget_config = os.path.join(tmpdir, 'nuget.config') nuget_config = os.path.join(tmpdir, 'nuget.config')
with open(nuget_config, 'w') as f: with open(nuget_config, 'w') as f:


@ -121,7 +121,7 @@ def _install_go(version: str, dest: str) -> None:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir, version)): with envcontext(get_env_patch(envdir, version)):
yield yield


@ -24,7 +24,7 @@ def get_env_patch(target_dir: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -44,7 +44,7 @@ def get_env_patch(d: str) -> PatchesT: # pragma: win32 no cover
@contextlib.contextmanager # pragma: win32 no cover @contextlib.contextmanager # pragma: win32 no cover
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -59,7 +59,7 @@ def get_env_patch(venv: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -33,7 +33,7 @@ def get_env_patch(venv: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -152,7 +152,7 @@ def norm_version(version: str) -> str | None:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -85,7 +85,7 @@ def health_check(prefix: Prefix, version: str) -> str | None:
@contextlib.contextmanager @contextlib.contextmanager
def _r_code_in_tempfile(code: str) -> Generator[str, None, None]: def _r_code_in_tempfile(code: str) -> Generator[str]:
""" """
To avoid quoting and escaping issues, avoid `Rscript [options] -e {expr}` To avoid quoting and escaping issues, avoid `Rscript [options] -e {expr}`
but use `Rscript [options] path/to/file_with_expr.R` but use `Rscript [options] path/to/file_with_expr.R`
@ -105,7 +105,7 @@ def get_env_patch(venv: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -73,7 +73,7 @@ def get_env_patch(
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir, version)): with envcontext(get_env_patch(envdir, version)):
yield yield


@ -61,7 +61,7 @@ def get_env_patch(target_dir: str, version: str) -> PatchesT:
@contextlib.contextmanager @contextlib.contextmanager
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir, version)): with envcontext(get_env_patch(envdir, version)):
yield yield


@ -27,7 +27,7 @@ def get_env_patch(venv: str) -> PatchesT: # pragma: win32 no cover
@contextlib.contextmanager # pragma: win32 no cover @contextlib.contextmanager # pragma: win32 no cover
def in_env(prefix: Prefix, version: str) -> Generator[None, None, None]: def in_env(prefix: Prefix, version: str) -> Generator[None]:
envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version) envdir = lang_base.environment_dir(prefix, ENVIRONMENT_DIR, version)
with envcontext(get_env_patch(envdir)): with envcontext(get_env_patch(envdir)):
yield yield


@ -32,7 +32,7 @@ class LoggingHandler(logging.Handler):
@contextlib.contextmanager @contextlib.contextmanager
def logging_handler(use_color: bool) -> Generator[None, None, None]: def logging_handler(use_color: bool) -> Generator[None]:
handler = LoggingHandler(use_color) handler = LoggingHandler(use_color)
logger.addHandler(handler) logger.addHandler(handler)
logger.setLevel(logging.INFO) logger.setLevel(logging.INFO)


@ -3,7 +3,6 @@ from __future__ import annotations
import json import json
import logging import logging
import os import os
import shlex
from collections.abc import Sequence from collections.abc import Sequence
from typing import Any from typing import Any
@ -68,14 +67,6 @@ def _hook_install(hook: Hook) -> None:
logger.info('Once installed this environment will be reused.') logger.info('Once installed this environment will be reused.')
logger.info('This may take a few minutes...') logger.info('This may take a few minutes...')
if hook.language == 'python_venv':
logger.warning(
f'`repo: {hook.src}` uses deprecated `language: python_venv`. '
f'This is an alias for `language: python`. '
f'Often `pre-commit autoupdate --repo {shlex.quote(hook.src)}` '
f'will fix this.',
)
lang = languages[hook.language] lang = languages[hook.language]
assert lang.ENVIRONMENT_DIR is not None assert lang.ENVIRONMENT_DIR is not None

Binary file not shown.


@ -33,7 +33,7 @@ def _git_apply(patch: str) -> None:
@contextlib.contextmanager @contextlib.contextmanager
def _intent_to_add_cleared() -> Generator[None, None, None]: def _intent_to_add_cleared() -> Generator[None]:
intent_to_add = git.intent_to_add_files() intent_to_add = git.intent_to_add_files()
if intent_to_add: if intent_to_add:
logger.warning('Unstaged intent-to-add files detected.') logger.warning('Unstaged intent-to-add files detected.')
@ -48,7 +48,7 @@ def _intent_to_add_cleared() -> Generator[None, None, None]:
@contextlib.contextmanager @contextlib.contextmanager
def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]: def _unstaged_changes_cleared(patch_dir: str) -> Generator[None]:
tree = cmd_output('git', 'write-tree')[1].strip() tree = cmd_output('git', 'write-tree')[1].strip()
diff_cmd = ( diff_cmd = (
'git', 'diff-index', '--ignore-submodules', '--binary', 'git', 'diff-index', '--ignore-submodules', '--binary',
@ -105,7 +105,7 @@ def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]:
@contextlib.contextmanager @contextlib.contextmanager
def staged_files_only(patch_dir: str) -> Generator[None, None, None]: def staged_files_only(patch_dir: str) -> Generator[None]:
"""Clear any unstaged changes from the git working directory inside this """Clear any unstaged changes from the git working directory inside this
context. context.
""" """


@ -10,6 +10,7 @@ from collections.abc import Sequence
from typing import Callable from typing import Callable
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit import clientlib
from pre_commit import file_lock from pre_commit import file_lock
from pre_commit import git from pre_commit import git
from pre_commit.util import CalledProcessError from pre_commit.util import CalledProcessError
@ -101,7 +102,7 @@ class Store:
os.replace(tmpfile, self.db_path) os.replace(tmpfile, self.db_path)
@contextlib.contextmanager @contextlib.contextmanager
def exclusive_lock(self) -> Generator[None, None, None]: def exclusive_lock(self) -> Generator[None]:
def blocked_cb() -> None: # pragma: no cover (tests are in-process) def blocked_cb() -> None: # pragma: no cover (tests are in-process)
logger.info('Locking pre-commit directory') logger.info('Locking pre-commit directory')
@ -112,7 +113,7 @@ class Store:
def connect( def connect(
self, self,
db_path: str | None = None, db_path: str | None = None,
) -> Generator[sqlite3.Connection, None, None]: ) -> Generator[sqlite3.Connection]:
db_path = db_path or self.db_path db_path = db_path or self.db_path
# sqlite doesn't close its fd with its contextmanager >.< # sqlite doesn't close its fd with its contextmanager >.<
# contextlib.closing fixes this. # contextlib.closing fixes this.
@ -136,6 +137,7 @@ class Store:
deps: Sequence[str], deps: Sequence[str],
make_strategy: Callable[[str], None], make_strategy: Callable[[str], None],
) -> str: ) -> str:
original_repo = repo
repo = self.db_repo_name(repo, deps) repo = self.db_repo_name(repo, deps)
def _get_result() -> str | None: def _get_result() -> str | None:
@ -168,6 +170,9 @@ class Store:
'INSERT INTO repos (repo, ref, path) VALUES (?, ?, ?)', 'INSERT INTO repos (repo, ref, path) VALUES (?, ?, ?)',
[repo, ref, directory], [repo, ref, directory],
) )
clientlib.warn_for_stages_on_repo_init(original_repo, directory)
return directory return directory
def _complete_clone(self, ref: str, git_cmd: Callable[..., None]) -> None: def _complete_clone(self, ref: str, git_cmd: Callable[..., None]) -> None:


@ -25,7 +25,7 @@ def force_bytes(exc: Any) -> bytes:
@contextlib.contextmanager @contextlib.contextmanager
def clean_path_on_failure(path: str) -> Generator[None, None, None]: def clean_path_on_failure(path: str) -> Generator[None]:
"""Cleans up the directory on an exceptional failure.""" """Cleans up the directory on an exceptional failure."""
try: try:
yield yield


@ -120,7 +120,6 @@ def partition(
@contextlib.contextmanager @contextlib.contextmanager
def _thread_mapper(maxsize: int) -> Generator[ def _thread_mapper(maxsize: int) -> Generator[
Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]], Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]],
None, None,
]: ]:
if maxsize == 1: if maxsize == 1:
yield map yield map


@ -6,6 +6,7 @@ from typing import Any
import yaml import yaml
Loader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader) Loader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader)
yaml_compose = functools.partial(yaml.compose, Loader=Loader)
yaml_load = functools.partial(yaml.load, Loader=Loader) yaml_load = functools.partial(yaml.load, Loader=Loader)
Dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper) Dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper)


@ -0,0 +1,52 @@
from __future__ import annotations
from collections.abc import Generator
from collections.abc import Iterable
from typing import NamedTuple
from typing import Protocol
from yaml.nodes import MappingNode
from yaml.nodes import Node
from yaml.nodes import ScalarNode
from yaml.nodes import SequenceNode
class _Matcher(Protocol):
def match(self, n: Node) -> Generator[Node]: ...
class MappingKey(NamedTuple):
k: str
def match(self, n: Node) -> Generator[Node]:
if isinstance(n, MappingNode):
for k, _ in n.value:
if k.value == self.k:
yield k
class MappingValue(NamedTuple):
k: str
def match(self, n: Node) -> Generator[Node]:
if isinstance(n, MappingNode):
for k, v in n.value:
if k.value == self.k:
yield v
class SequenceItem(NamedTuple):
def match(self, n: Node) -> Generator[Node]:
if isinstance(n, SequenceNode):
yield from n.value
def _match(gen: Iterable[Node], m: _Matcher) -> Iterable[Node]:
return (n for src in gen for n in m.match(src))
def match(n: Node, matcher: tuple[_Matcher, ...]) -> Generator[ScalarNode]:
gen: Iterable[Node] = (n,)
for m in matcher:
gen = _match(gen, m)
return (n for n in gen if isinstance(n, ScalarNode))
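
The matchers in this new module compose left to right: each step narrows the node set and `match()` keeps only the scalar leaves, which is what lets `_migrate_composed` locate exact source offsets to rewrite. A hedged usage sketch -- the config text is invented, while the matcher tuple mirrors the `language` matcher used in migrate_config:

from pre_commit.yaml import yaml_compose
from pre_commit.yaml_rewrite import MappingValue
from pre_commit.yaml_rewrite import match
from pre_commit.yaml_rewrite import SequenceItem

SRC = '''\
repos:
-   repo: local
    hooks:
    -   id: example
        language: python_venv
'''

matcher = (
    MappingValue('repos'),
    SequenceItem(),
    MappingValue('hooks'),
    SequenceItem(),
    MappingValue('language'),
)
for node in match(yaml_compose(SRC), matcher):
    print(node.value, node.start_mark.index, node.end_mark.index)
# prints: python_venv plus the node's start/end offsets into SRC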


@ -1,6 +1,6 @@
[metadata] [metadata]
name = pre_commit name = pre_commit
version = 3.8.0 version = 4.0.1
description = A framework for managing and maintaining multi-language pre-commit hooks. description = A framework for managing and maintaining multi-language pre-commit hooks.
long_description = file: README.md long_description = file: README.md
long_description_content_type = text/markdown long_description_content_type = text/markdown


@ -17,7 +17,7 @@ from collections.abc import Sequence
REPOS = ( REPOS = (
('rbenv', 'https://github.com/rbenv/rbenv', '38e1fbb'), ('rbenv', 'https://github.com/rbenv/rbenv', '38e1fbb'),
('ruby-build', 'https://github.com/rbenv/ruby-build', '855b963'), ('ruby-build', 'https://github.com/rbenv/ruby-build', 'ed384c8'),
( (
'ruby-download', 'ruby-download',
'https://github.com/garnieretienne/rvm-download', 'https://github.com/garnieretienne/rvm-download',


@ -1,7 +0,0 @@
from __future__ import annotations
from pre_commit.all_languages import languages
def test_python_venv_is_an_alias_to_python():
assert languages['python_venv'] is languages['python']


@ -256,6 +256,24 @@ def test_validate_optional_sensible_regex_at_local_hook(caplog):
] ]
def test_validate_optional_sensible_regex_at_meta_hook(caplog):
config_obj = {
'repo': 'meta',
'hooks': [{'id': 'identity', 'files': 'dir/*.py'}],
}
cfgv.validate(config_obj, CONFIG_REPO_DICT)
assert caplog.record_tuples == [
(
'pre_commit',
logging.WARNING,
"The 'files' field in hook 'identity' is a regex, not a glob "
"-- matching '/*' probably isn't what you want here",
),
]
@pytest.mark.parametrize( @pytest.mark.parametrize(
('regex', 'warning'), ('regex', 'warning'),
( (
@ -291,6 +309,56 @@ def test_validate_optional_sensible_regex_at_top_level(caplog, regex, warning):
assert caplog.record_tuples == [('pre_commit', logging.WARNING, warning)] assert caplog.record_tuples == [('pre_commit', logging.WARNING, warning)]
def test_warning_for_deprecated_stages(caplog):
config_obj = sample_local_config()
config_obj['hooks'][0]['stages'] = ['commit', 'push']
cfgv.validate(config_obj, CONFIG_REPO_DICT)
assert caplog.record_tuples == [
(
'pre_commit',
logging.WARNING,
'hook id `do_not_commit` uses deprecated stage names '
'(commit, push) which will be removed in a future version. '
'run: `pre-commit migrate-config` to automatically fix this.',
),
]
def test_no_warning_for_non_deprecated_stages(caplog):
config_obj = sample_local_config()
config_obj['hooks'][0]['stages'] = ['pre-commit', 'pre-push']
cfgv.validate(config_obj, CONFIG_REPO_DICT)
assert caplog.record_tuples == []
def test_warning_for_deprecated_default_stages(caplog):
cfg = {'default_stages': ['commit', 'push'], 'repos': []}
cfgv.validate(cfg, CONFIG_SCHEMA)
assert caplog.record_tuples == [
(
'pre_commit',
logging.WARNING,
'top-level `default_stages` uses deprecated stage names '
'(commit, push) which will be removed in a future version. '
'run: `pre-commit migrate-config` to automatically fix this.',
),
]
def test_no_warning_for_non_deprecated_default_stages(caplog):
cfg = {'default_stages': ['pre-commit', 'pre-push'], 'repos': []}
cfgv.validate(cfg, CONFIG_SCHEMA)
assert caplog.record_tuples == []
@pytest.mark.parametrize( @pytest.mark.parametrize(
'manifest_obj', 'manifest_obj',
( (


@ -1,10 +1,26 @@
from __future__ import annotations from __future__ import annotations
from unittest import mock
import pytest import pytest
import yaml
import pre_commit.constants as C import pre_commit.constants as C
from pre_commit.clientlib import InvalidConfigError from pre_commit.clientlib import InvalidConfigError
from pre_commit.commands.migrate_config import migrate_config from pre_commit.commands.migrate_config import migrate_config
from pre_commit.yaml import yaml_compose
@pytest.fixture(autouse=True, params=['c', 'pure'])
def switch_pyyaml_impl(request):
if request.param == 'c':
yield
else:
with mock.patch.dict(
yaml_compose.keywords,
{'Loader': yaml.SafeLoader},
):
yield
def test_migrate_config_normal_format(tmpdir, capsys): def test_migrate_config_normal_format(tmpdir, capsys):
@ -134,6 +150,27 @@ def test_migrate_config_sha_to_rev(tmpdir):
) )
def test_migrate_config_sha_to_rev_json(tmp_path):
contents = """\
{"repos": [{
"repo": "https://github.com/pre-commit/pre-commit-hooks",
"sha": "v1.2.0",
"hooks": []
}]}
"""
expected = """\
{"repos": [{
"repo": "https://github.com/pre-commit/pre-commit-hooks",
"rev": "v1.2.0",
"hooks": []
}]}
"""
cfg = tmp_path.joinpath('cfg.yaml')
cfg.write_text(contents)
assert not migrate_config(str(cfg))
assert cfg.read_text() == expected
def test_migrate_config_language_python_venv(tmp_path): def test_migrate_config_language_python_venv(tmp_path):
src = '''\ src = '''\
repos: repos:
@ -167,6 +204,73 @@ repos:
assert cfg.read_text() == expected assert cfg.read_text() == expected
def test_migrate_config_quoted_python_venv(tmp_path):
src = '''\
repos:
- repo: local
hooks:
- id: example
name: example
entry: example
language: "python_venv"
'''
expected = '''\
repos:
- repo: local
hooks:
- id: example
name: example
entry: example
language: "python"
'''
cfg = tmp_path.joinpath('cfg.yaml')
cfg.write_text(src)
assert migrate_config(str(cfg)) == 0
assert cfg.read_text() == expected
def test_migrate_config_default_stages(tmp_path):
src = '''\
default_stages: [commit, push, merge-commit, commit-msg]
repos: []
'''
expected = '''\
default_stages: [pre-commit, pre-push, pre-merge-commit, commit-msg]
repos: []
'''
cfg = tmp_path.joinpath('cfg.yaml')
cfg.write_text(src)
assert migrate_config(str(cfg)) == 0
assert cfg.read_text() == expected
def test_migrate_config_hook_stages(tmp_path):
src = '''\
repos:
- repo: local
hooks:
- id: example
name: example
entry: example
language: system
stages: ["commit", "push", "merge-commit", "commit-msg"]
'''
expected = '''\
repos:
- repo: local
hooks:
- id: example
name: example
entry: example
language: system
stages: ["pre-commit", "pre-push", "pre-merge-commit", "commit-msg"]
'''
cfg = tmp_path.joinpath('cfg.yaml')
cfg.write_text(src)
assert migrate_config(str(cfg)) == 0
assert cfg.read_text() == expected
def test_migrate_config_invalid_yaml(tmpdir): def test_migrate_config_invalid_yaml(tmpdir):
contents = '[' contents = '['
cfg = tmpdir.join(C.CONFIG_FILE) cfg = tmpdir.join(C.CONFIG_FILE)


@ -2,7 +2,6 @@ from __future__ import annotations
import functools import functools
import io import io
import logging
import os.path import os.path
from unittest import mock from unittest import mock
@ -203,12 +202,6 @@ def store(tempdir_factory):
yield Store(os.path.join(tempdir_factory.get(), '.pre-commit')) yield Store(os.path.join(tempdir_factory.get(), '.pre-commit'))
@pytest.fixture
def log_info_mock():
with mock.patch.object(logging.getLogger('pre_commit'), 'info') as mck:
yield mck
class Fixture: class Fixture:
def __init__(self, stream: io.BytesIO) -> None: def __init__(self, stream: io.BytesIO) -> None:
self._stream = stream self._stream = stream


@ -80,24 +80,6 @@ def _test_hook_repo(
assert out == expected assert out == expected
def test_python_venv_deprecation(store, caplog):
config = {
'repo': 'local',
'hooks': [{
'id': 'example',
'name': 'example',
'language': 'python_venv',
'entry': 'echo hi',
}],
}
_get_hook(config, store, 'example')
assert caplog.messages[-1] == (
'`repo: local` uses deprecated `language: python_venv`. '
'This is an alias for `language: python`. '
'Often `pre-commit autoupdate --repo local` will fix this.'
)
def test_system_hook_with_spaces(tempdir_factory, store): def test_system_hook_with_spaces(tempdir_factory, store):
_test_hook_repo( _test_hook_repo(
tempdir_factory, store, 'system_hook_with_spaces_repo', tempdir_factory, store, 'system_hook_with_spaces_repo',
@ -240,16 +222,16 @@ def test_unknown_keys(store, caplog):
assert msg == 'Unexpected key(s) present on local => too-much: foo, hello' assert msg == 'Unexpected key(s) present on local => too-much: foo, hello'
def test_reinstall(tempdir_factory, store, log_info_mock): def test_reinstall(tempdir_factory, store, caplog):
path = make_repo(tempdir_factory, 'python_hooks_repo') path = make_repo(tempdir_factory, 'python_hooks_repo')
config = make_config_from_repo(path) config = make_config_from_repo(path)
_get_hook(config, store, 'foo') _get_hook(config, store, 'foo')
# We print some logging during clone (1) + install (3) # We print some logging during clone (1) + install (3)
assert log_info_mock.call_count == 4 assert len(caplog.record_tuples) == 4
log_info_mock.reset_mock() caplog.clear()
# Reinstall on another run should not trigger another install # Reinstall on another run should not trigger another install
_get_hook(config, store, 'foo') _get_hook(config, store, 'foo')
assert log_info_mock.call_count == 0 assert len(caplog.record_tuples) == 0
def test_control_c_control_c_on_install(tempdir_factory, store): def test_control_c_control_c_on_install(tempdir_factory, store):


@ -1,12 +1,15 @@
from __future__ import annotations from __future__ import annotations
import logging
import os.path import os.path
import shlex
import sqlite3 import sqlite3
import stat import stat
from unittest import mock from unittest import mock
import pytest import pytest
import pre_commit.constants as C
from pre_commit import git from pre_commit import git
from pre_commit.store import _get_default_directory from pre_commit.store import _get_default_directory
from pre_commit.store import _LOCAL_RESOURCES from pre_commit.store import _LOCAL_RESOURCES
@ -65,7 +68,7 @@ def test_store_init(store):
assert text_line in readme_contents assert text_line in readme_contents
def test_clone(store, tempdir_factory, log_info_mock): def test_clone(store, tempdir_factory, caplog):
path = git_dir(tempdir_factory) path = git_dir(tempdir_factory)
with cwd(path): with cwd(path):
git_commit() git_commit()
@ -74,7 +77,7 @@ def test_clone(store, tempdir_factory, log_info_mock):
ret = store.clone(path, rev) ret = store.clone(path, rev)
# Should have printed some stuff # Should have printed some stuff
assert log_info_mock.call_args_list[0][0][0].startswith( assert caplog.record_tuples[0][-1].startswith(
'Initializing environment for ', 'Initializing environment for ',
) )
@ -91,6 +94,72 @@ def test_clone(store, tempdir_factory, log_info_mock):
assert store.select_all_repos() == [(path, rev, ret)] assert store.select_all_repos() == [(path, rev, ret)]
def test_warning_for_deprecated_stages_on_init(store, tempdir_factory, caplog):
manifest = '''\
- id: hook1
name: hook1
language: system
entry: echo hook1
stages: [commit, push]
- id: hook2
name: hook2
language: system
entry: echo hook2
stages: [push, merge-commit]
'''
path = git_dir(tempdir_factory)
with open(os.path.join(path, C.MANIFEST_FILE), 'w') as f:
f.write(manifest)
cmd_output('git', 'add', '.', cwd=path)
git_commit(cwd=path)
rev = git.head_rev(path)
store.clone(path, rev)
assert caplog.record_tuples[1] == (
'pre_commit',
logging.WARNING,
f'repo `{path}` uses deprecated stage names '
f'(commit, push, merge-commit) which will be removed in a future '
f'version. '
f'Hint: often `pre-commit autoupdate --repo {shlex.quote(path)}` '
f'will fix this. '
f'if it does not -- consider reporting an issue to that repo.',
)
# should not re-warn
caplog.clear()
store.clone(path, rev)
assert caplog.record_tuples == []
def test_no_warning_for_non_deprecated_stages_on_init(
store, tempdir_factory, caplog,
):
manifest = '''\
- id: hook1
name: hook1
language: system
entry: echo hook1
stages: [pre-commit, pre-push]
- id: hook2
name: hook2
language: system
entry: echo hook2
stages: [pre-push, pre-merge-commit]
'''
path = git_dir(tempdir_factory)
with open(os.path.join(path, C.MANIFEST_FILE), 'w') as f:
f.write(manifest)
cmd_output('git', 'add', '.', cwd=path)
git_commit(cwd=path)
rev = git.head_rev(path)
store.clone(path, rev)
assert logging.WARNING not in {tup[1] for tup in caplog.record_tuples}
def test_clone_cleans_up_on_checkout_failure(store): def test_clone_cleans_up_on_checkout_failure(store):
with pytest.raises(Exception) as excinfo: with pytest.raises(Exception) as excinfo:
# This raises an exception because you can't clone something that # This raises an exception because you can't clone something that
@ -118,7 +187,7 @@ def test_clone_when_repo_already_exists(store):
def test_clone_shallow_failure_fallback_to_complete( def test_clone_shallow_failure_fallback_to_complete(
store, tempdir_factory, store, tempdir_factory,
log_info_mock, caplog,
): ):
path = git_dir(tempdir_factory) path = git_dir(tempdir_factory)
with cwd(path): with cwd(path):
@ -134,7 +203,7 @@ def test_clone_shallow_failure_fallback_to_complete(
ret = store.clone(path, rev) ret = store.clone(path, rev)
# Should have printed some stuff # Should have printed some stuff
assert log_info_mock.call_args_list[0][0][0].startswith( assert caplog.record_tuples[0][-1].startswith(
'Initializing environment for ', 'Initializing environment for ',
) )


@ -0,0 +1,47 @@
from __future__ import annotations
import pytest
from pre_commit.yaml import yaml_compose
from pre_commit.yaml_rewrite import MappingKey
from pre_commit.yaml_rewrite import MappingValue
from pre_commit.yaml_rewrite import match
from pre_commit.yaml_rewrite import SequenceItem
def test_match_produces_scalar_values_only():
src = '''\
- name: foo
- name: [not, foo] # not a scalar: should be skipped!
- name: bar
'''
matcher = (SequenceItem(), MappingValue('name'))
ret = [n.value for n in match(yaml_compose(src), matcher)]
assert ret == ['foo', 'bar']
@pytest.mark.parametrize('cls', (MappingKey, MappingValue))
def test_mapping_not_a_map(cls):
m = cls('s')
assert list(m.match(yaml_compose('[foo]'))) == []
def test_sequence_item_not_a_sequence():
assert list(SequenceItem().match(yaml_compose('s: val'))) == []
def test_mapping_key():
m = MappingKey('s')
ret = [n.value for n in m.match(yaml_compose('s: val\nt: val2'))]
assert ret == ['s']
def test_mapping_value():
m = MappingValue('s')
ret = [n.value for n in m.match(yaml_compose('s: val\nt: val2'))]
assert ret == ['val']
def test_sequence_item():
ret = [n.value for n in SequenceItem().match(yaml_compose('[a, b, c]'))]
assert ret == ['a', 'b', 'c']