Adding upstream version 2.5.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 080d7f9289
commit bb6dbf8636
37 changed files with 457 additions and 213 deletions
@@ -12,25 +12,25 @@ repos:
     -   id: requirements-txt-fixer
     -   id: double-quote-string-fixer
 -   repo: https://gitlab.com/pycqa/flake8
-    rev: 3.7.9
+    rev: 3.8.0
     hooks:
     -   id: flake8
         additional_dependencies: [flake8-typing-imports==1.6.0]
 -   repo: https://github.com/pre-commit/mirrors-autopep8
-    rev: v1.5.1
+    rev: v1.5.2
     hooks:
     -   id: autopep8
 -   repo: https://github.com/pre-commit/pre-commit
-    rev: v2.2.0
+    rev: v2.4.0
     hooks:
     -   id: validate_manifest
 -   repo: https://github.com/asottile/pyupgrade
-    rev: v2.1.0
+    rev: v2.4.1
     hooks:
     -   id: pyupgrade
         args: [--py36-plus]
 -   repo: https://github.com/asottile/reorder_python_imports
-    rev: v2.1.0
+    rev: v2.3.0
     hooks:
     -   id: reorder-python-imports
         args: [--py3-plus]
@@ -40,7 +40,7 @@ repos:
     -   id: add-trailing-comma
         args: [--py36-plus]
 -   repo: https://github.com/asottile/setup-cfg-fmt
-    rev: v1.8.2
+    rev: v1.9.0
     hooks:
     -   id: setup-cfg-fmt
 -   repo: https://github.com/pre-commit/mirrors-mypy
CHANGELOG.md (67 additions)
@@ -1,3 +1,70 @@
+2.5.1 - 2020-06-09
+==================
+
+### Fixes
+- Prevent infinite recursion of post-checkout on clone
+    - #1497 PR by @asottile.
+    - #1496 issue by @admorgan.
+
+2.5.0 - 2020-06-08
+==================
+
+### Features
+- Expose a `PRE_COMMIT=1` environment variable when running hooks
+    - #1467 PR by @tech-chad.
+    - #1426 issue by @lorenzwalthert.
+
+### Fixes
+- Fix `UnicodeDecodeError` on windows when using the `py` launcher to detect
+  executables with non-ascii characters in the path
+    - #1474 PR by @asottile.
+    - #1472 issue by DrFobos.
+- Fix `DeprecationWarning` on python3.9 for `random.shuffle` method
+    - #1480 PR by @asottile.
+    - #1479 issue by @isidentical.
+- Normalize slashes earlier such that global `files` / `exclude` use forward
+  slashes on windows as well.
+    - #1494 PR by @asottile.
+    - #1476 issue by @harrybiddle.
+
+2.4.0 - 2020-05-11
+==================
+
+### Features
+- Add support for `post-commit` hooks
+    - #1415 PR by @ModischFabrications.
+    - #1411 issue by @ModischFabrications.
+- Silence pip version warning in python installation error
+    - #1412 PR by @asottile.
+- Improve python `healthy()` when upgrading operating systems.
+    - #1431 PR by @asottile.
+    - #1427 issue by @ahonnecke.
+- `language: python_venv` is now an alias to `language: python` (and will be
+  removed in a future version).
+    - #1431 PR by @asottile.
+- Speed up python `healthy()` check.
+    - #1431 PR by @asottile.
+- `pre-commit autoupdate` now tries to maintain quoting style of `rev`.
+    - #1435 PR by @marcjay.
+    - #1434 issue by @marcjay.
+
+### Fixes
+- Fix installation of go modules in `repo: local`.
+    - #1428 PR by @scop.
+- Fix committing with unstaged files and a failing `post-checkout` hook.
+    - #1422 PR by @domodwyer.
+    - #1418 issue by @domodwyer.
+- Fix installation of node hooks with system node installed on freebsd.
+    - #1443 PR by @asottile.
+    - #1440 issue by @jockej.
+- Fix ruby hooks when `GEM_PATH` is set globally.
+    - #1442 PR by @tdeo.
+- Improve error message when `pre-commit autoupdate` /
+  `pre-commit migrate-config` are run but the pre-commit configuration is not
+  valid yaml.
+    - #1448 PR by @asottile.
+    - #1447 issue by @rpdelaney.
+
 2.3.0 - 2020-04-22
 ==================
 
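The 2.5.0 entry above exposes a `PRE_COMMIT=1` environment variable to every hook process. A minimal sketch of a hook entry point that branches on it; the script name and messages are illustrative and not part of this diff:

    # detect_pre_commit.py -- hypothetical hook entry point
    import os
    import sys


    def main() -> int:
        # pre-commit >= 2.5.0 sets PRE_COMMIT=1 while hooks run
        if os.environ.get('PRE_COMMIT') == '1':
            print('running under pre-commit')
        else:
            print('running standalone')
        return 0


    if __name__ == '__main__':
        sys.exit(main())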
@@ -17,7 +17,8 @@ This is useful for running specific tests. The easiest way to set this up
 is to run:
 
 1. `tox --devenv venv` (note: requires tox>=3.13)
-2. `. venv/bin/activate`
+2. `. venv/bin/activate` (or follow the [activation instructions] for your
+   platform)
 
 This will create and put you into a virtualenv which has an editable
 installation of pre-commit. Hack away! Running `pre-commit` will reflect
@@ -144,3 +145,5 @@ This is usually the easiest to implement, most of them look the same as the
 `node` hook implementation:
 
 https://github.com/pre-commit/pre-commit/blob/160238220f022035c8ef869c9a8642f622c02118/pre_commit/languages/node.py#L72-L74
+
+[activation instructions]: https://virtualenv.pypa.io/en/latest/user_guide.html#activators
@@ -10,7 +10,7 @@ resources:
       type: github
       endpoint: github
       name: asottile/azure-pipeline-templates
-      ref: refs/tags/v1.0.0
+      ref: refs/tags/v2.0.0
 
 jobs:
 - template: job--pre-commit.yml@asottile
@@ -40,7 +40,7 @@ jobs:
       displayName: install swift
 - template: job--python-tox.yml@asottile
   parameters:
-    toxenvs: [pypy3, py36, py37, py38]
+    toxenvs: [pypy3, py36, py37, py38, py39]
     os: linux
     pre_test:
     - task: UseRubyVersion@0
@@ -84,7 +84,9 @@ def _check_hooks_still_exist_at_rev(
         )
 
 
-REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([^\s#]+)(.*)(\r?\n)$', re.DOTALL)
+REV_LINE_RE = re.compile(
+    r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$', re.DOTALL,
+)
 
 
 def _original_lines(
@@ -116,15 +118,15 @@ def _write_new_config(path: str, rev_infos: List[Optional[RevInfo]]) -> None:
                 continue
             match = REV_LINE_RE.match(lines[idx])
             assert match is not None
-            new_rev_s = yaml_dump({'rev': rev_info.rev})
+            new_rev_s = yaml_dump({'rev': rev_info.rev}, default_style=match[3])
            new_rev = new_rev_s.split(':', 1)[1].strip()
             if rev_info.frozen is not None:
                 comment = f' # frozen: {rev_info.frozen}'
-            elif match[4].strip().startswith('# frozen:'):
+            elif match[5].strip().startswith('# frozen:'):
                 comment = ''
             else:
-                comment = match[4]
-            lines[idx] = f'{match[1]}rev:{match[2]}{new_rev}{comment}{match[5]}'
+                comment = match[5]
+            lines[idx] = f'{match[1]}rev:{match[2]}{new_rev}{comment}{match[6]}'
 
     with open(path, 'w', newline='') as f:
         f.write(''.join(lines))
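The new third capture group in REV_LINE_RE is what lets `pre-commit autoupdate` keep the existing quoting of `rev:`: the matched quote character is handed to `yaml_dump(..., default_style=match[3])` above. A small standalone sketch of that regex behaviour; the sample line is made up:

    import re

    # same pattern as the updated REV_LINE_RE
    REV_LINE_RE = re.compile(
        r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$', re.DOTALL,
    )

    match = REV_LINE_RE.match("    rev: 'v2.4.0'\n")
    assert match is not None
    print(repr(match[1]))  # leading indentation: '    '
    print(repr(match[3]))  # quote style to preserve: "'"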
@@ -150,6 +150,7 @@ def _pre_push_ns(
 _EXPECTED_ARG_LENGTH_BY_HOOK = {
     'commit-msg': 1,
     'post-checkout': 3,
+    'post-commit': 0,
     'pre-commit': 0,
     'pre-merge-commit': 0,
     'pre-push': 2,
@@ -186,7 +187,7 @@ def _run_ns(
         return _pre_push_ns(color, args, stdin)
     elif hook_type in {'commit-msg', 'prepare-commit-msg'}:
         return _ns(hook_type, color, commit_msg_filename=args[0])
-    elif hook_type in {'pre-merge-commit', 'pre-commit'}:
+    elif hook_type in {'post-commit', 'pre-merge-commit', 'pre-commit'}:
         return _ns(hook_type, color)
     elif hook_type == 'post-checkout':
         return _ns(
@@ -2,6 +2,7 @@ import re
 
 import yaml
 
+from pre_commit.clientlib import load_config
 from pre_commit.util import yaml_load
 
 
@@ -43,6 +44,9 @@ def _migrate_sha_to_rev(contents: str) -> str:
 
 
 def migrate_config(config_file: str, quiet: bool = False) -> int:
+    # ensure that the configuration is a valid pre-commit configuration
+    load_config(config_file)
+
     with open(config_file) as f:
         orig_contents = contents = f.read()
 
@@ -72,13 +72,7 @@ def filter_by_include_exclude(
 
 
 class Classifier:
-    def __init__(self, filenames: Sequence[str]) -> None:
-        # on windows we normalize all filenames to use forward slashes
-        # this makes it easier to filter using the `files:` regex
-        # this also makes improperly quoted shell-based hooks work better
-        # see #1173
-        if os.altsep == '/' and os.sep == '\\':
-            filenames = [f.replace(os.sep, os.altsep) for f in filenames]
+    def __init__(self, filenames: Collection[str]) -> None:
         self.filenames = [f for f in filenames if os.path.lexists(f)]
 
     @functools.lru_cache(maxsize=None)
@@ -105,6 +99,22 @@ class Classifier:
         names = self.by_types(names, hook.types, hook.exclude_types)
         return tuple(names)
 
+    @classmethod
+    def from_config(
+            cls,
+            filenames: Collection[str],
+            include: str,
+            exclude: str,
+    ) -> 'Classifier':
+        # on windows we normalize all filenames to use forward slashes
+        # this makes it easier to filter using the `files:` regex
+        # this also makes improperly quoted shell-based hooks work better
+        # see #1173
+        if os.altsep == '/' and os.sep == '\\':
+            filenames = [f.replace(os.sep, os.altsep) for f in filenames]
+        filenames = filter_by_include_exclude(filenames, include, exclude)
+        return Classifier(filenames)
+
 
 def _get_skips(environ: EnvironT) -> Set[str]:
     skips = environ.get('SKIP', '')
@@ -221,7 +231,8 @@ def _compute_cols(hooks: Sequence[Hook]) -> int:
 
 
 def _all_filenames(args: argparse.Namespace) -> Collection[str]:
-    if args.hook_stage == 'post-checkout':  # no files for post-checkout
+    # these hooks do not operate on files
+    if args.hook_stage in {'post-checkout', 'post-commit'}:
         return ()
     elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}:
         return (args.commit_msg_filename,)
@@ -246,10 +257,9 @@ def _run_hooks(
     """Actually run the hooks."""
     skips = _get_skips(environ)
     cols = _compute_cols(hooks)
-    filenames = filter_by_include_exclude(
+    classifier = Classifier.from_config(
         _all_filenames(args), config['files'], config['exclude'],
     )
-    classifier = Classifier(filenames)
     retval = 0
     for hook in hooks:
         retval |= _run_single_hook(
@@ -323,6 +333,12 @@ def run(
             f'`--hook-stage {args.hook_stage}`',
         )
         return 1
+    # prevent recursive post-checkout hooks (#1418)
+    if (
+            args.hook_stage == 'post-checkout' and
+            environ.get('_PRE_COMMIT_SKIP_POST_CHECKOUT')
+    ):
+        return 0
 
     # Expose from-ref / to-ref as environment variables for hooks to consume
     if args.from_ref and args.to_ref:
@@ -340,6 +356,9 @@ def run(
     if args.checkout_type:
         environ['PRE_COMMIT_CHECKOUT_TYPE'] = args.checkout_type
 
+    # Set pre_commit flag
+    environ['PRE_COMMIT'] = '1'
+
     with contextlib.ExitStack() as exit_stack:
         if stash:
             exit_stack.enter_context(staged_files_only(store.directory))
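The `Classifier.from_config` hunk above implements the 2.5.0 note about normalizing slashes before the global `files` / `exclude` filters run. The core of that normalization, pulled out as a standalone sketch; the helper name and sample path are illustrative:

    import os
    from typing import Collection, List


    def normalize_separators(filenames: Collection[str]) -> List[str]:
        # mirrors the windows branch of Classifier.from_config: rewrite
        # backslashes only when the platform separator is '\\'
        if os.altsep == '/' and os.sep == '\\':
            return [f.replace(os.sep, os.altsep) for f in filenames]
        return list(filenames)


    # prints ['a/b/c'] on windows; elsewhere the input is returned unchanged
    print(normalize_separators([r'a\b\c']))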
@@ -17,8 +17,8 @@ VERSION = importlib_metadata.version('pre_commit')
 
 # `manual` is not invoked by any installed git hook. See #719
 STAGES = (
-    'commit', 'merge-commit', 'prepare-commit-msg', 'commit-msg', 'manual',
-    'post-checkout', 'push',
+    'commit', 'merge-commit', 'prepare-commit-msg', 'commit-msg',
+    'post-commit', 'manual', 'post-checkout', 'push',
 )
 
 DEFAULT = 'default'
@@ -158,7 +158,8 @@ def init_repo(path: str, remote: str) -> None:
         remote = os.path.abspath(remote)
 
     env = no_git_env()
-    cmd_output_b('git', 'init', path, env=env)
+    # avoid the user's template so that hooks do not recurse
+    cmd_output_b('git', 'init', '--template=', path, env=env)
     cmd_output_b('git', 'remote', 'add', 'origin', remote, cwd=path, env=env)
 
 
@@ -14,7 +14,6 @@ from pre_commit.languages import node
 from pre_commit.languages import perl
 from pre_commit.languages import pygrep
 from pre_commit.languages import python
-from pre_commit.languages import python_venv
 from pre_commit.languages import ruby
 from pre_commit.languages import rust
 from pre_commit.languages import script
@@ -49,7 +48,6 @@ languages = {
     'perl': Language(name='perl', ENVIRONMENT_DIR=perl.ENVIRONMENT_DIR, get_default_version=perl.get_default_version, healthy=perl.healthy, install_environment=perl.install_environment, run_hook=perl.run_hook),  # noqa: E501
     'pygrep': Language(name='pygrep', ENVIRONMENT_DIR=pygrep.ENVIRONMENT_DIR, get_default_version=pygrep.get_default_version, healthy=pygrep.healthy, install_environment=pygrep.install_environment, run_hook=pygrep.run_hook),  # noqa: E501
     'python': Language(name='python', ENVIRONMENT_DIR=python.ENVIRONMENT_DIR, get_default_version=python.get_default_version, healthy=python.healthy, install_environment=python.install_environment, run_hook=python.run_hook),  # noqa: E501
-    'python_venv': Language(name='python_venv', ENVIRONMENT_DIR=python_venv.ENVIRONMENT_DIR, get_default_version=python_venv.get_default_version, healthy=python_venv.healthy, install_environment=python_venv.install_environment, run_hook=python_venv.run_hook),  # noqa: E501
     'ruby': Language(name='ruby', ENVIRONMENT_DIR=ruby.ENVIRONMENT_DIR, get_default_version=ruby.get_default_version, healthy=ruby.healthy, install_environment=ruby.install_environment, run_hook=ruby.run_hook),  # noqa: E501
     'rust': Language(name='rust', ENVIRONMENT_DIR=rust.ENVIRONMENT_DIR, get_default_version=rust.get_default_version, healthy=rust.healthy, install_environment=rust.install_environment, run_hook=rust.run_hook),  # noqa: E501
     'script': Language(name='script', ENVIRONMENT_DIR=script.ENVIRONMENT_DIR, get_default_version=script.get_default_version, healthy=script.healthy, install_environment=script.install_environment, run_hook=script.run_hook),  # noqa: E501
@@ -57,4 +55,6 @@ languages = {
     'system': Language(name='system', ENVIRONMENT_DIR=system.ENVIRONMENT_DIR, get_default_version=system.get_default_version, healthy=system.healthy, install_environment=system.install_environment, run_hook=system.run_hook),  # noqa: E501
     # END GENERATED
 }
+# TODO: fully deprecate `python_venv`
+languages['python_venv'] = languages['python']
 all_languages = sorted(languages)
@@ -18,7 +18,7 @@ from pre_commit.xargs import xargs
 if TYPE_CHECKING:
     from typing import NoReturn
 
-FIXED_RANDOM_SEED = 1542676186
+FIXED_RANDOM_SEED = 1542676187
 
 
 def run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None:
@@ -92,7 +92,7 @@ def _shuffled(seq: Sequence[str]) -> List[str]:
     fixed_random.seed(FIXED_RANDOM_SEED, version=1)
 
     seq = list(seq)
-    random.shuffle(seq, random=fixed_random.random)
+    fixed_random.shuffle(seq)
     return seq
 
 
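The `_shuffled` change replaces the `random=` argument of `random.shuffle` (which triggers a `DeprecationWarning` on python 3.9, per the changelog) with a call on a seeded private `random.Random` instance, keeping the shuffle deterministic. A stripped-down sketch of the same pattern:

    import random

    FIXED_RANDOM_SEED = 1542676187  # same constant as above


    def shuffled(seq):
        # a private Random instance avoids the deprecated random= parameter
        # and does not disturb the global random state
        fixed_random = random.Random()
        fixed_random.seed(FIXED_RANDOM_SEED, version=1)
        seq = list(seq)
        fixed_random.shuffle(seq)
        return seq


    # the same input always produces the same order
    assert shuffled('abcdef') == shuffled('abcdef')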
@@ -79,7 +79,7 @@ def install_environment(
 
     # https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx?f=255&MSPPError=-2147217396#maxpath
     if sys.platform == 'win32':  # pragma: no cover
-        envdir = f'\\\\?\\{os.path.normpath(envdir)}'
+        envdir = fr'\\?\{os.path.normpath(envdir)}'
     with clean_path_on_failure(envdir):
         cmd = [
             sys.executable, '-mnodeenv', '--prebuilt', '--clean-src', envdir,
@@ -2,8 +2,7 @@ import contextlib
 import functools
 import os
 import sys
-from typing import Callable
-from typing import ContextManager
+from typing import Dict
 from typing import Generator
 from typing import Optional
 from typing import Sequence
@@ -26,6 +25,28 @@ from pre_commit.util import cmd_output_b
 ENVIRONMENT_DIR = 'py_env'
 
 
+@functools.lru_cache(maxsize=None)
+def _version_info(exe: str) -> str:
+    prog = 'import sys;print(".".join(str(p) for p in sys.version_info))'
+    try:
+        return cmd_output(exe, '-S', '-c', prog)[1].strip()
+    except CalledProcessError:
+        return f'<<error retrieving version from {exe}>>'
+
+
+def _read_pyvenv_cfg(filename: str) -> Dict[str, str]:
+    ret = {}
+    with open(filename) as f:
+        for line in f:
+            try:
+                k, v = line.split('=')
+            except ValueError:  # blank line / comment / etc.
+                continue
+            else:
+                ret[k.strip()] = v.strip()
+    return ret
+
+
 def bin_dir(venv: str) -> str:
     """On windows there's a different directory for the virtualenv"""
     bin_part = 'Scripts' if os.name == 'nt' else 'bin'
@@ -34,6 +55,7 @@ def bin_dir(venv: str) -> str:
 
 def get_env_patch(venv: str) -> PatchesT:
     return (
+        ('PIP_DISABLE_PIP_VERSION_CHECK', '1'),
         ('PYTHONHOME', UNSET),
         ('VIRTUAL_ENV', venv),
         ('PATH', (bin_dir(venv), os.pathsep, Var('PATH'))),
@@ -45,9 +67,10 @@ def _find_by_py_launcher(
 ) -> Optional[str]:  # pragma: no cover (windows only)
     if version.startswith('python'):
         num = version[len('python'):]
+        cmd = ('py', f'-{num}', '-c', 'import sys; print(sys.executable)')
+        env = dict(os.environ, PYTHONIOENCODING='UTF-8')
         try:
-            cmd = ('py', f'-{num}', '-c', 'import sys; print(sys.executable)')
-            return cmd_output(*cmd)[1].strip()
+            return cmd_output(*cmd, env=env)[1].strip()
         except CalledProcessError:
             pass
     return None
@@ -115,6 +138,9 @@ def _sys_executable_matches(version: str) -> bool:
 
 
 def norm_version(version: str) -> str:
+    if version == C.DEFAULT:
+        return os.path.realpath(sys.executable)
+
     # first see if our current executable is appropriate
     if _sys_executable_matches(version):
         return sys.executable
@@ -139,70 +165,59 @@ def norm_version(version: str) -> str:
     return os.path.expanduser(version)
 
 
-def py_interface(
-        _dir: str,
-        _make_venv: Callable[[str, str], None],
-) -> Tuple[
-        Callable[[Prefix, str], ContextManager[None]],
-        Callable[[Prefix, str], bool],
-        Callable[[Hook, Sequence[str], bool], Tuple[int, bytes]],
-        Callable[[Prefix, str, Sequence[str]], None],
-]:
-    @contextlib.contextmanager
-    def in_env(
-            prefix: Prefix,
-            language_version: str,
-    ) -> Generator[None, None, None]:
-        envdir = prefix.path(helpers.environment_dir(_dir, language_version))
-        with envcontext(get_env_patch(envdir)):
-            yield
-
-    def healthy(prefix: Prefix, language_version: str) -> bool:
-        envdir = helpers.environment_dir(_dir, language_version)
-        exe_name = 'python.exe' if sys.platform == 'win32' else 'python'
-        py_exe = prefix.path(bin_dir(envdir), exe_name)
-        with in_env(prefix, language_version):
-            retcode, _, _ = cmd_output_b(
-                py_exe, '-c', 'import ctypes, datetime, io, os, ssl, weakref',
-                cwd='/',
-                retcode=None,
-            )
-        return retcode == 0
-
-    def run_hook(
-            hook: Hook,
-            file_args: Sequence[str],
-            color: bool,
-    ) -> Tuple[int, bytes]:
-        with in_env(hook.prefix, hook.language_version):
-            return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
-
-    def install_environment(
-            prefix: Prefix,
-            version: str,
-            additional_dependencies: Sequence[str],
-    ) -> None:
-        directory = helpers.environment_dir(_dir, version)
-        install = ('python', '-mpip', 'install', '.', *additional_dependencies)
-
-        env_dir = prefix.path(directory)
-        with clean_path_on_failure(env_dir):
-            if version != C.DEFAULT:
-                python = norm_version(version)
-            else:
-                python = os.path.realpath(sys.executable)
-            _make_venv(env_dir, python)
-            with in_env(prefix, version):
-                helpers.run_setup_cmd(prefix, install)
-
-    return in_env, healthy, run_hook, install_environment
-
-
-def make_venv(envdir: str, python: str) -> None:
-    env = dict(os.environ, VIRTUALENV_NO_DOWNLOAD='1')
-    cmd = (sys.executable, '-mvirtualenv', envdir, '-p', python)
-    cmd_output_b(*cmd, env=env, cwd='/')
-
-
-_interface = py_interface(ENVIRONMENT_DIR, make_venv)
-in_env, healthy, run_hook, install_environment = _interface
+@contextlib.contextmanager
+def in_env(
+        prefix: Prefix,
+        language_version: str,
+) -> Generator[None, None, None]:
+    directory = helpers.environment_dir(ENVIRONMENT_DIR, language_version)
+    envdir = prefix.path(directory)
+    with envcontext(get_env_patch(envdir)):
+        yield
+
+
+def healthy(prefix: Prefix, language_version: str) -> bool:
+    directory = helpers.environment_dir(ENVIRONMENT_DIR, language_version)
+    envdir = prefix.path(directory)
+    pyvenv_cfg = os.path.join(envdir, 'pyvenv.cfg')
+
+    # created with "old" virtualenv
+    if not os.path.exists(pyvenv_cfg):
+        return False
+
+    exe_name = 'python.exe' if sys.platform == 'win32' else 'python'
+    py_exe = prefix.path(bin_dir(envdir), exe_name)
+    cfg = _read_pyvenv_cfg(pyvenv_cfg)
+
+    return (
+        'version_info' in cfg and
+        _version_info(py_exe) == cfg['version_info'] and (
+            'base-executable' not in cfg or
+            _version_info(cfg['base-executable']) == cfg['version_info']
+        )
+    )
+
+
+def install_environment(
+        prefix: Prefix,
+        version: str,
+        additional_dependencies: Sequence[str],
+) -> None:
+    envdir = prefix.path(helpers.environment_dir(ENVIRONMENT_DIR, version))
+    python = norm_version(version)
+    venv_cmd = (sys.executable, '-mvirtualenv', envdir, '-p', python)
+    install_cmd = ('python', '-mpip', 'install', '.', *additional_dependencies)
+
+    with clean_path_on_failure(envdir):
+        cmd_output_b(*venv_cmd, cwd='/')
+        with in_env(prefix, version):
+            helpers.run_setup_cmd(prefix, install_cmd)
+
+
+def run_hook(
+        hook: Hook,
+        file_args: Sequence[str],
+        color: bool,
+) -> Tuple[int, bytes]:
+    with in_env(hook.prefix, hook.language_version):
+        return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
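The rewritten `healthy()` above works by comparing the interpreter version recorded by virtualenv in `pyvenv.cfg` against the interpreter that is actually in the environment now. A rough standalone sketch of the same idea, reusing the parsing logic added in this diff; the function names are invented and the `base-executable` check is omitted for brevity:

    import os
    import subprocess
    import sys
    from typing import Dict


    def read_pyvenv_cfg(filename: str) -> Dict[str, str]:
        # same tolerant key=value parsing as _read_pyvenv_cfg above
        ret = {}
        with open(filename) as f:
            for line in f:
                try:
                    k, v = line.split('=')
                except ValueError:  # blank line / comment / etc.
                    continue
                ret[k.strip()] = v.strip()
        return ret


    def version_info(exe: str) -> str:
        prog = 'import sys;print(".".join(str(p) for p in sys.version_info))'
        return subprocess.check_output((exe, '-S', '-c', prog)).decode().strip()


    def looks_healthy(envdir: str) -> bool:
        cfg_path = os.path.join(envdir, 'pyvenv.cfg')
        if not os.path.exists(cfg_path):  # created with "old" virtualenv
            return False
        cfg = read_pyvenv_cfg(cfg_path)
        if 'version_info' not in cfg:
            return False
        exe = 'python.exe' if sys.platform == 'win32' else 'python'
        bin_part = 'Scripts' if os.name == 'nt' else 'bin'
        current = version_info(os.path.join(envdir, bin_part, exe))
        return current == cfg['version_info']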
@@ -1,46 +0,0 @@
-import os.path
-
-from pre_commit.languages import python
-from pre_commit.util import CalledProcessError
-from pre_commit.util import cmd_output
-from pre_commit.util import cmd_output_b
-
-ENVIRONMENT_DIR = 'py_venv'
-get_default_version = python.get_default_version
-
-
-def orig_py_exe(exe: str) -> str:  # pragma: no cover (platform specific)
-    """A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
-    packages to the incorrect location. Attempt to find the _original_ exe
-    and invoke `-mvenv` from there.
-
-    See:
-    - https://github.com/pre-commit/pre-commit/issues/755
-    - https://github.com/pypa/virtualenv/issues/1095
-    - https://bugs.python.org/issue30811
-    """
-    try:
-        prefix_script = 'import sys; print(sys.real_prefix)'
-        _, prefix, _ = cmd_output(exe, '-c', prefix_script)
-        prefix = prefix.strip()
-    except CalledProcessError:
-        # not created from -mvirtualenv
-        return exe
-
-    if os.name == 'nt':
-        expected = os.path.join(prefix, 'python.exe')
-    else:
-        expected = os.path.join(prefix, 'bin', os.path.basename(exe))
-
-    if os.path.exists(expected):
-        return expected
-    else:
-        return exe
-
-
-def make_venv(envdir: str, python: str) -> None:
-    cmd_output_b(orig_py_exe(python), '-mvenv', envdir, cwd='/')
-
-
-_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)
-in_env, healthy, run_hook, install_environment = _interface
@@ -9,6 +9,7 @@ from typing import Tuple
 import pre_commit.constants as C
 from pre_commit.envcontext import envcontext
 from pre_commit.envcontext import PatchesT
+from pre_commit.envcontext import UNSET
 from pre_commit.envcontext import Var
 from pre_commit.hook import Hook
 from pre_commit.languages import helpers
@@ -28,6 +29,7 @@ def get_env_patch(
 ) -> PatchesT:  # pragma: win32 no cover
     patches: PatchesT = (
         ('GEM_HOME', os.path.join(venv, 'gems')),
+        ('GEM_PATH', UNSET),
         ('RBENV_ROOT', venv),
         ('BUNDLE_IGNORE_CONFIG', '1'),
         (
@@ -79,7 +79,7 @@ def _add_hook_type_option(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         '-t', '--hook-type', choices=(
             'pre-commit', 'pre-merge-commit', 'pre-push',
-            'prepare-commit-msg', 'commit-msg', 'post-checkout',
+            'prepare-commit-msg', 'commit-msg', 'post-commit', 'post-checkout',
         ),
         action=AppendReplaceDefault,
         default=['pre-commit'],
@@ -11,10 +11,13 @@ from pre_commit.store import Store
 
 
 def check_all_hooks_match_files(config_file: str) -> int:
-    classifier = Classifier(git.get_all_files())
+    config = load_config(config_file)
+    classifier = Classifier.from_config(
+        git.get_all_files(), config['files'], config['exclude'],
+    )
     retv = 0
 
-    for hook in all_hooks(load_config(config_file), Store()):
+    for hook in all_hooks(config, Store()):
         if hook.always_run or hook.language == 'fail':
             continue
         elif not classifier.filenames_for_hook(hook):
@@ -28,11 +28,14 @@ def exclude_matches_any(
 
 def check_useless_excludes(config_file: str) -> int:
     config = load_config(config_file)
-    classifier = Classifier(git.get_all_files())
+    filenames = git.get_all_files()
+    classifier = Classifier.from_config(
+        filenames, config['files'], config['exclude'],
+    )
     retv = 0
 
     exclude = config['exclude']
-    if not exclude_matches_any(classifier.filenames, '', exclude):
+    if not exclude_matches_any(filenames, '', exclude):
         print(
             f'The global exclude pattern {exclude!r} does not match any files',
         )
pre_commit/resources/empty_template_go.mod (new empty file)
@@ -56,8 +56,10 @@ def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]:
         with open(patch_filename, 'wb') as patch_file:
             patch_file.write(diff_stdout_binary)
 
-        # Clear the working directory of unstaged changes
-        cmd_output_b('git', 'checkout', '--', '.')
+        # prevent recursive post-checkout hooks (#1418)
+        no_checkout_env = dict(os.environ, _PRE_COMMIT_SKIP_POST_CHECKOUT='1')
+        cmd_output_b('git', 'checkout', '--', '.', env=no_checkout_env)
 
         try:
             yield
         finally:
@@ -72,8 +74,9 @@ def _unstaged_changes_cleared(patch_dir: str) -> Generator[None, None, None]:
                 # We failed to apply the patch, presumably due to fixes made
                 # by hooks.
                 # Roll back the changes made by hooks.
-                cmd_output_b('git', 'checkout', '--', '.')
+                cmd_output_b('git', 'checkout', '--', '.', env=no_checkout_env)
                 _git_apply(patch_filename)
+
             logger.info(f'Restored changes from {patch_filename}.')
     else:
         # There weren't any staged files so we don't need to do anything
@@ -30,10 +30,11 @@ def _get_default_directory() -> str:
     `Store.get_default_directory` can be mocked in tests and
     `_get_default_directory` can be tested.
     """
-    return os.environ.get('PRE_COMMIT_HOME') or os.path.join(
+    ret = os.environ.get('PRE_COMMIT_HOME') or os.path.join(
         os.environ.get('XDG_CACHE_HOME') or os.path.expanduser('~/.cache'),
         'pre-commit',
     )
+    return os.path.realpath(ret)
 
 
 class Store:
@@ -182,9 +183,9 @@ class Store:
         return self._new_repo(repo, ref, deps, clone_strategy)
 
     LOCAL_RESOURCES = (
-        'Cargo.toml', 'main.go', 'main.rs', '.npmignore', 'package.json',
-        'pre_commit_dummy_package.gemspec', 'setup.py', 'environment.yml',
-        'Makefile.PL',
+        'Cargo.toml', 'main.go', 'go.mod', 'main.rs', '.npmignore',
+        'package.json', 'pre_commit_dummy_package.gemspec', 'setup.py',
+        'environment.yml', 'Makefile.PL',
     )
 
     def make_local(self, deps: Sequence[str]) -> str:
@@ -36,10 +36,11 @@ yaml_load = functools.partial(yaml.load, Loader=Loader)
 Dumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper)
 
 
-def yaml_dump(o: Any) -> str:
+def yaml_dump(o: Any, **kwargs: Any) -> str:
     # when python/mypy#1484 is solved, this can be `functools.partial`
     return yaml.dump(
         o, Dumper=Dumper, default_flow_style=False, indent=4, sort_keys=False,
+        **kwargs,
     )
 
 
@@ -1,6 +1,6 @@
 [metadata]
 name = pre_commit
-version = 2.3.0
+version = 2.5.1
 description = A framework for managing and maintaining multi-language pre-commit hooks.
 long_description = file: README.md
 long_description_content_type = text/markdown
@@ -27,7 +27,7 @@ install_requires =
     nodeenv>=0.11.1
     pyyaml>=5.1
     toml
-    virtualenv>=15.2
+    virtualenv>=20.0.8
    importlib-metadata;python_version<"3.8"
     importlib-resources;python_version<"3.7"
 python_requires = >=3.6.1
@@ -3,8 +3,7 @@ import sys
 
 LANGUAGES = [
     'conda', 'docker', 'docker_image', 'fail', 'golang', 'node', 'perl',
-    'pygrep', 'python', 'python_venv', 'ruby', 'rust', 'script', 'swift',
-    'system',
+    'pygrep', 'python', 'ruby', 'rust', 'script', 'swift', 'system',
 ]
 FIELDS = [
     'ENVIRONMENT_DIR', 'get_default_version', 'healthy', 'install_environment',
@@ -45,20 +45,6 @@ xfailif_windows_no_ruby = pytest.mark.xfail(
 xfailif_windows = pytest.mark.xfail(os.name == 'nt', reason='windows')
 
 
-def supports_venv():  # pragma: no cover (platform specific)
-    try:
-        __import__('ensurepip')
-        __import__('venv')
-        return True
-    except ImportError:
-        return False
-
-
-xfailif_no_venv = pytest.mark.xfail(
-    not supports_venv(), reason='Does not support venv module',
-)
-
-
 def run_opts(
         all_files=False,
         files=(),
@@ -103,10 +89,12 @@ def cwd(path):
     os.chdir(original_cwd)
 
 
-def git_commit(*args, fn=cmd_output, msg='commit!', **kwargs):
+def git_commit(*args, fn=cmd_output, msg='commit!', all_files=True, **kwargs):
     kwargs.setdefault('stderr', subprocess.STDOUT)
 
-    cmd = ('git', 'commit', '--allow-empty', '--no-gpg-sign', '-a') + args
+    cmd = ('git', 'commit', '--allow-empty', '--no-gpg-sign', *args)
+    if all_files:  # allow skipping `-a` with `all_files=False`
+        cmd += ('-a',)
     if msg is not None:  # allow skipping `-m` with `msg=None`
         cmd += ('-m', msg)
     ret, out, _ = fn(*cmd, **kwargs)
@@ -414,9 +414,9 @@ def test_autoupdate_local_hooks(in_git_dir, store):
     config = sample_local_config()
     add_config_to_repo('.', config)
     assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
-    new_config_writen = read_config('.')
-    assert len(new_config_writen['repos']) == 1
-    assert new_config_writen['repos'][0] == config
+    new_config_written = read_config('.')
+    assert len(new_config_written['repos']) == 1
+    assert new_config_written['repos'][0] == config
 
 
 def test_autoupdate_local_hooks_with_out_of_date_repo(
@@ -429,9 +429,9 @@ def test_autoupdate_local_hooks_with_out_of_date_repo(
     config = {'repos': [local_config, stale_config]}
     write_config('.', config)
     assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
-    new_config_writen = read_config('.')
-    assert len(new_config_writen['repos']) == 2
-    assert new_config_writen['repos'][0] == local_config
+    new_config_written = read_config('.')
+    assert len(new_config_written['repos']) == 2
+    assert new_config_written['repos'][0] == local_config
 
 
 def test_autoupdate_meta_hooks(tmpdir, store):
@@ -474,3 +474,23 @@ def test_updates_old_format_to_new_format(tmpdir, capsys, store):
     )
     out, _ = capsys.readouterr()
     assert out == 'Configuration has been migrated.\n'
+
+
+def test_maintains_rev_quoting_style(tmpdir, out_of_date, store):
+    fmt = (
+        'repos:\n'
+        '-   repo: {path}\n'
+        '    rev: "{rev}"\n'
+        '    hooks:\n'
+        '    -   id: foo\n'
+        '-   repo: {path}\n'
+        "    rev: '{rev}'\n"
+        '    hooks:\n'
+        '    -   id: foo\n'
+    )
+    cfg = tmpdir.join(C.CONFIG_FILE)
+    cfg.write(fmt.format(path=out_of_date.path, rev=out_of_date.original_rev))
+
+    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
+    expected = fmt.format(path=out_of_date.path, rev=out_of_date.head_rev)
+    assert cfg.read() == expected
@@ -96,6 +96,7 @@ def test_run_legacy_recursive(tmpdir):
         ('pre-merge-commit', []),
         ('pre-push', ['branch_name', 'remote_name']),
         ('commit-msg', ['.git/COMMIT_EDITMSG']),
+        ('post-commit', []),
         ('post-checkout', ['old_head', 'new_head', '1']),
         # multiple choices for commit-editmsg
         ('prepare-commit-msg', ['.git/COMMIT_EDITMSG']),
@@ -117,7 +118,7 @@ def test_check_args_length_error_too_many_plural():
     )
 
 
-def test_check_args_length_error_too_many_singluar():
+def test_check_args_length_error_too_many_singular():
     with pytest.raises(SystemExit) as excinfo:
         hook_impl._check_args_length('commit-msg', [])
     msg, = excinfo.value.args
@@ -149,6 +150,13 @@ def test_run_ns_commit_msg():
     assert ns.commit_msg_filename == '.git/COMMIT_MSG'
 
 
+def test_run_ns_post_commit():
+    ns = hook_impl._run_ns('post-commit', True, (), b'')
+    assert ns is not None
+    assert ns.hook_stage == 'post-commit'
+    assert ns.color is True
+
+
 def test_run_ns_post_checkout():
     ns = hook_impl._run_ns('post-checkout', True, ('a', 'b', 'c'), b'')
     assert ns is not None
@@ -726,6 +726,32 @@ def test_commit_msg_legacy(commit_msg_repo, tempdir_factory, store):
     assert second_line.startswith('Must have "Signed off by:"...')
 
 
+def test_post_commit_integration(tempdir_factory, store):
+    path = git_dir(tempdir_factory)
+    config = [
+        {
+            'repo': 'local',
+            'hooks': [{
+                'id': 'post-commit',
+                'name': 'Post commit',
+                'entry': 'touch post-commit.tmp',
+                'language': 'system',
+                'always_run': True,
+                'verbose': True,
+                'stages': ['post-commit'],
+            }],
+        },
+    ]
+    write_config(path, config)
+    with cwd(path):
+        _get_commit_output(tempdir_factory)
+        assert not os.path.exists('post-commit.tmp')
+
+        install(C.CONFIG_FILE, store, hook_types=['post-commit'])
+        _get_commit_output(tempdir_factory)
+        assert os.path.exists('post-commit.tmp')
+
+
 def test_post_checkout_integration(tempdir_factory, store):
     path = git_dir(tempdir_factory)
     config = [
@@ -763,6 +789,37 @@ def test_post_checkout_integration(tempdir_factory, store):
         assert 'some_file' not in stderr
 
 
+def test_skips_post_checkout_unstaged_changes(tempdir_factory, store):
+    path = git_dir(tempdir_factory)
+    config = {
+        'repo': 'local',
+        'hooks': [{
+            'id': 'fail',
+            'name': 'fail',
+            'entry': 'fail',
+            'language': 'fail',
+            'always_run': True,
+            'stages': ['post-checkout'],
+        }],
+    }
+    write_config(path, config)
+    with cwd(path):
+        cmd_output('git', 'add', '.')
+        _get_commit_output(tempdir_factory)
+
+        install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
+        install(C.CONFIG_FILE, store, hook_types=['post-checkout'])
+
+        # make an unstaged change so staged_files_only fires
+        open('file', 'a').close()
+        cmd_output('git', 'add', 'file')
+        with open('file', 'w') as f:
+            f.write('unstaged changes')
+
+        retc, out = _get_commit_output(tempdir_factory, all_files=False)
+        assert retc == 0
+
+
 def test_prepare_commit_msg_integration_failing(
         failing_prepare_commit_msg_repo, tempdir_factory, store,
 ):
@@ -1,6 +1,7 @@
 import pytest
 
 import pre_commit.constants as C
+from pre_commit.clientlib import InvalidConfigError
 from pre_commit.commands.migrate_config import _indent
 from pre_commit.commands.migrate_config import migrate_config
 
@@ -147,10 +148,10 @@ def test_migrate_config_sha_to_rev(tmpdir):
 
 
 @pytest.mark.parametrize('contents', ('', '\n'))
-def test_empty_configuration_file_user_error(tmpdir, contents):
+def test_migrate_config_invalid_configuration(tmpdir, contents):
     cfg = tmpdir.join(C.CONFIG_FILE)
     cfg.write(contents)
-    with tmpdir.as_cwd():
-        assert not migrate_config(C.CONFIG_FILE)
+    with tmpdir.as_cwd(), pytest.raises(InvalidConfigError):
+        migrate_config(C.CONFIG_FILE)
     # even though the config is invalid, this should be a noop
     assert cfg.read() == contents
@@ -939,7 +939,7 @@ def test_classifier_normalizes_filenames_on_windows_to_forward_slashes(tmpdir):
     tmpdir.join('a/b/c').ensure()
     with mock.patch.object(os, 'altsep', '/'):
         with mock.patch.object(os, 'sep', '\\'):
-            classifier = Classifier((r'a\b\c',))
+            classifier = Classifier.from_config((r'a\b\c',), '', '^$')
     assert classifier.filenames == ['a/b/c']
 
 
@@ -947,7 +947,7 @@ def test_classifier_does_not_normalize_backslashes_non_windows(tmpdir):
     with mock.patch.object(os.path, 'lexists', return_value=True):
         with mock.patch.object(os, 'altsep', None):
             with mock.patch.object(os, 'sep', '/'):
-                classifier = Classifier((r'a/b\c',))
+                classifier = Classifier.from_config((r'a/b\c',), '', '^$')
     assert classifier.filenames == [r'a/b\c']
 
 
@@ -1022,3 +1022,18 @@ def test_args_hook_only(cap_out, store, repo_with_passing_hook):
         run_opts(hook='do_not_commit'),
     )
     assert b'identity-copy' not in printed
+
+
+def test_skipped_without_any_setup_for_post_checkout(in_git_dir, store):
+    environ = {'_PRE_COMMIT_SKIP_POST_CHECKOUT': '1'}
+    opts = run_opts(hook_stage='post-checkout')
+    assert run(C.CONFIG_FILE, store, opts, environ=environ) == 0
+
+
+def test_pre_commit_env_variable_set(cap_out, store, repo_with_passing_hook):
+    args = run_opts()
+    environ: EnvironT = {}
+    ret, printed = _do_run(
+        cap_out, store, repo_with_passing_hook, args, environ,
+    )
+    assert environ['PRE_COMMIT'] == '1'
@@ -186,3 +186,8 @@ def test_no_git_env():
         'GIT_SSH': '/usr/bin/ssh',
         'GIT_SSH_COMMAND': 'ssh -o',
     }
+
+
+def test_init_repo_no_hooks(tmpdir):
+    git.init_repo(str(tmpdir), remote='dne')
+    assert not tmpdir.join('.git/hooks').exists()
@@ -78,5 +78,5 @@ def test_target_concurrency_cpu_count_not_implemented():
 
 def test_shuffled_is_deterministic():
     seq = [str(i) for i in range(10)]
-    expected = ['3', '7', '8', '2', '4', '6', '5', '1', '0', '9']
+    expected = ['4', '0', '5', '1', '8', '6', '2', '3', '7', '9']
     assert helpers._shuffled(seq) == expected
@ -5,10 +5,23 @@ from unittest import mock
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import pre_commit.constants as C
|
import pre_commit.constants as C
|
||||||
|
from pre_commit.envcontext import envcontext
|
||||||
from pre_commit.languages import python
|
from pre_commit.languages import python
|
||||||
from pre_commit.prefix import Prefix
|
from pre_commit.prefix import Prefix
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_pyvenv_cfg(tmpdir):
|
||||||
|
pyvenv_cfg = tmpdir.join('pyvenv.cfg')
|
||||||
|
pyvenv_cfg.write(
|
||||||
|
'# I am a comment\n'
|
||||||
|
'\n'
|
||||||
|
'foo = bar\n'
|
||||||
|
'version-info=123\n',
|
||||||
|
)
|
||||||
|
expected = {'foo': 'bar', 'version-info': '123'}
|
||||||
|
assert python._read_pyvenv_cfg(pyvenv_cfg) == expected
|
||||||
|
|
||||||
|
|
||||||
def test_norm_version_expanduser():
|
def test_norm_version_expanduser():
|
||||||
home = os.path.expanduser('~')
|
home = os.path.expanduser('~')
|
||||||
if os.name == 'nt': # pragma: nt cover
|
if os.name == 'nt': # pragma: nt cover
|
||||||
|
@ -21,6 +34,10 @@ def test_norm_version_expanduser():
|
||||||
assert result == expected_path
|
assert result == expected_path
|
||||||
|
|
||||||
|
|
||||||
|
def test_norm_version_of_default_is_sys_executable():
|
||||||
|
assert python.norm_version('default') == os.path.realpath(sys.executable)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('v', ('python3.6', 'python3', 'python'))
|
@pytest.mark.parametrize('v', ('python3.6', 'python3', 'python'))
|
||||||
def test_sys_executable_matches(v):
|
def test_sys_executable_matches(v):
|
||||||
with mock.patch.object(sys, 'version_info', (3, 6, 7)):
|
with mock.patch.object(sys, 'version_info', (3, 6, 7)):
|
||||||
|
@@ -49,27 +66,78 @@ def test_find_by_sys_executable(exe, realpath, expected):
     assert python._find_by_sys_executable() == expected
 
 
-def test_healthy_types_py_in_cwd(tmpdir):
+@pytest.fixture
+def python_dir(tmpdir):
     with tmpdir.as_cwd():
         prefix = tmpdir.join('prefix').ensure_dir()
         prefix.join('setup.py').write('import setuptools; setuptools.setup()')
         prefix = Prefix(str(prefix))
+        yield prefix, tmpdir
+
+
+def test_healthy_default_creator(python_dir):
+    prefix, tmpdir = python_dir
+
+    python.install_environment(prefix, C.DEFAULT, ())
+
+    # should be healthy right after creation
+    assert python.healthy(prefix, C.DEFAULT) is True
+
+    # even if a `types.py` file exists, should still be healthy
+    tmpdir.join('types.py').ensure()
+    assert python.healthy(prefix, C.DEFAULT) is True
+
+
+def test_healthy_venv_creator(python_dir):
+    # venv creator produces slightly different pyvenv.cfg
+    prefix, tmpdir = python_dir
+
+    with envcontext((('VIRTUALENV_CREATOR', 'venv'),)):
         python.install_environment(prefix, C.DEFAULT, ())
 
-        # even if a `types.py` file exists, should still be healthy
-        tmpdir.join('types.py').ensure()
-        assert python.healthy(prefix, C.DEFAULT) is True
+    assert python.healthy(prefix, C.DEFAULT) is True
 
 
-def test_healthy_python_goes_missing(tmpdir):
-    with tmpdir.as_cwd():
-        prefix = tmpdir.join('prefix').ensure_dir()
-        prefix.join('setup.py').write('import setuptools; setuptools.setup()')
-        prefix = Prefix(str(prefix))
-        python.install_environment(prefix, C.DEFAULT, ())
+def test_unhealthy_python_goes_missing(python_dir):
+    prefix, tmpdir = python_dir
 
-        exe_name = 'python' if sys.platform != 'win32' else 'python.exe'
-        py_exe = prefix.path(python.bin_dir('py_env-default'), exe_name)
-        os.remove(py_exe)
+    python.install_environment(prefix, C.DEFAULT, ())
 
-        assert python.healthy(prefix, C.DEFAULT) is False
+    exe_name = 'python' if sys.platform != 'win32' else 'python.exe'
+    py_exe = prefix.path(python.bin_dir('py_env-default'), exe_name)
+    os.remove(py_exe)
+
+    assert python.healthy(prefix, C.DEFAULT) is False
+
+
+def test_unhealthy_with_version_change(python_dir):
+    prefix, tmpdir = python_dir
+
+    python.install_environment(prefix, C.DEFAULT, ())
+
+    with open(prefix.path('py_env-default/pyvenv.cfg'), 'w') as f:
+        f.write('version_info = 1.2.3\n')
+
+    assert python.healthy(prefix, C.DEFAULT) is False
+
+
+def test_unhealthy_system_version_changes(python_dir):
+    prefix, tmpdir = python_dir
+
+    python.install_environment(prefix, C.DEFAULT, ())
+
+    with open(prefix.path('py_env-default/pyvenv.cfg'), 'a') as f:
+        f.write('base-executable = /does/not/exist\n')
+
+    assert python.healthy(prefix, C.DEFAULT) is False
+
+
+def test_unhealthy_old_virtualenv(python_dir):
+    prefix, tmpdir = python_dir
+
+    python.install_environment(prefix, C.DEFAULT, ())
+
+    # simulate "old" virtualenv by deleting this file
+    os.remove(prefix.path('py_env-default/pyvenv.cfg'))
+
+    assert python.healthy(prefix, C.DEFAULT) is False
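Read together, these tests spell out what a "healthy" cached environment means: `pyvenv.cfg` must exist (older virtualenvs did not write one), it must record a `version_info`, and the environment's interpreter must still be present and still report that version, with `base-executable`, when recorded, revalidated the same way. A simplified check in that spirit, reusing the parser sketched above; the directory layout and helper names here are assumptions, not pre-commit's internals:

```python
import os.path
import subprocess
import sys


def _version_info(exe: str) -> str:
    # ask the interpreter for its version; any failure (e.g. a deleted exe)
    # returns a sentinel that can never match the recorded value
    prog = 'import sys; print(".".join(str(p) for p in sys.version_info))'
    try:
        return subprocess.check_output((exe, '-c', prog), text=True).strip()
    except (OSError, subprocess.CalledProcessError):
        return '<<error>>'


def healthy(env_dir: str) -> bool:
    cfg_path = os.path.join(env_dir, 'pyvenv.cfg')
    if not os.path.exists(cfg_path):  # "old" virtualenv: no pyvenv.cfg at all
        return False

    cfg = read_pyvenv_cfg(cfg_path)   # parser from the sketch above
    exe = os.path.join(
        env_dir,
        'Scripts' if sys.platform == 'win32' else 'bin',
        'python.exe' if sys.platform == 'win32' else 'python',
    )
    return (
        'version_info' in cfg and
        _version_info(exe) == cfg['version_info'] and
        # if the creator recorded a base executable, it must still agree too
        ('base-executable' not in cfg or
         _version_info(cfg['base-executable']) == cfg['version_info'])
    )
```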
@@ -33,7 +33,7 @@ from testing.util import cwd
 from testing.util import get_resource_path
 from testing.util import skipif_cant_run_docker
 from testing.util import skipif_cant_run_swift
-from testing.util import xfailif_no_venv
+from testing.util import xfailif_windows
 from testing.util import xfailif_windows_no_ruby
 
 
@@ -163,7 +163,6 @@ def test_python_hook_weird_setup_cfg(in_git_dir, tempdir_factory, store):
     )
 
 
-@xfailif_no_venv
 def test_python_venv(tempdir_factory, store):  # pragma: no cover (no venv)
     _test_hook_repo(
         tempdir_factory, store, 'python_venv_hooks_repo',
@@ -243,6 +242,7 @@ def test_run_a_node_hook(tempdir_factory, store):
     )
 
 
+@xfailif_windows  # pragma: win32 no cover
 def test_run_a_node_hook_default_version(tempdir_factory, store):
     # make sure that this continues to work for platforms where node is not
     # installed at the system
@@ -252,6 +252,7 @@ def test_run_a_node_hook_default_version(tempdir_factory, store):
         test_run_a_node_hook(tempdir_factory, store)
 
 
+@xfailif_windows  # pragma: win32 no cover
 def test_run_versioned_node_hook(tempdir_factory, store):
     _test_hook_repo(
         tempdir_factory, store, 'node_versioned_hooks_repo',
@@ -534,6 +535,7 @@ def test_additional_ruby_dependencies_installed(tempdir_factory, store):
     assert 'tins' in output
 
 
+@xfailif_windows  # pragma: win32 no cover
 def test_additional_node_dependencies_installed(tempdir_factory, store):
     path = make_repo(tempdir_factory, 'node_hooks_repo')
     config = make_config_from_repo(path)
@@ -880,7 +882,7 @@ def test_manifest_hooks(tempdir_factory, store):
         require_serial=False,
         stages=(
             'commit', 'merge-commit', 'prepare-commit-msg', 'commit-msg',
-            'manual', 'post-checkout', 'push',
+            'post-commit', 'manual', 'post-checkout', 'push',
         ),
         types=['file'],
         verbose=False,
@@ -25,7 +25,8 @@ def test_our_session_fixture_works():
 def test_get_default_directory_defaults_to_home():
     # Not we use the module level one which is not mocked
     ret = _get_default_directory()
-    assert ret == os.path.join(os.path.expanduser('~/.cache'), 'pre-commit')
+    expected = os.path.realpath(os.path.expanduser('~/.cache/pre-commit'))
+    assert ret == expected
 
 
 def test_adheres_to_xdg_specification():
@@ -33,7 +34,8 @@ def test_adheres_to_xdg_specification():
         os.environ, {'XDG_CACHE_HOME': '/tmp/fakehome'},
     ):
         ret = _get_default_directory()
-    assert ret == os.path.join('/tmp/fakehome', 'pre-commit')
+    expected = os.path.realpath('/tmp/fakehome/pre-commit')
+    assert ret == expected
 
 
 def test_uses_environment_variable_when_present():
@@ -41,7 +43,8 @@ def test_uses_environment_variable_when_present():
         os.environ, {'PRE_COMMIT_HOME': '/tmp/pre_commit_home'},
     ):
         ret = _get_default_directory()
-    assert ret == '/tmp/pre_commit_home'
+    expected = os.path.realpath('/tmp/pre_commit_home')
+    assert ret == expected
 
 
 def test_store_init(store):
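All three store tests now expect the cache directory to be passed through `os.path.realpath` after the usual lookups, i.e. `PRE_COMMIT_HOME` wins, then `XDG_CACHE_HOME`, then `~/.cache`, and the result is normalised. Roughly:

```python
import os


def get_default_directory() -> str:
    """Resolve where hook environments are cached, as the tests above expect."""
    ret = os.environ.get('PRE_COMMIT_HOME') or os.path.join(
        os.environ.get('XDG_CACHE_HOME') or os.path.expanduser('~/.cache'),
        'pre-commit',
    )
    return os.path.realpath(ret)
```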
tox.ini
@@ -8,7 +8,6 @@ commands =
     coverage erase
     coverage run -m pytest {posargs:tests}
     coverage report
-    pre-commit install
 
 [testenv:pre-commit]
 skip_install = true