Merging upstream version 2.16.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 2b79a68a34
commit ada93679f9
34 changed files with 301 additions and 126 deletions
@@ -247,38 +247,64 @@ def test_warn_mutable_rev_conditional():
        cfgv.validate(config_obj, CONFIG_REPO_DICT)


def test_validate_optional_sensible_regex_at_hook_level(caplog):
    config_obj = {
        'id': 'flake8',
        'files': 'dir/*.py',
    }
    cfgv.validate(config_obj, CONFIG_HOOK_DICT)

    assert caplog.record_tuples == [
@pytest.mark.parametrize(
    ('regex', 'warning'),
    (
        (
            'pre_commit',
            logging.WARNING,
            r'dir/*.py',
            "The 'files' field in hook 'flake8' is a regex, not a glob -- "
            "matching '/*' probably isn't what you want here",
        ),
    ]


def test_validate_optional_sensible_regex_at_top_level(caplog):
        (
            r'dir[\/].*\.py',
            r"pre-commit normalizes slashes in the 'files' field in hook "
            r"'flake8' to forward slashes, so you can use / instead of [\/]",
        ),
        (
            r'dir[/\\].*\.py',
            r"pre-commit normalizes slashes in the 'files' field in hook "
            r"'flake8' to forward slashes, so you can use / instead of [/\\]",
        ),
    ),
)
def test_validate_optional_sensible_regex_at_hook(caplog, regex, warning):
    config_obj = {
        'files': 'dir/*.py',
        'id': 'flake8',
        'files': regex,
    }
    cfgv.validate(config_obj, CONFIG_HOOK_DICT)

    assert caplog.record_tuples == [('pre_commit', logging.WARNING, warning)]


@pytest.mark.parametrize(
    ('regex', 'warning'),
    (
        (
            r'dir/*.py',
            "The top-level 'files' field is a regex, not a glob -- "
            "matching '/*' probably isn't what you want here",
        ),
        (
            r'dir[\/].*\.py',
            r"pre-commit normalizes the slashes in the top-level 'files' "
            r'field to forward slashes, so you can use / instead of [\/]',
        ),
        (
            r'dir[/\\].*\.py',
            r"pre-commit normalizes the slashes in the top-level 'files' "
            r'field to forward slashes, so you can use / instead of [/\\]',
        ),
    ),
)
def test_validate_optional_sensible_regex_at_top_level(caplog, regex, warning):
    config_obj = {
        'files': regex,
        'repos': [],
    }
    cfgv.validate(config_obj, CONFIG_SCHEMA)

    assert caplog.record_tuples == [
        (
            'pre_commit',
            logging.WARNING,
            "The top-level 'files' field is a regex, not a glob -- matching "
            "'/*' probably isn't what you want here",
        ),
    ]
    assert caplog.record_tuples == [('pre_commit', logging.WARNING, warning)]


@pytest.mark.parametrize('fn', (validate_config_main, validate_manifest_main))
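For context, the warning these parametrized cases exercise exists because a pattern like dir/*.py behaves very differently as a regex than as a glob. A quick standalone illustration (not part of the diff):

import re

# as a glob, 'dir/*.py' means "every .py file under dir/"; as a regex, '/*' is
# "zero or more slashes" and '.' is "any single character", so it misses the
# files the author almost certainly meant and matches odd strings instead
assert re.search(r'dir/*.py', 'dir/foo.py') is None
assert re.search(r'dir/*.py', 'dirXpy') is not None
assert re.search(r'dir/.*\.py', 'dir/foo.py') is not None  # the intended regex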
@@ -5,6 +5,7 @@ import pytest
import yaml

import pre_commit.constants as C
from pre_commit import envcontext
from pre_commit import git
from pre_commit import util
from pre_commit.commands.autoupdate import _check_hooks_still_exist_at_rev
@@ -176,6 +177,14 @@ def test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store):
    assert cfg.read() == fmt.format(out_of_date.path, out_of_date.head_rev)


def test_autoupdate_with_core_useBuiltinFSMonitor(out_of_date, tmpdir, store):
    # force the setting on "globally" for git
    home = tmpdir.join('fakehome').ensure_dir()
    home.join('.gitconfig').write('[core]\nuseBuiltinFSMonitor = true\n')
    with envcontext.envcontext((('HOME', str(home)),)):
        test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store)


def test_autoupdate_pure_yaml(out_of_date, tmpdir, store):
    with mock.patch.object(util, 'Dumper', yaml.SafeDumper):
        test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store)
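The two new autoupdate tests rerun test_autoupdate_out_of_date_repo under altered process state: a fake HOME whose .gitconfig enables core.useBuiltinFSMonitor, and the dumper swapped for yaml.SafeDumper. A generic sketch of the HOME-isolation pattern, using unittest.mock as an assumed stand-in for pre_commit.envcontext (hypothetical helper, for illustration only):

import os
from unittest import mock

def run_with_global_gitconfig(tmp_path, contents, fn):
    # point HOME at a scratch directory so the only "global" gitconfig that
    # git can find is the one this test writes
    home = tmp_path / 'fakehome'
    home.mkdir()
    (home / '.gitconfig').write_text(contents)
    with mock.patch.dict(os.environ, {'HOME': str(home)}):
        fn()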
@@ -278,11 +278,7 @@ def test_environment_not_sourced(tempdir_factory, store):
    hook = os.path.join(path, '.git/hooks/pre-commit')
    with open(hook) as f:
        src = f.read()
    src = re.sub(
        '\nINSTALL_PYTHON =.*\n',
        '\nINSTALL_PYTHON = "/dne"\n',
        src,
    )
    src = re.sub('\nINSTALL_PYTHON=.*\n', '\nINSTALL_PYTHON="/dne"\n', src)
    with open(hook, 'w') as f:
        f.write(src)
@@ -985,6 +985,18 @@ def test_fail_fast(cap_out, store, repo_with_failing_hook):
    assert printed.count(b'Failing hook') == 1


def test_fail_fast_per_hook(cap_out, store, repo_with_failing_hook):
    with modify_config() as config:
        # More than one hook
        config['repos'][0]['hooks'] *= 2
        config['repos'][0]['hooks'][0]['fail_fast'] = True
    stage_a_file()

    ret, printed = _do_run(cap_out, store, repo_with_failing_hook, run_opts())
    # it should have only run one hook
    assert printed.count(b'Failing hook') == 1


def test_classifier_removes_dne():
    classifier = Classifier(('this_file_does_not_exist',))
    assert classifier.filenames == []
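The new test covers per-hook fail_fast: the config gains two copies of the same failing hook, fail_fast is set on the first, and the run is expected to stop before the duplicate executes. Roughly, the modified config amounts to something like the following dict (hypothetical local-hook entries, shown only to make the shape concrete):

config = {
    'repos': [{
        'repo': 'local',
        'hooks': [
            {
                'id': 'failing-hook', 'name': 'Failing hook',
                'entry': 'false', 'language': 'system', 'fail_fast': True,
            },
            {
                'id': 'failing-hook', 'name': 'Failing hook',
                'entry': 'false', 'language': 'system',
            },
        ],
    }],
}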
@@ -227,6 +227,11 @@ def test_no_git_env():
        'GIT_SSH': '/usr/bin/ssh',
        'GIT_SSH_COMMAND': 'ssh -o',
        'GIT_DIR': '/none/shall/pass',
        'GIT_CONFIG_KEY_0': 'user.name',
        'GIT_CONFIG_VALUE_0': 'anthony',
        'GIT_CONFIG_KEY_1': 'user.email',
        'GIT_CONFIG_VALUE_1': 'asottile@example.com',
        'GIT_CONFIG_COUNT': '2',
    }
    no_git_env = git.no_git_env(env)
    assert no_git_env == {
@@ -234,6 +239,11 @@ def test_no_git_env():
        'GIT_EXEC_PATH': '/some/git/exec/path',
        'GIT_SSH': '/usr/bin/ssh',
        'GIT_SSH_COMMAND': 'ssh -o',
        'GIT_CONFIG_KEY_0': 'user.name',
        'GIT_CONFIG_VALUE_0': 'anthony',
        'GIT_CONFIG_KEY_1': 'user.email',
        'GIT_CONFIG_VALUE_1': 'asottile@example.com',
        'GIT_CONFIG_COUNT': '2',
    }
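GIT_CONFIG_COUNT, GIT_CONFIG_KEY_<n> and GIT_CONFIG_VALUE_<n> are git's environment equivalent (git 2.31+) of repeated -c key=value options, and the updated expectations assert that no_git_env keeps them while still stripping variables such as GIT_DIR. A small standalone sketch of that equivalence (illustrative only):

import os
import subprocess

env = dict(
    os.environ,
    GIT_CONFIG_COUNT='2',
    GIT_CONFIG_KEY_0='user.name', GIT_CONFIG_VALUE_0='anthony',
    GIT_CONFIG_KEY_1='user.email', GIT_CONFIG_VALUE_1='asottile@example.com',
)
# should print "anthony", just like `git -c user.name=anthony config user.name`
subprocess.run(('git', 'config', 'user.name'), env=env)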
@@ -111,8 +111,8 @@ def test_local_conda_additional_dependencies(store):
            'name': 'local-conda',
            'entry': 'python',
            'language': 'conda',
            'args': ['-c', 'import tzdata; print("OK")'],
            'additional_dependencies': ['python-tzdata'],
            'args': ['-c', 'import botocore; print("OK")'],
            'additional_dependencies': ['botocore'],
        }],
    }
    hook = _get_hook(config, store, 'local-conda')
@@ -164,7 +164,7 @@ def test_python_hook_weird_setup_cfg(in_git_dir, tempdir_factory, store):
    )


def test_python_venv(tempdir_factory, store):  # pragma: no cover (no venv)
def test_python_venv(tempdir_factory, store):
    _test_hook_repo(
        tempdir_factory, store, 'python_venv_hooks_repo',
        'foo', [os.devnull],
@@ -245,7 +245,6 @@ def test_run_a_docker_image_hook(tempdir_factory, store, hook_id):
    )


@xfailif_windows  # pragma: win32 no cover
def test_run_a_node_hook(tempdir_factory, store):
    _test_hook_repo(
        tempdir_factory, store, 'node_hooks_repo',
@@ -253,7 +252,6 @@ def test_run_a_node_hook(tempdir_factory, store):
    )


@xfailif_windows  # pragma: win32 no cover
def test_run_a_node_hook_default_version(tempdir_factory, store):
    # make sure that this continues to work for platforms where node is not
    # installed at the system
@@ -263,7 +261,6 @@ def test_run_a_node_hook_default_version(tempdir_factory, store):
    test_run_a_node_hook(tempdir_factory, store)


@xfailif_windows  # pragma: win32 no cover
def test_run_versioned_node_hook(tempdir_factory, store):
    _test_hook_repo(
        tempdir_factory, store, 'node_versioned_hooks_repo',
@@ -271,7 +268,6 @@ def test_run_versioned_node_hook(tempdir_factory, store):
    )


@xfailif_windows  # pragma: win32 no cover
def test_node_hook_with_npm_userconfig_set(tempdir_factory, store, tmpdir):
    cfg = tmpdir.join('cfg')
    cfg.write('cache=/dne\n')
@@ -653,7 +649,6 @@ def test_additional_ruby_dependencies_installed(tempdir_factory, store):
    assert 'tins' in output


@xfailif_windows  # pragma: win32 no cover
def test_additional_node_dependencies_installed(tempdir_factory, store):
    path = make_repo(tempdir_factory, 'node_hooks_repo')
    config = make_config_from_repo(path)
@@ -1007,6 +1002,7 @@ def test_manifest_hooks(tempdir_factory, store):
        types=['file'],
        types_or=[],
        verbose=False,
        fail_fast=False,
    )
@@ -1025,13 +1021,13 @@ def test_local_perl_additional_dependencies(store):
            'name': 'hello',
            'entry': 'perltidy --version',
            'language': 'perl',
            'additional_dependencies': ['SHANCOCK/Perl-Tidy-20200110.tar.gz'],
            'additional_dependencies': ['SHANCOCK/Perl-Tidy-20211029.tar.gz'],
        }],
    }
    hook = _get_hook(config, store, 'hello')
    ret, out = _hook_run(hook, (), color=False)
    assert ret == 0
    assert _norm_out(out).startswith(b'This is perltidy, v20200110')
    assert _norm_out(out).startswith(b'This is perltidy, v20211029')


@pytest.mark.parametrize(
@@ -181,9 +181,11 @@ def test_img_conflict(img_staged, patch_dir):


@pytest.fixture
def submodule_with_commits(tempdir_factory):
def repo_with_commits(tempdir_factory):
    path = git_dir(tempdir_factory)
    with cwd(path):
        open('foo', 'a+').close()
        cmd_output('git', 'add', 'foo')
        git_commit()
        rev1 = cmd_output('git', 'rev-parse', 'HEAD')[1].strip()
        git_commit()
@@ -196,18 +198,21 @@ def checkout_submodule(rev):


@pytest.fixture
def sub_staged(submodule_with_commits, tempdir_factory):
def sub_staged(repo_with_commits, tempdir_factory):
    path = git_dir(tempdir_factory)
    with cwd(path):
        open('bar', 'a+').close()
        cmd_output('git', 'add', 'bar')
        git_commit()
        cmd_output(
            'git', 'submodule', 'add', submodule_with_commits.path, 'sub',
            'git', 'submodule', 'add', repo_with_commits.path, 'sub',
        )
        checkout_submodule(submodule_with_commits.rev1)
        checkout_submodule(repo_with_commits.rev1)
        cmd_output('git', 'add', 'sub')
        yield auto_namedtuple(
            path=path,
            sub_path=os.path.join(path, 'sub'),
            submodule=submodule_with_commits,
            submodule=repo_with_commits,
        )
@@ -242,6 +247,34 @@ def test_sub_something_unstaged(sub_staged, patch_dir):
    _test_sub_state(sub_staged, 'rev2', 'AM')


def test_submodule_does_not_discard_changes(sub_staged, patch_dir):
    with open('bar', 'w') as f:
        f.write('unstaged changes')

    foo_path = os.path.join(sub_staged.sub_path, 'foo')
    with open(foo_path, 'w') as f:
        f.write('foo contents')

    with staged_files_only(patch_dir):
        with open('bar') as f:
            assert f.read() == ''

        with open(foo_path) as f:
            assert f.read() == 'foo contents'

    with open('bar') as f:
        assert f.read() == 'unstaged changes'

    with open(foo_path) as f:
        assert f.read() == 'foo contents'


def test_submodule_does_not_discard_changes_recurse(sub_staged, patch_dir):
    cmd_output('git', 'config', 'submodule.recurse', '1', cwd=sub_staged.path)

    test_submodule_does_not_discard_changes(sub_staged, patch_dir)


def test_stage_utf8_changes(foo_staged, patch_dir):
    contents = '\u2603'
    with open('foo', 'w', encoding='UTF-8') as foo_file: