Adding upstream version 2.2.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
18c908e4f3
commit
c0d06915b7
199 changed files with 14930 additions and 0 deletions
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
313
tests/clientlib_test.py
Normal file
313
tests/clientlib_test.py
Normal file
|
@ -0,0 +1,313 @@
|
|||
import logging
|
||||
|
||||
import cfgv
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.clientlib import check_type_tag
|
||||
from pre_commit.clientlib import CONFIG_HOOK_DICT
|
||||
from pre_commit.clientlib import CONFIG_REPO_DICT
|
||||
from pre_commit.clientlib import CONFIG_SCHEMA
|
||||
from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION
|
||||
from pre_commit.clientlib import MANIFEST_SCHEMA
|
||||
from pre_commit.clientlib import MigrateShaToRev
|
||||
from pre_commit.clientlib import validate_config_main
|
||||
from pre_commit.clientlib import validate_manifest_main
|
||||
from testing.fixtures import sample_local_config
|
||||
|
||||
|
||||
def is_valid_according_to_schema(obj, obj_schema):
    """Return True iff `obj` passes cfgv validation against `obj_schema`."""
    try:
        cfgv.validate(obj, obj_schema)
    except cfgv.ValidationError:
        return False
    else:
        return True
|
||||
|
||||
|
||||
@pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel'))
def test_check_type_tag_failures(value):
    """Strings that are not file `type` tags (including typos) are rejected."""
    with pytest.raises(cfgv.ValidationError):
        check_type_tag(value)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ('config_obj', 'expected'), (
        # minimal hook configuration is accepted
        (
            {
                'repos': [{
                    'repo': 'git@github.com:pre-commit/pre-commit-hooks',
                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                    'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
                }],
            },
            True,
        ),
        # extra `args` on a hook is accepted
        (
            {
                'repos': [{
                    'repo': 'git@github.com:pre-commit/pre-commit-hooks',
                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                    'hooks': [
                        {
                            'id': 'pyflakes',
                            'files': '\\.py$',
                            'args': ['foo', 'bar', 'baz'],
                        },
                    ],
                }],
            },
            True,
        ),
        # a non-string `exclude` is rejected
        (
            {
                'repos': [{
                    'repo': 'git@github.com:pre-commit/pre-commit-hooks',
                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                    'hooks': [
                        {
                            'id': 'pyflakes',
                            'files': '\\.py$',
                            # Exclude pattern must be a string
                            'exclude': 0,
                            'args': ['foo', 'bar', 'baz'],
                        },
                    ],
                }],
            },
            False,
        ),
    ),
)
def test_config_valid(config_obj, expected):
    """Whole-config validation against CONFIG_SCHEMA matches the verdict."""
    assert is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) is expected
|
||||
|
||||
|
||||
def test_local_hooks_with_rev_fails():
    """A `repo: local` entry must not carry a `rev` key."""
    config_obj = {'repos': [dict(sample_local_config(), rev='foo')]}
    with pytest.raises(cfgv.ValidationError):
        cfgv.validate(config_obj, CONFIG_SCHEMA)


def test_config_with_local_hooks_definition_passes():
    """A plain `repo: local` entry validates without error."""
    cfgv.validate({'repos': [sample_local_config()]}, CONFIG_SCHEMA)
|
||||
|
||||
|
||||
def test_config_schema_does_not_contain_defaults():
    """Due to the way our merging works, if this schema has any defaults they
    will clobber potentially useful values in the backing manifest. #227
    """
    assert not any(
        isinstance(item, cfgv.Optional) for item in CONFIG_HOOK_DICT.items
    )
|
||||
|
||||
|
||||
def test_validate_manifest_main_ok():
    """The repository's own manifest validates (exit status 0)."""
    assert not validate_manifest_main(('.pre-commit-hooks.yaml',))


def test_validate_config_main_ok():
    """The repository's own config validates (exit status 0)."""
    assert not validate_config_main(('.pre-commit-config.yaml',))


def test_validate_config_old_list_format_ok(tmpdir):
    """The legacy top-level-list config format still validates."""
    cfg_file = tmpdir.join('cfg.yaml')
    cfg_file.write('- {repo: meta, hooks: [{id: identity}]}')
    assert not validate_config_main((cfg_file.strpath,))
|
||||
|
||||
|
||||
def test_validate_warn_on_unknown_keys_at_repo_level(tmpdir, caplog):
    """An unknown key on a repo entry warns (but still exits 0)."""
    cfg_file = tmpdir.join('cfg.yaml')
    cfg_file.write(
        '-   repo: https://gitlab.com/pycqa/flake8\n'
        '    rev: 3.7.7\n'
        '    hooks:\n'
        '    -   id: flake8\n'
        '    args: [--some-args]\n',
    )
    assert not validate_config_main((cfg_file.strpath,))
    assert caplog.record_tuples == [
        (
            'pre_commit',
            logging.WARNING,
            'Unexpected key(s) present on https://gitlab.com/pycqa/flake8: '
            'args',
        ),
    ]


def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog):
    """An unknown top-level key warns (but still exits 0)."""
    cfg_file = tmpdir.join('cfg.yaml')
    cfg_file.write(
        'repos:\n'
        '-   repo: https://gitlab.com/pycqa/flake8\n'
        '    rev: 3.7.7\n'
        '    hooks:\n'
        '    -   id: flake8\n'
        'foo:\n'
        '    id: 1.0.0\n',
    )
    assert not validate_config_main((cfg_file.strpath,))
    assert caplog.record_tuples == [
        (
            'pre_commit',
            logging.WARNING,
            'Unexpected key(s) present at root: foo',
        ),
    ]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('fn', (validate_config_main, validate_manifest_main))
def test_mains_not_ok(tmpdir, fn):
    """Both CLI entry points fail on missing, unparseable, or invalid files."""
    not_yaml = tmpdir.join('f.notyaml')
    not_yaml.write('{')
    not_schema = tmpdir.join('notconfig.yaml')
    not_schema.write('{}')

    for arg in ('does-not-exist', not_yaml.strpath, not_schema.strpath):
        assert fn((arg,))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ('manifest_obj', 'expected'),
    (
        # minimal hook definition
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'files': r'\.py$',
            }],
            True,
        ),
        # explicit language_version is permitted
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'language_version': 'python3.4',
                'files': r'\.py$',
            }],
            True,
        ),
        (
            # A regression in 0.13.5: always_run and files are permissible
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'files': '',
                'always_run': True,
            }],
            True,
        ),
    ),
)
def test_valid_manifests(manifest_obj, expected):
    """Manifest validation against MANIFEST_SCHEMA matches the verdict."""
    assert is_valid_according_to_schema(manifest_obj, MANIFEST_SCHEMA) is expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'dct',
    (
        {'repo': 'local'}, {'repo': 'meta'},
        {'repo': 'wat', 'sha': 'wat'}, {'repo': 'wat', 'rev': 'wat'},
    ),
)
def test_migrate_sha_to_rev_ok(dct):
    """local/meta repos and repos with exactly one of sha/rev pass the check."""
    MigrateShaToRev().check(dct)


def test_migrate_sha_to_rev_dont_specify_both():
    """Supplying both `sha` and `rev` is an error with a specific message."""
    with pytest.raises(cfgv.ValidationError) as excinfo:
        MigrateShaToRev().check({'repo': 'a', 'sha': 'b', 'rev': 'c'})
    msg, = excinfo.value.args
    assert msg == 'Cannot specify both sha and rev'


@pytest.mark.parametrize(
    'dct',
    (
        {'repo': 'a'},
        {'repo': 'meta', 'sha': 'a'}, {'repo': 'meta', 'rev': 'a'},
    ),
)
def test_migrate_sha_to_rev_conditional_check_failures(dct):
    """Remote repos require sha/rev; local/meta repos must not have one."""
    with pytest.raises(cfgv.ValidationError):
        MigrateShaToRev().check(dct)


def test_migrate_to_sha_apply_default():
    """A legacy `sha` key is rewritten in place to `rev`."""
    dct = {'repo': 'a', 'sha': 'b'}
    MigrateShaToRev().apply_default(dct)
    assert dct == {'repo': 'a', 'rev': 'b'}


def test_migrate_to_sha_ok():
    """An already-migrated `rev` key is left untouched."""
    dct = {'repo': 'a', 'rev': 'b'}
    MigrateShaToRev().apply_default(dct)
    assert dct == {'repo': 'a', 'rev': 'b'}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'config_repo',
    (
        # i-dont-exist isn't a valid hook
        {'repo': 'meta', 'hooks': [{'id': 'i-dont-exist'}]},
        # invalid to set a language for a meta hook
        {'repo': 'meta', 'hooks': [{'id': 'identity', 'language': 'python'}]},
        # name override must be string
        {'repo': 'meta', 'hooks': [{'id': 'identity', 'name': False}]},
    ),
)
def test_meta_hook_invalid(config_repo):
    """Malformed `repo: meta` entries fail repo-dict validation."""
    with pytest.raises(cfgv.ValidationError):
        cfgv.validate(config_repo, CONFIG_REPO_DICT)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'mapping',
    (
        # invalid language key
        {'pony': '1.0'},
        # not a string for version
        {'python': 3},
    ),
)
def test_default_language_version_invalid(mapping):
    """default_language_version needs known languages and string versions."""
    with pytest.raises(cfgv.ValidationError):
        cfgv.validate(mapping, DEFAULT_LANGUAGE_VERSION)
|
||||
|
||||
|
||||
def test_minimum_pre_commit_version_failing():
    """An unsatisfiable minimum version raises with a descriptive message."""
    cfg = {'repos': [], 'minimum_pre_commit_version': '999'}
    with pytest.raises(cfgv.ValidationError) as excinfo:
        cfgv.validate(cfg, CONFIG_SCHEMA)
    assert str(excinfo.value) == (
        f'\n'
        f'==> At Config()\n'
        f'==> At key: minimum_pre_commit_version\n'
        f'=====> pre-commit version 999 is required but version {C.VERSION} '
        f'is installed.  Perhaps run `pip install --upgrade pre-commit`.'
    )


def test_minimum_pre_commit_version_passing():
    """A trivially-satisfied minimum version validates cleanly."""
    cfg = {'repos': [], 'minimum_pre_commit_version': '0'}
    cfgv.validate(cfg, CONFIG_SCHEMA)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT))
def test_warn_additional(schema):
    """WarnAdditionalKeys must list exactly the keys the schema knows."""
    allowed_keys = {item.key for item in schema.items if hasattr(item, 'key')}
    (warn_additional,) = (
        x for x in schema.items if isinstance(x, cfgv.WarnAdditionalKeys)
    )
    assert allowed_keys == set(warn_additional.keys)
|
59
tests/color_test.py
Normal file
59
tests/color_test.py
Normal file
|
@ -0,0 +1,59 @@
|
|||
import sys
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import envcontext
|
||||
from pre_commit.color import format_color
|
||||
from pre_commit.color import GREEN
|
||||
from pre_commit.color import use_color
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ('in_text', 'in_color', 'in_use_color', 'expected'), (
        ('foo', GREEN, True, f'{GREEN}foo\033[m'),
        ('foo', GREEN, False, 'foo'),
    ),
)
def test_format_color(in_text, in_color, in_use_color, expected):
    """Escape codes wrap the text only when color is enabled."""
    assert format_color(in_text, in_color, in_use_color) == expected
|
||||
|
||||
|
||||
def test_use_color_never():
    """'never' always disables color."""
    assert use_color('never') is False


def test_use_color_always():
    """'always' always enables color."""
    assert use_color('always') is True


def test_use_color_no_tty():
    """'auto' disables color when stdout is not a tty."""
    with mock.patch.object(sys.stdout, 'isatty', return_value=False):
        assert use_color('auto') is False
|
||||
|
||||
|
||||
def test_use_color_tty_with_color_support():
    """'auto' enables color on a color-capable tty (TERM unset)."""
    with mock.patch.object(sys.stdout, 'isatty', return_value=True), \
            mock.patch('pre_commit.color.terminal_supports_color', True), \
            envcontext.envcontext((('TERM', envcontext.UNSET),)):
        assert use_color('auto') is True


def test_use_color_tty_without_color_support():
    """'auto' disables color on a tty without color support."""
    with mock.patch.object(sys.stdout, 'isatty', return_value=True), \
            mock.patch('pre_commit.color.terminal_supports_color', False), \
            envcontext.envcontext((('TERM', envcontext.UNSET),)):
        assert use_color('auto') is False


def test_use_color_dumb_term():
    """'auto' disables color when TERM=dumb, even with color support."""
    with mock.patch.object(sys.stdout, 'isatty', return_value=True), \
            mock.patch('pre_commit.color.terminal_supports_color', True), \
            envcontext.envcontext((('TERM', 'dumb'),)):
        assert use_color('auto') is False
|
||||
|
||||
|
||||
def test_use_color_raises_if_given_shenanigans():
    """Anything other than never/always/auto is rejected."""
    with pytest.raises(ValueError):
        use_color('herpaderp')
|
0
tests/commands/__init__.py
Normal file
0
tests/commands/__init__.py
Normal file
437
tests/commands/autoupdate_test.py
Normal file
437
tests/commands/autoupdate_test.py
Normal file
|
@ -0,0 +1,437 @@
|
|||
import shlex
|
||||
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import git
|
||||
from pre_commit.commands.autoupdate import _check_hooks_still_exist_at_rev
|
||||
from pre_commit.commands.autoupdate import autoupdate
|
||||
from pre_commit.commands.autoupdate import RepositoryCannotBeUpdatedError
|
||||
from pre_commit.commands.autoupdate import RevInfo
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
from testing.fixtures import add_config_to_repo
|
||||
from testing.fixtures import make_config_from_repo
|
||||
from testing.fixtures import make_repo
|
||||
from testing.fixtures import modify_manifest
|
||||
from testing.fixtures import read_config
|
||||
from testing.fixtures import sample_local_config
|
||||
from testing.fixtures import write_config
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
@pytest.fixture
def up_to_date(tempdir_factory):
    """A hook repository whose HEAD is the rev a config would record."""
    yield make_repo(tempdir_factory, 'python_hooks_repo')


@pytest.fixture
def out_of_date(tempdir_factory):
    """A hook repository with one commit past `original_rev`."""
    repo_path = make_repo(tempdir_factory, 'python_hooks_repo')
    rev_before = git.head_rev(repo_path)

    # advance the repo so `original_rev` is stale
    git_commit(cwd=repo_path)
    rev_after = git.head_rev(repo_path)

    yield auto_namedtuple(
        path=repo_path, original_rev=rev_before, head_rev=rev_after,
    )


@pytest.fixture
def tagged(out_of_date):
    """The out-of-date repository with its newest commit tagged v1.2.3."""
    cmd_output('git', 'tag', 'v1.2.3', cwd=out_of_date.path)
    yield out_of_date


@pytest.fixture
def hook_disappearing(tempdir_factory):
    """A repository whose hook id changes after `original_rev`."""
    repo_path = make_repo(tempdir_factory, 'python_hooks_repo')
    rev_before = git.head_rev(repo_path)

    # rename the only hook so the old id no longer exists at HEAD
    with modify_manifest(repo_path) as manifest:
        manifest[0]['id'] = 'bar'

    yield auto_namedtuple(path=repo_path, original_rev=rev_before)
|
||||
|
||||
|
||||
def test_rev_info_from_config():
    """RevInfo is built from a config's repo/rev with no frozen marker."""
    info = RevInfo.from_config({'repo': 'repo/path', 'rev': 'v1.2.3'})
    assert info == RevInfo('repo/path', 'v1.2.3', None)


def test_rev_info_update_up_to_date_repo(up_to_date):
    """Updating an already-current repo is a no-op."""
    info = RevInfo.from_config(make_config_from_repo(up_to_date))
    assert info == info.update(tags_only=False, freeze=False)


def test_rev_info_update_out_of_date_repo(out_of_date):
    """Updating a stale repo moves the rev to the remote HEAD."""
    config = make_config_from_repo(
        out_of_date.path, rev=out_of_date.original_rev,
    )
    info = RevInfo.from_config(config)
    new_info = info.update(tags_only=False, freeze=False)
    assert new_info.rev == out_of_date.head_rev


def test_rev_info_update_non_master_default_branch(out_of_date):
    # change the default branch to be not-master
    cmd_output('git', '-C', out_of_date.path, 'branch', '-m', 'dev')
    test_rev_info_update_out_of_date_repo(out_of_date)
|
||||
|
||||
|
||||
def test_rev_info_update_tags_even_if_not_tags_only(tagged):
    """A tag on the newest commit is preferred over the raw sha."""
    config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
    new_info = RevInfo.from_config(config).update(
        tags_only=False, freeze=False,
    )
    assert new_info.rev == 'v1.2.3'


def test_rev_info_update_tags_only_does_not_pick_tip(tagged):
    """With tags_only, commits after the latest tag are ignored."""
    git_commit(cwd=tagged.path)
    config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
    new_info = RevInfo.from_config(config).update(
        tags_only=True, freeze=False,
    )
    assert new_info.rev == 'v1.2.3'


def test_rev_info_update_freeze_tag(tagged):
    """Freezing records the sha as rev and the tag as the frozen marker."""
    git_commit(cwd=tagged.path)
    config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
    new_info = RevInfo.from_config(config).update(
        tags_only=True, freeze=True,
    )
    assert new_info.rev == tagged.head_rev
    assert new_info.frozen == 'v1.2.3'


def test_rev_info_update_does_not_freeze_if_already_sha(out_of_date):
    """Freezing is a no-op when the resolved rev is already a sha."""
    config = make_config_from_repo(
        out_of_date.path, rev=out_of_date.original_rev,
    )
    new_info = RevInfo.from_config(config).update(
        tags_only=True, freeze=True,
    )
    assert new_info.rev == out_of_date.head_rev
    assert new_info.frozen is None
|
||||
|
||||
|
||||
def test_autoupdate_up_to_date_repo(up_to_date, tmpdir, store):
    """autoupdate leaves an already-current config byte-identical."""
    contents = (
        f'repos:\n'
        f'-   repo: {up_to_date}\n'
        f'    rev: {git.head_rev(up_to_date)}\n'
        f'    hooks:\n'
        f'    -   id: foo\n'
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(contents)

    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
    assert cfg.read() == contents
|
||||
|
||||
|
||||
def test_autoupdate_old_revision_broken(tempdir_factory, in_tmpdir, store):
    """In $FUTURE_VERSION, hooks.yaml will no longer be supported. This
    asserts that when that day comes, pre-commit will be able to autoupdate
    despite not being able to read hooks.yaml in that repository.
    """
    path = make_repo(tempdir_factory, 'python_hooks_repo')
    config = make_config_from_repo(path, check=False)

    # produce a rev at which the manifest is unreadable
    cmd_output('git', 'mv', C.MANIFEST_FILE, 'nope.yaml', cwd=path)
    git_commit(cwd=path)
    # Assume this is the revision the user's old repository was at
    rev = git.head_rev(path)
    cmd_output('git', 'mv', 'nope.yaml', C.MANIFEST_FILE, cwd=path)
    git_commit(cwd=path)
    update_rev = git.head_rev(path)

    config['rev'] = rev
    write_config('.', config)
    with open(C.CONFIG_FILE) as f:
        before = f.read()
    assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
    with open(C.CONFIG_FILE) as f:
        after = f.read()
    assert before != after
    assert update_rev in after
|
||||
|
||||
|
||||
def test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store):
    """autoupdate rewrites a stale rev to the remote HEAD."""
    fmt = (
        'repos:\n'
        '-   repo: {}\n'
        '    rev: {}\n'
        '    hooks:\n'
        '    -   id: foo\n'
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(fmt.format(out_of_date.path, out_of_date.original_rev))

    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
    assert cfg.read() == fmt.format(out_of_date.path, out_of_date.head_rev)


def test_autoupdate_only_one_to_update(up_to_date, out_of_date, tmpdir, store):
    """autoupdate only rewrites the repos that are actually stale."""
    fmt = (
        'repos:\n'
        '-   repo: {}\n'
        '    rev: {}\n'
        '    hooks:\n'
        '    -   id: foo\n'
        '-   repo: {}\n'
        '    rev: {}\n'
        '    hooks:\n'
        '    -   id: foo\n'
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    before = fmt.format(
        up_to_date, git.head_rev(up_to_date),
        out_of_date.path, out_of_date.original_rev,
    )
    cfg.write(before)

    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
    assert cfg.read() == fmt.format(
        up_to_date, git.head_rev(up_to_date),
        out_of_date.path, out_of_date.head_rev,
    )
|
||||
|
||||
|
||||
def test_autoupdate_out_of_date_repo_with_correct_repo_name(
        out_of_date, in_tmpdir, store,
):
    """With `repos=` naming the stale repo, it gets updated."""
    stale_config = make_config_from_repo(
        out_of_date.path, rev=out_of_date.original_rev, check=False,
    )
    local_config = sample_local_config()
    write_config('.', {'repos': [stale_config, local_config]})

    with open(C.CONFIG_FILE) as f:
        before = f.read()
    repo_name = f'file://{out_of_date.path}'
    ret = autoupdate(
        C.CONFIG_FILE, store, freeze=False, tags_only=False,
        repos=(repo_name,),
    )
    with open(C.CONFIG_FILE) as f:
        after = f.read()
    assert ret == 0
    assert before != after
    assert out_of_date.head_rev in after
    assert 'local' in after


def test_autoupdate_out_of_date_repo_with_wrong_repo_name(
        out_of_date, in_tmpdir, store,
):
    """With `repos=` naming a different repo, nothing is touched."""
    config = make_config_from_repo(
        out_of_date.path, rev=out_of_date.original_rev, check=False,
    )
    write_config('.', config)

    with open(C.CONFIG_FILE) as f:
        before = f.read()
    # It will not update it, because the name doesn't match
    ret = autoupdate(
        C.CONFIG_FILE, store, freeze=False, tags_only=False,
        repos=('dne',),
    )
    with open(C.CONFIG_FILE) as f:
        after = f.read()
    assert ret == 0
    assert before == after
|
||||
|
||||
|
||||
def test_does_not_reformat(tmpdir, out_of_date, store):
    """autoupdate preserves comments and layout when updating the rev."""
    fmt = (
        'repos:\n'
        '-   repo: {}\n'
        '    rev: {}  # definitely the version I want!\n'
        '    hooks:\n'
        '    -   id: foo\n'
        '        # These args are because reasons!\n'
        '        args: [foo, bar, baz]\n'
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(fmt.format(out_of_date.path, out_of_date.original_rev))

    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
    assert cfg.read() == fmt.format(out_of_date.path, out_of_date.head_rev)


def test_loses_formatting_when_not_detectable(out_of_date, store, tmpdir):
    """A best-effort attempt is made at updating rev without rewriting
    formatting.  When the original formatting cannot be detected, this
    is abandoned.
    """
    config = (
        'repos: [\n'
        '    {{\n'
        '        repo: {}, rev: {},\n'
        '        hooks: [\n'
        '            # A comment!\n'
        '            {{id: foo}},\n'
        '        ],\n'
        '    }}\n'
        ']\n'.format(
            shlex.quote(out_of_date.path), out_of_date.original_rev,
        )
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(config)

    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 0
    expected = (
        f'repos:\n'
        f'-   repo: {out_of_date.path}\n'
        f'    rev: {out_of_date.head_rev}\n'
        f'    hooks:\n'
        f'    -   id: foo\n'
    )
    assert cfg.read() == expected
|
||||
|
||||
|
||||
def test_autoupdate_tagged_repo(tagged, in_tmpdir, store):
    """autoupdate writes the tag name when HEAD is tagged."""
    config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
    write_config('.', config)

    assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
    with open(C.CONFIG_FILE) as f:
        assert 'v1.2.3' in f.read()


def test_autoupdate_freeze(tagged, in_tmpdir, store):
    """freeze pins the sha and records the tag as a comment; unfreezing
    removes the comment again."""
    config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
    write_config('.', config)

    assert autoupdate(C.CONFIG_FILE, store, freeze=True, tags_only=False) == 0
    with open(C.CONFIG_FILE) as f:
        expected = f'rev: {tagged.head_rev}  # frozen: v1.2.3'
        assert expected in f.read()

    # if we un-freeze it should remove the frozen comment
    assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
    with open(C.CONFIG_FILE) as f:
        assert 'rev: v1.2.3\n' in f.read()


def test_autoupdate_tags_only(tagged, in_tmpdir, store):
    """tags_only picks the latest tag even with newer commits present."""
    # add some commits after the tag
    git_commit(cwd=tagged.path)

    config = make_config_from_repo(tagged.path, rev=tagged.original_rev)
    write_config('.', config)

    assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=True) == 0
    with open(C.CONFIG_FILE) as f:
        assert 'v1.2.3' in f.read()
|
||||
|
||||
|
||||
def test_autoupdate_latest_no_config(out_of_date, in_tmpdir, store):
    """A remote whose HEAD has no manifest fails and leaves the rev alone."""
    config = make_config_from_repo(
        out_of_date.path, rev=out_of_date.original_rev,
    )
    write_config('.', config)

    # empty the remote repository at HEAD
    cmd_output('git', 'rm', '-r', ':/', cwd=out_of_date.path)
    git_commit(cwd=out_of_date.path)

    assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 1
    with open(C.CONFIG_FILE) as f:
        assert out_of_date.original_rev in f.read()
|
||||
|
||||
|
||||
def test_hook_disppearing_repo_raises(hook_disappearing, store):
    """Updating past a rev where a configured hook vanished is an error."""
    config = make_config_from_repo(
        hook_disappearing.path,
        rev=hook_disappearing.original_rev,
        hooks=[{'id': 'foo'}],
    )
    info = RevInfo.from_config(config).update(tags_only=False, freeze=False)
    with pytest.raises(RepositoryCannotBeUpdatedError):
        _check_hooks_still_exist_at_rev(config, info, store)


def test_autoupdate_hook_disappearing_repo(hook_disappearing, tmpdir, store):
    """autoupdate fails (exit 1) and keeps the config unchanged."""
    contents = (
        f'repos:\n'
        f'-   repo: {hook_disappearing.path}\n'
        f'    rev: {hook_disappearing.original_rev}\n'
        f'    hooks:\n'
        f'    -   id: foo\n'
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(contents)

    assert autoupdate(str(cfg), store, freeze=False, tags_only=False) == 1
    assert cfg.read() == contents
|
||||
|
||||
|
||||
def test_autoupdate_local_hooks(in_git_dir, store):
    """A local-only config round-trips through autoupdate unchanged."""
    config = sample_local_config()
    add_config_to_repo('.', config)
    assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
    new_config_written = read_config('.')
    assert len(new_config_written['repos']) == 1
    assert new_config_written['repos'][0] == config


def test_autoupdate_local_hooks_with_out_of_date_repo(
        out_of_date, in_tmpdir, store,
):
    """The local entry survives while the stale remote entry is updated."""
    stale_config = make_config_from_repo(
        out_of_date.path, rev=out_of_date.original_rev, check=False,
    )
    local_config = sample_local_config()
    write_config('.', {'repos': [local_config, stale_config]})
    assert autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False) == 0
    new_config_written = read_config('.')
    assert len(new_config_written['repos']) == 2
    assert new_config_written['repos'][0] == local_config
|
||||
|
||||
|
||||
def test_autoupdate_meta_hooks(tmpdir, store):
    """meta repos have no rev to update; the config is left as-is."""
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(
        'repos:\n'
        '-   repo: meta\n'
        '    hooks:\n'
        '    -   id: check-useless-excludes\n',
    )
    assert autoupdate(str(cfg), store, freeze=False, tags_only=True) == 0
    assert cfg.read() == (
        'repos:\n'
        '-   repo: meta\n'
        '    hooks:\n'
        '    -   id: check-useless-excludes\n'
    )


def test_updates_old_format_to_new_format(tmpdir, capsys, store):
    """The legacy top-level-list config is migrated under a `repos:` key."""
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(
        '-   repo: local\n'
        '    hooks:\n'
        '    -   id: foo\n'
        '        name: foo\n'
        '        entry: ./bin/foo.sh\n'
        '        language: script\n',
    )
    assert autoupdate(str(cfg), store, freeze=False, tags_only=True) == 0
    assert cfg.read() == (
        'repos:\n'
        '-   repo: local\n'
        '    hooks:\n'
        '    -   id: foo\n'
        '        name: foo\n'
        '        entry: ./bin/foo.sh\n'
        '        language: script\n'
    )
    out, _ = capsys.readouterr()
    assert out == 'Configuration has been migrated.\n'
|
33
tests/commands/clean_test.py
Normal file
33
tests/commands/clean_test.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
import os.path
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit.commands.clean import clean
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def fake_old_dir(tempdir_factory):
    """Redirect expansion of `~/.pre-commit` to a throwaway directory."""
    legacy_dir = tempdir_factory.get()

    def _expanduser(path, *args, **kwargs):
        # clean() only ever expands the legacy pre-commit home
        assert path == '~/.pre-commit'
        return legacy_dir

    with mock.patch.object(os.path, 'expanduser', side_effect=_expanduser):
        yield legacy_dir
|
||||
|
||||
|
||||
def test_clean(store, fake_old_dir):
    """clean() removes both the store and the legacy directory."""
    assert os.path.exists(fake_old_dir)
    assert os.path.exists(store.directory)
    clean(store)
    assert not os.path.exists(fake_old_dir)
    assert not os.path.exists(store.directory)


def test_clean_idempotent(store):
    """clean() may be called repeatedly without error."""
    for _ in range(2):
        clean(store)
        assert not os.path.exists(store.directory)
|
161
tests/commands/gc_test.py
Normal file
161
tests/commands/gc_test.py
Normal file
|
@ -0,0 +1,161 @@
|
|||
import os
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import git
|
||||
from pre_commit.clientlib import load_config
|
||||
from pre_commit.commands.autoupdate import autoupdate
|
||||
from pre_commit.commands.gc import gc
|
||||
from pre_commit.commands.install_uninstall import install_hooks
|
||||
from pre_commit.repository import all_hooks
|
||||
from testing.fixtures import make_config_from_repo
|
||||
from testing.fixtures import make_repo
|
||||
from testing.fixtures import modify_config
|
||||
from testing.fixtures import sample_local_config
|
||||
from testing.fixtures import sample_meta_config
|
||||
from testing.fixtures import write_config
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
def _repo_count(store):
    """Number of cloned repos currently tracked by the store."""
    return len(store.select_all_repos())


def _config_count(store):
    """Number of config files currently tracked by the store."""
    return len(store.select_all_configs())


def _remove_config_assert_cleared(store, cap_out):
    """Deleting the config makes the next gc drop everything it anchored."""
    os.remove(C.CONFIG_FILE)
    assert not gc(store)
    assert _config_count(store) == 0
    assert _repo_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'
|
||||
|
||||
|
||||
def test_gc(tempdir_factory, store, in_git_dir, cap_out):
    """After autoupdate clones a newer rev, gc drops the stale clone."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    old_rev = git.head_rev(path)
    git_commit(cwd=path)

    write_config('.', make_config_from_repo(path, rev=old_rev))
    store.mark_config_used(C.CONFIG_FILE)

    # update will clone both the old and new repo, making the old one gc-able
    install_hooks(C.CONFIG_FILE, store)
    assert not autoupdate(C.CONFIG_FILE, store, freeze=False, tags_only=False)

    assert _config_count(store) == 1
    assert _repo_count(store) == 2
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)
|
||||
|
||||
|
||||
def test_gc_repo_not_cloned(tempdir_factory, store, in_git_dir, cap_out):
    """gc is a no-op when the configured repo was never cloned."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    write_config('.', make_config_from_repo(path))
    store.mark_config_used(C.CONFIG_FILE)

    assert _config_count(store) == 1
    assert _repo_count(store) == 0
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'


def test_gc_meta_repo_does_not_crash(store, in_git_dir, cap_out):
    """A `repo: meta` config is handled gracefully by gc."""
    write_config('.', sample_meta_config())
    store.mark_config_used(C.CONFIG_FILE)
    assert not gc(store)
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'
|
||||
|
||||
|
||||
def test_gc_local_repo_does_not_crash(store, in_git_dir, cap_out):
    """A `repo: local` config is handled gracefully by gc."""
    write_config('.', sample_local_config())
    store.mark_config_used(C.CONFIG_FILE)
    assert not gc(store)
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'


def test_gc_unused_local_repo_with_env(store, in_git_dir, cap_out):
    """A local hook's environment is kept while its config exists."""
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'flake8', 'name': 'flake8', 'entry': 'flake8',
            # a `language: python` local hook will create an environment
            'types': ['python'], 'language': 'python',
        }],
    }
    write_config('.', config)
    store.mark_config_used(C.CONFIG_FILE)

    # this causes the repositories to be created
    all_hooks(load_config(C.CONFIG_FILE), store)

    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)
|
||||
|
||||
|
||||
def test_gc_config_with_missing_hook(
        tempdir_factory, store, in_git_dir, cap_out,
):
    """A config referencing a nonexistent hook id must not break gc."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    write_config('.', make_config_from_repo(path))
    store.mark_config_used(C.CONFIG_FILE)
    # to trigger a clone
    all_hooks(load_config(C.CONFIG_FILE), store)

    with modify_config() as config:
        # add a hook which does not exist, make sure we don't crash
        config['repos'][0]['hooks'].append({'id': 'does-not-exist'})

    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert not gc(store)
    # the still-referenced clone must be kept
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)
|
||||
|
||||
|
||||
def test_gc_deletes_invalid_configs(store, in_git_dir, cap_out):
    """Configs that fail validation are dropped from the store by gc."""
    write_config('.', {'i am': 'invalid'})
    store.mark_config_used(C.CONFIG_FILE)

    assert _config_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 0
    last_line = cap_out.get().splitlines()[-1]
    assert last_line == '0 repo(s) removed.'
|
||||
|
||||
|
||||
def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out):
    """A cloned repo with a broken manifest is treated as dead and removed."""
    # clean up repos from old pre-commit versions
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    write_config('.', make_config_from_repo(path))
    store.mark_config_used(C.CONFIG_FILE)

    # trigger a clone
    install_hooks(C.CONFIG_FILE, store)

    # we'll "break" the manifest to simulate an old version clone
    (_, _, path), = store.select_all_repos()
    os.remove(os.path.join(path, C.MANIFEST_FILE))

    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'
|
235
tests/commands/hook_impl_test.py
Normal file
235
tests/commands/hook_impl_test.py
Normal file
|
@ -0,0 +1,235 @@
|
|||
import subprocess
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import git
|
||||
from pre_commit.commands import hook_impl
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import make_executable
|
||||
from testing.fixtures import git_dir
|
||||
from testing.fixtures import sample_local_config
|
||||
from testing.fixtures import write_config
|
||||
from testing.util import cwd
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
def test_validate_config_file_exists(tmpdir):
    """An existing config file passes validation without exiting."""
    config_path = tmpdir.join(C.CONFIG_FILE).ensure()
    hook_impl._validate_config(0, config_path, True)
|
||||
|
||||
|
||||
def test_validate_config_missing(capsys):
    """A missing config with no escape hatch exits 1 with remediation tips."""
    with pytest.raises(SystemExit) as excinfo:
        hook_impl._validate_config(123, 'DNE.yaml', False)
    ret, = excinfo.value.args
    # the hook's retcode (123) is discarded: a missing config is an error
    assert ret == 1
    assert capsys.readouterr().out == (
        'No DNE.yaml file was found\n'
        '- To temporarily silence this, run '
        '`PRE_COMMIT_ALLOW_NO_CONFIG=1 git ...`\n'
        '- To permanently silence this, install pre-commit with the '
        '--allow-missing-config option\n'
        '- To uninstall pre-commit run `pre-commit uninstall`\n'
    )
|
||||
|
||||
|
||||
def test_validate_config_skip_missing_config(capsys):
    """With skip-on-missing-config the hook's retcode is passed through."""
    with pytest.raises(SystemExit) as excinfo:
        hook_impl._validate_config(123, 'DNE.yaml', True)
    ret, = excinfo.value.args
    assert ret == 123
    out = capsys.readouterr().out
    assert out == '`DNE.yaml` config file not found. Skipping `pre-commit`.\n'
|
||||
|
||||
|
||||
def test_validate_config_skip_via_env_variable(capsys):
    """PRE_COMMIT_ALLOW_NO_CONFIG=1 silences the missing-config failure."""
    with pytest.raises(SystemExit) as excinfo:
        with envcontext((('PRE_COMMIT_ALLOW_NO_CONFIG', '1'),)):
            hook_impl._validate_config(0, 'DNE.yaml', False)
    ret, = excinfo.value.args
    assert ret == 0
    out = capsys.readouterr().out
    assert out == '`DNE.yaml` config file not found. Skipping `pre-commit`.\n'
|
||||
|
||||
|
||||
def test_run_legacy_does_not_exist(tmpdir):
    """No legacy hook present: success retcode and empty stdin."""
    result = hook_impl._run_legacy('pre-commit', tmpdir, ())
    assert result == (0, b'')
|
||||
|
||||
|
||||
def test_run_legacy_executes_legacy_script(tmpdir, capfd):
    """An executable legacy hook runs with forwarded args; its exit code wins."""
    hook = tmpdir.join('pre-commit.legacy')
    hook.write('#!/usr/bin/env bash\necho hi "$@"\nexit 1\n')
    make_executable(hook)
    retv, stdin = hook_impl._run_legacy('pre-commit', tmpdir, ('arg1', 'arg2'))
    assert capfd.readouterr().out.strip() == 'hi arg1 arg2'
    # the legacy hook's nonzero exit code is propagated
    assert (retv, stdin) == (1, b'')
|
||||
|
||||
|
||||
def test_run_legacy_pre_push_returns_stdin(tmpdir):
    """pre-push captures stdin even when no legacy hook exists."""
    with mock.patch.object(sys.stdin.buffer, 'read', return_value=b'stdin'):
        result = hook_impl._run_legacy('pre-push', tmpdir, ())
    assert result == (0, b'stdin')
|
||||
|
||||
|
||||
def test_run_legacy_recursive(tmpdir):
    """_run_legacy must abort (SystemExit) rather than recurse into itself."""
    hook = tmpdir.join('pre-commit.legacy').ensure()
    make_executable(hook)

    # simulate a call being recursive
    def call(*_, **__):
        return hook_impl._run_legacy('pre-commit', tmpdir, ())

    # patching subprocess.run makes running the legacy hook re-enter
    # _run_legacy, which must detect the recursion and exit
    with mock.patch.object(subprocess, 'run', call):
        with pytest.raises(SystemExit):
            call()
|
||||
|
||||
|
||||
def test_run_ns_pre_commit():
    """pre-commit maps to the 'commit' stage and honors the color flag."""
    ns = hook_impl._run_ns('pre-commit', True, (), b'')
    assert ns is not None
    assert (ns.hook_stage, ns.color) == ('commit', True)
|
||||
|
||||
|
||||
def test_run_ns_commit_msg():
    """commit-msg forwards the message filename onto the namespace."""
    ns = hook_impl._run_ns('commit-msg', False, ('.git/COMMIT_MSG',), b'')
    assert ns is not None
    assert (ns.hook_stage, ns.color) == ('commit-msg', False)
    assert ns.commit_msg_filename == '.git/COMMIT_MSG'
|
||||
|
||||
|
||||
def test_run_ns_post_checkout():
    """post-checkout forwards its three positional args onto the namespace."""
    ns = hook_impl._run_ns('post-checkout', True, ('a', 'b', 'c'), b'')
    assert ns is not None
    assert ns.hook_stage == 'post-checkout'
    assert ns.color is True
    assert (ns.from_ref, ns.to_ref, ns.checkout_type) == ('a', 'b', 'c')
|
||||
|
||||
|
||||
@pytest.fixture
def push_example(tempdir_factory):
    """Create a source repo and a clone that is one commit ahead.

    Returns ``(src, src_head, clone, clone_head)`` where the heads are the
    respective repositories' current revisions.
    """
    src = git_dir(tempdir_factory)
    git_commit(cwd=src)
    src_head = git.head_rev(src)

    clone = tempdir_factory.get()
    cmd_output('git', 'clone', src, clone)
    git_commit(cwd=clone)
    clone_head = git.head_rev(clone)
    return (src, src_head, clone, clone_head)
|
||||
|
||||
|
||||
def test_run_ns_pre_push_updating_branch(push_example):
    """Pushing an updated branch yields a namespace with the pushed range."""
    src, src_head, clone, clone_head = push_example

    with cwd(clone):
        args = ('origin', src)
        # pre-push stdin: `<local ref> <local sha> <remote ref> <remote sha>`
        stdin = f'HEAD {clone_head} refs/heads/b {src_head}\n'.encode()
        ns = hook_impl._run_ns('pre-push', False, args, stdin)

    assert ns is not None
    assert ns.hook_stage == 'push'
    assert ns.color is False
    assert ns.remote_name == 'origin'
    assert ns.remote_url == src
    assert ns.from_ref == src_head
    assert ns.to_ref == clone_head
    assert ns.all_files is False
|
||||
|
||||
|
||||
def test_run_ns_pre_push_new_branch(push_example):
    """Pushing a brand-new ref (all-zero 'before' sha) still gets a range."""
    src, src_head, clone, clone_head = push_example

    with cwd(clone):
        push_args = ('origin', src)
        line = f'HEAD {clone_head} refs/heads/b {hook_impl.Z40}\n'
        ns = hook_impl._run_ns('pre-push', False, push_args, line.encode())

    assert ns is not None
    assert (ns.from_ref, ns.to_ref) == (src_head, clone_head)
|
||||
|
||||
|
||||
def test_run_ns_pre_push_new_branch_existing_rev(push_example):
    """A new branch pointing at an already-pushed rev has nothing to check."""
    src, src_head, clone, _ = push_example

    with cwd(clone):
        push_args = ('origin', src)
        line = f'HEAD {src_head} refs/heads/b2 {hook_impl.Z40}\n'
        ns = hook_impl._run_ns('pre-push', False, push_args, line.encode())

    assert ns is None
|
||||
|
||||
|
||||
def test_pushing_orphan_branch(push_example):
    """Pushing an orphan branch (no shared history) falls back to all files."""
    src, src_head, clone, _ = push_example

    cmd_output('git', 'checkout', '--orphan', 'b2', cwd=clone)
    git_commit(cwd=clone, msg='something else to get unique hash')
    clone_rev = git.head_rev(clone)

    with cwd(clone):
        args = ('origin', src)
        stdin = f'HEAD {clone_rev} refs/heads/b2 {hook_impl.Z40}\n'.encode()
        ns = hook_impl._run_ns('pre-push', False, args, stdin)

    assert ns is not None
    assert ns.all_files is True
|
||||
|
||||
|
||||
def test_run_ns_pre_push_deleting_branch(push_example):
    """Deleting a remote branch produces no namespace (nothing to run)."""
    src, src_head, clone, _ = push_example

    with cwd(clone):
        line = f'(delete) {hook_impl.Z40} refs/heads/b {src_head}'
        ns = hook_impl._run_ns('pre-push', False, ('origin', src), line.encode())

    assert ns is None
|
||||
|
||||
|
||||
def test_hook_impl_main_noop_pre_push(cap_out, store, push_example):
    """A branch-deletion push runs no hooks and produces no output."""
    src, src_head, clone, _ = push_example

    stdin = f'(delete) {hook_impl.Z40} refs/heads/b {src_head}'.encode()
    with mock.patch.object(sys.stdin.buffer, 'read', return_value=stdin):
        with cwd(clone):
            write_config('.', sample_local_config())
            ret = hook_impl.hook_impl(
                store,
                config=C.CONFIG_FILE,
                color=False,
                hook_type='pre-push',
                hook_dir='.git/hooks',
                skip_on_missing_config=False,
                args=('origin', src),
            )
    assert ret == 0
    assert cap_out.get() == ''
|
||||
|
||||
|
||||
def test_hook_impl_main_runs_hooks(cap_out, tempdir_factory, store):
    """hook_impl executes the configured hooks end-to-end."""
    with cwd(git_dir(tempdir_factory)):
        write_config('.', sample_local_config())
        ret = hook_impl.hook_impl(
            store,
            config=C.CONFIG_FILE,
            color=False,
            hook_type='pre-commit',
            hook_dir='.git/hooks',
            skip_on_missing_config=False,
            args=(),
        )
    assert ret == 0
    # no files are staged, so the hook reports as skipped
    expected = '''\
Block if "DO NOT COMMIT" is found....................(no files to check)Skipped
'''
    assert cap_out.get() == expected
|
92
tests/commands/init_templatedir_test.py
Normal file
92
tests/commands/init_templatedir_test.py
Normal file
|
@ -0,0 +1,92 @@
|
|||
import os.path
|
||||
from unittest import mock
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.commands.init_templatedir import init_templatedir
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.fixtures import git_dir
|
||||
from testing.fixtures import make_consuming_repo
|
||||
from testing.util import cmd_output_mocked_pre_commit_home
|
||||
from testing.util import cwd
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
def test_init_templatedir(tmpdir, tempdir_factory, store, cap_out):
    """init-templatedir installs hooks and warns when templateDir is unset."""
    target = str(tmpdir.join('tmpl'))
    init_templatedir(C.CONFIG_FILE, store, target, hook_types=['pre-commit'])
    lines = cap_out.get().splitlines()
    assert lines[0].startswith('pre-commit installed at ')
    assert lines[1] == (
        '[WARNING] `init.templateDir` not set to the target directory'
    )
    assert lines[2].startswith(
        '[WARNING] maybe `git config --global init.templateDir',
    )

    # a repo created from this template dir runs the hook on commit
    with envcontext((('GIT_TEMPLATE_DIR', target),)):
        path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')

    with cwd(path):
        retcode, output = git_commit(
            fn=cmd_output_mocked_pre_commit_home,
            tempdir_factory=tempdir_factory,
        )
        assert retcode == 0
        assert 'Bash hook....' in output
|
||||
|
||||
|
||||
def test_init_templatedir_already_set(tmpdir, tempdir_factory, store, cap_out):
    """No warnings are produced when init.templateDir already matches."""
    target = str(tmpdir.join('tmpl'))
    repo = git_dir(tempdir_factory)
    with cwd(repo):
        cmd_output('git', 'config', 'init.templateDir', target)
        init_templatedir(
            C.CONFIG_FILE, store, target, hook_types=['pre-commit'],
        )

    out_lines = cap_out.get().splitlines()
    assert len(out_lines) == 1
    assert out_lines[0].startswith('pre-commit installed at')
|
||||
|
||||
|
||||
def test_init_templatedir_not_set(tmpdir, store, cap_out):
    """A warning is emitted when `init.templateDir` is not configured."""
    # set HOME to ignore the current `.gitconfig`
    with envcontext((('HOME', str(tmpdir)),)):
        with tmpdir.join('tmpl').ensure_dir().as_cwd():
            # we have not set init.templateDir so this should produce a warning
            init_templatedir(
                C.CONFIG_FILE, store, '.', hook_types=['pre-commit'],
            )

    out_lines = cap_out.get().splitlines()
    assert len(out_lines) == 3
    expected = '[WARNING] `init.templateDir` not set to the target directory'
    assert out_lines[1] == expected
|
||||
|
||||
|
||||
def test_init_templatedir_expanduser(tmpdir, tempdir_factory, store, cap_out):
    """A `~`-prefixed templateDir is expanded before comparing to the target."""
    target = str(tmpdir.join('tmpl'))
    tmp_git_dir = git_dir(tempdir_factory)
    with cwd(tmp_git_dir):
        cmd_output('git', 'config', 'init.templateDir', '~/templatedir')
        # expanduser is patched so `~/templatedir` resolves to `target`
        with mock.patch.object(os.path, 'expanduser', return_value=target):
            init_templatedir(
                C.CONFIG_FILE, store, target, hook_types=['pre-commit'],
            )

    # the dirs match after expansion, so no warnings are printed
    lines = cap_out.get().splitlines()
    assert len(lines) == 1
    assert lines[0].startswith('pre-commit installed at')
|
||||
|
||||
|
||||
def test_init_templatedir_hookspath_set(tmpdir, tempdir_factory, store):
    """core.hooksPath is honored when writing the template hooks."""
    target = tmpdir.join('tmpl')
    repo = git_dir(tempdir_factory)
    with cwd(repo):
        cmd_output('git', 'config', '--local', 'core.hooksPath', 'hooks')
        init_templatedir(
            C.CONFIG_FILE, store, target, hook_types=['pre-commit'],
        )
    assert target.join('hooks/pre-commit').exists()
|
901
tests/commands/install_uninstall_test.py
Normal file
901
tests/commands/install_uninstall_test.py
Normal file
|
@ -0,0 +1,901 @@
|
|||
import os.path
|
||||
import re
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import git
|
||||
from pre_commit.commands import install_uninstall
|
||||
from pre_commit.commands.install_uninstall import CURRENT_HASH
|
||||
from pre_commit.commands.install_uninstall import install
|
||||
from pre_commit.commands.install_uninstall import install_hooks
|
||||
from pre_commit.commands.install_uninstall import is_our_script
|
||||
from pre_commit.commands.install_uninstall import PRIOR_HASHES
|
||||
from pre_commit.commands.install_uninstall import shebang
|
||||
from pre_commit.commands.install_uninstall import uninstall
|
||||
from pre_commit.parse_shebang import find_executable
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import make_executable
|
||||
from pre_commit.util import resource_text
|
||||
from testing.fixtures import add_config_to_repo
|
||||
from testing.fixtures import git_dir
|
||||
from testing.fixtures import make_consuming_repo
|
||||
from testing.fixtures import remove_config_from_repo
|
||||
from testing.fixtures import write_config
|
||||
from testing.util import cmd_output_mocked_pre_commit_home
|
||||
from testing.util import cwd
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
def test_is_not_script():
    """An arbitrary python file is not recognized as our hook script."""
    result = is_our_script('setup.py')
    assert result is False
|
||||
|
||||
|
||||
def test_is_script():
    """The shipped hook template is recognized as ours."""
    result = is_our_script('pre_commit/resources/hook-tmpl')
    assert result
|
||||
|
||||
|
||||
def test_is_previous_pre_commit(tmpdir):
    """Hooks written by older pre-commit versions are still recognized."""
    hook_file = tmpdir.join('foo')
    hook_file.write(f'{PRIOR_HASHES[0]}\n')
    assert is_our_script(hook_file.strpath)
|
||||
|
||||
|
||||
def patch_platform(platform):
    """Temporarily pretend to run on *platform* by patching ``sys.platform``."""
    patcher = mock.patch.object(sys, 'platform', platform)
    return patcher
|
||||
|
||||
|
||||
def patch_lookup_path(path):
    """Temporarily replace the POSIX executable search path."""
    patcher = mock.patch.object(install_uninstall, 'POSIX_SEARCH_PATH', path)
    return patcher
|
||||
|
||||
|
||||
def patch_sys_exe(exe):
    """Temporarily override the executable name used for shebang lines."""
    patcher = mock.patch.object(install_uninstall, 'SYS_EXE', exe)
    return patcher
|
||||
|
||||
|
||||
def test_shebang_windows():
    """On windows the shebang uses the windows executable name via env."""
    with patch_platform('win32'), patch_sys_exe('python.exe'):
        line = shebang()
    assert line == '#!/usr/bin/env python.exe'
|
||||
|
||||
|
||||
def test_shebang_posix_not_on_path():
    """With nothing on the search path, the current interpreter name is used."""
    with patch_platform('posix'), patch_lookup_path(()):
        with patch_sys_exe('python3.6'):
            line = shebang()
    assert line == '#!/usr/bin/env python3.6'
|
||||
|
||||
|
||||
def test_shebang_posix_on_path(tmpdir):
    """When a versioned python is on the lookup path, shebang prefers it."""
    # create an executable `pythonN` matching the running major version
    exe = tmpdir.join(f'python{sys.version_info[0]}').ensure()
    make_executable(exe)

    with patch_platform('posix'), patch_lookup_path((tmpdir.strpath,)):
        with patch_sys_exe('python'):
            assert shebang() == f'#!/usr/bin/env python{sys.version_info[0]}'
|
||||
|
||||
|
||||
def test_install_pre_commit(in_git_dir, store):
    """Installing writes an executable hook for each requested hook type."""
    assert not install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
    pre_commit_hook = in_git_dir.join('.git/hooks/pre-commit').strpath
    assert os.access(pre_commit_hook, os.X_OK)

    assert not install(C.CONFIG_FILE, store, hook_types=['pre-push'])
    pre_push_hook = in_git_dir.join('.git/hooks/pre-push').strpath
    assert os.access(pre_push_hook, os.X_OK)
|
||||
|
||||
|
||||
def test_install_hooks_directory_not_present(in_git_dir, store):
    """install creates .git/hooks when a git client did not."""
    # Simulate some git clients which don't make .git/hooks #234
    if in_git_dir.join('.git/hooks').exists():  # pragma: no cover (odd git)
        in_git_dir.join('.git/hooks').remove()
    install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
    assert in_git_dir.join('.git/hooks/pre-commit').exists()
|
||||
|
||||
|
||||
def test_install_multiple_hooks_at_once(in_git_dir, store):
    """Several hook types can be installed and uninstalled together."""
    hook_types = ['pre-commit', 'pre-push']
    install(C.CONFIG_FILE, store, hook_types=hook_types)
    for hook_type in hook_types:
        assert in_git_dir.join(f'.git/hooks/{hook_type}').exists()
    uninstall(hook_types=hook_types)
    for hook_type in hook_types:
        assert not in_git_dir.join(f'.git/hooks/{hook_type}').exists()
|
||||
|
||||
|
||||
def test_install_refuses_core_hookspath(in_git_dir, store):
    """install fails when core.hooksPath is configured for the repo."""
    cmd_output('git', 'config', '--local', 'core.hooksPath', 'hooks')
    ret = install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
    assert ret
|
||||
|
||||
|
||||
def test_install_hooks_dead_symlink(in_git_dir, store):
    """A dangling pre-existing symlink is replaced by a real hook file."""
    hook_path = in_git_dir.join('.git/hooks').ensure_dir().join('pre-commit')
    os.symlink('/fake/baz', hook_path.strpath)
    install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
    assert hook_path.exists()
|
||||
|
||||
|
||||
def test_uninstall_does_not_blow_up_when_not_there(in_git_dir):
    """Uninstalling when nothing was installed succeeds quietly."""
    ret = uninstall(hook_types=['pre-commit'])
    assert ret == 0
|
||||
|
||||
|
||||
def test_uninstall(in_git_dir, store):
    """install then uninstall round-trips to no hook on disk."""
    hook = in_git_dir.join('.git/hooks/pre-commit')
    assert not hook.exists()
    install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
    assert hook.exists()
    uninstall(hook_types=['pre-commit'])
    assert not hook.exists()
|
||||
|
||||
|
||||
def _get_commit_output(tempdir_factory, touch_file='foo', **kwargs):
    """Stage *touch_file* and commit, returning ``(retcode, output)``.

    Extra keyword arguments are forwarded to ``git_commit``.
    """
    # create (or touch) the file to commit; `with` guarantees the handle is
    # closed even if an error occurs (the original `open(...).close()` would
    # leak the handle on an exception between the two calls)
    with open(touch_file, 'a'):
        pass
    cmd_output('git', 'add', touch_file)
    return git_commit(
        fn=cmd_output_mocked_pre_commit_home,
        retcode=None,  # caller inspects the return code itself
        tempdir_factory=tempdir_factory,
        **kwargs,
    )
|
||||
|
||||
|
||||
# osx does this different :(
|
||||
# regex fragment matching git's summary line after a commit;
# osx does this different :(
FILES_CHANGED = (
    r'('
    r' 1 file changed, 0 insertions\(\+\), 0 deletions\(-\)\n'
    r'|'
    r' 0 files changed\n'
    r')'
)
|
||||
|
||||
|
||||
# full expected output of a first `git commit` run with the bash hook
NORMAL_PRE_COMMIT_RUN = re.compile(
    fr'^\[INFO\] Initializing environment for .+\.\n'
    fr'Bash hook\.+Passed\n'
    fr'\[master [a-f0-9]{{7}}\] commit!\n'
    fr'{FILES_CHANGED}'
    fr' create mode 100644 foo\n$',
)
|
||||
|
||||
|
||||
def test_install_pre_commit_and_run(tempdir_factory, store):
    """End-to-end: install the hook, commit, and observe the hook output."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        commit_ret, commit_out = _get_commit_output(tempdir_factory)
        assert commit_ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(commit_out)
|
||||
|
||||
|
||||
def test_install_pre_commit_and_run_custom_path(tempdir_factory, store):
    """A non-default config filename installs and runs like the default."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        cmd_output('git', 'mv', C.CONFIG_FILE, 'custom.yaml')
        git_commit(cwd=path)
        assert install('custom.yaml', store, hook_types=['pre-commit']) == 0

        commit_ret, commit_out = _get_commit_output(tempdir_factory)
        assert commit_ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(commit_out)
|
||||
|
||||
|
||||
def test_install_in_submodule_and_run(tempdir_factory, store):
    """install/run works from inside a git submodule checkout."""
    src_path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    parent_path = git_dir(tempdir_factory)
    cmd_output('git', 'submodule', 'add', src_path, 'sub', cwd=parent_path)
    git_commit(cwd=parent_path)

    sub_pth = os.path.join(parent_path, 'sub')
    with cwd(sub_pth):
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(output)
|
||||
|
||||
|
||||
def test_install_in_worktree_and_run(tempdir_factory, store):
    """install/run works from inside a linked git worktree."""
    src_path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    path = tempdir_factory.get()
    # move the source branch aside so the worktree can take `master`
    cmd_output('git', '-C', src_path, 'branch', '-m', 'notmaster')
    cmd_output('git', '-C', src_path, 'worktree', 'add', path, '-b', 'master')

    with cwd(path):
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(output)
|
||||
|
||||
|
||||
def test_commit_am(tempdir_factory, store):
    """Regression test for #322."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        # Make an unstaged change; `with` closes the handle deterministically
        # (the original `open(...).close()` idiom leaks on error)
        with open('unstaged', 'w'):
            pass
        cmd_output('git', 'add', '.')
        git_commit(cwd=path)
        with open('unstaged', 'w') as foo_file:
            foo_file.write('Oh hai')

        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 0
|
||||
|
||||
|
||||
def test_unicode_merge_commit_message(tempdir_factory, store):
    """Committing a merge whose message is non-ascii must not crash."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
        cmd_output('git', 'checkout', 'master', '-b', 'foo')
        git_commit('-n', cwd=path)
        cmd_output('git', 'checkout', 'master')
        # merge with a unicode (snowman) message left uncommitted
        cmd_output('git', 'merge', 'foo', '--no-ff', '--no-commit', '-m', '☃')
        # Used to crash
        git_commit(
            '--no-edit',
            msg=None,
            fn=cmd_output_mocked_pre_commit_home,
            tempdir_factory=tempdir_factory,
        )
|
||||
|
||||
|
||||
def test_install_idempotent(tempdir_factory, store):
    """Installing twice behaves exactly like installing once."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        for _ in range(2):
            assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        commit_ret, commit_out = _get_commit_output(tempdir_factory)
        assert commit_ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(commit_out)
|
||||
|
||||
|
||||
def _path_without_us():
    """Return a PATH with every directory containing `pre-commit` removed."""
    # Choose a path which *probably* doesn't include us
    env = dict(os.environ)
    exe = find_executable('pre-commit', _environ=env)
    # keep stripping the directory that resolved `pre-commit` until the
    # executable can no longer be found
    while exe:
        parts = env['PATH'].split(os.pathsep)
        after = [x for x in parts if x.lower() != os.path.dirname(exe).lower()]
        if parts == after:
            # nothing was removed yet the exe still resolves: bail loudly
            # instead of looping forever
            raise AssertionError(exe, parts)
        env['PATH'] = os.pathsep.join(after)
        exe = find_executable('pre-commit', _environ=env)
    return env['PATH']
|
||||
|
||||
|
||||
def test_environment_not_sourced(tempdir_factory, store):
    """A hook whose install-time python vanished fails with a clear message."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        assert not install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
        # simulate deleting the virtualenv by rewriting the exe
        hook = os.path.join(path, '.git/hooks/pre-commit')
        with open(hook) as f:
            src = f.read()
        src = re.sub(
            '\nINSTALL_PYTHON =.*\n',
            '\nINSTALL_PYTHON = "/dne"\n',
            src,
        )
        with open(hook, 'w') as f:
            f.write(src)

        # Use a specific homedir to ignore --user installs
        homedir = tempdir_factory.get()
        ret, out = git_commit(
            env={
                'HOME': homedir,
                'PATH': _path_without_us(),
                # Git needs this to make a commit
                'GIT_AUTHOR_NAME': os.environ['GIT_AUTHOR_NAME'],
                'GIT_COMMITTER_NAME': os.environ['GIT_COMMITTER_NAME'],
                'GIT_AUTHOR_EMAIL': os.environ['GIT_AUTHOR_EMAIL'],
                'GIT_COMMITTER_EMAIL': os.environ['GIT_COMMITTER_EMAIL'],
            },
            retcode=None,
        )
        assert ret == 1
        assert out == (
            '`pre-commit` not found. '
            'Did you forget to activate your virtualenv?\n'
        )
|
||||
|
||||
|
||||
# expected full output when the configured hook itself fails
FAILING_PRE_COMMIT_RUN = re.compile(
    r'^\[INFO\] Initializing environment for .+\.\n'
    r'Failing hook\.+Failed\n'
    r'- hook id: failing_hook\n'
    r'- exit code: 1\n'
    r'\n'
    r'Fail\n'
    r'foo\n'
    r'\n$',
)
|
||||
|
||||
|
||||
def test_failing_hooks_returns_nonzero(tempdir_factory, store):
    """A failing hook causes the commit itself to exit nonzero."""
    path = make_consuming_repo(tempdir_factory, 'failing_hook_repo')
    with cwd(path):
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        commit_ret, commit_out = _get_commit_output(tempdir_factory)
        assert commit_ret == 1
        assert FAILING_PRE_COMMIT_RUN.match(commit_out)
|
||||
|
||||
|
||||
# expected output when only a pre-existing "legacy" hook runs
EXISTING_COMMIT_RUN = re.compile(
    fr'^legacy hook\n'
    fr'\[master [a-f0-9]{{7}}\] commit!\n'
    fr'{FILES_CHANGED}'
    fr' create mode 100644 baz\n$',
)
|
||||
|
||||
|
||||
def _write_legacy_hook(path):
    """Write an executable, non-pre-commit 'legacy' pre-commit hook."""
    hooks_dir = os.path.join(path, '.git/hooks')
    os.makedirs(hooks_dir, exist_ok=True)
    hook_path = os.path.join(hooks_dir, 'pre-commit')
    with open(hook_path, 'w') as f:
        f.write(f'{shebang()}\nprint("legacy hook")\n')
    make_executable(hook_path)
|
||||
|
||||
|
||||
def test_install_existing_hooks_no_overwrite(tempdir_factory, store):
    """Installing over an existing hook keeps it and chains both hooks."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        _write_legacy_hook(path)

        # Make sure we installed the "old" hook correctly
        ret, output = _get_commit_output(tempdir_factory, touch_file='baz')
        assert ret == 0
        assert EXISTING_COMMIT_RUN.match(output)

        # Now install pre-commit (no-overwrite)
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        # We should run both the legacy and pre-commit hooks
        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 0
        assert output.startswith('legacy hook\n')
        assert NORMAL_PRE_COMMIT_RUN.match(output[len('legacy hook\n'):])
|
||||
|
||||
|
||||
def test_legacy_overwriting_legacy_hook(tempdir_factory, store):
    """Re-installing over a recreated legacy hook must not crash (#1010)."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        # write-then-install twice; this previously crashed on windows
        for _ in range(2):
            _write_legacy_hook(path)
            assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
|
||||
|
||||
|
||||
def test_install_existing_hook_no_overwrite_idempotent(tempdir_factory, store):
    """Double-installing over a legacy hook still chains both exactly once."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        _write_legacy_hook(path)

        # Install twice
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        # We should run both the legacy and pre-commit hooks
        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 0
        assert output.startswith('legacy hook\n')
        assert NORMAL_PRE_COMMIT_RUN.match(output[len('legacy hook\n'):])
|
||||
|
||||
|
||||
# expected output prefix when a chained legacy hook fails before ours passes
FAIL_OLD_HOOK = re.compile(
    r'fail!\n'
    r'\[INFO\] Initializing environment for .+\.\n'
    r'Bash hook\.+Passed\n',
)
|
||||
|
||||
|
||||
def test_failing_existing_hook_returns_1(tempdir_factory, store):
    """A failing chained legacy hook makes the whole commit fail."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        # Write out a failing "old" hook
        os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
        with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
            f.write('#!/usr/bin/env bash\necho "fail!"\nexit 1\n')
        make_executable(f.name)

        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        # We should get a failure from the legacy hook
        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 1
        assert FAIL_OLD_HOOK.match(output)
|
||||
|
||||
|
||||
def test_install_overwrite_no_existing_hooks(tempdir_factory, store):
    """--overwrite behaves normally when there is nothing to overwrite."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        install_ret = install(
            C.CONFIG_FILE, store, hook_types=['pre-commit'], overwrite=True,
        )
        assert not install_ret

        commit_ret, commit_out = _get_commit_output(tempdir_factory)
        assert commit_ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(commit_out)
|
||||
|
||||
|
||||
def test_install_overwrite(tempdir_factory, store):
    """--overwrite discards a legacy hook instead of chaining to it."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        _write_legacy_hook(path)
        install_ret = install(
            C.CONFIG_FILE, store, hook_types=['pre-commit'], overwrite=True,
        )
        assert not install_ret

        commit_ret, commit_out = _get_commit_output(tempdir_factory)
        assert commit_ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(commit_out)
|
||||
|
||||
|
||||
def test_uninstall_restores_legacy_hooks(tempdir_factory, store):
    """uninstall puts a previously-chained legacy hook back in place."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        _write_legacy_hook(path)

        # Now install and uninstall pre-commit
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0
        assert uninstall(hook_types=['pre-commit']) == 0

        # Make sure we installed the "old" hook correctly
        ret, output = _get_commit_output(tempdir_factory, touch_file='baz')
        assert ret == 0
        assert EXISTING_COMMIT_RUN.match(output)
|
||||
|
||||
|
||||
def test_replace_old_commit_script(tempdir_factory, store):
    """A hook from an older pre-commit version is replaced, not chained."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        # Install a script that looks like our old script
        pre_commit_contents = resource_text('hook-tmpl')
        new_contents = pre_commit_contents.replace(
            CURRENT_HASH, PRIOR_HASHES[-1],
        )

        os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
        with open(os.path.join(path, '.git/hooks/pre-commit'), 'w') as f:
            f.write(new_contents)
        make_executable(f.name)

        # Install normally
        assert install(C.CONFIG_FILE, store, hook_types=['pre-commit']) == 0

        # the old script was recognized as ours and replaced (no chaining)
        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(output)
|
||||
|
||||
|
||||
def test_uninstall_doesnt_remove_not_our_hooks(in_git_dir):
    """uninstall leaves a hook file that pre-commit did not write."""
    pre_commit = in_git_dir.join('.git/hooks').ensure_dir().join('pre-commit')
    pre_commit.write('#!/usr/bin/env bash\necho 1\n')
    make_executable(pre_commit.strpath)

    assert uninstall(hook_types=['pre-commit']) == 0

    # the foreign hook file must survive
    assert pre_commit.exists()
|
||||
|
||||
|
||||
# Expected `git commit` output when hook environments were built ahead of
# time -- note: no "[INFO] Installing environment ..." lines are allowed.
PRE_INSTALLED = re.compile(
    fr'Bash hook\.+Passed\n'
    fr'\[master [a-f0-9]{{7}}\] commit!\n'
    fr'{FILES_CHANGED}'
    fr' create mode 100644 foo\n$',
)
|
||||
|
||||
|
||||
def test_installs_hooks_with_hooks_True(tempdir_factory, store):
    """install(hooks=True) pre-builds hook environments at install time."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        install(C.CONFIG_FILE, store, hook_types=['pre-commit'], hooks=True)
        ret, output = _get_commit_output(
            tempdir_factory, pre_commit_home=store.directory,
        )

        assert ret == 0
        # PRE_INSTALLED forbids [INFO] install lines: envs must already exist
        assert PRE_INSTALLED.match(output)
|
||||
|
||||
|
||||
def test_install_hooks_command(tempdir_factory, store):
    """`pre-commit install-hooks` builds environments before the first commit."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
        install_hooks(C.CONFIG_FILE, store)
        ret, output = _get_commit_output(
            tempdir_factory, pre_commit_home=store.directory,
        )

        assert ret == 0
        # no [INFO] install lines -- environments were built by install_hooks
        assert PRE_INSTALLED.match(output)
|
||||
|
||||
|
||||
def test_installed_from_venv(tempdir_factory, store):
    """Installed hooks still run when pre-commit itself is not on PATH."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        install(C.CONFIG_FILE, store, hook_types=['pre-commit'])
        # No environment so pre-commit is not on the path when running!
        # Should still pick up the python from when we installed
        ret, output = _get_commit_output(
            tempdir_factory,
            env={
                'HOME': os.path.expanduser('~'),
                'PATH': _path_without_us(),
                'TERM': os.environ.get('TERM', ''),
                # Windows needs this to import `random`
                'SYSTEMROOT': os.environ.get('SYSTEMROOT', ''),
                # Windows needs this to resolve executables
                'PATHEXT': os.environ.get('PATHEXT', ''),
                # Git needs this to make a commit
                'GIT_AUTHOR_NAME': os.environ['GIT_AUTHOR_NAME'],
                'GIT_COMMITTER_NAME': os.environ['GIT_COMMITTER_NAME'],
                'GIT_AUTHOR_EMAIL': os.environ['GIT_AUTHOR_EMAIL'],
                'GIT_COMMITTER_EMAIL': os.environ['GIT_COMMITTER_EMAIL'],
            },
        )
        assert ret == 0
        assert NORMAL_PRE_COMMIT_RUN.match(output)
|
||||
|
||||
|
||||
def _get_push_output(tempdir_factory, remote='origin', opts=()):
    """Push HEAD to ``new_branch`` on *remote*; return ``(retcode, stdout)``."""
    full_cmd = ('git', 'push', remote, 'HEAD:new_branch') + tuple(opts)
    result = cmd_output_mocked_pre_commit_home(
        *full_cmd,
        tempdir_factory=tempdir_factory,
        retcode=None,
    )
    # drop stderr; callers only need return code and stdout
    return result[:2]
|
||||
|
||||
|
||||
def test_pre_push_integration_failing(tempdir_factory, store):
    """A failing pre-push hook blocks the push and reports all pushed files."""
    upstream = make_consuming_repo(tempdir_factory, 'failing_hook_repo')
    path = tempdir_factory.get()
    cmd_output('git', 'clone', upstream, path)
    with cwd(path):
        install(C.CONFIG_FILE, store, hook_types=['pre-push'])
        # commit succeeds because pre-commit is only installed for pre-push
        assert _get_commit_output(tempdir_factory)[0] == 0
        assert _get_commit_output(tempdir_factory, touch_file='zzz')[0] == 0

        retc, output = _get_push_output(tempdir_factory)
        assert retc == 1
        assert 'Failing hook' in output
        assert 'Failed' in output
        assert 'foo zzz' in output  # both filenames should be printed
        assert 'hook id: failing_hook' in output
|
||||
|
||||
|
||||
def test_pre_push_integration_accepted(tempdir_factory, store):
    """A passing pre-push hook lets the push succeed."""
    upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    path = tempdir_factory.get()
    cmd_output('git', 'clone', upstream, path)
    with cwd(path):
        install(C.CONFIG_FILE, store, hook_types=['pre-push'])
        assert _get_commit_output(tempdir_factory)[0] == 0

        retc, output = _get_push_output(tempdir_factory)
        assert retc == 0
        assert 'Bash hook' in output
        assert 'Passed' in output
|
||||
|
||||
|
||||
def test_pre_push_force_push_without_fetch(tempdir_factory, store):
    """A force push from a clone that never fetched the remote tip still runs hooks."""
    upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    path1 = tempdir_factory.get()
    path2 = tempdir_factory.get()
    cmd_output('git', 'clone', upstream, path1)
    cmd_output('git', 'clone', upstream, path2)
    with cwd(path1):
        # advance the remote branch from the first clone
        assert _get_commit_output(tempdir_factory)[0] == 0
        assert _get_push_output(tempdir_factory)[0] == 0

    with cwd(path2):
        install(C.CONFIG_FILE, store, hook_types=['pre-push'])
        assert _get_commit_output(tempdir_factory, msg='force!')[0] == 0

        # path2 does not have path1's commit locally -- force over it
        retc, output = _get_push_output(tempdir_factory, opts=('--force',))
        assert retc == 0
        assert 'Bash hook' in output
        assert 'Passed' in output
|
||||
|
||||
|
||||
def test_pre_push_new_upstream(tempdir_factory, store):
    """Pushing to a brand-new remote (no shared history) still runs hooks."""
    upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    upstream2 = git_dir(tempdir_factory)
    path = tempdir_factory.get()
    cmd_output('git', 'clone', upstream, path)
    with cwd(path):
        install(C.CONFIG_FILE, store, hook_types=['pre-push'])
        assert _get_commit_output(tempdir_factory)[0] == 0

        # swap `origin` to an empty repository before pushing
        cmd_output('git', 'remote', 'rename', 'origin', 'upstream')
        cmd_output('git', 'remote', 'add', 'origin', upstream2)
        retc, output = _get_push_output(tempdir_factory)
        assert retc == 0
        assert 'Bash hook' in output
        assert 'Passed' in output
|
||||
|
||||
|
||||
def test_pre_push_environment_variables(tempdir_factory, store):
    """pre-push hooks receive PRE_COMMIT_REMOTE_NAME in their environment."""
    config = {
        'repo': 'local',
        'hooks': [
            {
                'id': 'print-remote-info',
                'name': 'print remote info',
                'entry': 'bash -c "echo remote: $PRE_COMMIT_REMOTE_NAME"',
                'language': 'system',
                'verbose': True,
            },
        ],
    }

    upstream = git_dir(tempdir_factory)
    clone = tempdir_factory.get()
    cmd_output('git', 'clone', upstream, clone)
    add_config_to_repo(clone, config)
    with cwd(clone):
        install(C.CONFIG_FILE, store, hook_types=['pre-push'])

        # rename the remote so a non-default name is what the hook sees
        cmd_output('git', 'remote', 'rename', 'origin', 'origin2')
        retc, output = _get_push_output(tempdir_factory, remote='origin2')
        assert retc == 0
        assert '\nremote: origin2\n' in output
|
||||
|
||||
|
||||
def test_pre_push_integration_empty_push(tempdir_factory, store):
    """A push with nothing to push ('Everything up-to-date') succeeds quietly."""
    upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    path = tempdir_factory.get()
    cmd_output('git', 'clone', upstream, path)
    with cwd(path):
        install(C.CONFIG_FILE, store, hook_types=['pre-push'])
        # first push moves the branch; the second one is empty
        _get_push_output(tempdir_factory)
        retc, output = _get_push_output(tempdir_factory)
        assert output == 'Everything up-to-date\n'
        assert retc == 0
|
||||
|
||||
|
||||
def test_pre_push_legacy(tempdir_factory, store):
    """A legacy pre-push hook is chained: it runs (with stdin) before ours."""
    upstream = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    path = tempdir_factory.get()
    cmd_output('git', 'clone', upstream, path)
    with cwd(path):
        os.makedirs(os.path.join(path, '.git/hooks'), exist_ok=True)
        with open(os.path.join(path, '.git/hooks/pre-push'), 'w') as f:
            # the legacy hook asserts it receives the standard pre-push
            # stdin line: <local ref> <local sha> <remote ref> <remote sha>
            f.write(
                '#!/usr/bin/env bash\n'
                'set -eu\n'
                'read lr ls rr rs\n'
                'test -n "$lr" -a -n "$ls" -a -n "$rr" -a -n "$rs"\n'
                'echo legacy\n',
            )
        make_executable(f.name)

        install(C.CONFIG_FILE, store, hook_types=['pre-push'])
        assert _get_commit_output(tempdir_factory)[0] == 0

        retc, output = _get_push_output(tempdir_factory)
        assert retc == 0
        first_line, _, third_line = output.splitlines()[:3]
        assert first_line == 'legacy'
        assert third_line.startswith('Bash hook')
        assert third_line.endswith('Passed')
|
||||
|
||||
|
||||
def test_commit_msg_integration_failing(
        commit_msg_repo, tempdir_factory, store,
):
    """A failing commit-msg hook rejects the commit with a diagnostic."""
    install(C.CONFIG_FILE, store, hook_types=['commit-msg'])
    retc, out = _get_commit_output(tempdir_factory)
    assert retc == 1
    assert out == '''\
Must have "Signed off by:"...............................................Failed
- hook id: must-have-signoff
- exit code: 1
'''
|
||||
|
||||
|
||||
def test_commit_msg_integration_passing(
        commit_msg_repo, tempdir_factory, store,
):
    """A commit message satisfying the commit-msg hook is accepted."""
    install(C.CONFIG_FILE, store, hook_types=['commit-msg'])
    msg = 'Hi\nSigned off by: me, lol'
    retc, out = _get_commit_output(tempdir_factory, msg=msg)
    assert retc == 0
    first_line = out.splitlines()[0]
    assert first_line.startswith('Must have "Signed off by:"...')
    assert first_line.endswith('...Passed')
|
||||
|
||||
|
||||
def test_commit_msg_legacy(commit_msg_repo, tempdir_factory, store):
    """A legacy commit-msg hook is chained and receives the msg file path ($1)."""
    hook_path = os.path.join(commit_msg_repo, '.git/hooks/commit-msg')
    os.makedirs(os.path.dirname(hook_path), exist_ok=True)
    with open(hook_path, 'w') as hook_file:
        hook_file.write(
            '#!/usr/bin/env bash\n'
            'set -eu\n'
            'test -e "$1"\n'
            'echo legacy\n',
        )
    make_executable(hook_path)

    install(C.CONFIG_FILE, store, hook_types=['commit-msg'])

    msg = 'Hi\nSigned off by: asottile'
    retc, out = _get_commit_output(tempdir_factory, msg=msg)
    assert retc == 0
    # legacy hook output comes first, then our hook's status line
    first_line, second_line = out.splitlines()[:2]
    assert first_line == 'legacy'
    assert second_line.startswith('Must have "Signed off by:"...')
|
||||
|
||||
|
||||
def test_post_checkout_integration(tempdir_factory, store):
    """post-checkout hooks run on checkout with env vars but no file list."""
    path = git_dir(tempdir_factory)
    config = [
        {
            'repo': 'local',
            'hooks': [{
                'id': 'post-checkout',
                'name': 'Post checkout',
                'entry': 'bash -c "echo ${PRE_COMMIT_TO_REF}"',
                'language': 'system',
                'always_run': True,
                'verbose': True,
                'stages': ['post-checkout'],
            }],
        },
        {'repo': 'meta', 'hooks': [{'id': 'identity'}]},
    ]
    write_config(path, config)
    with cwd(path):
        cmd_output('git', 'add', '.')
        git_commit()

        # add a file only on `feature`, it should not be passed to hooks
        cmd_output('git', 'checkout', '-b', 'feature')
        open('some_file', 'a').close()
        cmd_output('git', 'add', '.')
        git_commit()
        cmd_output('git', 'checkout', 'master')

        install(C.CONFIG_FILE, store, hook_types=['post-checkout'])
        retc, _, stderr = cmd_output('git', 'checkout', 'feature')
        assert stderr is not None
        assert retc == 0
        # PRE_COMMIT_TO_REF was echoed: it is the checked-out revision
        assert git.head_rev(path) in stderr
        # identity (meta) hook got no filenames for the checkout
        assert 'some_file' not in stderr
|
||||
|
||||
|
||||
def test_prepare_commit_msg_integration_failing(
        failing_prepare_commit_msg_repo, tempdir_factory, store,
):
    """A failing prepare-commit-msg hook aborts the commit."""
    install(C.CONFIG_FILE, store, hook_types=['prepare-commit-msg'])
    retc, out = _get_commit_output(tempdir_factory)
    assert retc == 1
    assert out == '''\
Add "Signed off by:".....................................................Failed
- hook id: add-signoff
- exit code: 1
'''
|
||||
|
||||
|
||||
def test_prepare_commit_msg_integration_passing(
        prepare_commit_msg_repo, tempdir_factory, store,
):
    """A passing prepare-commit-msg hook may rewrite the commit message."""
    install(C.CONFIG_FILE, store, hook_types=['prepare-commit-msg'])
    retc, out = _get_commit_output(tempdir_factory, msg='Hi')
    assert retc == 0
    first_line = out.splitlines()[0]
    assert first_line.startswith('Add "Signed off by:"...')
    assert first_line.endswith('...Passed')
    # the hook appended its sign-off to the message file git committed
    commit_msg_path = os.path.join(
        prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
    )
    with open(commit_msg_path) as f:
        assert 'Signed off by: ' in f.read()
|
||||
|
||||
|
||||
def test_prepare_commit_msg_legacy(
        prepare_commit_msg_repo, tempdir_factory, store,
):
    """A legacy prepare-commit-msg hook is chained and gets the msg path ($1)."""
    hook_path = os.path.join(
        prepare_commit_msg_repo, '.git/hooks/prepare-commit-msg',
    )
    os.makedirs(os.path.dirname(hook_path), exist_ok=True)
    with open(hook_path, 'w') as hook_file:
        hook_file.write(
            '#!/usr/bin/env bash\n'
            'set -eu\n'
            'test -e "$1"\n'
            'echo legacy\n',
        )
    make_executable(hook_path)

    install(C.CONFIG_FILE, store, hook_types=['prepare-commit-msg'])

    retc, out = _get_commit_output(tempdir_factory, msg='Hi')
    assert retc == 0
    # legacy output first, then our hook's status line
    first_line, second_line = out.splitlines()[:2]
    assert first_line == 'legacy'
    assert second_line.startswith('Add "Signed off by:"...')
    # our hook still edited the committed message
    commit_msg_path = os.path.join(
        prepare_commit_msg_repo, '.git/COMMIT_EDITMSG',
    )
    with open(commit_msg_path) as f:
        assert 'Signed off by: ' in f.read()
|
||||
|
||||
|
||||
def test_pre_merge_commit_integration(tempdir_factory, store):
    """pre-merge-commit hooks run on a non-fast-forward `git merge`."""
    expected = re.compile(
        r'^\[INFO\] Initializing environment for .+\n'
        r'Bash hook\.+Passed\n'
        r"Merge made by the 'recursive' strategy.\n"
        r' foo \| 0\n'
        r' 1 file changed, 0 insertions\(\+\), 0 deletions\(-\)\n'
        r' create mode 100644 foo\n$',
    )

    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        ret = install(C.CONFIG_FILE, store, hook_types=['pre-merge-commit'])
        assert ret == 0

        # create a side branch and merge it back with --no-ff so a merge
        # commit (and hence the hook) is required
        cmd_output('git', 'checkout', 'master', '-b', 'feature')
        _get_commit_output(tempdir_factory)
        cmd_output('git', 'checkout', 'master')
        ret, output, _ = cmd_output_mocked_pre_commit_home(
            'git', 'merge', '--no-ff', '--no-edit', 'feature',
            tempdir_factory=tempdir_factory,
        )
        assert ret == 0
        assert expected.match(output)
|
||||
|
||||
|
||||
def test_install_disallow_missing_config(tempdir_factory, store):
    """With skip_on_missing_config=False, a missing config fails the commit."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        remove_config_from_repo(path)
        ret = install(
            C.CONFIG_FILE, store, hook_types=['pre-commit'],
            overwrite=True, skip_on_missing_config=False,
        )
        assert ret == 0

        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 1
|
||||
|
||||
|
||||
def test_install_allow_missing_config(tempdir_factory, store):
    """With skip_on_missing_config=True, a missing config skips the hook."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        remove_config_from_repo(path)
        ret = install(
            C.CONFIG_FILE, store, hook_types=['pre-commit'],
            overwrite=True, skip_on_missing_config=True,
        )
        assert ret == 0

        ret, output = _get_commit_output(tempdir_factory)
        assert ret == 0
        expected = (
            '`.pre-commit-config.yaml` config file not found. '
            'Skipping `pre-commit`.'
        )
        assert expected in output
|
||||
|
||||
|
||||
# NOTE(review): "mising" in the name is a typo for "missing"; renaming
# would change the test id, so it is left as-is here.
def test_install_temporarily_allow_mising_config(tempdir_factory, store):
    """PRE_COMMIT_ALLOW_NO_CONFIG=1 overrides skip_on_missing_config=False."""
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    with cwd(path):
        remove_config_from_repo(path)
        ret = install(
            C.CONFIG_FILE, store, hook_types=['pre-commit'],
            overwrite=True, skip_on_missing_config=False,
        )
        assert ret == 0

        env = dict(os.environ, PRE_COMMIT_ALLOW_NO_CONFIG='1')
        ret, output = _get_commit_output(tempdir_factory, env=env)
        assert ret == 0
        expected = (
            '`.pre-commit-config.yaml` config file not found. '
            'Skipping `pre-commit`.'
        )
        assert expected in output
|
156
tests/commands/migrate_config_test.py
Normal file
156
tests/commands/migrate_config_test.py
Normal file
|
@ -0,0 +1,156 @@
|
|||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.commands.migrate_config import _indent
|
||||
from pre_commit.commands.migrate_config import migrate_config
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ('s', 'expected'),
    (
        ('', ''),
        ('a', '    a'),
        ('foo\nbar', '    foo\n    bar'),
        ('foo\n\nbar\n', '    foo\n\n    bar\n'),
        ('\n\n\n', '\n\n\n'),
    ),
)
def test_indent(s, expected):
    """_indent prefixes non-empty lines with four spaces, leaving blanks alone."""
    assert _indent(s) == expected
|
||||
|
||||
|
||||
def test_migrate_config_normal_format(tmpdir, capsys):
    """A top-level-list config is wrapped under a `repos:` key, indented."""
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(
        '- repo: local\n'
        '  hooks:\n'
        '  - id: foo\n'
        '    name: foo\n'
        '    entry: ./bin/foo.sh\n'
        '    language: script\n',
    )
    with tmpdir.as_cwd():
        assert not migrate_config(C.CONFIG_FILE)
    out, _ = capsys.readouterr()
    assert out == 'Configuration has been migrated.\n'
    contents = cfg.read()
    assert contents == (
        'repos:\n'
        '-   repo: local\n'
        '    hooks:\n'
        '    -   id: foo\n'
        '        name: foo\n'
        '        entry: ./bin/foo.sh\n'
        '        language: script\n'
    )
|
||||
|
||||
|
||||
def test_migrate_config_document_marker(tmpdir):
    """Migration preserves leading comments and the `---` document marker."""
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(
        '# comment\n'
        '\n'
        '---\n'
        '- repo: local\n'
        '  hooks:\n'
        '  - id: foo\n'
        '    name: foo\n'
        '    entry: ./bin/foo.sh\n'
        '    language: script\n',
    )
    with tmpdir.as_cwd():
        assert not migrate_config(C.CONFIG_FILE)
    contents = cfg.read()
    assert contents == (
        '# comment\n'
        '\n'
        '---\n'
        'repos:\n'
        '-   repo: local\n'
        '    hooks:\n'
        '    -   id: foo\n'
        '        name: foo\n'
        '        entry: ./bin/foo.sh\n'
        '        language: script\n'
    )
|
||||
|
||||
|
||||
def test_migrate_config_list_literal(tmpdir):
    """Flow-style (bracketed) YAML lists are migrated without reformatting."""
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(
        '[{\n'
        '    repo: local,\n'
        '    hooks: [{\n'
        '        id: foo, name: foo, entry: ./bin/foo.sh,\n'
        '        language: script,\n'
        '    }]\n'
        '}]',
    )
    with tmpdir.as_cwd():
        assert not migrate_config(C.CONFIG_FILE)
    contents = cfg.read()
    assert contents == (
        'repos:\n'
        '    [{\n'
        '        repo: local,\n'
        '        hooks: [{\n'
        '            id: foo, name: foo, entry: ./bin/foo.sh,\n'
        '            language: script,\n'
        '        }]\n'
        '    }]'
    )
|
||||
|
||||
|
||||
def test_already_migrated_configuration_noop(tmpdir, capsys):
    """A config already in `repos:` format is left byte-identical."""
    contents = (
        'repos:\n'
        '-   repo: local\n'
        '    hooks:\n'
        '    -   id: foo\n'
        '        name: foo\n'
        '        entry: ./bin/foo.sh\n'
        '        language: script\n'
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(contents)
    with tmpdir.as_cwd():
        assert not migrate_config(C.CONFIG_FILE)
    out, _ = capsys.readouterr()
    assert out == 'Configuration is already migrated.\n'
    assert cfg.read() == contents
|
||||
|
||||
|
||||
def test_migrate_config_sha_to_rev(tmpdir):
    """Every deprecated `sha:` key is rewritten to `rev:`, values untouched."""
    contents = (
        'repos:\n'
        '-   repo: https://github.com/pre-commit/pre-commit-hooks\n'
        '    sha: v1.2.0\n'
        '    hooks: []\n'
        '-   repo: https://github.com/pre-commit/pre-commit-hooks\n'
        '    sha: v1.2.0\n'
        '    hooks: []\n'
    )
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(contents)
    with tmpdir.as_cwd():
        assert not migrate_config(C.CONFIG_FILE)
    contents = cfg.read()
    assert contents == (
        'repos:\n'
        '-   repo: https://github.com/pre-commit/pre-commit-hooks\n'
        '    rev: v1.2.0\n'
        '    hooks: []\n'
        '-   repo: https://github.com/pre-commit/pre-commit-hooks\n'
        '    rev: v1.2.0\n'
        '    hooks: []\n'
    )
|
||||
|
||||
|
||||
@pytest.mark.parametrize('contents', ('', '\n'))
def test_empty_configuration_file_user_error(tmpdir, contents):
    """An empty config does not crash migration and is left untouched."""
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(contents)
    with tmpdir.as_cwd():
        assert not migrate_config(C.CONFIG_FILE)
    # even though the config is invalid, this should be a noop
    assert cfg.read() == contents
|
1012
tests/commands/run_test.py
Normal file
1012
tests/commands/run_test.py
Normal file
File diff suppressed because it is too large
Load diff
19
tests/commands/sample_config_test.py
Normal file
19
tests/commands/sample_config_test.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
from pre_commit.commands.sample_config import sample_config
|
||||
|
||||
|
||||
def test_sample_config(capsys):
    """sample_config prints the canonical starter config and returns 0."""
    ret = sample_config()
    assert ret == 0
    out, _ = capsys.readouterr()
    assert out == '''\
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
-   repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.4.0
    hooks:
    -   id: trailing-whitespace
    -   id: end-of-file-fixer
    -   id: check-yaml
    -   id: check-added-large-files
'''
|
151
tests/commands/try_repo_test.py
Normal file
151
tests/commands/try_repo_test.py
Normal file
|
@ -0,0 +1,151 @@
|
|||
import os.path
|
||||
import re
|
||||
import time
|
||||
from unittest import mock
|
||||
|
||||
from pre_commit import git
|
||||
from pre_commit.commands.try_repo import try_repo
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
from testing.fixtures import git_dir
|
||||
from testing.fixtures import make_repo
|
||||
from testing.fixtures import modify_manifest
|
||||
from testing.util import cwd
|
||||
from testing.util import git_commit
|
||||
from testing.util import run_opts
|
||||
|
||||
|
||||
def try_repo_opts(repo, ref=None, **kwargs):
    """Build an argparse-like namespace for ``try_repo`` on top of run opts."""
    run_options = run_opts(**kwargs)._asdict()
    return auto_namedtuple(repo=repo, ref=ref, **run_options)
|
||||
|
||||
|
||||
def _get_out(cap_out):
|
||||
out = re.sub(r'\[INFO\].+\n', '', cap_out.get())
|
||||
start, using_config, config, rest = out.split(f'{"=" * 79}\n')
|
||||
assert using_config == 'Using config:\n'
|
||||
return start, config, rest
|
||||
|
||||
|
||||
def _add_test_file():
    """Create an empty ``test-file`` in the cwd and stage all changes."""
    with open('test-file', 'a'):
        pass
    cmd_output('git', 'add', '.')
|
||||
|
||||
|
||||
def _run_try_repo(tempdir_factory, **kwargs):
    """Run try_repo against a fresh git dir with one staged test file."""
    repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')
    with cwd(git_dir(tempdir_factory)):
        _add_test_file()
        assert not try_repo(try_repo_opts(repo, **kwargs))
|
||||
|
||||
|
||||
def test_try_repo_repo_only(cap_out, tempdir_factory):
    """try_repo with only a repo runs every hook from that repo's manifest."""
    # freeze time so hook durations render deterministically as "0s"
    with mock.patch.object(time, 'time', return_value=0.0):
        _run_try_repo(tempdir_factory, verbose=True)
    start, config, rest = _get_out(cap_out)
    assert start == ''
    assert re.match(
        '^repos:\n'
        '-   repo: .+\n'
        '    rev: .+\n'
        '    hooks:\n'
        '    -   id: bash_hook\n'
        '    -   id: bash_hook2\n'
        '    -   id: bash_hook3\n$',
        config,
    )
    assert rest == '''\
Bash hook............................................(no files to check)Skipped
- hook id: bash_hook
Bash hook................................................................Passed
- hook id: bash_hook2
- duration: 0s

test-file

Bash hook............................................(no files to check)Skipped
- hook id: bash_hook3
'''
|
||||
|
||||
|
||||
def test_try_repo_with_specific_hook(cap_out, tempdir_factory):
    """try_repo with hook=... limits the generated config to that one hook."""
    _run_try_repo(tempdir_factory, hook='bash_hook', verbose=True)
    start, config, rest = _get_out(cap_out)
    assert start == ''
    assert re.match(
        '^repos:\n'
        '-   repo: .+\n'
        '    rev: .+\n'
        '    hooks:\n'
        '    -   id: bash_hook\n$',
        config,
    )
    assert rest == '''\
Bash hook............................................(no files to check)Skipped
- hook id: bash_hook
'''
|
||||
|
||||
|
||||
def test_try_repo_relative_path(cap_out, tempdir_factory):
    """try_repo accepts a relative repo path (regression test)."""
    repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')
    with cwd(git_dir(tempdir_factory)):
        _add_test_file()
        relative_repo = os.path.relpath(repo, '.')
        # previously crashed on cloning a relative path
        assert not try_repo(try_repo_opts(relative_repo, hook='bash_hook'))
|
||||
|
||||
|
||||
def test_try_repo_bare_repo(cap_out, tempdir_factory):
    """try_repo accepts a bare repo (.git dir) path (regression test)."""
    repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')
    with cwd(git_dir(tempdir_factory)):
        _add_test_file()
        bare_repo = os.path.join(repo, '.git')
        # previously crashed attempting modification changes
        assert not try_repo(try_repo_opts(bare_repo, hook='bash_hook'))
|
||||
|
||||
|
||||
def test_try_repo_specific_revision(cap_out, tempdir_factory):
    """try_repo with ref=... uses that revision, not the repo's HEAD."""
    repo = make_repo(tempdir_factory, 'script_hooks_repo')
    ref = git.head_rev(repo)
    # advance HEAD past `ref` so using HEAD would produce a different rev
    git_commit(cwd=repo)
    with cwd(git_dir(tempdir_factory)):
        _add_test_file()
        assert not try_repo(try_repo_opts(repo, ref=ref))

    _, config, _ = _get_out(cap_out)
    assert ref in config
|
||||
|
||||
|
||||
def test_try_repo_uncommitted_changes(cap_out, tempdir_factory):
    """Uncommitted manifest changes are picked up via a temporary shadow repo."""
    repo = make_repo(tempdir_factory, 'script_hooks_repo')
    # make an uncommitted change
    with modify_manifest(repo, commit=False) as manifest:
        manifest[0]['name'] = 'modified name!'

    with cwd(git_dir(tempdir_factory)):
        # note: 'test-fie' (sic) -- filename only needs to exist and be staged
        open('test-fie', 'a').close()
        cmd_output('git', 'add', '.')
        assert not try_repo(try_repo_opts(repo))

    start, config, rest = _get_out(cap_out)
    assert start == '[WARNING] Creating temporary repo with uncommitted changes...\n'  # noqa: E501
    assert re.match(
        '^repos:\n'
        '-   repo: .+shadow-repo\n'
        '    rev: .+\n'
        '    hooks:\n'
        '    -   id: bash_hook\n$',
        config,
    )
    assert rest == 'modified name!...........................................................Passed\n'  # noqa: E501
|
||||
|
||||
|
||||
def test_try_repo_staged_changes(tempdir_factory):
    """try_repo works when the tried repo itself has staged (uncommitted) files."""
    repo = make_repo(tempdir_factory, 'modified_file_returns_zero_repo')

    with cwd(repo):
        open('staged-file', 'a').close()
        open('second-staged-file', 'a').close()
        cmd_output('git', 'add', '.')

    with cwd(git_dir(tempdir_factory)):
        assert not try_repo(try_repo_opts(repo, hook='bash_hook'))
|
277
tests/conftest.py
Normal file
277
tests/conftest.py
Normal file
|
@ -0,0 +1,277 @@
|
|||
import functools
|
||||
import io
|
||||
import logging
|
||||
import os.path
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import output
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.logging_handler import logging_handler
|
||||
from pre_commit.store import Store
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import make_executable
|
||||
from testing.fixtures import git_dir
|
||||
from testing.fixtures import make_consuming_repo
|
||||
from testing.fixtures import write_config
|
||||
from testing.util import cwd
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def no_warnings(recwarn):
    """Fail any test that emits warnings (with one known benign exception)."""
    yield
    warnings = []
    for warning in recwarn:  # pragma: no cover
        message = str(warning.message)
        # ImportWarning: Not importing directory '...' missing __init__(.py)
        if not (
                isinstance(warning.message, ImportWarning) and
                message.startswith('Not importing directory ') and
                ' missing __init__' in message
        ):
            warnings.append(
                f'{warning.filename}:{warning.lineno} {message}',
            )
    assert not warnings
|
||||
|
||||
|
||||
@pytest.fixture
def tempdir_factory(tmpdir):
    """Yield a factory whose .get() makes fresh numbered dirs under tmpdir."""
    class TmpdirFactory:
        def __init__(self):
            # next directory index; incremented on every get()
            self.tmpdir_count = 0

        def get(self):
            path = tmpdir.join(str(self.tmpdir_count)).strpath
            self.tmpdir_count += 1
            os.mkdir(path)
            return path

    yield TmpdirFactory()
|
||||
|
||||
|
||||
@pytest.fixture
def in_tmpdir(tempdir_factory):
    """Yield a fresh temporary directory, with the cwd changed into it."""
    path = tempdir_factory.get()
    with cwd(path):
        yield path
|
||||
|
||||
|
||||
@pytest.fixture
def in_git_dir(tmpdir):
    """Yield a freshly `git init`ed directory, with the cwd changed into it."""
    repo = tmpdir.join('repo').ensure_dir()
    with repo.as_cwd():
        cmd_output('git', 'init')
        yield repo
|
||||
|
||||
|
||||
def _make_conflict():
    """Leave the cwd repo mid-merge-conflict on `conflict_file`.

    Creates branches `foo` and `bar` from origin/master with conflicting
    contents for `conflict_file` (plus one branch-unique file each), then
    merges `foo` into `bar` expecting the merge to fail.
    """
    cmd_output('git', 'checkout', 'origin/master', '-b', 'foo')
    with open('conflict_file', 'w') as conflict_file:
        conflict_file.write('herp\nderp\n')
    cmd_output('git', 'add', 'conflict_file')
    with open('foo_only_file', 'w') as foo_only_file:
        foo_only_file.write('foo')
    cmd_output('git', 'add', 'foo_only_file')
    git_commit(msg=_make_conflict.__name__)
    cmd_output('git', 'checkout', 'origin/master', '-b', 'bar')
    with open('conflict_file', 'w') as conflict_file:
        conflict_file.write('harp\nddrp\n')
    cmd_output('git', 'add', 'conflict_file')
    with open('bar_only_file', 'w') as bar_only_file:
        bar_only_file.write('bar')
    cmd_output('git', 'add', 'bar_only_file')
    git_commit(msg=_make_conflict.__name__)
    # retcode=None: the merge is *expected* to fail with a conflict
    cmd_output('git', 'merge', 'foo', retcode=None)
|
||||
|
||||
|
||||
@pytest.fixture
def in_merge_conflict(tempdir_factory):
    """Yield the path of a cloned consuming repo left in a merge conflict.

    The cwd is changed into the conflicted clone for the duration of the
    test.
    """
    path = make_consuming_repo(tempdir_factory, 'script_hooks_repo')
    open(os.path.join(path, 'dummy'), 'a').close()
    cmd_output('git', 'add', 'dummy', cwd=path)
    git_commit(msg=in_merge_conflict.__name__, cwd=path)

    conflict_path = tempdir_factory.get()
    cmd_output('git', 'clone', path, conflict_path)
    with cwd(conflict_path):
        _make_conflict()
        # previously `os.path.join(conflict_path)` -- a single-argument
        # no-op join; yield the path directly.
        yield conflict_path
|
||||
|
||||
|
||||
@pytest.fixture
def in_conflicting_submodule(tempdir_factory):
    """Set the cwd inside a submodule that is mid-merge-conflict, then yield."""
    git_dir_1 = git_dir(tempdir_factory)
    git_dir_2 = git_dir(tempdir_factory)
    git_commit(msg=in_conflicting_submodule.__name__, cwd=git_dir_2)
    cmd_output('git', 'submodule', 'add', git_dir_2, 'sub', cwd=git_dir_1)
    with cwd(os.path.join(git_dir_1, 'sub')):
        _make_conflict()
        yield
|
||||
|
||||
|
||||
@pytest.fixture
def commit_msg_repo(tempdir_factory):
    """Yield a repo configured with a commit-msg hook requiring a sign-off."""
    path = git_dir(tempdir_factory)
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'must-have-signoff',
            'name': 'Must have "Signed off by:"',
            'entry': 'grep -q "Signed off by:"',
            'language': 'system',
            'stages': ['commit-msg'],
        }],
    }
    write_config(path, config)
    with cwd(path):
        cmd_output('git', 'add', '.')
        git_commit(msg=commit_msg_repo.__name__)
        yield path
|
||||
|
||||
|
||||
@pytest.fixture
def prepare_commit_msg_repo(tempdir_factory):
    """Yield a repo with a prepare-commit-msg hook that appends a sign-off."""
    path = git_dir(tempdir_factory)
    script_name = 'add_sign_off.sh'
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'add-signoff',
            'name': 'Add "Signed off by:"',
            'entry': f'./{script_name}',
            'language': 'script',
            'stages': ['prepare-commit-msg'],
        }],
    }
    write_config(path, config)
    with cwd(path):
        with open(script_name, 'w') as script_file:
            # "$1" is the path to the commit message file git passes in
            script_file.write(
                '#!/usr/bin/env bash\n'
                'set -eu\n'
                'echo "\nSigned off by: " >> "$1"\n',
            )
            make_executable(script_name)
        cmd_output('git', 'add', '.')
        git_commit(msg=prepare_commit_msg_repo.__name__)
        yield path
|
||||
|
||||
|
||||
@pytest.fixture
def failing_prepare_commit_msg_repo(tempdir_factory):
    """Yield a repo whose prepare-commit-msg hook always exits 1."""
    path = git_dir(tempdir_factory)
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'add-signoff',
            'name': 'Add "Signed off by:"',
            'entry': 'bash -c "exit 1"',
            'language': 'system',
            'stages': ['prepare-commit-msg'],
        }],
    }
    write_config(path, config)
    with cwd(path):
        cmd_output('git', 'add', '.')
        git_commit(msg=failing_prepare_commit_msg_repo.__name__)
        yield path
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, scope='session')
def dont_write_to_home_directory():
    """pre_commit.store.Store will by default write to the home directory

    We'll mock out `Store.get_default_directory` to raise invariantly so we
    don't construct a `Store` object that writes to our home directory.
    """
    # Named so a forgotten `mock_store_dir` fixture fails loudly and clearly.
    class YouForgotToExplicitlyChooseAStoreDirectory(AssertionError):
        pass

    with mock.patch.object(
        Store,
        'get_default_directory',
        side_effect=YouForgotToExplicitlyChooseAStoreDirectory,
    ):
        yield
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, scope='session')
def configure_logging():
    # Install the project's logging handler (color disabled) for the whole
    # test session so log output is captured consistently.
    with logging_handler(use_color=False):
        yield
|
||||
|
||||
|
||||
@pytest.fixture
def mock_store_dir(tempdir_factory):
    """Point `Store.get_default_directory` at a fresh temp dir; yield it."""
    store_dir = tempdir_factory.get()
    patcher = mock.patch.object(
        Store, 'get_default_directory', return_value=store_dir,
    )
    with patcher:
        yield store_dir
|
||||
|
||||
|
||||
@pytest.fixture
def store(tempdir_factory):
    # A Store rooted in a throwaway directory so tests never touch $HOME.
    yield Store(os.path.join(tempdir_factory.get(), '.pre-commit'))
|
||||
|
||||
|
||||
@pytest.fixture
def log_info_mock():
    # Capture calls to the 'pre_commit' logger's `info` method.
    with mock.patch.object(logging.getLogger('pre_commit'), 'info') as mck:
        yield mck
|
||||
|
||||
|
||||
class FakeStream:
    """A minimal binary write-stream that captures everything into `data`."""

    def __init__(self):
        # In-memory buffer holding every byte written so far.
        self.data = io.BytesIO()

    def write(self, s):
        """Append raw bytes to the capture buffer."""
        self.data.write(s)

    def flush(self):
        """No-op: an in-memory buffer needs no flushing."""
|
||||
|
||||
|
||||
class Fixture:
    """A read-and-reset view over a captured output stream."""

    def __init__(self, stream):
        self._stream = stream

    def get_bytes(self):
        """Get the output as-if no encoding occurred"""
        buf = self._stream.data
        captured = buf.getvalue()
        # Reset the buffer so each call only sees output produced since
        # the previous call.
        buf.seek(0)
        buf.truncate()
        return captured.replace(b'\r\n', b'\n')

    def get(self):
        """Get the output assuming it was written as UTF-8 bytes"""
        return self.get_bytes().decode()
|
||||
|
||||
|
||||
@pytest.fixture
def cap_out():
    # Redirect pre_commit's output helpers into an in-memory stream and
    # yield a Fixture for reading (and resetting) what was written.
    stream = FakeStream()
    write = functools.partial(output.write, stream=stream)
    write_line_b = functools.partial(output.write_line_b, stream=stream)
    with mock.patch.multiple(output, write=write, write_line_b=write_line_b):
        yield Fixture(stream)
|
||||
|
||||
|
||||
@pytest.fixture
def fake_log_handler():
    """Attach a mock handler to the 'pre_commit' logger for one test."""
    mock_handler = mock.Mock(level=logging.INFO)
    pc_logger = logging.getLogger('pre_commit')
    pc_logger.addHandler(mock_handler)
    yield mock_handler
    # Teardown: detach so the mock does not observe later tests' logging.
    pc_logger.removeHandler(mock_handler)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session', autouse=True)
def set_git_templatedir(tmpdir_factory):
    # Point GIT_TEMPLATE_DIR at an empty directory for the whole session so
    # the host machine's git templates (and any hooks inside them) cannot
    # leak into repositories created by the tests.
    tdir = str(tmpdir_factory.mktemp('git_template_dir'))
    with envcontext((('GIT_TEMPLATE_DIR', tdir),)):
        yield
|
101
tests/envcontext_test.py
Normal file
101
tests/envcontext_test.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
import os
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import UNSET
|
||||
from pre_commit.envcontext import Var
|
||||
|
||||
|
||||
def _test(*, before, patch, expected):
    """Apply `patch` to a copy of `before` via envcontext and assert the
    environment equals `expected` inside the block and is restored to
    `before` once the block exits."""
    env = before.copy()
    with envcontext(patch, _env=env):
        assert env == expected
    assert env == before
|
||||
|
||||
|
||||
def test_trivial():
|
||||
_test(before={}, patch={}, expected={})
|
||||
|
||||
|
||||
def test_noop():
|
||||
_test(before={'foo': 'bar'}, patch=(), expected={'foo': 'bar'})
|
||||
|
||||
|
||||
def test_adds():
|
||||
_test(before={}, patch=[('foo', 'bar')], expected={'foo': 'bar'})
|
||||
|
||||
|
||||
def test_overrides():
|
||||
_test(
|
||||
before={'foo': 'baz'},
|
||||
patch=[('foo', 'bar')],
|
||||
expected={'foo': 'bar'},
|
||||
)
|
||||
|
||||
|
||||
def test_unset_but_nothing_to_unset():
|
||||
_test(before={}, patch=[('foo', UNSET)], expected={})
|
||||
|
||||
|
||||
def test_unset_things_to_remove():
|
||||
_test(
|
||||
before={'PYTHONHOME': ''},
|
||||
patch=[('PYTHONHOME', UNSET)],
|
||||
expected={},
|
||||
)
|
||||
|
||||
|
||||
def test_templated_environment_variable_missing():
|
||||
_test(
|
||||
before={},
|
||||
patch=[('PATH', ('~/bin:', Var('PATH')))],
|
||||
expected={'PATH': '~/bin:'},
|
||||
)
|
||||
|
||||
|
||||
def test_templated_environment_variable_defaults():
|
||||
_test(
|
||||
before={},
|
||||
patch=[('PATH', ('~/bin:', Var('PATH', default='/bin')))],
|
||||
expected={'PATH': '~/bin:/bin'},
|
||||
)
|
||||
|
||||
|
||||
def test_templated_environment_variable_there():
|
||||
_test(
|
||||
before={'PATH': '/usr/local/bin:/usr/bin'},
|
||||
patch=[('PATH', ('~/bin:', Var('PATH')))],
|
||||
expected={'PATH': '~/bin:/usr/local/bin:/usr/bin'},
|
||||
)
|
||||
|
||||
|
||||
def test_templated_environ_sources_from_previous():
|
||||
_test(
|
||||
before={'foo': 'bar'},
|
||||
patch=(
|
||||
('foo', 'baz'),
|
||||
('herp', ('foo: ', Var('foo'))),
|
||||
),
|
||||
expected={'foo': 'baz', 'herp': 'foo: bar'},
|
||||
)
|
||||
|
||||
|
||||
def test_exception_safety():
    """The environment is restored even when the managed block raises."""
    class MyError(RuntimeError):
        pass

    env = {'hello': 'world'}
    with pytest.raises(MyError), envcontext((('foo', 'bar'),), _env=env):
        raise MyError()
    # The patch ('foo' -> 'bar') must have been rolled back.
    assert env == {'hello': 'world'}
|
||||
|
||||
|
||||
def test_integration_os_environ():
|
||||
with mock.patch.dict(os.environ, {'FOO': 'bar'}, clear=True):
|
||||
assert os.environ == {'FOO': 'bar'}
|
||||
with envcontext((('HERP', 'derp'),)):
|
||||
assert os.environ == {'FOO': 'bar', 'HERP': 'derp'}
|
||||
assert os.environ == {'FOO': 'bar'}
|
170
tests/error_handler_test.py
Normal file
170
tests/error_handler_test.py
Normal file
|
@ -0,0 +1,170 @@
|
|||
import os.path
|
||||
import re
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import error_handler
|
||||
from pre_commit.util import CalledProcessError
|
||||
from testing.util import cmd_output_mocked_pre_commit_home
|
||||
|
||||
|
||||
@pytest.fixture
def mocked_log_and_exit():
    # Patch out the error handler's terminal step so tests can assert on
    # how it was called without the process actually exiting.
    with mock.patch.object(error_handler, '_log_and_exit') as log_and_exit:
        yield log_and_exit
|
||||
|
||||
|
||||
def test_error_handler_no_exception(mocked_log_and_exit):
|
||||
with error_handler.error_handler():
|
||||
pass
|
||||
assert mocked_log_and_exit.call_count == 0
|
||||
|
||||
|
||||
def test_error_handler_fatal_error(mocked_log_and_exit):
|
||||
exc = error_handler.FatalError('just a test')
|
||||
with error_handler.error_handler():
|
||||
raise exc
|
||||
|
||||
mocked_log_and_exit.assert_called_once_with(
|
||||
'An error has occurred',
|
||||
exc,
|
||||
# Tested below
|
||||
mock.ANY,
|
||||
)
|
||||
|
||||
assert re.match(
|
||||
r'Traceback \(most recent call last\):\n'
|
||||
r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
|
||||
r' yield\n'
|
||||
r' File ".+tests.error_handler_test.py", line \d+, '
|
||||
r'in test_error_handler_fatal_error\n'
|
||||
r' raise exc\n'
|
||||
r'(pre_commit\.error_handler\.)?FatalError: just a test\n',
|
||||
mocked_log_and_exit.call_args[0][2],
|
||||
)
|
||||
|
||||
|
||||
def test_error_handler_uncaught_error(mocked_log_and_exit):
|
||||
exc = ValueError('another test')
|
||||
with error_handler.error_handler():
|
||||
raise exc
|
||||
|
||||
mocked_log_and_exit.assert_called_once_with(
|
||||
'An unexpected error has occurred',
|
||||
exc,
|
||||
# Tested below
|
||||
mock.ANY,
|
||||
)
|
||||
assert re.match(
|
||||
r'Traceback \(most recent call last\):\n'
|
||||
r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
|
||||
r' yield\n'
|
||||
r' File ".+tests.error_handler_test.py", line \d+, '
|
||||
r'in test_error_handler_uncaught_error\n'
|
||||
r' raise exc\n'
|
||||
r'ValueError: another test\n',
|
||||
mocked_log_and_exit.call_args[0][2],
|
||||
)
|
||||
|
||||
|
||||
def test_error_handler_keyboardinterrupt(mocked_log_and_exit):
|
||||
exc = KeyboardInterrupt()
|
||||
with error_handler.error_handler():
|
||||
raise exc
|
||||
|
||||
mocked_log_and_exit.assert_called_once_with(
|
||||
'Interrupted (^C)',
|
||||
exc,
|
||||
# Tested below
|
||||
mock.ANY,
|
||||
)
|
||||
assert re.match(
|
||||
r'Traceback \(most recent call last\):\n'
|
||||
r' File ".+pre_commit.error_handler.py", line \d+, in error_handler\n'
|
||||
r' yield\n'
|
||||
r' File ".+tests.error_handler_test.py", line \d+, '
|
||||
r'in test_error_handler_keyboardinterrupt\n'
|
||||
r' raise exc\n'
|
||||
r'KeyboardInterrupt\n',
|
||||
mocked_log_and_exit.call_args[0][2],
|
||||
)
|
||||
|
||||
|
||||
def test_log_and_exit(cap_out, mock_store_dir):
|
||||
with pytest.raises(SystemExit):
|
||||
error_handler._log_and_exit(
|
||||
'msg', error_handler.FatalError('hai'), "I'm a stacktrace",
|
||||
)
|
||||
|
||||
printed = cap_out.get()
|
||||
log_file = os.path.join(mock_store_dir, 'pre-commit.log')
|
||||
assert printed == f'msg: FatalError: hai\nCheck the log at {log_file}\n'
|
||||
|
||||
assert os.path.exists(log_file)
|
||||
with open(log_file) as f:
|
||||
logged = f.read()
|
||||
expected = (
|
||||
r'^### version information\n'
|
||||
r'\n'
|
||||
r'```\n'
|
||||
r'pre-commit version: \d+\.\d+\.\d+\n'
|
||||
r'sys.version:\n'
|
||||
r'( .*\n)*'
|
||||
r'sys.executable: .*\n'
|
||||
r'os.name: .*\n'
|
||||
r'sys.platform: .*\n'
|
||||
r'```\n'
|
||||
r'\n'
|
||||
r'### error information\n'
|
||||
r'\n'
|
||||
r'```\n'
|
||||
r'msg: FatalError: hai\n'
|
||||
r'```\n'
|
||||
r'\n'
|
||||
r'```\n'
|
||||
r"I'm a stacktrace\n"
|
||||
r'```\n'
|
||||
)
|
||||
assert re.match(expected, logged)
|
||||
|
||||
|
||||
def test_error_handler_non_ascii_exception(mock_store_dir):
|
||||
with pytest.raises(SystemExit):
|
||||
with error_handler.error_handler():
|
||||
raise ValueError('☃')
|
||||
|
||||
|
||||
def test_error_handler_non_utf8_exception(mock_store_dir):
|
||||
with pytest.raises(SystemExit):
|
||||
with error_handler.error_handler():
|
||||
raise CalledProcessError(1, ('exe',), 0, b'error: \xa0\xe1', b'')
|
||||
|
||||
|
||||
def test_error_handler_non_stringable_exception(mock_store_dir):
|
||||
class C(Exception):
|
||||
def __str__(self):
|
||||
raise RuntimeError('not today!')
|
||||
|
||||
with pytest.raises(SystemExit):
|
||||
with error_handler.error_handler():
|
||||
raise C()
|
||||
|
||||
|
||||
def test_error_handler_no_tty(tempdir_factory):
|
||||
pre_commit_home = tempdir_factory.get()
|
||||
ret, out, _ = cmd_output_mocked_pre_commit_home(
|
||||
sys.executable,
|
||||
'-c',
|
||||
'from pre_commit.error_handler import error_handler\n'
|
||||
'with error_handler():\n'
|
||||
' raise ValueError("\\u2603")\n',
|
||||
retcode=1,
|
||||
tempdir_factory=tempdir_factory,
|
||||
pre_commit_home=pre_commit_home,
|
||||
)
|
||||
log_file = os.path.join(pre_commit_home, 'pre-commit.log')
|
||||
out_lines = out.splitlines()
|
||||
assert out_lines[-2] == 'An unexpected error has occurred: ValueError: ☃'
|
||||
assert out_lines[-1] == f'Check the log at {log_file}'
|
188
tests/git_test.py
Normal file
188
tests/git_test.py
Normal file
|
@ -0,0 +1,188 @@
|
|||
import os.path
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import git
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
def test_get_root_at_root(in_git_dir):
|
||||
expected = os.path.normcase(in_git_dir.strpath)
|
||||
assert os.path.normcase(git.get_root()) == expected
|
||||
|
||||
|
||||
def test_get_root_deeper(in_git_dir):
|
||||
expected = os.path.normcase(in_git_dir.strpath)
|
||||
with in_git_dir.join('foo').ensure_dir().as_cwd():
|
||||
assert os.path.normcase(git.get_root()) == expected
|
||||
|
||||
|
||||
def test_get_staged_files_deleted(in_git_dir):
|
||||
in_git_dir.join('test').ensure()
|
||||
cmd_output('git', 'add', 'test')
|
||||
git_commit()
|
||||
cmd_output('git', 'rm', '--cached', 'test')
|
||||
assert git.get_staged_files() == []
|
||||
|
||||
|
||||
def test_is_not_in_merge_conflict(in_git_dir):
|
||||
assert git.is_in_merge_conflict() is False
|
||||
|
||||
|
||||
def test_is_in_merge_conflict(in_merge_conflict):
|
||||
assert git.is_in_merge_conflict() is True
|
||||
|
||||
|
||||
def test_is_in_merge_conflict_submodule(in_conflicting_submodule):
|
||||
assert git.is_in_merge_conflict() is True
|
||||
|
||||
|
||||
def test_cherry_pick_conflict(in_merge_conflict):
|
||||
cmd_output('git', 'merge', '--abort')
|
||||
foo_ref = cmd_output('git', 'rev-parse', 'foo')[1].strip()
|
||||
cmd_output('git', 'cherry-pick', foo_ref, retcode=None)
|
||||
assert git.is_in_merge_conflict() is False
|
||||
|
||||
|
||||
def resolve_conflict():
    """Overwrite the conflicted file with merged contents and stage it."""
    resolution = 'herp\nderp\n'
    with open('conflict_file', 'w') as fh:
        fh.write(resolution)
    cmd_output('git', 'add', 'conflict_file')
|
||||
|
||||
|
||||
def test_get_conflicted_files(in_merge_conflict):
|
||||
resolve_conflict()
|
||||
with open('other_file', 'w') as other_file:
|
||||
other_file.write('oh hai')
|
||||
cmd_output('git', 'add', 'other_file')
|
||||
|
||||
ret = set(git.get_conflicted_files())
|
||||
assert ret == {'conflict_file', 'other_file'}
|
||||
|
||||
|
||||
def test_get_conflicted_files_in_submodule(in_conflicting_submodule):
|
||||
resolve_conflict()
|
||||
assert set(git.get_conflicted_files()) == {'conflict_file'}
|
||||
|
||||
|
||||
def test_get_conflicted_files_unstaged_files(in_merge_conflict):
|
||||
"""This case no longer occurs, but it is a useful test nonetheless"""
|
||||
resolve_conflict()
|
||||
|
||||
# Make unstaged file.
|
||||
with open('bar_only_file', 'w') as bar_only_file:
|
||||
bar_only_file.write('new contents!\n')
|
||||
|
||||
ret = set(git.get_conflicted_files())
|
||||
assert ret == {'conflict_file'}
|
||||
|
||||
|
||||
MERGE_MSG = b"Merge branch 'foo' into bar\n\nConflicts:\n\tconflict_file\n"
|
||||
OTHER_MERGE_MSG = MERGE_MSG + b'\tother_conflict_file\n'
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('input', 'expected_output'),
|
||||
(
|
||||
(MERGE_MSG, ['conflict_file']),
|
||||
(OTHER_MERGE_MSG, ['conflict_file', 'other_conflict_file']),
|
||||
),
|
||||
)
|
||||
def test_parse_merge_msg_for_conflicts(input, expected_output):
|
||||
ret = git.parse_merge_msg_for_conflicts(input)
|
||||
assert ret == expected_output
|
||||
|
||||
|
||||
def test_get_changed_files(in_git_dir):
|
||||
git_commit()
|
||||
in_git_dir.join('a.txt').ensure()
|
||||
in_git_dir.join('b.txt').ensure()
|
||||
cmd_output('git', 'add', '.')
|
||||
git_commit()
|
||||
files = git.get_changed_files('HEAD^', 'HEAD')
|
||||
assert files == ['a.txt', 'b.txt']
|
||||
|
||||
# files changed in source but not in origin should not be returned
|
||||
files = git.get_changed_files('HEAD', 'HEAD^')
|
||||
assert files == []
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('s', 'expected'),
|
||||
(
|
||||
('foo\0bar\0', ['foo', 'bar']),
|
||||
('foo\0', ['foo']),
|
||||
('', []),
|
||||
('foo', ['foo']),
|
||||
),
|
||||
)
|
||||
def test_zsplit(s, expected):
|
||||
assert git.zsplit(s) == expected
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def non_ascii_repo(in_git_dir):
|
||||
git_commit()
|
||||
in_git_dir.join('интервью').ensure()
|
||||
cmd_output('git', 'add', '.')
|
||||
git_commit()
|
||||
yield in_git_dir
|
||||
|
||||
|
||||
def test_all_files_non_ascii(non_ascii_repo):
|
||||
ret = git.get_all_files()
|
||||
assert ret == ['интервью']
|
||||
|
||||
|
||||
def test_staged_files_non_ascii(non_ascii_repo):
|
||||
non_ascii_repo.join('интервью').write('hi')
|
||||
cmd_output('git', 'add', '.')
|
||||
assert git.get_staged_files() == ['интервью']
|
||||
|
||||
|
||||
def test_changed_files_non_ascii(non_ascii_repo):
|
||||
ret = git.get_changed_files('HEAD^', 'HEAD')
|
||||
assert ret == ['интервью']
|
||||
|
||||
|
||||
def test_get_conflicted_files_non_ascii(in_merge_conflict):
|
||||
open('интервью', 'a').close()
|
||||
cmd_output('git', 'add', '.')
|
||||
ret = git.get_conflicted_files()
|
||||
assert ret == {'conflict_file', 'интервью'}
|
||||
|
||||
|
||||
def test_intent_to_add(in_git_dir):
|
||||
in_git_dir.join('a').ensure()
|
||||
cmd_output('git', 'add', '--intent-to-add', 'a')
|
||||
|
||||
assert git.intent_to_add_files() == ['a']
|
||||
|
||||
|
||||
def test_status_output_with_rename(in_git_dir):
|
||||
in_git_dir.join('a').write('1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n')
|
||||
cmd_output('git', 'add', 'a')
|
||||
git_commit()
|
||||
cmd_output('git', 'mv', 'a', 'b')
|
||||
in_git_dir.join('c').ensure()
|
||||
cmd_output('git', 'add', '--intent-to-add', 'c')
|
||||
|
||||
assert git.intent_to_add_files() == ['c']
|
||||
|
||||
|
||||
def test_no_git_env():
|
||||
env = {
|
||||
'http_proxy': 'http://myproxy:80',
|
||||
'GIT_EXEC_PATH': '/some/git/exec/path',
|
||||
'GIT_SSH': '/usr/bin/ssh',
|
||||
'GIT_SSH_COMMAND': 'ssh -o',
|
||||
'GIT_DIR': '/none/shall/pass',
|
||||
}
|
||||
no_git_env = git.no_git_env(env)
|
||||
assert no_git_env == {
|
||||
'http_proxy': 'http://myproxy:80',
|
||||
'GIT_EXEC_PATH': '/some/git/exec/path',
|
||||
'GIT_SSH': '/usr/bin/ssh',
|
||||
'GIT_SSH_COMMAND': 'ssh -o',
|
||||
}
|
0
tests/languages/__init__.py
Normal file
0
tests/languages/__init__.py
Normal file
23
tests/languages/docker_test.py
Normal file
23
tests/languages/docker_test.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
from unittest import mock
|
||||
|
||||
from pre_commit.languages import docker
|
||||
from pre_commit.util import CalledProcessError
|
||||
|
||||
|
||||
def test_docker_is_running_process_error():
|
||||
with mock.patch(
|
||||
'pre_commit.languages.docker.cmd_output_b',
|
||||
side_effect=CalledProcessError(1, (), 0, b'', None),
|
||||
):
|
||||
assert docker.docker_is_running() is False
|
||||
|
||||
|
||||
def test_docker_fallback_user():
|
||||
def invalid_attribute():
|
||||
raise AttributeError
|
||||
with mock.patch.multiple(
|
||||
'os', create=True,
|
||||
getuid=invalid_attribute,
|
||||
getgid=invalid_attribute,
|
||||
):
|
||||
assert docker.get_docker_user() == '1000:1000'
|
20
tests/languages/golang_test.py
Normal file
20
tests/languages/golang_test.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
import pytest
|
||||
|
||||
from pre_commit.languages.golang import guess_go_dir
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('url', 'expected'),
|
||||
(
|
||||
('/im/a/path/on/disk', 'unknown_src_dir'),
|
||||
('file:///im/a/path/on/disk', 'unknown_src_dir'),
|
||||
('git@github.com:golang/lint', 'github.com/golang/lint'),
|
||||
('git://github.com/golang/lint', 'github.com/golang/lint'),
|
||||
('http://github.com/golang/lint', 'github.com/golang/lint'),
|
||||
('https://github.com/golang/lint', 'github.com/golang/lint'),
|
||||
('ssh://git@github.com/golang/lint', 'github.com/golang/lint'),
|
||||
('git@github.com:golang/lint.git', 'github.com/golang/lint'),
|
||||
),
|
||||
)
|
||||
def test_guess_go_dir(url, expected):
|
||||
assert guess_go_dir(url) == expected
|
82
tests/languages/helpers_test.py
Normal file
82
tests/languages/helpers_test.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
import multiprocessing
|
||||
import os
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import CalledProcessError
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
|
||||
|
||||
def test_basic_get_default_version():
|
||||
assert helpers.basic_get_default_version() == C.DEFAULT
|
||||
|
||||
|
||||
def test_basic_healthy():
|
||||
assert helpers.basic_healthy(Prefix('.'), 'default') is True
|
||||
|
||||
|
||||
def test_failed_setup_command_does_not_unicode_error():
|
||||
script = (
|
||||
'import sys\n'
|
||||
"getattr(sys.stderr, 'buffer', sys.stderr).write(b'\\x81\\xfe')\n"
|
||||
'exit(1)\n'
|
||||
)
|
||||
|
||||
# an assertion that this does not raise `UnicodeError`
|
||||
with pytest.raises(CalledProcessError):
|
||||
helpers.run_setup_cmd(Prefix('.'), (sys.executable, '-c', script))
|
||||
|
||||
|
||||
def test_assert_no_additional_deps():
|
||||
with pytest.raises(AssertionError) as excinfo:
|
||||
helpers.assert_no_additional_deps('lang', ['hmmm'])
|
||||
msg, = excinfo.value.args
|
||||
assert msg == (
|
||||
'For now, pre-commit does not support additional_dependencies for lang'
|
||||
)
|
||||
|
||||
|
||||
SERIAL_FALSE = auto_namedtuple(require_serial=False)
|
||||
SERIAL_TRUE = auto_namedtuple(require_serial=True)
|
||||
|
||||
|
||||
def test_target_concurrency_normal():
|
||||
with mock.patch.object(multiprocessing, 'cpu_count', return_value=123):
|
||||
with mock.patch.dict(os.environ, {}, clear=True):
|
||||
assert helpers.target_concurrency(SERIAL_FALSE) == 123
|
||||
|
||||
|
||||
def test_target_concurrency_cpu_count_require_serial_true():
|
||||
with mock.patch.dict(os.environ, {}, clear=True):
|
||||
assert helpers.target_concurrency(SERIAL_TRUE) == 1
|
||||
|
||||
|
||||
def test_target_concurrency_testing_env_var():
|
||||
with mock.patch.dict(
|
||||
os.environ, {'PRE_COMMIT_NO_CONCURRENCY': '1'}, clear=True,
|
||||
):
|
||||
assert helpers.target_concurrency(SERIAL_FALSE) == 1
|
||||
|
||||
|
||||
def test_target_concurrency_on_travis():
|
||||
with mock.patch.dict(os.environ, {'TRAVIS': '1'}, clear=True):
|
||||
assert helpers.target_concurrency(SERIAL_FALSE) == 2
|
||||
|
||||
|
||||
def test_target_concurrency_cpu_count_not_implemented():
|
||||
with mock.patch.object(
|
||||
multiprocessing, 'cpu_count', side_effect=NotImplementedError,
|
||||
):
|
||||
with mock.patch.dict(os.environ, {}, clear=True):
|
||||
assert helpers.target_concurrency(SERIAL_FALSE) == 1
|
||||
|
||||
|
||||
def test_shuffled_is_deterministic():
|
||||
seq = [str(i) for i in range(10)]
|
||||
expected = ['3', '7', '8', '2', '4', '6', '5', '1', '0', '9']
|
||||
assert helpers._shuffled(seq) == expected
|
65
tests/languages/pygrep_test.py
Normal file
65
tests/languages/pygrep_test.py
Normal file
|
@ -0,0 +1,65 @@
|
|||
import pytest
|
||||
|
||||
from pre_commit.languages import pygrep
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def some_files(tmpdir):
|
||||
tmpdir.join('f1').write_binary(b'foo\nbar\n')
|
||||
tmpdir.join('f2').write_binary(b'[INFO] hi\n')
|
||||
tmpdir.join('f3').write_binary(b"with'quotes\n")
|
||||
with tmpdir.as_cwd():
|
||||
yield
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('some_files')
|
||||
@pytest.mark.parametrize(
|
||||
('pattern', 'expected_retcode', 'expected_out'),
|
||||
(
|
||||
('baz', 0, ''),
|
||||
('foo', 1, 'f1:1:foo\n'),
|
||||
('bar', 1, 'f1:2:bar\n'),
|
||||
(r'(?i)\[info\]', 1, 'f2:1:[INFO] hi\n'),
|
||||
("h'q", 1, "f3:1:with'quotes\n"),
|
||||
),
|
||||
)
|
||||
def test_main(some_files, cap_out, pattern, expected_retcode, expected_out):
|
||||
ret = pygrep.main((pattern, 'f1', 'f2', 'f3'))
|
||||
out = cap_out.get()
|
||||
assert ret == expected_retcode
|
||||
assert out == expected_out
|
||||
|
||||
|
||||
def test_ignore_case(some_files, cap_out):
|
||||
ret = pygrep.main(('--ignore-case', 'info', 'f1', 'f2', 'f3'))
|
||||
out = cap_out.get()
|
||||
assert ret == 1
|
||||
assert out == 'f2:1:[INFO] hi\n'
|
||||
|
||||
|
||||
def test_multiline(some_files, cap_out):
|
||||
ret = pygrep.main(('--multiline', r'foo\nbar', 'f1', 'f2', 'f3'))
|
||||
out = cap_out.get()
|
||||
assert ret == 1
|
||||
assert out == 'f1:1:foo\nbar\n'
|
||||
|
||||
|
||||
def test_multiline_line_number(some_files, cap_out):
|
||||
ret = pygrep.main(('--multiline', r'ar', 'f1', 'f2', 'f3'))
|
||||
out = cap_out.get()
|
||||
assert ret == 1
|
||||
assert out == 'f1:2:bar\n'
|
||||
|
||||
|
||||
def test_multiline_dotall_flag_is_enabled(some_files, cap_out):
|
||||
ret = pygrep.main(('--multiline', r'o.*bar', 'f1', 'f2', 'f3'))
|
||||
out = cap_out.get()
|
||||
assert ret == 1
|
||||
assert out == 'f1:1:foo\nbar\n'
|
||||
|
||||
|
||||
def test_multiline_multiline_flag_is_enabled(some_files, cap_out):
|
||||
ret = pygrep.main(('--multiline', r'foo$.*bar', 'f1', 'f2', 'f3'))
|
||||
out = cap_out.get()
|
||||
assert ret == 1
|
||||
assert out == 'f1:1:foo\nbar\n'
|
75
tests/languages/python_test.py
Normal file
75
tests/languages/python_test.py
Normal file
|
@ -0,0 +1,75 @@
|
|||
import os.path
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.languages import python
|
||||
from pre_commit.prefix import Prefix
|
||||
|
||||
|
||||
def test_norm_version_expanduser():
|
||||
home = os.path.expanduser('~')
|
||||
if os.name == 'nt': # pragma: nt cover
|
||||
path = r'~\python343'
|
||||
expected_path = fr'{home}\python343'
|
||||
else: # pragma: nt no cover
|
||||
path = '~/.pyenv/versions/3.4.3/bin/python'
|
||||
expected_path = f'{home}/.pyenv/versions/3.4.3/bin/python'
|
||||
result = python.norm_version(path)
|
||||
assert result == expected_path
|
||||
|
||||
|
||||
@pytest.mark.parametrize('v', ('python3.6', 'python3', 'python'))
|
||||
def test_sys_executable_matches(v):
|
||||
with mock.patch.object(sys, 'version_info', (3, 6, 7)):
|
||||
assert python._sys_executable_matches(v)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('v', ('notpython', 'python3.x'))
|
||||
def test_sys_executable_matches_does_not_match(v):
|
||||
with mock.patch.object(sys, 'version_info', (3, 6, 7)):
|
||||
assert not python._sys_executable_matches(v)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('exe', 'realpath', 'expected'), (
|
||||
('/usr/bin/python3', '/usr/bin/python3.7', 'python3'),
|
||||
('/usr/bin/python', '/usr/bin/python3.7', 'python3.7'),
|
||||
('/usr/bin/python', '/usr/bin/python', None),
|
||||
('/usr/bin/python3.6m', '/usr/bin/python3.6m', 'python3.6m'),
|
||||
('v/bin/python', 'v/bin/pypy', 'pypy'),
|
||||
),
|
||||
)
|
||||
def test_find_by_sys_executable(exe, realpath, expected):
|
||||
with mock.patch.object(sys, 'executable', exe):
|
||||
with mock.patch.object(os.path, 'realpath', return_value=realpath):
|
||||
with mock.patch.object(python, 'find_executable', lambda x: x):
|
||||
assert python._find_by_sys_executable() == expected
|
||||
|
||||
|
||||
def test_healthy_types_py_in_cwd(tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
prefix = tmpdir.join('prefix').ensure_dir()
|
||||
prefix.join('setup.py').write('import setuptools; setuptools.setup()')
|
||||
prefix = Prefix(str(prefix))
|
||||
python.install_environment(prefix, C.DEFAULT, ())
|
||||
|
||||
# even if a `types.py` file exists, should still be healthy
|
||||
tmpdir.join('types.py').ensure()
|
||||
assert python.healthy(prefix, C.DEFAULT) is True
|
||||
|
||||
|
||||
def test_healthy_python_goes_missing(tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
prefix = tmpdir.join('prefix').ensure_dir()
|
||||
prefix.join('setup.py').write('import setuptools; setuptools.setup()')
|
||||
prefix = Prefix(str(prefix))
|
||||
python.install_environment(prefix, C.DEFAULT, ())
|
||||
|
||||
exe_name = 'python' if sys.platform != 'win32' else 'python.exe'
|
||||
py_exe = prefix.path(python.bin_dir('py_env-default'), exe_name)
|
||||
os.remove(py_exe)
|
||||
|
||||
assert python.healthy(prefix, C.DEFAULT) is False
|
28
tests/languages/ruby_test.py
Normal file
28
tests/languages/ruby_test.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
import os.path
|
||||
|
||||
from pre_commit.languages import ruby
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.util import xfailif_windows_no_ruby
|
||||
|
||||
|
||||
@xfailif_windows_no_ruby
|
||||
def test_install_rbenv(tempdir_factory):
|
||||
prefix = Prefix(tempdir_factory.get())
|
||||
ruby._install_rbenv(prefix)
|
||||
# Should have created rbenv directory
|
||||
assert os.path.exists(prefix.path('rbenv-default'))
|
||||
|
||||
# Should be able to activate using our script and access rbenv
|
||||
with ruby.in_env(prefix, 'default'):
|
||||
cmd_output('rbenv', '--help')
|
||||
|
||||
|
||||
@xfailif_windows_no_ruby
|
||||
def test_install_rbenv_with_version(tempdir_factory):
|
||||
prefix = Prefix(tempdir_factory.get())
|
||||
ruby._install_rbenv(prefix, version='1.9.3p547')
|
||||
|
||||
# Should be able to activate and use rbenv install
|
||||
with ruby.in_env(prefix, '1.9.3p547'):
|
||||
cmd_output('rbenv', 'install', '--help')
|
21
tests/logging_handler_test.py
Normal file
21
tests/logging_handler_test.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
import logging
|
||||
|
||||
from pre_commit import color
|
||||
from pre_commit.logging_handler import LoggingHandler
|
||||
|
||||
|
||||
def _log_record(message, level):
|
||||
return logging.LogRecord('name', level, '', 1, message, {}, None)
|
||||
|
||||
|
||||
def test_logging_handler_color(cap_out):
|
||||
handler = LoggingHandler(True)
|
||||
handler.emit(_log_record('hi', logging.WARNING))
|
||||
ret = cap_out.get()
|
||||
assert ret == f'{color.YELLOW}[WARNING]{color.NORMAL} hi\n'
|
||||
|
||||
|
||||
def test_logging_handler_no_color(cap_out):
|
||||
handler = LoggingHandler(False)
|
||||
handler.emit(_log_record('hi', logging.WARNING))
|
||||
assert cap_out.get() == '[WARNING] hi\n'
|
189
tests/main_test.py
Normal file
189
tests/main_test.py
Normal file
|
@ -0,0 +1,189 @@
|
|||
import argparse
|
||||
import os.path
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import main
|
||||
from pre_commit.error_handler import FatalError
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('argv', 'expected'),
|
||||
(
|
||||
((), ['f']),
|
||||
(('--f', 'x'), ['x']),
|
||||
(('--f', 'x', '--f', 'y'), ['x', 'y']),
|
||||
),
|
||||
)
|
||||
def test_append_replace_default(argv, expected):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--f', action=main.AppendReplaceDefault, default=['f'])
|
||||
assert parser.parse_args(argv).f == expected
|
||||
|
||||
|
||||
def _args(**kwargs):
    """Namespace with `command` and `config` defaulted as main() sets them."""
    defaults = {'command': 'help', 'config': C.CONFIG_FILE}
    return argparse.Namespace(**{**defaults, **kwargs})
|
||||
|
||||
|
||||
def test_adjust_args_and_chdir_not_in_git_dir(in_tmpdir):
|
||||
with pytest.raises(FatalError):
|
||||
main._adjust_args_and_chdir(_args())
|
||||
|
||||
|
||||
def test_adjust_args_and_chdir_in_dot_git_dir(in_git_dir):
|
||||
with in_git_dir.join('.git').as_cwd(), pytest.raises(FatalError):
|
||||
main._adjust_args_and_chdir(_args())
|
||||
|
||||
|
||||
def test_adjust_args_and_chdir_noop(in_git_dir):
|
||||
args = _args(command='run', files=['f1', 'f2'])
|
||||
main._adjust_args_and_chdir(args)
|
||||
assert os.getcwd() == in_git_dir
|
||||
assert args.config == C.CONFIG_FILE
|
||||
assert args.files == ['f1', 'f2']
|
||||
|
||||
|
||||
def test_adjust_args_and_chdir_relative_things(in_git_dir):
|
||||
in_git_dir.join('foo/cfg.yaml').ensure()
|
||||
in_git_dir.join('foo').chdir()
|
||||
|
||||
args = _args(command='run', files=['f1', 'f2'], config='cfg.yaml')
|
||||
main._adjust_args_and_chdir(args)
|
||||
assert os.getcwd() == in_git_dir
|
||||
assert args.config == os.path.join('foo', 'cfg.yaml')
|
||||
assert args.files == [os.path.join('foo', 'f1'), os.path.join('foo', 'f2')]
|
||||
|
||||
|
||||
def test_adjust_args_and_chdir_non_relative_config(in_git_dir):
|
||||
in_git_dir.join('foo').ensure_dir().chdir()
|
||||
|
||||
args = _args()
|
||||
main._adjust_args_and_chdir(args)
|
||||
assert os.getcwd() == in_git_dir
|
||||
assert args.config == C.CONFIG_FILE
|
||||
|
||||
|
||||
def test_adjust_args_try_repo_repo_relative(in_git_dir):
|
||||
in_git_dir.join('foo').ensure_dir().chdir()
|
||||
|
||||
args = _args(command='try-repo', repo='../foo', files=[])
|
||||
assert args.repo is not None
|
||||
assert os.path.exists(args.repo)
|
||||
main._adjust_args_and_chdir(args)
|
||||
assert os.getcwd() == in_git_dir
|
||||
assert os.path.exists(args.repo)
|
||||
assert args.repo == 'foo'
|
||||
|
||||
|
||||
# Names of the command functions patched on `main` by the fixtures below.
FNS = (
    'autoupdate', 'clean', 'gc', 'hook_impl', 'install', 'install_hooks',
    'migrate_config', 'run', 'sample_config', 'uninstall',
)
# The CLI spelling of each command (underscores become dashes).
CMDS = tuple(fn.replace('_', '-') for fn in FNS)
|
||||
|
||||
|
||||
@pytest.fixture
def mock_commands():
    """Patch every command entry point on ``main``; yield the mocks.

    The previous implementation called ``.stop()`` on the started
    MagicMocks, which merely records a mock call and never undoes the
    patching, leaking the patches past the fixture.  Keep the patcher
    objects and stop those during teardown instead.
    """
    patchers = [mock.patch.object(main, fn) for fn in FNS]
    mcks = {fn: patcher.start() for fn, patcher in zip(FNS, patchers)}
    ret = auto_namedtuple(**mcks)
    yield ret
    for patcher in patchers:
        patcher.stop()
|
||||
|
||||
|
||||
@pytest.fixture
def argparse_parse_args_spy():
    """Record every ArgumentParser.parse_args call without changing it."""
    parse_args_mock = mock.Mock()

    # keep a reference so the spy can delegate to the real implementation
    original_parse_args = argparse.ArgumentParser.parse_args

    def fake_parse_args(self, args):
        # call our spy object, then the real parser
        parse_args_mock(args)
        return original_parse_args(self, args)

    with mock.patch.object(
            argparse.ArgumentParser, 'parse_args', fake_parse_args,
    ):
        yield parse_args_mock
|
||||
|
||||
|
||||
def assert_only_one_mock_called(mock_objs):
    """Assert that, across all given mocks, exactly one call was made."""
    assert sum(m.call_count for m in mock_objs) == 1
|
||||
|
||||
|
||||
def test_overall_help(mock_commands):
    """`pre-commit --help` exits via argparse's SystemExit."""
    with pytest.raises(SystemExit):
        main.main(['--help'])


def test_help_command(mock_commands, argparse_parse_args_spy):
    """Bare `help` is rewritten to `--help`."""
    with pytest.raises(SystemExit):
        main.main(['help'])

    argparse_parse_args_spy.assert_has_calls([
        mock.call(['help']),
        mock.call(['--help']),
    ])


def test_help_other_command(mock_commands, argparse_parse_args_spy):
    """`help CMD` is rewritten to `CMD --help`."""
    with pytest.raises(SystemExit):
        main.main(['help', 'run'])

    argparse_parse_args_spy.assert_has_calls([
        mock.call(['help', 'run']),
        mock.call(['run', '--help']),
    ])


@pytest.mark.parametrize('command', CMDS)
def test_all_cmds(command, mock_commands, mock_store_dir):
    """Each CLI command dispatches to exactly its own implementation."""
    main.main((command,))
    assert getattr(mock_commands, command.replace('-', '_')).call_count == 1
    assert_only_one_mock_called(mock_commands)


def test_try_repo(mock_store_dir):
    """`try-repo .` dispatches to main.try_repo."""
    with mock.patch.object(main, 'try_repo') as patch:
        main.main(('try-repo', '.'))
    assert patch.call_count == 1


def test_init_templatedir(mock_store_dir):
    """`init-templatedir DIR` dispatches to main.init_templatedir."""
    with mock.patch.object(main, 'init_templatedir') as patch:
        main.main(('init-templatedir', 'tdir'))
    assert patch.call_count == 1


def test_help_cmd_in_empty_directory(
        in_tmpdir,
        mock_commands,
        argparse_parse_args_spy,
):
    """`help` must still work when cwd is not a git repository."""
    with pytest.raises(SystemExit):
        main.main(['help', 'run'])

    argparse_parse_args_spy.assert_has_calls([
        mock.call(['help', 'run']),
        mock.call(['run', '--help']),
    ])


def test_expected_fatal_error_no_git_repo(in_tmpdir, cap_out, mock_store_dir):
    """Outside a git repo, a friendly FatalError message points at the log."""
    with pytest.raises(SystemExit):
        main.main([])
    log_file = os.path.join(mock_store_dir, 'pre-commit.log')
    cap_out_lines = cap_out.get().splitlines()
    assert (
        cap_out_lines[-2] ==
        'An error has occurred: FatalError: git failed. '
        'Is it installed, and are you in a Git repository directory?'
    )
    assert cap_out_lines[-1] == f'Check the log at {log_file}'
|
46
tests/make_archives_test.py
Normal file
46
tests/make_archives_test.py
Normal file
|
@ -0,0 +1,46 @@
|
|||
import tarfile
|
||||
|
||||
from pre_commit import git
|
||||
from pre_commit import make_archives
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
def test_make_archive(in_git_dir, tmpdir):
    """make_archive packages a rev's tree, excluding .git and later commits."""
    output_dir = tmpdir.join('output').ensure_dir()
    # Add a file to the git directory
    in_git_dir.join('foo').ensure()
    cmd_output('git', 'add', '.')
    git_commit()
    # We'll use this rev
    head_rev = git.head_rev('.')
    # And check that this file doesn't exist
    in_git_dir.join('bar').ensure()
    cmd_output('git', 'add', '.')
    git_commit()

    # Do the thing
    archive_path = make_archives.make_archive(
        'foo', in_git_dir.strpath, head_rev, output_dir.strpath,
    )

    expected = output_dir.join('foo.tar.gz')
    assert archive_path == expected.strpath
    assert expected.exists()

    extract_dir = tmpdir.join('extract').ensure_dir()
    with tarfile.open(archive_path) as tf:
        tf.extractall(extract_dir.strpath)

    # Verify the contents of the tar
    assert extract_dir.join('foo').isdir()
    assert extract_dir.join('foo/foo').exists()
    # neither git metadata nor the post-rev commit should be included
    assert not extract_dir.join('foo/.git').exists()
    assert not extract_dir.join('foo/bar').exists()


def test_main(tmpdir):
    """The CLI writes one .tar.gz per configured repo into --dest."""
    make_archives.main(('--dest', tmpdir.strpath))

    for archive, _, _ in make_archives.REPOS:
        assert tmpdir.join(f'{archive}.tar.gz').exists()
|
0
tests/meta_hooks/__init__.py
Normal file
0
tests/meta_hooks/__init__.py
Normal file
138
tests/meta_hooks/check_hooks_apply_test.py
Normal file
138
tests/meta_hooks/check_hooks_apply_test.py
Normal file
|
@ -0,0 +1,138 @@
|
|||
from pre_commit.meta_hooks import check_hooks_apply
|
||||
from testing.fixtures import add_config_to_repo
|
||||
|
||||
|
||||
def test_hook_excludes_everything(capsys, in_git_dir, mock_store_dir):
    """A hook whose `exclude` filters out every file is reported."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [
                    {
                        'id': 'check-useless-excludes',
                        'exclude': '.pre-commit-config.yaml',
                    },
                ],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_hooks_apply.main(()) == 1

    out, _ = capsys.readouterr()
    assert 'check-useless-excludes does not apply to this repository' in out


def test_hook_includes_nothing(capsys, in_git_dir, mock_store_dir):
    """A `files` pattern matching no repo files is reported."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [
                    {
                        'id': 'check-useless-excludes',
                        'files': 'foo',
                    },
                ],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_hooks_apply.main(()) == 1

    out, _ = capsys.readouterr()
    assert 'check-useless-excludes does not apply to this repository' in out


def test_hook_types_not_matched(capsys, in_git_dir, mock_store_dir):
    """A `types` filter that matches no files is reported."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [
                    {
                        'id': 'check-useless-excludes',
                        'types': ['python'],
                    },
                ],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_hooks_apply.main(()) == 1

    out, _ = capsys.readouterr()
    assert 'check-useless-excludes does not apply to this repository' in out


def test_hook_types_excludes_everything(capsys, in_git_dir, mock_store_dir):
    """An `exclude_types` filter removing every file is reported."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [
                    {
                        'id': 'check-useless-excludes',
                        'exclude_types': ['yaml'],
                    },
                ],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_hooks_apply.main(()) == 1

    out, _ = capsys.readouterr()
    assert 'check-useless-excludes does not apply to this repository' in out


def test_valid_exceptions(capsys, in_git_dir, mock_store_dir):
    """language: fail and always_run hooks are exempt from the check."""
    config = {
        'repos': [
            {
                'repo': 'local',
                'hooks': [
                    # applies to a file
                    {
                        'id': 'check-yaml',
                        'name': 'check yaml',
                        'entry': './check-yaml',
                        'language': 'script',
                        'files': r'\.yaml$',
                    },
                    # Should not be reported as an error due to language: fail
                    {
                        'id': 'changelogs-rst',
                        'name': 'changelogs must be rst',
                        'entry': 'changelog filenames must end in .rst',
                        'language': 'fail',
                        'files': r'changelog/.*(?<!\.rst)$',
                    },
                    # Should not be reported as an error due to always_run
                    {
                        'id': 'i-always-run',
                        'name': 'make check',
                        'entry': 'make check',
                        'language': 'system',
                        'files': '^$',
                        'always_run': True,
                    },
                ],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_hooks_apply.main(()) == 0

    out, _ = capsys.readouterr()
    assert out == ''
|
115
tests/meta_hooks/check_useless_excludes_test.py
Normal file
115
tests/meta_hooks/check_useless_excludes_test.py
Normal file
|
@ -0,0 +1,115 @@
|
|||
from pre_commit.meta_hooks import check_useless_excludes
|
||||
from testing.fixtures import add_config_to_repo
|
||||
|
||||
|
||||
def test_useless_exclude_global(capsys, in_git_dir):
    """A top-level exclude that matches nothing is reported."""
    config = {
        'exclude': 'foo',
        'repos': [
            {
                'repo': 'meta',
                'hooks': [{'id': 'check-useless-excludes'}],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_useless_excludes.main(()) == 1

    out, _ = capsys.readouterr()
    out = out.strip()
    assert "The global exclude pattern 'foo' does not match any files" == out


def test_useless_exclude_for_hook(capsys, in_git_dir):
    """A per-hook exclude that matches nothing is reported."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [{'id': 'check-useless-excludes', 'exclude': 'foo'}],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_useless_excludes.main(()) == 1

    out, _ = capsys.readouterr()
    out = out.strip()
    expected = (
        "The exclude pattern 'foo' for check-useless-excludes "
        'does not match any files'
    )
    assert expected == out


def test_useless_exclude_with_types_filter(capsys, in_git_dir):
    """An exclude made useless by a `types` filter is still reported."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [
                    {
                        'id': 'check-useless-excludes',
                        'exclude': '.pre-commit-config.yaml',
                        'types': ['python'],
                    },
                ],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_useless_excludes.main(()) == 1

    out, _ = capsys.readouterr()
    out = out.strip()
    expected = (
        "The exclude pattern '.pre-commit-config.yaml' for "
        'check-useless-excludes does not match any files'
    )
    assert expected == out


def test_no_excludes(capsys, in_git_dir):
    """With no excludes configured, the check passes silently."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [{'id': 'check-useless-excludes'}],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_useless_excludes.main(()) == 0

    out, _ = capsys.readouterr()
    assert out == ''


def test_valid_exclude(capsys, in_git_dir):
    """An exclude that matches an existing file passes silently."""
    config = {
        'repos': [
            {
                'repo': 'meta',
                'hooks': [
                    {
                        'id': 'check-useless-excludes',
                        'exclude': '.pre-commit-config.yaml',
                    },
                ],
            },
        ],
    }

    add_config_to_repo(in_git_dir.strpath, config)

    assert check_useless_excludes.main(()) == 0

    out, _ = capsys.readouterr()
    assert out == ''
|
6
tests/meta_hooks/identity_test.py
Normal file
6
tests/meta_hooks/identity_test.py
Normal file
|
@ -0,0 +1,6 @@
|
|||
from pre_commit.meta_hooks import identity
|
||||
|
||||
|
||||
def test_identity(cap_out):
    """The identity meta hook echoes its arguments, one per line, and passes."""
    assert not identity.main(('a', 'b', 'c'))
    assert cap_out.get() == 'a\nb\nc\n'
|
9
tests/output_test.py
Normal file
9
tests/output_test.py
Normal file
|
@ -0,0 +1,9 @@
|
|||
import io
|
||||
|
||||
from pre_commit import output
|
||||
|
||||
|
||||
def test_output_write_writes():
    """output.write encodes str input and writes bytes to the stream."""
    stream = io.BytesIO()
    output.write('hello world', stream)
    assert stream.getvalue() == b'hello world'
|
152
tests/parse_shebang_test.py
Normal file
152
tests/parse_shebang_test.py
Normal file
|
@ -0,0 +1,152 @@
|
|||
import contextlib
|
||||
import os.path
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import parse_shebang
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.envcontext import Var
|
||||
from pre_commit.util import make_executable
|
||||
|
||||
|
||||
def _echo_exe() -> str:
    """Return the absolute path of the `echo` executable on PATH."""
    exe = shutil.which('echo')
    assert exe is not None
    return exe


def test_file_doesnt_exist():
    assert parse_shebang.parse_filename('herp derp derp') == ()


def test_simple_case(tmpdir):
    x = tmpdir.join('f')
    x.write('#!/usr/bin/env echo')
    make_executable(x.strpath)
    assert parse_shebang.parse_filename(x.strpath) == ('echo',)


def test_find_executable_full_path():
    assert parse_shebang.find_executable(sys.executable) == sys.executable


def test_find_executable_on_path():
    assert parse_shebang.find_executable('echo') == _echo_exe()


def test_find_executable_not_found_none():
    assert parse_shebang.find_executable('not-a-real-executable') is None


def write_executable(shebang, filename='run'):
    """Create bin/<filename> containing only a shebang line; return its path."""
    os.mkdir('bin')
    path = os.path.join('bin', filename)
    with open(path, 'w') as f:
        f.write(f'#!{shebang}')
    make_executable(path)
    return path


@contextlib.contextmanager
def bin_on_path():
    """Temporarily prepend ./bin to PATH."""
    bindir = os.path.join(os.getcwd(), 'bin')
    with envcontext((('PATH', (bindir, os.pathsep, Var('PATH'))),)):
        yield


def test_find_executable_path_added(in_tmpdir):
    path = os.path.abspath(write_executable('/usr/bin/env sh'))
    assert parse_shebang.find_executable('run') is None
    with bin_on_path():
        assert parse_shebang.find_executable('run') == path


def test_find_executable_path_ext(in_tmpdir):
    """Windows exports PATHEXT as a list of extensions to automatically add
    to executables when doing PATH searching.
    """
    exe_path = os.path.abspath(
        write_executable('/usr/bin/env sh', filename='run.myext'),
    )
    env_path = {'PATH': os.path.dirname(exe_path)}
    env_path_ext = dict(env_path, PATHEXT=os.pathsep.join(('.exe', '.myext')))
    assert parse_shebang.find_executable('run') is None
    assert parse_shebang.find_executable('run', _environ=env_path) is None
    ret = parse_shebang.find_executable('run.myext', _environ=env_path)
    assert ret == exe_path
    ret = parse_shebang.find_executable('run', _environ=env_path_ext)
    assert ret == exe_path


def test_normexe_does_not_exist():
    with pytest.raises(OSError) as excinfo:
        parse_shebang.normexe('i-dont-exist-lol')
    assert excinfo.value.args == ('Executable `i-dont-exist-lol` not found',)


def test_normexe_does_not_exist_sep():
    with pytest.raises(OSError) as excinfo:
        parse_shebang.normexe('./i-dont-exist-lol')
    assert excinfo.value.args == ('Executable `./i-dont-exist-lol` not found',)


@pytest.mark.xfail(os.name == 'nt', reason='posix only')
def test_normexe_not_executable(tmpdir):  # pragma: win32 no cover
    tmpdir.join('exe').ensure()
    with tmpdir.as_cwd(), pytest.raises(OSError) as excinfo:
        parse_shebang.normexe('./exe')
    assert excinfo.value.args == ('Executable `./exe` is not executable',)


def test_normexe_is_a_directory(tmpdir):
    with tmpdir.as_cwd():
        tmpdir.join('exe').ensure_dir()
        exe = os.path.join('.', 'exe')
        with pytest.raises(OSError) as excinfo:
            parse_shebang.normexe(exe)
        msg, = excinfo.value.args
        assert msg == f'Executable `{exe}` is a directory'


def test_normexe_already_full_path():
    assert parse_shebang.normexe(sys.executable) == sys.executable


def test_normexe_gives_full_path():
    assert parse_shebang.normexe('echo') == _echo_exe()
    assert os.sep in _echo_exe()


def test_normalize_cmd_trivial():
    cmd = (_echo_exe(), 'hi')
    assert parse_shebang.normalize_cmd(cmd) == cmd


def test_normalize_cmd_PATH():
    cmd = ('echo', '--version')
    expected = (_echo_exe(), '--version')
    assert parse_shebang.normalize_cmd(cmd) == expected


def test_normalize_cmd_shebang(in_tmpdir):
    # a script whose shebang points directly at echo resolves to (echo, path)
    echo = _echo_exe().replace(os.sep, '/')
    path = write_executable(echo)
    assert parse_shebang.normalize_cmd((path,)) == (echo, path)


def test_normalize_cmd_PATH_shebang_full_path(in_tmpdir):
    echo = _echo_exe().replace(os.sep, '/')
    path = write_executable(echo)
    with bin_on_path():
        ret = parse_shebang.normalize_cmd(('run',))
        assert ret == (echo, os.path.abspath(path))


def test_normalize_cmd_PATH_shebang_PATH(in_tmpdir):
    echo = _echo_exe()
    path = write_executable('/usr/bin/env echo')
    with bin_on_path():
        ret = parse_shebang.normalize_cmd(('run',))
        assert ret == (echo, os.path.abspath(path))
|
44
tests/prefix_test.py
Normal file
44
tests/prefix_test.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
import os.path
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit.prefix import Prefix
|
||||
|
||||
|
||||
def norm_slash(*args):
    """Convert '/' separators in each argument to the platform separator."""
    normalized = []
    for arg in args:
        normalized.append(arg.replace('/', os.sep))
    return tuple(normalized)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ('prefix', 'path_end', 'expected_output'),
    (
        norm_slash('foo', '', 'foo'),
        norm_slash('foo', 'bar', 'foo/bar'),
        norm_slash('foo/bar', '../baz', 'foo/baz'),
        norm_slash('./', 'bar', 'bar'),
        norm_slash('./', '', '.'),
        norm_slash('/tmp/foo', '/tmp/bar', '/tmp/bar'),
    ),
)
def test_path(prefix, path_end, expected_output):
    """Prefix.path joins and normalizes relative to the prefix."""
    instance = Prefix(prefix)
    ret = instance.path(path_end)
    assert ret == expected_output


def test_path_multiple_args():
    """Multiple path components are joined like os.path.join."""
    instance = Prefix('foo')
    ret = instance.path('bar', 'baz')
    assert ret == os.path.join('foo', 'bar', 'baz')


def test_exists(tmpdir):
    assert not Prefix(str(tmpdir)).exists('foo')
    tmpdir.ensure('foo')
    assert Prefix(str(tmpdir)).exists('foo')


def test_star(tmpdir):
    """star('.txt') returns only the basenames with that suffix."""
    for f in ('a.txt', 'b.txt', 'c.py'):
        tmpdir.join(f).ensure()
    assert set(Prefix(str(tmpdir)).star('.txt')) == {'a.txt', 'b.txt'}
|
902
tests/repository_test.py
Normal file
902
tests/repository_test.py
Normal file
|
@ -0,0 +1,902 @@
|
|||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from unittest import mock
|
||||
|
||||
import cfgv
|
||||
import pytest
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.clientlib import CONFIG_SCHEMA
|
||||
from pre_commit.clientlib import load_manifest
|
||||
from pre_commit.envcontext import envcontext
|
||||
from pre_commit.hook import Hook
|
||||
from pre_commit.languages import golang
|
||||
from pre_commit.languages import helpers
|
||||
from pre_commit.languages import node
|
||||
from pre_commit.languages import python
|
||||
from pre_commit.languages import ruby
|
||||
from pre_commit.languages import rust
|
||||
from pre_commit.languages.all import languages
|
||||
from pre_commit.prefix import Prefix
|
||||
from pre_commit.repository import all_hooks
|
||||
from pre_commit.repository import install_hook_envs
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import cmd_output_b
|
||||
from testing.fixtures import make_config_from_repo
|
||||
from testing.fixtures import make_repo
|
||||
from testing.fixtures import modify_manifest
|
||||
from testing.util import cwd
|
||||
from testing.util import get_resource_path
|
||||
from testing.util import skipif_cant_run_docker
|
||||
from testing.util import skipif_cant_run_swift
|
||||
from testing.util import xfailif_no_venv
|
||||
from testing.util import xfailif_windows_no_ruby
|
||||
|
||||
|
||||
def _norm_out(b):
    """Normalize CRLF to LF in raw hook output bytes."""
    return b.replace(b'\r\n', b'\n')


def _hook_run(hook, filenames, color):
    """Run `hook` through its language's run_hook; return (retcode, output)."""
    return languages[hook.language].run_hook(hook, filenames, color)


def _get_hook_no_install(repo_config, store, hook_id):
    """Resolve a single hook from a repo config without installing its env."""
    config = {'repos': [repo_config]}
    config = cfgv.validate(config, CONFIG_SCHEMA)
    config = cfgv.apply_defaults(config, CONFIG_SCHEMA)
    hooks = all_hooks(config, store)
    # exactly one hook with the requested id is expected
    hook, = [hook for hook in hooks if hook.id == hook_id]
    return hook


def _get_hook(repo_config, store, hook_id):
    """Resolve a single hook and install its environment."""
    hook = _get_hook_no_install(repo_config, store, hook_id)
    install_hook_envs([hook], store)
    return hook


def _test_hook_repo(
        tempdir_factory,
        store,
        repo_path,
        hook_id,
        args,
        expected,
        expected_return_code=0,
        config_kwargs=None,
        color=False,
):
    """Clone a fixture repo, run one hook, and assert retcode and output."""
    path = make_repo(tempdir_factory, repo_path)
    config = make_config_from_repo(path, **(config_kwargs or {}))
    hook = _get_hook(config, store, hook_id)
    ret, out = _hook_run(hook, args, color=color)
    assert ret == expected_return_code
    assert _norm_out(out) == expected
|
||||
|
||||
|
||||
def test_conda_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'conda_hooks_repo',
|
||||
'sys-exec', [os.devnull],
|
||||
b'conda-default\n',
|
||||
)
|
||||
|
||||
|
||||
def test_conda_with_additional_dependencies_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'conda_hooks_repo',
|
||||
'additional-deps', [os.devnull],
|
||||
b'OK\n',
|
||||
config_kwargs={
|
||||
'hooks': [{
|
||||
'id': 'additional-deps',
|
||||
'args': ['-c', 'import mccabe; print("OK")'],
|
||||
'additional_dependencies': ['mccabe'],
|
||||
}],
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def test_local_conda_additional_dependencies(store):
|
||||
config = {
|
||||
'repo': 'local',
|
||||
'hooks': [{
|
||||
'id': 'local-conda',
|
||||
'name': 'local-conda',
|
||||
'entry': 'python',
|
||||
'language': 'conda',
|
||||
'args': ['-c', 'import mccabe; print("OK")'],
|
||||
'additional_dependencies': ['mccabe'],
|
||||
}],
|
||||
}
|
||||
hook = _get_hook(config, store, 'local-conda')
|
||||
ret, out = _hook_run(hook, (), color=False)
|
||||
assert ret == 0
|
||||
assert _norm_out(out) == b'OK\n'
|
||||
|
||||
|
||||
def test_python_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'python_hooks_repo',
|
||||
'foo', [os.devnull],
|
||||
f'[{os.devnull!r}]\nHello World\n'.encode(),
|
||||
)
|
||||
|
||||
|
||||
def test_python_hook_default_version(tempdir_factory, store):
|
||||
# make sure that this continues to work for platforms where default
|
||||
# language detection does not work
|
||||
with mock.patch.object(
|
||||
python, 'get_default_version', return_value=C.DEFAULT,
|
||||
):
|
||||
test_python_hook(tempdir_factory, store)
|
||||
|
||||
|
||||
def test_python_hook_args_with_spaces(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'python_hooks_repo',
|
||||
'foo',
|
||||
[],
|
||||
b"['i have spaces', 'and\"\\'quotes', '$and !this']\n"
|
||||
b'Hello World\n',
|
||||
config_kwargs={
|
||||
'hooks': [{
|
||||
'id': 'foo',
|
||||
'args': ['i have spaces', 'and"\'quotes', '$and !this'],
|
||||
}],
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def test_python_hook_weird_setup_cfg(in_git_dir, tempdir_factory, store):
|
||||
in_git_dir.join('setup.cfg').write('[install]\ninstall_scripts=/usr/sbin')
|
||||
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'python_hooks_repo',
|
||||
'foo', [os.devnull],
|
||||
f'[{os.devnull!r}]\nHello World\n'.encode(),
|
||||
)
|
||||
|
||||
|
||||
@xfailif_no_venv
|
||||
def test_python_venv(tempdir_factory, store): # pragma: no cover (no venv)
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'python_venv_hooks_repo',
|
||||
'foo', [os.devnull],
|
||||
f'[{os.devnull!r}]\nHello World\n'.encode(),
|
||||
)
|
||||
|
||||
|
||||
def test_switch_language_versions_doesnt_clobber(tempdir_factory, store):
|
||||
# We're using the python3 repo because it prints the python version
|
||||
path = make_repo(tempdir_factory, 'python3_hooks_repo')
|
||||
|
||||
def run_on_version(version, expected_output):
|
||||
config = make_config_from_repo(path)
|
||||
config['hooks'][0]['language_version'] = version
|
||||
hook = _get_hook(config, store, 'python3-hook')
|
||||
ret, out = _hook_run(hook, [], color=False)
|
||||
assert ret == 0
|
||||
assert _norm_out(out) == expected_output
|
||||
|
||||
run_on_version('python2', b'2\n[]\nHello World\n')
|
||||
run_on_version('python3', b'3\n[]\nHello World\n')
|
||||
|
||||
|
||||
def test_versioned_python_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'python3_hooks_repo',
|
||||
'python3-hook',
|
||||
[os.devnull],
|
||||
f'3\n[{os.devnull!r}]\nHello World\n'.encode(),
|
||||
)
|
||||
|
||||
|
||||
@skipif_cant_run_docker # pragma: win32 no cover
|
||||
def test_run_a_docker_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'docker_hooks_repo',
|
||||
'docker-hook',
|
||||
['Hello World from docker'], b'Hello World from docker\n',
|
||||
)
|
||||
|
||||
|
||||
@skipif_cant_run_docker # pragma: win32 no cover
|
||||
def test_run_a_docker_hook_with_entry_args(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'docker_hooks_repo',
|
||||
'docker-hook-arg',
|
||||
['Hello World from docker'], b'Hello World from docker',
|
||||
)
|
||||
|
||||
|
||||
@skipif_cant_run_docker # pragma: win32 no cover
|
||||
def test_run_a_failing_docker_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'docker_hooks_repo',
|
||||
'docker-hook-failing',
|
||||
['Hello World from docker'],
|
||||
mock.ANY, # an error message about `bork` not existing
|
||||
expected_return_code=127,
|
||||
)
|
||||
|
||||
|
||||
@skipif_cant_run_docker # pragma: win32 no cover
|
||||
@pytest.mark.parametrize('hook_id', ('echo-entrypoint', 'echo-cmd'))
|
||||
def test_run_a_docker_image_hook(tempdir_factory, store, hook_id):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'docker_image_hooks_repo',
|
||||
hook_id,
|
||||
['Hello World from docker'], b'Hello World from docker\n',
|
||||
)
|
||||
|
||||
|
||||
def test_run_a_node_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'node_hooks_repo',
|
||||
'foo', [os.devnull], b'Hello World\n',
|
||||
)
|
||||
|
||||
|
||||
def test_run_versioned_node_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'node_versioned_hooks_repo',
|
||||
'versioned-node-hook', [os.devnull], b'v9.3.0\nHello World\n',
|
||||
)
|
||||
|
||||
|
||||
@xfailif_windows_no_ruby
|
||||
def test_run_a_ruby_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'ruby_hooks_repo',
|
||||
'ruby_hook', [os.devnull], b'Hello world from a ruby hook\n',
|
||||
)
|
||||
|
||||
|
||||
@xfailif_windows_no_ruby
|
||||
def test_run_versioned_ruby_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'ruby_versioned_hooks_repo',
|
||||
'ruby_hook',
|
||||
[os.devnull],
|
||||
b'2.5.1\nHello world from a ruby hook\n',
|
||||
)
|
||||
|
||||
|
||||
@xfailif_windows_no_ruby
|
||||
def test_run_ruby_hook_with_disable_shared_gems(
|
||||
tempdir_factory,
|
||||
store,
|
||||
tmpdir,
|
||||
):
|
||||
"""Make sure a Gemfile in the project doesn't interfere."""
|
||||
tmpdir.join('Gemfile').write('gem "lol_hai"')
|
||||
tmpdir.join('.bundle').mkdir()
|
||||
tmpdir.join('.bundle', 'config').write(
|
||||
'BUNDLE_DISABLE_SHARED_GEMS: true\n'
|
||||
'BUNDLE_PATH: vendor/gem\n',
|
||||
)
|
||||
with cwd(tmpdir.strpath):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'ruby_versioned_hooks_repo',
|
||||
'ruby_hook',
|
||||
[os.devnull],
|
||||
b'2.5.1\nHello world from a ruby hook\n',
|
||||
)
|
||||
|
||||
|
||||
def test_system_hook_with_spaces(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'system_hook_with_spaces_repo',
|
||||
'system-hook-with-spaces', [os.devnull], b'Hello World\n',
|
||||
)
|
||||
|
||||
|
||||
@skipif_cant_run_swift # pragma: win32 no cover
|
||||
def test_swift_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'swift_hooks_repo',
|
||||
'swift-hooks-repo', [], b'Hello, world!\n',
|
||||
)
|
||||
|
||||
|
||||
def test_golang_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'golang_hooks_repo',
|
||||
'golang-hook', [], b'hello world\n',
|
||||
)
|
||||
|
||||
|
||||
def test_golang_hook_still_works_when_gobin_is_set(tempdir_factory, store):
|
||||
gobin_dir = tempdir_factory.get()
|
||||
with envcontext((('GOBIN', gobin_dir),)):
|
||||
test_golang_hook(tempdir_factory, store)
|
||||
assert os.listdir(gobin_dir) == []
|
||||
|
||||
|
||||
def test_rust_hook(tempdir_factory, store):
|
||||
_test_hook_repo(
|
||||
tempdir_factory, store, 'rust_hooks_repo',
|
||||
'rust-hook', [], b'hello world\n',
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dep', ('cli:shellharden:3.1.0', 'cli:shellharden'))
|
||||
def test_additional_rust_cli_dependencies_installed(
|
||||
tempdir_factory, store, dep,
|
||||
):
|
||||
path = make_repo(tempdir_factory, 'rust_hooks_repo')
|
||||
config = make_config_from_repo(path)
|
||||
# A small rust package with no dependencies.
|
||||
config['hooks'][0]['additional_dependencies'] = [dep]
|
||||
hook = _get_hook(config, store, 'rust-hook')
|
||||
binaries = os.listdir(
|
||||
hook.prefix.path(
|
||||
helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
|
||||
),
|
||||
)
|
||||
# normalize for windows
|
||||
binaries = [os.path.splitext(binary)[0] for binary in binaries]
|
||||
assert 'shellharden' in binaries
|
||||
|
||||
|
||||
def test_additional_rust_lib_dependencies_installed(
|
||||
tempdir_factory, store,
|
||||
):
|
||||
path = make_repo(tempdir_factory, 'rust_hooks_repo')
|
||||
config = make_config_from_repo(path)
|
||||
# A small rust package with no dependencies.
|
||||
deps = ['shellharden:3.1.0']
|
||||
config['hooks'][0]['additional_dependencies'] = deps
|
||||
hook = _get_hook(config, store, 'rust-hook')
|
||||
binaries = os.listdir(
|
||||
hook.prefix.path(
|
||||
helpers.environment_dir(rust.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
|
||||
),
|
||||
)
|
||||
# normalize for windows
|
||||
binaries = [os.path.splitext(binary)[0] for binary in binaries]
|
||||
assert 'rust-hello-world' in binaries
|
||||
assert 'shellharden' not in binaries
|
||||
|
||||
|
||||
def test_missing_executable(tempdir_factory, store):
    """A hook whose entry point does not exist fails with a clear message."""
    _test_hook_repo(
        tempdir_factory, store, 'not_found_exe',
        'not-found-exe', [os.devnull],
        b'Executable `i-dont-exist-lol` not found',
        expected_return_code=1,
    )
|
||||
|
||||
|
||||
def test_run_a_script_hook(tempdir_factory, store):
    """A `script` language hook runs and receives its filename argument."""
    _test_hook_repo(
        tempdir_factory, store, 'script_hooks_repo',
        'bash_hook', ['bar'], b'bar\nHello World\n',
    )
|
||||
|
||||
|
||||
def test_run_hook_with_spaced_args(tempdir_factory, store):
    """Arguments containing spaces are passed through without being split."""
    _test_hook_repo(
        tempdir_factory, store, 'arg_per_line_hooks_repo',
        'arg-per-line',
        ['foo bar', 'baz'],
        b'arg: hello\narg: world\narg: foo bar\narg: baz\n',
    )
|
||||
|
||||
|
||||
def test_run_hook_with_curly_braced_arguments(tempdir_factory, store):
    """`{...}` in configured args is passed literally, not format-expanded."""
    _test_hook_repo(
        tempdir_factory, store, 'arg_per_line_hooks_repo',
        'arg-per-line',
        [],
        b"arg: hi {1}\narg: I'm {a} problem\n",
        config_kwargs={
            'hooks': [{
                'id': 'arg-per-line',
                'args': ['hi {1}', "I'm {a} problem"],
            }],
        },
    )
|
||||
|
||||
|
||||
def test_intermixed_stdout_stderr(tempdir_factory, store):
    """stdout and stderr of a hook are captured interleaved, in write order."""
    _test_hook_repo(
        tempdir_factory, store, 'stdout_stderr_repo',
        'stdout-stderr',
        [],
        b'0\n1\n2\n3\n4\n5\n',
    )
|
||||
|
||||
|
||||
@pytest.mark.xfail(os.name == 'nt', reason='ptys are posix-only')
def test_output_isatty(tempdir_factory, store):
    """With color=True the hook sees tty stdout/stderr (run under a pty)."""
    _test_hook_repo(
        tempdir_factory, store, 'stdout_stderr_repo',
        'tty-check',
        [],
        b'stdin: False\nstdout: True\nstderr: True\n',
        color=True,
    )
|
||||
|
||||
|
||||
def _make_grep_repo(entry, store, args=()):
    """Build and install a local `pygrep` hook matching pattern `entry`."""
    hook_def = {
        'id': 'grep-hook',
        'name': 'grep-hook',
        'language': 'pygrep',
        'entry': entry,
        'args': args,
        'types': ['text'],
    }
    config = {'repo': 'local', 'hooks': [hook_def]}
    return _get_hook(config, store, 'grep-hook')
|
||||
|
||||
|
||||
@pytest.fixture
def greppable_files(tmpdir):
    """A fresh git repo containing three small text files to grep over."""
    with tmpdir.as_cwd():
        cmd_output_b('git', 'init', '.')
        contents = {
            'f1': b"hello'hi\nworld\n",
            'f2': b'foo\nbar\nbaz\n',
            'f3': b'[WARN] hi\n',
        }
        for filename, data in contents.items():
            tmpdir.join(filename).write_binary(data)
        yield tmpdir
|
||||
|
||||
|
||||
def test_grep_hook_matching(greppable_files, store):
    """A matching pattern fails the hook and reports file:line:content."""
    hook = _make_grep_repo('ello', store)
    ret, out = _hook_run(hook, ('f1', 'f2', 'f3'), color=False)
    assert ret == 1
    assert _norm_out(out) == b"f1:1:hello'hi\n"
|
||||
|
||||
|
||||
def test_grep_hook_case_insensitive(greppable_files, store):
    """`-i` makes the pygrep pattern match case-insensitively."""
    hook = _make_grep_repo('ELLO', store, args=['-i'])
    ret, out = _hook_run(hook, ('f1', 'f2', 'f3'), color=False)
    assert ret == 1
    assert _norm_out(out) == b"f1:1:hello'hi\n"
|
||||
|
||||
|
||||
@pytest.mark.parametrize('regex', ('nope', "foo'bar", r'^\[INFO\]'))
def test_grep_hook_not_matching(regex, greppable_files, store):
    """Non-matching patterns (including quotes and anchors) pass silently."""
    hook = _make_grep_repo(regex, store)
    ret, out = _hook_run(hook, ('f1', 'f2', 'f3'), color=False)
    assert (ret, out) == (0, b'')
|
||||
|
||||
|
||||
def _norm_pwd(path):
    """Return `path` as bash's `pwd` prints it (bytes, trailing ws stripped).

    Under windows, bash's temp and windows temp differ; this normalizes a
    path to the bash /tmp view of it.
    """
    _, stdout, _ = cmd_output_b('bash', '-c', f"cd '{path}' && pwd")
    return stdout.strip()
|
||||
|
||||
|
||||
def test_cwd_of_hook(in_git_dir, tempdir_factory, store):
    """Hooks run with cwd at the git root.

    Note: this doubles as a test for `system` hooks.
    """
    _test_hook_repo(
        tempdir_factory, store, 'prints_cwd_repo',
        'prints_cwd', ['-L'], _norm_pwd(in_git_dir.strpath) + b'\n',
    )
|
||||
|
||||
|
||||
def test_lots_of_files(tempdir_factory, store):
    """Many filenames (beyond one argv) are batched without crashing."""
    _test_hook_repo(
        tempdir_factory, store, 'script_hooks_repo',
        'bash_hook', [os.devnull] * 15000, mock.ANY,
    )
|
||||
|
||||
|
||||
def test_additional_dependencies_roll_forward(tempdir_factory, store):
    """Adding additional_dependencies creates a new env; the old one is kept."""
    path = make_repo(tempdir_factory, 'python_hooks_repo')

    config1 = make_config_from_repo(path)
    hook1 = _get_hook(config1, store, 'foo')
    with python.in_env(hook1.prefix, hook1.language_version):
        assert 'mccabe' not in cmd_output('pip', 'freeze', '-l')[1]

    # Make another repo with additional dependencies
    config2 = make_config_from_repo(path)
    config2['hooks'][0]['additional_dependencies'] = ['mccabe']
    hook2 = _get_hook(config2, store, 'foo')
    with python.in_env(hook2.prefix, hook2.language_version):
        assert 'mccabe' in cmd_output('pip', 'freeze', '-l')[1]

    # should not have affected original
    with python.in_env(hook1.prefix, hook1.language_version):
        assert 'mccabe' not in cmd_output('pip', 'freeze', '-l')[1]
|
||||
|
||||
|
||||
@xfailif_windows_no_ruby  # pragma: win32 no cover
def test_additional_ruby_dependencies_installed(tempdir_factory, store):
    """Ruby additional_dependencies are gem-installed into the hook env."""
    path = make_repo(tempdir_factory, 'ruby_hooks_repo')
    config = make_config_from_repo(path)
    config['hooks'][0]['additional_dependencies'] = ['tins']
    hook = _get_hook(config, store, 'ruby_hook')
    with ruby.in_env(hook.prefix, hook.language_version):
        output = cmd_output('gem', 'list', '--local')[1]
        assert 'tins' in output
|
||||
|
||||
|
||||
def test_additional_node_dependencies_installed(tempdir_factory, store):
    """Node additional_dependencies are npm-installed into the hook env."""
    path = make_repo(tempdir_factory, 'node_hooks_repo')
    config = make_config_from_repo(path)
    # Careful to choose a small package that's not depped by npm
    config['hooks'][0]['additional_dependencies'] = ['lodash']
    hook = _get_hook(config, store, 'foo')
    with node.in_env(hook.prefix, hook.language_version):
        output = cmd_output('npm', 'ls', '-g')[1]
        assert 'lodash' in output
|
||||
|
||||
|
||||
def test_additional_golang_dependencies_installed(
        tempdir_factory, store,
):
    """Go additional_dependencies are `go get`-installed into the env."""
    path = make_repo(tempdir_factory, 'golang_hooks_repo')
    config = make_config_from_repo(path)
    # A small go package
    config['hooks'][0]['additional_dependencies'] = [
        'github.com/golang/example/hello',
    ]
    hook = _get_hook(config, store, 'golang-hook')
    bin_dir = hook.prefix.path(
        helpers.environment_dir(golang.ENVIRONMENT_DIR, C.DEFAULT), 'bin',
    )
    # strip extensions (`.exe`) so the check also holds on windows
    installed = {os.path.splitext(exe)[0] for exe in os.listdir(bin_dir)}
    assert 'hello' in installed
|
||||
|
||||
|
||||
def test_local_golang_additional_dependencies(store):
    """A `local` golang hook gets its entry entirely from its dependencies."""
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'hello',
            'name': 'hello',
            'entry': 'hello',
            'language': 'golang',
            'additional_dependencies': ['github.com/golang/example/hello'],
        }],
    }
    hook = _get_hook(config, store, 'hello')
    ret, out = _hook_run(hook, (), color=False)
    assert ret == 0
    assert _norm_out(out) == b'Hello, Go examples!\n'
|
||||
|
||||
|
||||
def test_local_rust_additional_dependencies(store):
    """A `local` rust hook gets its entry from a `cli:` dependency."""
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'hello',
            'name': 'hello',
            'entry': 'hello',
            'language': 'rust',
            'additional_dependencies': ['cli:hello-cli:0.2.2'],
        }],
    }
    hook = _get_hook(config, store, 'hello')
    ret, out = _hook_run(hook, (), color=False)
    assert ret == 0
    assert _norm_out(out) == b'Hello World!\n'
|
||||
|
||||
|
||||
def test_fail_hooks(store):
    """`fail` hooks print their entry plus the offending filenames and fail."""
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'fail',
            'name': 'fail',
            'language': 'fail',
            'entry': 'make sure to name changelogs as .rst!',
            # matches changelog files that do NOT end in .rst
            'files': r'changelog/.*(?<!\.rst)$',
        }],
    }
    hook = _get_hook(config, store, 'fail')
    ret, out = _hook_run(
        hook, ('changelog/123.bugfix', 'changelog/wat'), color=False,
    )
    assert ret == 1
    assert out == (
        b'make sure to name changelogs as .rst!\n'
        b'\n'
        b'changelog/123.bugfix\n'
        b'changelog/wat\n'
    )
|
||||
|
||||
|
||||
def test_unknown_keys(store, fake_log_handler):
    """Unrecognized hook config keys produce a warning, not an error."""
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'too-much',
            'name': 'too much',
            'hello': 'world',
            'foo': 'bar',
            'language': 'system',
            'entry': 'true',
        }],
    }
    _get_hook(config, store, 'too-much')
    expected = 'Unexpected key(s) present on local => too-much: foo, hello'
    assert fake_log_handler.handle.call_args[0][0].msg == expected
|
||||
|
||||
|
||||
def test_reinstall(tempdir_factory, store, log_info_mock):
    """A second run against an installed hook does not reinstall."""
    path = make_repo(tempdir_factory, 'python_hooks_repo')
    config = make_config_from_repo(path)
    _get_hook(config, store, 'foo')
    # We print some logging during clone (1) + install (3)
    assert log_info_mock.call_count == 4
    log_info_mock.reset_mock()
    # Reinstall on another run should not trigger another install
    _get_hook(config, store, 'foo')
    assert log_info_mock.call_count == 0
|
||||
|
||||
|
||||
def test_control_c_control_c_on_install(tempdir_factory, store):
    """Regression test for #186.

    A double ^C during install must leave a recoverable (re-installable)
    environment behind rather than a corrupt one.
    """
    path = make_repo(tempdir_factory, 'python_hooks_repo')
    config = make_config_from_repo(path)
    hooks = [_get_hook_no_install(config, store, 'foo')]

    # subclass so we only catch *our* simulated interrupts below
    class MyKeyboardInterrupt(KeyboardInterrupt):
        pass

    # To simulate a killed install, we'll make PythonEnv.run raise ^C
    # and then to simulate a second ^C during cleanup, we'll make shutil.rmtree
    # raise as well.
    with pytest.raises(MyKeyboardInterrupt):
        with mock.patch.object(
                helpers, 'run_setup_cmd', side_effect=MyKeyboardInterrupt,
        ):
            with mock.patch.object(
                    shutil, 'rmtree', side_effect=MyKeyboardInterrupt,
            ):
                install_hook_envs(hooks, store)

    # Should have made an environment, however this environment is broken!
    hook, = hooks
    assert hook.prefix.exists(
        helpers.environment_dir(python.ENVIRONMENT_DIR, hook.language_version),
    )

    # However, it should be perfectly runnable (reinstall after botched
    # install)
    install_hook_envs(hooks, store)
    ret, out = _hook_run(hook, (), color=False)
    assert ret == 0
|
||||
|
||||
|
||||
def test_invalidated_virtualenv(tempdir_factory, store):
    """A broken cached virtualenv is rebuilt instead of failing every hook.

    A cached virtualenv may become invalidated if the system python upgrades;
    this should not cause every hook in that virtualenv to fail.
    """
    path = make_repo(tempdir_factory, 'python_hooks_repo')
    config = make_config_from_repo(path)
    hook = _get_hook(config, store, 'foo')

    # Simulate breaking of the virtualenv by removing its site files
    libdir = hook.prefix.path(
        helpers.environment_dir(python.ENVIRONMENT_DIR, hook.language_version),
        'lib', hook.language_version,
    )
    paths = [
        os.path.join(libdir, p) for p in ('site.py', 'site.pyc', '__pycache__')
    ]
    cmd_output_b('rm', '-rf', *paths)

    # pre-commit should rebuild the virtualenv and it should be runnable
    hook = _get_hook(config, store, 'foo')
    ret, out = _hook_run(hook, (), color=False)
    assert ret == 0
|
||||
|
||||
|
||||
def test_really_long_file_paths(tempdir_factory, store):
    """Install works from a repo whose path approaches OS length limits."""
    base_path = tempdir_factory.get()
    really_long_path = os.path.join(base_path, 'really_long' * 10)
    cmd_output_b('git', 'init', really_long_path)

    path = make_repo(tempdir_factory, 'python_hooks_repo')
    config = make_config_from_repo(path)

    with cwd(really_long_path):
        _get_hook(config, store, 'foo')
|
||||
|
||||
|
||||
def test_config_overrides_repo_specifics(tempdir_factory, store):
    """Hook settings in the config override the repo manifest's defaults."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    config = make_config_from_repo(path)

    hook = _get_hook(config, store, 'bash_hook')
    assert hook.files == ''
    # Set the file regex to something else
    config['hooks'][0]['files'] = '\\.sh$'
    hook = _get_hook(config, store, 'bash_hook')
    assert hook.files == '\\.sh$'
|
||||
|
||||
|
||||
def _create_repo_with_tags(tempdir_factory, src, tag):
    """Copy the `src` fixture repo and tag its HEAD with `tag`."""
    repo_path = make_repo(tempdir_factory, src)
    cmd_output_b('git', 'tag', tag, cwd=repo_path)
    return repo_path
|
||||
|
||||
|
||||
def test_tags_on_repositories(in_tmpdir, tempdir_factory, store):
    """`rev` may be a git tag (not just a sha) for multiple repos at once."""
    tag = 'v1.1'
    git1 = _create_repo_with_tags(tempdir_factory, 'prints_cwd_repo', tag)
    git2 = _create_repo_with_tags(tempdir_factory, 'script_hooks_repo', tag)

    config1 = make_config_from_repo(git1, rev=tag)
    hook1 = _get_hook(config1, store, 'prints_cwd')
    ret1, out1 = _hook_run(hook1, ('-L',), color=False)
    assert ret1 == 0
    assert out1.strip() == _norm_pwd(in_tmpdir)

    config2 = make_config_from_repo(git2, rev=tag)
    hook2 = _get_hook(config2, store, 'bash_hook')
    ret2, out2 = _hook_run(hook2, ('bar',), color=False)
    assert ret2 == 0
    assert out2 == b'bar\nHello World\n'
|
||||
|
||||
|
||||
@pytest.fixture
def local_python_config():
    """A "local" hooks repo config that installs our python hooks repo.

    Each manifest hook is rewritten so the fixture repo path itself is its
    only additional dependency.
    """
    repo_path = get_resource_path('python_hooks_repo')
    manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
    hooks = [
        dict(hook, additional_dependencies=[repo_path]) for hook in manifest
    ]
    return {'repo': 'local', 'hooks': hooks}
|
||||
|
||||
|
||||
@pytest.mark.xfail(  # pragma: win32 no cover
    sys.platform == 'win32',
    reason='microsoft/azure-pipelines-image-generation#989',
)
def test_local_python_repo(store, local_python_config):
    """A local python hook installs and runs; its version pins to the interp."""
    hook = _get_hook(local_python_config, store, 'foo')
    # language_version should have been adjusted to the interpreter version
    assert hook.language_version != C.DEFAULT
    ret, out = _hook_run(hook, ('filename',), color=False)
    assert ret == 0
    assert _norm_out(out) == b"['filename']\nHello World\n"
|
||||
|
||||
|
||||
def test_default_language_version(store, local_python_config):
    """Top-level default_language_version applies only when a hook is unset."""
    config: Dict[str, Any] = {
        'default_language_version': {'python': 'fake'},
        'default_stages': ['commit'],
        'repos': [local_python_config],
    }

    # `language_version` was not set, should default
    hook, = all_hooks(config, store)
    assert hook.language_version == 'fake'

    # `language_version` is set, should not default
    config['repos'][0]['hooks'][0]['language_version'] = 'fake2'
    hook, = all_hooks(config, store)
    assert hook.language_version == 'fake2'
|
||||
|
||||
|
||||
def test_default_stages(store, local_python_config):
    """Top-level default_stages applies only when a hook leaves stages unset."""
    config: Dict[str, Any] = {
        'default_language_version': {'python': C.DEFAULT},
        'default_stages': ['commit'],
        'repos': [local_python_config],
    }

    # `stages` was not set, should default
    hook, = all_hooks(config, store)
    assert hook.stages == ['commit']

    # `stages` is set, should not default
    config['repos'][0]['hooks'][0]['stages'] = ['push']
    hook, = all_hooks(config, store)
    assert hook.stages == ['push']
|
||||
|
||||
|
||||
def test_hook_id_not_present(tempdir_factory, store, fake_log_handler):
    """Referencing an id missing from the repo exits with a helpful message."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    config = make_config_from_repo(path)
    config['hooks'][0]['id'] = 'i-dont-exist'
    with pytest.raises(SystemExit):
        _get_hook(config, store, 'i-dont-exist')
    assert fake_log_handler.handle.call_args[0][0].msg == (
        f'`i-dont-exist` is not present in repository file://{path}. '
        f'Typo? Perhaps it is introduced in a newer version? '
        f'Often `pre-commit autoupdate` fixes this.'
    )
|
||||
|
||||
|
||||
def test_too_new_version(tempdir_factory, store, fake_log_handler):
    """A hook requiring a newer pre-commit exits with an upgrade hint."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    with modify_manifest(path) as manifest:
        manifest[0]['minimum_pre_commit_version'] = '999.0.0'
    config = make_config_from_repo(path)
    with pytest.raises(SystemExit):
        _get_hook(config, store, 'bash_hook')
    msg = fake_log_handler.handle.call_args[0][0].msg
    assert re.match(
        r'^The hook `bash_hook` requires pre-commit version 999\.0\.0 but '
        r'version \d+\.\d+\.\d+ is installed. '
        r'Perhaps run `pip install --upgrade pre-commit`\.$',
        msg,
    )
|
||||
|
||||
|
||||
@pytest.mark.parametrize('version', ('0.1.0', C.VERSION))
def test_versions_ok(tempdir_factory, store, version):
    """minimum_pre_commit_version at or below the current version passes."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    with modify_manifest(path) as manifest:
        manifest[0]['minimum_pre_commit_version'] = version
    config = make_config_from_repo(path)
    # Should succeed
    _get_hook(config, store, 'bash_hook')
|
||||
|
||||
|
||||
def test_manifest_hooks(tempdir_factory, store):
    """A loaded hook is fully populated with manifest defaults, field by field."""
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    config = make_config_from_repo(path)
    hook = _get_hook(config, store, 'bash_hook')

    assert hook == Hook(
        src=f'file://{path}',
        prefix=Prefix(mock.ANY),
        additional_dependencies=[],
        alias='',
        always_run=False,
        args=[],
        description='',
        entry='bin/hook.sh',
        exclude='^$',
        exclude_types=[],
        files='',
        id='bash_hook',
        language='script',
        language_version='default',
        log_file='',
        minimum_pre_commit_version='0',
        name='Bash hook',
        pass_filenames=True,
        require_serial=False,
        stages=(
            'commit', 'merge-commit', 'prepare-commit-msg', 'commit-msg',
            'manual', 'post-checkout', 'push',
        ),
        types=['file'],
        verbose=False,
    )
|
||||
|
||||
|
||||
def test_perl_hook(tempdir_factory, store):
    """A basic perl hook installs and produces its expected output."""
    _test_hook_repo(
        tempdir_factory, store, 'perl_hooks_repo',
        'perl-hook', [], b'Hello from perl-commit Perl!\n',
    )
|
||||
|
||||
|
||||
def test_local_perl_additional_dependencies(store):
    """A `local` perl hook gets its entry from a CPAN additional dependency."""
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'hello',
            'name': 'hello',
            'entry': 'perltidy --version',
            'language': 'perl',
            'additional_dependencies': ['SHANCOCK/Perl-Tidy-20200110.tar.gz'],
        }],
    }
    hook = _get_hook(config, store, 'hello')
    ret, out = _hook_run(hook, (), color=False)
    assert ret == 0
    assert _norm_out(out).startswith(b'This is perltidy, v20200110')
|
349
tests/staged_files_only_test.py
Normal file
349
tests/staged_files_only_test.py
Normal file
|
@ -0,0 +1,349 @@
|
|||
import itertools
|
||||
import os.path
|
||||
import shutil
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import git
|
||||
from pre_commit.staged_files_only import staged_files_only
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
from testing.fixtures import git_dir
|
||||
from testing.util import cwd
|
||||
from testing.util import get_resource_path
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
# Eight numbered lines, each newline-terminated: '1\n2\n...\n8\n'.
FOO_CONTENTS = ''.join(f'{line_no}\n' for line_no in range(1, 9))
|
||||
|
||||
|
||||
@pytest.fixture
def patch_dir(tempdir_factory):
    """A fresh temporary directory for staged_files_only to stash patches in."""
    return tempdir_factory.get()
|
||||
|
||||
|
||||
def get_short_git_status():
    """Map filename -> two-letter status code from `git status -s`."""
    status_output = cmd_output('git', 'status', '-s')[1]
    return {
        filename: state
        for state, filename in (
            line.split() for line in status_output.splitlines()
        )
    }
|
||||
|
||||
|
||||
@pytest.fixture
def foo_staged(in_git_dir):
    """A git repo with file `foo` written and staged (status `A`)."""
    foo = in_git_dir.join('foo')
    foo.write(FOO_CONTENTS)
    cmd_output('git', 'add', 'foo')
    yield auto_namedtuple(path=in_git_dir.strpath, foo_filename=foo.strpath)
|
||||
|
||||
|
||||
def _test_foo_state(
        path,
        foo_contents=FOO_CONTENTS,
        status='A',
        encoding='UTF-8',
):
    """Assert `foo` exists with the given contents and git status code."""
    assert os.path.exists(path.foo_filename)
    with open(path.foo_filename, encoding=encoding) as f:
        assert f.read() == foo_contents
    actual_status = get_short_git_status()['foo']
    assert status == actual_status
|
||||
|
||||
|
||||
def test_foo_staged(foo_staged):
    """Sanity-check the fixture: foo is staged and unmodified."""
    _test_foo_state(foo_staged)
|
||||
|
||||
|
||||
def test_foo_nothing_unstaged(foo_staged, patch_dir):
    """With no unstaged changes, staged_files_only is a no-op."""
    with staged_files_only(patch_dir):
        _test_foo_state(foo_staged)
    _test_foo_state(foo_staged)
|
||||
|
||||
|
||||
def test_foo_something_unstaged(foo_staged, patch_dir):
    """Unstaged edits are stashed inside the context and restored after."""
    with open(foo_staged.foo_filename, 'w') as foo_file:
        foo_file.write('herp\nderp\n')

    _test_foo_state(foo_staged, 'herp\nderp\n', 'AM')

    with staged_files_only(patch_dir):
        # inside the context only the staged contents are visible
        _test_foo_state(foo_staged)

    _test_foo_state(foo_staged, 'herp\nderp\n', 'AM')
|
||||
|
||||
|
||||
def test_does_not_crash_patch_dir_does_not_exist(foo_staged, patch_dir):
    """A missing patch directory is created on demand, not crashed on."""
    with open(foo_staged.foo_filename, 'w') as foo_file:
        foo_file.write('hello\nworld\n')

    shutil.rmtree(patch_dir)
    with staged_files_only(patch_dir):
        pass
|
||||
|
||||
|
||||
def test_something_unstaged_ext_diff_tool(foo_staged, patch_dir, tmpdir):
    """An external diff.external tool must not corrupt the stashed patch."""
    diff_tool = tmpdir.join('diff-tool.sh')
    diff_tool.write('#!/usr/bin/env bash\necho "$@"\n')
    cmd_output('git', 'config', 'diff.external', diff_tool.strpath)
    test_foo_something_unstaged(foo_staged, patch_dir)
|
||||
|
||||
|
||||
def test_foo_something_unstaged_diff_color_always(foo_staged, patch_dir):
    """color.diff=always must not corrupt the stashed patch."""
    cmd_output('git', 'config', '--local', 'color.diff', 'always')
    test_foo_something_unstaged(foo_staged, patch_dir)
|
||||
|
||||
|
||||
def test_foo_both_modify_non_conflicting(foo_staged, patch_dir):
    """Non-conflicting hook edits merge cleanly with the restored stash."""
    with open(foo_staged.foo_filename, 'w') as foo_file:
        foo_file.write(f'{FOO_CONTENTS}9\n')

    _test_foo_state(foo_staged, f'{FOO_CONTENTS}9\n', 'AM')

    with staged_files_only(patch_dir):
        _test_foo_state(foo_staged)

        # Modify the file as part of the "pre-commit"
        with open(foo_staged.foo_filename, 'w') as foo_file:
            foo_file.write(FOO_CONTENTS.replace('1', 'a'))

        _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')

    # both the hook's edit and the stashed trailing line survive
    _test_foo_state(foo_staged, f'{FOO_CONTENTS.replace("1", "a")}9\n', 'AM')
|
||||
|
||||
|
||||
def test_foo_both_modify_conflicting(foo_staged, patch_dir):
    """When the hook's edit conflicts with the stash, the stash wins (rollback)."""
    with open(foo_staged.foo_filename, 'w') as foo_file:
        foo_file.write(FOO_CONTENTS.replace('1', 'a'))

    _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')

    with staged_files_only(patch_dir):
        _test_foo_state(foo_staged)

        # Modify in the same place as the stashed diff
        with open(foo_staged.foo_filename, 'w') as foo_file:
            foo_file.write(FOO_CONTENTS.replace('1', 'b'))

        _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'b'), 'AM')

    # the hook's conflicting edit is discarded in favor of the user's
    _test_foo_state(foo_staged, FOO_CONTENTS.replace('1', 'a'), 'AM')
|
||||
|
||||
|
||||
@pytest.fixture
def img_staged(in_git_dir):
    """A git repo with binary file `img.jpg` (img1.jpg) staged."""
    img = in_git_dir.join('img.jpg')
    shutil.copy(get_resource_path('img1.jpg'), img.strpath)
    cmd_output('git', 'add', 'img.jpg')
    yield auto_namedtuple(path=in_git_dir.strpath, img_filename=img.strpath)
|
||||
|
||||
|
||||
def _test_img_state(path, expected_file='img1.jpg', status='A'):
    """Assert img.jpg byte-matches a resource file and has the git status."""
    assert os.path.exists(path.img_filename)
    with open(path.img_filename, 'rb') as f1:
        with open(get_resource_path(expected_file), 'rb') as f2:
            assert f1.read() == f2.read()
    actual_status = get_short_git_status()['img.jpg']
    assert status == actual_status
|
||||
|
||||
|
||||
def test_img_staged(img_staged):
    """Sanity-check the fixture: the image is staged and unmodified."""
    _test_img_state(img_staged)
|
||||
|
||||
|
||||
def test_img_nothing_unstaged(img_staged, patch_dir):
    """With no unstaged binary changes, staged_files_only is a no-op."""
    with staged_files_only(patch_dir):
        _test_img_state(img_staged)
    _test_img_state(img_staged)
|
||||
|
||||
|
||||
def test_img_something_unstaged(img_staged, patch_dir):
    """Unstaged binary changes are stashed and restored around the context."""
    shutil.copy(get_resource_path('img2.jpg'), img_staged.img_filename)

    _test_img_state(img_staged, 'img2.jpg', 'AM')

    with staged_files_only(patch_dir):
        _test_img_state(img_staged)

    _test_img_state(img_staged, 'img2.jpg', 'AM')
|
||||
|
||||
|
||||
def test_img_conflict(img_staged, patch_dir):
    """Admittedly, this shouldn't happen, but just in case.

    A hook's conflicting binary edit is rolled back to the user's version.
    """
    shutil.copy(get_resource_path('img2.jpg'), img_staged.img_filename)

    _test_img_state(img_staged, 'img2.jpg', 'AM')

    with staged_files_only(patch_dir):
        _test_img_state(img_staged)
        # simulate a hook replacing the image inside the context
        shutil.copy(get_resource_path('img3.jpg'), img_staged.img_filename)
        _test_img_state(img_staged, 'img3.jpg', 'AM')

    _test_img_state(img_staged, 'img2.jpg', 'AM')
|
||||
|
||||
|
||||
@pytest.fixture
def submodule_with_commits(tempdir_factory):
    """A git repo with two commits; yields (path, rev1, rev2)."""
    path = git_dir(tempdir_factory)
    with cwd(path):
        git_commit()
        rev1 = cmd_output('git', 'rev-parse', 'HEAD')[1].strip()
        git_commit()
        rev2 = cmd_output('git', 'rev-parse', 'HEAD')[1].strip()
        yield auto_namedtuple(path=path, rev1=rev1, rev2=rev2)
|
||||
|
||||
|
||||
def checkout_submodule(rev):
    """Check out `rev` inside the `sub` submodule of the current repo."""
    cmd_output('git', 'checkout', rev, cwd='sub')
|
||||
|
||||
|
||||
@pytest.fixture
def sub_staged(submodule_with_commits, tempdir_factory):
    """A git repo with submodule `sub` at rev1, staged."""
    path = git_dir(tempdir_factory)
    with cwd(path):
        cmd_output(
            'git', 'submodule', 'add', submodule_with_commits.path, 'sub',
        )
        checkout_submodule(submodule_with_commits.rev1)
        cmd_output('git', 'add', 'sub')
        yield auto_namedtuple(
            path=path,
            sub_path=os.path.join(path, 'sub'),
            submodule=submodule_with_commits,
        )
|
||||
|
||||
|
||||
def _test_sub_state(path, rev='rev1', status='A'):
    """Assert the submodule is at the named rev with the given git status."""
    assert os.path.exists(path.sub_path)
    with cwd(path.sub_path):
        actual_rev = cmd_output('git', 'rev-parse', 'HEAD')[1].strip()
    assert actual_rev == getattr(path.submodule, rev)
    actual_status = get_short_git_status()['sub']
    assert actual_status == status
|
||||
|
||||
|
||||
def test_sub_staged(sub_staged):
    """Sanity-check the fixture: the submodule is staged at rev1."""
    _test_sub_state(sub_staged)
|
||||
|
||||
|
||||
def test_sub_nothing_unstaged(sub_staged, patch_dir):
    """With no unstaged submodule change, staged_files_only is a no-op."""
    with staged_files_only(patch_dir):
        _test_sub_state(sub_staged)
    _test_sub_state(sub_staged)
|
||||
|
||||
|
||||
def test_sub_something_unstaged(sub_staged, patch_dir):
    """Submodule pointer changes are deliberately left alone by the stash."""
    checkout_submodule(sub_staged.submodule.rev2)

    _test_sub_state(sub_staged, 'rev2', 'AM')

    with staged_files_only(patch_dir):
        # This is different from others, we don't want to touch subs
        _test_sub_state(sub_staged, 'rev2', 'AM')

    _test_sub_state(sub_staged, 'rev2', 'AM')
|
||||
|
||||
|
||||
def test_stage_utf8_changes(foo_staged, patch_dir):
    """Non-ascii (UTF-8) unstaged content round-trips through the stash."""
    contents = '\u2603'
    with open('foo', 'w', encoding='UTF-8') as foo_file:
        foo_file.write(contents)

    _test_foo_state(foo_staged, contents, 'AM')
    with staged_files_only(patch_dir):
        _test_foo_state(foo_staged)
    _test_foo_state(foo_staged, contents, 'AM')
|
||||
|
||||
|
||||
def test_stage_non_utf8_changes(foo_staged, patch_dir):
    """Non-UTF-8 (latin-1) unstaged content round-trips through the stash."""
    contents = 'ú'
    # Produce a latin-1 diff
    with open('foo', 'w', encoding='latin-1') as foo_file:
        foo_file.write(contents)

    _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
    with staged_files_only(patch_dir):
        _test_foo_state(foo_staged)
    _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
|
||||
|
||||
|
||||
def test_non_utf8_conflicting_diff(foo_staged, patch_dir):
    """Regression test for #397"""
    # The trailing whitespace is important here, this triggers git to produce
    # an error message which looks like:
    #
    # ...patch1471530032:14: trailing whitespace.
    # [[unprintable character]][[space character]]
    # error: patch failed: foo:1
    # error: foo: patch does not apply
    #
    # Previously, the error message (though discarded immediately) was being
    # decoded with the UTF-8 codec (causing a crash)
    contents = 'ú \n'
    with open('foo', 'w', encoding='latin-1') as foo_file:
        foo_file.write(contents)

    _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
    with staged_files_only(patch_dir):
        _test_foo_state(foo_staged)
        # Create a conflicting diff that will need to be rolled back
        with open('foo', 'w') as foo_file:
            foo_file.write('')
    _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
|
||||
|
||||
|
||||
def _write(b):
|
||||
with open('foo', 'wb') as f:
|
||||
f.write(b)
|
||||
|
||||
|
||||
def assert_no_diff():
    """Fail (CalledProcessError) if the worktree differs from the index."""
    tree_sha = cmd_output('git', 'write-tree')[1].strip()
    # --exit-code makes diff-index return nonzero on any difference
    cmd_output('git', 'diff-index', tree_sha, '--exit-code')
|
||||
|
||||
|
||||
# Every (crlf_before, crlf_after) combination for the crlf test matrix.
bool_product = tuple(
    (first, second)
    for first in (True, False)
    for second in (True, False)
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(('crlf_before', 'crlf_after'), bool_product)
@pytest.mark.parametrize('autocrlf', ('true', 'false', 'input'))
def test_crlf(in_git_dir, patch_dir, crlf_before, crlf_after, autocrlf):
    """The stash round-trips cleanly under every CRLF/autocrlf combination."""
    cmd_output('git', 'config', '--local', 'core.autocrlf', autocrlf)

    before, after = b'1\n2\n', b'3\n4\n\n'
    before = before.replace(b'\n', b'\r\n') if crlf_before else before
    after = after.replace(b'\n', b'\r\n') if crlf_after else after

    _write(before)
    cmd_output('git', 'add', 'foo')
    _write(after)
    with staged_files_only(patch_dir):
        assert_no_diff()
|
||||
|
||||
|
||||
def test_whitespace_errors(in_git_dir, patch_dir):
    """apply.whitespace=error must not break restoring the stashed diff."""
    cmd_output('git', 'config', '--local', 'apply.whitespace', 'error')
    test_crlf(in_git_dir, patch_dir, True, True, 'true')
|
||||
|
||||
|
||||
def test_autocrlf_committed_crlf(in_git_dir, patch_dir):
    """Regression test for #570"""
    # commit CRLF content with autocrlf off...
    cmd_output('git', 'config', '--local', 'core.autocrlf', 'false')
    _write(b'1\r\n2\r\n')
    cmd_output('git', 'add', 'foo')
    git_commit()

    # ...then modify it with autocrlf on; the stash must still round-trip
    cmd_output('git', 'config', '--local', 'core.autocrlf', 'true')
    _write(b'1\r\n2\r\n\r\n\r\n\r\n')

    with staged_files_only(patch_dir):
        assert_no_diff()
|
||||
|
||||
|
||||
def test_intent_to_add(in_git_dir, patch_dir):
    """Regression test for #881"""
    _write(b'hello\nworld\n')
    cmd_output('git', 'add', '--intent-to-add', 'foo')

    assert git.intent_to_add_files() == ['foo']
    with staged_files_only(patch_dir):
        assert_no_diff()
    # the intent-to-add state is restored after the context
    assert git.intent_to_add_files() == ['foo']
|
216
tests/store_test.py
Normal file
216
tests/store_test.py
Normal file
|
@ -0,0 +1,216 @@
|
|||
import os.path
|
||||
import sqlite3
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import git
|
||||
from pre_commit.store import _get_default_directory
|
||||
from pre_commit.store import Store
|
||||
from pre_commit.util import CalledProcessError
|
||||
from pre_commit.util import cmd_output
|
||||
from testing.fixtures import git_dir
|
||||
from testing.util import cwd
|
||||
from testing.util import git_commit
|
||||
|
||||
|
||||
def test_our_session_fixture_works():
|
||||
"""There's a session fixture which makes `Store` invariantly raise to
|
||||
prevent writing to the home directory.
|
||||
"""
|
||||
with pytest.raises(AssertionError):
|
||||
Store()
|
||||
|
||||
|
||||
def test_get_default_directory_defaults_to_home():
|
||||
# Not we use the module level one which is not mocked
|
||||
ret = _get_default_directory()
|
||||
assert ret == os.path.join(os.path.expanduser('~/.cache'), 'pre-commit')
|
||||
|
||||
|
||||
def test_adheres_to_xdg_specification():
|
||||
with mock.patch.dict(
|
||||
os.environ, {'XDG_CACHE_HOME': '/tmp/fakehome'},
|
||||
):
|
||||
ret = _get_default_directory()
|
||||
assert ret == os.path.join('/tmp/fakehome', 'pre-commit')
|
||||
|
||||
|
||||
def test_uses_environment_variable_when_present():
|
||||
with mock.patch.dict(
|
||||
os.environ, {'PRE_COMMIT_HOME': '/tmp/pre_commit_home'},
|
||||
):
|
||||
ret = _get_default_directory()
|
||||
assert ret == '/tmp/pre_commit_home'
|
||||
|
||||
|
||||
def test_store_init(store):
|
||||
# Should create the store directory
|
||||
assert os.path.exists(store.directory)
|
||||
# Should create a README file indicating what the directory is about
|
||||
with open(os.path.join(store.directory, 'README')) as readme_file:
|
||||
readme_contents = readme_file.read()
|
||||
for text_line in (
|
||||
'This directory is maintained by the pre-commit project.',
|
||||
'Learn more: https://github.com/pre-commit/pre-commit',
|
||||
):
|
||||
assert text_line in readme_contents
|
||||
|
||||
|
||||
def test_clone(store, tempdir_factory, log_info_mock):
|
||||
path = git_dir(tempdir_factory)
|
||||
with cwd(path):
|
||||
git_commit()
|
||||
rev = git.head_rev(path)
|
||||
git_commit()
|
||||
|
||||
ret = store.clone(path, rev)
|
||||
# Should have printed some stuff
|
||||
assert log_info_mock.call_args_list[0][0][0].startswith(
|
||||
'Initializing environment for ',
|
||||
)
|
||||
|
||||
# Should return a directory inside of the store
|
||||
assert os.path.exists(ret)
|
||||
assert ret.startswith(store.directory)
|
||||
# Directory should start with `repo`
|
||||
_, dirname = os.path.split(ret)
|
||||
assert dirname.startswith('repo')
|
||||
# Should be checked out to the rev we specified
|
||||
assert git.head_rev(ret) == rev
|
||||
|
||||
# Assert there's an entry in the sqlite db for this
|
||||
assert store.select_all_repos() == [(path, rev, ret)]
|
||||
|
||||
|
||||
def test_clone_cleans_up_on_checkout_failure(store):
|
||||
with pytest.raises(Exception) as excinfo:
|
||||
# This raises an exception because you can't clone something that
|
||||
# doesn't exist!
|
||||
store.clone('/i_dont_exist_lol', 'fake_rev')
|
||||
assert '/i_dont_exist_lol' in str(excinfo.value)
|
||||
|
||||
repo_dirs = [
|
||||
d for d in os.listdir(store.directory) if d.startswith('repo')
|
||||
]
|
||||
assert repo_dirs == []
|
||||
|
||||
|
||||
def test_clone_when_repo_already_exists(store):
|
||||
# Create an entry in the sqlite db that makes it look like the repo has
|
||||
# been cloned.
|
||||
with sqlite3.connect(store.db_path) as db:
|
||||
db.execute(
|
||||
'INSERT INTO repos (repo, ref, path) '
|
||||
'VALUES ("fake_repo", "fake_ref", "fake_path")',
|
||||
)
|
||||
|
||||
assert store.clone('fake_repo', 'fake_ref') == 'fake_path'
|
||||
|
||||
|
||||
def test_clone_shallow_failure_fallback_to_complete(
|
||||
store, tempdir_factory,
|
||||
log_info_mock,
|
||||
):
|
||||
path = git_dir(tempdir_factory)
|
||||
with cwd(path):
|
||||
git_commit()
|
||||
rev = git.head_rev(path)
|
||||
git_commit()
|
||||
|
||||
# Force shallow clone failure
|
||||
def fake_shallow_clone(self, *args, **kwargs):
|
||||
raise CalledProcessError(1, (), 0, b'', None)
|
||||
store._shallow_clone = fake_shallow_clone
|
||||
|
||||
ret = store.clone(path, rev)
|
||||
|
||||
# Should have printed some stuff
|
||||
assert log_info_mock.call_args_list[0][0][0].startswith(
|
||||
'Initializing environment for ',
|
||||
)
|
||||
|
||||
# Should return a directory inside of the store
|
||||
assert os.path.exists(ret)
|
||||
assert ret.startswith(store.directory)
|
||||
# Directory should start with `repo`
|
||||
_, dirname = os.path.split(ret)
|
||||
assert dirname.startswith('repo')
|
||||
# Should be checked out to the rev we specified
|
||||
assert git.head_rev(ret) == rev
|
||||
|
||||
# Assert there's an entry in the sqlite db for this
|
||||
assert store.select_all_repos() == [(path, rev, ret)]
|
||||
|
||||
|
||||
def test_clone_tag_not_on_mainline(store, tempdir_factory):
|
||||
path = git_dir(tempdir_factory)
|
||||
with cwd(path):
|
||||
git_commit()
|
||||
cmd_output('git', 'checkout', 'master', '-b', 'branch')
|
||||
git_commit()
|
||||
cmd_output('git', 'tag', 'v1')
|
||||
cmd_output('git', 'checkout', 'master')
|
||||
cmd_output('git', 'branch', '-D', 'branch')
|
||||
|
||||
# previously crashed on unreachable refs
|
||||
store.clone(path, 'v1')
|
||||
|
||||
|
||||
def test_create_when_directory_exists_but_not_db(store):
|
||||
# In versions <= 0.3.5, there was no sqlite db causing a need for
|
||||
# backward compatibility
|
||||
os.remove(store.db_path)
|
||||
store = Store(store.directory)
|
||||
assert os.path.exists(store.db_path)
|
||||
|
||||
|
||||
def test_create_when_store_already_exists(store):
|
||||
# an assertion that this is idempotent and does not crash
|
||||
Store(store.directory)
|
||||
|
||||
|
||||
def test_db_repo_name(store):
|
||||
assert store.db_repo_name('repo', ()) == 'repo'
|
||||
assert store.db_repo_name('repo', ('b', 'a', 'c')) == 'repo:a,b,c'
|
||||
|
||||
|
||||
def test_local_resources_reflects_reality():
|
||||
on_disk = {
|
||||
res[len('empty_template_'):]
|
||||
for res in os.listdir('pre_commit/resources')
|
||||
if res.startswith('empty_template_')
|
||||
}
|
||||
assert on_disk == set(Store.LOCAL_RESOURCES)
|
||||
|
||||
|
||||
def test_mark_config_as_used(store, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
f = tmpdir.join('f').ensure()
|
||||
store.mark_config_used('f')
|
||||
assert store.select_all_configs() == [f.strpath]
|
||||
|
||||
|
||||
def test_mark_config_as_used_idempotent(store, tmpdir):
|
||||
test_mark_config_as_used(store, tmpdir)
|
||||
test_mark_config_as_used(store, tmpdir)
|
||||
|
||||
|
||||
def test_mark_config_as_used_does_not_exist(store):
|
||||
store.mark_config_used('f')
|
||||
assert store.select_all_configs() == []
|
||||
|
||||
|
||||
def _simulate_pre_1_14_0(store):
|
||||
with store.connect() as db:
|
||||
db.executescript('DROP TABLE configs')
|
||||
|
||||
|
||||
def test_select_all_configs_roll_forward(store):
|
||||
_simulate_pre_1_14_0(store)
|
||||
assert store.select_all_configs() == []
|
||||
|
||||
|
||||
def test_mark_config_as_used_roll_forward(store, tmpdir):
|
||||
_simulate_pre_1_14_0(store)
|
||||
test_mark_config_as_used(store, tmpdir)
|
122
tests/util_test.py
Normal file
122
tests/util_test.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
import os.path
|
||||
import stat
|
||||
import subprocess
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit.util import CalledProcessError
|
||||
from pre_commit.util import clean_path_on_failure
|
||||
from pre_commit.util import cmd_output
|
||||
from pre_commit.util import cmd_output_b
|
||||
from pre_commit.util import cmd_output_p
|
||||
from pre_commit.util import make_executable
|
||||
from pre_commit.util import parse_version
|
||||
from pre_commit.util import rmtree
|
||||
from pre_commit.util import tmpdir
|
||||
|
||||
|
||||
def test_CalledProcessError_str():
|
||||
error = CalledProcessError(1, ('exe',), 0, b'output', b'errors')
|
||||
assert str(error) == (
|
||||
"command: ('exe',)\n"
|
||||
'return code: 1\n'
|
||||
'expected return code: 0\n'
|
||||
'stdout:\n'
|
||||
' output\n'
|
||||
'stderr:\n'
|
||||
' errors'
|
||||
)
|
||||
|
||||
|
||||
def test_CalledProcessError_str_nooutput():
|
||||
error = CalledProcessError(1, ('exe',), 0, b'', b'')
|
||||
assert str(error) == (
|
||||
"command: ('exe',)\n"
|
||||
'return code: 1\n'
|
||||
'expected return code: 0\n'
|
||||
'stdout: (none)\n'
|
||||
'stderr: (none)'
|
||||
)
|
||||
|
||||
|
||||
def test_clean_on_failure_noop(in_tmpdir):
|
||||
with clean_path_on_failure('foo'):
|
||||
pass
|
||||
|
||||
|
||||
def test_clean_path_on_failure_does_nothing_when_not_raising(in_tmpdir):
|
||||
with clean_path_on_failure('foo'):
|
||||
os.mkdir('foo')
|
||||
assert os.path.exists('foo')
|
||||
|
||||
|
||||
def test_clean_path_on_failure_cleans_for_normal_exception(in_tmpdir):
|
||||
class MyException(Exception):
|
||||
pass
|
||||
|
||||
with pytest.raises(MyException):
|
||||
with clean_path_on_failure('foo'):
|
||||
os.mkdir('foo')
|
||||
raise MyException
|
||||
|
||||
assert not os.path.exists('foo')
|
||||
|
||||
|
||||
def test_clean_path_on_failure_cleans_for_system_exit(in_tmpdir):
|
||||
class MySystemExit(SystemExit):
|
||||
pass
|
||||
|
||||
with pytest.raises(MySystemExit):
|
||||
with clean_path_on_failure('foo'):
|
||||
os.mkdir('foo')
|
||||
raise MySystemExit
|
||||
|
||||
assert not os.path.exists('foo')
|
||||
|
||||
|
||||
def test_tmpdir():
|
||||
with tmpdir() as tempdir:
|
||||
assert os.path.exists(tempdir)
|
||||
assert not os.path.exists(tempdir)
|
||||
|
||||
|
||||
def test_cmd_output_exe_not_found():
|
||||
ret, out, _ = cmd_output('dne', retcode=None)
|
||||
assert ret == 1
|
||||
assert out == 'Executable `dne` not found'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('fn', (cmd_output_b, cmd_output_p))
|
||||
def test_cmd_output_exe_not_found_bytes(fn):
|
||||
ret, out, _ = fn('dne', retcode=None, stderr=subprocess.STDOUT)
|
||||
assert ret == 1
|
||||
assert out == b'Executable `dne` not found'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('fn', (cmd_output_b, cmd_output_p))
|
||||
def test_cmd_output_no_shebang(tmpdir, fn):
|
||||
f = tmpdir.join('f').ensure()
|
||||
make_executable(f)
|
||||
|
||||
# previously this raised `OSError` -- the output is platform specific
|
||||
ret, out, _ = fn(str(f), retcode=None, stderr=subprocess.STDOUT)
|
||||
assert ret == 1
|
||||
assert isinstance(out, bytes)
|
||||
assert out.endswith(b'\n')
|
||||
|
||||
|
||||
def test_parse_version():
|
||||
assert parse_version('0.0') == parse_version('0.0')
|
||||
assert parse_version('0.1') > parse_version('0.0')
|
||||
assert parse_version('2.1') >= parse_version('2')
|
||||
|
||||
|
||||
def test_rmtree_read_only_directories(tmpdir):
|
||||
"""Simulates the go module tree. See #1042"""
|
||||
tmpdir.join('x/y/z').ensure_dir().join('a').ensure()
|
||||
mode = os.stat(str(tmpdir.join('x'))).st_mode
|
||||
mode_no_w = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
|
||||
tmpdir.join('x/y/z').chmod(mode_no_w)
|
||||
tmpdir.join('x/y/z').chmod(mode_no_w)
|
||||
tmpdir.join('x/y/z').chmod(mode_no_w)
|
||||
rmtree(str(tmpdir.join('x')))
|
197
tests/xargs_test.py
Normal file
197
tests/xargs_test.py
Normal file
|
@ -0,0 +1,197 @@
|
|||
import concurrent.futures
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from typing import Tuple
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from pre_commit import parse_shebang
|
||||
from pre_commit import xargs
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('env', 'expected'),
|
||||
(
|
||||
({}, 0),
|
||||
({b'x': b'1'}, 12),
|
||||
({b'x': b'12'}, 13),
|
||||
({b'x': b'1', b'y': b'2'}, 24),
|
||||
),
|
||||
)
|
||||
def test_environ_size(env, expected):
|
||||
# normalize integer sizing
|
||||
assert xargs._environ_size(_env=env) == expected
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def win32_mock():
|
||||
with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
|
||||
with mock.patch.object(sys, 'platform', 'win32'):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def linux_mock():
|
||||
with mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8'):
|
||||
with mock.patch.object(sys, 'platform', 'linux'):
|
||||
yield
|
||||
|
||||
|
||||
def test_partition_trivial():
|
||||
assert xargs.partition(('cmd',), (), 1) == (('cmd',),)
|
||||
|
||||
|
||||
def test_partition_simple():
|
||||
assert xargs.partition(('cmd',), ('foo',), 1) == (('cmd', 'foo'),)
|
||||
|
||||
|
||||
def test_partition_limits():
|
||||
ret = xargs.partition(
|
||||
('ninechars',), (
|
||||
# Just match the end (with spaces)
|
||||
'.' * 5, '.' * 4,
|
||||
# Just match the end (single arg)
|
||||
'.' * 10,
|
||||
# Goes over the end
|
||||
'.' * 5,
|
||||
'.' * 6,
|
||||
),
|
||||
1,
|
||||
_max_length=21,
|
||||
)
|
||||
assert ret == (
|
||||
('ninechars', '.' * 5, '.' * 4),
|
||||
('ninechars', '.' * 10),
|
||||
('ninechars', '.' * 5),
|
||||
('ninechars', '.' * 6),
|
||||
)
|
||||
|
||||
|
||||
def test_partition_limit_win32(win32_mock):
|
||||
cmd = ('ninechars',)
|
||||
# counted as half because of utf-16 encode
|
||||
varargs = ('😑' * 5,)
|
||||
ret = xargs.partition(cmd, varargs, 1, _max_length=21)
|
||||
assert ret == (cmd + varargs,)
|
||||
|
||||
|
||||
def test_partition_limit_linux(linux_mock):
|
||||
cmd = ('ninechars',)
|
||||
varargs = ('😑' * 5,)
|
||||
ret = xargs.partition(cmd, varargs, 1, _max_length=31)
|
||||
assert ret == (cmd + varargs,)
|
||||
|
||||
|
||||
def test_argument_too_long_with_large_unicode(linux_mock):
|
||||
cmd = ('ninechars',)
|
||||
varargs = ('😑' * 10,) # 4 bytes * 10
|
||||
with pytest.raises(xargs.ArgumentTooLongError):
|
||||
xargs.partition(cmd, varargs, 1, _max_length=20)
|
||||
|
||||
|
||||
def test_partition_target_concurrency():
|
||||
ret = xargs.partition(
|
||||
('foo',), ('A',) * 22,
|
||||
4,
|
||||
_max_length=50,
|
||||
)
|
||||
assert ret == (
|
||||
('foo',) + ('A',) * 6,
|
||||
('foo',) + ('A',) * 6,
|
||||
('foo',) + ('A',) * 6,
|
||||
('foo',) + ('A',) * 4,
|
||||
)
|
||||
|
||||
|
||||
def test_partition_target_concurrency_wont_make_tiny_partitions():
|
||||
ret = xargs.partition(
|
||||
('foo',), ('A',) * 10,
|
||||
4,
|
||||
_max_length=50,
|
||||
)
|
||||
assert ret == (
|
||||
('foo',) + ('A',) * 4,
|
||||
('foo',) + ('A',) * 4,
|
||||
('foo',) + ('A',) * 2,
|
||||
)
|
||||
|
||||
|
||||
def test_argument_too_long():
|
||||
with pytest.raises(xargs.ArgumentTooLongError):
|
||||
xargs.partition(('a' * 5,), ('a' * 5,), 1, _max_length=10)
|
||||
|
||||
|
||||
def test_xargs_smoke():
|
||||
ret, out = xargs.xargs(('echo',), ('hello', 'world'))
|
||||
assert ret == 0
|
||||
assert out.replace(b'\r\n', b'\n') == b'hello world\n'
|
||||
|
||||
|
||||
exit_cmd = parse_shebang.normalize_cmd(('bash', '-c', 'exit $1', '--'))
|
||||
# Abuse max_length to control the exit code
|
||||
max_length = len(' '.join(exit_cmd)) + 3
|
||||
|
||||
|
||||
def test_xargs_retcode_normal():
|
||||
ret, _ = xargs.xargs(exit_cmd, ('0',), _max_length=max_length)
|
||||
assert ret == 0
|
||||
|
||||
ret, _ = xargs.xargs(exit_cmd, ('0', '1'), _max_length=max_length)
|
||||
assert ret == 1
|
||||
|
||||
# takes the maximum return code
|
||||
ret, _ = xargs.xargs(exit_cmd, ('0', '5', '1'), _max_length=max_length)
|
||||
assert ret == 5
|
||||
|
||||
|
||||
def test_xargs_concurrency():
|
||||
bash_cmd = parse_shebang.normalize_cmd(('bash', '-c'))
|
||||
print_pid = ('sleep 0.5 && echo $$',)
|
||||
|
||||
start = time.time()
|
||||
ret, stdout = xargs.xargs(
|
||||
bash_cmd, print_pid * 5,
|
||||
target_concurrency=5,
|
||||
_max_length=len(' '.join(bash_cmd + print_pid)) + 1,
|
||||
)
|
||||
elapsed = time.time() - start
|
||||
assert ret == 0
|
||||
pids = stdout.splitlines()
|
||||
assert len(pids) == 5
|
||||
# It would take 0.5*5=2.5 seconds ot run all of these in serial, so if it
|
||||
# takes less, they must have run concurrently.
|
||||
assert elapsed < 2.5
|
||||
|
||||
|
||||
def test_thread_mapper_concurrency_uses_threadpoolexecutor_map():
|
||||
with xargs._thread_mapper(10) as thread_map:
|
||||
_self = thread_map.__self__ # type: ignore
|
||||
assert isinstance(_self, concurrent.futures.ThreadPoolExecutor)
|
||||
|
||||
|
||||
def test_thread_mapper_concurrency_uses_regular_map():
|
||||
with xargs._thread_mapper(1) as thread_map:
|
||||
assert thread_map is map
|
||||
|
||||
|
||||
def test_xargs_propagate_kwargs_to_cmd():
|
||||
env = {'PRE_COMMIT_TEST_VAR': 'Pre commit is awesome'}
|
||||
cmd: Tuple[str, ...] = ('bash', '-c', 'echo $PRE_COMMIT_TEST_VAR', '--')
|
||||
cmd = parse_shebang.normalize_cmd(cmd)
|
||||
|
||||
ret, stdout = xargs.xargs(cmd, ('1',), env=env)
|
||||
assert ret == 0
|
||||
assert b'Pre commit is awesome' in stdout
|
||||
|
||||
|
||||
@pytest.mark.xfail(os.name == 'nt', reason='posix only')
|
||||
def test_xargs_color_true_makes_tty():
|
||||
retcode, out = xargs.xargs(
|
||||
(sys.executable, '-c', 'import sys; print(sys.stdout.isatty())'),
|
||||
('1',),
|
||||
color=True,
|
||||
)
|
||||
assert retcode == 0
|
||||
assert out == b'True\n'
|
Loading…
Add table
Add a link
Reference in a new issue