Adding upstream version 2.9.3.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent e8d3ba475e
commit 5f54aad01b
32 changed files with 252 additions and 75 deletions
.github/FUNDING.yml (vendored, new file, +2)
@@ -0,0 +1,2 @@
github: asottile
open_collective: pre-commit
@@ -21,16 +21,16 @@ repos:
    hooks:
    -   id: autopep8
-   repo: https://github.com/pre-commit/pre-commit
    rev: v2.7.1
    rev: v2.9.3
    hooks:
    -   id: validate_manifest
-   repo: https://github.com/asottile/pyupgrade
    rev: v2.7.3
    rev: v2.7.4
    hooks:
    -   id: pyupgrade
        args: [--py36-plus]
-   repo: https://github.com/asottile/reorder_python_imports
    rev: v2.3.5
    rev: v2.3.6
    hooks:
    -   id: reorder-python-imports
        args: [--py3-plus]
CHANGELOG.md (+62)
@@ -1,3 +1,65 @@
2.9.3 - 2020-12-07
==================

### Fixes
- Fix crash on cygwin mismatch check outside of a git directory
    - #1721 PR by @asottile.
    - #1720 issue by @chronoB.
- Fix cleanup code on docker volumes for go
    - #1725 PR by @fsouza.
- Fix working directory detection on SUBST drives on windows
    - #1727 PR by @mrogaski.
    - #1610 issue by @jcameron73.

2.9.2 - 2020-11-25
==================

### Fixes
- Fix default value for `types_or` so `symlink` and `directory` can be matched
    - #1716 PR by @asottile.
    - #1718 issue by @CodeBleu.

2.9.1 - 2020-11-25
==================

### Fixes
- Improve error message for "hook goes missing"
    - #1709 PR by @paulhfischer.
    - #1708 issue by @theod07.
- Add warning for `/*` in `files` / `exclude` regexes
    - #1707 PR by @paulhfischer.
    - #1702 issue by @asottile.
- Fix `healthy()` check for `language: python` on windows when the base
  executable has non-ascii characters.
    - #1713 PR by @asottile.
    - #1711 issue by @Najiva.

2.9.0 - 2020-11-21
==================

### Features
- Add `types_or` which allows matching multiple disparate `types` in a hook
    - #1677 by @MarcoGorelli.
    - #607 by @asottile.
- Add Github Sponsors / Open Collective links
    - https://github.com/sponsors/asottile
    - https://opencollective.com/pre-commit

### Fixes
- Improve cleanup for `language: dotnet`
    - #1678 by @rkm.
- Fix "xargs" when running windows batch files
    - #1686 PR by @asottile.
    - #1604 issue by @apietrzak.
    - #1604 issue by @ufwtlsb.
- Fix conflict with external `rbenv` and `language_version: default`
    - #1700 PR by @asottile.
    - #1699 issue by @abuxton.
- Improve performance of `git status` / `git diff` commands by ignoring
  submodules
    - #1704 PR by @Vynce.
    - #1701 issue by @Vynce.

2.8.2 - 2020-10-30
==================
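The 2.9.1 entry above adds a warning when `/*` appears in the `files` / `exclude` fields, because those fields are regular expressions rather than shell globs. A standalone illustration of the difference (not part of the commit; the paths are made up):

```python
import re

# In a regex, '*' quantifies the preceding '/' and '.' matches any character,
# so 'dir/*.py' means: "dir", zero or more "/", any single character, "py".
pattern = re.compile('dir/*.py')
print(bool(pattern.search('dir/a.py')))   # False -- not what a glob user expects
print(bool(pattern.search('dir.py')))     # True
print(bool(pattern.search('dirzpy')))     # True

# a pattern that really means "*.py files directly inside dir/":
glob_like = re.compile(r'^dir/[^/]*\.py$')
print(bool(glob_like.match('dir/a.py')))      # True
print(bool(glob_like.match('dir/sub/a.py')))  # False
```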
@@ -61,6 +61,7 @@ MANIFEST_HOOK_DICT = cfgv.Map(
    cfgv.Optional('files', check_string_regex, ''),
    cfgv.Optional('exclude', check_string_regex, '^$'),
    cfgv.Optional('types', cfgv.check_array(check_type_tag), ['file']),
    cfgv.Optional('types_or', cfgv.check_array(check_type_tag), []),
    cfgv.Optional('exclude_types', cfgv.check_array(check_type_tag), []),
    cfgv.Optional(
@@ -111,6 +112,18 @@ LOCAL = 'local'
META = 'meta'


class OptionalSensibleRegex(cfgv.OptionalNoDefault):
    def check(self, dct: Dict[str, Any]) -> None:
        super().check(dct)

        if '/*' in dct.get(self.key, ''):
            logger.warning(
                f'The {self.key!r} field in hook {dct.get("id")!r} is a '
                f"regex, not a glob -- matching '/*' probably isn't what you "
                f'want here',
            )


class MigrateShaToRev:
    key = 'rev'
@@ -226,6 +239,8 @@ CONFIG_HOOK_DICT = cfgv.Map(
        for item in MANIFEST_HOOK_DICT.items
        if item.key != 'id'
    ),
    OptionalSensibleRegex('files', cfgv.check_string),
    OptionalSensibleRegex('exclude', cfgv.check_string),
)
CONFIG_REPO_DICT = cfgv.Map(
    'Repository', 'repo',
@@ -79,14 +79,12 @@ def _check_hooks_still_exist_at_rev(
    hooks_missing = hooks - {hook['id'] for hook in manifest}
    if hooks_missing:
        raise RepositoryCannotBeUpdatedError(
            f'Cannot update because the tip of HEAD is missing these hooks:\n'
            f'{", ".join(sorted(hooks_missing))}',
            f'Cannot update because the update target is missing these '
            f'hooks:\n{", ".join(sorted(hooks_missing))}',
        )


REV_LINE_RE = re.compile(
    r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$', re.DOTALL,
)
REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$')


def _original_lines(
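For reference, a quick standalone check (not from the repository) of what the `REV_LINE_RE` pattern above captures when autoupdate rewrites a `rev:` line; the sample line is made up:

```python
import re

REV_LINE_RE = re.compile(r'^(\s+)rev:(\s*)([\'"]?)([^\s#]+)(.*)(\r?\n)$')

line = '    rev: v2.7.3  # frozen\n'   # hypothetical config line
match = REV_LINE_RE.match(line)
assert match is not None
indent, spacing, quote, rev, rest, newline = match.groups()
print([indent, spacing, quote, rev, rest, newline])
# ['    ', ' ', '', 'v2.7.3', '  # frozen', '\n']

# rebuilding the line with a new revision keeps the surrounding formatting:
new_line = f'{indent}rev:{spacing}{quote}v2.7.4{quote}{rest}{newline}'
print(new_line, end='')   # "    rev: v2.7.4  # frozen"
```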
@@ -1,4 +1,5 @@
import re
import textwrap

import yaml
@@ -6,27 +7,22 @@ from pre_commit.clientlib import load_config
from pre_commit.util import yaml_load


def _indent(s: str) -> str:
    lines = s.splitlines(True)
    return ''.join(' ' * 4 + line if line.strip() else line for line in lines)


def _is_header_line(line: str) -> bool:
    return line.startswith(('#', '---')) or not line.strip()


def _migrate_map(contents: str) -> str:
    # Find the first non-header line
    lines = contents.splitlines(True)
    i = 0
    # Only loop on non empty configuration file
    while i < len(lines) and _is_header_line(lines[i]):
        i += 1

    header = ''.join(lines[:i])
    rest = ''.join(lines[i:])

    if isinstance(yaml_load(contents), list):
        # Find the first non-header line
        lines = contents.splitlines(True)
        i = 0
        # Only loop on non empty configuration file
        while i < len(lines) and _is_header_line(lines[i]):
            i += 1

        header = ''.join(lines[:i])
        rest = ''.join(lines[i:])

        # If they are using the "default" flow style of yaml, this operation
        # will yield a valid configuration
        try:
@@ -34,7 +30,7 @@ def _migrate_map(contents: str) -> str:
            yaml_load(trial_contents)
            contents = trial_contents
        except yaml.YAMLError:
            contents = f'{header}repos:\n{_indent(rest)}'
            contents = f'{header}repos:\n{textwrap.indent(rest, " " * 4)}'

    return contents
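The hunks above drop the local `_indent()` helper in favor of `textwrap.indent()`. A quick standalone check (not part of the commit) that the stdlib call behaves the same way, using the inputs from the removed unit test further down:

```python
import textwrap


def _indent(s: str) -> str:  # the helper removed by this commit
    lines = s.splitlines(True)
    return ''.join(' ' * 4 + line if line.strip() else line for line in lines)


# textwrap.indent() also skips whitespace-only lines by default,
# so it is a drop-in replacement here
for s in ('', 'a', 'foo\nbar', 'foo\n\nbar\n', '\n\n\n'):
    assert _indent(s) == textwrap.indent(s, ' ' * 4)
```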
@@ -83,20 +83,32 @@ class Classifier:
        self,
        names: Sequence[str],
        types: Collection[str],
        types_or: Collection[str],
        exclude_types: Collection[str],
    ) -> List[str]:
        types, exclude_types = frozenset(types), frozenset(exclude_types)
        types = frozenset(types)
        types_or = frozenset(types_or)
        exclude_types = frozenset(exclude_types)
        ret = []
        for filename in names:
            tags = self._types_for_file(filename)
            if tags >= types and not tags & exclude_types:
            if (
                tags >= types and
                (not types_or or tags & types_or) and
                not tags & exclude_types
            ):
                ret.append(filename)
        return ret

    def filenames_for_hook(self, hook: Hook) -> Tuple[str, ...]:
        names = self.filenames
        names = filter_by_include_exclude(names, hook.files, hook.exclude)
        names = self.by_types(names, hook.types, hook.exclude_types)
        names = self.by_types(
            names,
            hook.types,
            hook.types_or,
            hook.exclude_types,
        )
        return tuple(names)

    @classmethod
@@ -250,7 +262,9 @@ def _all_filenames(args: argparse.Namespace) -> Collection[str]:


def _get_diff() -> bytes:
    _, out, _ = cmd_output_b('git', 'diff', '--no-ext-diff', retcode=None)
    _, out, _ = cmd_output_b(
        'git', 'diff', '--no-ext-diff', '--ignore-submodules', retcode=None,
    )
    return out
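The condition added to `Classifier.by_types` above reads: a file is selected when its tags contain every entry of `types`, at least one entry of `types_or` (or `types_or` is empty), and no entry of `exclude_types`. A self-contained sketch of that rule with made-up tag sets (not the project's classifier):

```python
def matches(tags, types, types_or, exclude_types):
    tags = frozenset(tags)
    return bool(
        tags >= frozenset(types) and
        (not types_or or tags & frozenset(types_or)) and
        not tags & frozenset(exclude_types)
    )


py_file = {'file', 'text', 'python'}
pyx_file = {'file', 'text', 'cython'}
txt_file = {'file', 'text', 'plain-text'}

# mirrors the new types_or_repo fixture: types: [file], types_or: [python, cython]
for tags in (py_file, pyx_file, txt_file):
    print(matches(tags, types=['file'], types_or=['python', 'cython'], exclude_types=[]))
# True, True, False
```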
@@ -8,11 +8,7 @@ from typing import Optional
from typing import Tuple
from typing import Union


class _Unset(enum.Enum):
    UNSET = 1


_Unset = enum.Enum('_Unset', 'UNSET')
UNSET = _Unset.UNSET
@@ -47,21 +47,26 @@ def no_git_env(


def get_root() -> str:
    # Git 2.25 introduced a change to "rev-parse --show-toplevel" that exposed
    # underlying volumes for Windows drives mapped with SUBST. We use
    # "rev-parse --show-cdup" to get the appropriate path, but must perform
    # an extra check to see if we are in the .git directory.
    try:
        root = cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()
        root = os.path.realpath(
            cmd_output('git', 'rev-parse', '--show-cdup')[1].strip(),
        )
        git_dir = os.path.realpath(get_git_dir())
    except CalledProcessError:
        raise FatalError(
            'git failed. Is it installed, and are you in a Git repository '
            'directory?',
        )
    else:
        if root == '':  # pragma: no cover (old git)
            raise FatalError(
                'git toplevel unexpectedly empty! make sure you are not '
                'inside the `.git` directory of your repository.',
            )
        else:
            return root
    if os.path.commonpath((root, git_dir)) == git_dir:
        raise FatalError(
            'git toplevel unexpectedly empty! make sure you are not '
            'inside the `.git` directory of your repository.',
        )
    return root


def get_git_dir(git_root: str = '.') -> str:
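The rewritten `get_root()` above resolves the toplevel via `rev-parse --show-cdup` and then refuses to run when the resolved root falls inside the git dir. A tiny illustration of the containment test it relies on (the paths are made up):

```python
import os.path

git_dir = '/home/user/project/.git'

# a "root" that resolved to somewhere inside .git collapses into the git dir
assert os.path.commonpath(('/home/user/project/.git/hooks', git_dir)) == git_dir
# the real toplevel is outside the git dir, so the check passes
assert os.path.commonpath(('/home/user/project', git_dir)) != git_dir
```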
@@ -130,7 +135,9 @@ def get_staged_files(cwd: Optional[str] = None) -> List[str]:


def intent_to_add_files() -> List[str]:
    _, stdout, _ = cmd_output('git', 'status', '--porcelain', '-z')
    _, stdout, _ = cmd_output(
        'git', 'status', '--ignore-submodules', '--porcelain', '-z',
    )
    parts = list(reversed(zsplit(stdout)))
    intent_to_add = []
    while parts:
@@ -199,7 +206,10 @@ def check_for_cygwin_mismatch() -> None:
    """See https://github.com/pre-commit/pre-commit/issues/354"""
    if sys.platform in ('cygwin', 'win32'):  # pragma: no cover (windows)
        is_cygwin_python = sys.platform == 'cygwin'
        toplevel = get_root()
        try:
            toplevel = get_root()
        except FatalError:  # skip the check if we're not in a git repo
            return
        is_cygwin_git = toplevel.startswith('/')

        if is_cygwin_python ^ is_cygwin_git:
@@ -22,6 +22,7 @@ class Hook(NamedTuple):
    files: str
    exclude: str
    types: Sequence[str]
    types_or: Sequence[str]
    exclude_types: Sequence[str]
    additional_dependencies: Sequence[str]
    args: Sequence[str]
@@ -12,7 +12,6 @@ from pre_commit.hook import Hook
from pre_commit.languages import helpers
from pre_commit.prefix import Prefix
from pre_commit.util import clean_path_on_failure
from pre_commit.util import rmtree

ENVIRONMENT_DIR = 'dotnetenv'
BIN_DIR = 'bin'
@@ -76,9 +75,9 @@ def install_environment(
        ),
    )

    # Cleanup build output
    for d in ('bin', 'obj', build_dir):
        rmtree(prefix.path(d))
    # Clean the git dir, ignoring the environment dir
    clean_cmd = ('git', 'clean', '-ffxd', '-e', f'{ENVIRONMENT_DIR}-*')
    helpers.run_setup_cmd(prefix, clean_cmd)


def run_hook(
@@ -36,7 +36,7 @@ def _version_info(exe: str) -> str:


def _read_pyvenv_cfg(filename: str) -> Dict[str, str]:
    ret = {}
    with open(filename) as f:
    with open(filename, encoding='UTF-8') as f:
        for line in f:
            try:
                k, v = line.split('=')
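The change above pins the `pyvenv.cfg` read to UTF-8 so that a base interpreter path containing non-ASCII characters (see the new test near the end of this diff) parses even where the locale encoding is not UTF-8. A rough standalone re-creation of the idea (not the project's exact function):

```python
import os
import tempfile


def read_pyvenv_cfg(filename: str) -> dict:
    ret = {}
    with open(filename, encoding='UTF-8') as f:   # explicit encoding is the fix
        for line in f:
            try:
                k, v = line.split('=')
            except ValueError:       # skip lines without exactly one '='
                continue
            ret[k.strip()] = v.strip()
    return ret


with tempfile.TemporaryDirectory() as tmp:
    cfg = os.path.join(tmp, 'pyvenv.cfg')
    with open(cfg, 'w', encoding='UTF-8') as f:
        f.write('hello = hello john.š\n')
    assert read_pyvenv_cfg(cfg) == {'hello': 'hello john.š'}
```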
@@ -52,7 +52,6 @@ def get_env_patch(
    else:  # pragma: win32 no cover
        patches += (
            ('RBENV_ROOT', venv),
            ('RBENV_VERSION', language_version),
            (
                'PATH', (
                    os.path.join(venv, 'gems', 'bin'), os.pathsep,
@@ -61,6 +60,9 @@ def get_env_patch(
                ),
            ),
        )
    if language_version not in {'system', 'default'}:  # pragma: win32 no cover
        patches += (('RBENV_VERSION', language_version),)

    return patches
@@ -47,8 +47,10 @@ def check_useless_excludes(config_file: str) -> int:
        # the defaults applied during runtime
        hook = apply_defaults(hook, MANIFEST_HOOK_DICT)
        names = classifier.filenames
        types, exclude_types = hook['types'], hook['exclude_types']
        names = classifier.by_types(names, types, exclude_types)
        types = hook['types']
        types_or = hook['types_or']
        exclude_types = hook['exclude_types']
        names = classifier.by_types(names, types, types_or, exclude_types)
        include, exclude = hook['files'], hook['exclude']
        if not exclude_matches_any(names, include, exclude):
            print(
@@ -255,7 +255,7 @@ def rmtree(path: str) -> None:
        excvalue = exc[1]
        if (
            func in (os.rmdir, os.remove, os.unlink) and
            excvalue.errno == errno.EACCES
            excvalue.errno in {errno.EACCES, errno.EPERM}
        ):
            for p in (path, os.path.dirname(path)):
                os.chmod(p, os.stat(p).st_mode | stat.S_IWUSR)
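For context, the handler above is an `onerror` callback for `shutil.rmtree`: when deletion fails with a permission error (now EPERM as well as EACCES), it makes the path and its parent writable and retries. A minimal self-contained sketch of the same pattern (not pre-commit's exact helper):

```python
import errno
import os
import shutil
import stat
import tempfile


def handle_remove_readonly(func, path, exc):
    excvalue = exc[1]
    if (
        func in (os.rmdir, os.remove, os.unlink) and
        excvalue.errno in {errno.EACCES, errno.EPERM}
    ):
        for p in (path, os.path.dirname(path)):
            os.chmod(p, os.stat(p).st_mode | stat.S_IWUSR)
        func(path)
    else:
        raise


d = tempfile.mkdtemp()
ro = os.path.join(d, 'ro')
os.mkdir(ro)
open(os.path.join(ro, 'f'), 'w').close()
os.chmod(ro, stat.S_IRUSR | stat.S_IXUSR)           # directory is now read-only
shutil.rmtree(d, onerror=handle_remove_readonly)    # succeeds thanks to the handler
```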
@@ -137,6 +137,16 @@ def xargs(
    except parse_shebang.ExecutableNotFoundError as e:
        return e.to_output()[:2]

    # on windows, batch files have a separate length limit than windows itself
    if (
        sys.platform == 'win32' and
        cmd[0].lower().endswith(('.bat', '.cmd'))
    ):  # pragma: win32 cover
        # this is implementation details but the command gets translated into
        # full/path/to/cmd.exe /c *cmd
        cmd_exe = parse_shebang.find_executable('cmd.exe')
        _max_length = 8192 - len(cmd_exe) - len(' /c ')

    partitions = partition(cmd, varargs, target_concurrency, _max_length)

    def run_cmd_partition(
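The new branch above shrinks the length budget used to split the argument list, because a `.bat`/`.cmd` hook is actually executed as `cmd.exe /c <script> <args...>` and `cmd.exe` has its own ~8K-character command-line limit. The project's `partition()` is not shown in this diff; the sketch below only illustrates the general idea of packing arguments into runs that stay under a byte budget (the `cmd.exe` path is hypothetical):

```python
def chunk_args(args, budget):
    """Greedily pack args into runs whose joined length stays under budget."""
    runs, run, length = [], [], 0
    for arg in args:
        if run and length + len(arg) + 1 > budget:
            runs.append(run)
            run, length = [], 0
        run.append(arg)
        length += len(arg) + 1      # +1 for the separating space
    if run:
        runs.append(run)
    return runs


cmd_exe = r'C:\Windows\System32\cmd.exe'            # hypothetical resolved path
budget = 8192 - len(cmd_exe) - len(' /c ')
print(len(chunk_args(['x' * 2000] * 10, budget)))   # 3 runs of at most 4 args each
```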
@@ -1,6 +1,6 @@
[metadata]
name = pre_commit
version = 2.8.2
version = 2.9.3
description = A framework for managing and maintaining multi-language pre-commit hooks.
long_description = file: README.md
long_description_content_type = text/markdown
@@ -1,3 +1,3 @@
#!/usr/bin/env bash
echo $@
echo "$@"
exit 1
@@ -1,4 +1,4 @@
#!/usr/bin/env bash
echo 'Fail'
echo $@
echo "$@"
exit 1
@@ -1,2 +1,2 @@
#!/usr/bin/env bash
echo $@
echo "$@"
@@ -1,4 +1,4 @@
#!/usr/bin/env bash

echo $@
echo "$@"
echo 'Hello World'
testing/resources/types_or_repo/.pre-commit-hooks.yaml (new file, +6)
@@ -0,0 +1,6 @@
- id: python-cython-files
  name: Python and Cython files
  entry: bin/hook.sh
  language: script
  types: [file]
  types_or: [python, cython]
testing/resources/types_or_repo/bin/hook.sh (executable, new file, +3)
@@ -0,0 +1,3 @@
#!/usr/bin/env bash
echo "$@"
exit 1
@@ -1,3 +1,3 @@
#!/usr/bin/env bash
echo $@
echo "$@"
exit 1
@@ -99,6 +99,9 @@ def main() -> int:
    shebang = '/usr/bin/env python3'
    zipapp.create_archive(tmpdir, filename, interpreter=shebang)

    with open(f'{filename}.sha256sum', 'w') as f:
        subprocess.check_call(('sha256sum', filename), stdout=f)

    return 0
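The added step writes a `<filename>.sha256sum` file by shelling out to `sha256sum`. If that tool is unavailable, an equivalent checksum line can be produced with the standard library; a small sketch (not part of the commit, archive name is hypothetical):

```python
import hashlib


def sha256sum_line(path: str) -> str:
    """Return a line in the same "<digest>  <path>" format that sha256sum prints."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(64 * 1024), b''):
            digest.update(chunk)
    return f'{digest.hexdigest()}  {path}\n'


# usage: write the companion checksum file for an archive named "pre-commit.pyz"
# with open('pre-commit.pyz.sha256sum', 'w') as f:
#     f.write(sha256sum_line('pre-commit.pyz'))
```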
@@ -166,6 +166,23 @@ def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog):
    ]


def test_validate_optional_sensible_regex(caplog):
    config_obj = {
        'id': 'flake8',
        'files': 'dir/*.py',
    }
    cfgv.validate(config_obj, CONFIG_HOOK_DICT)

    assert caplog.record_tuples == [
        (
            'pre_commit',
            logging.WARNING,
            "The 'files' field in hook 'flake8' is a regex, not a glob -- "
            "matching '/*' probably isn't what you want here",
        ),
    ]


@pytest.mark.parametrize('fn', (validate_config_main, validate_manifest_main))
def test_mains_not_ok(tmpdir, fn):
    not_yaml = tmpdir.join('f.notyaml')
@@ -1,9 +1,12 @@
import shlex
from unittest import mock

import pytest
import yaml

import pre_commit.constants as C
from pre_commit import git
from pre_commit import util
from pre_commit.commands.autoupdate import _check_hooks_still_exist_at_rev
from pre_commit.commands.autoupdate import autoupdate
from pre_commit.commands.autoupdate import RepositoryCannotBeUpdatedError
@@ -173,6 +176,11 @@ def test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store):
    assert cfg.read() == fmt.format(out_of_date.path, out_of_date.head_rev)


def test_autoupdate_pure_yaml(out_of_date, tmpdir, store):
    with mock.patch.object(util, 'Dumper', yaml.SafeDumper):
        test_autoupdate_out_of_date_repo(out_of_date, tmpdir, store)


def test_autoupdate_only_one_to_update(up_to_date, out_of_date, tmpdir, store):
    fmt = (
        'repos:\n'
@@ -2,24 +2,9 @@ import pytest

import pre_commit.constants as C
from pre_commit.clientlib import InvalidConfigError
from pre_commit.commands.migrate_config import _indent
from pre_commit.commands.migrate_config import migrate_config


@pytest.mark.parametrize(
    ('s', 'expected'),
    (
        ('', ''),
        ('a', '    a'),
        ('foo\nbar', '    foo\n    bar'),
        ('foo\n\nbar\n', '    foo\n\n    bar\n'),
        ('\n\n\n', '\n\n\n'),
    ),
)
def test_indent(s, expected):
    assert _indent(s) == expected


def test_migrate_config_normal_format(tmpdir, capsys):
    cfg = tmpdir.join(C.CONFIG_FILE)
    cfg.write(
@@ -219,6 +219,19 @@ def test_types_hook_repository(cap_out, store, tempdir_factory):
    assert b'bar.notpy' not in printed


def test_types_or_hook_repository(cap_out, store, tempdir_factory):
    git_path = make_consuming_repo(tempdir_factory, 'types_or_repo')
    with cwd(git_path):
        stage_a_file('bar.notpy')
        stage_a_file('bar.pxd')
        stage_a_file('bar.py')
        ret, printed = _do_run(cap_out, store, git_path, run_opts())
        assert ret == 1
        assert b'bar.notpy' not in printed
        assert b'bar.pxd' in printed
        assert b'bar.py' in printed


def test_exclude_types_hook_repository(cap_out, store, tempdir_factory):
    git_path = make_consuming_repo(tempdir_factory, 'exclude_types_repo')
    with cwd(git_path):
@@ -951,6 +964,27 @@ def test_classifier_does_not_normalize_backslashes_non_windows(tmpdir):
    assert classifier.filenames == [r'a/b\c']


def test_classifier_empty_types_or(tmpdir):
    tmpdir.join('bar').ensure()
    os.symlink(tmpdir.join('bar'), tmpdir.join('foo'))
    with tmpdir.as_cwd():
        classifier = Classifier(('foo', 'bar'))
        for_symlink = classifier.by_types(
            classifier.filenames,
            types=['symlink'],
            types_or=[],
            exclude_types=[],
        )
        for_file = classifier.by_types(
            classifier.filenames,
            types=['file'],
            types_or=[],
            exclude_types=[],
        )
        assert for_symlink == ['foo']
        assert for_file == ['bar']


@pytest.fixture
def some_filenames():
    return (
@@ -23,6 +23,13 @@ def test_read_pyvenv_cfg(tmpdir):
    assert python._read_pyvenv_cfg(pyvenv_cfg) == expected


def test_read_pyvenv_cfg_non_utf8(tmpdir):
    pyvenv_cfg = tmpdir.join('pyvenv_cfg')
    pyvenv_cfg.write_binary('hello = hello john.š\n'.encode())
    expected = {'hello': 'hello john.š'}
    assert python._read_pyvenv_cfg(pyvenv_cfg) == expected


def test_norm_version_expanduser():
    home = os.path.expanduser('~')
    if os.name == 'nt':  # pragma: nt cover
@@ -901,6 +901,7 @@ def test_manifest_hooks(tempdir_factory, store):
            'post-commit', 'manual', 'post-checkout', 'push',
        ),
        types=['file'],
        types_or=[],
        verbose=False,
    )
@@ -195,3 +195,12 @@ def test_xargs_color_true_makes_tty():
    )
    assert retcode == 0
    assert out == b'True\n'


@pytest.mark.xfail(os.name == 'posix', reason='nt only')
@pytest.mark.parametrize('filename', ('t.bat', 't.cmd', 'T.CMD'))
def test_xargs_with_batch_files(tmpdir, filename):
    f = tmpdir.join(filename)
    f.write('echo it works\n')
    retcode, out = xargs.xargs((str(f),), ('x',) * 8192)
    assert retcode == 0, (retcode, out)