pre-commit-hooks: python3.6+
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1bcc8c8..464609b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -28,17 +28,22 @@
     rev: v1.9.0
     hooks:
     -   id: reorder-python-imports
-        language_version: python3
+        args: [--py3-plus]
 -   repo: https://github.com/asottile/pyupgrade
     rev: v1.26.2
     hooks:
     -   id: pyupgrade
+        args: [--py36-plus]
 -   repo: https://github.com/asottile/add-trailing-comma
     rev: v1.5.0
     hooks:
     -   id: add-trailing-comma
+        args: [--py36-plus]
+-   repo: https://github.com/asottile/setup-cfg-fmt
+    rev: v1.6.0
+    hooks:
+    -   id: setup-cfg-fmt
 -   repo: https://github.com/pre-commit/mirrors-mypy
     rev: v0.761
     hooks:
     -   id: mypy
-        language_version: python3
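
Note on the new hook arguments: `--py3-plus` lets reorder-python-imports drop `__future__` imports, and `--py36-plus` lets pyupgrade and add-trailing-comma assume 3.6+ syntax. A rough before/after sketch of the kind of rewrite these arguments drive (hypothetical sample code, not from this repo):

    from __future__ import unicode_literals  # removed: implied on Python 3

    def greet(name):
        return 'hello, {}!'.format(name)      # rewritten to an f-string

    def greet_py36(name: str) -> str:
        return f'hello, {name}!'

    print(greet('world'), greet_py36('world'))
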
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 1337dc6..dc3a57a 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -16,9 +16,9 @@
 -   template: job--pre-commit.yml@asottile
 -   template: job--python-tox.yml@asottile
     parameters:
-        toxenvs: [py27, py37]
+        toxenvs: [py38]
         os: windows
 -   template: job--python-tox.yml@asottile
     parameters:
-        toxenvs: [pypy, pypy3, py27, py36, py37]
+        toxenvs: [pypy3, py36, py37, py38]
         os: linux
diff --git a/pre_commit_hooks/autopep8_wrapper.py b/pre_commit_hooks/autopep8_wrapper.py
index 8b69a04..78a1cce 100644
--- a/pre_commit_hooks/autopep8_wrapper.py
+++ b/pre_commit_hooks/autopep8_wrapper.py
@@ -1,9 +1,4 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
-
-def main():  # type: () -> int
+def main() -> int:
     raise SystemExit(
         'autopep8-wrapper is deprecated. Instead use autopep8 directly via '
         'https://github.com/pre-commit/mirrors-autopep8',
diff --git a/pre_commit_hooks/check_added_large_files.py b/pre_commit_hooks/check_added_large_files.py
index be39498..91f5754 100644
--- a/pre_commit_hooks/check_added_large_files.py
+++ b/pre_commit_hooks/check_added_large_files.py
@@ -1,13 +1,7 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import json
 import math
 import os
-from typing import Iterable
 from typing import Optional
 from typing import Sequence
 from typing import Set
@@ -17,7 +11,7 @@
 from pre_commit_hooks.util import cmd_output


-def lfs_files():  # type: () -> Set[str]
+def lfs_files() -> Set[str]:
     try:
         # Introduced in git-lfs 2.2.0, first working in 2.2.1
         lfs_ret = cmd_output('git', 'lfs', 'status', '--json')
@@ -27,23 +21,20 @@
     return set(json.loads(lfs_ret)['files'])


-def find_large_added_files(filenames, maxkb):
-    # type: (Iterable[str], int) -> int
+def find_large_added_files(filenames: Sequence[str], maxkb: int) -> int:
     # Find all added files that are also in the list of files pre-commit tells
     # us about
-    filenames = (added_files() & set(filenames)) - lfs_files()
-
     retv = 0
-    for filename in filenames:
+    for filename in (added_files() & set(filenames)) - lfs_files():
         kb = int(math.ceil(os.stat(filename).st_size / 1024))
         if kb > maxkb:
-            print('{} ({} KB) exceeds {} KB.'.format(filename, kb, maxkb))
+            print(f'{filename} ({kb} KB) exceeds {maxkb} KB.')
             retv = 1

     return retv


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument(
         'filenames', nargs='*',
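
This hunk shows the conversion pattern repeated across the commit: Python 2 style type comments become real annotations and `.format()` calls become f-strings. A standalone sketch of the same transformation (illustrative names only):

    from typing import Optional, Sequence

    def describe_old(filenames, maxkb):
        # type: (Sequence[str], int) -> Optional[str]
        return '{} files, limit {} KB'.format(len(filenames), maxkb)

    def describe_new(filenames: Sequence[str], maxkb: int) -> Optional[str]:
        return f'{len(filenames)} files, limit {maxkb} KB'

    print(describe_old(['a.py'], 500))
    print(describe_new(['a.py'], 500))
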
diff --git a/pre_commit_hooks/check_ast.py b/pre_commit_hooks/check_ast.py
index cb33ea0..2be6e1a 100644
--- a/pre_commit_hooks/check_ast.py
+++ b/pre_commit_hooks/check_ast.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import ast
 import platform
@@ -11,7 +7,7 @@
 from typing import Sequence


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*')
     args = parser.parse_args(argv)
@@ -23,15 +19,11 @@
             with open(filename, 'rb') as f:
                 ast.parse(f.read(), filename=filename)
         except SyntaxError:
-            print(
-                '{}: failed parsing with {} {}:'.format(
-                    filename,
-                    platform.python_implementation(),
-                    sys.version.partition(' ')[0],
-                ),
-            )
+            impl = platform.python_implementation()
+            version = sys.version.split()[0]
+            print(f'{filename}: failed parsing with {impl} {version}:')
             tb = '    ' + traceback.format_exc().replace('\n', '\n    ')
-            print('\n{}'.format(tb))
+            print(f'\n{tb}')
             retval = 1
     return retval
diff --git a/pre_commit_hooks/check_builtin_literals.py b/pre_commit_hooks/check_builtin_literals.py
index 4ddaa8c..6bcd838 100644
--- a/pre_commit_hooks/check_builtin_literals.py
+++ b/pre_commit_hooks/check_builtin_literals.py
@@ -1,10 +1,7 @@
-from __future__ import unicode_literals
-
 import argparse
 import ast
-import collections
-import sys
 from typing import List
+from typing import NamedTuple
 from typing import Optional
 from typing import Sequence
 from typing import Set
@@ -21,23 +18,26 @@
 }


-Call = collections.namedtuple('Call', ['name', 'line', 'column'])
+class Call(NamedTuple):
+    name: str
+    line: int
+    column: int


 class Visitor(ast.NodeVisitor):
-    def __init__(self, ignore=None, allow_dict_kwargs=True):
-        # type: (Optional[Sequence[str]], bool) -> None
-        self.builtin_type_calls = []  # type: List[Call]
+    def __init__(
+            self,
+            ignore: Optional[Sequence[str]] = None,
+            allow_dict_kwargs: bool = True,
+    ) -> None:
+        self.builtin_type_calls: List[Call] = []
         self.ignore = set(ignore) if ignore else set()
         self.allow_dict_kwargs = allow_dict_kwargs

-    def _check_dict_call(self, node):  # type: (ast.Call) -> bool
-        return (
-            self.allow_dict_kwargs and
-            (getattr(node, 'kwargs', None) or getattr(node, 'keywords', None))
-        )
+    def _check_dict_call(self, node: ast.Call) -> bool:
+        return self.allow_dict_kwargs and bool(node.keywords)

-    def visit_Call(self, node):  # type: (ast.Call) -> None
+    def visit_Call(self, node: ast.Call) -> None:
         if not isinstance(node.func, ast.Name):
             # Ignore functions that are object attributes (`foo.bar()`).
             # Assume that if the user calls `builtins.list()`, they know what
@@ -54,8 +54,11 @@
         )


-def check_file(filename, ignore=None, allow_dict_kwargs=True):
-    # type: (str, Optional[Sequence[str]], bool) -> List[Call]
+def check_file(
+        filename: str,
+        ignore: Optional[Sequence[str]] = None,
+        allow_dict_kwargs: bool = True,
+) -> List[Call]:
     with open(filename, 'rb') as f:
         tree = ast.parse(f.read(), filename=filename)
     visitor = Visitor(ignore=ignore, allow_dict_kwargs=allow_dict_kwargs)
@@ -63,11 +66,11 @@
     return visitor.builtin_type_calls


-def parse_ignore(value):  # type: (str) -> Set[str]
+def parse_ignore(value: str) -> Set[str]:
     return set(value.split(','))


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*')
     parser.add_argument('--ignore', type=parse_ignore, default=set())
@@ -93,15 +96,11 @@
         rc = rc or 1
         for call in calls:
             print(
-                '{filename}:{call.line}:{call.column}: '
-                'replace {call.name}() with {replacement}'.format(
-                    filename=filename,
-                    call=call,
-                    replacement=BUILTIN_TYPES[call.name],
-                ),
+                f'{filename}:{call.line}:{call.column}: '
+                f'replace {call.name}() with {BUILTIN_TYPES[call.name]}',
             )
     return rc


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
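
collections.namedtuple calls become typing.NamedTuple classes here; the instances still behave as tuples, but the fields gain declared types. A minimal sketch of the equivalence:

    import collections
    from typing import NamedTuple

    CallOld = collections.namedtuple('CallOld', ['name', 'line', 'column'])

    class Call(NamedTuple):
        name: str
        line: int
        column: int

    # Both are tuples under the hood; the class form documents field types.
    assert CallOld('dict', 1, 0)[0] == Call('dict', 1, 0).name == 'dict'
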
diff --git a/pre_commit_hooks/check_byte_order_marker.py b/pre_commit_hooks/check_byte_order_marker.py
index 10667c3..c0c2969 100644
--- a/pre_commit_hooks/check_byte_order_marker.py
+++ b/pre_commit_hooks/check_byte_order_marker.py
@@ -1,13 +1,9 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 from typing import Optional
 from typing import Sequence


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to check')
     args = parser.parse_args(argv)
@@ -18,7 +14,7 @@
         with open(filename, 'rb') as f:
             if f.read(3) == b'\xef\xbb\xbf':
                 retv = 1
-                print('{}: Has a byte-order marker'.format(filename))
+                print(f'{filename}: Has a byte-order marker')

     return retv
diff --git a/pre_commit_hooks/check_case_conflict.py b/pre_commit_hooks/check_case_conflict.py
index e343d61..6b8ba82 100644
--- a/pre_commit_hooks/check_case_conflict.py
+++ b/pre_commit_hooks/check_case_conflict.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 from typing import Iterable
 from typing import Optional
@@ -12,11 +8,11 @@
 from pre_commit_hooks.util import cmd_output


-def lower_set(iterable):  # type: (Iterable[str]) -> Set[str]
+def lower_set(iterable: Iterable[str]) -> Set[str]:
     return {x.lower() for x in iterable}


-def find_conflicting_filenames(filenames):  # type: (Sequence[str]) -> int
+def find_conflicting_filenames(filenames: Sequence[str]) -> int:
     repo_files = set(cmd_output('git', 'ls-files').splitlines())
     relevant_files = set(filenames) | added_files()
     repo_files -= relevant_files
@@ -39,13 +35,13 @@
             if x.lower() in conflicts
         ]
         for filename in sorted(conflicting_files):
-            print('Case-insensitivity conflict found: {}'.format(filename))
+            print(f'Case-insensitivity conflict found: {filename}')
             retv = 1

     return retv


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument(
         'filenames', nargs='*',
diff --git a/pre_commit_hooks/check_docstring_first.py b/pre_commit_hooks/check_docstring_first.py
index 6c19381..875c0fb 100644
--- a/pre_commit_hooks/check_docstring_first.py
+++ b/pre_commit_hooks/check_docstring_first.py
@@ -1,30 +1,17 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import io
 import tokenize
+from tokenize import tokenize as tokenize_tokenize
 from typing import Optional
 from typing import Sequence

-import six
-
-if six.PY2:  # pragma: no cover (PY2)
-    from tokenize import generate_tokens as tokenize_tokenize
-    OTHER_NON_CODE = ()
-else:  # pragma: no cover (PY3)
-    from tokenize import tokenize as tokenize_tokenize
-    OTHER_NON_CODE = (tokenize.ENCODING,)
-
-NON_CODE_TOKENS = frozenset(
-    (tokenize.COMMENT, tokenize.ENDMARKER, tokenize.NEWLINE, tokenize.NL) +
-    OTHER_NON_CODE,
-)
+NON_CODE_TOKENS = frozenset((
+    tokenize.COMMENT, tokenize.ENDMARKER, tokenize.NEWLINE, tokenize.NL,
+    tokenize.ENCODING,
+))


-def check_docstring_first(src, filename='<unknown>'):
-    # type: (bytes, str) -> int
+def check_docstring_first(src: bytes, filename: str = '<unknown>') -> int:
     """Returns nonzero if the source has what looks like a docstring that is
     not at the beginning of the source.
@@ -40,18 +27,14 @@
         if tok_type == tokenize.STRING and scol == 0:
             if found_docstring_line is not None:
                 print(
-                    '{}:{} Multiple module docstrings '
-                    '(first docstring on line {}).'.format(
-                        filename, sline, found_docstring_line,
-                    ),
+                    f'{filename}:{sline} Multiple module docstrings '
+                    f'(first docstring on line {found_docstring_line}).',
                 )
                 return 1
             elif found_code_line is not None:
                 print(
-                    '{}:{} Module docstring appears after code '
-                    '(code seen on line {}).'.format(
-                        filename, sline, found_code_line,
-                    ),
+                    f'{filename}:{sline} Module docstring appears after code '
+                    f'(code seen on line {found_code_line}).',
                 )
                 return 1
             else:
@@ -62,7 +45,7 @@
     return 0


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*')
     args = parser.parse_args(argv)
diff --git a/pre_commit_hooks/check_executables_have_shebangs.py b/pre_commit_hooks/check_executables_have_shebangs.py
index 4db2f9f..c34c7b7 100644
--- a/pre_commit_hooks/check_executables_have_shebangs.py
+++ b/pre_commit_hooks/check_executables_have_shebangs.py
@@ -1,28 +1,22 @@
 """Check that executable text files have a shebang."""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
-import pipes
+import shlex
 import sys
 from typing import Optional
 from typing import Sequence


-def check_has_shebang(path):  # type: (str) -> int
+def check_has_shebang(path: str) -> int:
     with open(path, 'rb') as f:
         first_bytes = f.read(2)

     if first_bytes != b'#!':
+        quoted = shlex.quote(path)
         print(
-            '{path}: marked executable but has no (or invalid) shebang!\n'
-            "  If it isn't supposed to be executable, try: chmod -x {quoted}\n"
-            '  If it is supposed to be executable, double-check its shebang.'
-            .format(
-                path=path,
-                quoted=pipes.quote(path),
-            ),
+            f'{path}: marked executable but has no (or invalid) shebang!\n'
+            f"  If it isn't supposed to be executable, try: "
+            f'`chmod -x {quoted}`\n'
+            f'  If it is supposed to be executable, double-check its shebang.',
             file=sys.stderr,
         )
         return 1
@@ -30,7 +24,7 @@
         return 0


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser(description=__doc__)
     parser.add_argument('filenames', nargs='*')
     args = parser.parse_args(argv)
diff --git a/pre_commit_hooks/check_json.py b/pre_commit_hooks/check_json.py
index f26e0a5..25dbfd9 100644
--- a/pre_commit_hooks/check_json.py
+++ b/pre_commit_hooks/check_json.py
@@ -1,27 +1,25 @@
-from __future__ import print_function
-
 import argparse
-import io
 import json
-import sys
 from typing import Optional
 from typing import Sequence


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to check.')
     args = parser.parse_args(argv)

     retval = 0
     for filename in args.filenames:
-        try:
-            json.load(io.open(filename, encoding='UTF-8'))
-        except (ValueError, UnicodeDecodeError) as exc:
-            print('{}: Failed to json decode ({})'.format(filename, exc))
-            retval = 1
+        with open(filename, 'rb') as f:
+            try:
+                json.load(f)
+            # TODO: need UnicodeDecodeError?
+            except (ValueError, UnicodeDecodeError) as exc:
+                print(f'{filename}: Failed to json decode ({exc})')
+                retval = 1
     return retval


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
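
Opening the file in binary mode works because, on Python 3.6+, json.load accepts a bytes stream and detects UTF-8/UTF-16/UTF-32 itself, so the explicit io.open(..., encoding='UTF-8') is no longer needed. A minimal sketch of the pattern:

    import json
    import tempfile

    with tempfile.NamedTemporaryFile('wb', suffix='.json', delete=False) as f:
        f.write(b'{"key": "value"}')

    # Binary handle, no encoding argument: json sniffs the encoding itself.
    with open(f.name, 'rb') as rb:
        print(json.load(rb))
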
diff --git a/pre_commit_hooks/check_merge_conflict.py b/pre_commit_hooks/check_merge_conflict.py
index 2a03c3a..c20a8af 100644
--- a/pre_commit_hooks/check_merge_conflict.py
+++ b/pre_commit_hooks/check_merge_conflict.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
 import argparse
 import os.path
 from typing import Optional
@@ -12,10 +10,9 @@
     b'=======\n',
     b'>>>>>>> ',
 ]
-WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'


-def is_in_merge():  # type: () -> int
+def is_in_merge() -> int:
     return (
         os.path.exists(os.path.join('.git', 'MERGE_MSG')) and
         (
@@ -26,7 +23,7 @@
     )


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*')
     parser.add_argument('--assume-in-merge', action='store_true')
@@ -42,9 +39,8 @@
             for pattern in CONFLICT_PATTERNS:
                 if line.startswith(pattern):
                     print(
-                        WARNING_MSG.format(
-                            pattern.decode(), filename, i + 1,
-                        ),
+                        f'Merge conflict string "{pattern.decode()}" '
+                        f'found in {filename}:{i + 1}',
                     )
                     retcode = 1
diff --git a/pre_commit_hooks/check_symlinks.py b/pre_commit_hooks/check_symlinks.py
index 736bf99..f014714 100644
--- a/pre_commit_hooks/check_symlinks.py
+++ b/pre_commit_hooks/check_symlinks.py
@@ -1,14 +1,10 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import os.path
 from typing import Optional
 from typing import Sequence


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser(description='Checks for broken symlinks.')
     parser.add_argument('filenames', nargs='*', help='Filenames to check')
     args = parser.parse_args(argv)
@@ -20,7 +16,7 @@
             os.path.islink(filename) and
             not os.path.exists(filename)
         ):  # pragma: no cover (symlink support required)
-            print('{}: Broken symlink'.format(filename))
+            print(f'{filename}: Broken symlink')
             retv = 1

     return retv
diff --git a/pre_commit_hooks/check_toml.py b/pre_commit_hooks/check_toml.py
index e16e17c..51a1f15 100644
--- a/pre_commit_hooks/check_toml.py
+++ b/pre_commit_hooks/check_toml.py
@@ -1,14 +1,11 @@
-from __future__ import print_function
-
 import argparse
-import sys
 from typing import Optional
 from typing import Sequence

 import toml


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to check.')
     args = parser.parse_args(argv)
@@ -19,10 +16,10 @@
             with open(filename) as f:
                 toml.load(f)
         except toml.TomlDecodeError as exc:
-            print('{}: {}'.format(filename, exc))
+            print(f'{filename}: {exc}')
             retval = 1
     return retval


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
diff --git a/pre_commit_hooks/check_vcs_permalinks.py b/pre_commit_hooks/check_vcs_permalinks.py
index f6e2a7d..bf698e1 100644
--- a/pre_commit_hooks/check_vcs_permalinks.py
+++ b/pre_commit_hooks/check_vcs_permalinks.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import re
 import sys
@@ -14,19 +10,19 @@
 )


-def _check_filename(filename):  # type: (str) -> int
+def _check_filename(filename: str) -> int:
     retv = 0
     with open(filename, 'rb') as f:
         for i, line in enumerate(f, 1):
             if GITHUB_NON_PERMALINK.search(line):
-                sys.stdout.write('{}:{}:'.format(filename, i))
+                sys.stdout.write(f'{filename}:{i}:')
                 sys.stdout.flush()
-                getattr(sys.stdout, 'buffer', sys.stdout).write(line)
+                sys.stdout.buffer.write(line)
                 retv = 1
     return retv


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*')
     args = parser.parse_args(argv)
diff --git a/pre_commit_hooks/check_xml.py b/pre_commit_hooks/check_xml.py
index 66e10ba..eddfdf9 100644
--- a/pre_commit_hooks/check_xml.py
+++ b/pre_commit_hooks/check_xml.py
@@ -1,30 +1,26 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
-import io
-import sys
 import xml.sax.handler
 from typing import Optional
 from typing import Sequence


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='XML filenames to check.')
     args = parser.parse_args(argv)

     retval = 0
+    handler = xml.sax.handler.ContentHandler()
     for filename in args.filenames:
         try:
-            with io.open(filename, 'rb') as xml_file:
-                xml.sax.parse(xml_file, xml.sax.handler.ContentHandler())
+            with open(filename, 'rb') as xml_file:
+                # https://github.com/python/typeshed/pull/3725
+                xml.sax.parse(xml_file, handler)  # type: ignore
         except xml.sax.SAXException as exc:
-            print('{}: Failed to xml parse ({})'.format(filename, exc))
+            print(f'{filename}: Failed to xml parse ({exc})')
             retval = 1
     return retval


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
diff --git a/pre_commit_hooks/check_yaml.py b/pre_commit_hooks/check_yaml.py
index 5b66485..7453f6f 100644
--- a/pre_commit_hooks/check_yaml.py
+++ b/pre_commit_hooks/check_yaml.py
@@ -1,11 +1,7 @@
-from __future__ import print_function
-
 import argparse
-import collections
-import io
-import sys
 from typing import Any
 from typing import Generator
+from typing import NamedTuple
 from typing import Optional
 from typing import Sequence
@@ -14,20 +10,24 @@
 yaml = ruamel.yaml.YAML(typ='safe')


-def _exhaust(gen):  # type: (Generator[str, None, None]) -> None
+def _exhaust(gen: Generator[str, None, None]) -> None:
     for _ in gen:
         pass


-def _parse_unsafe(*args, **kwargs):  # type: (*Any, **Any) -> None
+def _parse_unsafe(*args: Any, **kwargs: Any) -> None:
     _exhaust(yaml.parse(*args, **kwargs))


-def _load_all(*args, **kwargs):  # type: (*Any, **Any) -> None
+def _load_all(*args: Any, **kwargs: Any) -> None:
     _exhaust(yaml.load_all(*args, **kwargs))


-Key = collections.namedtuple('Key', ('multi', 'unsafe'))
+class Key(NamedTuple):
+    multi: bool
+    unsafe: bool
+
+
 LOAD_FNS = {
     Key(multi=False, unsafe=False): yaml.load,
     Key(multi=False, unsafe=True): _parse_unsafe,
@@ -36,7 +36,7 @@
 }


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument(
         '-m', '--multi', '--allow-multiple-documents', action='store_true',
@@ -59,7 +59,7 @@
     retval = 0
     for filename in args.filenames:
         try:
-            with io.open(filename, encoding='UTF-8') as f:
+            with open(filename, encoding='UTF-8') as f:
                 load_fn(f)
         except ruamel.yaml.YAMLError as exc:
             print(exc)
@@ -68,4 +68,4 @@


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
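
The Key NamedTuple doubles as the key of the loader dispatch table: instances hash and compare like plain tuples, so the (multi, unsafe) flag pair picks the load function. A standalone sketch of that dispatch pattern (dummy loaders, not the real ruamel.yaml calls):

    from typing import Callable, Dict, NamedTuple

    class Key(NamedTuple):
        multi: bool
        unsafe: bool

    def load_single(src: str) -> str:
        return f'single-document load of {src}'

    def load_multi(src: str) -> str:
        return f'multi-document load of {src}'

    LOAD_FNS: Dict[Key, Callable[[str], str]] = {
        Key(multi=False, unsafe=False): load_single,
        Key(multi=True, unsafe=False): load_multi,
    }

    # NamedTuple instances hash and compare like tuples, so lookup just works.
    print(LOAD_FNS[Key(multi=True, unsafe=False)]('example.yaml'))
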
diff --git a/pre_commit_hooks/debug_statement_hook.py b/pre_commit_hooks/debug_statement_hook.py
index 4864873..00423ac 100644
--- a/pre_commit_hooks/debug_statement_hook.py
+++ b/pre_commit_hooks/debug_statement_hook.py
@@ -1,35 +1,38 @@
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import ast
-import collections
 import traceback
 from typing import List
+from typing import NamedTuple
 from typing import Optional
 from typing import Sequence


 DEBUG_STATEMENTS = {'pdb', 'ipdb', 'pudb', 'q', 'rdb', 'rpdb'}
-Debug = collections.namedtuple('Debug', ('line', 'col', 'name', 'reason'))
+
+
+class Debug(NamedTuple):
+    line: int
+    col: int
+    name: str
+    reason: str


 class DebugStatementParser(ast.NodeVisitor):
-    def __init__(self):  # type: () -> None
-        self.breakpoints = []  # type: List[Debug]
+    def __init__(self) -> None:
+        self.breakpoints: List[Debug] = []

-    def visit_Import(self, node):  # type: (ast.Import) -> None
+    def visit_Import(self, node: ast.Import) -> None:
         for name in node.names:
             if name.name in DEBUG_STATEMENTS:
                 st = Debug(node.lineno, node.col_offset, name.name, 'imported')
                 self.breakpoints.append(st)

-    def visit_ImportFrom(self, node):  # type: (ast.ImportFrom) -> None
+    def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
         if node.module in DEBUG_STATEMENTS:
             st = Debug(node.lineno, node.col_offset, node.module, 'imported')
             self.breakpoints.append(st)

-    def visit_Call(self, node):  # type: (ast.Call) -> None
+    def visit_Call(self, node: ast.Call) -> None:
         """python3.7+ breakpoint()"""
         if isinstance(node.func, ast.Name) and node.func.id == 'breakpoint':
             st = Debug(node.lineno, node.col_offset, node.func.id, 'called')
@@ -37,12 +40,12 @@
         self.generic_visit(node)


-def check_file(filename):  # type: (str) -> int
+def check_file(filename: str) -> int:
     try:
         with open(filename, 'rb') as f:
             ast_obj = ast.parse(f.read(), filename=filename)
     except SyntaxError:
-        print('{} - Could not parse ast'.format(filename))
+        print(f'{filename} - Could not parse ast')
         print()
         print('\t' + traceback.format_exc().replace('\n', '\n\t'))
         print()
@@ -52,16 +55,12 @@
     visitor.visit(ast_obj)

     for bp in visitor.breakpoints:
-        print(
-            '{}:{}:{} - {} {}'.format(
-                filename, bp.line, bp.col, bp.name, bp.reason,
-            ),
-        )
+        print(f'{filename}:{bp.line}:{bp.col} - {bp.name} {bp.reason}')

     return int(bool(visitor.breakpoints))


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to run')
     args = parser.parse_args(argv)
diff --git a/pre_commit_hooks/detect_aws_credentials.py b/pre_commit_hooks/detect_aws_credentials.py
index da80ab4..fe18f4d 100644
--- a/pre_commit_hooks/detect_aws_credentials.py
+++ b/pre_commit_hooks/detect_aws_credentials.py
@@ -1,18 +1,19 @@
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
+import configparser
 import os
-from typing import Dict
 from typing import List
+from typing import NamedTuple
 from typing import Optional
 from typing import Sequence
 from typing import Set

-from six.moves import configparser
+
+class BadFile(NamedTuple):
+    filename: str
+    key: str


-def get_aws_cred_files_from_env():  # type: () -> Set[str]
+def get_aws_cred_files_from_env() -> Set[str]:
     """Extract credential file paths from environment variables."""
     return {
         os.environ[env_var]
@@ -24,7 +25,7 @@
     }


-def get_aws_secrets_from_env():  # type: () -> Set[str]
+def get_aws_secrets_from_env() -> Set[str]:
     """Extract AWS secrets from environment variables."""
     keys = set()
     for env_var in (
@@ -35,7 +36,7 @@
     return keys


-def get_aws_secrets_from_file(credentials_file):  # type: (str) -> Set[str]
+def get_aws_secrets_from_file(credentials_file: str) -> Set[str]:
     """Extract AWS secrets from configuration files.

     Read an ini-style configuration file and return a set with all found AWS
@@ -66,8 +67,10 @@
     return keys


-def check_file_for_aws_keys(filenames, keys):
-    # type: (Sequence[str], Set[str]) -> List[Dict[str, str]]
+def check_file_for_aws_keys(
+        filenames: Sequence[str],
+        keys: Set[str],
+) -> List[BadFile]:
     """Check if files contain AWS secrets.

     Return a list of all files containing AWS secrets and keys found, with all
@@ -82,13 +85,11 @@
             # naively match the entire file, low chance of incorrect
             # collision
             if key in text_body:
-                bad_files.append({
-                    'filename': filename, 'key': key[:4] + '*' * 28,
-                })
+                bad_files.append(BadFile(filename, key[:4].ljust(28, '*')))
     return bad_files


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='+', help='Filenames to run')
     parser.add_argument(
@@ -117,7 +118,7 @@
     # of files to to gather AWS secrets from.
     credential_files |= get_aws_cred_files_from_env()

-    keys = set()  # type: Set[str]
+    keys: Set[str] = set()
     for credential_file in credential_files:
         keys |= get_aws_secrets_from_file(credential_file)

@@ -139,7 +140,7 @@
     bad_filenames = check_file_for_aws_keys(args.filenames, keys)
     if bad_filenames:
         for bad_file in bad_filenames:
-            print('AWS secret found in {filename}: {key}'.format(**bad_file))
+            print(f'AWS secret found in {bad_file.filename}: {bad_file.key}')
         return 1
     else:
         return 0
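
With six gone, configparser comes straight from the standard library, and matches are reported as a typed BadFile tuple instead of an ad-hoc dict. A reduced sketch of the ini parsing involved (fake, illustration-only credentials):

    import configparser
    import textwrap

    SAMPLE = textwrap.dedent("""\
        [default]
        aws_access_key_id = AKIAXXXXXXXXXXXXXXXX
        aws_secret_access_key = fake+secret+for+illustration+only
    """)

    parser = configparser.ConfigParser()
    parser.read_string(SAMPLE)
    secrets = {
        parser[section]['aws_secret_access_key']
        for section in parser.sections()
        if parser.has_option(section, 'aws_secret_access_key')
    }
    print(sorted(secrets))
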
diff --git a/pre_commit_hooks/detect_private_key.py b/pre_commit_hooks/detect_private_key.py
index d31957d..7bbc2f9 100644
--- a/pre_commit_hooks/detect_private_key.py
+++ b/pre_commit_hooks/detect_private_key.py
@@ -1,7 +1,4 @@
-from __future__ import print_function
-
 import argparse
-import sys
 from typing import Optional
 from typing import Sequence
@@ -17,7 +14,7 @@
 ]


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to check')
     args = parser.parse_args(argv)
@@ -32,11 +29,11 @@

     if private_key_files:
         for private_key_file in private_key_files:
-            print('Private key found: {}'.format(private_key_file))
+            print(f'Private key found: {private_key_file}')
         return 1
     else:
         return 0


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
diff --git a/pre_commit_hooks/end_of_file_fixer.py b/pre_commit_hooks/end_of_file_fixer.py
index 4e77c94..1c07379 100644
--- a/pre_commit_hooks/end_of_file_fixer.py
+++ b/pre_commit_hooks/end_of_file_fixer.py
@@ -1,20 +1,16 @@
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import os
-import sys
 from typing import IO
 from typing import Optional
 from typing import Sequence


-def fix_file(file_obj):  # type: (IO[bytes]) -> int
+def fix_file(file_obj: IO[bytes]) -> int:
     # Test for newline at end of file
     # Empty files will throw IOError here
     try:
         file_obj.seek(-1, os.SEEK_END)
-    except IOError:
+    except OSError:
         return 0
     last_character = file_obj.read(1)
     # last_character will be '' for an empty file
@@ -52,7 +48,7 @@
     return 0


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to fix')
     args = parser.parse_args(argv)
@@ -64,11 +60,11 @@
         with open(filename, 'rb+') as file_obj:
             ret_for_file = fix_file(file_obj)
             if ret_for_file:
-                print('Fixing {}'.format(filename))
+                print(f'Fixing {filename}')
             retv |= ret_for_file

     return retv


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
diff --git a/pre_commit_hooks/file_contents_sorter.py b/pre_commit_hooks/file_contents_sorter.py
index 1598d2d..41ce306 100644
--- a/pre_commit_hooks/file_contents_sorter.py
+++ b/pre_commit_hooks/file_contents_sorter.py
@@ -9,10 +9,7 @@
 this hook on that file should reduce the instances of git merge conflicts and
 keep the file nicely ordered.
 """
-from __future__ import print_function
-
 import argparse
-import sys
 from typing import IO
 from typing import Optional
 from typing import Sequence
@@ -21,7 +18,7 @@
 FAIL = 1


-def sort_file_contents(f):  # type: (IO[bytes]) -> int
+def sort_file_contents(f: IO[bytes]) -> int:
     before = list(f)
     after = sorted([line.strip(b'\n\r') for line in before if line.strip()])
@@ -37,7 +34,7 @@
         return FAIL


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='+', help='Files to sort')
     args = parser.parse_args(argv)
@@ -49,7 +46,7 @@
             ret_for_file = sort_file_contents(file_obj)

             if ret_for_file:
-                print('Sorting {}'.format(arg))
+                print(f'Sorting {arg}')

             retv |= ret_for_file

@@ -57,4 +54,4 @@


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
diff --git a/pre_commit_hooks/fix_encoding_pragma.py b/pre_commit_hooks/fix_encoding_pragma.py
index 31bb52c..88d72ed 100644
--- a/pre_commit_hooks/fix_encoding_pragma.py
+++ b/pre_commit_hooks/fix_encoding_pragma.py
@@ -1,18 +1,13 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
-import collections
 from typing import IO
+from typing import NamedTuple
 from typing import Optional
 from typing import Sequence
-from typing import Union

 DEFAULT_PRAGMA = b'# -*- coding: utf-8 -*-'


-def has_coding(line):  # type: (bytes) -> bool
+def has_coding(line: bytes) -> bool:
     if not line.strip():
         return False
     return (
@@ -25,30 +20,30 @@
     )


-class ExpectedContents(
-    collections.namedtuple(
-        'ExpectedContents', ('shebang', 'rest', 'pragma_status', 'ending'),
-    ),
-):
-    """
-    pragma_status:
-    - True: has exactly the coding pragma expected
-    - False: missing coding pragma entirely
-    - None: has a coding pragma, but it does not match
-    """
-    __slots__ = ()
+class ExpectedContents(NamedTuple):
+    shebang: bytes
+    rest: bytes
+    # True: has exactly the coding pragma expected
+    # False: missing coding pragma entirely
+    # None: has a coding pragma, but it does not match
+    pragma_status: Optional[bool]
+    ending: bytes

     @property
-    def has_any_pragma(self):  # type: () -> bool
+    def has_any_pragma(self) -> bool:
         return self.pragma_status is not False

-    def is_expected_pragma(self, remove):  # type: (bool) -> bool
+    def is_expected_pragma(self, remove: bool) -> bool:
         expected_pragma_status = not remove
         return self.pragma_status is expected_pragma_status


-def _get_expected_contents(first_line, second_line, rest, expected_pragma):
-    # type: (bytes, bytes, bytes, bytes) -> ExpectedContents
+def _get_expected_contents(
+        first_line: bytes,
+        second_line: bytes,
+        rest: bytes,
+        expected_pragma: bytes,
+) -> ExpectedContents:
     ending = b'\r\n' if first_line.endswith(b'\r\n') else b'\n'

     if first_line.startswith(b'#!'):
@@ -60,7 +55,7 @@
         rest = second_line + rest

     if potential_coding.rstrip(b'\r\n') == expected_pragma:
-        pragma_status = True  # type: Optional[bool]
+        pragma_status: Optional[bool] = True
     elif has_coding(potential_coding):
         pragma_status = None
     else:
@@ -72,8 +67,11 @@
     )


-def fix_encoding_pragma(f, remove=False, expected_pragma=DEFAULT_PRAGMA):
-    # type: (IO[bytes], bool, bytes) -> int
+def fix_encoding_pragma(
+        f: IO[bytes],
+        remove: bool = False,
+        expected_pragma: bytes = DEFAULT_PRAGMA,
+) -> int:
     expected = _get_expected_contents(
         f.readline(), f.readline(), f.read(), expected_pragma,
     )
@@ -103,21 +101,20 @@
     return 1


-def _normalize_pragma(pragma):  # type: (Union[bytes, str]) -> bytes
-    if not isinstance(pragma, bytes):
-        pragma = pragma.encode('UTF-8')
-    return pragma.rstrip()
+def _normalize_pragma(pragma: str) -> bytes:
+    return pragma.encode().rstrip()


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser(
         'Fixes the encoding pragma of python files',
     )
     parser.add_argument('filenames', nargs='*', help='Filenames to fix')
     parser.add_argument(
         '--pragma', default=DEFAULT_PRAGMA, type=_normalize_pragma,
-        help='The encoding pragma to use. Default: {}'.format(
-            DEFAULT_PRAGMA.decode(),
+        help=(
+            f'The encoding pragma to use. '
+            f'Default: {DEFAULT_PRAGMA.decode()}'
         ),
     )
     parser.add_argument(
@@ -141,9 +138,7 @@
         retv |= file_ret
         if file_ret:
             print(
-                fmt.format(
-                    pragma=args.pragma.decode(), filename=filename,
-                ),
+                fmt.format(pragma=args.pragma.decode(), filename=filename),
             )

     return retv
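
The namedtuple-subclass-plus-__slots__ pattern collapses into a typing.NamedTuple body, which may carry properties and methods directly. A reduced sketch of that shape (only two of the real fields kept):

    from typing import NamedTuple, Optional

    class ExpectedContents(NamedTuple):
        rest: bytes
        # True: expected pragma present; False: none; None: a different pragma.
        pragma_status: Optional[bool]

        @property
        def has_any_pragma(self) -> bool:
            return self.pragma_status is not False

    print(ExpectedContents(b'x = 1\n', pragma_status=False).has_any_pragma)
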
diff --git a/pre_commit_hooks/forbid_new_submodules.py b/pre_commit_hooks/forbid_new_submodules.py
index bdbd6f7..c144d72 100644
--- a/pre_commit_hooks/forbid_new_submodules.py
+++ b/pre_commit_hooks/forbid_new_submodules.py
@@ -1,14 +1,10 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 from typing import Optional
 from typing import Sequence

 from pre_commit_hooks.util import cmd_output


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     # `argv` is ignored, pre-commit will send us a list of files that we
     # don't care about
     added_diff = cmd_output(
@@ -19,7 +15,7 @@
         metadata, filename = line.split('\t', 1)
         new_mode = metadata.split(' ')[1]
         if new_mode == '160000':
-            print('{}: new submodule introduced'.format(filename))
+            print(f'{filename}: new submodule introduced')
             retv = 1

     if retv:
diff --git a/pre_commit_hooks/mixed_line_ending.py b/pre_commit_hooks/mixed_line_ending.py
index 90aef03..0ef8e2c 100644
--- a/pre_commit_hooks/mixed_line_ending.py
+++ b/pre_commit_hooks/mixed_line_ending.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import collections
 from typing import Dict
@@ -17,7 +13,7 @@
 FIX_TO_LINE_ENDING = {'cr': CR, 'crlf': CRLF, 'lf': LF}


-def _fix(filename, contents, ending):  # type: (str, bytes, bytes) -> None
+def _fix(filename: str, contents: bytes, ending: bytes) -> None:
     new_contents = b''.join(
         line.rstrip(b'\r\n') + ending for line in contents.splitlines(True)
     )
@@ -25,11 +21,11 @@
         f.write(new_contents)


-def fix_filename(filename, fix):  # type: (str, str) -> int
+def fix_filename(filename: str, fix: str) -> int:
     with open(filename, 'rb') as f:
         contents = f.read()

-    counts = collections.defaultdict(int)  # type: Dict[bytes, int]
+    counts: Dict[bytes, int] = collections.defaultdict(int)

     for line in contents.splitlines(True):
         for ending in ALL_ENDINGS:
@@ -66,7 +62,7 @@
     return other_endings


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument(
         '-f', '--fix',
@@ -81,9 +77,9 @@
     for filename in args.filenames:
         if fix_filename(filename, args.fix):
             if args.fix == 'no':
-                print('{}: mixed line endings'.format(filename))
+                print(f'{filename}: mixed line endings')
             else:
-                print('{}: fixed mixed line endings'.format(filename))
+                print(f'{filename}: fixed mixed line endings')
             retv = 1

     return retv
diff --git a/pre_commit_hooks/no_commit_to_branch.py b/pre_commit_hooks/no_commit_to_branch.py
index 3131059..fb1506f 100644
--- a/pre_commit_hooks/no_commit_to_branch.py
+++ b/pre_commit_hooks/no_commit_to_branch.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
 import argparse
 import re
 from typing import AbstractSet
@@ -10,8 +8,10 @@
 from pre_commit_hooks.util import cmd_output


-def is_on_branch(protected, patterns=frozenset()):
-    # type: (AbstractSet[str], AbstractSet[str]) -> bool
+def is_on_branch(
+        protected: AbstractSet[str],
+        patterns: AbstractSet[str] = frozenset(),
+) -> bool:
     try:
         ref_name = cmd_output('git', 'symbolic-ref', 'HEAD')
     except CalledProcessError:
@@ -23,7 +23,7 @@
     )


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument(
         '-b', '--branch', action='append',
diff --git a/pre_commit_hooks/pretty_format_json.py b/pre_commit_hooks/pretty_format_json.py
index 7d515f4..25827dc 100644
--- a/pre_commit_hooks/pretty_format_json.py
+++ b/pre_commit_hooks/pretty_format_json.py
@@ -1,10 +1,5 @@
-from __future__ import print_function
-
 import argparse
-import io
 import json
-import sys
-from collections import OrderedDict
 from difflib import unified_diff
 from typing import List
 from typing import Mapping
@@ -13,38 +8,36 @@
 from typing import Tuple
 from typing import Union

-from six import text_type
-

-def _get_pretty_format(
-    contents, indent, ensure_ascii=True, sort_keys=True, top_keys=(),
-):  # type: (str, str, bool, bool, Sequence[str]) -> str
-    def pairs_first(pairs):
-        # type: (Sequence[Tuple[str, str]]) -> Mapping[str, str]
+def _get_pretty_format(
+        contents: str,
+        indent: str,
+        ensure_ascii: bool = True,
+        sort_keys: bool = True,
+        top_keys: Sequence[str] = (),
+) -> str:
+    def pairs_first(pairs: Sequence[Tuple[str, str]]) -> Mapping[str, str]:
         before = [pair for pair in pairs if pair[0] in top_keys]
         before = sorted(before, key=lambda x: top_keys.index(x[0]))
         after = [pair for pair in pairs if pair[0] not in top_keys]
         if sort_keys:
-            after = sorted(after, key=lambda x: x[0])
-        return OrderedDict(before + after)
+            after.sort()
+        return dict(before + after)

     json_pretty = json.dumps(
         json.loads(contents, object_pairs_hook=pairs_first),
         indent=indent,
         ensure_ascii=ensure_ascii,
-        # Workaround for https://bugs.python.org/issue16333
-        separators=(',', ': '),
     )
-    # Ensure unicode (Py2) and add the newline that dumps does not end with.
-    return text_type(json_pretty) + '\n'
+    return f'{json_pretty}\n'


-def _autofix(filename, new_contents):  # type: (str, str) -> None
-    print('Fixing file {}'.format(filename))
-    with io.open(filename, 'w', encoding='UTF-8') as f:
+def _autofix(filename: str, new_contents: str) -> None:
+    print(f'Fixing file {filename}')
+    with open(filename, 'w', encoding='UTF-8') as f:
         f.write(new_contents)


-def parse_num_to_int(s):  # type: (str) -> Union[int, str]
+def parse_num_to_int(s: str) -> Union[int, str]:
     """Convert string numbers to int, leaving strings as is."""
     try:
         return int(s)
@@ -52,18 +45,18 @@
         return s


-def parse_topkeys(s):  # type: (str) -> List[str]
+def parse_topkeys(s: str) -> List[str]:
     return s.split(',')


-def get_diff(source, target, file):  # type: (str, str, str) -> str
+def get_diff(source: str, target: str, file: str) -> str:
     source_lines = source.splitlines(True)
     target_lines = target.splitlines(True)
     diff = unified_diff(source_lines, target_lines, fromfile=file, tofile=file)
     return ''.join(diff)


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument(
         '--autofix',
@@ -110,7 +103,7 @@
     status = 0

     for json_file in args.filenames:
-        with io.open(json_file, encoding='UTF-8') as f:
+        with open(json_file, encoding='UTF-8') as f:
             contents = f.read()

         try:
@@ -131,8 +124,8 @@
             status = 1
         except ValueError:
             print(
-                'Input File {} is not a valid JSON, consider using check-json'
-                .format(json_file),
+                f'Input File {json_file} is not a valid JSON, consider using '
+                f'check-json',
             )
             return 1

@@ -140,4 +133,4 @@


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
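
Two Python-3-only guarantees let this file shrink: plain dicts keep insertion order (CPython 3.6, a language guarantee since 3.7), so OrderedDict is redundant, and json.dumps with indent stopped emitting trailing spaces back in 3.4, so the separators workaround for bpo-16333 can go. A quick sketch:

    import json

    data = dict([('z', 1), ('a', 2)])    # insertion order preserved on 3.6+
    pretty = json.dumps(data, indent=2)  # no separators workaround needed
    assert list(data) == ['z', 'a']
    assert not any(line.endswith(' ') for line in pretty.splitlines())
    print(pretty)
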
diff --git a/pre_commit_hooks/requirements_txt_fixer.py b/pre_commit_hooks/requirements_txt_fixer.py
index 1aa0dff..dc41815 100644
--- a/pre_commit_hooks/requirements_txt_fixer.py
+++ b/pre_commit_hooks/requirements_txt_fixer.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
 import argparse
 from typing import IO
 from typing import List
@@ -11,15 +9,13 @@
 FAIL = 1


-class Requirement(object):
-
-    def __init__(self):  # type: () -> None
-        super(Requirement, self).__init__()
-        self.value = None  # type: Optional[bytes]
-        self.comments = []  # type: List[bytes]
+class Requirement:
+    def __init__(self) -> None:
+        self.value: Optional[bytes] = None
+        self.comments: List[bytes] = []

     @property
-    def name(self):  # type: () -> bytes
+    def name(self) -> bytes:
         assert self.value is not None, self.value
         for egg in (b'#egg=', b'&egg='):
             if egg in self.value:
@@ -27,7 +23,7 @@

         return self.value.lower().partition(b'==')[0]

-    def __lt__(self, requirement):  # type: (Requirement) -> int
+    def __lt__(self, requirement: 'Requirement') -> int:
         # \n means top of file comment, so always return True,
         # otherwise just do a string comparison with value.
         assert self.value is not None, self.value
@@ -39,10 +35,10 @@
         return self.name < requirement.name


-def fix_requirements(f):  # type: (IO[bytes]) -> int
-    requirements = []  # type: List[Requirement]
+def fix_requirements(f: IO[bytes]) -> int:
+    requirements: List[Requirement] = []
     before = list(f)
-    after = []  # type: List[bytes]
+    after: List[bytes] = []

     before_string = b''.join(before)

@@ -109,7 +105,7 @@
     return FAIL


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to fix')
     args = parser.parse_args(argv)
@@ -121,7 +117,7 @@
             ret_for_file = fix_requirements(file_obj)

             if ret_for_file:
-                print('Sorting {}'.format(arg))
+                print(f'Sorting {arg}')

             retv |= ret_for_file
diff --git a/pre_commit_hooks/sort_simple_yaml.py b/pre_commit_hooks/sort_simple_yaml.py
index a381679..8ebc84f 100755
--- a/pre_commit_hooks/sort_simple_yaml.py
+++ b/pre_commit_hooks/sort_simple_yaml.py
@@ -18,8 +18,6 @@
 In other words, we don't sort deeper than the top layer, and might corrupt
 complicated YAML files.
 """
-from __future__ import print_function
-
 import argparse
 from typing import List
 from typing import Optional
@@ -29,7 +27,7 @@
 QUOTES = ["'", '"']


-def sort(lines):  # type: (List[str]) -> List[str]
+def sort(lines: List[str]) -> List[str]:
     """Sort a YAML file in alphabetical order, keeping blocks together.

     :param lines: array of strings (without newlines)
@@ -47,7 +45,7 @@
     return new_lines


-def parse_block(lines, header=False):  # type: (List[str], bool) -> List[str]
+def parse_block(lines: List[str], header: bool = False) -> List[str]:
     """Parse and return a single block, popping off the start of `lines`.

     If parsing a header block, we stop after we reach a line that is not a
@@ -63,7 +61,7 @@
     return block_lines


-def parse_blocks(lines):  # type: (List[str]) -> List[List[str]]
+def parse_blocks(lines: List[str]) -> List[List[str]]:
     """Parse and return all possible blocks, popping off the start of `lines`.

     :param lines: list of lines
@@ -80,7 +78,7 @@
     return blocks


-def first_key(lines):  # type: (List[str]) -> str
+def first_key(lines: List[str]) -> str:
     """Returns a string representing the sort key of a block.

     The sort key is the first YAML key we encounter, ignoring comments, and
@@ -102,7 +100,7 @@
     return ''  # not actually reached in reality


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to fix')
     args = parser.parse_args(argv)
@@ -115,7 +113,7 @@
         new_lines = sort(lines)

         if lines != new_lines:
-            print('Fixing file `{filename}`'.format(filename=filename))
+            print(f'Fixing file `{filename}`')
             f.seek(0)
             f.write('\n'.join(new_lines) + '\n')
             f.truncate()
diff --git a/pre_commit_hooks/string_fixer.py b/pre_commit_hooks/string_fixer.py
index 813ef64..3fdb6e2 100644
--- a/pre_commit_hooks/string_fixer.py
+++ b/pre_commit_hooks/string_fixer.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import argparse
 import io
 import re
@@ -13,7 +9,7 @@
 START_QUOTE_RE = re.compile('^[a-zA-Z]*"')


-def handle_match(token_text):  # type: (str) -> str
+def handle_match(token_text: str) -> str:
     if '"""' in token_text or "'''" in token_text:
         return token_text

@@ -28,7 +24,7 @@
     return token_text


-def get_line_offsets_by_line_no(src):  # type: (str) -> List[int]
+def get_line_offsets_by_line_no(src: str) -> List[int]:
     # Padded so we can index with line number
     offsets = [-1, 0]
     for line in src.splitlines(True):
@@ -36,8 +32,8 @@
     return offsets


-def fix_strings(filename):  # type: (str) -> int
-    with io.open(filename, encoding='UTF-8', newline='') as f:
+def fix_strings(filename: str) -> int:
+    with open(filename, encoding='UTF-8', newline='') as f:
         contents = f.read()
     line_offsets = get_line_offsets_by_line_no(contents)

@@ -57,14 +53,14 @@
     new_contents = ''.join(splitcontents)
     if contents != new_contents:
-        with io.open(filename, 'w', encoding='UTF-8', newline='') as f:
+        with open(filename, 'w', encoding='UTF-8', newline='') as f:
             f.write(new_contents)
         return 1
     else:
         return 0


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*', help='Filenames to fix')
     args = parser.parse_args(argv)
@@ -74,7 +70,7 @@
     for filename in args.filenames:
         return_value = fix_strings(filename)
         if return_value != 0:
-            print('Fixing strings in {}'.format(filename))
+            print(f'Fixing strings in {filename}')
         retv |= return_value

     return retv
diff --git a/pre_commit_hooks/tests_should_end_in_test.py b/pre_commit_hooks/tests_should_end_in_test.py
index d93595f..b8cf915 100644
--- a/pre_commit_hooks/tests_should_end_in_test.py
+++ b/pre_commit_hooks/tests_should_end_in_test.py
@@ -1,14 +1,11 @@
-from __future__ import print_function
-
 import argparse
 import os.path
 import re
-import sys
 from typing import Optional
 from typing import Sequence


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filenames', nargs='*')
     parser.add_argument(
@@ -27,14 +24,10 @@
                 not base == 'conftest.py'
         ):
             retcode = 1
-            print(
-                '{} does not match pattern "{}"'.format(
-                    filename, test_name_pattern,
-                ),
-            )
+            print(f'{filename} does not match pattern "{test_name_pattern}"')

     return retcode


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
diff --git a/pre_commit_hooks/trailing_whitespace_fixer.py b/pre_commit_hooks/trailing_whitespace_fixer.py
index a21b54f..05ed999 100644
--- a/pre_commit_hooks/trailing_whitespace_fixer.py
+++ b/pre_commit_hooks/trailing_whitespace_fixer.py
@@ -1,14 +1,14 @@
-from __future__ import print_function
-
 import argparse
 import os
-import sys
 from typing import Optional
 from typing import Sequence


-def _fix_file(filename, is_markdown, chars):
-    # type: (str, bool, Optional[bytes]) -> bool
+def _fix_file(
+        filename: str,
+        is_markdown: bool,
+        chars: Optional[bytes],
+) -> bool:
     with open(filename, mode='rb') as file_processed:
         lines = file_processed.readlines()
     newlines = [_process_line(line, is_markdown, chars) for line in lines]
@@ -21,8 +21,11 @@
         return False


-def _process_line(line, is_markdown, chars):
-    # type: (bytes, bool, Optional[bytes]) -> bytes
+def _process_line(
+        line: bytes,
+        is_markdown: bool,
+        chars: Optional[bytes],
+) -> bytes:
     if line[-2:] == b'\r\n':
         eol = b'\r\n'
         line = line[:-2]
@@ -37,7 +40,7 @@
     return line.rstrip(chars) + eol


-def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
+def main(argv: Optional[Sequence[str]] = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument(
         '--no-markdown-linebreak-ext',
@@ -80,20 +83,20 @@
     for ext in md_exts:
         if any(c in ext[1:] for c in r'./\:'):
             parser.error(
-                'bad --markdown-linebreak-ext extension {!r} (has . / \\ :)\n'
-                "  (probably filename; use '--markdown-linebreak-ext=EXT')"
-                .format(ext),
+                f'bad --markdown-linebreak-ext extension '
+                f'{ext!r} (has . / \\ :)\n'
+                f"  (probably filename; use '--markdown-linebreak-ext=EXT')",
             )
-    chars = None if args.chars is None else args.chars.encode('utf-8')
+    chars = None if args.chars is None else args.chars.encode()
     return_code = 0
     for filename in args.filenames:
         _, extension = os.path.splitext(filename.lower())
         md = all_markdown or extension in md_exts
         if _fix_file(filename, md, chars):
-            print('Fixing {}'.format(filename))
+            print(f'Fixing {filename}')
             return_code = 1
     return return_code


 if __name__ == '__main__':
-    sys.exit(main())
+    exit(main())
diff --git a/pre_commit_hooks/util.py b/pre_commit_hooks/util.py
index 3b960e3..e04b015 100644
--- a/pre_commit_hooks/util.py
+++ b/pre_commit_hooks/util.py
@@ -1,9 +1,6 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import subprocess
 from typing import Any
+from typing import Optional
 from typing import Set


@@ -11,18 +8,17 @@
     pass


-def added_files():  # type: () -> Set[str]
+def added_files() -> Set[str]:
     cmd = ('git', 'diff', '--staged', '--name-only', '--diff-filter=A')
     return set(cmd_output(*cmd).splitlines())


-def cmd_output(*cmd, **kwargs):  # type: (*str, **Any) -> str
-    retcode = kwargs.pop('retcode', 0)
+def cmd_output(*cmd: str, retcode: Optional[int] = 0, **kwargs: Any) -> str:
     kwargs.setdefault('stdout', subprocess.PIPE)
     kwargs.setdefault('stderr', subprocess.PIPE)
     proc = subprocess.Popen(cmd, **kwargs)
     stdout, stderr = proc.communicate()
-    stdout = stdout.decode('UTF-8')
+    stdout = stdout.decode()
     if retcode is not None and proc.returncode != retcode:
         raise CalledProcessError(cmd, retcode, proc.returncode, stdout, stderr)
     return stdout
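
Python 3 keyword-only parameters let retcode move out of **kwargs and into the signature, where callers and mypy can see it. A standalone sketch of the same pattern (hypothetical run() helper, not the hook's util module):

    import subprocess
    import sys
    from typing import Any, Optional

    def run(*cmd: str, retcode: Optional[int] = 0, **kwargs: Any) -> str:
        kwargs.setdefault('stdout', subprocess.PIPE)
        proc = subprocess.Popen(cmd, **kwargs)
        stdout, _ = proc.communicate()
        decoded = stdout.decode()  # bytes.decode() defaults to UTF-8
        if retcode is not None and proc.returncode != retcode:
            raise RuntimeError(f'{cmd} returned {proc.returncode}')
        return decoded

    # retcode=None skips the return-code check, mirroring cmd_output above.
    print(run(sys.executable, '--version', retcode=None), end='')
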
diff --git a/setup.cfg b/setup.cfg
index 4b793f7..6b1a34d 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -11,13 +11,11 @@
 license_file = LICENSE
 classifiers =
     License :: OSI Approved :: MIT License
-    Programming Language :: Python :: 2
-    Programming Language :: Python :: 2.7
     Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.4
-    Programming Language :: Python :: 3.5
+    Programming Language :: Python :: 3 :: Only
     Programming Language :: Python :: 3.6
     Programming Language :: Python :: 3.7
+    Programming Language :: Python :: 3.8
     Programming Language :: Python :: Implementation :: CPython
     Programming Language :: Python :: Implementation :: PyPy
@@ -27,9 +25,7 @@
     flake8
     ruamel.yaml>=0.15
     toml
-    six
-    typing; python_version<"3.5"
-python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+python_requires = >=3.6

 [options.entry_points]
 console_scripts =
diff --git a/testing/util.py b/testing/util.py
index fac498c..8e468d6 100644
--- a/testing/util.py
+++ b/testing/util.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import os.path
diff --git a/tests/autopep8_wrapper_test.py b/tests/autopep8_wrapper_test.py
index 615ec25..f8030b5 100644
--- a/tests/autopep8_wrapper_test.py
+++ b/tests/autopep8_wrapper_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest

 from pre_commit_hooks.autopep8_wrapper import main
diff --git a/tests/check_added_large_files_test.py b/tests/check_added_large_files_test.py
index 2f67d1b..c33a9ca 100644
--- a/tests/check_added_large_files_test.py
+++ b/tests/check_added_large_files_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import distutils.spawn

 import pytest
@@ -78,7 +75,7 @@
 @xfailif_no_gitlfs
 def test_allows_gitlfs(temp_git_dir, monkeypatch):  # pragma: no cover
     with temp_git_dir.as_cwd():
-        monkeypatch.setenv(str('HOME'), str(temp_git_dir.strpath))
+        monkeypatch.setenv('HOME', str(temp_git_dir.strpath))
         cmd_output('git', 'lfs', 'install')
         temp_git_dir.join('f.py').write('a' * 10000)
         cmd_output('git', 'lfs', 'track', 'f.py')
@@ -90,7 +87,7 @@
 @xfailif_no_gitlfs
 def test_moves_with_gitlfs(temp_git_dir, monkeypatch):  # pragma: no cover
     with temp_git_dir.as_cwd():
-        monkeypatch.setenv(str('HOME'), str(temp_git_dir.strpath))
+        monkeypatch.setenv('HOME', str(temp_git_dir.strpath))
         cmd_output('git', 'lfs', 'install')
         cmd_output('git', 'lfs', 'track', 'a.bin', 'b.bin')
         # First add the file we're going to move
diff --git a/tests/check_ast_test.py b/tests/check_ast_test.py
index c16f5fc..686fd11 100644
--- a/tests/check_ast_test.py
+++ b/tests/check_ast_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 from pre_commit_hooks.check_ast import main
 from testing.util import get_resource_path
diff --git a/tests/check_builtin_literals_test.py b/tests/check_builtin_literals_test.py
index 8e18854..01193e8 100644
--- a/tests/check_builtin_literals_test.py
+++ b/tests/check_builtin_literals_test.py
@@ -7,7 +7,7 @@
 from pre_commit_hooks.check_builtin_literals import Visitor

 BUILTIN_CONSTRUCTORS = '''\
-from six.moves import builtins
+import builtins

 c1 = complex()
 d1 = dict()
diff --git a/tests/check_byte_order_marker_test.py b/tests/check_byte_order_marker_test.py
index 53cb4a1..9995200 100644
--- a/tests/check_byte_order_marker_test.py
+++ b/tests/check_byte_order_marker_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 from pre_commit_hooks import check_byte_order_marker
diff --git a/tests/check_case_conflict_test.py b/tests/check_case_conflict_test.py
index 077b41b..53de852 100644
--- a/tests/check_case_conflict_test.py
+++ b/tests/check_case_conflict_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 from pre_commit_hooks.check_case_conflict import find_conflicting_filenames
 from pre_commit_hooks.check_case_conflict import main
 from pre_commit_hooks.util import cmd_output
diff --git a/tests/check_docstring_first_test.py b/tests/check_docstring_first_test.py
index 0973a58..7ad876f 100644
--- a/tests/check_docstring_first_test.py
+++ b/tests/check_docstring_first_test.py
@@ -1,7 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest

 from pre_commit_hooks.check_docstring_first import check_docstring_first
diff --git a/tests/check_executables_have_shebangs_test.py b/tests/check_executables_have_shebangs_test.py
index 0cb9dcf..15f0c79 100644
--- a/tests/check_executables_have_shebangs_test.py
+++ b/tests/check_executables_have_shebangs_test.py
@@ -1,7 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest

 from pre_commit_hooks.check_executables_have_shebangs import main
@@ -12,7 +8,7 @@
         b'#!/bin/bash\nhello world\n',
         b'#!/usr/bin/env python3.6',
         b'#!python',
-        '#!☃'.encode('UTF-8'),
+        '#!☃'.encode(),
     ),
 )
 def test_has_shebang(content, tmpdir):
@@ -27,7 +23,7 @@
         b' #!python\n',
         b'\n#!python\n',
         b'python\n',
-        '☃'.encode('UTF-8'),
+        '☃'.encode(),
     ),
 )
@@ -36,4 +32,4 @@
     path.write(content, 'wb')
     assert main((path.strpath,)) == 1
     _, stderr = capsys.readouterr()
-    assert stderr.startswith('{}: marked executable but'.format(path.strpath))
+    assert stderr.startswith(f'{path}: marked executable but')
diff --git a/tests/check_merge_conflict_test.py b/tests/check_merge_conflict_test.py
index af7cc43..9968507 100644
--- a/tests/check_merge_conflict_test.py
+++ b/tests/check_merge_conflict_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import os
 import shutil
diff --git a/tests/check_toml_test.py b/tests/check_toml_test.py
index 1172c40..9f186d1 100644
--- a/tests/check_toml_test.py
+++ b/tests/check_toml_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 from pre_commit_hooks.check_toml import main
diff --git a/tests/check_vcs_permalinks_test.py b/tests/check_vcs_permalinks_test.py
index 00e5396..b893c98 100644
--- a/tests/check_vcs_permalinks_test.py
+++ b/tests/check_vcs_permalinks_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 from pre_commit_hooks.check_vcs_permalinks import main
 
 
diff --git a/tests/check_yaml_test.py b/tests/check_yaml_test.py
index d267150..2f869d1 100644
--- a/tests/check_yaml_test.py
+++ b/tests/check_yaml_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest
 
 from pre_commit_hooks.check_yaml import main
diff --git a/tests/conftest.py b/tests/conftest.py
index da206cb..f98ae34 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import pytest
 
 from pre_commit_hooks.util import cmd_output
diff --git a/tests/debug_statement_hook_test.py b/tests/debug_statement_hook_test.py
index d15f5f7..f2cabc1 100644
--- a/tests/debug_statement_hook_test.py
+++ b/tests/debug_statement_hook_test.py
@@ -1,7 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import ast
 
 from pre_commit_hooks.debug_statement_hook import Debug
diff --git a/tests/fix_encoding_pragma_test.py b/tests/fix_encoding_pragma_test.py
index d94b725..f3531f2 100644
--- a/tests/fix_encoding_pragma_test.py
+++ b/tests/fix_encoding_pragma_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import io
 
 import pytest
@@ -129,9 +126,6 @@
 @pytest.mark.parametrize(
     ('input_s', 'expected'),
     (
-        # Python 2 cli parameters are bytes
-        (b'# coding: utf-8', b'# coding: utf-8'),
-        # Python 3 cli parameters are text
         ('# coding: utf-8', b'# coding: utf-8'),
         # trailing whitespace
         ('# coding: utf-8\n', b'# coding: utf-8'),
@@ -149,7 +143,7 @@
     assert main((f.strpath, '--pragma', pragma)) == 1
     assert f.read() == '# coding: utf-8\nx = 1\n'
     out, _ = capsys.readouterr()
-    assert out == 'Added `# coding: utf-8` to {}\n'.format(f.strpath)
+    assert out == f'Added `# coding: utf-8` to {f.strpath}\n'
 
 
 def test_crlf_ok(tmpdir):
diff --git a/tests/forbid_new_submodules_test.py b/tests/forbid_new_submodules_test.py
index 523628d..7619182 100644
--- a/tests/forbid_new_submodules_test.py
+++ b/tests/forbid_new_submodules_test.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import subprocess
 
 import pytest
diff --git a/tests/mixed_line_ending_test.py b/tests/mixed_line_ending_test.py
index 8ae9354..c438f74 100644
--- a/tests/mixed_line_ending_test.py
+++ b/tests/mixed_line_ending_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest
 
 from pre_commit_hooks.mixed_line_ending import main
@@ -86,7 +83,7 @@
     assert ret == 1
     assert path.read_binary() == contents
     out, _ = capsys.readouterr()
-    assert out == '{}: mixed line endings\n'.format(path)
+    assert out == f'{path}: mixed line endings\n'
 
 
 def test_fix_lf(tmpdir, capsys):
@@ -97,7 +94,7 @@
     assert ret == 1
     assert path.read_binary() == b'foo\nbar\nbaz\n'
     out, _ = capsys.readouterr()
-    assert out == '{}: fixed mixed line endings\n'.format(path)
+    assert out == f'{path}: fixed mixed line endings\n'
 
 
 def test_fix_crlf(tmpdir):
diff --git a/tests/no_commit_to_branch_test.py b/tests/no_commit_to_branch_test.py
index a2ab1f1..72b32e6 100644
--- a/tests/no_commit_to_branch_test.py
+++ b/tests/no_commit_to_branch_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest
 
 from pre_commit_hooks.no_commit_to_branch import is_on_branch
diff --git a/tests/pretty_format_json_test.py b/tests/pretty_format_json_test.py
index b42e504..59a87f0 100644
--- a/tests/pretty_format_json_test.py
+++ b/tests/pretty_format_json_test.py
@@ -2,7 +2,6 @@
 import shutil
 
 import pytest
-from six import PY2
 
 from pre_commit_hooks.pretty_format_json import main
 from pre_commit_hooks.pretty_format_json import parse_num_to_int
@@ -42,7 +41,6 @@
     assert ret == expected_retval
 
 
-@pytest.mark.skipif(PY2, reason='Requires Python3')
 @pytest.mark.parametrize(
     ('filename', 'expected_retval'), (
         ('not_pretty_formatted_json.json', 1),
@@ -52,7 +50,7 @@
         ('tab_pretty_formatted_json.json', 0),
     ),
 )
-def test_tab_main(filename, expected_retval):  # pragma: no cover
+def test_tab_main(filename, expected_retval):
     ret = main(['--indent', '\t', get_resource_path(filename)])
     assert ret == expected_retval
 
@@ -113,9 +111,9 @@
     expected_retval = 1
     a = os.path.join('a', resource_path)
     b = os.path.join('b', resource_path)
-    expected_out = '''\
---- {}
-+++ {}
+    expected_out = f'''\
+--- {a}
++++ {b}
 @@ -1,6 +1,9 @@
  {{
 -  "foo":
@@ -130,7 +128,7 @@
 +  "blah": null,
 +  "foo": "bar"
  }}
-'''.format(a, b)
+'''
     actual_retval = main([resource_path])
     actual_out, actual_err = capsys.readouterr()
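
Aside: the converted triple-quoted template keeps its doubled braces because f-strings escape literal { and } exactly as str.format does; only the positional {} placeholders become named expressions. A small equivalence check, not part of the patch:

    a = 'a/x.json'  # hypothetical path, for illustration only
    assert f'--- {a}\n {{\n}}' == '--- {}\n {{\n}}'.format(a)
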
diff --git a/tests/readme_test.py b/tests/readme_test.py
index fd6d265..7df7fcf 100644
--- a/tests/readme_test.py
+++ b/tests/readme_test.py
@@ -1,15 +1,10 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import io
-
 from pre_commit_hooks.check_yaml import yaml
 
 
 def test_readme_contains_all_hooks():
-    with io.open('README.md', encoding='UTF-8') as f:
+    with open('README.md', encoding='UTF-8') as f:
         readme_contents = f.read()
-    with io.open('.pre-commit-hooks.yaml', encoding='UTF-8') as f:
+    with open('.pre-commit-hooks.yaml', encoding='UTF-8') as f:
         hooks = yaml.load(f)
     for hook in hooks:
-        assert '`{}`'.format(hook['id']) in readme_contents
+        assert f'`{hook["id"]}`' in readme_contents
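
Aside: on CPython 3 the builtin open is the same function as io.open, so dropping the io import changes no behavior and the encoding keyword works as before. The f-string also switches to double quotes around hook["id"] because, before Python 3.12, an f-string cannot reuse its own quote character inside the braces. A small illustration, not part of the patch:

    import io

    # The builtin and io.open are the same function object on Python 3.
    assert open is io.open

    hook = {'id': 'check-yaml'}  # hypothetical hook entry
    assert f'`{hook["id"]}`' == '`{}`'.format(hook['id'])
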
diff --git a/tests/sort_simple_yaml_test.py b/tests/sort_simple_yaml_test.py
index 4261d5d..69ad388 100644
--- a/tests/sort_simple_yaml_test.py
+++ b/tests/sort_simple_yaml_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import os
 
 import pytest
diff --git a/tests/string_fixer_test.py b/tests/string_fixer_test.py
index 4adca4a..77a51cf 100644
--- a/tests/string_fixer_test.py
+++ b/tests/string_fixer_test.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
 import textwrap
 
 import pytest
diff --git a/tests/trailing_whitespace_fixer_test.py b/tests/trailing_whitespace_fixer_test.py
index 97f9aef..53177ac 100644
--- a/tests/trailing_whitespace_fixer_test.py
+++ b/tests/trailing_whitespace_fixer_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest
 
 from pre_commit_hooks.trailing_whitespace_fixer import main
@@ -46,7 +43,7 @@
         '\t\n'  # trailing tabs are stripped anyway
         '\n ',  # whitespace at the end of the file is removed
     )
-    ret = main((path.strpath, '--markdown-linebreak-ext={}'.format(ext)))
+    ret = main((path.strpath, f'--markdown-linebreak-ext={ext}'))
     assert ret == 1
     assert path.read() == (
         'foo \n'
diff --git a/tests/util_test.py b/tests/util_test.py
index 9b2d723..b42ee6f 100644
--- a/tests/util_test.py
+++ b/tests/util_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
 import pytest
 
 from pre_commit_hooks.util import CalledProcessError
diff --git a/tox.ini b/tox.ini
index a6b3fb4..cb58fee 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py27,py36,py37,pypy,pypy3,pre-commit
+envlist = py36,py37,py38,pypy3,pre-commit
 
 [testenv]
 deps = -rrequirements-dev.txt