diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 690a24e4..faed6a46 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.1.1 +current_version = 6.0.0 commit = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)((?P.*))? serialize = diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 49b67a3c..3e62215c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,12 +8,12 @@ jobs: strategy: matrix: os: [macos-latest, ubuntu-latest, windows-latest] - python-version: [3.5, 3.6, 3.7, 3.8] + python-version: [3.6, 3.7, 3.8, 3.9-dev] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install tox diff --git a/LICENSE-MIT b/LICENSE-MIT index c13417d3..d8dcb3b9 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,6 +1,8 @@ Copyright (c) 2012 GreenSteam, -Copyright (c) 2014-2017 Amir Rachum, +Copyright (c) 2014-2020 Amir Rachum, + +Copyright (c) 2020 Sambhav Kothari, Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/README.rst b/README.rst index 088b5f01..a4e8aa7a 100644 --- a/README.rst +++ b/README.rst @@ -15,6 +15,12 @@ pydocstyle - docstring style checker .. image:: https://pepy.tech/badge/pydocstyle :target: https://pepy.tech/project/pydocstyle +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + +.. image:: https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336 + :target: https://pycqa.github.io/isort/ + **pydocstyle** is a static analysis tool for checking compliance with Python docstring conventions. @@ -22,7 +28,7 @@ docstring conventions. `PEP 257 `_ out of the box, but it should not be considered a reference implementation. -**pydocstyle** supports Python 3.5, 3.6, 3.7 and 3.8. +**pydocstyle** supports Python 3.6, 3.7 and 3.8. Quick Start diff --git a/docs/conf.py b/docs/conf.py index 6ad4687f..6f6412a5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# # pydocstyle documentation build configuration file, created by # sphinx-quickstart on Fri Jan 30 20:30:42 2015. # @@ -32,7 +30,6 @@ 'sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', - 'sphinxcontrib.issuetracker', # autolinks issue numbers (like #78) ] # Add any paths that contain templates here, relative to this directory. @@ -49,7 +46,7 @@ # General information about the project. project = 'pydocstyle' -copyright = '2016, Amir Rachum' +copyright = '2020, Amir Rachum, Sambhav Kothari' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -267,10 +264,6 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False -# sphinxcontrib.issuetracker settings -issuetracker = 'github' -issuetracker_project = 'PyCQA/pydocstyle' - def generate_error_code_table(): from pydocstyle.violations import ErrorRegistry diff --git a/docs/index.rst b/docs/index.rst index 00980b2b..991a01e4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,7 +8,7 @@ docstring conventions. `PEP 257 `_ out of the box, but it should not be considered a reference implementation. -**pydocstyle** supports Python 3.5, 3.6, 3.7 and 3.8. 
+**pydocstyle** supports Python 3.6, 3.7 and 3.8. .. include:: quickstart.rst @@ -31,5 +31,4 @@ Credits pydocstyle is a rename and continuation of pep257, a project created by Vladimir Keleshev. -Maintained by Amir Rachum. - +Maintained by Amir Rachum and Sambhav Kothari. diff --git a/docs/release_notes.rst b/docs/release_notes.rst index d275cf4e..0c9b87fe 100644 --- a/docs/release_notes.rst +++ b/docs/release_notes.rst @@ -5,6 +5,35 @@ Release Notes `Semantic Versioning `_ specification. +6.0.0 - March 18th, 2021 +--------------------------- + +Major Updates + +* Support for Python 3.5 has been dropped (#510). + +New Features + +* Add flag to disable `# noqa` comment processing in API (#485). +* Methods, Functions and Nested functions that have a docstring now throw D418 (#511). +* Methods decorated with @overload no longer reported as D102 (#511). +* Functions and nested functions decorated with @overload no longer reported as D103 (#511). + +Bug Fixes + +* Treat "package" as an imperative verb for D401 (#356). +* Fix the parsing of decorated one line functions (#499). + +5.1.2 - September 13th, 2020 +---------------------------- + +New Features + +* Methods, Functions and Nested functions that have a docstring now throw D418 (#511). +* Methods decorated with @overload no longer reported as D102. +* Functions and nested functions decorated with @overload no longer reported as D103. + + 5.1.1 - August 29th, 2020 --------------------------- @@ -12,7 +41,6 @@ Bug Fixes * Fix ``IndexError`` crash on one-line backslashed docstrings (#506). - 5.1.0 - August 22nd, 2020 --------------------------- @@ -32,7 +60,6 @@ Bug Fixes * Support backslash-continued descriptions in docstrings (#472). * Correctly detect publicity of modules inside directories (#470, #494). - 5.0.2 - January 8th, 2020 --------------------------- @@ -111,7 +138,6 @@ Bug Fixes * Fix parsing tuple syntax ``__all__`` (#355, #352). - 3.0.0 - October 14th, 2018 -------------------------- diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..c6e2fb8e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,9 @@ +[tool.black] +line-length = 79 +target-version = ['py36'] +skip-string-normalization = true + +[tool.isort] +profile = "black" +src_paths = ["src/pydocstyle"] +line_length = 79 diff --git a/requirements/docs.txt b/requirements/docs.txt index bad1df95..623c766f 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,4 +1,3 @@ -sphinxcontrib-issuetracker sphinx_rtd_theme # Pinned to 1.6.2 due to a bug in 1.6.3. See GitHub PR #270 for details. # TODO: remove this restriction once 1.6.4 or later is released. diff --git a/requirements/tests.txt b/requirements/tests.txt index 65fd55fd..947eea4a 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -1,3 +1,5 @@ pytest==3.0.2 pytest-pep8==1.0.6 -mypy==0.730 +mypy==0.782 +black==20.8b1 +isort==5.4.2 diff --git a/setup.py b/setup.py index f333e8ef..2ffaf4ed 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,7 @@ from setuptools import setup -import sys # Do not update the version manually - it is managed by `bumpversion`. 
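The 6.0.0 notes above add a flag to disable `# noqa` comment processing in the API (#485); it surfaces later in this diff as the `ignore_inline_noqa` argument of `check_source` and `check`. A minimal sketch of using it through the public API, with an illustrative file name only::

    from pydocstyle import check

    # Report violations even for definitions that carry `# noqa` comments.
    for error in check(['example.py'], ignore_inline_noqa=True):
        print(error)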
-version = '5.1.1' +version = '6.0.0' requirements = [ @@ -24,14 +23,14 @@ 'Environment :: Console', 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3 :: Only', 'Operating System :: OS Independent', 'License :: OSI Approved :: MIT License', ], - python_requires='>=3.5', + python_requires='>=3.6', keywords='pydocstyle, PEP 257, pep257, PEP 8, pep8, docstrings', packages=('pydocstyle',), package_dir={'': 'src'}, @@ -43,6 +42,6 @@ ], }, project_urls={ - 'Release Notes': 'http://www.pydocstyle.org/en/latest/release_notes.html', + 'Release Notes': 'https://www.pydocstyle.org/en/latest/release_notes.html', }, ) diff --git a/src/pydocstyle/__init__.py b/src/pydocstyle/__init__.py index a44b4c0b..3fb5499e 100644 --- a/src/pydocstyle/__init__.py +++ b/src/pydocstyle/__init__.py @@ -1,7 +1,5 @@ -from .checker import check -from .violations import Error, conventions -from .utils import __version__ - # Temporary hotfix for flake8-docstrings -from .checker import ConventionChecker +from .checker import ConventionChecker, check from .parser import AllError +from .utils import __version__ +from .violations import Error, conventions diff --git a/src/pydocstyle/__main__.py b/src/pydocstyle/__main__.py index 967914a9..cbde4e59 100644 --- a/src/pydocstyle/__main__.py +++ b/src/pydocstyle/__main__.py @@ -12,6 +12,7 @@ def main() -> None: from pydocstyle import cli + cli.main() diff --git a/src/pydocstyle/checker.py b/src/pydocstyle/checker.py index 03fc1634..41e3f35f 100644 --- a/src/pydocstyle/checker.py +++ b/src/pydocstyle/checker.py @@ -2,23 +2,37 @@ import ast import string -import sys -import textwrap import tokenize as tk -from itertools import takewhile, chain -from re import compile as re from collections import namedtuple +from itertools import chain, takewhile +from re import compile as re from . 
import violations from .config import IllegalConfiguration -from .parser import (Package, Module, Class, NestedClass, Definition, AllError, - Method, Function, NestedFunction, Parser, StringIO, - ParseError) -from .utils import log, is_blank, pairwise, common_prefix_length, strip_non_alphanumeric -from .wordlists import IMPERATIVE_VERBS, IMPERATIVE_BLACKLIST, stem - - -__all__ = ('check', ) +from .parser import ( + AllError, + Class, + Definition, + Function, + Method, + Module, + NestedClass, + NestedFunction, + Package, + ParseError, + Parser, + StringIO, +) +from .utils import ( + common_prefix_length, + is_blank, + log, + pairwise, + strip_non_alphanumeric, +) +from .wordlists import IMPERATIVE_BLACKLIST, IMPERATIVE_VERBS, stem + +__all__ = ('check',) def check_for(kind, terminal=False): @@ -26,6 +40,7 @@ def decorator(f): f._check_for = kind f._terminal = terminal return f + return decorator @@ -52,7 +67,7 @@ class ConventionChecker: 'References', 'Examples', 'Attributes', - 'Methods' + 'Methods', ) GOOGLE_SECTION_NAMES = ( @@ -90,43 +105,63 @@ class ConventionChecker: # " random: Test" where random will be captured as the param # " random : test" where random will be captured as the param # " random_t (Test) : test " where random_t will be captured as the param + # Matches anything that fulfills all the following conditions: GOOGLE_ARGS_REGEX = re( - # Matches anything that fulfills all the following conditions: - r"^\s*" # Begins with 0 or more whitespace characters - r"(\w+)" # Followed by 1 or more unicode chars, numbers or underscores - # The above is captured as the first group as this is the paramater name. - r"\s*" # Followed by 0 or more whitespace characters - r"(\(.*?\))?" # Matches patterns contained within round brackets. - # The `.*?`matches any sequence of characters in a non-greedy - # way (denoted by the `*?`) - r"\s*" # Followed by 0 or more whitespace chars - r":" # Followed by a colon - ".+" # Followed by 1 or more characters - which is the docstring for the parameter + # Begins with 0 or more whitespace characters + r"^\s*" + # Followed by 1 or more unicode chars, numbers or underscores + # The above is captured as the first group as this is the paramater name. + r"(\w+)" + # Followed by 0 or more whitespace characters + r"\s*" + # Matches patterns contained within round brackets. + # The `.*?`matches any sequence of characters in a non-greedy + # way (denoted by the `*?`) + r"(\(.*?\))?" 
+ # Followed by 0 or more whitespace chars + r"\s*" + # Followed by a colon + r":" + # Followed by 1 or more characters - which is the docstring for the parameter + ".+" ) - def check_source(self, source, filename, ignore_decorators=None): + def check_source( + self, + source, + filename, + ignore_decorators=None, + ignore_inline_noqa=False, + ): module = parse(StringIO(source), filename) for definition in module: for this_check in self.checks: terminate = False if isinstance(definition, this_check._check_for): - skipping_all = (definition.skipped_error_codes == 'all') + skipping_all = definition.skipped_error_codes == 'all' decorator_skip = ignore_decorators is not None and any( len(ignore_decorators.findall(dec.name)) > 0 - for dec in definition.decorators) - if not skipping_all and not decorator_skip: - error = this_check(self, definition, - definition.docstring) + for dec in definition.decorators + ) + if ( + ignore_inline_noqa or not skipping_all + ) and not decorator_skip: + error = this_check( + self, definition, definition.docstring + ) else: error = None errors = error if hasattr(error, '__iter__') else [error] for error in errors: - if error is not None and error.code not in \ - definition.skipped_error_codes: + if error is not None and ( + ignore_inline_noqa + or error.code not in definition.skipped_error_codes + ): partition = this_check.__doc__.partition('.\n') message, _, explanation = partition - error.set_context(explanation=explanation, - definition=definition) + error.set_context( + explanation=explanation, definition=definition + ) yield error if this_check._terminal: terminate = True @@ -136,8 +171,11 @@ def check_source(self, source, filename, ignore_decorators=None): @property def checks(self): - all = [this_check for this_check in vars(type(self)).values() - if hasattr(this_check, '_check_for')] + all = [ + this_check + for this_check in vars(type(self)).values() + if hasattr(this_check, '_check_for') + ] return sorted(all, key=lambda this_check: not this_check._terminal) @check_for(Definition, terminal=True) @@ -154,17 +192,35 @@ def check_docstring_missing(self, definition, docstring): with a single underscore. """ - if (not docstring and definition.is_public or - docstring and is_blank(ast.literal_eval(docstring))): - codes = {Module: violations.D100, - Class: violations.D101, - NestedClass: violations.D106, - Method: (lambda: violations.D105() if definition.is_magic - else (violations.D107() if definition.is_init - else violations.D102())), - Function: violations.D103, - NestedFunction: violations.D103, - Package: violations.D104} + if ( + not docstring + and definition.is_public + or docstring + and is_blank(ast.literal_eval(docstring)) + ): + codes = { + Module: violations.D100, + Class: violations.D101, + NestedClass: violations.D106, + Method: lambda: violations.D105() + if definition.is_magic + else ( + violations.D107() + if definition.is_init + else ( + violations.D102() + if not definition.is_overload + else None + ) + ), + NestedFunction: violations.D103, + Function: ( + lambda: violations.D103() + if not definition.is_overload + else None + ), + Package: violations.D104, + } return codes[type(definition)]() @check_for(Definition) @@ -202,8 +258,8 @@ def check_no_blank_before(self, function, docstring): # def # and the blank line is not itself followed by an inner function or # class. 
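The reworked `check_docstring_missing` above stops reporting D102/D103 for `@overload`-decorated definitions, while the new `check_if_needed` check added further down reports D418 when such a stub does carry a docstring. A minimal sketch of both behaviours, with illustrative names::

    from typing import overload

    @overload
    def process(value: int) -> int: ...  # no D103: overload stubs need no docstring

    @overload
    def process(value: str) -> str:
        """Process a string."""  # D418: overload stub should not have a docstring
        ...

    def process(value):
        """Process a value."""  # the implementation keeps its normal docstring checks
        return value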
if not ( - blanks_after_count == 1 and - re(r"\s+(?:(?:class|def|async def)\s|@)").match(after) + blanks_after_count == 1 + and re(r"\s+(?:(?:class|def|async def)\s|@)").match(after) ): yield violations.D202(blanks_after_count) @@ -279,8 +335,11 @@ def check_indent(self, definition, docstring): lines = docstring.split('\n') if len(lines) > 1: # First line and line continuations need no indent. - lines = [line for i, line in enumerate(lines) - if i and not lines[i-1].endswith('\\')] + lines = [ + line + for i, line in enumerate(lines) + if i and not lines[i - 1].endswith('\\') + ] indents = [leading_space(l) for l in lines if not is_blank(l)] if set(' \t') == set(''.join(indents) + indent): yield violations.D206() @@ -300,8 +359,11 @@ def check_newline_after_last_paragraph(self, definition, docstring): """ if docstring: - lines = [l for l in ast.literal_eval(docstring).split('\n') - if not is_blank(l)] + lines = [ + l + for l in ast.literal_eval(docstring).split('\n') + if not is_blank(l) + ] if len(lines) > 1: if docstring.split("\n")[-1].strip() not in ['"""', "'''"]: return violations.D209() @@ -311,8 +373,11 @@ def check_surrounding_whitespaces(self, definition, docstring): """D210: No whitespaces allowed surrounding docstring text.""" if docstring: lines = ast.literal_eval(docstring).split('\n') - if lines[0].startswith(' ') or \ - len(lines) == 1 and lines[0].endswith(' '): + if ( + lines[0].startswith(' ') + or len(lines) == 1 + and lines[0].endswith(' ') + ): return violations.D210() @check_for(Definition) @@ -325,10 +390,14 @@ def check_multi_line_summary_start(self, definition, docstring): """ if docstring: start_triple = [ - '"""', "'''", - 'u"""', "u'''", - 'r"""', "r'''", - 'ur"""', "ur'''" + '"""', + "'''", + 'u"""', + "u'''", + 'r"""', + "r'''", + 'ur"""', + "ur'''", ] lines = ast.literal_eval(docstring).split('\n') @@ -380,28 +449,13 @@ def check_backslashes(self, definition, docstring): # Just check that docstring is raw, check_triple_double_quotes # ensures the correct quotes. - if (docstring - and re(r'\\[^\nuN]').search(docstring) - and not docstring.startswith(('r', 'ur'))): + if ( + docstring + and re(r'\\[^\nuN]').search(docstring) + and not docstring.startswith(('r', 'ur')) + ): return violations.D301() - @check_for(Definition) - def check_unicode_docstring(self, definition, docstring): - r'''D302: Use u""" for docstrings with Unicode. - - For Unicode docstrings, use u"""Unicode triple-quoted strings""". - - ''' - if 'unicode_literals' in definition.module.future_imports: - return - - # Just check that docstring is unicode, check_triple_double_quotes - # ensures the correct quotes. - if docstring and sys.version_info[0] <= 2: - if not is_ascii(docstring) and not docstring.startswith( - ('u', 'ur')): - return violations.D302() - @staticmethod def _check_ends_with(docstring, chars, violation): """First line ends with one of `chars`. 
@@ -433,7 +487,9 @@ def check_ends_with_punctuation(self, definition, docstring): question mark, or exclamation point """ - return self._check_ends_with(docstring, ('.', '!', '?'), violations.D415) + return self._check_ends_with( + docstring, ('.', '!', '?'), violations.D415 + ) @check_for(Function) def check_imperative_mood(self, function, docstring): # def context @@ -453,23 +509,14 @@ def check_imperative_mood(self, function, docstring): # def context if check_word in IMPERATIVE_BLACKLIST: return violations.D401b(first_word) - try: - correct_forms = IMPERATIVE_VERBS.get(stem(check_word)) - except UnicodeDecodeError: - # This is raised when the docstring contains unicode - # characters in the first word, but is not a unicode - # string. In which case D302 will be reported. Ignoring. - return + correct_forms = IMPERATIVE_VERBS.get(stem(check_word)) if correct_forms and check_word not in correct_forms: best = max( correct_forms, - key=lambda f: common_prefix_length(check_word, f) - ) - return violations.D401( - best.capitalize(), - first_word + key=lambda f: common_prefix_length(check_word, f), ) + return violations.D401(best.capitalize(), first_word) @check_for(Function) def check_no_signature(self, function, docstring): # def context @@ -501,6 +548,18 @@ def check_capitalized(self, function, docstring): if first_word != first_word.capitalize(): return violations.D403(first_word.capitalize(), first_word) + @check_for(Function) + def check_if_needed(self, function, docstring): + """D418: Function decorated with @overload shouldn't contain a docstring. + + Functions that are decorated with @overload are definitions, + and are for the benefit of the type checker only, + since they will be overwritten by the non-@overload-decorated definition. + + """ + if docstring and function.is_overload: + return violations.D418() + @check_for(Definition) def check_starts_with_this(self, function, docstring): """D404: First word of the docstring should not be `This`. @@ -556,26 +615,34 @@ def _is_docstring_section(context): If one of the conditions is true, we will consider the line as a section name. """ - section_name_suffix = \ + section_name_suffix = ( context.line.strip().lstrip(context.section_name.strip()).strip() + ) section_suffix_is_only_colon = section_name_suffix == ':' punctuation = [',', ';', '.', '-', '\\', '/', ']', '}', ')'] - prev_line_ends_with_punctuation = \ - any(context.previous_line.strip().endswith(x) for x in punctuation) + prev_line_ends_with_punctuation = any( + context.previous_line.strip().endswith(x) for x in punctuation + ) - this_line_looks_like_a_section_name = \ + this_line_looks_like_a_section_name = ( is_blank(section_name_suffix) or section_suffix_is_only_colon + ) - prev_line_looks_like_end_of_paragraph = \ + prev_line_looks_like_end_of_paragraph = ( prev_line_ends_with_punctuation or is_blank(context.previous_line) + ) - return (this_line_looks_like_a_section_name and - prev_line_looks_like_end_of_paragraph) + return ( + this_line_looks_like_a_section_name + and prev_line_looks_like_end_of_paragraph + ) @classmethod - def _check_blanks_and_section_underline(cls, section_name, context, indentation): + def _check_blanks_and_section_underline( + cls, section_name, context, indentation + ): """D4{07,08,09,12,14}, D215: Section underline checks. Check for correct formatting for docstring sections. 
Checks that: @@ -613,9 +680,11 @@ def _check_blanks_and_section_underline(cls, section_name, context, indentation) yield violations.D408(section_name) if non_empty_line.strip() != "-" * len(section_name): - yield violations.D409(len(section_name), - section_name, - len(non_empty_line.strip())) + yield violations.D409( + len(section_name), + section_name, + len(non_empty_line.strip()), + ) if leading_space(non_empty_line) > indentation: yield violations.D215(section_name) @@ -624,11 +693,13 @@ def _check_blanks_and_section_underline(cls, section_name, context, indentation) # If the line index after the dashes is in range (perhaps we have # a header + underline followed by another section header). if line_after_dashes_index < len(context.following_lines): - line_after_dashes = \ - context.following_lines[line_after_dashes_index] + line_after_dashes = context.following_lines[ + line_after_dashes_index + ] if is_blank(line_after_dashes): - rest_of_lines = \ - context.following_lines[line_after_dashes_index:] + rest_of_lines = context.following_lines[ + line_after_dashes_index: + ] if not is_blank(''.join(rest_of_lines)): yield violations.D412(section_name) else: @@ -637,7 +708,9 @@ def _check_blanks_and_section_underline(cls, section_name, context, indentation) yield violations.D414(section_name) @classmethod - def _check_common_section(cls, docstring, definition, context, valid_section_names): + def _check_common_section( + cls, docstring, definition, context, valid_section_names + ): """D4{05,10,11,13}, D214: Section name checks. Check for valid section names. Checks that: @@ -651,15 +724,18 @@ def _check_common_section(cls, docstring, definition, context, valid_section_nam indentation = cls._get_docstring_indent(definition, docstring) capitalized_section = context.section_name.title() - if (context.section_name not in valid_section_names and - capitalized_section in valid_section_names): + if ( + context.section_name not in valid_section_names + and capitalized_section in valid_section_names + ): yield violations.D405(capitalized_section, context.section_name) if leading_space(context.line) > indentation: yield violations.D214(capitalized_section) - if (not context.following_lines or - not is_blank(context.following_lines[-1])): + if not context.following_lines or not is_blank( + context.following_lines[-1] + ): if context.is_last_section: yield violations.D413(capitalized_section) else: @@ -668,9 +744,9 @@ def _check_common_section(cls, docstring, definition, context, valid_section_nam if not is_blank(context.previous_line): yield violations.D411(capitalized_section) - yield from cls._check_blanks_and_section_underline(capitalized_section, - context, - indentation) + yield from cls._check_blanks_and_section_underline( + capitalized_section, context, indentation + ) @classmethod def _check_numpy_section(cls, docstring, definition, context): @@ -684,16 +760,17 @@ def _check_numpy_section(cls, docstring, definition, context): """ indentation = cls._get_docstring_indent(definition, docstring) capitalized_section = context.section_name.title() - yield from cls._check_common_section(docstring, - definition, - context, - cls.NUMPY_SECTION_NAMES) + yield from cls._check_common_section( + docstring, definition, context, cls.NUMPY_SECTION_NAMES + ) suffix = context.line.strip().lstrip(context.section_name) if suffix: yield violations.D406(capitalized_section, context.line.strip()) if capitalized_section == "Parameters": - yield from cls._check_parameters_section(docstring, definition, context) + 
yield from cls._check_parameters_section( + docstring, definition, context + ) @staticmethod def _check_parameters_section(docstring, definition, context): @@ -708,7 +785,8 @@ def _check_parameters_section(docstring, definition, context): section_level_indent = leading_space(context.line) # Join line continuations, then resplit by line. content = ( - '\n'.join(context.following_lines).replace('\\\n', '').split('\n')) + '\n'.join(context.following_lines).replace('\\\n', '').split('\n') + ) for current_line, next_line in zip(content, content[1:]): # All parameter definitions in the Numpy parameters # section must be at the same indent level as the section @@ -717,9 +795,14 @@ def _check_parameters_section(docstring, definition, context): # and has some string, to ensure that the parameter actually # has a description. # This means, this is a parameter doc with some description - if ((leading_space(current_line) == section_level_indent) - and (len(leading_space(next_line)) > len(leading_space(current_line))) - and next_line.strip()): + if ( + (leading_space(current_line) == section_level_indent) + and ( + len(leading_space(next_line)) + > len(leading_space(current_line)) + ) + and next_line.strip() + ): # In case the parameter has type definitions, it # will have a colon if ":" in current_line: @@ -733,8 +816,9 @@ def _check_parameters_section(docstring, definition, context): parameter_list = parameters.split(",") for parameter in parameter_list: docstring_args.add(parameter.strip()) - yield from ConventionChecker._check_missing_args(docstring_args, definition) - + yield from ConventionChecker._check_missing_args( + docstring_args, definition + ) @staticmethod def _check_args_section(docstring, definition, context): @@ -750,8 +834,9 @@ def _check_args_section(docstring, definition, context): match = ConventionChecker.GOOGLE_ARGS_REGEX.match(line) if match: docstring_args.add(match.group(1)) - yield from ConventionChecker._check_missing_args(docstring_args, definition) - + yield from ConventionChecker._check_missing_args( + docstring_args, definition + ) @staticmethod def _check_missing_args(docstring_args, definition): @@ -778,9 +863,9 @@ def _check_missing_args(docstring_args, definition): ] missing_args = set(function_args) - docstring_args if missing_args: - yield violations.D417(", ".join(sorted(missing_args)), - definition.name) - + yield violations.D417( + ", ".join(sorted(missing_args)), definition.name + ) @classmethod def _check_google_section(cls, docstring, definition, context): @@ -796,18 +881,18 @@ def _check_google_section(cls, docstring, definition, context): which are style-agnostic section checks. """ capitalized_section = context.section_name.title() - yield from cls._check_common_section(docstring, - definition, - context, - cls.GOOGLE_SECTION_NAMES) + yield from cls._check_common_section( + docstring, definition, context, cls.GOOGLE_SECTION_NAMES + ) suffix = context.line.strip().lstrip(context.section_name) if suffix != ":": - yield violations.D416(capitalized_section + ":", context.line.strip()) + yield violations.D416( + capitalized_section + ":", context.line.strip() + ) if capitalized_section in ("Args", "Arguments"): yield from cls._check_args_section(docstring, definition, context) - @staticmethod def _get_section_contexts(lines, valid_section_names): """Generate `SectionContext` objects for valid sections. @@ -830,39 +915,53 @@ def _suspected_as_section(_line): return result in lower_section_names # Finding our suspects. 
- suspected_section_indices = [i for i, line in enumerate(lines) if - _suspected_as_section(line)] - - SectionContext = namedtuple('SectionContext', ('section_name', - 'previous_line', - 'line', - 'following_lines', - 'original_index', - 'is_last_section')) + suspected_section_indices = [ + i for i, line in enumerate(lines) if _suspected_as_section(line) + ] + + SectionContext = namedtuple( + 'SectionContext', + ( + 'section_name', + 'previous_line', + 'line', + 'following_lines', + 'original_index', + 'is_last_section', + ), + ) # First - create a list of possible contexts. Note that the # `following_lines` member is until the end of the docstring. - contexts = (SectionContext(get_leading_words(lines[i].strip()), - lines[i - 1], - lines[i], - lines[i + 1:], - i, - False) - for i in suspected_section_indices) + contexts = ( + SectionContext( + get_leading_words(lines[i].strip()), + lines[i - 1], + lines[i], + lines[i + 1 :], + i, + False, + ) + for i in suspected_section_indices + ) # Now that we have manageable objects - rule out false positives. - contexts = (c for c in contexts if ConventionChecker._is_docstring_section(c)) + contexts = ( + c for c in contexts if ConventionChecker._is_docstring_section(c) + ) # Now we shall trim the `following lines` field to only reach the # next section name. for a, b in pairwise(contexts, None): end = -1 if b is None else b.original_index - yield SectionContext(a.section_name, - a.previous_line, - a.line, - lines[a.original_index + 1:end], - a.original_index, - b is None) + yield SectionContext( + a.section_name, + a.previous_line, + a.line, + lines[a.original_index + 1 : end], + a.original_index, + b is None, + ) def _check_numpy_sections(self, lines, definition, docstring): """NumPy-style docstring sections checks. @@ -885,8 +984,7 @@ def _check_numpy_sections(self, lines, definition, docstring): Numpy-style section. """ found_any_numpy_section = False - for ctx in self._get_section_contexts(lines, - self.NUMPY_SECTION_NAMES): + for ctx in self._get_section_contexts(lines, self.NUMPY_SECTION_NAMES): found_any_numpy_section = True yield from self._check_numpy_section(docstring, definition, ctx) @@ -910,8 +1008,9 @@ def _check_google_sections(self, lines, definition, docstring): Yields all violation from `_check_google_section` for each valid Google-style section. """ - for ctx in self._get_section_contexts(lines, - self.GOOGLE_SECTION_NAMES): + for ctx in self._get_section_contexts( + lines, self.GOOGLE_SECTION_NAMES + ): yield from self._check_google_section(docstring, definition, ctx) @check_for(Definition) @@ -924,15 +1023,25 @@ def check_docstring_sections(self, definition, docstring): if len(lines) < 2: return - found_numpy = yield from self._check_numpy_sections(lines, definition, docstring) + found_numpy = yield from self._check_numpy_sections( + lines, definition, docstring + ) if not found_numpy: - yield from self._check_google_sections(lines, definition, docstring) + yield from self._check_google_sections( + lines, definition, docstring + ) parse = Parser() -def check(filenames, select=None, ignore=None, ignore_decorators=None): +def check( + filenames, + select=None, + ignore=None, + ignore_decorators=None, + ignore_inline_noqa=False, +): """Generate docstring errors that exist in `filenames` iterable. By default, the PEP-257 convention is checked. 
To specifically define the @@ -949,6 +1058,8 @@ def check(filenames, select=None, ignore=None, ignore_decorators=None): convenience, you may use `pydocstyle.violations.conventions.pep257` as a base set to add or remove errors from. + `ignore_inline_noqa` controls if `# noqa` comments are respected or not. + Examples --------- >>> check(['pydocstyle.py']) @@ -962,13 +1073,16 @@ def check(filenames, select=None, ignore=None, ignore_decorators=None): """ if select is not None and ignore is not None: - raise IllegalConfiguration('Cannot pass both select and ignore. ' - 'They are mutually exclusive.') + raise IllegalConfiguration( + 'Cannot pass both select and ignore. ' + 'They are mutually exclusive.' + ) elif select is not None: checked_codes = select elif ignore is not None: - checked_codes = list(set(violations.ErrorRegistry.get_error_codes()) - - set(ignore)) + checked_codes = list( + set(violations.ErrorRegistry.get_error_codes()) - set(ignore) + ) else: checked_codes = violations.conventions.pep257 @@ -977,12 +1091,13 @@ def check(filenames, select=None, ignore=None, ignore_decorators=None): try: with tk.open(filename) as file: source = file.read() - for error in ConventionChecker().check_source(source, filename, - ignore_decorators): + for error in ConventionChecker().check_source( + source, filename, ignore_decorators, ignore_inline_noqa + ): code = getattr(error, 'code', None) if code in checked_codes: yield error - except (EnvironmentError, AllError, ParseError) as error: + except (OSError, AllError, ParseError) as error: log.warning('Error in file %s: %s', filename, error) yield error except tk.TokenError: @@ -1008,10 +1123,12 @@ def get_leading_words(line): if result is not None: return result.group() + def is_def_arg_private(arg_name): """Return a boolean indicating if the argument name is private.""" return arg_name.startswith("_") + def get_function_args(function_source): """Return the function arguments given the source-code string.""" # We are stripping the whitespace from the left of the diff --git a/src/pydocstyle/cli.py b/src/pydocstyle/cli.py index 03aba304..21fc490c 100644 --- a/src/pydocstyle/cli.py +++ b/src/pydocstyle/cli.py @@ -2,13 +2,12 @@ import logging import sys +from .checker import check +from .config import ConfigurationParser, IllegalConfiguration from .utils import log from .violations import Error -from .config import ConfigurationParser, IllegalConfiguration -from .checker import check - -__all__ = ('main', ) +__all__ = ('main',) class ReturnCode: @@ -39,10 +38,18 @@ def run_pydocstyle(): errors = [] try: - for filename, checked_codes, ignore_decorators in \ - conf.get_files_to_check(): - errors.extend(check((filename,), select=checked_codes, - ignore_decorators=ignore_decorators)) + for ( + filename, + checked_codes, + ignore_decorators, + ) in conf.get_files_to_check(): + errors.extend( + check( + (filename,), + select=checked_codes, + ignore_decorators=ignore_decorators, + ) + ) except IllegalConfiguration as error: # An illegal configuration file was found during file generation. 
log.error(error.args[0]) @@ -72,6 +79,7 @@ def main(): def setup_stream_handlers(conf): """Set up logging stream handlers according to the options.""" + class StdoutFilter(logging.Filter): def filter(self, record): return record.levelno in (logging.DEBUG, logging.INFO) diff --git a/src/pydocstyle/config.py b/src/pydocstyle/config.py index 60d11f00..f5226204 100644 --- a/src/pydocstyle/config.py +++ b/src/pydocstyle/config.py @@ -5,8 +5,8 @@ import os from collections import namedtuple from collections.abc import Set -from re import compile as re from configparser import RawConfigParser +from re import compile as re from .utils import __version__, log from .violations import ErrorRegistry, conventions @@ -14,10 +14,12 @@ def check_initialized(method): """Check that the configuration object was initialized.""" + def _decorator(self, *args, **kwargs): if self._arguments is None or self._options is None: raise RuntimeError('using an uninitialized configuration') return method(self, *args, **kwargs) + return _decorator @@ -31,7 +33,7 @@ class ConfigurationParser: ------------------ Responsible for deciding things that are related to the user interface and configuration discovery, e.g. verbosity, debug options, etc. - All run configurations default to `False` or `None` and are decided only + All run configurations default to `False` or `None` and are decided only by CLI. Check Configurations: @@ -59,9 +61,16 @@ class ConfigurationParser: """ - CONFIG_FILE_OPTIONS = ('convention', 'select', 'ignore', 'add-select', - 'add-ignore', 'match', 'match-dir', - 'ignore-decorators') + CONFIG_FILE_OPTIONS = ( + 'convention', + 'select', + 'ignore', + 'add-select', + 'add-ignore', + 'match', + 'match-dir', + 'ignore-decorators', + ) BASE_ERROR_SELECTION_OPTIONS = ('ignore', 'select', 'convention') DEFAULT_MATCH_RE = r'(?!test_).*\.py' @@ -133,6 +142,7 @@ def get_files_to_check(self): might be raised. """ + def _get_matches(conf): """Return the `match` and `match_dir` functions for `config`.""" match_func = re(conf.match + '$').match @@ -141,8 +151,9 @@ def _get_matches(conf): def _get_ignore_decorators(conf): """Return the `ignore_decorators` as None or regex.""" - return (re(conf.ignore_decorators) if conf.ignore_decorators - else None) + return ( + re(conf.ignore_decorators) if conf.ignore_decorators else None + ) for name in self._arguments: if os.path.isdir(name): @@ -157,8 +168,11 @@ def _get_ignore_decorators(conf): for filename in filenames: if match(filename): full_path = os.path.join(root, filename) - yield (full_path, list(config.checked_codes), - ignore_decorators) + yield ( + full_path, + list(config.checked_codes), + ignore_decorators, + ) else: config = self._get_config(os.path.abspath(name)) match, _ = _get_matches(config) @@ -170,14 +184,14 @@ def _get_ignore_decorators(conf): def _get_config_by_discovery(self, node): """Get a configuration for checking `node` by config discovery. - + Config discovery happens when no explicit config file is specified. The file system is searched for config files starting from the directory containing the file being checked, and up until the root directory of the project. - + See `_get_config` for further details. - + """ path = self._get_node_dir(node) @@ -245,17 +259,20 @@ def _get_config(self, node): else: log.debug('Using config file %r', self._run_conf.config) if not os.path.exists(self._run_conf.config): - raise IllegalConfiguration('Configuration file {!r} specified ' - 'via --config was not found.' 
- .format(self._run_conf.config)) + raise IllegalConfiguration( + 'Configuration file {!r} specified ' + 'via --config was not found.'.format(self._run_conf.config) + ) if None in self._cache: return self._cache[None] options, _ = self._read_configuration_file(self._run_conf.config) if options is None: - log.warning('Configuration file does not contain a ' - 'pydocstyle section. Using default configuration.') + log.warning( + 'Configuration file does not contain a ' + 'pydocstyle section. Using default configuration.' + ) config = self._create_check_config(self._options) else: config = self._create_check_config(options) @@ -302,8 +319,7 @@ def _read_configuration_file(self, path): for group in self._parser.option_groups: all_options.extend(group.option_list) - option_list = {o.dest: o.type or o.action - for o in all_options} + option_list = {o.dest: o.type or o.action for o in all_options} # First, read the default values new_options, _ = self._parse_args([]) @@ -316,7 +332,7 @@ def _read_configuration_file(self, path): continue if opt.replace('_', '-') not in self.CONFIG_FILE_OPTIONS: - log.warning("Unknown option '{}' ignored".format(opt)) + log.warning(f"Unknown option '{opt}' ignored") continue normalized_opt = opt.replace('-', '_') @@ -335,7 +351,7 @@ def _read_configuration_file(self, path): if options is not None: if not self._validate_options(options): - raise IllegalConfiguration('in file: {}'.format(path)) + raise IllegalConfiguration(f'in file: {path}') return options, should_inherit @@ -356,8 +372,9 @@ def _merge_configuration(self, parent_config, child_options): kwargs = dict(checked_codes=error_codes) for key in ('match', 'match_dir', 'ignore_decorators'): - kwargs[key] = \ - getattr(child_options, key) or getattr(parent_config, key) + kwargs[key] = getattr(child_options, key) or getattr( + parent_config, key + ) return CheckConfiguration(**kwargs) def _parse_args(self, args=None, values=None): @@ -368,8 +385,9 @@ def _parse_args(self, args=None, values=None): @staticmethod def _create_run_config(options): """Create a `RunConfiguration` object from `options`.""" - values = {opt: getattr(options, opt) for opt in - RunConfiguration._fields} + values = { + opt: getattr(options, opt) for opt in RunConfiguration._fields + } return RunConfiguration(**values) @classmethod @@ -388,9 +406,11 @@ def _create_check_config(cls, options, use_defaults=True): kwargs = dict(checked_codes=checked_codes) for key in ('match', 'match_dir', 'ignore_decorators'): - kwargs[key] = getattr(cls, 'DEFAULT_{}_RE'.format(key.upper())) \ - if getattr(options, key) is None and use_defaults \ + kwargs[key] = ( + getattr(cls, f'DEFAULT_{key.upper()}_RE') + if getattr(options, key) is None and use_defaults else getattr(options, key) + ) return CheckConfiguration(**kwargs) @classmethod @@ -455,12 +475,15 @@ def _expand_error_codes(code_parts): if not part: continue - codes_to_add = {code for code in codes - if code.startswith(part)} + codes_to_add = { + code for code in codes if code.startswith(part) + } if not codes_to_add: log.warning( 'Error code passed is not a prefix of any ' - 'known errors: %s', part) + 'known errors: %s', + part, + ) expanded_codes.update(codes_to_add) except TypeError as e: raise IllegalConfiguration(e) from e @@ -486,25 +509,34 @@ def _validate_options(cls, options): was selected. 
""" - for opt1, opt2 in \ - itertools.permutations(cls.BASE_ERROR_SELECTION_OPTIONS, 2): + for opt1, opt2 in itertools.permutations( + cls.BASE_ERROR_SELECTION_OPTIONS, 2 + ): if getattr(options, opt1) and getattr(options, opt2): - log.error('Cannot pass both {} and {}. They are ' - 'mutually exclusive.'.format(opt1, opt2)) + log.error( + 'Cannot pass both {} and {}. They are ' + 'mutually exclusive.'.format(opt1, opt2) + ) return False if options.convention and options.convention not in conventions: - log.error("Illegal convention '{}'. Possible conventions: {}" - .format(options.convention, - ', '.join(conventions.keys()))) + log.error( + "Illegal convention '{}'. Possible conventions: {}".format( + options.convention, ', '.join(conventions.keys()) + ) + ) return False return True @classmethod def _has_exclusive_option(cls, options): """Return `True` iff one or more exclusive options were selected.""" - return any([getattr(options, opt) is not None for opt in - cls.BASE_ERROR_SELECTION_OPTIONS]) + return any( + [ + getattr(options, opt) is not None + for opt in cls.BASE_ERROR_SELECTION_OPTIONS + ] + ) @classmethod def _fix_set_options(cls, options): @@ -521,7 +553,7 @@ def _get_set(value_str): """ return cls._expand_error_codes( - set([x.strip() for x in value_str.split(",")]) - {""} + {x.strip() for x in value_str.split(",")} - {""} ) for opt in optional_set_options: @@ -544,34 +576,66 @@ def _get_set(value_str): @classmethod def _create_option_parser(cls): """Return an option parser to parse the command line arguments.""" - from optparse import OptionParser, OptionGroup + from optparse import OptionGroup, OptionParser parser = OptionParser( version=__version__, - usage='Usage: pydocstyle [options] [...]') + usage='Usage: pydocstyle [options] [...]', + ) option = parser.add_option # Run configuration options - option('-e', '--explain', action='store_true', default=False, - help='show explanation of each error') - option('-s', '--source', action='store_true', default=False, - help='show source for each error') - option('-d', '--debug', action='store_true', default=False, - help='print debug information') - option('-v', '--verbose', action='store_true', default=False, - help='print status information') - option('--count', action='store_true', default=False, - help='print total number of errors to stdout') - option('--config', metavar='', default=None, - help='use given config file and disable config discovery') - - parser.add_option_group(OptionGroup( - parser, - 'Note', - 'When using --match, --match-dir or --ignore-decorators consider ' - 'whether you should use a single quote (\') or a double quote ("), ' - 'depending on your OS, Shell, etc.')) + option( + '-e', + '--explain', + action='store_true', + default=False, + help='show explanation of each error', + ) + option( + '-s', + '--source', + action='store_true', + default=False, + help='show source for each error', + ) + option( + '-d', + '--debug', + action='store_true', + default=False, + help='print debug information', + ) + option( + '-v', + '--verbose', + action='store_true', + default=False, + help='print status information', + ) + option( + '--count', + action='store_true', + default=False, + help='print total number of errors to stdout', + ) + option( + '--config', + metavar='', + default=None, + help='use given config file and disable config discovery', + ) + + parser.add_option_group( + OptionGroup( + parser, + 'Note', + 'When using --match, --match-dir or --ignore-decorators consider ' + 'whether you should use a single 
quote (\') or a double quote ("), ' + 'depending on your OS, Shell, etc.', + ) + ) check_group = OptionGroup( parser, @@ -582,63 +646,105 @@ def _create_option_parser(cls): 'list" of error codes to check. If you wish to change that list ' '(for example, if you selected a known convention but wish to ' 'ignore a specific error from it or add a new one) you can ' - 'use `--add-[ignore/select]` in order to do so.') + 'use `--add-[ignore/select]` in order to do so.', + ) add_check = check_group.add_option # Error check options - add_check('--select', metavar='', default=None, - help='choose the basic list of checked errors by ' - 'specifying which errors to check for (with a list of ' - 'comma-separated error codes or prefixes). ' - 'for example: --select=D101,D2') - add_check('--ignore', metavar='', default=None, - help='choose the basic list of checked errors by ' - 'specifying which errors to ignore out of all of the ' - 'available error codes (with a list of ' - 'comma-separated error codes or prefixes). ' - 'for example: --ignore=D101,D2') - add_check('--convention', metavar='', default=None, - help='choose the basic list of checked errors by specifying ' - 'an existing convention. Possible conventions: {}.' - .format(', '.join(conventions))) - add_check('--add-select', metavar='', default=None, - help='add extra error codes to check to the basic list of ' - 'errors previously set by --select, --ignore or ' - '--convention.') - add_check('--add-ignore', metavar='', default=None, - help='ignore extra error codes by removing them from the ' - 'basic list previously set by --select, --ignore ' - 'or --convention.') + add_check( + '--select', + metavar='', + default=None, + help='choose the basic list of checked errors by ' + 'specifying which errors to check for (with a list of ' + 'comma-separated error codes or prefixes). ' + 'for example: --select=D101,D2', + ) + add_check( + '--ignore', + metavar='', + default=None, + help='choose the basic list of checked errors by ' + 'specifying which errors to ignore out of all of the ' + 'available error codes (with a list of ' + 'comma-separated error codes or prefixes). ' + 'for example: --ignore=D101,D2', + ) + add_check( + '--convention', + metavar='', + default=None, + help='choose the basic list of checked errors by specifying ' + 'an existing convention. 
Possible conventions: {}.'.format( + ', '.join(conventions) + ), + ) + add_check( + '--add-select', + metavar='', + default=None, + help='add extra error codes to check to the basic list of ' + 'errors previously set by --select, --ignore or ' + '--convention.', + ) + add_check( + '--add-ignore', + metavar='', + default=None, + help='ignore extra error codes by removing them from the ' + 'basic list previously set by --select, --ignore ' + 'or --convention.', + ) parser.add_option_group(check_group) # Match clauses - option('--match', metavar='', default=None, - help=("check only files that exactly match regular " - "expression; default is --match='{}' which matches " - "files that don't start with 'test_' but end with " - "'.py'").format(cls.DEFAULT_MATCH_RE)) - option('--match-dir', metavar='', default=None, - help=("search only dirs that exactly match regular " - "expression; default is --match-dir='{}', which " - "matches all dirs that don't start with " - "a dot").format(cls.DEFAULT_MATCH_DIR_RE)) + option( + '--match', + metavar='', + default=None, + help=( + "check only files that exactly match regular " + "expression; default is --match='{}' which matches " + "files that don't start with 'test_' but end with " + "'.py'" + ).format(cls.DEFAULT_MATCH_RE), + ) + option( + '--match-dir', + metavar='', + default=None, + help=( + "search only dirs that exactly match regular " + "expression; default is --match-dir='{}', which " + "matches all dirs that don't start with " + "a dot" + ).format(cls.DEFAULT_MATCH_DIR_RE), + ) # Decorators - option('--ignore-decorators', metavar='', default=None, - help=("ignore any functions or methods that are decorated " - "by a function with a name fitting the " - "regular expression; default is --ignore-decorators='{}'" - " which does not ignore any decorated functions." - .format(cls.DEFAULT_IGNORE_DECORATORS_RE))) + option( + '--ignore-decorators', + metavar='', + default=None, + help=( + "ignore any functions or methods that are decorated " + "by a function with a name fitting the " + "regular expression; default is --ignore-decorators='{}'" + " which does not ignore any decorated functions.".format( + cls.DEFAULT_IGNORE_DECORATORS_RE + ) + ), + ) return parser # Check configuration - used by the ConfigurationParser class. -CheckConfiguration = namedtuple('CheckConfiguration', - ('checked_codes', 'match', 'match_dir', - 'ignore_decorators')) +CheckConfiguration = namedtuple( + 'CheckConfiguration', + ('checked_codes', 'match', 'match_dir', 'ignore_decorators'), +) class IllegalConfiguration(Exception): @@ -648,6 +754,7 @@ class IllegalConfiguration(Exception): # General configurations for pydocstyle run. 
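The option definitions restructured above are behaviour-preserving; as a usage reminder, the error-selection, match and decorator options combine on the command line along these lines (the path, code and decorator pattern are illustrative only)::

    pydocstyle --convention=numpy --add-ignore=D401 --ignore-decorators='overload' src/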
-RunConfiguration = namedtuple('RunConfiguration', - ('explain', 'source', 'debug', - 'verbose', 'count', 'config')) +RunConfiguration = namedtuple( + 'RunConfiguration', + ('explain', 'source', 'debug', 'verbose', 'count', 'config'), +) diff --git a/src/pydocstyle/data/imperatives.txt b/src/pydocstyle/data/imperatives.txt index 36a76683..cb73f48f 100644 --- a/src/pydocstyle/data/imperatives.txt +++ b/src/pydocstyle/data/imperatives.txt @@ -137,6 +137,7 @@ open output override overwrite +package pad parse partial diff --git a/src/pydocstyle/data/imperatives_blacklist.txt b/src/pydocstyle/data/imperatives_blacklist.txt index 5057b9fc..d6b5fade 100644 --- a/src/pydocstyle/data/imperatives_blacklist.txt +++ b/src/pydocstyle/data/imperatives_blacklist.txt @@ -63,7 +63,6 @@ module new number optional -package placeholder reference result diff --git a/src/pydocstyle/parser.py b/src/pydocstyle/parser.py index 1b17c36c..a6f1ed0b 100644 --- a/src/pydocstyle/parser.py +++ b/src/pydocstyle/parser.py @@ -3,16 +3,28 @@ import sys import textwrap import tokenize as tk -from itertools import chain, dropwhile -from re import compile as re from io import StringIO +from itertools import chain, dropwhile from pathlib import Path +from re import compile as re +from typing import Tuple from .utils import log -__all__ = ('Parser', 'Definition', 'Module', 'Package', 'Function', - 'NestedFunction', 'Method', 'Class', 'NestedClass', 'AllError', - 'StringIO', 'ParseError') +__all__ = ( + 'Parser', + 'Definition', + 'Module', + 'Package', + 'Function', + 'NestedFunction', + 'Method', + 'Class', + 'NestedClass', + 'AllError', + 'StringIO', + 'ParseError', +) class ParseError(Exception): @@ -29,7 +41,8 @@ def __init__(self, token, expected_kind): def __str__(self): return "Unexpected token {}, expected {}".format( - self.token, self.expected_kind) + self.token, self.expected_kind + ) def humanize(string): @@ -41,9 +54,14 @@ class Value: def __init__(self, *args): if len(self._fields) != len(args): - raise ValueError('got {} arguments for {} fields for {}: {}' - .format(len(args), len(self._fields), - self.__class__.__name__, self._fields)) + raise ValueError( + 'got {} arguments for {} fields for {}: {}'.format( + len(args), + len(self._fields), + self.__class__.__name__, + self._fields, + ) + ) vars(self).update(zip(self._fields, args)) def __hash__(self): @@ -53,16 +71,27 @@ def __eq__(self, other): return other and vars(self) == vars(other) def __repr__(self): - kwargs = ', '.join('{}={!r}'.format(field, getattr(self, field)) - for field in self._fields) - return '{}({})'.format(self.__class__.__name__, kwargs) + kwargs = ', '.join( + '{}={!r}'.format(field, getattr(self, field)) + for field in self._fields + ) + return f'{self.__class__.__name__}({kwargs})' class Definition(Value): """A Python source code definition (could be class, function, etc).""" - _fields = ('name', '_source', 'start', 'end', 'decorators', 'docstring', - 'children', 'parent', 'skipped_error_codes') + _fields = ( + 'name', + '_source', + 'start', + 'end', + 'decorators', + 'docstring', + 'children', + 'parent', + 'skipped_error_codes', + ) # type: Tuple[str, ...] 
_human = property(lambda self: humanize(type(self).__name__)) kind = property(lambda self: self._human.split()[-1]) @@ -97,18 +126,29 @@ def is_empty_or_comment(line): return ''.join(reversed(list(filtered_src))) def __str__(self): - out = 'in {} {} `{}`'.format(self._publicity, self._human, self.name) + out = f'in {self._publicity} {self._human} `{self.name}`' if self.skipped_error_codes: - out += ' (skipping {})'.format(self.skipped_error_codes) + out += f' (skipping {self.skipped_error_codes})' return out class Module(Definition): """A Python source code module.""" - _fields = ('name', '_source', 'start', 'end', 'decorators', 'docstring', # type: ignore - 'children', 'parent', '_dunder_all', 'dunder_all_error', - 'future_imports', 'skipped_error_codes') + _fields = ( + 'name', + '_source', + 'start', + 'end', + 'decorators', + 'docstring', + 'children', + 'parent', + '_dunder_all', + 'dunder_all_error', + 'future_imports', + 'skipped_error_codes', + ) _nest = staticmethod(lambda s: {'def': Function, 'class': Class}[s]) module = property(lambda self: self) dunder_all = property(lambda self: self._dunder_all) @@ -120,9 +160,8 @@ def is_public(self): This helps determine if it requires a docstring. """ module_name = Path(self.name).stem - return ( - not self._is_inside_private_package() and - self._is_public_name(module_name) + return not self._is_inside_private_package() and self._is_public_name( + module_name ) def _is_inside_private_package(self): @@ -140,10 +179,8 @@ def _is_inside_private_package(self): def _is_public_name(self, module_name): """Determine whether a "module name" (i.e. module or package name) is public.""" - return ( - not module_name.startswith('_') or ( - module_name.startswith('__') and module_name.endswith('__') - ) + return not module_name.startswith('_') or ( + module_name.startswith('__') and module_name.endswith('__') ) def _is_private_name(self, module_name): @@ -161,8 +198,9 @@ class Package(Module): class Function(Definition): """A Python source code function.""" - _nest = staticmethod(lambda s: {'def': NestedFunction, - 'class': NestedClass}[s]) + _nest = staticmethod( + lambda s: {'def': NestedFunction, 'class': NestedClass}[s] + ) @property def is_public(self): @@ -172,6 +210,14 @@ def is_public(self): else: return not self.name.startswith('_') + @property + def is_overload(self): + """Return True iff the method decorated with overload.""" + for decorator in self.decorators: + if decorator.name == "overload": + return True + return False + @property def is_test(self): """Return True if this function is a test function/method. @@ -196,9 +242,11 @@ class Method(Function): @property def is_magic(self): """Return True iff this method is a magic method (e.g., `__str__`).""" - return (self.name.startswith('__') and - self.name.endswith('__') and - self.name not in VARIADIC_MAGIC_METHODS) + return ( + self.name.startswith('__') + and self.name.endswith('__') + and self.name not in VARIADIC_MAGIC_METHODS + ) @property def is_init(self): @@ -211,11 +259,13 @@ def is_public(self): # Check if we are a setter/deleter method, and mark as private if so. 
for decorator in self.decorators: # Given 'foo', match 'foo.bar' but not 'foobar' or 'sfoo' - if re(r"^{}\.".format(self.name)).match(decorator.name): + if re(fr"^{self.name}\.").match(decorator.name): return False - name_is_public = (not self.name.startswith('_') or - self.name in VARIADIC_MAGIC_METHODS or - self.is_magic) + name_is_public = ( + not self.name.startswith('_') + or self.name in VARIADIC_MAGIC_METHODS + or self.is_magic + ) return self.parent.is_public and name_is_public @property @@ -241,9 +291,11 @@ class NestedClass(Class): @property def is_public(self): """Return True iff this class should be considered public.""" - return (not self.name.startswith('_') and - self.parent.is_class and - self.parent.is_public) + return ( + not self.name.startswith('_') + and self.parent.is_class + and self.parent.is_public + ) class Decorator(Value): @@ -277,13 +329,18 @@ class AllError(Exception): def __init__(self, message): """Initialize the error with a more specific message.""" Exception.__init__( - self, message + textwrap.dedent(""" + self, + message + + textwrap.dedent( + """ That means pydocstyle cannot decide which definitions are public. Variable __all__ should be present at most once in each file, in form `__all__ = ('a_public_function', 'APublicClass', ...)`. More info on __all__: http://stackoverflow.com/q/44834/. ') - """)) + """ + ), + ) class TokenStream: @@ -343,7 +400,7 @@ def __init__(self, *args): self.kind = TokenKind(self.kind) def __str__(self): - return "{!r} ({})".format(self.kind, self.value) + return f"{self.kind!r} ({self.value})" class Parser: @@ -387,8 +444,9 @@ def leapfrog(self, kind, value=None): be skipped. """ while self.current is not None: - if (self.current.kind == kind and - (value is None or self.current.value == value)): + if self.current.kind == kind and ( + value is None or self.current.value == value + ): self.consume(kind) return self.stream.move() @@ -398,13 +456,14 @@ def parse_docstring(self): self.log.debug("parsing docstring, token is %s", self.current) while self.current.kind in (tk.COMMENT, tk.NEWLINE, tk.NL): self.stream.move() - self.log.debug("parsing docstring, token is %r (%s)", - self.current.kind, self.current.value) + self.log.debug( + "parsing docstring, token is %r (%s)", + self.current.kind, + self.current.value, + ) if self.current.kind == tk.STRING: docstring = Docstring( - self.current.value, - self.current.start[0], - self.current.end[0] + self.current.value, self.current.start[0], self.current.end[0] ) self.stream.move() return docstring @@ -421,16 +480,22 @@ def parse_decorators(self): at_arguments = False while self.current is not None: - self.log.debug("parsing decorators, current token is %r (%s)", - self.current.kind, self.current.value) - if (self.current.kind == tk.NAME and - self.current.value in ['def', 'class']): + self.log.debug( + "parsing decorators, current token is %r (%s)", + self.current.kind, + self.current.value, + ) + if self.current.kind == tk.NAME and self.current.value in [ + 'def', + 'class', + ]: # Done with decorators - found function or class proper break elif self.current.kind == tk.OP and self.current.value == '@': # New decorator found. 
Store the decorator accumulated so far: self._accumulated_decorators.append( - Decorator(''.join(name), ''.join(arguments))) + Decorator(''.join(name), ''.join(arguments)) + ) # Now reset to begin accumulating the new decorator: name = [] arguments = [] @@ -453,27 +518,32 @@ def parse_decorators(self): # Add decorator accumulated so far self._accumulated_decorators.append( - Decorator(''.join(name), ''.join(arguments))) + Decorator(''.join(name), ''.join(arguments)) + ) def parse_definitions(self, class_, dunder_all=False): """Parse multiple definitions and yield them.""" while self.current is not None: - self.log.debug("parsing definition list, current token is %r (%s)", - self.current.kind, self.current.value) + self.log.debug( + "parsing definition list, current token is %r (%s)", + self.current.kind, + self.current.value, + ) self.log.debug('got_newline: %s', self.stream.got_logical_newline) if dunder_all and self.current.value == '__all__': self.parse_dunder_all() - elif (self.current.kind == tk.OP and - self.current.value == '@' and - self.stream.got_logical_newline): + elif ( + self.current.kind == tk.OP + and self.current.value == '@' + and self.stream.got_logical_newline + ): self.consume(tk.OP) self.parse_decorators() elif self.current.value in ['def', 'class']: yield self.parse_definition(class_._nest(self.current.value)) elif self.current.kind == tk.INDENT: self.consume(tk.INDENT) - for definition in self.parse_definitions(class_): - yield definition + yield from self.parse_definitions(class_) elif self.current.kind == tk.DEDENT: self.consume(tk.DEDENT) return @@ -509,18 +579,22 @@ def parse_dunder_all(self): break if self.current.kind in (tk.NL, tk.COMMENT): pass - elif (self.current.kind == tk.STRING or self.current.value == ','): + elif self.current.kind == tk.STRING or self.current.value == ',': dunder_all_content += self.current.value else: - self.dunder_all_error = 'Could not evaluate contents of __all__.' + self.dunder_all_error = ( + 'Could not evaluate contents of __all__.' + ) return self.stream.move() if is_surrounded: self.consume(tk.OP) if not is_surrounded and ',' not in dunder_all_content: self.dunder_all_error = ( - 'Unexpected token kind in __all__: {!r}. ' - .format(self.current.kind)) + 'Unexpected token kind in __all__: {!r}. '.format( + self.current.kind + ) + ) return dunder_all_content += ")" @@ -529,14 +603,20 @@ def parse_dunder_all(self): except BaseException as e: self.dunder_all_error = ( 'Could not evaluate contents of __all__.' - '\bThe value was {}. The exception was:\n{}' - .format(dunder_all_content, e)) + '\bThe value was {}. The exception was:\n{}'.format( + dunder_all_content, e + ) + ) - while (self.current.kind not in self.stream.LOGICAL_NEWLINES and - self.current.kind != tk.ENDMARKER): + while ( + self.current.kind not in self.stream.LOGICAL_NEWLINES + and self.current.kind != tk.ENDMARKER + ): if self.current.kind != tk.COMMENT: self.dunder_all = None - self.dunder_all_error = 'Could not evaluate contents of __all__. ' + self.dunder_all_error = ( + 'Could not evaluate contents of __all__. 
' + ) return self.stream.move() @@ -552,9 +632,20 @@ def parse_module(self): cls = Module if self.filename.endswith('__init__.py'): cls = Package - module = cls(self.filename, self.source, start, end, - [], docstring, children, None, self.dunder_all, - self.dunder_all_error, None, skipped_error_codes) + module = cls( + self.filename, + self.source, + start, + end, + [], + docstring, + children, + None, + self.dunder_all, + self.dunder_all_error, + None, + skipped_error_codes, + ) for child in module.children: child.parent = module module.future_imports = self.future_imports @@ -593,23 +684,38 @@ def parse_definition(self, class_): self._accumulated_decorators = [] self.log.debug("parsing nested definitions.") children = list(self.parse_definitions(class_)) - self.log.debug("finished parsing nested definitions for '%s'", - name) + self.log.debug( + "finished parsing nested definitions for '%s'", name + ) end = self.line - 1 else: # one-liner definition skipped_error_codes = '' docstring = self.parse_docstring() - decorators = [] # TODO + decorators = self._accumulated_decorators + self.log.debug("current accumulated decorators: %s", decorators) + self._accumulated_decorators = [] children = [] end = self.line self.leapfrog(tk.NEWLINE) - definition = class_(name, self.source, start, end, - decorators, docstring, children, None, - skipped_error_codes) + definition = class_( + name, + self.source, + start, + end, + decorators, + docstring, + children, + None, + skipped_error_codes, + ) for child in definition.children: child.parent = definition - self.log.debug("finished parsing %s '%s'. Next token is %r", - class_.__name__, name, self.current) + self.log.debug( + "finished parsing %s '%s'. Next token is %r", + class_.__name__, + name, + self.current, + ) return definition def parse_skip_comment(self): @@ -619,12 +725,16 @@ def parse_skip_comment(self): if self.current.kind == tk.COMMENT: if 'noqa: ' in self.current.value: skipped_error_codes = ''.join( - self.current.value.split('noqa: ')[1:]) + self.current.value.split('noqa: ')[1:] + ) elif self.current.value.startswith('# noqa'): skipped_error_codes = 'all' self.stream.move() - self.log.debug("parsing comments before docstring, token is %r (%s)", - self.current.kind, self.current.value) + self.log.debug( + "parsing comments before docstring, token is %r (%s)", + self.current.kind, + self.current.value, + ) if skipped_error_codes: break @@ -633,14 +743,18 @@ def parse_skip_comment(self): def check_current(self, kind=None, value=None): """Verify the current token is of type `kind` and equals `value`.""" - msg = textwrap.dedent(""" + msg = textwrap.dedent( + """ Unexpected token at line {self.line}: In file: {self.filename} Got kind {self.current.kind!r} Got value {self.current.value} - """.format(self=self)) + """.format( + self=self + ) + ) kind_valid = self.current.kind == kind if kind else True value_valid = self.current.value == value if value else True assert kind_valid and value_valid, msg @@ -664,9 +778,11 @@ def _parse_from_import_source(self): self.stream.move() is_future_import = self.current.value == '__future__' self.stream.move() - while (self.current is not None and - self.current.kind in (tk.DOT, tk.NAME, tk.OP) and - self.current.value != 'import'): + while ( + self.current is not None + and self.current.kind in (tk.DOT, tk.NAME, tk.OP) + and self.current.value != 'import' + ): self.stream.move() if self.current is None or self.current.value != 'import': return False @@ -679,27 +795,37 @@ def _parse_from_import_names(self, 
is_future_import): """Parse the 'y' part in a 'from x import y' statement.""" if self.current.value == '(': self.consume(tk.OP) - expected_end_kinds = (tk.OP, ) + expected_end_kinds = (tk.OP,) else: expected_end_kinds = (tk.NEWLINE, tk.ENDMARKER) while self.current.kind not in expected_end_kinds and not ( - self.current.kind == tk.OP and self.current.value == ';'): + self.current.kind == tk.OP and self.current.value == ';' + ): if self.current.kind != tk.NAME: self.stream.move() continue - self.log.debug("parsing import, token is %r (%s)", - self.current.kind, self.current.value) + self.log.debug( + "parsing import, token is %r (%s)", + self.current.kind, + self.current.value, + ) if is_future_import: self.log.debug('found future import: %s', self.current.value) self.future_imports.add(self.current.value) self.consume(tk.NAME) - self.log.debug("parsing import, token is %r (%s)", - self.current.kind, self.current.value) + self.log.debug( + "parsing import, token is %r (%s)", + self.current.kind, + self.current.value, + ) if self.current.kind == tk.NAME and self.current.value == 'as': self.consume(tk.NAME) # as if self.current.kind == tk.NAME: self.consume(tk.NAME) # new name, irrelevant if self.current.value == ',': self.consume(tk.OP) - self.log.debug("parsing import, token is %r (%s)", - self.current.kind, self.current.value) + self.log.debug( + "parsing import, token is %r (%s)", + self.current.kind, + self.current.value, + ) diff --git a/src/pydocstyle/utils.py b/src/pydocstyle/utils.py index ba0b29ae..5c6471e3 100644 --- a/src/pydocstyle/utils.py +++ b/src/pydocstyle/utils.py @@ -2,12 +2,11 @@ import ast import logging import re -from typing import Iterable, Any, Tuple from itertools import tee, zip_longest - +from typing import Any, Iterable, Tuple # Do not update the version manually - it is managed by `bumpversion`. -__version__ = '5.1.1' +__version__ = '6.0.0' log = logging.getLogger(__name__) #: Regular expression for stripping non-alphanumeric characters diff --git a/src/pydocstyle/violations.py b/src/pydocstyle/violations.py index 6515d3d6..eb2b6d4c 100644 --- a/src/pydocstyle/violations.py +++ b/src/pydocstyle/violations.py @@ -1,13 +1,12 @@ """Docstring violation definition.""" -from itertools import dropwhile -from functools import partial from collections import namedtuple -from typing import Iterable, Optional, List, Callable, Any +from functools import partial +from itertools import dropwhile +from typing import Any, Callable, Iterable, List, Optional -from .utils import is_blank from .parser import Definition - +from .utils import is_blank __all__ = ('Error', 'ErrorRegistry', 'conventions') @@ -23,11 +22,11 @@ class Error: source = False def __init__( - self, - code: str, - short_desc: str, - context: str, - *parameters: Iterable[str] + self, + code: str, + short_desc: str, + context: str, + *parameters: Iterable[str], ) -> None: """Initialize the object. 
@@ -52,10 +51,10 @@ def set_context(self, definition: Definition, explanation: str) -> None: @property def message(self) -> str: """Return the message to print to the user.""" - ret = '{}: {}'.format(self.code, self.short_desc) + ret = f'{self.code}: {self.short_desc}' if self.context is not None: specific_error_msg = self.context.format(*self.parameters) - ret += ' ({})'.format(specific_error_msg) + ret += f' ({specific_error_msg})' return ret @property @@ -66,10 +65,11 @@ def lines(self) -> str: source = '' lines = self.definition.source offset = self.definition.start # type: ignore - lines_stripped = list(reversed(list(dropwhile(is_blank, - reversed(lines))))) + lines_stripped = list( + reversed(list(dropwhile(is_blank, reversed(lines)))) + ) numbers_width = len(str(offset + len(lines_stripped))) - line_format = '{{:{}}}:{{}}'.format(numbers_width) + line_format = f'{{:{numbers_width}}}:{{}}' for n, line in enumerate(lines_stripped): if line: line = ' ' + line @@ -81,8 +81,9 @@ def lines(self) -> str: def __str__(self) -> str: if self.explanation: - self.explanation = '\n'.join(l for l in self.explanation.split('\n') - if not is_blank(l)) + self.explanation = '\n'.join( + l for l in self.explanation.split('\n') if not is_blank(l) + ) template = '{filename}:{line} {definition}:\n {message}' if self.source and self.explain: template += '\n\n{explanation}\n\n{lines}\n' @@ -90,9 +91,19 @@ def __str__(self) -> str: template += '\n\n{lines}\n' elif self.explain and not self.source: template += '\n\n{explanation}\n\n' - return template.format(**{name: getattr(self, name) for name in - ['filename', 'line', 'definition', 'message', - 'explanation', 'lines']}) + return template.format( + **{ + name: getattr(self, name) + for name in [ + 'filename', + 'line', + 'definition', + 'message', + 'explanation', + 'lines', + ] + } + ) def __repr__(self) -> str: return str(self) @@ -122,10 +133,10 @@ def __init__(self, prefix: str, name: str) -> None: self.errors = [] # type: List[ErrorParams] def create_error( - self, - error_code: str, - error_desc: str, - error_context: Optional[str] = None, + self, + error_code: str, + error_desc: str, + error_context: Optional[str] = None, ) -> Callable[[Iterable[str]], Error]: """Create an error, register it to this group and return it.""" # TODO: check prefix @@ -152,108 +163,259 @@ def get_error_codes(cls) -> Iterable[str]: @classmethod def to_rst(cls) -> str: """Output the registry as reStructuredText, for documentation.""" - max_len = max(len(error.short_desc) for group in cls.groups for error in group.errors) + max_len = max( + len(error.short_desc) + for group in cls.groups + for error in group.errors + ) sep_line = '+' + 6 * '-' + '+' + '-' * (max_len + 2) + '+\n' blank_line = '|' + (max_len + 9) * ' ' + '|\n' table = '' for group in cls.groups: table += sep_line table += blank_line - table += '|' + '**{}**'.format(group.name).center(max_len + 9) + '|\n' + table += '|' + f'**{group.name}**'.center(max_len + 9) + '|\n' table += blank_line for error in group.errors: table += sep_line - table += ('|' + error.code.center(6) + '| ' + - error.short_desc.ljust(max_len + 1) + '|\n') + table += ( + '|' + + error.code.center(6) + + '| ' + + error.short_desc.ljust(max_len + 1) + + '|\n' + ) table += sep_line return table D1xx = ErrorRegistry.create_group('D1', 'Missing Docstrings') -D100 = D1xx.create_error('D100', 'Missing docstring in public module') -D101 = D1xx.create_error('D101', 'Missing docstring in public class') -D102 = D1xx.create_error('D102', 'Missing 
docstring in public method') -D103 = D1xx.create_error('D103', 'Missing docstring in public function') -D104 = D1xx.create_error('D104', 'Missing docstring in public package') -D105 = D1xx.create_error('D105', 'Missing docstring in magic method') -D106 = D1xx.create_error('D106', 'Missing docstring in public nested class') -D107 = D1xx.create_error('D107', 'Missing docstring in __init__') +D100 = D1xx.create_error( + 'D100', + 'Missing docstring in public module', +) +D101 = D1xx.create_error( + 'D101', + 'Missing docstring in public class', +) +D102 = D1xx.create_error( + 'D102', + 'Missing docstring in public method', +) +D103 = D1xx.create_error( + 'D103', + 'Missing docstring in public function', +) +D104 = D1xx.create_error( + 'D104', + 'Missing docstring in public package', +) +D105 = D1xx.create_error( + 'D105', + 'Missing docstring in magic method', +) +D106 = D1xx.create_error( + 'D106', + 'Missing docstring in public nested class', +) +D107 = D1xx.create_error( + 'D107', + 'Missing docstring in __init__', +) D2xx = ErrorRegistry.create_group('D2', 'Whitespace Issues') -D200 = D2xx.create_error('D200', 'One-line docstring should fit on one line ' - 'with quotes', 'found {0}') -D201 = D2xx.create_error('D201', 'No blank lines allowed before function ' - 'docstring', 'found {0}') -D202 = D2xx.create_error('D202', 'No blank lines allowed after function ' - 'docstring', 'found {0}') -D203 = D2xx.create_error('D203', '1 blank line required before class ' - 'docstring', 'found {0}') -D204 = D2xx.create_error('D204', '1 blank line required after class ' - 'docstring', 'found {0}') -D205 = D2xx.create_error('D205', '1 blank line required between summary line ' - 'and description', 'found {0}') -D206 = D2xx.create_error('D206', 'Docstring should be indented with spaces, ' - 'not tabs') -D207 = D2xx.create_error('D207', 'Docstring is under-indented') -D208 = D2xx.create_error('D208', 'Docstring is over-indented') -D209 = D2xx.create_error('D209', 'Multi-line docstring closing quotes should ' - 'be on a separate line') -D210 = D2xx.create_error('D210', 'No whitespaces allowed surrounding ' - 'docstring text') -D211 = D2xx.create_error('D211', 'No blank lines allowed before class ' - 'docstring', 'found {0}') -D212 = D2xx.create_error('D212', 'Multi-line docstring summary should start ' - 'at the first line') -D213 = D2xx.create_error('D213', 'Multi-line docstring summary should start ' - 'at the second line') -D214 = D2xx.create_error('D214', 'Section is over-indented', '{0!r}') -D215 = D2xx.create_error('D215', 'Section underline is over-indented', - 'in section {0!r}') +D200 = D2xx.create_error( + 'D200', + 'One-line docstring should fit on one line ' 'with quotes', + 'found {0}', +) +D201 = D2xx.create_error( + 'D201', + 'No blank lines allowed before function docstring', + 'found {0}', +) +D202 = D2xx.create_error( + 'D202', + 'No blank lines allowed after function docstring', + 'found {0}', +) +D203 = D2xx.create_error( + 'D203', + '1 blank line required before class docstring', + 'found {0}', +) +D204 = D2xx.create_error( + 'D204', + '1 blank line required after class docstring', + 'found {0}', +) +D205 = D2xx.create_error( + 'D205', + '1 blank line required between summary line and description', + 'found {0}', +) +D206 = D2xx.create_error( + 'D206', + 'Docstring should be indented with spaces, not tabs', +) +D207 = D2xx.create_error( + 'D207', + 'Docstring is under-indented', +) +D208 = D2xx.create_error( + 'D208', + 'Docstring is over-indented', +) +D209 = D2xx.create_error( + 
'D209', + 'Multi-line docstring closing quotes should be on a separate line', +) +D210 = D2xx.create_error( + 'D210', + 'No whitespaces allowed surrounding docstring text', +) +D211 = D2xx.create_error( + 'D211', + 'No blank lines allowed before class docstring', + 'found {0}', +) +D212 = D2xx.create_error( + 'D212', + 'Multi-line docstring summary should start at the first line', +) +D213 = D2xx.create_error( + 'D213', + 'Multi-line docstring summary should start at the second line', +) +D214 = D2xx.create_error( + 'D214', + 'Section is over-indented', + '{0!r}', +) +D215 = D2xx.create_error( + 'D215', + 'Section underline is over-indented', + 'in section {0!r}', +) D3xx = ErrorRegistry.create_group('D3', 'Quotes Issues') -D300 = D3xx.create_error('D300', 'Use """triple double quotes"""', - 'found {0}-quotes') -D301 = D3xx.create_error('D301', 'Use r""" if any backslashes in a docstring') -D302 = D3xx.create_error('D302', 'Use u""" for Unicode docstrings') +D300 = D3xx.create_error( + 'D300', + 'Use """triple double quotes"""', + 'found {0}-quotes', +) +D301 = D3xx.create_error( + 'D301', + 'Use r""" if any backslashes in a docstring', +) +D302 = D3xx.create_error( + 'D302', + 'Deprecated: Use u""" for Unicode docstrings', +) D4xx = ErrorRegistry.create_group('D4', 'Docstring Content Issues') -D400 = D4xx.create_error('D400', 'First line should end with a period', - 'not {0!r}') -D401 = D4xx.create_error('D401', 'First line should be in imperative mood', - "perhaps '{0}', not '{1}'") -D401b = D4xx.create_error('D401', 'First line should be in imperative mood; ' - 'try rephrasing', "found '{0}'") -D402 = D4xx.create_error('D402', 'First line should not be the function\'s ' - '"signature"') -D403 = D4xx.create_error('D403', 'First word of the first line should be ' - 'properly capitalized', '{0!r}, not {1!r}') -D404 = D4xx.create_error('D404', 'First word of the docstring should not ' - 'be `This`') -D405 = D4xx.create_error('D405', 'Section name should be properly capitalized', - '{0!r}, not {1!r}') -D406 = D4xx.create_error('D406', 'Section name should end with a newline', - '{0!r}, not {1!r}') -D407 = D4xx.create_error('D407', 'Missing dashed underline after section', - '{0!r}') -D408 = D4xx.create_error('D408', 'Section underline should be in the line ' - 'following the section\'s name', - '{0!r}') -D409 = D4xx.create_error('D409', 'Section underline should match the length ' - 'of its name', - 'Expected {0!r} dashes in section {1!r}, got {2!r}') -D410 = D4xx.create_error('D410', 'Missing blank line after section', '{0!r}') -D411 = D4xx.create_error('D411', 'Missing blank line before section', '{0!r}') -D412 = D4xx.create_error('D412', 'No blank lines allowed between a section ' - 'header and its content', '{0!r}') -D413 = D4xx.create_error('D413', 'Missing blank line after last section', - '{0!r}') -D414 = D4xx.create_error('D414', 'Section has no content', '{0!r}') -D415 = D4xx.create_error('D415', 'First line should end with a period, question ' - 'mark, or exclamation point', 'not {0!r}') -D416 = D4xx.create_error('D416', 'Section name should end with a colon', - '{0!r}, not {1!r}') -D417 = D4xx.create_error('D417', 'Missing argument descriptions in the docstring', - 'argument(s) {0} are missing descriptions in {1!r} docstring') +D400 = D4xx.create_error( + 'D400', + 'First line should end with a period', + 'not {0!r}', +) +D401 = D4xx.create_error( + 'D401', + 'First line should be in imperative mood', + "perhaps '{0}', not '{1}'", +) +D401b = D4xx.create_error( + 'D401', + 'First 
line should be in imperative mood; try rephrasing', + "found '{0}'", +) +D402 = D4xx.create_error( + 'D402', + 'First line should not be the function\'s "signature"', +) +D403 = D4xx.create_error( + 'D403', + 'First word of the first line should be properly capitalized', + '{0!r}, not {1!r}', +) +D404 = D4xx.create_error( + 'D404', + 'First word of the docstring should not be `This`', +) +D405 = D4xx.create_error( + 'D405', + 'Section name should be properly capitalized', + '{0!r}, not {1!r}', +) +D406 = D4xx.create_error( + 'D406', + 'Section name should end with a newline', + '{0!r}, not {1!r}', +) +D407 = D4xx.create_error( + 'D407', + 'Missing dashed underline after section', + '{0!r}', +) +D408 = D4xx.create_error( + 'D408', + 'Section underline should be in the line following the section\'s name', + '{0!r}', +) +D409 = D4xx.create_error( + 'D409', + 'Section underline should match the length of its name', + 'Expected {0!r} dashes in section {1!r}, got {2!r}', +) +D410 = D4xx.create_error( + 'D410', + 'Missing blank line after section', + '{0!r}', +) +D411 = D4xx.create_error( + 'D411', + 'Missing blank line before section', + '{0!r}', +) +D412 = D4xx.create_error( + 'D412', + 'No blank lines allowed between a section header and its content', + '{0!r}', +) +D413 = D4xx.create_error( + 'D413', + 'Missing blank line after last section', + '{0!r}', +) +D414 = D4xx.create_error( + 'D414', + 'Section has no content', + '{0!r}', +) +D415 = D4xx.create_error( + 'D415', + ( + 'First line should end with a period, question ' + 'mark, or exclamation point' + ), + 'not {0!r}', +) +D416 = D4xx.create_error( + 'D416', + 'Section name should end with a colon', + '{0!r}, not {1!r}', +) +D417 = D4xx.create_error( + 'D417', + 'Missing argument descriptions in the docstring', + 'argument(s) {0} are missing descriptions in {1!r} docstring', +) + +D418 = D4xx.create_error( + 'D418', + 'Function/ Method decorated with @overload shouldn\'t contain a docstring', +) + class AttrDict(dict): def __getattr__(self, item: str) -> Any: @@ -263,12 +425,55 @@ def __getattr__(self, item: str) -> Any: all_errors = set(ErrorRegistry.get_error_codes()) -conventions = AttrDict({ - 'pep257': all_errors - {'D203', 'D212', 'D213', 'D214', 'D215', 'D404', - 'D405', 'D406', 'D407', 'D408', 'D409', 'D410', - 'D411', 'D413', 'D415', 'D416', 'D417'}, - 'numpy': all_errors - {'D107', 'D203', 'D212', 'D213', 'D402', 'D413', - 'D415', 'D416', 'D417'}, - 'google': all_errors - {'D203', 'D204', 'D213', 'D215', 'D400', 'D401', - 'D404', 'D406', 'D407', 'D408', 'D409', 'D413'} -}) +conventions = AttrDict( + { + 'pep257': all_errors + - { + 'D203', + 'D212', + 'D213', + 'D214', + 'D215', + 'D404', + 'D405', + 'D406', + 'D407', + 'D408', + 'D409', + 'D410', + 'D411', + 'D413', + 'D415', + 'D416', + 'D417', + 'D418', + }, + 'numpy': all_errors + - { + 'D107', + 'D203', + 'D212', + 'D213', + 'D402', + 'D413', + 'D415', + 'D416', + 'D417', + }, + 'google': all_errors + - { + 'D203', + 'D204', + 'D213', + 'D215', + 'D400', + 'D401', + 'D404', + 'D406', + 'D407', + 'D408', + 'D409', + 'D413', + }, + } +) diff --git a/src/pydocstyle/wordlists.py b/src/pydocstyle/wordlists.py index ec5c3177..c44f000f 100644 --- a/src/pydocstyle/wordlists.py +++ b/src/pydocstyle/wordlists.py @@ -4,11 +4,11 @@ therefore we load them at import time, rather than on-demand. 
""" -import re import pkgutil -import snowballstemmer -from typing import Iterator, Dict, Set +import re +from typing import Dict, Iterator, Set +import snowballstemmer #: Regular expression for stripping comments from the wordlists COMMENT_RE = re.compile(r'\s*#.*') diff --git a/src/tests/parser_test.py b/src/tests/parser_test.py index 05a329ec..582c6cde 100644 --- a/src/tests/parser_test.py +++ b/src/tests/parser_test.py @@ -52,10 +52,6 @@ def do_something(pos_param0, pos_param1, kw_param0="default"): def test_simple_fstring(): """Test parsing of a function with a simple fstring as a docstring.""" - # fstrings are not supported in Python 3.5 - if sys.version_info[0:2] == (3, 5): - return - parser = Parser() code = CodeSnippet("""\ def do_something(pos_param0, pos_param1, kw_param0="default"): @@ -85,10 +81,6 @@ def do_something(pos_param0, pos_param1, kw_param0="default"): def test_fstring_with_args(): """Test parsing of a function with an fstring with args as a docstring.""" - # fstrings are not supported in Python 3.5 - if sys.version_info[0:2] == (3, 5): - return - parser = Parser() code = CodeSnippet("""\ foo = "bar" @@ -888,6 +880,19 @@ def test(): def test(): pass """), + CodeSnippet("""\ + '''Test this''' + + @property + def test(): pass + """), + CodeSnippet("""\ + '''Test this''' + + @first_decorator + @property + def test(): pass + """), )) def test_parsing_function_decorators(code): """Test to ensure we are correctly parsing function decorators.""" diff --git a/src/tests/test_cases/canonical_google_examples.py b/src/tests/test_cases/canonical_google_examples.py index c219191f..301f83c4 100644 --- a/src/tests/test_cases/canonical_google_examples.py +++ b/src/tests/test_cases/canonical_google_examples.py @@ -83,7 +83,7 @@ def fetch_bigtable_rows(big_table, keys, other_silly_variable=None): "('Attributes', not 'Attributes:')") @expect("D407: Missing dashed underline after section ('Attributes')") @expect("D413: Missing blank line after last section ('Attributes')") -class SampleClass(object): +class SampleClass: """Summary of class here. Longer class information.... diff --git a/src/tests/test_cases/noqa.py b/src/tests/test_cases/noqa.py index 7eeeac87..bbbc6a41 100644 --- a/src/tests/test_cases/noqa.py +++ b/src/tests/test_cases/noqa.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # noqa: D400,D415 """Test case for "# noqa" comments""" from .expected import Expectation diff --git a/src/tests/test_cases/test.py b/src/tests/test_cases/test.py index 20b81fce..49fd471a 100644 --- a/src/tests/test_cases/test.py +++ b/src/tests/test_cases/test.py @@ -1,9 +1,8 @@ -# encoding: utf-8 # No docstring, so we can test D100 from functools import wraps import os -import sys from .expected import Expectation +from typing import overload expectation = Expectation() @@ -26,6 +25,23 @@ def method(self=None): def _ok_since_private(self=None): pass + @overload + def overloaded_method(self, a: int) -> str: + ... + + @overload + def overloaded_method(self, a: str) -> str: + """Foo bar documentation.""" + ... + + def overloaded_method(a): + """Foo bar documentation.""" + return str(a) + + expect('overloaded_method', + "D418: Function/ Method decorated with @overload" + " shouldn't contain a docstring") + @expect('D102: Missing docstring in public method') def __new__(self=None): pass @@ -54,6 +70,48 @@ def nested(): '' +def function_with_nesting(): + """Foo bar documentation.""" + @overload + def nested_overloaded_func(a: int) -> str: + ... 
+ + @overload + def nested_overloaded_func(a: str) -> str: + """Foo bar documentation.""" + ... + + def nested_overloaded_func(a): + """Foo bar documentation.""" + return str(a) + + +expect('nested_overloaded_func', + "D418: Function/ Method decorated with @overload" + " shouldn't contain a docstring") + + +@overload +def overloaded_func(a: int) -> str: + ... + + +@overload +def overloaded_func(a: str) -> str: + """Foo bar documentation.""" + ... + + +def overloaded_func(a): + """Foo bar documentation.""" + return str(a) + + +expect('overloaded_func', + "D418: Function/ Method decorated with @overload" + " shouldn't contain a docstring") + + @expect('D200: One-line docstring should fit on one line with quotes ' '(found 3)') @expect('D212: Multi-line docstring summary should start at the first line') @@ -280,16 +338,6 @@ def exceptions_of_D301(): """ -if sys.version_info[0] <= 2: - @expect('D302: Use u""" for Unicode docstrings') - def unicode_unmarked(): - """Юникод.""" - - @expect('D302: Use u""" for Unicode docstrings') - def first_word_has_unicode_byte(): - """あy.""" - - @expect("D400: First line should end with a period (not 'y')") @expect("D415: First line should end with a period, question mark, " "or exclamation point (not 'y')") @@ -348,6 +396,33 @@ def oneliner_d102(): return def oneliner_withdoc(): """One liner""" +def ignored_decorator(func): # noqa: D400,D401,D415 + """Runs something""" + func() + pass + + +def decorator_for_test(func): # noqa: D400,D401,D415 + """Runs something""" + func() + pass + + +@ignored_decorator +def oneliner_ignored_decorator(): """One liner""" + + +@decorator_for_test +@expect("D400: First line should end with a period (not 'r')") +@expect("D415: First line should end with a period, question mark," + " or exclamation point (not 'r')") +def oneliner_with_decorator_expecting_errors(): """One liner""" + + +@decorator_for_test +def valid_oneliner_with_decorator(): """One liner.""" + + @expect("D207: Docstring is under-indented") @expect('D213: Multi-line docstring summary should start at the second line') def docstring_start_in_same_line(): """First Line. 
diff --git a/src/tests/test_cases/unicode_literals.py b/src/tests/test_cases/unicode_literals.py index 5f7dd700..f0aa466e 100644 --- a/src/tests/test_cases/unicode_literals.py +++ b/src/tests/test_cases/unicode_literals.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- """A module.""" -from __future__ import unicode_literals from .expected import Expectation diff --git a/src/tests/test_definitions.py b/src/tests/test_definitions.py index 83109b5a..3971f0a0 100644 --- a/src/tests/test_definitions.py +++ b/src/tests/test_definitions.py @@ -26,7 +26,7 @@ ]) def test_complex_file(test_case): """Run domain-specific tests from test.py file.""" - case_module = __import__('test_cases.{}'.format(test_case), + case_module = __import__(f'test_cases.{test_case}', globals=globals(), locals=locals(), fromlist=['expectation'], @@ -37,7 +37,8 @@ def test_complex_file(test_case): test_case + '.py') results = list(check([test_case_file], select=set(ErrorRegistry.get_error_codes()), - ignore_decorators=re.compile('wraps'))) + ignore_decorators=re.compile( + 'wraps|ignored_decorator'))) for error in results: assert isinstance(error, Error) results = {(e.definition.name, e.message) for e in results} diff --git a/src/tests/test_integration.py b/src/tests/test_integration.py index da1c5748..22f57857 100644 --- a/src/tests/test_integration.py +++ b/src/tests/test_integration.py @@ -1,12 +1,8 @@ -# -*- coding: utf-8 -*- - """Use tox or py.test to run the test-suite.""" -from __future__ import with_statement from collections import namedtuple import os -import sys import shlex import shutil import pytest @@ -52,7 +48,7 @@ def write_config(self, prefix='', name='tox.ini', **kwargs): self.makedirs(base) with open(os.path.join(base, name), 'wt') as conf: - conf.write("[{}]\n".format(self.script_name)) + conf.write(f"[{self.script_name}]\n") for k, v in kwargs.items(): conf.write("{} = {}\n".format(k.replace('_', '-'), v)) @@ -190,6 +186,36 @@ def function_with_bad_docstring(foo): assert error_codes == expected_error_codes - ignored +def test_skip_errors(): + """Test that `ignore`d errors are not reported in the API.""" + function_to_check = textwrap.dedent(''' + def function_with_bad_docstring(foo): # noqa: D400, D401, D403, D415 + """ does spacinwithout a period in the end + no blank line after one-liner is bad. Also this - """ + return foo + ''') + expected_error_codes = {'D100', 'D205', 'D209', 'D210', 'D213'} + mock_open = mock.mock_open(read_data=function_to_check) + from pydocstyle import checker + with mock.patch.object( + checker.tk, 'open', mock_open, create=True): + # Passing a blank ignore here explicitly otherwise + # checkers takes the pep257 ignores by default. + errors = tuple(checker.check(['filepath'], ignore={})) + error_codes = {error.code for error in errors} + assert error_codes == expected_error_codes + + skipped_error_codes = {'D400', 'D401', 'D403', 'D415'} + # We need to recreate the mock, otherwise the read file is empty + mock_open = mock.mock_open(read_data=function_to_check) + with mock.patch.object( + checker.tk, 'open', mock_open, create=True): + errors = tuple(checker.check(['filepath'], ignore={}, + ignore_inline_noqa=True)) + error_codes = {error.code for error in errors} + assert error_codes == expected_error_codes | skipped_error_codes + + def test_run_as_named_module(): """Test that pydocstyle can be run as a "named module". 
@@ -254,7 +280,7 @@ def test_sectionless_config_file(env): conf.write('[pdcstl]') config_path = conf.name - _, err, code = env.invoke('--config={}'.format(config_path)) + _, err, code = env.invoke(f'--config={config_path}') assert code == 0 assert 'Configuration file does not contain a pydocstyle section' in err @@ -475,6 +501,189 @@ def foo(): in err) +def test_overload_function(env): + """Functions decorated with @overload trigger D418 error.""" + with env.open('example.py', 'wt') as example: + example.write(textwrap.dedent('''\ + from typing import overload + + + @overload + def overloaded_func(a: int) -> str: + ... + + + @overload + def overloaded_func(a: str) -> str: + """Foo bar documentation.""" + ... + + + def overloaded_func(a): + """Foo bar documentation.""" + return str(a) + + ''')) + env.write_config(ignore="D100") + out, err, code = env.invoke() + assert code == 1 + assert 'D418' in out + assert 'D103' not in out + + +def test_overload_method(env): + """Methods decorated with @overload trigger D418 error.""" + with env.open('example.py', 'wt') as example: + example.write(textwrap.dedent('''\ + from typing import overload + + class ClassWithMethods: + @overload + def overloaded_method(a: int) -> str: + ... + + + @overload + def overloaded_method(a: str) -> str: + """Foo bar documentation.""" + ... + + + def overloaded_method(a): + """Foo bar documentation.""" + return str(a) + + ''')) + env.write_config(ignore="D100") + out, err, code = env.invoke() + assert code == 1 + assert 'D418' in out + assert 'D102' not in out + assert 'D103' not in out + + +def test_overload_method_valid(env): + """Valid case for overload decorated Methods. + + This shouldn't throw any errors. + """ + with env.open('example.py', 'wt') as example: + example.write(textwrap.dedent('''\ + from typing import overload + + class ClassWithMethods: + """Valid docstring in public Class.""" + + @overload + def overloaded_method(a: int) -> str: + ... + + + @overload + def overloaded_method(a: str) -> str: + ... + + + def overloaded_method(a): + """Foo bar documentation.""" + return str(a) + + ''')) + env.write_config(ignore="D100, D203") + out, err, code = env.invoke() + assert code == 0 + + +def test_overload_function_valid(env): + """Valid case for overload decorated functions. + + This shouldn't throw any errors. + """ + with env.open('example.py', 'wt') as example: + example.write(textwrap.dedent('''\ + from typing import overload + + + @overload + def overloaded_func(a: int) -> str: + ... + + + @overload + def overloaded_func(a: str) -> str: + ... + + + def overloaded_func(a): + """Foo bar documentation.""" + return str(a) + + ''')) + env.write_config(ignore="D100") + out, err, code = env.invoke() + assert code == 0 + + +def test_overload_nested_function(env): + """Nested functions decorated with @overload trigger D418 error.""" + with env.open('example.py', 'wt') as example: + example.write(textwrap.dedent('''\ + from typing import overload + + def function_with_nesting(): + """Valid docstring in public function.""" + @overload + def overloaded_func(a: int) -> str: + ... + + + @overload + def overloaded_func(a: str) -> str: + """Foo bar documentation.""" + ... + + + def overloaded_func(a): + """Foo bar documentation.""" + return str(a) + ''')) + env.write_config(ignore="D100") + out, err, code = env.invoke() + assert code == 1 + assert 'D418' in out + assert 'D103' not in out + + +def test_overload_nested_function_valid(env): + """Valid case for overload decorated nested functions. 
+ + This shouldn't throw any errors. + """ + with env.open('example.py', 'wt') as example: + example.write(textwrap.dedent('''\ + from typing import overload + + def function_with_nesting(): + """Adding a docstring to a function.""" + @overload + def overloaded_func(a: int) -> str: + ... + + + @overload + def overloaded_func(a: str) -> str: + ... + + + def overloaded_func(a): + """Foo bar documentation.""" + return str(a) + ''')) + env.write_config(ignore="D100") + out, err, code = env.invoke() + assert code == 0 + + def test_conflicting_select_ignore_config(env): """Test that select and ignore are mutually exclusive.""" env.write_config(select="D100", ignore="D101") diff --git a/tox.ini b/tox.ini index 91eacd46..2fa89209 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # install tox" and then run "tox" from this directory. [tox] -envlist = {py35,py36,py37,py38}-{tests,install},docs,install,py36-docs +envlist = {py36,py37,py38,py39}-{tests,install},docs,install,py36-docs [testenv] download = true @@ -16,6 +16,8 @@ setenv = commands = py.test --pep8 --cache-clear -vv src/tests {posargs} mypy --config-file=tox.ini src/ + black --check src/pydocstyle + isort --check src/pydocstyle deps = -rrequirements/runtime.txt -rrequirements/tests.txt @@ -42,10 +44,6 @@ commands = {[testenv:docs]commands} # There's no way to generate sub-sections in tox. # The following sections are all references to the `py37-install`. -[testenv:py35-install] -skip_install = {[testenv:install]skip_install} -commands = {[testenv:install]commands} - [testenv:py36-install] skip_install = {[testenv:install]skip_install} commands = {[testenv:install]commands} @@ -58,6 +56,10 @@ commands = {[testenv:install]commands} skip_install = {[testenv:install]skip_install} commands = {[testenv:install]commands} +[testenv:py39-install] +skip_install = {[testenv:install]skip_install} +commands = {[testenv:install]commands} + [pytest] pep8ignore = test.py E701 E704
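A minimal illustration, not part of the patch itself, of the D418 behaviour introduced in violations.py above and exercised by test.py and test_integration.py: an @overload-decorated function or method that carries a docstring is flagged with "D418: Function/ Method decorated with @overload shouldn't contain a docstring", while overload stubs without docstrings are no longer reported as D102/D103 and the real implementation keeps its usual docstring requirements. The function name below is purely illustrative.

from typing import overload


@overload
def to_text(value: int) -> str:
    """Docstrings on overload stubs now trigger D418."""
    ...


@overload
def to_text(value: str) -> str:
    ...  # an undocumented overload stub is fine; no D103 is reported for it


def to_text(value):
    """Return the value as text."""  # the implementation is still checked as usual
    return str(value)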
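Similarly, a rough usage sketch, also not part of the patch, of the ignore_inline_noqa keyword that test_skip_errors above exercises on the checker API; the file name example.py is hypothetical.

from pydocstyle import checker

# Passing an explicit empty `ignore` avoids the default convention ignore list,
# mirroring test_skip_errors.  With ignore_inline_noqa=True, `# noqa` comments
# inside example.py are not honoured, so the codes they would normally suppress
# are reported as well.
for error in checker.check(['example.py'], ignore={}, ignore_inline_noqa=True):
    print(error.code, error.message)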