
Commit 5e01aa8

Deals with a linting issue reg. pytest-style
funkyfuture committed Jan 4, 2025
1 parent 9e0eb38 commit 5e01aa8
Showing 6 changed files with 36 additions and 37 deletions.

_delb/plugins/https_loader.py (1 addition, 1 deletion)
@@ -15,7 +15,7 @@


"""
-If ``delb`` is installed with ``https-loader`` as extra, the required
+If ``delb`` is installed with ``web-loader`` as extra, the required
dependencies for this loader are installed as well. See :doc:`/installation`.
"""

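An aside on the renamed extra mentioned above, as a minimal, hypothetical usage sketch: it assumes delb was installed with the extra enabled (for example ``pip install "delb[web-loader]"``) and that an https URL can be handed directly to ``Document``, which is what this loader plugin is for; the URL below is illustrative only.

    # A hypothetical sketch, not part of this commit: assumes the "web-loader"
    # extra is installed and that the example URL serves well-formed XML.
    from delb import Document

    document = Document("https://example.org/sample.xml")
    print(document.root.local_name)
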

pyproject.toml (2 additions, 3 deletions)
@@ -86,10 +86,9 @@ fail_under = 96
#

[tool.flake8]
-# E203: https://github.com/PyCQA/pycodestyle/issues/373
+# E203: https://github.com/PyCQA/pycodestyle/issues/373#issuecomment-398695123
# PEA001: DROPWITH Python3.8
-# PT019: https://github.com/m-burst/flake8-pytest-style/issues/202
-extend-ignore = ["E203", "PEA001", "PT019"]
+extend-ignore = ["E203", "PEA001"]
extend-select = ["TC", "TC1"]

ban-relative-imports = "true"
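For context on the dropped ignore: flake8-pytest-style's PT019 treats an underscore-prefixed test parameter as a fixture that is injected without its value being used, so it also fires on parametrized arguments such as ``_in`` (the false positive tracked in the issue linked from the removed comment). Renaming the parameter to ``in_``, the customary trailing-underscore spelling for sidestepping the ``in`` keyword, keeps the intent and is not flagged, so the blanket ``PT019`` entry in ``extend-ignore`` becomes unnecessary. A small, hypothetical reproduction, not taken from this test suite:

    # Hypothetical reproduction of the PT019 false positive; not part of
    # delb's test suite.
    import pytest


    # flake8-pytest-style reads the leading underscore as "a fixture injected
    # only for its side effects" and reports PT019 here, although "_in" is a
    # parametrized argument whose value is clearly used.
    @pytest.mark.parametrize(("_in", "out"), ((1, "1"), (2, "2")))
    def test_stringify_flagged(_in, out):
        assert str(_in) == out


    # The trailing-underscore spelling keeps the name readable and is not
    # flagged, so the project-wide "PT019" ignore becomes unnecessary.
    @pytest.mark.parametrize(("in_", "out"), ((1, "1"), (2, "2")))
    def test_stringify_clean(in_, out):
        assert str(in_) == out

This is also why only the parameter names change throughout the test modules below; the test logic itself is untouched.
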

tests/test_css_select.py (3 additions, 3 deletions)
@@ -19,9 +19,9 @@ def test_css_select_or(files_path):
assert {x.local_name for x in result} == {"author", "title"}


@pytest.mark.parametrize(("_in", "out"), (("metadata", "descendant::metadata"),))
def test_css_to_xpath(_in, out):
assert _css_to_xpath(_in) == out
@pytest.mark.parametrize(("in_", "out"), (("metadata", "descendant::metadata"),))
def test_css_to_xpath(in_, out):
assert _css_to_xpath(in_) == out


def test_namespace():

tests/test_serialization.py (12 additions, 12 deletions)
@@ -80,7 +80,7 @@ def test_empty_below_default_namespace():


@pytest.mark.parametrize(
("indentation", "_in", "out"),
("indentation", "in_", "out"),
(
(
" ",
@@ -113,11 +113,11 @@ def test_empty_below_default_namespace():
),
),
)
-def test_indentation(indentation, _in, out):
+def test_indentation(indentation, in_, out):
DefaultStringOptions.format_options = FormatOptions(
align_attributes=False, indentation=indentation, width=0
)
-    document = Document(_in, parser_options=ParserOptions(reduce_whitespace=True))
+    document = Document(in_, parser_options=ParserOptions(reduce_whitespace=True))
serialisat = str(document)
assert_equal_trees(
document.root,
@@ -127,7 +127,7 @@ def test_indentation(indentation, _in, out):


@pytest.mark.parametrize(
("_in", "namespaces", "prefixes"),
("in_", "namespaces", "prefixes"),
(
(
'<r xmlns="d1"><b xmlns="d2"/></r>',
@@ -158,11 +158,11 @@ def test_indentation(indentation, _in, out):
),
),
)
-def test_prefix_collection_and_generation(_in, namespaces, prefixes):
+def test_prefix_collection_and_generation(in_, namespaces, prefixes):
# all namespace declarations are included in the root node.
# definitions from higher levels are preferred.
serializer = Serializer(None, namespaces=namespaces)
-    serializer._collect_prefixes(Document(_in).root)
+    serializer._collect_prefixes(Document(in_).root)
assert serializer._prefixes == prefixes


@@ -444,7 +444,7 @@ def test_text_with_milestone_tag(files_path, text_width, expected):


@pytest.mark.parametrize(
("format_options", "_in", "out"),
("format_options", "in_", "out"),
(
(
FormatOptions(align_attributes=False, indentation="", width=60),
@@ -541,10 +541,10 @@ def test_text_with_milestone_tag(files_path, text_width, expected):
),
),
)
-def test_text_wrapping(format_options, _in, out):
+def test_text_wrapping(format_options, in_, out):
DefaultStringOptions.format_options = format_options
document = Document(
-        dedent(_in), parser_options=ParserOptions(reduce_whitespace=True)
+        dedent(in_), parser_options=ParserOptions(reduce_whitespace=True)
)

serialisat = str(document.root)
@@ -554,7 +554,7 @@ def test_text_wrapping(format_options, _in, out):


@pytest.mark.parametrize(
"_in",
"in_",
(
"""\
<root>
@@ -578,11 +578,11 @@ def test_text_wrapping(format_options, _in, out):
{"indentation": " ", "width": 77},
),
)
-def test_that_no_extra_whitespace_is_produced(_in, format_options):
+def test_that_no_extra_whitespace_is_produced(in_, format_options):
parser_options = ParserOptions(reduce_whitespace=True)
DefaultStringOptions.format_options = FormatOptions(**format_options)

-    origin = Document(_in, parser_options=parser_options)
+    origin = Document(in_, parser_options=parser_options)
_copy = Document(str(origin.root), parser_options=parser_options)
assert_equal_trees(origin.root, _copy.root)


tests/test_xpath_parser.py (6 additions, 6 deletions)
@@ -81,7 +81,7 @@ def test_invalid_expressions(expression, string):


@pytest.mark.parametrize(
("_in", "out"),
("in_", "out"),
(
(
"node()",
@@ -223,12 +223,12 @@ def test_invalid_expressions(expression, string):
),
),
)
-def test_parse(_in, out):
-    assert parse(_in) == out, parse(_in)
+def test_parse(in_, out):
+    assert parse(in_) == out, parse(in_)


@pytest.mark.parametrize(
("_in", "out"),
("in_", "out"),
(
(
"[@lang='zw']",
@@ -375,8 +375,8 @@ def test_parse(_in, out):
),
),
)
-def test_parse_predicates(_in, out):
-    assert parse(f"*{_in}").location_paths[0].location_steps[0].predicates == tuple(out)
+def test_parse_predicates(in_, out):
+    assert parse(f"*{in_}").location_paths[0].location_steps[0].predicates == tuple(out)


def test_unsupported_feature():

tests/test_xpath_tokenizer.py (12 additions, 12 deletions)
@@ -6,7 +6,7 @@


@pytest.mark.parametrize(
("_in", "out"),
("in_", "out"),
(
("foo", ""),
("'foo'", "'foo'"),
@@ -18,16 +18,16 @@
(r'"fo\"o"', r'"fo\"o"'),
),
)
-def test_string_pattern(_in, out):
-    result = re.compile(named_group("STRING", string_pattern), re.UNICODE).search(_in)
+def test_string_pattern(in_, out):
+    result = re.compile(named_group("STRING", string_pattern), re.UNICODE).search(in_)
if out:
assert result is not None
assert result.group("STRING") == out


@pytest.mark.parametrize(
(
"_in",
"in_",
"out",
),
(
@@ -85,12 +85,12 @@ def test_string_pattern(_in, out):
('.//pb[@n="I"]', [".", "//", "pb", "[", "@", "n", "=", '"I"', "]"]),
),
)
-def test_tokenize(_in, out):
-    assert [x.string for x in tokenize(_in)] == out
+def test_tokenize(in_, out):
+    assert [x.string for x in tokenize(in_)] == out


@pytest.mark.parametrize(
("_in", "out"),
("in_", "out"),
(
("'foo'", TokenType.STRING),
("foo", TokenType.NAME),
@@ -127,12 +127,12 @@ def test_tokenize(_in, out):
("99", TokenType.NUMBER),
),
)
-def test_type_detection(_in, out):
-    result = tokenize(_in)
+def test_type_detection(in_, out):
+    result = tokenize(in_)
assert len(result) == 1, result
assert result[0].type is out


@pytest.mark.parametrize("_in", (" ", "\t", "\n"))
def test_ignored_whitespace(_in):
assert not tokenize(_in)
@pytest.mark.parametrize("in_", (" ", "\t", "\n"))
def test_ignored_whitespace(in_):
assert not tokenize(in_)
