Skip to content

Commit 3e685d6

Browse files
authored
Merge pull request #1795 from hackebrot/fix-report-outcome-for-xpass
WIP Change outcome to 'passed' for xfail unless it's strict
2 parents a01cbce + 68ebf55 commit 3e685d6

File tree

7 files changed

+150
-34
lines changed

7 files changed

+150
-34
lines changed

CHANGELOG.rst

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,20 @@
7979

8080
*
8181

82+
**Changes**
83+
84+
* Change ``report.outcome`` for ``xpassed`` tests to ``"passed"`` in non-strict
85+
mode and ``"failed"`` in strict mode. Thanks to `@hackebrot`_ for the PR
86+
(`#1795`_) and `@gprasad84`_ for report (`#1546`_).
87+
88+
* Tests marked with ``xfail(strict=False)`` (the default) now appear in
89+
JUnitXML reports as passing tests instead of skipped.
90+
Thanks to `@hackebrot`_ for the PR (`#1795`_).
91+
92+
.. _#1795: https://github.com/pytest-dev/pytest/pull/1795
93+
.. _#1546: https://github.com/pytest-dev/pytest/issues/1546
94+
.. _@gprasad84: https://github.com/gprasad84
95+
8296
.. _#1210: https://github.com/pytest-dev/pytest/issues/1210
8397
.. _#1435: https://github.com/pytest-dev/pytest/issues/1435
8498
.. _#1471: https://github.com/pytest-dev/pytest/issues/1471

_pytest/skipping.py

Lines changed: 31 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -220,6 +220,18 @@ def check_strict_xfail(pyfuncitem):
220220
pytest.fail('[XPASS(strict)] ' + explanation, pytrace=False)
221221

222222

223+
def _is_unittest_unexpected_success_a_failure():
224+
    """Return True if the test suite should fail if an @expectedFailure unittest test PASSES.
225+
226+
From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful:
227+
Changed in version 3.4: Returns False if there were any
228+
unexpectedSuccesses from tests marked with the expectedFailure() decorator.
229+
230+
TODO: this should be moved to the "compat" module.
231+
"""
232+
return sys.version_info >= (3, 4)
233+
234+
223235
@pytest.hookimpl(hookwrapper=True)
224236
def pytest_runtest_makereport(item, call):
225237
outcome = yield
@@ -228,9 +240,15 @@ def pytest_runtest_makereport(item, call):
228240
evalskip = getattr(item, '_evalskip', None)
229241
    # unittest special case, see setting of _unexpectedsuccess
230242
if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
231-
# we need to translate into how pytest encodes xpass
232-
rep.wasxfail = "reason: " + repr(item._unexpectedsuccess)
233-
rep.outcome = "failed"
243+
if item._unexpectedsuccess:
244+
rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess)
245+
else:
246+
rep.longrepr = "Unexpected success"
247+
if _is_unittest_unexpected_success_a_failure():
248+
rep.outcome = "failed"
249+
else:
250+
rep.outcome = "passed"
251+
rep.wasxfail = rep.longrepr
234252
elif item.config.option.runxfail:
235253
        pass  # don't interfere
236254
elif call.excinfo and call.excinfo.errisinstance(pytest.xfail.Exception):
@@ -245,8 +263,15 @@ def pytest_runtest_makereport(item, call):
245263
rep.outcome = "skipped"
246264
rep.wasxfail = evalxfail.getexplanation()
247265
elif call.when == "call":
248-
rep.outcome = "failed" # xpass outcome
249-
rep.wasxfail = evalxfail.getexplanation()
266+
strict_default = item.config.getini('xfail_strict')
267+
is_strict_xfail = evalxfail.get('strict', strict_default)
268+
explanation = evalxfail.getexplanation()
269+
if is_strict_xfail:
270+
rep.outcome = "failed"
271+
rep.longrepr = "[XPASS(strict)] {0}".format(explanation)
272+
else:
273+
rep.outcome = "passed"
274+
rep.wasxfail = explanation
250275
elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple:
251276
# skipped by mark.skipif; change the location of the failure
252277
# to point to the item definition, otherwise it will display
@@ -260,7 +285,7 @@ def pytest_report_teststatus(report):
260285
if hasattr(report, "wasxfail"):
261286
if report.skipped:
262287
return "xfailed", "x", "xfail"
263-
elif report.failed:
288+
elif report.passed:
264289
return "xpassed", "X", ("XPASS", {'yellow': True})
265290

266291
# called by the terminalreporter instance/plugin

testing/python/metafunc.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1080,22 +1080,23 @@ def test_increment(n, expected):
10801080
reprec = testdir.inline_run()
10811081
reprec.assertoutcome(passed=2, skipped=1)
10821082

1083-
def test_xfail_passing_is_xpass(self, testdir):
1083+
@pytest.mark.parametrize('strict', [True, False])
1084+
def test_xfail_passing_is_xpass(self, testdir, strict):
10841085
s = """
10851086
import pytest
10861087
10871088
@pytest.mark.parametrize(("n", "expected"), [
10881089
(1, 2),
1089-
pytest.mark.xfail("sys.version > 0", reason="some bug")((2, 3)),
1090+
pytest.mark.xfail("sys.version_info > (0, 0, 0)", reason="some bug", strict={strict})((2, 3)),
10901091
(3, 4),
10911092
])
10921093
def test_increment(n, expected):
10931094
assert n + 1 == expected
1094-
"""
1095+
""".format(strict=strict)
10951096
testdir.makepyfile(s)
10961097
reprec = testdir.inline_run()
1097-
# xpass is fail, obviously :)
1098-
reprec.assertoutcome(passed=2, failed=1)
1098+
passed, failed = (2, 1) if strict else (3, 0)
1099+
reprec.assertoutcome(passed=passed, failed=failed)
10991100

11001101
def test_parametrize_called_in_generate_tests(self, testdir):
11011102
s = """

testing/test_config.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -592,6 +592,7 @@ def test_setuppy_fallback(self, tmpdir):
592592
assert inicfg == {}
593593

594594
def test_nothing(self, tmpdir):
595+
tmpdir.chdir()
595596
rootdir, inifile, inicfg = determine_setup(None, [tmpdir])
596597
assert rootdir == tmpdir
597598
assert inifile is None
@@ -603,6 +604,7 @@ def test_with_specific_inifile(self, tmpdir):
603604
assert rootdir == tmpdir
604605

605606
def test_with_arg_outside_cwd_without_inifile(self, tmpdir):
607+
tmpdir.chdir()
606608
a = tmpdir.mkdir("a")
607609
b = tmpdir.mkdir("b")
608610
rootdir, inifile, inicfg = determine_setup(None, [a, b])

testing/test_junitxml.py

Lines changed: 27 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,7 @@ def test_xpass():
100100
result, dom = runandparse(testdir)
101101
assert result.ret
102102
node = dom.find_first_by_tag("testsuite")
103-
node.assert_attr(name="pytest", errors=0, failures=1, skips=3, tests=5)
103+
node.assert_attr(name="pytest", errors=0, failures=1, skips=2, tests=5)
104104

105105
def test_summing_simple_with_errors(self, testdir):
106106
testdir.makepyfile("""
@@ -115,13 +115,16 @@ def test_fail():
115115
def test_error(fixture):
116116
pass
117117
@pytest.mark.xfail
118+
def test_xfail():
119+
assert False
120+
@pytest.mark.xfail(strict=True)
118121
def test_xpass():
119-
assert 1
122+
assert True
120123
""")
121124
result, dom = runandparse(testdir)
122125
assert result.ret
123126
node = dom.find_first_by_tag("testsuite")
124-
node.assert_attr(name="pytest", errors=1, failures=1, skips=1, tests=4)
127+
node.assert_attr(name="pytest", errors=1, failures=2, skips=1, tests=5)
125128

126129
def test_timing_function(self, testdir):
127130
testdir.makepyfile("""
@@ -346,16 +349,33 @@ def test_xpass():
346349
result, dom = runandparse(testdir)
347350
# assert result.ret
348351
node = dom.find_first_by_tag("testsuite")
349-
node.assert_attr(skips=1, tests=1)
352+
node.assert_attr(skips=0, tests=1)
350353
tnode = node.find_first_by_tag("testcase")
351354
tnode.assert_attr(
352355
file="test_xfailure_xpass.py",
353356
line="1",
354357
classname="test_xfailure_xpass",
355358
name="test_xpass")
356-
fnode = tnode.find_first_by_tag("skipped")
357-
fnode.assert_attr(message="xfail-marked test passes unexpectedly")
358-
# assert "ValueError" in fnode.toxml()
359+
360+
def test_xfailure_xpass_strict(self, testdir):
361+
testdir.makepyfile("""
362+
import pytest
363+
@pytest.mark.xfail(strict=True, reason="This needs to fail!")
364+
def test_xpass():
365+
pass
366+
""")
367+
result, dom = runandparse(testdir)
368+
# assert result.ret
369+
node = dom.find_first_by_tag("testsuite")
370+
node.assert_attr(skips=0, tests=1)
371+
tnode = node.find_first_by_tag("testcase")
372+
tnode.assert_attr(
373+
file="test_xfailure_xpass_strict.py",
374+
line="1",
375+
classname="test_xfailure_xpass_strict",
376+
name="test_xpass")
377+
fnode = tnode.find_first_by_tag("failure")
378+
fnode.assert_attr(message="[XPASS(strict)] This needs to fail!")
359379

360380
def test_collect_error(self, testdir):
361381
testdir.makepyfile("syntax error")

testing/test_skipping.py

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -145,15 +145,29 @@ def test_func():
145145
def test_xfail_xpassed(self, testdir):
146146
item = testdir.getitem("""
147147
import pytest
148-
@pytest.mark.xfail
148+
@pytest.mark.xfail(reason="this is an xfail")
149+
def test_func():
150+
assert 1
151+
""")
152+
reports = runtestprotocol(item, log=False)
153+
assert len(reports) == 3
154+
callreport = reports[1]
155+
assert callreport.passed
156+
assert callreport.wasxfail == "this is an xfail"
157+
158+
def test_xfail_xpassed_strict(self, testdir):
159+
item = testdir.getitem("""
160+
import pytest
161+
@pytest.mark.xfail(strict=True, reason="nope")
149162
def test_func():
150163
assert 1
151164
""")
152165
reports = runtestprotocol(item, log=False)
153166
assert len(reports) == 3
154167
callreport = reports[1]
155168
assert callreport.failed
156-
assert callreport.wasxfail == ""
169+
assert callreport.longrepr == "[XPASS(strict)] nope"
170+
assert not hasattr(callreport, "wasxfail")
157171

158172
def test_xfail_run_anyway(self, testdir):
159173
testdir.makepyfile("""

testing/test_unittest.py

Lines changed: 54 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -419,8 +419,9 @@ def setup_class(cls):
419419
def test_method(self):
420420
pass
421421
""")
422+
from _pytest.skipping import _is_unittest_unexpected_success_a_failure
423+
should_fail = _is_unittest_unexpected_success_a_failure()
422424
result = testdir.runpytest("-rxs")
423-
assert result.ret == 0
424425
result.stdout.fnmatch_lines_random([
425426
"*XFAIL*test_trial_todo*",
426427
"*trialselfskip*",
@@ -429,8 +430,9 @@ def test_method(self):
429430
"*i2wanto*",
430431
"*sys.version_info*",
431432
"*skip_in_method*",
432-
"*4 skipped*3 xfail*1 xpass*",
433+
"*1 failed*4 skipped*3 xfailed*" if should_fail else "*4 skipped*3 xfail*1 xpass*",
433434
])
435+
assert result.ret == (1 if should_fail else 0)
434436

435437
def test_trial_error(self, testdir):
436438
testdir.makepyfile("""
@@ -587,24 +589,62 @@ def test_hello(self, arg1):
587589
assert "TypeError" in result.stdout.str()
588590
assert result.ret == 1
589591

592+
590593
@pytest.mark.skipif("sys.version_info < (2,7)")
591-
def test_unittest_unexpected_failure(testdir):
592-
testdir.makepyfile("""
594+
@pytest.mark.parametrize('runner', ['pytest', 'unittest'])
595+
def test_unittest_expected_failure_for_failing_test_is_xfail(testdir, runner):
596+
script = testdir.makepyfile("""
593597
import unittest
594598
class MyTestCase(unittest.TestCase):
595599
@unittest.expectedFailure
596-
def test_func1(self):
597-
assert 0
600+
def test_failing_test_is_xfail(self):
601+
assert False
602+
if __name__ == '__main__':
603+
unittest.main()
604+
""")
605+
if runner == 'pytest':
606+
result = testdir.runpytest("-rxX")
607+
result.stdout.fnmatch_lines([
608+
"*XFAIL*MyTestCase*test_failing_test_is_xfail*",
609+
"*1 xfailed*",
610+
])
611+
else:
612+
result = testdir.runpython(script)
613+
result.stderr.fnmatch_lines([
614+
"*1 test in*",
615+
"*OK*(expected failures=1)*",
616+
])
617+
assert result.ret == 0
618+
619+
620+
@pytest.mark.skipif("sys.version_info < (2,7)")
621+
@pytest.mark.parametrize('runner', ['pytest', 'unittest'])
622+
def test_unittest_expected_failure_for_passing_test_is_fail(testdir, runner):
623+
script = testdir.makepyfile("""
624+
import unittest
625+
class MyTestCase(unittest.TestCase):
598626
@unittest.expectedFailure
599-
def test_func2(self):
600-
assert 1
627+
def test_passing_test_is_fail(self):
628+
assert True
629+
if __name__ == '__main__':
630+
unittest.main()
601631
""")
602-
result = testdir.runpytest("-rxX")
603-
result.stdout.fnmatch_lines([
604-
"*XFAIL*MyTestCase*test_func1*",
605-
"*XPASS*MyTestCase*test_func2*",
606-
"*1 xfailed*1 xpass*",
607-
])
632+
from _pytest.skipping import _is_unittest_unexpected_success_a_failure
633+
should_fail = _is_unittest_unexpected_success_a_failure()
634+
if runner == 'pytest':
635+
result = testdir.runpytest("-rxX")
636+
result.stdout.fnmatch_lines([
637+
"*MyTestCase*test_passing_test_is_fail*",
638+
"*1 failed*" if should_fail else "*1 xpassed*",
639+
])
640+
else:
641+
result = testdir.runpython(script)
642+
result.stderr.fnmatch_lines([
643+
"*1 test in*",
644+
"*(unexpected successes=1)*",
645+
])
646+
647+
assert result.ret == (1 if should_fail else 0)
608648

609649

610650
@pytest.mark.parametrize('fix_type, stmt', [

0 commit comments

Comments
 (0)