
Commit 6f71f6e

Authored by CarlosRDomin (Carlos Ruiz) and nicoddemus
Print output "dots" for successful unittest subtests (#164) (#190)
Brings unittest subtest reporting in line with the `subtests` fixture: the summary now reads, e.g., "1 passed, 5 skipped, 3 subtests passed" instead of "1 passed, 5 skipped", and the progress output shows "---,,,--." instead of just "." (if the `-s` flag is enabled).

Co-authored-by: Carlos Ruiz <[email protected]>
Co-authored-by: Bruno Oliveira <[email protected]>
1 parent 722c830 · commit 6f71f6e
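For illustration, a minimal unittest-style test of the kind this commit affects (a hypothetical example, not shipped with the commit; the class and method names mirror the expectations in tests/test_subtests.py below):

# test_demo.py -- hypothetical example
import unittest

class T(unittest.TestCase):
    def test_foo(self):
        # Five subtests: i=0, 2, 4 pass; i=1 and i=3 fail.
        for i in range(5):
            with self.subTest("custom", i=i):
                self.assertEqual(i % 2, 0)

Before this change, running the file under pytest with pytest-subtests installed summarized it as "2 failed, 1 passed": the three passing subtests were invisible. With this change the summary reads "2 failed, 1 passed, 3 subtests passed", which is exactly what the updated test expectations below assert.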

3 files changed (+49 -41 lines)


CHANGELOG.rst
Lines changed: 9 additions & 0 deletions

@@ -1,6 +1,15 @@
 CHANGELOG
 =========
 
+UNRELEASED
+----------
+
+*UNRELEASED*
+
+* Print output "dots" for successful unittest subtests (`#164`_).
+
+.. _#164: https://github.com/pytest-dev/pytest-subtests/issues/164
+
 0.14.1
 ------
 

src/pytest_subtests/plugin.py
Lines changed: 38 additions & 39 deletions

@@ -126,47 +126,46 @@ def _addSubTest(
         test: TestCase,
         exc_info: tuple[type[BaseException], BaseException, TracebackType] | None,
     ) -> None:
-        if exc_info is not None:
-            msg = test._message if isinstance(test._message, str) else None  # type: ignore[attr-defined]
-            call_info = make_call_info(
-                ExceptionInfo(exc_info, _ispytest=True),
-                start=0,
-                stop=0,
-                duration=0,
-                when="call",
+        msg = test._message if isinstance(test._message, str) else None  # type: ignore[attr-defined]
+        call_info = make_call_info(
+            ExceptionInfo(exc_info, _ispytest=True) if exc_info else None,
+            start=0,
+            stop=0,
+            duration=0,
+            when="call",
+        )
+        report = self.ihook.pytest_runtest_makereport(item=self, call=call_info)
+        sub_report = SubTestReport._from_test_report(report)
+        sub_report.context = SubTestContext(msg, dict(test.params))  # type: ignore[attr-defined]
+        self.ihook.pytest_runtest_logreport(report=sub_report)
+        if check_interactive_exception(call_info, sub_report):
+            self.ihook.pytest_exception_interact(
+                node=self, call=call_info, report=sub_report
             )
-            report = self.ihook.pytest_runtest_makereport(item=self, call=call_info)
-            sub_report = SubTestReport._from_test_report(report)
-            sub_report.context = SubTestContext(msg, dict(test.params))  # type: ignore[attr-defined]
-            self.ihook.pytest_runtest_logreport(report=sub_report)
-            if check_interactive_exception(call_info, sub_report):
-                self.ihook.pytest_exception_interact(
-                    node=self, call=call_info, report=sub_report
-                )
 
-            # For python < 3.11: add non-subtest skips once all subtest failures are processed by # `_addSubTest`.
-            if sys.version_info < (3, 11):
-                from unittest.case import _SubTest  # type: ignore[attr-defined]
-
-                non_subtest_skip = [
-                    (x, y)
-                    for x, y in self.instance._outcome.skipped
-                    if not isinstance(x, _SubTest)
-                ]
-                subtest_errors = [
-                    (x, y)
-                    for x, y in self.instance._outcome.errors
-                    if isinstance(x, _SubTest) and y is not None
-                ]
-                # Check if we have non-subtest skips: if there are also sub failures, non-subtest skips are not treated in
-                # `_addSubTest` and have to be added using `_originaladdSkip` after all subtest failures are processed.
-                if len(non_subtest_skip) > 0 and len(subtest_errors) > 0:
-                    # Make sure we have processed the last subtest failure
-                    last_subset_error = subtest_errors[-1]
-                    if exc_info is last_subset_error[-1]:
-                        # Add non-subtest skips (as they could not be treated in `_addSkip`)
-                        for testcase, reason in non_subtest_skip:
-                            self._originaladdSkip(testcase, reason)  # type: ignore[attr-defined]
+        # For python < 3.11: add non-subtest skips once all subtest failures are processed by # `_addSubTest`.
+        if sys.version_info < (3, 11):
+            from unittest.case import _SubTest  # type: ignore[attr-defined]
+
+            non_subtest_skip = [
+                (x, y)
+                for x, y in self.instance._outcome.skipped
+                if not isinstance(x, _SubTest)
+            ]
+            subtest_errors = [
+                (x, y)
+                for x, y in self.instance._outcome.errors
+                if isinstance(x, _SubTest) and y is not None
+            ]
+            # Check if we have non-subtest skips: if there are also sub failures, non-subtest skips are not treated in
+            # `_addSubTest` and have to be added using `_originaladdSkip` after all subtest failures are processed.
+            if len(non_subtest_skip) > 0 and len(subtest_errors) > 0:
+                # Make sure we have processed the last subtest failure
+                last_subset_error = subtest_errors[-1]
+                if exc_info is last_subset_error[-1]:
+                    # Add non-subtest skips (as they could not be treated in `_addSkip`)
+                    for testcase, reason in non_subtest_skip:
+                        self._originaladdSkip(testcase, reason)  # type: ignore[attr-defined]
 
 
 def pytest_configure(config: pytest.Config) -> None:
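The behavioral crux is easy to miss amid the re-indentation: `_addSubTest` used to return without reporting when `exc_info` was None (a passing subtest); now it always builds and logs a report, passing None to `make_call_info` for a pass. A simplified sketch of the new flow (names taken from the diff above; the `sys.version_info < (3, 11)` workaround is omitted):

# Distilled sketch of the reworked _addSubTest -- see the diff above for the real code.
def _addSubTest(self, test, exc_info):
    msg = test._message if isinstance(test._message, str) else None
    # Key change: a CallInfo is built even when exc_info is None, so a
    # passing subtest now yields a report (a progress character and a
    # "subtests passed" summary entry) instead of being dropped.
    excinfo = ExceptionInfo(exc_info, _ispytest=True) if exc_info else None
    call_info = make_call_info(excinfo, start=0, stop=0, duration=0, when="call")
    report = self.ihook.pytest_runtest_makereport(item=self, call=call_info)
    sub_report = SubTestReport._from_test_report(report)
    sub_report.context = SubTestContext(msg, dict(test.params))
    self.ihook.pytest_runtest_logreport(report=sub_report)
    if check_interactive_exception(call_info, sub_report):
        self.ihook.pytest_exception_interact(node=self, call=call_info, report=sub_report)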

tests/test_subtests.py
Lines changed: 2 additions & 2 deletions

@@ -216,7 +216,7 @@ def test_simple_terminal_normal(
             "E * AssertionError: 1 != 0",
             "* T.test_foo [[]custom[]] (i=3) *",
             "E * AssertionError: 1 != 0",
-            "* 2 failed, 1 passed in *",
+            "* 2 failed, 1 passed, 3 subtests passed in *",
         ]
     )
 
@@ -267,7 +267,7 @@ def test_simple_terminal_verbose(
             "E * AssertionError: 1 != 0",
             "* T.test_foo [[]custom[]] (i=3) *",
             "E * AssertionError: 1 != 0",
-            "* 2 failed, 1 passed in *",
+            "* 2 failed, 1 passed, 3 subtests passed in *",
         ]
     )
 
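A note on the bracket noise in these expectations: `fnmatch_lines` treats each expected line as a glob pattern, so a literal "[" must be written as the character class "[[]" and a literal "]" as "[]]", which is why the subtest label appears as "[[]custom[]]". A standalone illustration (the matched line is a made-up stand-in for a pytest failure-section header):

# fnmatch escaping as used in the expectations above (illustrative).
from fnmatch import fnmatch

line = "_______ T.test_foo [custom] (i=3) _______"
# "[[]" matches a literal "[", "[]]" a literal "]"; "*" matches any run.
assert fnmatch(line, "* T.test_foo [[]custom[]] (i=3) *")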
