Use LabelAnalysisRequestResult object
Instead of using the result as a dictionary, we can use it as the object we wrap it in.
There are 2 benefits to this:
1. It's cleaner to read.
2. It falls back to `[]` if the API returns any of the values as `None`. This avoids a bug we saw yesterday when trying to concatenate a list to `None`.

We do the wrapping in the labelanalysis command, before passing the result to the runner.
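
For reference, a minimal sketch of the wrapper this commit relies on — assuming `LabelAnalysisRequestResult` (imported from `codecov_cli.runners.types` in the tests below) behaves like a dict wrapper whose properties substitute `[]` for missing or `None` values; the real implementation may differ:

```python
# Sketch only, not the actual implementation: a dict wrapper whose properties
# fall back to [] when the API returned None (or omitted the key).
class LabelAnalysisRequestResult(dict):
    @property
    def absent_labels(self):
        return self.get("absent_labels") or []

    @property
    def present_diff_labels(self):
        return self.get("present_diff_labels") or []

    @property
    def global_level_labels(self):
        return self.get("global_level_labels") or []

    @property
    def present_report_labels(self):
        return self.get("present_report_labels") or []
```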
giovanni-guidini committed Jul 12, 2023
1 parent 9d671c5 commit 592e14d
Showing 2 changed files with 19 additions and 13 deletions.
18 changes: 9 additions & 9 deletions codecov_cli/runners/python_standard_runner.py
@@ -185,19 +185,19 @@ def process_labelanalysis_result(self, result: LabelAnalysisRequestResult):
             "Received information about tests to run",
             extra=dict(
                 extra_log_attributes=dict(
-                    absent_labels=len(result["absent_labels"] or []),
-                    present_diff_labels=len(result["present_diff_labels"] or []),
-                    global_level_labels=len(result["global_level_labels"] or []),
-                    present_report_labels=len(result["present_report_labels"] or []),
+                    absent_labels=len(result.absent_labels),
+                    present_diff_labels=len(result.present_diff_labels),
+                    global_level_labels=len(result.global_level_labels),
+                    present_report_labels=len(result.present_report_labels),
                 )
             ),
         )
         all_labels = set(
-            result["absent_labels"]
-            + result["present_diff_labels"]
-            + result["global_level_labels"]
+            result.absent_labels
+            + result.present_diff_labels
+            + result.global_level_labels
         )
-        skipped_tests = set(result["present_report_labels"]) - all_labels
+        skipped_tests = set(result.present_report_labels) - all_labels
         if skipped_tests:
             logger.info(
                 "Some tests are being skipped",
@@ -207,7 +207,7 @@ def process_labelanalysis_result(self, result: LabelAnalysisRequestResult):
             )
 
         if len(all_labels) == 0:
-            all_labels = [random.choice(result["present_report_labels"])]
+            all_labels = [random.choice(result.present_report_labels)]
             logger.info(
                 "All tests are being skipped. Selected random label to run",
                 extra=dict(extra_log_attributes=dict(selected_label=all_labels[0])),
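The `or []` guards removed above were protecting against exactly the case the commit message describes: with plain dict access, a `None` value from the API leaks into the concatenation and raises. A minimal illustration (not project code) of that failure mode, and of why attribute access on the wrapper sketched earlier is safe:

```python
# With plain dict access, a None value from the API breaks the concatenation.
raw = {
    "absent_labels": None,  # API returned None instead of a list
    "present_diff_labels": ["test_a"],
    "global_level_labels": [],
}
try:
    set(raw["absent_labels"] + raw["present_diff_labels"] + raw["global_level_labels"])
except TypeError as err:
    print(err)  # unsupported operand type(s) for +: 'NoneType' and 'list'

# With the wrapper's attribute access, None becomes [], so the same expression
# yields {"test_a"} instead of raising.
```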
14 changes: 10 additions & 4 deletions tests/runners/test_python_standard_runner.py
@@ -7,10 +7,10 @@
 
 from codecov_cli.runners.python_standard_runner import (
     PythonStandardRunner,
-    PythonStandardRunnerConfigParams,
     _execute_pytest_subprocess,
 )
 from codecov_cli.runners.python_standard_runner import stdout as pyrunner_stdout
+from codecov_cli.runners.types import LabelAnalysisRequestResult
 
 
 @patch("codecov_cli.runners.python_standard_runner.pytest")
@@ -248,7 +248,9 @@ def test_process_label_analysis_result(self, mocker):
         }
         mock_execute = mocker.patch.object(PythonStandardRunner, "_execute_pytest")
 
-        self.runner.process_labelanalysis_result(label_analysis_result)
+        self.runner.process_labelanalysis_result(
+            LabelAnalysisRequestResult(label_analysis_result)
+        )
         args, kwargs = mock_execute.call_args
         assert kwargs == {"capture_output": False}
         assert isinstance(args[0], list)
@@ -277,7 +279,9 @@ def test_process_label_analysis_result_strict(self, mocker):
 
         runner_config = {"strict_mode": True}
         runner = PythonStandardRunner(runner_config)
-        runner.process_labelanalysis_result(label_analysis_result)
+        runner.process_labelanalysis_result(
+            LabelAnalysisRequestResult(label_analysis_result)
+        )
         mock_execute.assert_not_called()
         args, kwargs = mock_execute_strict.call_args
         assert kwargs == {"capture_output": False}
@@ -302,7 +306,9 @@ def test_process_label_analysis_skip_all_tests(self, mocker):
         }
         mock_execute = mocker.patch.object(PythonStandardRunner, "_execute_pytest")
 
-        self.runner.process_labelanalysis_result(label_analysis_result)
+        self.runner.process_labelanalysis_result(
+            LabelAnalysisRequestResult(label_analysis_result)
+        )
         args, kwargs = mock_execute.call_args
         assert kwargs == {"capture_output": False}
         assert isinstance(args[0], list)
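
Per the commit message, the wrapping itself happens in the labelanalysis command before the runner is invoked; the tests above mirror that by constructing `LabelAnalysisRequestResult` around the raw dict. A hedged sketch of the call-site shape (variable names assumed, not taken from this diff):

```python
# Hypothetical call site: wrap the raw API payload once, then hand the typed
# object to the runner instead of a plain dict.
result = LabelAnalysisRequestResult(raw_api_response)
runner.process_labelanalysis_result(result)
```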
