Commit 5c2f566

Cyrilvallez authored and ArthurZucker committed
Remove set_model_tester_for_less_flaky_tests (#40982)
remove
1 parent 5ac3c51 commit 5c2f566

File tree

2 files changed: +0 −56 lines changed

src/transformers/testing_utils.py

Lines changed: 0 additions & 53 deletions
@@ -15,7 +15,6 @@
 import ast
 import collections
 import contextlib
-import copy
 import doctest
 import functools
 import gc
@@ -1638,58 +1637,6 @@ def assert_screenout(out, what):
     assert match_str != -1, f"expecting to find {what} in output: f{out_pr}"


-def set_model_tester_for_less_flaky_test(test_case):
-    # NOTE: this function edits the config object, which may lead to hard-to-debug side-effects. Use with caution.
-    # Do not use in tests/models where objects behave very differently based on the config's hidden layer settings
-    # (e.g. KV caches, sliding window attention, ...)
-
-    # TODO (if possible): Avoid exceptional cases
-    exceptional_classes = [
-        "ZambaModelTester",
-        "Zamba2ModelTester",
-        "RwkvModelTester",
-        "AriaVisionText2TextModelTester",
-        "GPTNeoModelTester",
-        "DPTModelTester",
-        "Qwen3NextModelTester",
-    ]
-    if test_case.model_tester.__class__.__name__ in exceptional_classes:
-        return
-
-    target_num_hidden_layers = 1
-    if hasattr(test_case.model_tester, "out_features") or hasattr(test_case.model_tester, "out_indices"):
-        target_num_hidden_layers = None
-
-    if hasattr(test_case.model_tester, "num_hidden_layers") and target_num_hidden_layers is not None:
-        test_case.model_tester.num_hidden_layers = target_num_hidden_layers
-    if (
-        hasattr(test_case.model_tester, "vision_config")
-        and "num_hidden_layers" in test_case.model_tester.vision_config
-        and target_num_hidden_layers is not None
-    ):
-        test_case.model_tester.vision_config = copy.deepcopy(test_case.model_tester.vision_config)
-        if isinstance(test_case.model_tester.vision_config, dict):
-            test_case.model_tester.vision_config["num_hidden_layers"] = 1
-        else:
-            test_case.model_tester.vision_config.num_hidden_layers = 1
-    if (
-        hasattr(test_case.model_tester, "text_config")
-        and "num_hidden_layers" in test_case.model_tester.text_config
-        and target_num_hidden_layers is not None
-    ):
-        test_case.model_tester.text_config = copy.deepcopy(test_case.model_tester.text_config)
-        if isinstance(test_case.model_tester.text_config, dict):
-            test_case.model_tester.text_config["num_hidden_layers"] = 1
-        else:
-            test_case.model_tester.text_config.num_hidden_layers = 1
-
-    # A few model class specific handling
-
-    # For Albert
-    if hasattr(test_case.model_tester, "num_hidden_groups"):
-        test_case.model_tester.num_hidden_groups = test_case.model_tester.num_hidden_layers
-
-
 def set_config_for_less_flaky_test(config):
     target_attrs = [
         "rms_norm_eps",

tests/models/efficientloftr/test_modeling_efficientloftr.py

Lines changed: 0 additions & 3 deletions
@@ -23,7 +23,6 @@
     require_vision,
     set_config_for_less_flaky_test,
     set_model_for_less_flaky_test,
-    set_model_tester_for_less_flaky_test,
     slow,
     torch_device,
 )
@@ -360,8 +359,6 @@ def recursive_check(batched_object, single_row_object, model_name, key):
                     msg += str(e)
                     raise AssertionError(msg)

-        set_model_tester_for_less_flaky_test(self)
-
         config, batched_input = self.model_tester.prepare_config_and_inputs_for_common()
         set_config_for_less_flaky_test(config)
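After this change, the EfficientLoFTR batching test keeps only the config-level helper. A sketch of the remaining flow follows; only prepare_config_and_inputs_for_common and set_config_for_less_flaky_test appear in the diff above, while the test name and trailing steps are assumptions for illustration.

# Sketch of the test body once set_model_tester_for_less_flaky_test(self) is removed.
def test_batching_equivalence(self):
    # The model tester itself is no longer shrunk; only the prepared config is adjusted.
    config, batched_input = self.model_tester.prepare_config_and_inputs_for_common()
    set_config_for_less_flaky_test(config)
    ...  # build the model and compare batched vs. single-row outputs as before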
