 import ast
 import collections
 import contextlib
-import copy
 import doctest
 import functools
 import gc
@@ -1638,58 +1637,6 @@ def assert_screenout(out, what):
     assert match_str != -1, f"expecting to find {what} in output: f{out_pr}"


-def set_model_tester_for_less_flaky_test(test_case):
-    # NOTE: this function edits the config object, which may lead to hard-to-debug side-effects. Use with caution.
-    # Do not use in tests/models where objects behave very differently based on the config's hidden layer settings
-    # (e.g. KV caches, sliding window attention, ...)
-
-    # TODO (if possible): Avoid exceptional cases
-    exceptional_classes = [
-        "ZambaModelTester",
-        "Zamba2ModelTester",
-        "RwkvModelTester",
-        "AriaVisionText2TextModelTester",
-        "GPTNeoModelTester",
-        "DPTModelTester",
-        "Qwen3NextModelTester",
-    ]
-    if test_case.model_tester.__class__.__name__ in exceptional_classes:
-        return
-
-    target_num_hidden_layers = 1
-    if hasattr(test_case.model_tester, "out_features") or hasattr(test_case.model_tester, "out_indices"):
-        target_num_hidden_layers = None
-
-    if hasattr(test_case.model_tester, "num_hidden_layers") and target_num_hidden_layers is not None:
-        test_case.model_tester.num_hidden_layers = target_num_hidden_layers
-    if (
-        hasattr(test_case.model_tester, "vision_config")
-        and "num_hidden_layers" in test_case.model_tester.vision_config
-        and target_num_hidden_layers is not None
-    ):
-        test_case.model_tester.vision_config = copy.deepcopy(test_case.model_tester.vision_config)
-        if isinstance(test_case.model_tester.vision_config, dict):
-            test_case.model_tester.vision_config["num_hidden_layers"] = 1
-        else:
-            test_case.model_tester.vision_config.num_hidden_layers = 1
-    if (
-        hasattr(test_case.model_tester, "text_config")
-        and "num_hidden_layers" in test_case.model_tester.text_config
-        and target_num_hidden_layers is not None
-    ):
-        test_case.model_tester.text_config = copy.deepcopy(test_case.model_tester.text_config)
-        if isinstance(test_case.model_tester.text_config, dict):
-            test_case.model_tester.text_config["num_hidden_layers"] = 1
-        else:
-            test_case.model_tester.text_config.num_hidden_layers = 1
-
-    # A few model class specific handling
-
-    # For Albert
-    if hasattr(test_case.model_tester, "num_hidden_groups"):
-        test_case.model_tester.num_hidden_groups = test_case.model_tester.num_hidden_layers
-
-
 def set_config_for_less_flaky_test(config):
     target_attrs = [
         "rms_norm_eps",