
Commit 3691102

cyyever authored and ArthurZucker committed
Remove repeated import (#40937)
* Remove repeated import

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>

* Fix conflict

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>

---------

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>
1 parent 5fb3b35 commit 3691102
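
Every hunk below removes the same pattern: a function-level import of a module that is already available at the top of the file (or, in the testing_utils.py case, newly hoisted there), which makes the inner import redundant. A minimal before/after sketch of that pattern, using made-up function names rather than the actual transformers code:

# Module-level import: available to every function in the file.
import warnings


# Before: the function re-imports `warnings`, which only rebinds the name
# locally and adds noise.
def warn_if_unmatched_before(matched: bool, pattern: str) -> None:
    if not matched:
        import warnings  # redundant: already imported at module scope

        warnings.warn(f"Pattern {pattern!r} did not match anything.")


# After: rely on the single module-level import.
def warn_if_unmatched_after(matched: bool, pattern: str) -> None:
    if not matched:
        warnings.warn(f"Pattern {pattern!r} did not match anything.")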

File tree (7 files changed: +1, -13 lines changed)

  src/transformers/data/data_collator.py
  src/transformers/integrations/integration_utils.py
  src/transformers/integrations/tensor_parallel.py
  src/transformers/modeling_utils.py
  src/transformers/testing_utils.py
  src/transformers/utils/hub.py
  src/transformers/utils/metrics.py


src/transformers/data/data_collator.py

Lines changed: 0 additions & 2 deletions
@@ -889,8 +889,6 @@ def get_generator(self, seed):

             return tf.random.Generator.from_seed(seed)
         else:
-            import numpy as np
-
             return np.random.default_rng(seed)

     def create_rng(self):

src/transformers/integrations/integration_utils.py

Lines changed: 0 additions & 2 deletions
@@ -547,8 +547,6 @@ def run_hp_search_sigopt(trainer, n_trials: int, direction: str, **kwargs) -> BestRun:


 def run_hp_search_wandb(trainer, n_trials: int, direction: str, **kwargs) -> BestRun:
-    from ..integrations import is_wandb_available
-
     if not is_wandb_available():
         raise ImportError("This function needs wandb installed: `pip install wandb`")
     import wandb

src/transformers/integrations/tensor_parallel.py

Lines changed: 0 additions & 2 deletions
@@ -1103,8 +1103,6 @@ def distribute_model(model, distributed_config, device_mesh, tp_size):
                 raise ValueError(f"Unsupported tensor parallel style {v}. Supported styles are {ALL_PARALLEL_STYLES}")
     for name, module in model.named_modules():
         if not getattr(module, "_is_hooked", False):
-            from transformers.integrations.tensor_parallel import add_tensor_parallel_hooks_to_module
-
             plan = _get_parameter_tp_plan(parameter_name=name, tp_plan=model_plan, is_weight=False)
             add_tensor_parallel_hooks_to_module(
                 model=model,

src/transformers/modeling_utils.py

Lines changed: 0 additions & 2 deletions
@@ -2332,8 +2332,6 @@ def tp_plan(self, plan: dict[str, str]):
                         flexible_matched = True
                         break
                 if not flexible_matched:
-                    import warnings
-
                     warnings.warn(
                         f"Layer pattern '{layer_pattern}' does not match any parameters in the model. "
                         f"This rule may not be applied during tensor parallelization."

src/transformers/testing_utils.py

Lines changed: 1 addition & 2 deletions
@@ -15,6 +15,7 @@
 import ast
 import collections
 import contextlib
+import copy
 import doctest
 import functools
 import gc
@@ -2794,8 +2795,6 @@ def wrapper(*args, **kwargs):
         else:
             test = " ".join(os.environ.get("PYTEST_CURRENT_TEST").split(" ")[:-1])
             try:
-                import copy
-
                 env = copy.deepcopy(os.environ)
                 env["_INSIDE_SUB_PROCESS"] = "1"
                 # This prevents the entries in `short test summary info` given by the subprocess being truncated. so the

src/transformers/utils/hub.py

Lines changed: 0 additions & 1 deletion
@@ -1088,7 +1088,6 @@ def get_checkpoint_shard_files(
     For the description of each arg, see [`PreTrainedModel.from_pretrained`]. `index_filename` is the full path to the
     index (downloaded and cached if `pretrained_model_name_or_path` is a model ID on the Hub).
     """
-    import json

     use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
     if use_auth_token is not None:

src/transformers/utils/metrics.py

Lines changed: 0 additions & 2 deletions
@@ -105,8 +105,6 @@ def decorator(func):
         if not _has_opentelemetry:
             return func

-        import functools
-
         @functools.wraps(func)
         def wrapper(*args, **kwargs):
             instance = args[0] if args and (hasattr(func, "__self__") and func.__self__ is not None) else None
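
Not part of this commit, but as a rough illustration of how repeated imports like these can be spotted mechanically: a small ast-based checker (a hypothetical standalone script, not a transformers or repository utility) that flags function-level imports of modules already imported at module scope.

import ast
import sys


def find_repeated_imports(path):
    """Return (lineno, module) pairs for function-level imports whose module
    is already imported at the top level of the file."""
    with open(path, encoding="utf-8") as f:
        tree = ast.parse(f.read(), filename=path)

    # Names imported at module scope.
    top_level = set()
    for node in tree.body:
        if isinstance(node, ast.Import):
            top_level.update(alias.name for alias in node.names)
        elif isinstance(node, ast.ImportFrom) and node.module:
            top_level.add(node.module)

    # Imports repeated inside function bodies.
    repeats = []
    for node in ast.walk(tree):
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            for inner in ast.walk(node):
                if isinstance(inner, ast.Import):
                    repeats.extend(
                        (inner.lineno, alias.name)
                        for alias in inner.names
                        if alias.name in top_level
                    )
                elif isinstance(inner, ast.ImportFrom) and inner.module in top_level:
                    repeats.append((inner.lineno, inner.module))
    return repeats


if __name__ == "__main__":
    for filename in sys.argv[1:]:
        for lineno, module in find_repeated_imports(filename):
            print(f"{filename}:{lineno}: repeated import of {module!r}")

This sketch ignores relative-import levels and aliasing, so it is only a rough screen; an established linter check (for example pylint's reimported warning) handles these cases more robustly in practice.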
