
Commit 4c7ce3b

threewebcode authored and zucchini-nlp committed
chore: fix typos in the tests directory (huggingface#36813)
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* chore: fix typos in the tests
* fix: format codes
* chore: fix copy mismatch issue
* fix: format codes
* chore: fix copy mismatch issue
* chore: fix copy mismatch issue
* chore: fix copy mismatch issue
* chore: restore previous words
* chore: revert unexpected changes
1 parent c8b2768 commit 4c7ce3b

File tree

78 files changed: +181 additions, −148 deletions


tests/deepspeed/test_deepspeed.py

Lines changed: 1 addition & 1 deletion

@@ -628,7 +628,7 @@ def model_init():
         with CaptureStd() as cs:
             trainer.hyperparameter_search(direction="maximize", n_trials=n_trials)
         self.assertIn("DeepSpeed info", cl.out, "expected DeepSpeed logger output but got none")
-        self.assertIn(f"Trial {n_trials-1} finished with value", cs.err, "expected hyperparameter_search output")
+        self.assertIn(f"Trial {n_trials - 1} finished with value", cs.err, "expected hyperparameter_search output")
         self.assertIn("Best is trial", cs.err, "expected hyperparameter_search output")

     # --- These tests need to run on both zero stages --- #
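
The only change in this hunk is whitespace inside the f-string expression (a formatter preference); the rendered assertion text is identical. A minimal standalone check, using a made-up n_trials value:

    # The f-string reformatting is purely cosmetic: both spellings render the same text.
    n_trials = 4  # hypothetical value, standing in for the test's local variable
    assert f"Trial {n_trials-1} finished with value" == f"Trial {n_trials - 1} finished with value"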

tests/models/bart/test_modeling_tf_bart.py

Lines changed: 2 additions & 2 deletions

@@ -87,7 +87,7 @@ def prepare_config_and_inputs_for_common(self):
             clip_value_min=self.eos_token_id + 1,
             clip_value_max=self.vocab_size + 1,
         )
-        # Explicity add "end of sequence" to the inputs
+        # Explicitly add "end of sequence" to the inputs
         eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1)
         input_ids = tf.concat([input_ids, eos_tensor], axis=1)

@@ -212,7 +212,7 @@ def test_decoder_model_past_large_inputs(self):
         self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs)

     # TODO (Joao): fix me
-    @unittest.skip("Onnx compliancy broke with TF 2.10")
+    @unittest.skip("Onnx compliance broke with TF 2.10")
     def test_onnx_compliancy(self):
         pass
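
For context on the first hunk above: the random input ids are clipped to values strictly above the EOS id, and an explicit EOS column is then appended so every sequence in the batch ends with end-of-sequence. A rough standalone sketch of the same idea (sizes and names are illustrative, not taken from the test):

    import tensorflow as tf

    # Illustrative only: build ids that avoid the EOS token, then append an explicit EOS column.
    batch_size, seq_len, eos_token_id, vocab_size = 2, 5, 2, 50
    input_ids = tf.random.uniform(
        (batch_size, seq_len), minval=eos_token_id + 1, maxval=vocab_size, dtype=tf.int32
    )
    eos_tensor = tf.expand_dims(tf.constant([eos_token_id] * batch_size), 1)  # shape (batch_size, 1)
    input_ids = tf.concat([input_ids, eos_tensor], axis=1)  # every row now ends with EOS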

tests/models/bert/test_modeling_tf_bert.py

Lines changed: 1 addition & 1 deletion

@@ -735,7 +735,7 @@ def test_custom_load_tf_weights(self):
             self.assertTrue(layer.split("_")[0] in ["dropout", "classifier"])

     # TODO (Joao): fix me
-    @unittest.skip("Onnx compliancy broke with TF 2.10")
+    @unittest.skip("Onnx compliance broke with TF 2.10")
     def test_onnx_compliancy(self):
         pass

tests/models/blip/test_modeling_blip.py

Lines changed: 6 additions & 6 deletions

@@ -474,7 +474,7 @@ def test_retain_grad_hidden_states_attentions(self):
     def test_model_get_set_embeddings(self):
         pass

-    # override as the `logit_scale` parameter initilization is different for Blip
+    # override as the `logit_scale` parameter initialization is different for Blip
     def test_initialization(self):
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

@@ -483,7 +483,7 @@ def test_initialization(self):
             model = model_class(config=configs_no_init)
             for name, param in model.named_parameters():
                 if param.requires_grad:
-                    # check if `logit_scale` is initilized as per the original implementation
+                    # check if `logit_scale` is initialized as per the original implementation
                     if name == "logit_scale":
                         self.assertAlmostEqual(
                             param.data.item(),

@@ -988,7 +988,7 @@ def test_training_gradient_checkpointing_use_reentrant(self):
     def test_training_gradient_checkpointing_use_reentrant_false(self):
         pass

-    # override as the `logit_scale` parameter initilization is different for Blip
+    # override as the `logit_scale` parameter initialization is different for Blip
     def test_initialization(self):
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

@@ -997,7 +997,7 @@ def test_initialization(self):
             model = model_class(config=configs_no_init)
             for name, param in model.named_parameters():
                 if param.requires_grad:
-                    # check if `logit_scale` is initilized as per the original implementation
+                    # check if `logit_scale` is initialized as per the original implementation
                     if name == "logit_scale":
                         self.assertAlmostEqual(
                             param.data.item(),

@@ -1206,7 +1206,7 @@ def test_training_gradient_checkpointing(self):
             loss = model(**inputs).loss
             loss.backward()

-    # override as the `logit_scale` parameter initilization is different for Blip
+    # override as the `logit_scale` parameter initialization is different for Blip
     def test_initialization(self):
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

@@ -1215,7 +1215,7 @@ def test_initialization(self):
             model = model_class(config=configs_no_init)
             for name, param in model.named_parameters():
                 if param.requires_grad:
-                    # check if `logit_scale` is initilized as per the original implementation
+                    # check if `logit_scale` is initialized as per the original implementation
                     if name == "logit_scale":
                         self.assertAlmostEqual(
                             param.data.item(),

tests/models/blip_2/test_modeling_blip_2.py

Lines changed: 3 additions & 3 deletions

@@ -521,7 +521,7 @@ def test_save_load_fast_init_to_base(self):
     def test_sdpa_can_dispatch_composite_models(self):
         """
         Tests if composite models dispatch correctly on SDPA/eager when requested so when loading the model.
-        This tests only by looking at layer names, as usually SDPA layers are calles "SDPAAttention".
+        This tests only by looking at layer names, as usually SDPA layers are called "SDPAAttention".
         In contrast to the above test, this one checks if the "config._attn_implamentation" is a dict after the model
         is loaded, because we manually replicate requested attn implementation on each sub-config when loading.
         See https://github.com/huggingface/transformers/pull/32238 for more info

@@ -970,7 +970,7 @@ def test_cpu_offload(self):
     def test_sdpa_can_dispatch_composite_models(self):
         """
         Tests if composite models dispatch correctly on SDPA/eager when requested so when loading the model.
-        This tests only by looking at layer names, as usually SDPA layers are calles "SDPAAttention".
+        This tests only by looking at layer names, as usually SDPA layers are called "SDPAAttention".
         In contrast to the above test, this one checks if the "config._attn_implamentation" is a dict after the model
         is loaded, because we manually replicate requested attn implementation on each sub-config when loading.
         See https://github.com/huggingface/transformers/pull/32238 for more info

@@ -1647,7 +1647,7 @@ def test_initialization(self):
             model = model_class(config=configs_no_init)
             for name, param in model.named_parameters():
                 if param.requires_grad:
-                    # check if `logit_scale` is initilized as per the original implementation
+                    # check if `logit_scale` is initialized as per the original implementation
                     if name == "logit_scale":
                         self.assertAlmostEqual(
                             param.data.item(),
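
The docstring corrected above describes a check that works purely by inspecting layer class names. A rough, hypothetical sketch of that idea (the real test is more involved and also checks the attention implementation recorded on each sub-config):

    # Hypothetical helper, not from the test suite: detect SDPA attention layers by class name,
    # in the spirit of the docstring ("SDPA layers are called 'SDPAAttention'").
    def uses_sdpa_attention(model) -> bool:
        return any("sdpa" in module.__class__.__name__.lower() for module in model.modules())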

tests/models/bloom/test_tokenization_bloom.py

Lines changed: 1 addition & 1 deletion

@@ -135,7 +135,7 @@ def test_encodings_from_xnli_dataset(self):
     @require_jinja
     def test_tokenization_for_chat(self):
         tokenizer = self.get_rust_tokenizer()
-        tokenizer.chat_template = "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}"
+        tokenizer.chat_template = "{% for message in messages %}{{ message.content }}{{ eos_token }}{% endfor %}"
         test_chats = [
             [{"role": "system", "content": "You are a helpful chatbot."}, {"role": "user", "content": "Hello!"}],
             [
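
This template change is behavior-preserving: Python concatenates adjacent string literals at compile time, so the three fragments on the old line and the single literal on the new line are the same Jinja template string. A quick standalone check:

    # Adjacent string literals concatenate into exactly the single-literal form.
    split_form = "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}"
    joined_form = "{% for message in messages %}{{ message.content }}{{ eos_token }}{% endfor %}"
    assert split_form == joined_form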

tests/models/canine/test_tokenization_canine.py

Lines changed: 1 addition & 1 deletion

@@ -64,7 +64,7 @@ def test_prepare_batch_integration(self):
     @require_torch
     def test_encoding_keys(self):
         tokenizer = self.canine_tokenizer
-        src_text = ["Once there was a man.", "He wrote a test in HuggingFace Tranformers."]
+        src_text = ["Once there was a man.", "He wrote a test in HuggingFace Transformers."]
         batch = tokenizer(src_text, padding=True, return_tensors="pt")
         # check if input_ids, attention_mask and token_type_ids are returned
         self.assertIn("input_ids", batch)

tests/models/chinese_clip/test_modeling_chinese_clip.py

Lines changed: 2 additions & 2 deletions

@@ -596,7 +596,7 @@ def test_retain_grad_hidden_states_attentions(self):
     def test_model_get_set_embeddings(self):
         pass

-    # override as the `logit_scale` parameter initilization is different for CHINESE_CLIP
+    # override as the `logit_scale` parameter initialization is different for CHINESE_CLIP
     def test_initialization(self):
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

@@ -608,7 +608,7 @@ def test_initialization(self):
             model = model_class(config=configs_no_init)
             for name, param in model.named_parameters():
                 if param.requires_grad:
-                    # check if `logit_scale` is initilized as per the original implementation
+                    # check if `logit_scale` is initialized as per the original implementation
                     if name == "logit_scale":
                         self.assertAlmostEqual(
                             param.data.item(),

tests/models/clap/test_modeling_clap.py

Lines changed: 2 additions & 2 deletions

@@ -543,7 +543,7 @@ def test_retain_grad_hidden_states_attentions(self):
     def test_model_get_set_embeddings(self):
         pass

-    # override as the `logit_scale` parameter initilization is different for CLAP
+    # override as the `logit_scale` parameter initialization is different for CLAP
     def test_initialization(self):
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

@@ -552,7 +552,7 @@ def test_initialization(self):
             model = model_class(config=configs_no_init)
             for name, param in model.named_parameters():
                 if param.requires_grad:
-                    # check if `logit_scale` is initilized as per the original implementation
+                    # check if `logit_scale` is initialized as per the original implementation
                     if name == "logit_scale":
                         self.assertAlmostEqual(
                             param.data.item(),

tests/models/clip/test_modeling_clip.py

Lines changed: 2 additions & 2 deletions

@@ -761,7 +761,7 @@ def test_retain_grad_hidden_states_attentions(self):
     def test_model_get_set_embeddings(self):
         pass

-    # override as the `logit_scale` parameter initilization is different for CLIP
+    # override as the `logit_scale` parameter initialization is different for CLIP
    def test_initialization(self):
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

@@ -770,7 +770,7 @@ def test_initialization(self):
             model = model_class(config=configs_no_init)
             for name, param in model.named_parameters():
                 if param.requires_grad:
-                    # check if `logit_scale` is initilized as per the original implementation
+                    # check if `logit_scale` is initialized as per the original implementation
                     if name == "logit_scale":
                         self.assertAlmostEqual(
                             param.data.item(),
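
The comment fixed across these files points at why the override exists: CLIP-style models initialize `logit_scale` to a fixed constant, log(1/0.07) in the original CLIP implementation, rather than to the near-zero values the generic initialization test expects. A small background sketch of that constant (the tolerance used in the actual assertions may differ):

    import math

    # Background sketch: CLIP's original implementation sets logit_scale to log(1 / 0.07),
    # so the overridden tests compare the parameter against this constant rather than ~0.
    logit_scale_init = math.log(1 / 0.07)
    assert abs(logit_scale_init - 2.6593) < 1e-3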
