Skip to content

Commit 70c7aec

Browse files
ydshieh and zucchini-nlp
authored and committed
Use another repo. for Mistral3 processor testing (huggingface#36925)
* fix

* fix

* fix

* fix

---------

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
1 parent b50a5ca commit 70c7aec

File tree

2 files changed

+9
-3
lines changed

2 files changed

+9
-3
lines changed

tests/models/mistral3/test_modeling_mistral3.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
from transformers.testing_utils import (
2626
cleanup,
2727
require_bitsandbytes,
28+
require_read_token,
2829
require_torch,
2930
require_torch_gpu,
3031
slow,
@@ -315,6 +316,7 @@ def setUp(self):
315316
def tearDown(self):
316317
cleanup(torch_device, gc_collect=True)
317318

319+
@require_read_token
318320
def test_mistral3_integration_generate_text_only(self):
319321
processor = AutoProcessor.from_pretrained(self.model_checkpoint)
320322
model = Mistral3ForConditionalGeneration.from_pretrained(
@@ -342,6 +344,7 @@ def test_mistral3_integration_generate_text_only(self):
342344
expected_output = "Sure, here's a haiku for you:\n\nWhispers of the breeze,\nCherry blossoms softly fall,\nSpring's gentle embrace."
343345
self.assertEqual(decoded_output, expected_output)
344346

347+
@require_read_token
345348
def test_mistral3_integration_generate(self):
346349
processor = AutoProcessor.from_pretrained(self.model_checkpoint)
347350
model = Mistral3ForConditionalGeneration.from_pretrained(
@@ -368,6 +371,7 @@ def test_mistral3_integration_generate(self):
368371
expected_output = "The image depicts two cats lying on a pink blanket. The larger cat, which appears to be an"
369372
self.assertEqual(decoded_output, expected_output)
370373

374+
@require_read_token
371375
def test_mistral3_integration_batched_generate(self):
372376
processor = AutoProcessor.from_pretrained(self.model_checkpoint)
373377
model = Mistral3ForConditionalGeneration.from_pretrained(
@@ -418,6 +422,7 @@ def test_mistral3_integration_batched_generate(self):
418422
f"Decoded output: {decoded_output}\nExpected output: {expected_output}",
419423
)
420424

425+
@require_read_token
421426
@require_bitsandbytes
422427
def test_mistral3_integration_batched_generate_multi_image(self):
423428
processor = AutoProcessor.from_pretrained(self.model_checkpoint)

tests/models/mistral3/test_processor_mistral3.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
import requests
2121

2222
from transformers import PixtralProcessor
23-
from transformers.testing_utils import require_read_token, require_vision
23+
from transformers.testing_utils import require_vision
2424
from transformers.utils import is_torch_available, is_vision_available
2525

2626
from ...test_processing_common import ProcessorTesterMixin
@@ -35,7 +35,6 @@
3535

3636

3737
@require_vision
38-
@require_read_token
3938
class Mistral3ProcessorTest(ProcessorTesterMixin, unittest.TestCase):
4039
"""This tests Pixtral processor with the new `spatial_merge_size` argument in Mistral3."""
4140

@@ -52,7 +51,9 @@ def setUpClass(cls):
5251

5352
def setUp(self):
5453
self.tmpdirname = tempfile.mkdtemp()
55-
processor = self.processor_class.from_pretrained("mistralai/Mistral-Small-3.1-24B-Instruct-2503")
54+
processor = PixtralProcessor.from_pretrained(
55+
"hf-internal-testing/Mistral-Small-3.1-24B-Instruct-2503-only-processor"
56+
)
5657
processor.save_pretrained(self.tmpdirname)
5758

5859
def get_processor(self):

0 commit comments

Comments (0)