Skip to content

Commit

Permalink
initial pass
Browse files Browse the repository at this point in the history
  • Loading branch information
Gustavo committed Oct 18, 2024
1 parent f98688d commit b12aea0
Show file tree
Hide file tree
Showing 2 changed files with 218 additions and 242 deletions.
18 changes: 17 additions & 1 deletion tests/transformers/tests/generation/test_framework_agnostic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,12 @@
Framework agnostic tests for generate()-related methods.
"""

import pytest
import numpy as np
from transformers import AutoTokenizer
from transformers.testing_utils import slow, torch_device
from transformers.testing_utils import slow

torch_device = "hpu"


class GenerationIntegrationTestsMixin:
Expand Down Expand Up @@ -46,6 +49,7 @@ def test_validate_generation_inputs(self):
valid_model_kwargs = {"attention_mask": create_tensor_fn(np.zeros_like(input_ids))}
model.generate(input_ids, **valid_model_kwargs)

@pytest.mark.xfail
def test_custom_logits_processor(self):
model_cls = self.framework_dependent_parameters["AutoModelForSeq2SeqLM"]
logits_processor_list_cls = self.framework_dependent_parameters["LogitsProcessorList"]
Expand All @@ -66,6 +70,7 @@ def test_custom_logits_processor(self):
bart_model.config.min_length = None
bart_model.generate(input_ids, logits_processor=logits_processor)

@pytest.mark.xfail
def test_max_new_tokens_encoder_decoder(self):
model_cls = self.framework_dependent_parameters["AutoModelForSeq2SeqLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -222,6 +227,7 @@ def test_transition_scores_greedy_search_normalized(self):
)
self.assertTrue(np.allclose(transition_scores, expected_scores, atol=1e-3))

@pytest.mark.xfail
def test_transition_scores_beam_search_encoder_decoder(self):
model_cls = self.framework_dependent_parameters["AutoModelForSeq2SeqLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -257,6 +263,7 @@ def test_transition_scores_beam_search_encoder_decoder(self):

self.assertTrue(np.allclose(np.sum(transition_scores, axis=-1), outputs.sequences_scores, atol=1e-3))

@pytest.mark.xfail
def test_transition_scores_beam_search_encoder_decoder_with_eos(self):
model_cls = self.framework_dependent_parameters["AutoModelForSeq2SeqLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -291,6 +298,7 @@ def test_transition_scores_beam_search_encoder_decoder_with_eos(self):

self.assertTrue(np.allclose(np.sum(transition_scores, axis=-1), outputs.sequences_scores, atol=1e-3))

@pytest.mark.xfail
def test_transition_scores_beam_search_decoder_only(self):
model_cls = self.framework_dependent_parameters["AutoModelForCausalLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -328,6 +336,7 @@ def test_transition_scores_beam_search_decoder_only(self):

self.assertTrue(np.allclose(np.sum(transition_scores, axis=-1), outputs.sequences_scores, atol=1e-3))

@pytest.mark.xfail
def test_transition_scores_beam_sample_encoder_decoder(self):
model_cls = self.framework_dependent_parameters["AutoModelForSeq2SeqLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -400,6 +409,7 @@ def test_transition_scores_early_stopping(self):

self.assertTrue(np.allclose(np.sum(transition_scores, axis=-1), outputs.sequences_scores))

@pytest.mark.xfail
def test_encoder_decoder_generate_attention_mask(self):
model_cls = self.framework_dependent_parameters["AutoModelForSeq2SeqLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -501,6 +511,7 @@ def test_generate_too_many_encoder_kwargs(self):
with self.assertRaises(ValueError):
model.generate(input_ids=input_ids, inputs_embeds=input_ids)

@pytest.mark.xfail
def test_generate_input_features_as_encoder_kwarg(self):
model_cls = self.framework_dependent_parameters["AutoModelForSpeechSeq2Seq"]
floats_tensor = self.framework_dependent_parameters["floats_tensor"]
Expand Down Expand Up @@ -542,6 +553,7 @@ def test_generate_pixel_values_as_encoder_kwarg(self):
self.assertTrue(np.array_equal(output_sequences, output_sequences_kwargs))
self.assertEqual(output_sequences.shape, (2, 5))

@pytest.mark.xfail
def test_generate_encoder_outputs_attention_mask(self):
model_cls = self.framework_dependent_parameters["AutoModelForSpeechSeq2Seq"]
floats_tensor = self.framework_dependent_parameters["floats_tensor"]
Expand All @@ -567,6 +579,7 @@ def test_generate_encoder_outputs_attention_mask(self):

self.assertTrue(np.array_equal(output_sequences_no_mask, output_sequences_with_mask))

@pytest.mark.xfail
def test_eos_token_id_int_and_list_greedy_search(self):
model_cls = self.framework_dependent_parameters["AutoModelForCausalLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -594,6 +607,7 @@ def test_eos_token_id_int_and_list_greedy_search(self):
generated_tokens = model.generate(**tokens, eos_token_id=eos_token_id, **generation_kwargs)
self.assertTrue(expectation == len(generated_tokens[0]))

@pytest.mark.xfail
def test_eos_token_id_int_and_list_contrastive_search(self):
model_cls = self.framework_dependent_parameters["AutoModelForCausalLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -623,6 +637,7 @@ def test_eos_token_id_int_and_list_contrastive_search(self):
generated_tokens = model.generate(**tokens, eos_token_id=eos_token_id, **generation_kwargs)
self.assertTrue(expectation == len(generated_tokens[0]))

@pytest.mark.xfail
def test_eos_token_id_int_and_list_beam_search(self):
model_cls = self.framework_dependent_parameters["AutoModelForCausalLM"]
return_tensors = self.framework_dependent_parameters["return_tensors"]
Expand Down Expand Up @@ -658,6 +673,7 @@ def test_eos_token_id_int_and_list_beam_search(self):
)
self.assertTrue(unpadded_correct_condition or padded_correct_condition)

@pytest.mark.xfail
def test_generate_vision2text_conditioning(self):
model_cls = self.framework_dependent_parameters["AutoModelForVision2Seq"]
floats_tensor = self.framework_dependent_parameters["floats_tensor"]
Expand Down
Loading

0 comments on commit b12aea0

Please sign in to comment.