We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 0b6acfb · commit 719a80a
src/transformers/models/whisper/modeling_whisper.py
@@ -1845,6 +1845,10 @@ def prepare_inputs_for_generation(
1845
elif use_cache:
1846
cache_position = cache_position[-decoder_input_ids.shape[1] :]
1847
1848
+ # The `contiguous()` here is necessary to have a static stride during decoding. torchdynamo otherwise
1849
+ # recompiles graphs as the stride of the inputs is a guard. Ref: https://github.com/huggingface/transformers/pull/29114
1850
+ decoder_input_ids = decoder_input_ids.contiguous()
1851
+
1852
return {
1853
"encoder_outputs": encoder_outputs,
1854
"past_key_values": past_key_values,
0 commit comments