Commit a8339b9

Fix bart shape comment (#8423)

1 parent: 46509d1

2 files changed: +2 additions, -2 deletions

src/transformers/modeling_bart.py

Lines changed: 1 addition & 1 deletion
@@ -585,7 +585,7 @@ def forward(
 
         x = F.dropout(x, p=self.dropout, training=self.training)
 
-        # Convert to Bart output format: (seq_len, BS, model_dim) -> (BS, seq_len, model_dim)
+        # Convert to Bart output format: (BS, seq_len, model_dim) -> (seq_len, BS, model_dim)
         x = x.transpose(0, 1)
         encoder_hidden_states = encoder_hidden_states.transpose(0, 1)
 
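The corrected comment now agrees with the code: per the comment, x arrives here batch-first, and transpose(0, 1) swaps the first two axes into the time-first layout. A minimal standalone sketch of that shape change in PyTorch, using arbitrary example sizes (BS, seq_len, model_dim are placeholders, not values from the commit):

import torch

# Arbitrary example sizes, for illustration only.
BS, seq_len, model_dim = 4, 16, 1024

x = torch.randn(BS, seq_len, model_dim)  # batch-first: (BS, seq_len, model_dim)
x = x.transpose(0, 1)                    # swap dims 0 and 1, as in the diff

# (BS, seq_len, model_dim) -> (seq_len, BS, model_dim), matching the corrected comment
assert x.shape == (seq_len, BS, model_dim)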

src/transformers/modeling_tf_bart.py

Lines changed: 1 addition & 1 deletion
@@ -570,7 +570,7 @@ def call(
         x = self.layernorm_embedding(x + positions)
         x = tf.nn.dropout(x, rate=self.dropout if training else 0)
 
-        # Convert to Bart output format: (seq_len, BS, model_dim) -> (BS, seq_len, model_dim)
+        # Convert to Bart output format: (BS, seq_len, model_dim) -> (seq_len, BS, model_dim)
         x = tf.transpose(x, perm=(1, 0, 2))
         assert len(shape_list(encoder_hidden_states)) == 3, "encoder_hidden_states must be a 3D tensor"
         encoder_hidden_states = tf.transpose(encoder_hidden_states, perm=(1, 0, 2))
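
The TF port performs the same flip with an explicit permutation. A matching sketch, again with made-up sizes rather than anything taken from the commit:

import tensorflow as tf

# Arbitrary example sizes, for illustration only.
BS, seq_len, model_dim = 4, 16, 1024

x = tf.random.normal((BS, seq_len, model_dim))  # batch-first: (BS, seq_len, model_dim)
x = tf.transpose(x, perm=(1, 0, 2))             # move the sequence axis to the front

# (BS, seq_len, model_dim) -> (seq_len, BS, model_dim), matching the corrected comment
assert x.shape == (seq_len, BS, model_dim)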
