Commit a039040

Add: better comment
Signed-off-by: Rahul Tuli <rtuli@redhat.com>
1 parent b1bb2c4 commit a039040

File tree

1 file changed: +2 additions, -2 deletions


vllm/model_executor/models/llama_eagle3.py

Lines changed: 2 additions & 2 deletions
@@ -97,12 +97,12 @@ def forward(
         residual: Optional[torch.Tensor],
     ) -> tuple[torch.Tensor, torch.Tensor]:
         if self.layer_idx == 0:
+            # First layer: concatenate embeds with hidden_states
             embeds = self.input_layernorm(embeds)
             hidden_states, residual = self._residual_norm(hidden_states=hidden_states)
             hidden_states = torch.cat([embeds, hidden_states], dim=-1)
         else:
-            # Subsequent layers: only process hidden_states
-            # and residuals
+            # Subsequent layers: process hidden_states and residuals only
             hidden_states, residual = self.input_layernorm(hidden_states, residual)
 
         # Self Attention
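
For context, a minimal standalone sketch of the shape logic the updated comments describe: the first EAGLE-3 draft layer normalizes the token embeddings and the target model's hidden states separately, then concatenates them along the feature dimension, so the first layer sees an input twice the hidden size; later layers pass only hidden_states (and the residual) through their layernorm. This is not the vLLM implementation itself; the module names, tensor shapes, and the use of nn.LayerNorm (standing in for the model's RMSNorm layers) are placeholders.

    import torch
    import torch.nn as nn

    hidden_size = 8
    batch, seq = 2, 4

    # Placeholder norms; the real layer uses RMSNorm (input_layernorm / _residual_norm).
    embed_norm = nn.LayerNorm(hidden_size)
    hidden_norm = nn.LayerNorm(hidden_size)

    embeds = torch.randn(batch, seq, hidden_size)         # draft token embeddings
    hidden_states = torch.randn(batch, seq, hidden_size)  # hidden states from the target model

    # layer_idx == 0: normalize each stream, then concatenate on the last dim.
    first_layer_input = torch.cat([embed_norm(embeds), hidden_norm(hidden_states)], dim=-1)
    print(first_layer_input.shape)  # torch.Size([2, 4, 16]) -> 2 * hidden_size features

    # layer_idx > 0: only hidden_states and the residual flow through the layernorm.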
