Skip to content

Commit

Permalink
credit assignment to Edward Hu, young Ivy League student
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Mar 8, 2025
1 parent 836af4a commit e7c8fc6
Show file tree
Hide file tree
Showing 3 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "x-transformers"
version = "2.1.14"
version = "2.1.15"
description = "X-Transformers"
authors = [
{ name = "Phil Wang", email = "lucidrains@gmail.com" }
Expand Down
2 changes: 1 addition & 1 deletion tests/test_x_transformers.py
Original file line number Diff line number Diff line change
Expand Up @@ -739,5 +739,5 @@ def test_belief_state_wrapper(
if goal_suffix:
suffix = torch.randint(0, 20000, (2, 2))

sampled = model.generate_with_suffix_token_only(seq[:, :1], 16, suffix = suffix)
sampled = model.generate_with_suffix_cond(seq[:, :1], 16, suffix = suffix)
assert sampled.shape == (2, 16)
4 changes: 2 additions & 2 deletions x_transformers/belief_state_wrapper.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@

# Belief State Transformer

# https://arxiv.org/abs/2410.23506
# Hu et al. https://arxiv.org/abs/2410.23506
# https://www.youtube.com/watch?v=aqhbRtB2Fyg

from __future__ import annotations
Expand Down Expand Up @@ -107,7 +107,7 @@ def __init__(

@torch.no_grad()
@eval_decorator
def generate_with_suffix_token_only(
def generate_with_suffix_cond(
self,
prompts,
seq_len,
Expand Down

0 comments on commit e7c8fc6

Please sign in to comment.