From e7c8fc68c6289f5d254de492b7b557f38fe62ccf Mon Sep 17 00:00:00 2001
From: lucidrains
Date: Sat, 8 Mar 2025 12:27:34 -0800
Subject: [PATCH] credit assignment to Edward Hu, young ivy league student

---
 pyproject.toml                         | 2 +-
 tests/test_x_transformers.py           | 2 +-
 x_transformers/belief_state_wrapper.py | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 48989480..183b3770 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "x-transformers"
-version = "2.1.14"
+version = "2.1.15"
 description = "X-Transformers"
 authors = [
     { name = "Phil Wang", email = "lucidrains@gmail.com" }
diff --git a/tests/test_x_transformers.py b/tests/test_x_transformers.py
index 13f0e34c..5e369028 100644
--- a/tests/test_x_transformers.py
+++ b/tests/test_x_transformers.py
@@ -739,5 +739,5 @@ def test_belief_state_wrapper(
     if goal_suffix:
         suffix = torch.randint(0, 20000, (2, 2))
 
-    sampled = model.generate_with_suffix_token_only(seq[:, :1], 16, suffix = suffix)
+    sampled = model.generate_with_suffix_cond(seq[:, :1], 16, suffix = suffix)
     assert sampled.shape == (2, 16)
diff --git a/x_transformers/belief_state_wrapper.py b/x_transformers/belief_state_wrapper.py
index bc9829cd..b2e5bc2a 100644
--- a/x_transformers/belief_state_wrapper.py
+++ b/x_transformers/belief_state_wrapper.py
@@ -1,7 +1,7 @@
 # Belief State Transformer
 
-# https://arxiv.org/abs/2410.23506
+# Hu et al. https://arxiv.org/abs/2410.23506
 # https://www.youtube.com/watch?v=aqhbRtB2Fyg
 
 from __future__ import annotations
@@ -107,7 +107,7 @@ def __init__(
 
     @torch.no_grad()
     @eval_decorator
-    def generate_with_suffix_token_only(
+    def generate_with_suffix_cond(
         self,
         prompts,
         seq_len,
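
A minimal usage sketch of the renamed sampling entrypoint, modeled on the test hunk above. The TransformerWrapper configuration and the BeliefStateWrapper constructor keyword used here are assumptions for illustration only; they are not taken from this patch.

import torch
from x_transformers import TransformerWrapper, Decoder
from x_transformers.belief_state_wrapper import BeliefStateWrapper

# hypothetical forward decoder; dimensions chosen only for illustration
forward_decoder = TransformerWrapper(
    num_tokens = 20000,
    max_seq_len = 1024,
    attn_layers = Decoder(dim = 64, depth = 2, heads = 4)
)

# assumed keyword argument name - check BeliefStateWrapper.__init__ for the real signature
model = BeliefStateWrapper(forward_decoder = forward_decoder)

prompt = torch.randint(0, 20000, (2, 1))   # one-token prompts, batch of 2
suffix = torch.randint(0, 20000, (2, 2))   # goal suffix tokens, as in the test

# sample 16 tokens conditioned on both the prompt and the goal suffix,
# using the method renamed in this patch
sampled = model.generate_with_suffix_cond(prompt, 16, suffix = suffix)
assert sampled.shape == (2, 16)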