Remove redundant GPT-2 model in the test
PinetreePantry committed Jan 25, 2024
1 parent 47cd000 commit 1a3616a
Showing 2 changed files with 15 additions and 16 deletions.
5 changes: 3 additions & 2 deletions pyvene/models/modeling_utils.py
@@ -510,14 +510,15 @@ def scatter_neurons(
                 for loc_i, loc in enumerate(locations):
                     h_start_index = start_index + loc * attn_head_size
                     h_end_index = start_index + (loc + 1) * attn_head_size
-                    print(tensor_input.shape, replacing_tensor_input.shape, batch_i, loc, h_start_index, h_end_index, loc_i)
                     tensor_input[
                         batch_i, :, h_start_index:h_end_index
                     ] = replacing_tensor_input[
                         batch_i, loc_i
                     ]  # [s, dh]
         else:
-            assert False, f"`head` in representation type should not associate with {unit}"
+            assert (
+                False
+            ), f"`head` in representation type should not associate with {unit}"
     else:
         if use_fast:
             tensor_input[
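For context on the hunk above: scatter_neurons writes a per-head replacement of shape [s, dh] into the attn_head_size-wide slice of the hidden dimension that belongs to each requested head. Below is a minimal, self-contained sketch of that indexing; the shapes and the zero start_index are illustrative assumptions, not pyvene's actual call signature.

import torch

# Illustrative shapes only; hidden_size = n_head * attn_head_size, as in GPT-2.
batch_size, seq_len, n_head, attn_head_size = 2, 5, 3, 2
hidden_size = n_head * attn_head_size
tensor_input = torch.zeros(batch_size, seq_len, hidden_size)

# Replace heads 0 and 2 of example 1 with new [s, dh] activations.
batch_i, locations = 1, [0, 2]
replacing_tensor_input = torch.ones(batch_size, len(locations), seq_len, attn_head_size)

start_index = 0  # offset of the head block inside the hidden dimension (assumed 0 here)
for loc_i, loc in enumerate(locations):
    h_start_index = start_index + loc * attn_head_size
    h_end_index = start_index + (loc + 1) * attn_head_size
    # Write the [s, dh] slice for this head into the matching hidden-dim columns.
    tensor_input[batch_i, :, h_start_index:h_end_index] = replacing_tensor_input[batch_i, loc_i]

# Heads 0 and 2 of example 1 are overwritten; head 1 is untouched.
assert tensor_input[1, :, 0:2].bool().all() and not tensor_input[1, :, 2:4].bool().any()

The print deleted in the hunk above sat inside this loop and logged these shapes and indices on every head write.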
26 changes: 12 additions & 14 deletions tests/unit_tests/ModelUtilsTestCase.py
@@ -6,20 +6,18 @@
 class ModelUtilsTestCase(unittest.TestCase):
     @classmethod
     def setUpClass(self):
-        self.gpt2_config, _, self.gpt2 = create_gpt2_lm(
-            config=GPT2Config(
-                n_embd=6,
-                n_head=3,
-                attn_pdrop=0.0,
-                embd_pdrop=0.0,
-                resid_pdrop=0.0,
-                summary_first_dropout=0.0,
-                n_layer=4,
-                bos_token_id=0,
-                eos_token_id=0,
-                n_positions=20,
-                vocab_size=10,
-            )
+        self.gpt2_config = GPT2Config(
+            n_embd=6,
+            n_head=3,
+            attn_pdrop=0.0,
+            embd_pdrop=0.0,
+            resid_pdrop=0.0,
+            summary_first_dropout=0.0,
+            n_layer=4,
+            bos_token_id=0,
+            eos_token_id=0,
+            n_positions=20,
+            vocab_size=10,
         )
         self.gpt2_model = hf_models.gpt2.modeling_gpt2.GPT2LMHeadModel
 
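The slimmed-down setUpClass keeps only the config and the model class; tests that need actual weights can build the tiny GPT-2 on demand. A sketch of that usage pattern follows; the test class, test name, and forward-pass check are illustrative assumptions, not part of this commit.

import unittest

import torch
import transformers.models as hf_models
from transformers import GPT2Config


class ExampleUsageTestCase(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        # Mirrors the new fixture above: store the config and the model class only.
        self.gpt2_config = GPT2Config(
            n_embd=6, n_head=3, n_layer=4, n_positions=20, vocab_size=10,
            attn_pdrop=0.0, embd_pdrop=0.0, resid_pdrop=0.0,
            summary_first_dropout=0.0, bos_token_id=0, eos_token_id=0,
        )
        self.gpt2_model = hf_models.gpt2.modeling_gpt2.GPT2LMHeadModel

    def test_forward_pass(self):
        # Build the tiny model only where it is needed, instead of in setUpClass.
        model = self.gpt2_model(config=self.gpt2_config)
        input_ids = torch.randint(0, 10, (2, 5))
        logits = model(input_ids).logits
        self.assertEqual(logits.shape, (2, 5, 10))


if __name__ == "__main__":
    unittest.main()

Constructing the model lazily keeps the shared fixture cheap and avoids holding a redundant GPT-2 instance alongside self.gpt2_model, which is what the commit message refers to.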
