Skip to content

Commit b7630f1

Browse files
authored
SAM2 AMG load_fast fix (#1374)
1 parent ebb9086 commit b7630f1

File tree

1 file changed: +6 additions, -6 deletions

1 file changed: +6 additions, -6 deletions

examples/sam2_amg_server/server.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -588,6 +588,12 @@ def main(checkpoint_path,
588588
if load_fast != "":
589589
load_aot_fast(mask_generator, load_fast)
590590

591+
if furious:
592+
set_furious(mask_generator)
593+
# since autoquant is replicating what furious mode is doing, don't use these two together
594+
elif use_autoquant:
595+
set_autoquant(mask_generator)
596+
591597
if save_fast != "":
592598
assert load_fast == "", "Can't save compiled models while loading them with --load-fast."
593599
assert not baseline, "--fast cannot be combined with baseline. code to be torch.compile(fullgraph=True) compatible."
@@ -598,12 +604,6 @@ def main(checkpoint_path,
598604
assert not baseline, "--fast cannot be combined with baseline. code to be torch.compile(fullgraph=True) compatible."
599605
set_fast(mask_generator, load_fast)
600606

601-
if furious:
602-
set_furious(mask_generator)
603-
# since autoquant is replicating what furious mode is doing, don't use these two together
604-
elif use_autoquant:
605-
set_autoquant(mask_generator)
606-
607607
with open('dog.jpg', 'rb') as f:
608608
image_tensor = file_bytes_to_image_tensor(bytearray(f.read()))
609609

0 commit comments

Comments (0)