Skip to content

Commit 2d6805c

Browse files
Add option for using fp8_e8m0fnu for model weights. (Comfy-Org#7733)
Seems to break every model I have tried but worth testing?
1 parent a8f63c0 commit 2d6805c

File tree

2 files changed: +3 additions, −0 deletions

comfy/cli_args.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,7 @@ def __call__(self, parser, namespace, values, option_string=None):
66 66
fpunet_group.add_argument("--fp16-unet", action="store_true", help="Run the diffusion model in fp16")
67 67
fpunet_group.add_argument("--fp8_e4m3fn-unet", action="store_true", help="Store unet weights in fp8_e4m3fn.")
68 68
fpunet_group.add_argument("--fp8_e5m2-unet", action="store_true", help="Store unet weights in fp8_e5m2.")
+ 69
fpunet_group.add_argument("--fp8_e8m0fnu-unet", action="store_true", help="Store unet weights in fp8_e8m0fnu.")
69 70

7071
fpvae_group = parser.add_mutually_exclusive_group()
71 72
fpvae_group.add_argument("--fp16-vae", action="store_true", help="Run the VAE in fp16, might cause black images.")

comfy/model_management.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -725,6 +725,8 @@ def unet_dtype(device=None, model_params=0, supported_dtypes=[torch.float16, tor
725 725
return torch.float8_e4m3fn
726 726
if args.fp8_e5m2_unet:
727 727
return torch.float8_e5m2
+ 728
if args.fp8_e8m0fnu_unet:
+ 729
return torch.float8_e8m0fnu
728 730

729 731
fp8_dtype = None
730 732
if weight_dtype in FLOAT8_TYPES:

0 commit comments

Comments (0)