
Commit a94bc84

fix bitsandbytes optimizers to work when specified with a full path #1640
1 parent 4296e28 commit a94bc84


library/train_util.py

Lines changed: 9 additions & 2 deletions
@@ -3014,7 +3014,11 @@ def int_or_float(value):
         "--optimizer_type",
         type=str,
         default="",
-        help="Optimizer to use / オプティマイザの種類: AdamW (default), AdamW8bit, PagedAdamW, PagedAdamW8bit, PagedAdamW32bit, Lion8bit, PagedLion8bit, Lion, AdEMAMix8bit, PagedAdEMAMix8bit, SGDNesterov, SGDNesterov8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, AdaFactor",
+        help="Optimizer to use / オプティマイザの種類: AdamW (default), AdamW8bit, PagedAdamW, PagedAdamW8bit, PagedAdamW32bit, "
+        "Lion8bit, PagedLion8bit, Lion, SGDNesterov, SGDNesterov8bit, "
+        "DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, "
+        "AdaFactor. "
+        "Also, you can use any optimizer by specifying the full path to the class, like 'bitsandbytes.optim.AdEMAMix8bit' or 'bitsandbytes.optim.PagedAdEMAMix8bit'.",
     )
 
     # backward compatibility
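The updated help text documents that --optimizer_type now also accepts a fully qualified class path such as 'bitsandbytes.optim.PagedAdEMAMix8bit'. As a rough illustration of that mechanism (not the repository's exact code; the helper name below is made up), a dotted path can be resolved into a class at runtime with importlib:

import importlib

def resolve_optimizer_class(full_path: str):
    # Split "package.module.ClassName" into the module part and the class name,
    # import the module, then look the class up on it.
    module_name, class_name = full_path.rsplit(".", 1)
    module = importlib.import_module(module_name)
    return getattr(module, class_name)

# Illustrative usage (requires bitsandbytes to be installed):
# optimizer_class = resolve_optimizer_class("bitsandbytes.optim.PagedAdEMAMix8bit")
# optimizer = optimizer_class(trainable_params, lr=1e-4)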
@@ -4105,6 +4109,7 @@ def get_optimizer(args, trainable_params):
 
     lr = args.learning_rate
     optimizer = None
+    optimizer_class = None
 
     if optimizer_type == "Lion".lower():
         try:
@@ -4162,7 +4167,8 @@ def get_optimizer(args, trainable_params):
                 "No PagedLion8bit. The version of bitsandbytes installed seems to be old. Please install 0.39.0 or later. / PagedLion8bitが定義されていません。インストールされているbitsandbytesのバージョンが古いようです。0.39.0以上をインストールしてください"
             )
 
-        optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)
+        if optimizer_class is not None:
+            optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)
 
     elif optimizer_type == "PagedAdamW".lower():
         logger.info(f"use PagedAdamW optimizer | {optimizer_kwargs}")
@@ -4338,6 +4344,7 @@ def get_optimizer(args, trainable_params):
         optimizer_class = getattr(optimizer_module, optimizer_type)
         optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)
 
+    # for logging
     optimizer_name = optimizer_class.__module__ + "." + optimizer_class.__name__
     optimizer_args = ",".join([f"{k}={v}" for k, v in optimizer_kwargs.items()])
 
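Taken together, the hunks initialize optimizer_class to None, guard the shared instantiation so it only runs when a branch actually resolved a class, and label the lines that log the resolved class. A minimal, self-contained sketch of that guard-and-log pattern (torch.optim.AdamW stands in for whatever class was resolved; it is not part of this diff):

import torch

trainable_params = [torch.nn.Parameter(torch.zeros(1))]
optimizer_kwargs = {}
lr = 1e-4

optimizer_class = None                 # initialized up front, as in the commit
optimizer_class = torch.optim.AdamW    # placeholder for the class a branch resolved

# Only instantiate when a class was actually resolved, mirroring the new guard.
if optimizer_class is not None:
    optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)

# for logging: record the fully qualified class name and its kwargs
optimizer_name = optimizer_class.__module__ + "." + optimizer_class.__name__
optimizer_args = ",".join(f"{k}={v}" for k, v in optimizer_kwargs.items())
print(optimizer_name, optimizer_args)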
