make tracker init_kwargs configurable
ddPn08 committed Jul 11, 2023
1 parent f54b784 commit b841dd7
Showing 8 changed files with 40 additions and 7 deletions.
6 changes: 5 additions & 1 deletion fine_tune.py
@@ -6,6 +6,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml

 from tqdm import tqdm
 import torch
@@ -275,7 +276,10 @@ def fn_recursive_set_mem_eff(module: torch.nn.Module):
     prepare_scheduler_for_custom_training(noise_scheduler, accelerator.device)

     if accelerator.is_main_process:
-        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

     for epoch in range(num_train_epochs):
         accelerator.print(f"\nepoch {epoch+1}/{num_train_epochs}")
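Every training script gets the same pattern: an optional TOML file is parsed into a dict and forwarded to Accelerator.init_trackers as init_kwargs, which accelerate routes to the underlying tracker(s) keyed by tracker name. A minimal sketch of the mechanism, assuming a wandb tracker and a hypothetical tracker_config.toml (file name and values invented for illustration):

import toml
from accelerate import Accelerator

accelerator = Accelerator(log_with="wandb")

# Hypothetical tracker_config.toml -- top-level tables are keyed by tracker
# name, and each table becomes keyword arguments for that tracker's init:
#   [wandb]
#   name = "my-finetune-run"
#   tags = ["sd", "finetune"]
init_kwargs = toml.load("tracker_config.toml")  # -> {"wandb": {"name": "my-finetune-run", "tags": [...]}}

# accelerate passes init_kwargs["wandb"] through to wandb.init(...)
accelerator.init_trackers("finetuning", init_kwargs=init_kwargs)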
6 changes: 6 additions & 0 deletions library/train_util.py
@@ -2445,6 +2445,12 @@ def add_training_arguments(parser: argparse.ArgumentParser, support_dreambooth:
         default=None,
         help="name of tracker to use for logging, default is script-specific default name / ログ出力に使用するtrackerの名前、省略時はスクリプトごとのデフォルト名",
     )
+    parser.add_argument(
+        "--log_tracker_config",
+        type=str,
+        default=None,
+        help="path to tracker config file to use for logging / ログ出力に使用するtrackerの設定ファイルのパス",
+    )
     parser.add_argument(
         "--wandb_api_key",
         type=str,
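The new flag combines with the existing --log_tracker_name option; a hypothetical invocation (other arguments elided):

    python fine_tune.py --log_tracker_name my-project --log_tracker_config ./tracker_config.toml

When --log_tracker_config is omitted, init_kwargs stays an empty dict, so logging behaves exactly as before.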
6 changes: 5 additions & 1 deletion sdxl_train.py
@@ -5,6 +5,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml

 from tqdm import tqdm
 import torch
@@ -350,7 +351,10 @@ def fn_recursive_set_mem_eff(module: torch.nn.Module):
     prepare_scheduler_for_custom_training(noise_scheduler, accelerator.device)

     if accelerator.is_main_process:
-        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

     for epoch in range(num_train_epochs):
         accelerator.print(f"\nepoch {epoch+1}/{num_train_epochs}")
6 changes: 5 additions & 1 deletion train_controlnet.py
@@ -7,6 +7,7 @@
 import time
 from multiprocessing import Value
 from types import SimpleNamespace
+import toml

 from tqdm import tqdm
 import torch
@@ -324,7 +325,10 @@ def train(args):
         clip_sample=False,
     )
     if accelerator.is_main_process:
-        accelerator.init_trackers("controlnet_train" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("controlnet_train" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

     loss_list = []
     loss_total = 0.0
6 changes: 5 additions & 1 deletion train_db.py
@@ -7,6 +7,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml

 from tqdm import tqdm
 import torch
@@ -248,7 +249,10 @@ def train(args):
     prepare_scheduler_for_custom_training(noise_scheduler, accelerator.device)

     if accelerator.is_main_process:
-        accelerator.init_trackers("dreambooth" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("dreambooth" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

     loss_list = []
     loss_total = 0.0
6 changes: 5 additions & 1 deletion train_network.py
@@ -8,6 +8,7 @@
 import time
 import json
 from multiprocessing import Value
+import toml

 from tqdm import tqdm
 import torch
@@ -672,7 +673,10 @@ def train(self, args):
         prepare_scheduler_for_custom_training(noise_scheduler, accelerator.device)

         if accelerator.is_main_process:
-            accelerator.init_trackers("network_train" if args.log_tracker_name is None else args.log_tracker_name)
+            init_kwargs = {}
+            if args.log_tracker_config is not None:
+                init_kwargs = toml.load(args.log_tracker_config)
+            accelerator.init_trackers("network_train" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

         loss_list = []
         loss_total = 0.0
6 changes: 5 additions & 1 deletion train_textual_inversion.py
@@ -3,6 +3,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml

 from tqdm import tqdm
 import torch
@@ -493,7 +494,10 @@ def train(self, args):
         prepare_scheduler_for_custom_training(noise_scheduler, accelerator.device)

         if accelerator.is_main_process:
-            accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name)
+            init_kwargs = {}
+            if args.log_tracker_config is not None:
+                init_kwargs = toml.load(args.log_tracker_config)
+            accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

         # function for saving/removing
         def save_model(ckpt_name, embs_list, steps, epoch_no, force_sync_upload=False):
5 changes: 4 additions & 1 deletion train_textual_inversion_XTI.py
@@ -386,7 +386,10 @@ def train(args):
     prepare_scheduler_for_custom_training(noise_scheduler, accelerator.device)

     if accelerator.is_main_process:
-        accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

     # function for saving/removing
     def save_model(ckpt_name, embs, steps, epoch_no, force_sync_upload=False):
