Skip to content

Commit

Permalink
Update training info display
Browse files Browse the repository at this point in the history
  • Loading branch information
bmaltais committed Apr 17, 2024
1 parent 8bda4f2 commit 43087f7
Show file tree
Hide file tree
Showing 4 changed files with 56 additions and 25 deletions.
23 changes: 15 additions & 8 deletions kohya_gui/dreambooth_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,8 +530,11 @@ def train_model(
lr_warmup_steps = 0
else:
lr_warmup_steps = 0

max_train_steps_info = f"Max train steps: {max_train_steps}"

if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"
else:
if train_data_dir == "":
log.error("Train data dir is empty")
Expand Down Expand Up @@ -604,19 +607,23 @@ def train_model(
)
max_train_steps_info = f"max_train_steps ({total_steps} / {train_batch_size} / {gradient_accumulation_steps} * {epoch} * {reg_factor}) = {max_train_steps}"
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"
if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"

if lr_warmup != 0:
lr_warmup_steps = round(float(int(lr_warmup) * int(max_train_steps) / 100))
else:
lr_warmup_steps = 0

log.info(f"Total steps: {total_steps}")
log.info(f"Train batch size: {train_batch_size}")
log.info(f"Gradient accumulation steps: {gradient_accumulation_steps}")
log.info(f"Epoch: {epoch}")
log.info(max_train_steps_info)
log.info(f"lr_warmup_steps = {lr_warmup_steps}")

log.info(f"Train batch size: {train_batch_size}")
log.info(f"Gradient accumulation steps: {gradient_accumulation_steps}")
log.info(f"Epoch: {epoch}")
log.info(max_train_steps_info)
log.info(f"lr_warmup_steps = {lr_warmup_steps}")

run_cmd = [fr'"{get_executable_path("accelerate")}"', "launch"]

Expand Down
12 changes: 11 additions & 1 deletion kohya_gui/finetune_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -559,6 +559,11 @@ def train_model(
log.info(
"Dataset config toml file used, skipping caption json file, image buckets, total_steps, train_batch_size, gradient_accumulation_steps, epoch, reg_factor, max_train_steps creation..."
)

if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"
else:
# create caption json file
if generate_caption_database:
Expand Down Expand Up @@ -675,8 +680,13 @@ def train_model(
# Divide by two because flip augmentation creates two copies of the source images
if flip_aug and max_train_steps:
max_train_steps = int(math.ceil(float(max_train_steps) / 2))

if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"

log.info(f"max_train_steps = {max_train_steps}")
log.info(max_train_steps_info)

if max_train_steps != 0:
lr_warmup_steps = round(float(int(lr_warmup) * int(max_train_steps) / 100))
Expand Down
23 changes: 15 additions & 8 deletions kohya_gui/lora_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -777,7 +777,10 @@ def train_model(
stop_text_encoder_training = 0
lr_warmup_steps = 0

max_train_steps_info = f"Max train steps: {max_train_steps}"
if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"

else:
if train_data_dir == "":
Expand Down Expand Up @@ -851,7 +854,10 @@ def train_model(
)
max_train_steps_info = f"max_train_steps ({total_steps} / {train_batch_size} / {gradient_accumulation_steps} * {epoch} * {reg_factor}) = {max_train_steps}"
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"
if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"

# calculate stop encoder training
if stop_text_encoder_training_pct == 0:
Expand All @@ -867,12 +873,13 @@ def train_model(
lr_warmup_steps = 0

log.info(f"Total steps: {total_steps}")
log.info(f"Train batch size: {train_batch_size}")
log.info(f"Gradient accumulation steps: {gradient_accumulation_steps}")
log.info(f"Epoch: {epoch}")
log.info(max_train_steps_info)
log.info(f"stop_text_encoder_training = {stop_text_encoder_training}")
log.info(f"lr_warmup_steps = {lr_warmup_steps}")

log.info(f"Train batch size: {train_batch_size}")
log.info(f"Gradient accumulation steps: {gradient_accumulation_steps}")
log.info(f"Epoch: {epoch}")
log.info(max_train_steps_info)
log.info(f"stop_text_encoder_training = {stop_text_encoder_training}")
log.info(f"lr_warmup_steps = {lr_warmup_steps}")

run_cmd = [rf'"{get_executable_path("accelerate")}"', "launch"]

Expand Down
23 changes: 15 additions & 8 deletions kohya_gui/textual_inversion_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -545,7 +545,10 @@ def train_model(
stop_text_encoder_training = 0
lr_warmup_steps = 0

max_train_steps_info = f"Max train steps: {max_train_steps}"
if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"

else:
if train_data_dir == "":
Expand Down Expand Up @@ -619,7 +622,10 @@ def train_model(
)
max_train_steps_info = f"max_train_steps ({total_steps} / {train_batch_size} / {gradient_accumulation_steps} * {epoch} * {reg_factor}) = {max_train_steps}"
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"
if max_train_steps == 0:
max_train_steps_info = f"Max train steps: 0. sd-scripts will therefore default to 1600. Please specify a different value if required."
else:
max_train_steps_info = f"Max train steps: {max_train_steps}"

# calculate stop encoder training
if stop_text_encoder_training_pct == 0:
Expand All @@ -635,12 +641,13 @@ def train_model(
lr_warmup_steps = 0

log.info(f"Total steps: {total_steps}")
log.info(f"Train batch size: {train_batch_size}")
log.info(f"Gradient accumulation steps: {gradient_accumulation_steps}")
log.info(f"Epoch: {epoch}")
log.info(max_train_steps_info)
log.info(f"stop_text_encoder_training = {stop_text_encoder_training}")
log.info(f"lr_warmup_steps = {lr_warmup_steps}")

log.info(f"Train batch size: {train_batch_size}")
log.info(f"Gradient accumulation steps: {gradient_accumulation_steps}")
log.info(f"Epoch: {epoch}")
log.info(max_train_steps_info)
log.info(f"stop_text_encoder_training = {stop_text_encoder_training}")
log.info(f"lr_warmup_steps = {lr_warmup_steps}")

run_cmd = [fr'"{get_executable_path("accelerate")}"', "launch"]

Expand Down

0 comments on commit 43087f7

Please sign in to comment.