fix dylora create_modules error
tamlog06 committed Feb 18, 2024
1 parent cd19df4 commit a6f1ed2
Showing 1 changed file with 28 additions and 3 deletions.
31 changes: 28 additions & 3 deletions networks/dylora.py
@@ -12,7 +12,9 @@
import math
import os
import random
-from typing import List, Tuple, Union
+from typing import Dict, List, Optional, Tuple, Type, Union
+from diffusers import AutoencoderKL
+from transformers import CLIPTextModel
import torch
from torch import nn

@@ -165,7 +167,15 @@ def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, miss
super()._load_from_state_dict(state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs)


-def create_network(multiplier, network_dim, network_alpha, vae, text_encoder, unet, **kwargs):
+def create_network(
+    multiplier: float,
+    network_dim: Optional[int],
+    network_alpha: Optional[float],
+    vae: AutoencoderKL,
+    text_encoder: Union[CLIPTextModel, List[CLIPTextModel]],
+    unet,
+    **kwargs,
+):
if network_dim is None:
network_dim = 4 # default
if network_alpha is None:
@@ -182,6 +192,7 @@ def create_network(multiplier, network_dim, network_alpha, vae, text_encoder, un
conv_alpha = 1.0
else:
conv_alpha = float(conv_alpha)
+
if unit is not None:
unit = int(unit)
else:
@@ -306,8 +317,22 @@ def create_modules(is_unet, root_module: torch.nn.Module, target_replace_modules
lora = module_class(lora_name, child_module, self.multiplier, dim, alpha, unit)
loras.append(lora)
return loras

+text_encoders = text_encoder if type(text_encoder) == list else [text_encoder]
+
+self.text_encoder_loras = []
+for i, text_encoder in enumerate(text_encoders):
+    if len(text_encoders) > 1:
+        index = i + 1
+        print(f"create LoRA for Text Encoder {index}")
+    else:
+        index = None
+        print(f"create LoRA for Text Encoder")
+
+    text_encoder_loras = create_modules(False, text_encoder, DyLoRANetwork.TEXT_ENCODER_TARGET_REPLACE_MODULE)
+    self.text_encoder_loras.extend(text_encoder_loras)
+
-self.text_encoder_loras = create_modules(False, text_encoder, DyLoRANetwork.TEXT_ENCODER_TARGET_REPLACE_MODULE)
+# self.text_encoder_loras = create_modules(False, text_encoder, DyLoRANetwork.TEXT_ENCODER_TARGET_REPLACE_MODULE)
print(f"create LoRA for Text Encoder: {len(self.text_encoder_loras)} modules.")

# extend U-Net target modules if conv2d 3x3 is enabled, or load from weights
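For context, here is a minimal, self-contained sketch (not part of the commit) of the pattern the patched __init__ now uses: a single text encoder or a list of encoders (e.g. SDXL's two) is normalized into a list, and LoRA modules are created per encoder. The helper name make_loras_for is a hypothetical placeholder standing in for dylora.py's create_modules.

from typing import List, Union


def make_loras_for(encoder) -> List[str]:
    # Hypothetical stand-in for create_modules(); the real function walks the
    # encoder's submodules and wraps target layers. Here we just return a label.
    return [f"lora_for_{type(encoder).__name__}"]


def collect_text_encoder_loras(text_encoder: Union[object, List[object]]) -> List[str]:
    # Same normalization as the patch: a bare encoder becomes a one-element list.
    text_encoders = text_encoder if isinstance(text_encoder, list) else [text_encoder]

    loras: List[str] = []
    for i, enc in enumerate(text_encoders):
        if len(text_encoders) > 1:
            print(f"create LoRA for Text Encoder {i + 1}")
        else:
            print("create LoRA for Text Encoder")
        loras.extend(make_loras_for(enc))
    return loras


class DummyEncoder:
    pass


# One encoder (SD 1.x/2.x style) vs. two encoders (SDXL style).
print(collect_text_encoder_loras(DummyEncoder()))
print(collect_text_encoder_loras([DummyEncoder(), DummyEncoder()]))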
