forked from AUTOMATIC1111/stable-diffusion-webui
Commit 855b9e3 (1 parent: cbfb463)
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

update readme to reflect some recent changes

Showing 9 changed files with 314 additions and 4 deletions.
@@ -0,0 +1,20 @@
from modules import extra_networks
import lora


class ExtraNetworkLora(extra_networks.ExtraNetwork):
    def __init__(self):
        super().__init__('lora')

    def activate(self, p, params_list):
        names = []
        multipliers = []
        for params in params_list:
            assert len(params.items) > 0

            names.append(params.items[0])
            multipliers.append(float(params.items[1]) if len(params.items) > 1 else 1.0)

        lora.load_loras(names, multipliers)

    def deactivate(self, p):
        pass
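As a point of reference (not part of the commit), a minimal sketch of how activate() above consumes the parsed prompt parameters. ExtraNetworkParams below is a hypothetical stand-in for the params objects the webui's extra-networks parser produces from tags such as <lora:myLora:0.7>; only the .items attribute matters here.

# Hypothetical stand-in for the parsed extra-network parameters.
class ExtraNetworkParams:
    def __init__(self, items):
        self.items = items


# e.g. a prompt containing "<lora:myLora:0.7> <lora:otherLora>"
params_list = [ExtraNetworkParams(["myLora", "0.7"]), ExtraNetworkParams(["otherLora"])]

names, multipliers = [], []
for params in params_list:
    names.append(params.items[0])                                              # first item is the Lora name
    multipliers.append(float(params.items[1]) if len(params.items) > 1 else 1.0)  # optional strength, default 1.0

print(names)        # ['myLora', 'otherLora']
print(multipliers)  # [0.7, 1.0]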
@@ -0,0 +1,198 @@
import glob
import os
import re
import torch

from modules import shared, devices, sd_models

re_digits = re.compile(r"\d+")
re_unet_down_blocks = re.compile(r"lora_unet_down_blocks_(\d+)_attentions_(\d+)_(.+)")
re_unet_mid_blocks = re.compile(r"lora_unet_mid_block_attentions_(\d+)_(.+)")
re_unet_up_blocks = re.compile(r"lora_unet_up_blocks_(\d+)_attentions_(\d+)_(.+)")
re_text_block = re.compile(r"lora_te_text_model_encoder_layers_(\d+)_(.+)")


def convert_diffusers_name_to_compvis(key):
    def match(match_list, regex):
        r = re.match(regex, key)
        if not r:
            return False

        match_list.clear()
        match_list.extend([int(x) if re.match(re_digits, x) else x for x in r.groups()])
        return True

    m = []

    if match(m, re_unet_down_blocks):
        return f"diffusion_model_input_blocks_{1 + m[0] * 3 + m[1]}_1_{m[2]}"

    if match(m, re_unet_mid_blocks):
        return f"diffusion_model_middle_block_1_{m[1]}"

    if match(m, re_unet_up_blocks):
        return f"diffusion_model_output_blocks_{m[0] * 3 + m[1]}_1_{m[2]}"

    if match(m, re_text_block):
        return f"transformer_text_model_encoder_layers_{m[0]}_{m[1]}"

    return key


class LoraOnDisk:
    def __init__(self, name, filename):
        self.name = name
        self.filename = filename


class LoraModule:
    def __init__(self, name):
        self.name = name
        self.multiplier = 1.0
        self.modules = {}
        self.mtime = None


class LoraUpDownModule:
    def __init__(self):
        self.up = None
        self.down = None


def assign_lora_names_to_compvis_modules(sd_model):
    lora_layer_mapping = {}

    for name, module in shared.sd_model.cond_stage_model.wrapped.named_modules():
        lora_name = name.replace(".", "_")
        lora_layer_mapping[lora_name] = module
        module.lora_layer_name = lora_name

    for name, module in shared.sd_model.model.named_modules():
        lora_name = name.replace(".", "_")
        lora_layer_mapping[lora_name] = module
        module.lora_layer_name = lora_name

    sd_model.lora_layer_mapping = lora_layer_mapping


def load_lora(name, filename):
    lora = LoraModule(name)
    lora.mtime = os.path.getmtime(filename)

    sd = sd_models.read_state_dict(filename)

    keys_failed_to_match = []

    for key_diffusers, weight in sd.items():
        fullkey = convert_diffusers_name_to_compvis(key_diffusers)
        key, lora_key = fullkey.split(".", 1)

        sd_module = shared.sd_model.lora_layer_mapping.get(key, None)
        if sd_module is None:
            keys_failed_to_match.append(key_diffusers)
            continue

        if type(sd_module) == torch.nn.Linear:
            module = torch.nn.Linear(weight.shape[1], weight.shape[0], bias=False)
        elif type(sd_module) == torch.nn.Conv2d:
            module = torch.nn.Conv2d(weight.shape[1], weight.shape[0], (1, 1), bias=False)
        else:
            assert False, f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}'

        with torch.no_grad():
            module.weight.copy_(weight)

        module.to(device=devices.device, dtype=devices.dtype)

        lora_module = lora.modules.get(key, None)
        if lora_module is None:
            lora_module = LoraUpDownModule()
            lora.modules[key] = lora_module

        if lora_key == "lora_up.weight":
            lora_module.up = module
        elif lora_key == "lora_down.weight":
            lora_module.down = module
        else:
            assert False, f'Bad Lora layer name: {key_diffusers} - must end in lora_up.weight or lora_down.weight'

    if len(keys_failed_to_match) > 0:
        print(f"Failed to match keys when loading Lora {filename}: {keys_failed_to_match}")

    return lora


def load_loras(names, multipliers=None):
    already_loaded = {}

    for lora in loaded_loras:
        if lora.name in names:
            already_loaded[lora.name] = lora

    loaded_loras.clear()

    loras_on_disk = [available_loras.get(name, None) for name in names]
    if any([x is None for x in loras_on_disk]):
        list_available_loras()

        loras_on_disk = [available_loras.get(name, None) for name in names]

    for i, name in enumerate(names):
        lora = already_loaded.get(name, None)

        lora_on_disk = loras_on_disk[i]
        if lora_on_disk is not None:
            if lora is None or os.path.getmtime(lora_on_disk.filename) > lora.mtime:
                lora = load_lora(name, lora_on_disk.filename)

        if lora is None:
            print(f"Couldn't find Lora with name {name}")
            continue

        lora.multiplier = multipliers[i] if multipliers else 1.0
        loaded_loras.append(lora)


def lora_forward(module, input, res):
    if len(loaded_loras) == 0:
        return res

    lora_layer_name = getattr(module, 'lora_layer_name', None)
    for lora in loaded_loras:
        module = lora.modules.get(lora_layer_name, None)
        if module is not None:
            res = res + module.up(module.down(input)) * lora.multiplier

    return res


def lora_Linear_forward(self, input):
    return lora_forward(self, input, torch.nn.Linear_forward_before_lora(self, input))


def lora_Conv2d_forward(self, input):
    return lora_forward(self, input, torch.nn.Conv2d_forward_before_lora(self, input))


def list_available_loras():
    available_loras.clear()

    os.makedirs(lora_dir, exist_ok=True)

    candidates = glob.glob(os.path.join(lora_dir, '**/*.pt'), recursive=True) + glob.glob(os.path.join(lora_dir, '**/*.safetensors'), recursive=True)

    for filename in sorted(candidates):
        if os.path.isdir(filename):
            continue

        name = os.path.splitext(os.path.basename(filename))[0]

        available_loras[name] = LoraOnDisk(name, filename)


lora_dir = os.path.join(shared.models_path, "Lora")
available_loras = {}
loaded_loras = []

list_available_loras()
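To illustrate convert_diffusers_name_to_compvis() and the key split in load_lora() above, here is what a key of the shape the down-blocks regex expects would map to; the concrete key is only an example, not taken from the commit.

# Hypothetical diffusers-style key of the form re_unet_down_blocks matches.
key_diffusers = "lora_unet_down_blocks_0_attentions_1_proj_in.lora_up.weight"

# The regex groups are (0, 1, "proj_in.lora_up.weight"), so the f-string gives
# "diffusion_model_input_blocks_{1 + 0 * 3 + 1}_1_..." :
fullkey = "diffusion_model_input_blocks_2_1_proj_in.lora_up.weight"

# load_lora() then splits once on "." to get the compvis layer name used for the
# lora_layer_mapping lookup, plus the up/down designation:
key, lora_key = fullkey.split(".", 1)
print(key)       # diffusion_model_input_blocks_2_1_proj_in
print(lora_key)  # lora_up.weight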
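The runtime effect of lora_forward() above is the usual low-rank update: the frozen layer's own output plus up(down(x)), scaled by the per-Lora multiplier. A small self-contained sketch with arbitrary shapes (not taken from the commit):

import torch

rank, n_in, n_out = 4, 8, 16
base = torch.nn.Linear(n_in, n_out)              # the original, unmodified layer
down = torch.nn.Linear(n_in, rank, bias=False)   # plays the role of LoraUpDownModule.down
up = torch.nn.Linear(rank, n_out, bias=False)    # plays the role of LoraUpDownModule.up
multiplier = 0.7

x = torch.randn(2, n_in)
res = base(x)                                    # what Linear_forward_before_lora would return
res = res + up(down(x)) * multiplier             # what lora_forward adds on top
print(res.shape)  # torch.Size([2, 16])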
@@ -0,0 +1,30 @@
import torch

import lora
import extra_networks_lora
import ui_extra_networks_lora
from modules import script_callbacks, ui_extra_networks, extra_networks


def unload():
    torch.nn.Linear.forward = torch.nn.Linear_forward_before_lora
    torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora


def before_ui():
    ui_extra_networks.register_page(ui_extra_networks_lora.ExtraNetworksPageLora())
    extra_networks.register_extra_network(extra_networks_lora.ExtraNetworkLora())


if not hasattr(torch.nn, 'Linear_forward_before_lora'):
    torch.nn.Linear_forward_before_lora = torch.nn.Linear.forward

if not hasattr(torch.nn, 'Conv2d_forward_before_lora'):
    torch.nn.Conv2d_forward_before_lora = torch.nn.Conv2d.forward

torch.nn.Linear.forward = lora.lora_Linear_forward
torch.nn.Conv2d.forward = lora.lora_Conv2d_forward

script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
script_callbacks.on_script_unloaded(unload)
script_callbacks.on_before_ui(before_ui)
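The script above installs its hooks by stashing the original forward methods on the torch.nn namespace before replacing them, so unload() can put everything back; the hasattr guards keep a script reload from overwriting the saved originals with already-patched versions. The same save/patch/restore pattern in isolation, on a toy class rather than torch (a sketch, not from the commit):

class Layer:
    def forward(self, x):
        return x * 2


def patched_forward(self, x):
    # call the saved original, then post-process its result
    return Layer.forward_before_patch(self, x) + 1


if not hasattr(Layer, 'forward_before_patch'):   # guard against double-patching on reload
    Layer.forward_before_patch = Layer.forward

Layer.forward = patched_forward
print(Layer().forward(3))                        # 7

Layer.forward = Layer.forward_before_patch       # what unload() does
print(Layer().forward(3))                        # 6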
@@ -0,0 +1,35 @@
import os
import lora

from modules import shared, ui_extra_networks


class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
    def __init__(self):
        super().__init__('Lora')

    def refresh(self):
        lora.list_available_loras()

    def list_items(self):
        for name, lora_on_disk in lora.available_loras.items():
            path, ext = os.path.splitext(lora_on_disk.filename)
            previews = [path + ".png", path + ".preview.png"]

            preview = None
            for file in previews:
                if os.path.isfile(file):
                    preview = "./file=" + file.replace('\\', '/') + "?mtime=" + str(os.path.getmtime(file))
                    break

            yield {
                "name": name,
                "filename": path,
                "preview": preview,
                "prompt": f"<lora:{name}:1.0>",
                "local_preview": path + ".png",
            }

    def allowed_directories_for_previews(self):
        return [lora.lora_dir]
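For a hypothetical models/Lora/myLora.safetensors with a myLora.preview.png next to it (assuming shared.models_path is "models"), list_items() above would yield roughly the entry below; the mtime query value is simply whatever os.path.getmtime() returns for the preview file.

item = {
    "name": "myLora",
    "filename": "models/Lora/myLora",                                        # extension stripped by os.path.splitext
    "preview": "./file=models/Lora/myLora.preview.png?mtime=1674000000.0",   # hypothetical mtime
    "prompt": "<lora:myLora:1.0>",
    "local_preview": "models/Lora/myLora.png",
}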