-
Notifications
You must be signed in to change notification settings - Fork 8
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
MMqd
committed
Jul 23, 2023
1 parent
84e21d4
commit 92ab7ea
Showing
5 changed files
with
158 additions
and
10 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
from modules import errors | ||
try: | ||
from diffusers import IFPipeline | ||
except ImportError as e: | ||
errors.print_error_explanation('RESTART AUTOMATIC1111 COMPLETELY TO FINISH INSTALLING PACKAGES FOR kandinsky-for-automatic1111') | ||
|
||
|
||
import os | ||
import gc | ||
import torch | ||
import numpy as np | ||
from PIL import Image, ImageOps, ImageFilter | ||
from packaging import version | ||
from modules import processing, shared, script_callbacks, images, devices, scripts, masking, sd_models, generation_parameters_copypaste, sd_vae#, sd_samplers | ||
from modules.processing import Processed, StableDiffusionProcessing | ||
from modules.shared import opts, state | ||
from modules.sd_models import CheckpointInfo | ||
from modules.paths_internal import script_path | ||
|
||
import sys | ||
sys.path.append('extensions/kandinsky-for-automatic1111/scripts') | ||
from abstract_model import AbstractModel | ||
#import pdb | ||
|
||
class IFModel(AbstractModel):
    """DeepFloyd IF wrapper implementing the extension's AbstractModel interface.

    Only txt2img is implemented; img2img and inpaint are stubs that return None.
    """

    # Lazily-created diffusers IFPipeline; class-level so one pipeline
    # instance can be reused across generations.
    pipe = None

    def __init__(self):
        # Cache downloaded weights under <webui>/models/IF.
        # os.path.join is variadic — no need to nest calls.
        self.cache_dir = os.path.join(script_path, 'models', 'IF')

    def load_encoder(self):
        # IF's text encoder lives inside the main pipeline; nothing to load here.
        pass

    def run_encoder(self, prior_settings_dict):
        """Encode the prompt (and optional negative prompt).

        Args:
            prior_settings_dict: mapping with a required "prompt" key and an
                optional "negative_prompt" key.

        Returns:
            The tuple form of the embeddings produced by
            ``IFPipeline.encode_prompt``.
        """
        kwargs = {"prompt": prior_settings_dict["prompt"]}
        negative_prompt = prior_settings_dict.get("negative_prompt")
        # Only forward negative_prompt when the caller supplied one, so the
        # pipeline's own default handling applies otherwise.
        if negative_prompt is not None:
            kwargs["negative_prompt"] = negative_prompt
        return self.pipe.encode_prompt(**kwargs).to_tuple()

    def encoder_to_cpu(self):
        # No separate encoder offload step for IF; no-op.
        pass

    def main_model_to_cpu(self):
        # No prior/decoder split for IF; no-op.
        pass

    def cleanup_on_error(self):
        # Nothing extra to tear down on failure.
        pass

    def txt2img(self, p, generation_parameters, b):
        """Generate images from text with the stage-I IF pipeline.

        Loads (or reuses, via load_pipeline's caching) the DeepFloyd stage-I
        checkpoint and returns the list of PIL images.
        """
        self.pipe = self.load_pipeline("pipe", IFPipeline, "DeepFloyd/IF-I-XL-v1.0")
        result_images = self.pipe(**generation_parameters, num_images_per_prompt=p.batch_size).images
        return result_images

    def img2img(self, p, generation_parameters, b):
        # img2img is not supported for IF yet.
        pass

    def inpaint(self, p, generation_parameters, b):
        # Inpainting is not supported for IF yet.
        pass
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,79 @@ | ||
import sys | ||
import torch | ||
import gradio as gr | ||
from modules import processing, shared, script_callbacks, scripts | ||
from modules.processing import Processed | ||
#import pkg_resources | ||
#import pdb | ||
|
||
sys.path.append('extensions/kandinsky-for-automatic1111/scripts') | ||
from deepfloydif import * | ||
|
||
def unload_model():
    """Swap the active Stable Diffusion model out for an IFModel placeholder.

    Frees checkpoint weights and the VAE, then reclaims GPU memory, so that
    DeepFloyd IF can run with more available VRAM.
    """
    current = shared.sd_model
    if current is None:
        # Nothing is loaded — just install the IF placeholder.
        shared.sd_model = IFModel()
        print("Unloaded Stable Diffusion model")
    elif not isinstance(current, IFModel):
        # A real SD checkpoint is active: drop its weights and VAE,
        # then run all the collectors to actually release VRAM.
        sd_models.unload_model_weights()
        sd_vae.clear_loaded_vae()
        devices.torch_gc()
        gc.collect()
        torch.cuda.empty_cache()
        shared.sd_model = IFModel()
|
||
def reload_model():
    """Restore the Stable Diffusion checkpoint after it was unloaded.

    Only acts when the current model slot holds the IFModel placeholder
    (or nothing at all); a genuinely loaded SD model is left untouched.
    """
    placeholder_active = shared.sd_model is None or isinstance(shared.sd_model, IFModel)
    if placeholder_active:
        # Clear the slot first so reload_model_weights loads from scratch,
        # then reclaim any memory freed along the way.
        shared.sd_model = None
        sd_models.reload_model_weights()
        devices.torch_gc()
        gc.collect()
        torch.cuda.empty_cache()
|
||
def unload_if_model():
    """Free the cached DeepFloyd IF pipeline and reclaim GPU memory.

    Safe to call even before any IF model has been created: the original
    code dereferenced ``shared.if_model.pipe`` before checking whether
    ``shared.if_model`` existed, raising AttributeError when it was absent
    or None and making the later None-check unreachable. Guard first.
    """
    if_model = getattr(shared, 'if_model', None)
    if if_model is None:
        print("Unloaded IF model")
        return

    if if_model.pipe is not None:
        # Drop the pipeline reference, then run the collectors to
        # actually release VRAM.
        del if_model.pipe
        devices.torch_gc()
        gc.collect()
        torch.cuda.empty_cache()

    del shared.if_model

    print("Unloaded IF model")
|
||
class Script(scripts.Script):
    """AUTOMATIC1111 script that routes generation through DeepFloyd IF."""

    def title(self):
        return "IF"

    def ui(self, is_img2img):
        """Build the script's gradio controls; returns the (empty) input list."""
        gr.Markdown("To save VRAM unload the Stable Diffusion Model")

        btn_unload_sd = gr.Button("Unload Stable Diffusion Model")
        btn_unload_sd.click(unload_model)
        btn_reload_sd = gr.Button("Reload Stable Diffusion Model")
        btn_reload_sd.click(reload_model)

        btn_unload_if = gr.Button("Unload IF Model")
        btn_unload_if.click(unload_if_model)
        with gr.Row():
            # NOTE(review): bare references inside a Blocks context do not
            # re-render components — this row looks like a no-op; confirm.
            btn_unload_sd
            btn_reload_sd
            btn_unload_if

        return []

    def run(self, p) -> Processed:
        # IF uses DDPM sampling; tag the output with this script's name.
        p.sampler_name = "DDPM"
        p.init_image = getattr(p, 'init_images', None)
        p.extra_generation_params["Script"] = self.title()

        # Create the shared IF model lazily and reuse it between runs.
        shared.if_model = getattr(shared, 'if_model', None)

        if shared.if_model is None:
            shared.if_model = IFModel()

        return shared.if_model.process_images(p)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters