Mirror of https://github.com/AUTOMATIC1111/stable-diffusion-webui.git (synced 2024-06-07 21:20:49 +00:00)
Fix VRAM issue by only loading the hypernetwork when it is selected in settings

commit 122d42687b (parent e00b4df7c6)
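In short: before this commit, every *.pt file under models/hypernetworks was deserialized into a Hypernetwork object at startup and kept resident whether or not one was selected, which wasted VRAM. Afterwards, startup only builds a name-to-path map, and the selected hypernetwork is loaded on demand. A minimal sketch of the pattern, with illustrative names (list_checkpoints, load_selected) and torch.load standing in for the real Hypernetwork constructor:

import glob
import os

import torch


def list_checkpoints(path):
    # Startup: map name -> file path; nothing is deserialized yet.
    return {
        os.path.splitext(os.path.basename(f))[0]: f
        for f in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True)
    }


loaded = None  # plays the role of shared.loaded_hypernetwork


def load_selected(name, checkpoints):
    # Deserialize a single checkpoint only when the user selects it.
    global loaded
    path = checkpoints.get(name)
    loaded = torch.load(path, map_location='cpu') if path is not None else None

The trade-off is a one-off load whenever the setting changes (wired up via the onchange hook in webui.py below) instead of a resident copy of every checkpoint.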
modules/hypernetwork.py
@@ -40,27 +40,34 @@ class Hypernetwork:
         self.layers[size] = (HypernetworkModule(size, sd[0]), HypernetworkModule(size, sd[1]))
 
 
-def load_hypernetworks(path):
+def list_hypernetworks(path):
     res = {}
 
     for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
-        try:
-            hn = Hypernetwork(filename)
-            res[hn.name] = hn
-        except Exception:
-            print(f"Error loading hypernetwork {filename}", file=sys.stderr)
-            print(traceback.format_exc(), file=sys.stderr)
+        name = os.path.splitext(os.path.basename(filename))[0]
+        res[name] = filename
 
     return res
 
 
+def load_hypernetwork(filename):
+    print(f"Loading hypernetwork {filename}")
+    path = shared.hypernetworks.get(filename, None)
+    if (path is not None):
+        try:
+            shared.loaded_hypernetwork = Hypernetwork(path)
+        except Exception:
+            print(f"Error loading hypernetwork {path}", file=sys.stderr)
+            print(traceback.format_exc(), file=sys.stderr)
+    else:
+        shared.loaded_hypernetwork = None
+
+
 def attention_CrossAttention_forward(self, x, context=None, mask=None):
     h = self.heads
 
     q = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
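For context on this and the hunks that follow: hypernetwork.layers is a dict keyed by the attention context width (context.shape[2]), and each value is a pair of modules applied to the context before the k and v projections. A toy sketch of that lookup, with plain nn.Linear standing in for HypernetworkModule and made-up dimensions:

import torch
import torch.nn as nn

# Keyed by context embedding width, as in hypernetwork.layers.
layers = {768: (nn.Linear(768, 768), nn.Linear(768, 768))}

context = torch.randn(1, 77, 768)  # (batch, tokens, dim)
pair = (layers if layers is not None else {}).get(context.shape[2], None)
if pair is not None:
    k_context = pair[0](context)  # what self.to_k would receive
    v_context = pair[1](context)  # what self.to_v would receive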
modules/sd_hijack_optimizations.py
@@ -28,7 +28,7 @@ def split_cross_attention_forward_v1(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
@@ -68,7 +68,7 @@ def split_cross_attention_forward(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
@@ -132,7 +132,7 @@ def xformers_attention_forward(self, x, context=None, mask=None):
     h = self.heads
     q_in = self.to_q(x)
     context = default(context, x)
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
     if hypernetwork_layers is not None:
         k_in = self.to_k(hypernetwork_layers[0](context))
modules/shared.py
@@ -79,11 +79,8 @@ parallel_processing_allowed = not cmd_opts.lowvram and not cmd_opts.medvram
 xformers_available = False
 config_filename = cmd_opts.ui_settings_file
 
-hypernetworks = hypernetwork.load_hypernetworks(os.path.join(models_path, 'hypernetworks'))
-
-
-def selected_hypernetwork():
-    return hypernetworks.get(opts.sd_hypernetwork, None)
+hypernetworks = hypernetwork.list_hypernetworks(os.path.join(models_path, 'hypernetworks'))
+loaded_hypernetwork = None
 
 
 class State:
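Worth noting: since shared.hypernetworks now maps names to file paths rather than loaded objects, the old selected_hypernetwork() helper would have returned a path, not a Hypernetwork; it is therefore removed, and all call sites (the attention hunks above) read shared.loaded_hypernetwork directly.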
webui.py
@@ -82,6 +82,9 @@ modules.scripts.load_scripts(os.path.join(script_path, "scripts"))
 shared.sd_model = modules.sd_models.load_model()
 shared.opts.onchange("sd_model_checkpoint", wrap_queued_call(lambda: modules.sd_models.reload_model_weights(shared.sd_model)))
 
+loaded_hypernetwork = modules.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)
+shared.opts.onchange("sd_hypernetwork", wrap_queued_call(lambda: modules.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)))
+
 
 def webui():
     # make the program just exit at ctrl+c without waiting for anything
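One wrinkle: load_hypernetwork() has no return statement, so the loaded_hypernetwork name bound here is always None and effectively unused; the real state lives in shared.loaded_hypernetwork, which both this startup call and the sd_hypernetwork onchange handler update.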