From b2ae4490b9c225ff020941bcbf36c8975760deba Mon Sep 17 00:00:00 2001
From: huchenlei
Date: Thu, 16 May 2024 14:45:00 -0400
Subject: [PATCH] Fix LoRA bias error

---
 extensions-builtin/Lora/networks.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 360455f87..aee4e9d9c 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -379,15 +379,17 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
 
     bias_backup = getattr(self, "network_bias_backup", None)
     if bias_backup is None and wanted_names != ():
-        if current_names != ():
-            raise RuntimeError("no backup bias found and current bias are not unchanged")
-
         if isinstance(self, torch.nn.MultiheadAttention) and self.out_proj.bias is not None:
             bias_backup = self.out_proj.bias.to(devices.cpu, copy=True)
        elif getattr(self, 'bias', None) is not None:
             bias_backup = self.bias.to(devices.cpu, copy=True)
         else:
             bias_backup = None
+
+        # Unlike weight which always has value, some modules don't have bias.
+        # Only report if bias is not None and current bias are not unchanged.
+        if bias_backup is not None and current_names != ():
+            raise RuntimeError("no backup bias found and current bias are not unchanged")
         self.network_bias_backup = bias_backup
 
     if current_names != wanted_names:
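
The behavioural change can be seen in a standalone sketch, separate from the patch itself: the pre-patch code raised the RuntimeError for any module whose backup was missing while a network was already applied, even when the module has no bias at all (e.g. a layer built with bias=False), whereas the patched code only raises when there really is a bias that was never backed up. In the sketch below, backup_bias, old_behaviour, and the two name tuples are illustrative stand-ins for the bookkeeping in networks.py, and torch.device("cpu") stands in for devices.cpu.

import torch

def backup_bias(module, current_names, wanted_names, old_behaviour=False):
    # Sketch of the bias-backup logic: take a CPU copy of the bias, if the module has one.
    bias_backup = getattr(module, "network_bias_backup", None)
    if bias_backup is None and wanted_names != ():
        if old_behaviour and current_names != ():
            # Pre-patch: raised even for modules that have no bias to back up.
            raise RuntimeError("no backup bias found and current bias are not unchanged")
        if getattr(module, "bias", None) is not None:
            bias_backup = module.bias.to(torch.device("cpu"), copy=True)
        else:
            bias_backup = None
        # Post-patch: only raise when a real bias exists and was never backed up.
        if not old_behaviour and bias_backup is not None and current_names != ():
            raise RuntimeError("no backup bias found and current bias are not unchanged")
        module.network_bias_backup = bias_backup
    return bias_backup

# A Linear layer created with bias=False has no bias, so the patched check passes quietly...
backup_bias(torch.nn.Linear(4, 4, bias=False), ("some_lora", 1.0), ("other_lora", 1.0))

# ...while the pre-patch check raised a spurious error for the same module.
try:
    backup_bias(torch.nn.Linear(4, 4, bias=False), ("some_lora", 1.0), ("other_lora", 1.0), old_behaviour=True)
except RuntimeError as exc:
    print("pre-patch behaviour:", exc)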