fix: lora-bias-backup doesn't reset cache

dongwenpu 2023-09-10 17:53:42 +08:00
parent 924642331b
commit 7d4d871d46


@@ -418,6 +418,7 @@ def network_forward(module, input, original_forward):
 def network_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
     self.network_current_names = ()
     self.network_weights_backup = None
+    self.network_bias_backup = None


 def network_Linear_forward(self, input):
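
For context, here is a minimal standalone sketch of the backup/restore caching pattern this reset function supports. The helper names (reset_cached_weight, backup_and_patch, restore_from_backup) and the delta parameters are hypothetical and only illustrate the issue; the attribute names network_current_names, network_weights_backup, and network_bias_backup come from the diff above. Without the added line, a stale network_bias_backup survives the reset and a later restore can write back an outdated bias.

import torch

def reset_cached_weight(module: torch.nn.Linear):
    # Mirrors the fixed reset: clear the cached names and BOTH backups.
    # Clearing only the weight backup (the pre-fix behaviour) would leave
    # a stale bias backup behind.
    module.network_current_names = ()
    module.network_weights_backup = None
    module.network_bias_backup = None  # the line this commit adds

def backup_and_patch(module: torch.nn.Linear, delta_w: torch.Tensor, delta_b: torch.Tensor):
    # Hypothetical illustration of the caching pattern: keep a copy of the
    # original parameters before patching them, so they can be restored later.
    if getattr(module, "network_weights_backup", None) is None:
        module.network_weights_backup = module.weight.detach().clone()
    if module.bias is not None and getattr(module, "network_bias_backup", None) is None:
        module.network_bias_backup = module.bias.detach().clone()
    with torch.no_grad():
        module.weight += delta_w
        if module.bias is not None:
            module.bias += delta_b

def restore_from_backup(module: torch.nn.Linear):
    # Restore the original parameters from whichever backups are present.
    with torch.no_grad():
        if getattr(module, "network_weights_backup", None) is not None:
            module.weight.copy_(module.network_weights_backup)
        if getattr(module, "network_bias_backup", None) is not None:
            module.bias.copy_(module.network_bias_backup)

if __name__ == "__main__":
    lin = torch.nn.Linear(4, 4)
    backup_and_patch(lin, torch.full((4, 4), 0.1), torch.full((4,), 0.1))
    restore_from_backup(lin)
    reset_cached_weight(lin)  # both backups cleared; the next patch takes fresh copies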