From 9d7390d2d19a8baf04ee4ebe598b96ac6ba7f97e Mon Sep 17 00:00:00 2001
From: camenduru <54370274+camenduru@users.noreply.github.com>
Date: Mon, 27 Mar 2023 04:28:40 +0300
Subject: [PATCH] convert to python v3.9

---
 extensions-builtin/Lora/lora.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index edd95f786..79d11e0e7 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -2,6 +2,7 @@ import glob
 import os
 import re
 import torch
+from typing import Union
 
 from modules import shared, devices, sd_models, errors
 
@@ -235,7 +236,7 @@ def lora_calc_updown(lora, module, target):
     return updown
 
 
-def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.MultiheadAttention):
+def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
     """
     Applies the currently selected set of Loras to the weights of torch layer self.
     If weights already have this particular set of loras applied, does nothing.