From ef5dac7786916dd39711edb2b8e90ce96ef78fca Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Mon, 17 Jul 2023 00:01:17 +0300
Subject: [PATCH] fix

---
 extensions-builtin/Lora/network_hada.py | 3 ---
 extensions-builtin/Lora/networks.py     | 1 +
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/extensions-builtin/Lora/network_hada.py b/extensions-builtin/Lora/network_hada.py
index 15e7ffd8..799bb3bc 100644
--- a/extensions-builtin/Lora/network_hada.py
+++ b/extensions-builtin/Lora/network_hada.py
@@ -27,9 +27,6 @@ class NetworkModuleHada(network_lyco.NetworkModuleLyco):
         self.t1 = weights.w.get("hada_t1")
         self.t2 = weights.w.get("hada_t2")
 
-        self.alpha = weights.w["alpha"].item() if "alpha" in weights.w else None
-        self.scale = weights.w["scale"].item() if "scale" in weights.w else None
-
     def calc_updown(self, orig_weight):
         w1a = self.w1a.to(orig_weight.device, dtype=orig_weight.dtype)
         w1b = self.w1b.to(orig_weight.device, dtype=orig_weight.dtype)
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 5b0ddfb6..90374faa 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -271,6 +271,7 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
                 updown = torch.nn.functional.pad(updown, (0, 0, 0, 0, 0, 5))
 
                 self.weight += updown
+                continue
 
         module_q = net.modules.get(network_layer_name + "_q_proj", None)
         module_k = net.modules.get(network_layer_name + "_k_proj", None)
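
For context, a minimal, self-contained sketch of the control-flow bug the added
`continue` fixes. This is illustrative only, not the webui source: the names
`full_layer_updates` and `apply_weights` are made up, and the deleted
`alpha`/`scale` reads are assumed to be redundant duplicates of reads already
done in the `NetworkModuleLyco` base class. Without the `continue`, a network
whose whole-layer update was already applied via `calc_updown` falls through
into the per-projection q/k/v branch of the same loop iteration.

    # Illustrative sketch only -- hypothetical names, not the webui source.
    # Models the loop in network_apply_weights() to show why the added
    # `continue` matters.

    loaded_networks = ["lora_a", "lora_b"]   # hypothetical stand-ins
    full_layer_updates = {"lora_a": 0.5}     # only lora_a has a full-layer module

    def apply_weights(weight, networks, use_continue):
        log = []
        for net in networks:
            updown = full_layer_updates.get(net)  # like net.modules.get(layer_name)
            if updown is not None:
                weight += updown                  # like self.weight += updown
                log.append(f"{net}: full-layer update applied")
                if use_continue:
                    continue                      # the one-line fix in the patch
            # without `continue`, lora_a wrongly reaches this branch too
            log.append(f"{net}: q/k/v projection path")
        return weight, log

    print(apply_weights(1.0, loaded_networks, use_continue=False)[1])
    # ['lora_a: full-layer update applied', 'lora_a: q/k/v projection path',
    #  'lora_b: q/k/v projection path']
    print(apply_weights(1.0, loaded_networks, use_continue=True)[1])
    # ['lora_a: full-layer update applied', 'lora_b: q/k/v projection path']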