From dd098c8f86e422c1a541a0dbf930d7219b0f98bc Mon Sep 17 00:00:00 2001
From: Batuhan Taskaya
Date: Mon, 11 Sep 2023 19:58:26 +0300
Subject: [PATCH] Avoid unnecessary copies when loading LoRAs

---
 src/diffusers/loaders.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/diffusers/loaders.py b/src/diffusers/loaders.py
index 1de899cad927..d3f946529d12 100644
--- a/src/diffusers/loaders.py
+++ b/src/diffusers/loaders.py
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import copy
 import os
 import re
 import warnings
@@ -381,8 +380,7 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict
                 # Create another `mapped_network_alphas` dictionary so that we can properly map them.
                 if network_alphas is not None:
-                    network_alphas_ = copy.deepcopy(network_alphas)
-                    for k in network_alphas_:
+                    for k in list(network_alphas.keys()):
                         if k.replace(".alpha", "") in key:
                             mapped_network_alphas.update({attn_processor_key: network_alphas.pop(k)})
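
Note (not part of the patch): the `copy.deepcopy` was only there so the dictionary could be
mutated with `pop` while iterating; snapshotting just the keys with `list(network_alphas.keys())`
achieves the same safety without duplicating the values. The sketch below illustrates this with
made-up placeholder values for `network_alphas`, `key`, and `attn_processor_key`; it is an
assumption-labeled example, not code from diffusers.

# Standalone illustration; the dictionary contents are made-up placeholders,
# not real LoRA network alphas.
network_alphas = {"down_blocks.0.attn1.alpha": 4.0, "mid_block.attn1.alpha": 8.0}
mapped_network_alphas = {}
key = "down_blocks.0.attn1"
attn_processor_key = "down_blocks.0.attn1.processor"

# Popping while iterating over the dict itself raises
# "RuntimeError: dictionary changed size during iteration":
#     for k in network_alphas:
#         network_alphas.pop(k)

# Iterating over a snapshot of the keys is a cheap, safe alternative to
# deep-copying the whole mapping:
for k in list(network_alphas.keys()):
    if k.replace(".alpha", "") in key:
        mapped_network_alphas.update({attn_processor_key: network_alphas.pop(k)})

print(mapped_network_alphas)  # {'down_blocks.0.attn1.processor': 4.0}
print(network_alphas)         # {'mid_block.attn1.alpha': 8.0}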