Organize
oobabooga committed Oct 23, 2023
1 parent 49e5eec commit 280ae72
Showing 1 changed file with 9 additions and 8 deletions.
modules/LoRA.py (17 changes: 9 additions & 8 deletions)
@@ -8,14 +8,6 @@
 from modules.models import reload_model
 
 
-def merge_loras():
-    if len(list({shared.model.peft_config[adapter].r for adapter in shared.model.peft_config.keys()})) > 1:
-        logger.warning("The loaded LoRAs cannot be merged, as they have dissimilar ranks. Only the first one will be active.")
-        return
-
-    shared.model.add_weighted_adapter(shared.lora_names, [1] * len(shared.lora_names), "__merged")
-    shared.model.set_adapter("__merged")
-
 def add_lora_to_model(lora_names):
     if 'GPTQForCausalLM' in shared.model.__class__.__name__ or shared.args.loader == 'AutoGPTQ':
         add_lora_autogptq(lora_names)
@@ -189,3 +181,12 @@ def add_lora_transformers(lora_names):
             shared.model = shared.model.to(device)
         else:
             shared.model = shared.model.cuda()
+
+
+def merge_loras():
+    if len(list({shared.model.peft_config[adapter].r for adapter in shared.model.peft_config.keys()})) > 1:
+        logger.warning("The loaded LoRAs cannot be merged, as they have dissimilar ranks. Only the first one will be active.")
+        return
+
+    shared.model.add_weighted_adapter(shared.lora_names, [1] * len(shared.lora_names), "__merged")
+    shared.model.set_adapter("__merged")
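
For reference, the relocated merge_loras() combines all loaded LoRAs into a single equal-weight adapter through PEFT, and skips the merge when the adapters have dissimilar ranks. Below is a minimal standalone sketch of that flow outside the webui; the base model path, adapter paths, and adapter names ("lora_a", "lora_b") are hypothetical placeholders, not part of this commit.

# A minimal sketch (not from the repository) of the same PEFT calls that
# merge_loras() makes, assuming two LoRA adapters of equal rank.
from peft import PeftModel
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("base-model")  # hypothetical base model
model = PeftModel.from_pretrained(base, "loras/a", adapter_name="lora_a")  # hypothetical path/name
model.load_adapter("loras/b", adapter_name="lora_b")  # hypothetical path/name

ranks = {model.peft_config[name].r for name in model.peft_config}
if len(ranks) > 1:
    # Same guard as merge_loras(): adapters of dissimilar rank are not merged.
    print("The loaded LoRAs cannot be merged, as they have dissimilar ranks.")
else:
    # Combine both adapters with equal weights into a new "__merged" adapter and activate it.
    model.add_weighted_adapter(["lora_a", "lora_b"], [1, 1], "__merged")
    model.set_adapter("__merged")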
