43 changes: 39 additions & 4 deletions src/diffusers/loaders/lora_base.py
@@ -14,6 +14,7 @@

import copy
import inspect
import json
import os
from pathlib import Path
from typing import Callable, Dict, List, Optional, Union
@@ -26,6 +27,7 @@

from ..models.modeling_utils import ModelMixin, load_state_dict
from ..utils import (
SAFETENSORS_FILE_EXTENSION,
USE_PEFT_BACKEND,
_get_model_file,
delete_adapter_layers,
@@ -44,6 +46,7 @@
from transformers import PreTrainedModel

if is_peft_available():
from peft import LoraConfig
from peft.tuners.tuners_utils import BaseTunerLayer

if is_accelerate_available():
@@ -252,6 +255,7 @@ def _fetch_state_dict(
from .lora_pipeline import LORA_WEIGHT_NAME, LORA_WEIGHT_NAME_SAFE

model_file = None
metadata = None
if not isinstance(pretrained_model_name_or_path_or_dict, dict):
# Let's first try to load .safetensors weights
if (use_safetensors and weight_name is None) or (
@@ -280,6 +284,8 @@ def _fetch_state_dict(
user_agent=user_agent,
)
state_dict = safetensors.torch.load_file(model_file, device="cpu")
with safetensors.safe_open(model_file, framework="pt", device="cpu") as f:
metadata = f.metadata()
except (IOError, safetensors.SafetensorError) as e:
if not allow_pickle:
raise e
@@ -305,10 +311,14 @@
user_agent=user_agent,
)
state_dict = load_state_dict(model_file)
file_extension = os.path.basename(model_file).split(".")[-1]
if file_extension == SAFETENSORS_FILE_EXTENSION:
with safetensors.safe_open(model_file, framework="pt", device="cpu") as f:
metadata = f.metadata()
Comment on lines +315 to +317

Contributor: Will we have any way or desire to warn users loading a LoRA from a .pth file that we can't scale it?

Member (Author): We currently error out when trying to save metadata to a .pth file (or more generally, when use_safetensors is False).

Contributor: Good! I never use them, but I was concerned for others.

else:
state_dict = pretrained_model_name_or_path_or_dict

return state_dict
return state_dict, metadata

@classmethod
def _best_guess_weight_name(
@@ -709,6 +719,20 @@ def pack_weights(layers, prefix):
layers_state_dict = {f"{prefix}.{module_name}": param for module_name, param in layers_weights.items()}
return layers_state_dict

@staticmethod
def pack_metadata(config, prefix):
local_metadata = {}
if config is not None:
if isinstance(config, LoraConfig):
config = config.to_dict()
for key, value in config.items():
if isinstance(value, set):
config[key] = list(value)

config_as_string = json.dumps(config, indent=2, sort_keys=True)
local_metadata[prefix] = config_as_string
return local_metadata

@staticmethod
def write_lora_layers(
state_dict: Dict[str, torch.Tensor],
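As a rough sketch of what the new pack_metadata helper produces (assuming peft is installed; the "transformer" prefix and config values here are made up for illustration):

```python
import json
from peft import LoraConfig

# Illustrative adapter config; real values come from the trained adapter.
config = LoraConfig(r=8, lora_alpha=16, target_modules=["to_q", "to_k"])
config_dict = config.to_dict()

# peft stores target_modules as a set, which json.dumps cannot serialize,
# hence the set -> list conversion in pack_metadata.
for key, value in config_dict.items():
    if isinstance(value, set):
        config_dict[key] = list(value)

# The JSON-serialized config is keyed by its prefix ("transformer" is hypothetical here).
metadata = {"transformer": json.dumps(config_dict, indent=2, sort_keys=True)}
```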
@@ -717,18 +741,26 @@ def write_lora_layers(
weight_name: str,
save_function: Callable,
safe_serialization: bool,
metadata=None,
):
from .lora_pipeline import LORA_WEIGHT_NAME, LORA_WEIGHT_NAME_SAFE

if not safe_serialization and isinstance(metadata, dict) and len(metadata) > 0:
raise ValueError("Passing `metadata` is not possible when `safe_serialization` is False.")

if os.path.isfile(save_directory):
logger.error(f"Provided path ({save_directory}) should be a directory, not a file")
return

if save_function is None:
if safe_serialization:

def save_function(weights, filename):
return safetensors.torch.save_file(weights, filename, metadata={"format": "pt"})
def save_function(weights, filename, metadata):
if metadata is None:
metadata = {"format": "pt"}
elif len(metadata) > 0:
metadata.update({"format": "pt"})
return safetensors.torch.save_file(weights, filename, metadata=metadata)

else:
save_function = torch.save
@@ -742,7 +774,10 @@ def save_function(weights, filename):
weight_name = LORA_WEIGHT_NAME

save_path = Path(save_directory, weight_name).as_posix()
save_function(state_dict, save_path)
if save_function != torch.save:
save_function(state_dict, save_path, metadata)
else:
save_function(state_dict, save_path)
logger.info(f"Model weights saved in {save_path}")

@property
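Putting the save path together, a hedged end-to-end sketch of what write_lora_layers now does when safe_serialization is True (the tensor name, file path, and adapter config are illustrative, not from this PR):

```python
import json
import torch
import safetensors.torch

# Illustrative LoRA state dict and packed metadata.
state_dict = {"unet.to_q.lora_A.weight": torch.zeros(4, 16)}
metadata = {"unet": json.dumps({"r": 4, "lora_alpha": 4})}

# safetensors metadata values must be strings; the "format" marker is kept
# alongside the serialized adapter config, mirroring the updated save_function.
metadata.update({"format": "pt"})
safetensors.torch.save_file(state_dict, "pytorch_lora_weights.safetensors", metadata=metadata)
```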