From 32ea17d408dc5bb3e6670749a5144c851343a947 Mon Sep 17 00:00:00 2001 From: sayakpaul Date: Thu, 20 Jun 2024 16:38:36 +0530 Subject: [PATCH 1/3] remove resume_download --- examples/community/checkpoint_merger.py | 3 --- examples/community/ip_adapter_face_id.py | 2 -- examples/community/stable_diffusion_tensorrt_img2img.py | 2 -- examples/community/stable_diffusion_tensorrt_inpaint.py | 2 -- examples/community/stable_diffusion_tensorrt_txt2img.py | 2 -- src/diffusers/configuration_utils.py | 2 -- src/diffusers/loaders/autoencoder.py | 2 -- src/diffusers/loaders/controlnet.py | 2 -- src/diffusers/loaders/ip_adapter.py | 2 -- src/diffusers/loaders/lora.py | 6 ------ src/diffusers/loaders/single_file.py | 5 ----- src/diffusers/loaders/single_file_model.py | 2 -- src/diffusers/loaders/single_file_utils.py | 1 - src/diffusers/loaders/textual_inversion.py | 3 --- src/diffusers/loaders/unet.py | 3 --- src/diffusers/models/model_loading_utils.py | 1 - src/diffusers/models/modeling_flax_utils.py | 3 --- src/diffusers/models/modeling_utils.py | 9 --------- src/diffusers/pipelines/auto_pipeline.py | 6 ------ src/diffusers/pipelines/pipeline_flax_utils.py | 3 --- src/diffusers/pipelines/pipeline_utils.py | 8 -------- src/diffusers/utils/dynamic_modules_utils.py | 3 --- src/diffusers/utils/hub_utils.py | 3 --- 23 files changed, 75 deletions(-) diff --git a/examples/community/checkpoint_merger.py b/examples/community/checkpoint_merger.py index f702bf0cea9b..0121f45c2efc 100644 --- a/examples/community/checkpoint_merger.py +++ b/examples/community/checkpoint_merger.py @@ -86,7 +86,6 @@ def merge(self, pretrained_model_name_or_path_list: List[Union[str, os.PathLike] """ # Default kwargs from DiffusionPipeline cache_dir = kwargs.pop("cache_dir", None) - resume_download = kwargs.pop("resume_download", False) force_download = kwargs.pop("force_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", False) @@ -124,7 +123,6 @@ def merge(self, pretrained_model_name_or_path_list: List[Union[str, os.PathLike] config_dict = DiffusionPipeline.load_config( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, force_download=force_download, proxies=proxies, local_files_only=local_files_only, @@ -160,7 +158,6 @@ def merge(self, pretrained_model_name_or_path_list: List[Union[str, os.PathLike] else snapshot_download( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/examples/community/ip_adapter_face_id.py b/examples/community/ip_adapter_face_id.py index befb48c7391e..c8e39ae08d2f 100644 --- a/examples/community/ip_adapter_face_id.py +++ b/examples/community/ip_adapter_face_id.py @@ -267,7 +267,6 @@ def __init__( def load_ip_adapter_face_id(self, pretrained_model_name_or_path_or_dict, weight_name, **kwargs): cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) token = kwargs.pop("token", None) @@ -283,7 +282,6 @@ def load_ip_adapter_face_id(self, pretrained_model_name_or_path_or_dict, weight_ weights_name=weight_name, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/examples/community/stable_diffusion_tensorrt_img2img.py 
b/examples/community/stable_diffusion_tensorrt_img2img.py index 7264a60506fe..16a8b803cc29 100755 --- a/examples/community/stable_diffusion_tensorrt_img2img.py +++ b/examples/community/stable_diffusion_tensorrt_img2img.py @@ -783,7 +783,6 @@ def __loadModels(self): @validate_hf_hub_args def set_cached_folder(cls, pretrained_model_name_or_path: Optional[Union[str, os.PathLike]], **kwargs): cache_dir = kwargs.pop("cache_dir", None) - resume_download = kwargs.pop("resume_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", False) token = kwargs.pop("token", None) @@ -795,7 +794,6 @@ def set_cached_folder(cls, pretrained_model_name_or_path: Optional[Union[str, os else snapshot_download( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/examples/community/stable_diffusion_tensorrt_inpaint.py b/examples/community/stable_diffusion_tensorrt_inpaint.py index b2d61a3dab93..8bacd050571a 100755 --- a/examples/community/stable_diffusion_tensorrt_inpaint.py +++ b/examples/community/stable_diffusion_tensorrt_inpaint.py @@ -783,7 +783,6 @@ def __loadModels(self): @validate_hf_hub_args def set_cached_folder(cls, pretrained_model_name_or_path: Optional[Union[str, os.PathLike]], **kwargs): cache_dir = kwargs.pop("cache_dir", None) - resume_download = kwargs.pop("resume_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", False) token = kwargs.pop("token", None) @@ -795,7 +794,6 @@ def set_cached_folder(cls, pretrained_model_name_or_path: Optional[Union[str, os else snapshot_download( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/examples/community/stable_diffusion_tensorrt_txt2img.py b/examples/community/stable_diffusion_tensorrt_txt2img.py index 1fcfafadb4f7..6072a357bc5d 100755 --- a/examples/community/stable_diffusion_tensorrt_txt2img.py +++ b/examples/community/stable_diffusion_tensorrt_txt2img.py @@ -695,7 +695,6 @@ def __loadModels(self): @validate_hf_hub_args def set_cached_folder(cls, pretrained_model_name_or_path: Optional[Union[str, os.PathLike]], **kwargs): cache_dir = kwargs.pop("cache_dir", None) - resume_download = kwargs.pop("resume_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", False) token = kwargs.pop("token", None) @@ -707,7 +706,6 @@ def set_cached_folder(cls, pretrained_model_name_or_path: Optional[Union[str, os else snapshot_download( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/configuration_utils.py b/src/diffusers/configuration_utils.py index be74ae061909..460e9d9e8c71 100644 --- a/src/diffusers/configuration_utils.py +++ b/src/diffusers/configuration_utils.py @@ -343,7 +343,6 @@ def load_config( local_dir = kwargs.pop("local_dir", None) local_dir_use_symlinks = kwargs.pop("local_dir_use_symlinks", "auto") force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) local_files_only = kwargs.pop("local_files_only", False) @@ -386,7 +385,6 @@ def load_config( cache_dir=cache_dir, force_download=force_download, proxies=proxies, - 
resume_download=resume_download, local_files_only=local_files_only, token=token, user_agent=user_agent, diff --git a/src/diffusers/loaders/autoencoder.py b/src/diffusers/loaders/autoencoder.py index 36b022a26ec9..be9124f6222f 100644 --- a/src/diffusers/loaders/autoencoder.py +++ b/src/diffusers/loaders/autoencoder.py @@ -99,7 +99,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): original_config_file = kwargs.pop("original_config_file", None) config_file = kwargs.pop("config_file", None) - resume_download = kwargs.pop("resume_download", None) force_download = kwargs.pop("force_download", False) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) @@ -120,7 +119,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): pretrained_model_link_or_path=pretrained_model_link_or_path, class_name=class_name, original_config_file=original_config_file, - resume_download=resume_download, force_download=force_download, proxies=proxies, token=token, diff --git a/src/diffusers/loaders/controlnet.py b/src/diffusers/loaders/controlnet.py index 53b9802d390e..bd4539176f18 100644 --- a/src/diffusers/loaders/controlnet.py +++ b/src/diffusers/loaders/controlnet.py @@ -89,7 +89,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): """ original_config_file = kwargs.pop("original_config_file", None) config_file = kwargs.pop("config_file", None) - resume_download = kwargs.pop("resume_download", None) force_download = kwargs.pop("force_download", False) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) @@ -109,7 +108,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): pretrained_model_link_or_path=pretrained_model_link_or_path, class_name=class_name, original_config_file=original_config_file, - resume_download=resume_download, force_download=force_download, proxies=proxies, token=token, diff --git a/src/diffusers/loaders/ip_adapter.py b/src/diffusers/loaders/ip_adapter.py index ef6a53e43196..72aa2a08f1cb 100644 --- a/src/diffusers/loaders/ip_adapter.py +++ b/src/diffusers/loaders/ip_adapter.py @@ -135,7 +135,6 @@ def load_ip_adapter( # Load the main state dict first. cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) token = kwargs.pop("token", None) @@ -171,7 +170,6 @@ def load_ip_adapter( weights_name=weight_name, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/loaders/lora.py b/src/diffusers/loaders/lora.py index 8b42f412add1..b1e59723d327 100644 --- a/src/diffusers/loaders/lora.py +++ b/src/diffusers/loaders/lora.py @@ -193,7 +193,6 @@ def lora_state_dict( # UNet and text encoder or both. 
cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) token = kwargs.pop("token", None) @@ -234,7 +233,6 @@ def lora_state_dict( weights_name=weight_name or LORA_WEIGHT_NAME_SAFE, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -260,7 +258,6 @@ def lora_state_dict( weights_name=weight_name or LORA_WEIGHT_NAME, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -1450,7 +1447,6 @@ def lora_state_dict( # UNet and text encoder or both. cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) token = kwargs.pop("token", None) @@ -1481,7 +1477,6 @@ def lora_state_dict( weights_name=weight_name or LORA_WEIGHT_NAME_SAFE, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -1503,7 +1498,6 @@ def lora_state_dict( weights_name=weight_name or LORA_WEIGHT_NAME, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py index d7bf67288c0a..8f406236cea0 100644 --- a/src/diffusers/loaders/single_file.py +++ b/src/diffusers/loaders/single_file.py @@ -253,7 +253,6 @@ def _download_diffusers_model_config_from_hub( revision=revision, proxies=proxies, force_download=force_download, - resume_download=resume_download, local_files_only=local_files_only, token=token, allow_patterns=allow_patterns, @@ -352,7 +351,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): deprecate("original_config_file", "1.0.0", deprecation_message) original_config = original_config_file - resume_download = kwargs.pop("resume_download", None) force_download = kwargs.pop("force_download", False) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) @@ -382,7 +380,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): checkpoint = load_single_file_checkpoint( pretrained_model_link_or_path, - resume_download=resume_download, force_download=force_download, proxies=proxies, token=token, @@ -412,7 +409,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): revision=revision, proxies=proxies, force_download=force_download, - resume_download=resume_download, local_files_only=local_files_only, token=token, ) @@ -435,7 +431,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): revision=revision, proxies=proxies, force_download=force_download, - resume_download=resume_download, local_files_only=False, token=token, ) diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py index f537a3f44917..cf9fa2d18294 100644 --- a/src/diffusers/loaders/single_file_model.py +++ b/src/diffusers/loaders/single_file_model.py @@ -171,7 +171,6 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] = "`from_single_file` cannot accept both `config` 
and `original_config` arguments. Please provide only one of these arguments" ) - resume_download = kwargs.pop("resume_download", None) force_download = kwargs.pop("force_download", False) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) @@ -186,7 +185,6 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] = else: checkpoint = load_single_file_checkpoint( pretrained_model_link_or_path_or_dict, - resume_download=resume_download, force_download=force_download, proxies=proxies, token=token, diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py index ff076c82b00b..ad272d57d07a 100644 --- a/src/diffusers/loaders/single_file_utils.py +++ b/src/diffusers/loaders/single_file_utils.py @@ -324,7 +324,6 @@ def load_single_file_checkpoint( weights_name=weights_name, force_download=force_download, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/loaders/textual_inversion.py b/src/diffusers/loaders/textual_inversion.py index b6e1545e16dd..53bf30749283 100644 --- a/src/diffusers/loaders/textual_inversion.py +++ b/src/diffusers/loaders/textual_inversion.py @@ -38,7 +38,6 @@ def load_textual_inversion_state_dicts(pretrained_model_name_or_paths, **kwargs): cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) token = kwargs.pop("token", None) @@ -72,7 +71,6 @@ def load_textual_inversion_state_dicts(pretrained_model_name_or_paths, **kwargs) weights_name=weight_name or TEXT_INVERSION_NAME_SAFE, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -93,7 +91,6 @@ def load_textual_inversion_state_dicts(pretrained_model_name_or_paths, **kwargs) weights_name=weight_name or TEXT_INVERSION_NAME, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/loaders/unet.py b/src/diffusers/loaders/unet.py index b02ff5a5895f..3746226c42de 100644 --- a/src/diffusers/loaders/unet.py +++ b/src/diffusers/loaders/unet.py @@ -140,7 +140,6 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict """ cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) token = kwargs.pop("token", None) @@ -174,7 +173,6 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict weights_name=weight_name or LORA_WEIGHT_NAME_SAFE, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -194,7 +192,6 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict weights_name=weight_name or LORA_WEIGHT_NAME, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/models/model_loading_utils.py b/src/diffusers/models/model_loading_utils.py index 5604879f40ab..ef75061a27fd 100644 
--- a/src/diffusers/models/model_loading_utils.py +++ b/src/diffusers/models/model_loading_utils.py @@ -216,7 +216,6 @@ def _fetch_index_file( weights_name=index_file_in_repo, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/models/modeling_flax_utils.py b/src/diffusers/models/modeling_flax_utils.py index 151281070faa..85a7c64958b1 100644 --- a/src/diffusers/models/modeling_flax_utils.py +++ b/src/diffusers/models/modeling_flax_utils.py @@ -296,7 +296,6 @@ def from_pretrained( cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) from_pt = kwargs.pop("from_pt", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", False) token = kwargs.pop("token", None) @@ -316,7 +315,6 @@ def from_pretrained( cache_dir=cache_dir, return_unused_kwargs=True, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -362,7 +360,6 @@ def from_pretrained( cache_dir=cache_dir, force_download=force_download, proxies=proxies, - resume_download=resume_download, local_files_only=local_files_only, token=token, user_agent=user_agent, diff --git a/src/diffusers/models/modeling_utils.py b/src/diffusers/models/modeling_utils.py index ab98d4cea5a4..b60491afaa71 100644 --- a/src/diffusers/models/modeling_utils.py +++ b/src/diffusers/models/modeling_utils.py @@ -518,7 +518,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P ignore_mismatched_sizes = kwargs.pop("ignore_mismatched_sizes", False) force_download = kwargs.pop("force_download", False) from_flax = kwargs.pop("from_flax", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) output_loading_info = kwargs.pop("output_loading_info", False) local_files_only = kwargs.pop("local_files_only", None) @@ -619,7 +618,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P return_unused_kwargs=True, return_commit_hash=True, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -641,7 +639,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P cache_dir=cache_dir, variant=variant, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -663,7 +660,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P weights_name=FLAX_WEIGHTS_NAME, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -685,7 +681,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P index_file, cache_dir=cache_dir, proxies=proxies, - resume_download=resume_download, local_files_only=local_files_only, token=token, user_agent=user_agent, @@ -700,7 +695,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P weights_name=_add_variant(SAFETENSORS_WEIGHTS_NAME, variant), cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -724,7 +718,6 @@ def from_pretrained(cls, 
pretrained_model_name_or_path: Optional[Union[str, os.P weights_name=_add_variant(WEIGHTS_NAME, variant), cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -1177,7 +1170,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) token = kwargs.pop("token", None) @@ -1200,7 +1192,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P return_unused_kwargs=True, return_commit_hash=True, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/pipelines/auto_pipeline.py b/src/diffusers/pipelines/auto_pipeline.py index f5bc16ed9072..ff18143a2715 100644 --- a/src/diffusers/pipelines/auto_pipeline.py +++ b/src/diffusers/pipelines/auto_pipeline.py @@ -319,7 +319,6 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): """ cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) local_files_only = kwargs.pop("local_files_only", False) @@ -328,7 +327,6 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): load_config_kwargs = { "cache_dir": cache_dir, "force_download": force_download, - "resume_download": resume_download, "proxies": proxies, "token": token, "local_files_only": local_files_only, @@ -592,7 +590,6 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): """ cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) local_files_only = kwargs.pop("local_files_only", False) @@ -601,7 +598,6 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): load_config_kwargs = { "cache_dir": cache_dir, "force_download": force_download, - "resume_download": resume_download, "proxies": proxies, "token": token, "local_files_only": local_files_only, @@ -868,7 +864,6 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): """ cache_dir = kwargs.pop("cache_dir", None) force_download = kwargs.pop("force_download", False) - resume_download = kwargs.pop("resume_download", None) proxies = kwargs.pop("proxies", None) token = kwargs.pop("token", None) local_files_only = kwargs.pop("local_files_only", False) @@ -877,7 +872,6 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): load_config_kwargs = { "cache_dir": cache_dir, "force_download": force_download, - "resume_download": resume_download, "proxies": proxies, "token": token, "local_files_only": local_files_only, diff --git a/src/diffusers/pipelines/pipeline_flax_utils.py b/src/diffusers/pipelines/pipeline_flax_utils.py index 7534149b559a..4d27fcd370dc 100644 --- a/src/diffusers/pipelines/pipeline_flax_utils.py +++ b/src/diffusers/pipelines/pipeline_flax_utils.py @@ -316,7 +316,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P ``` """ cache_dir = kwargs.pop("cache_dir", None) - resume_download = kwargs.pop("resume_download", None) proxies = 
kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", False) token = kwargs.pop("token", None) @@ -332,7 +331,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P config_dict = cls.load_config( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -363,7 +361,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P cached_folder = snapshot_download( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, diff --git a/src/diffusers/pipelines/pipeline_utils.py b/src/diffusers/pipelines/pipeline_utils.py index e5f822caa0ef..047bf9479bfd 100644 --- a/src/diffusers/pipelines/pipeline_utils.py +++ b/src/diffusers/pipelines/pipeline_utils.py @@ -625,7 +625,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P ``` """ cache_dir = kwargs.pop("cache_dir", None) - resume_download = kwargs.pop("resume_download", None) force_download = kwargs.pop("force_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) @@ -702,7 +701,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P cached_folder = cls.download( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, force_download=force_download, proxies=proxies, local_files_only=local_files_only, @@ -842,7 +840,6 @@ def load_module(name, value): torch_dtype=torch_dtype, cached_folder=cached_folder, force_download=force_download, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -910,7 +907,6 @@ def load_module(name, value): connected_pipes = {prefix: getattr(modelcard.data, prefix, [None])[0] for prefix in CONNECTED_PIPES_KEYS} load_kwargs = { "cache_dir": cache_dir, - "resume_download": resume_download, "force_download": force_download, "proxies": proxies, "local_files_only": local_files_only, @@ -1271,7 +1267,6 @@ def download(cls, pretrained_model_name, **kwargs) -> Union[str, os.PathLike]: """ cache_dir = kwargs.pop("cache_dir", None) - resume_download = kwargs.pop("resume_download", None) force_download = kwargs.pop("force_download", False) proxies = kwargs.pop("proxies", None) local_files_only = kwargs.pop("local_files_only", None) @@ -1311,7 +1306,6 @@ def download(cls, pretrained_model_name, **kwargs) -> Union[str, os.PathLike]: revision=revision, proxies=proxies, force_download=force_download, - resume_download=resume_download, token=token, ) @@ -1500,7 +1494,6 @@ def download(cls, pretrained_model_name, **kwargs) -> Union[str, os.PathLike]: cached_folder = snapshot_download( pretrained_model_name, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, @@ -1523,7 +1516,6 @@ def download(cls, pretrained_model_name, **kwargs) -> Union[str, os.PathLike]: for connected_pipe_repo_id in connected_pipes: download_kwargs = { "cache_dir": cache_dir, - "resume_download": resume_download, "force_download": force_download, "proxies": proxies, "local_files_only": local_files_only, diff --git a/src/diffusers/utils/dynamic_modules_utils.py b/src/diffusers/utils/dynamic_modules_utils.py index 733579b8c09c..c03802c305d8 100644 --- a/src/diffusers/utils/dynamic_modules_utils.py +++ 
b/src/diffusers/utils/dynamic_modules_utils.py @@ -309,7 +309,6 @@ def get_cached_module_file( cache_dir=cache_dir, force_download=force_download, proxies=proxies, - resume_download=resume_download, local_files_only=local_files_only, token=token, ) @@ -366,7 +365,6 @@ def get_cached_module_file( f"{module_needed}.py", cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, token=token, revision=revision, @@ -458,7 +456,6 @@ def get_class_from_dynamic_module( module_file, cache_dir=cache_dir, force_download=force_download, - resume_download=resume_download, proxies=proxies, token=token, revision=revision, diff --git a/src/diffusers/utils/hub_utils.py b/src/diffusers/utils/hub_utils.py index d0253ff474d9..7a4caf1908cb 100644 --- a/src/diffusers/utils/hub_utils.py +++ b/src/diffusers/utils/hub_utils.py @@ -324,7 +324,6 @@ def _get_model_file( cache_dir=cache_dir, force_download=force_download, proxies=proxies, - resume_download=resume_download, local_files_only=local_files_only, token=token, user_agent=user_agent, @@ -349,7 +348,6 @@ def _get_model_file( cache_dir=cache_dir, force_download=force_download, proxies=proxies, - resume_download=resume_download, local_files_only=local_files_only, token=token, user_agent=user_agent, @@ -472,7 +470,6 @@ def _get_checkpoint_shard_files( cached_folder = snapshot_download( pretrained_model_name_or_path, cache_dir=cache_dir, - resume_download=resume_download, proxies=proxies, local_files_only=local_files_only, token=token, From 5019329a24b7276bffc2cbc9346365b55ef1e53a Mon Sep 17 00:00:00 2001 From: sayakpaul Date: Thu, 20 Jun 2024 17:46:40 +0530 Subject: [PATCH 2/3] fix: _fetch_index_file call. --- src/diffusers/models/model_loading_utils.py | 1 - src/diffusers/models/modeling_utils.py | 3 --- 2 files changed, 4 deletions(-) diff --git a/src/diffusers/models/model_loading_utils.py b/src/diffusers/models/model_loading_utils.py index ef75061a27fd..6a7bfc084036 100644 --- a/src/diffusers/models/model_loading_utils.py +++ b/src/diffusers/models/model_loading_utils.py @@ -191,7 +191,6 @@ def _fetch_index_file( cache_dir, variant, force_download, - resume_download, proxies, local_files_only, token, diff --git a/src/diffusers/models/modeling_utils.py b/src/diffusers/models/modeling_utils.py index b60491afaa71..45c244b10585 100644 --- a/src/diffusers/models/modeling_utils.py +++ b/src/diffusers/models/modeling_utils.py @@ -434,9 +434,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. From 66b27e7f1d5b48e06de9ceeed6e9ad1b9c652025 Mon Sep 17 00:00:00 2001 From: sayakpaul Date: Wed, 26 Jun 2024 22:30:25 +0530 Subject: [PATCH 3/3] remove resume_download from docs. 
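Follow-up to the previous two commits: with `resume_download` no longer consumed anywhere in the code, this drops the leftover docstring entries, keyword defaults, and pass-throughs so the documentation matches the code. As the deleted doc text itself notes, the argument is deprecated and ignored; downloads are resumed by default whenever possible.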
--- examples/community/checkpoint_merger.py | 2 +- src/diffusers/configuration_utils.py | 3 --- src/diffusers/loaders/autoencoder.py | 4 +--- src/diffusers/loaders/controlnet.py | 4 +--- src/diffusers/loaders/ip_adapter.py | 4 +--- src/diffusers/loaders/lora.py | 8 ++------ src/diffusers/loaders/single_file.py | 5 +---- src/diffusers/loaders/single_file_model.py | 4 +--- src/diffusers/loaders/single_file_utils.py | 1 - src/diffusers/loaders/textual_inversion.py | 4 +--- src/diffusers/loaders/unet.py | 4 +--- src/diffusers/models/modeling_flax_utils.py | 4 +--- src/diffusers/pipelines/auto_pipeline.py | 12 +++--------- src/diffusers/pipelines/pipeline_flax_utils.py | 4 +--- src/diffusers/pipelines/pipeline_loading_utils.py | 3 --- src/diffusers/pipelines/pipeline_utils.py | 8 ++------ src/diffusers/schedulers/scheduling_utils.py | 4 +--- src/diffusers/schedulers/scheduling_utils_flax.py | 4 +--- src/diffusers/utils/dynamic_modules_utils.py | 9 +-------- src/diffusers/utils/hub_utils.py | 2 -- 20 files changed, 20 insertions(+), 73 deletions(-) diff --git a/examples/community/checkpoint_merger.py b/examples/community/checkpoint_merger.py index 0121f45c2efc..6ba4b8c6e837 100644 --- a/examples/community/checkpoint_merger.py +++ b/examples/community/checkpoint_merger.py @@ -71,7 +71,7 @@ def merge(self, pretrained_model_name_or_path_list: List[Union[str, os.PathLike] **kwargs: Supports all the default DiffusionPipeline.get_config_dict kwargs viz.. - cache_dir, resume_download, force_download, proxies, local_files_only, token, revision, torch_dtype, device_map. + cache_dir, force_download, proxies, local_files_only, token, revision, torch_dtype, device_map. alpha - The interpolation parameter. Ranges from 0 to 1. It affects the ratio in which the checkpoints are merged. A 0.8 alpha would mean that the first model checkpoints would affect the final result far less than an alpha of 0.2 diff --git a/src/diffusers/configuration_utils.py b/src/diffusers/configuration_utils.py index 460e9d9e8c71..8561a6801032 100644 --- a/src/diffusers/configuration_utils.py +++ b/src/diffusers/configuration_utils.py @@ -310,9 +310,6 @@ def load_config( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/loaders/autoencoder.py b/src/diffusers/loaders/autoencoder.py index be9124f6222f..04f87cdd036f 100644 --- a/src/diffusers/loaders/autoencoder.py +++ b/src/diffusers/loaders/autoencoder.py @@ -50,9 +50,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. 
diff --git a/src/diffusers/loaders/controlnet.py b/src/diffusers/loaders/controlnet.py index bd4539176f18..b17b9e6280a4 100644 --- a/src/diffusers/loaders/controlnet.py +++ b/src/diffusers/loaders/controlnet.py @@ -50,9 +50,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/loaders/ip_adapter.py b/src/diffusers/loaders/ip_adapter.py index 72aa2a08f1cb..ce9ed23caa94 100644 --- a/src/diffusers/loaders/ip_adapter.py +++ b/src/diffusers/loaders/ip_adapter.py @@ -90,9 +90,7 @@ def load_ip_adapter( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/loaders/lora.py b/src/diffusers/loaders/lora.py index 75c163f66baa..cdf6cd56e367 100644 --- a/src/diffusers/loaders/lora.py +++ b/src/diffusers/loaders/lora.py @@ -170,9 +170,7 @@ def lora_state_dict( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. @@ -1425,9 +1423,7 @@ def lora_state_dict( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download (`bool`, *optional*, defaults to `False`): - Whether or not to resume downloading the model weights and configuration files. If set to `False`, any - incompletely downloaded files are deleted. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. 
diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py index 8f406236cea0..c0e5b9e4cfb3 100644 --- a/src/diffusers/loaders/single_file.py +++ b/src/diffusers/loaders/single_file.py @@ -242,7 +242,6 @@ def _download_diffusers_model_config_from_hub( revision, proxies, force_download=None, - resume_download=None, local_files_only=None, token=None, ): @@ -287,9 +286,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py index cf9fa2d18294..824171eadc18 100644 --- a/src/diffusers/loaders/single_file_model.py +++ b/src/diffusers/loaders/single_file_model.py @@ -121,9 +121,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] = cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download (`bool`, *optional*, defaults to `False`): - Whether or not to resume downloading the model weights and configuration files. If set to `False`, any - incompletely downloaded files are deleted. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py index ad272d57d07a..b3e8e77c25e1 100644 --- a/src/diffusers/loaders/single_file_utils.py +++ b/src/diffusers/loaders/single_file_utils.py @@ -306,7 +306,6 @@ def _is_model_weights_in_cached_folder(cached_folder, name): def load_single_file_checkpoint( pretrained_model_link_or_path, - resume_download=False, force_download=False, proxies=None, token=None, diff --git a/src/diffusers/loaders/textual_inversion.py b/src/diffusers/loaders/textual_inversion.py index 53bf30749283..574b89233cc1 100644 --- a/src/diffusers/loaders/textual_inversion.py +++ b/src/diffusers/loaders/textual_inversion.py @@ -305,9 +305,7 @@ def load_textual_inversion( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. 
diff --git a/src/diffusers/loaders/unet.py b/src/diffusers/loaders/unet.py index 3746226c42de..56770832d0ca 100644 --- a/src/diffusers/loaders/unet.py +++ b/src/diffusers/loaders/unet.py @@ -97,9 +97,7 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/models/modeling_flax_utils.py b/src/diffusers/models/modeling_flax_utils.py index 85a7c64958b1..8c35fab0fc16 100644 --- a/src/diffusers/models/modeling_flax_utils.py +++ b/src/diffusers/models/modeling_flax_utils.py @@ -245,9 +245,7 @@ def from_pretrained( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/pipelines/auto_pipeline.py b/src/diffusers/pipelines/auto_pipeline.py index ff18143a2715..697123909be4 100644 --- a/src/diffusers/pipelines/auto_pipeline.py +++ b/src/diffusers/pipelines/auto_pipeline.py @@ -242,9 +242,7 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. @@ -513,9 +511,7 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. @@ -787,9 +783,7 @@ def from_pretrained(cls, pretrained_model_or_path, **kwargs): cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. 
+ proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/pipelines/pipeline_flax_utils.py b/src/diffusers/pipelines/pipeline_flax_utils.py index 4d27fcd370dc..12078e69fe59 100644 --- a/src/diffusers/pipelines/pipeline_flax_utils.py +++ b/src/diffusers/pipelines/pipeline_flax_utils.py @@ -254,9 +254,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/pipelines/pipeline_loading_utils.py b/src/diffusers/pipelines/pipeline_loading_utils.py index 0043bec65d79..a0af28803d79 100644 --- a/src/diffusers/pipelines/pipeline_loading_utils.py +++ b/src/diffusers/pipelines/pipeline_loading_utils.py @@ -435,7 +435,6 @@ def _load_empty_model( return_unused_kwargs=True, return_commit_hash=True, force_download=kwargs.pop("force_download", False), - resume_download=kwargs.pop("resume_download", None), proxies=kwargs.pop("proxies", None), local_files_only=kwargs.pop("local_files_only", False), token=kwargs.pop("token", None), @@ -454,7 +453,6 @@ def _load_empty_model( cached_folder, subfolder=name, force_download=kwargs.pop("force_download", False), - resume_download=kwargs.pop("resume_download", None), proxies=kwargs.pop("proxies", None), local_files_only=kwargs.pop("local_files_only", False), token=kwargs.pop("token", None), @@ -544,7 +542,6 @@ def _get_final_device_map(device_map, pipeline_class, passed_class_obj, init_dic torch_dtype=torch_dtype, cached_folder=kwargs.get("cached_folder", None), force_download=kwargs.get("force_download", None), - resume_download=kwargs.get("resume_download", None), proxies=kwargs.get("proxies", None), local_files_only=kwargs.get("local_files_only", None), token=kwargs.get("token", None), diff --git a/src/diffusers/pipelines/pipeline_utils.py b/src/diffusers/pipelines/pipeline_utils.py index 047bf9479bfd..2cc9defc3ffa 100644 --- a/src/diffusers/pipelines/pipeline_utils.py +++ b/src/diffusers/pipelines/pipeline_utils.py @@ -533,9 +533,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P cache_dir (`Union[str, os.PathLike]`, *optional*): Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. 
@@ -1212,9 +1210,7 @@ def download(cls, pretrained_model_name, **kwargs) -> Union[str, os.PathLike]: force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/schedulers/scheduling_utils.py b/src/diffusers/schedulers/scheduling_utils.py index 33d34e26d89d..f20224b19009 100644 --- a/src/diffusers/schedulers/scheduling_utils.py +++ b/src/diffusers/schedulers/scheduling_utils.py @@ -121,9 +121,7 @@ def from_pretrained( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/schedulers/scheduling_utils_flax.py b/src/diffusers/schedulers/scheduling_utils_flax.py index 360ca4705e02..ae11baf9ea1b 100644 --- a/src/diffusers/schedulers/scheduling_utils_flax.py +++ b/src/diffusers/schedulers/scheduling_utils_flax.py @@ -102,9 +102,7 @@ def from_pretrained( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. diff --git a/src/diffusers/utils/dynamic_modules_utils.py b/src/diffusers/utils/dynamic_modules_utils.py index c03802c305d8..f0cf953924ad 100644 --- a/src/diffusers/utils/dynamic_modules_utils.py +++ b/src/diffusers/utils/dynamic_modules_utils.py @@ -199,7 +199,6 @@ def get_cached_module_file( module_file: str, cache_dir: Optional[Union[str, os.PathLike]] = None, force_download: bool = False, - resume_download: Optional[bool] = None, proxies: Optional[Dict[str, str]] = None, token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, @@ -226,9 +225,7 @@ def get_cached_module_file( cache should not be used. force_download (`bool`, *optional*, defaults to `False`): Whether or not to force to (re-)download the configuration files and override the cached versions if they - exist. resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 - of Diffusers. + exist. proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 
@@ -380,7 +377,6 @@ def get_class_from_dynamic_module( class_name: Optional[str] = None, cache_dir: Optional[Union[str, os.PathLike]] = None, force_download: bool = False, - resume_download: Optional[bool] = None, proxies: Optional[Dict[str, str]] = None, token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, @@ -417,9 +413,6 @@ def get_class_from_dynamic_module( force_download (`bool`, *optional*, defaults to `False`): Whether or not to force to (re-)download the configuration files and override the cached versions if they exist. - resume_download: - Deprecated and ignored. All downloads are now resumed by default when possible. Will be removed in v1 of - Diffusers. proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. diff --git a/src/diffusers/utils/hub_utils.py b/src/diffusers/utils/hub_utils.py index 7a4caf1908cb..c4a8a5d72d99 100644 --- a/src/diffusers/utils/hub_utils.py +++ b/src/diffusers/utils/hub_utils.py @@ -286,7 +286,6 @@ def _get_model_file( cache_dir: Optional[str] = None, force_download: bool = False, proxies: Optional[Dict] = None, - resume_download: Optional[bool] = None, local_files_only: bool = False, token: Optional[str] = None, user_agent: Optional[Union[Dict, str]] = None, @@ -415,7 +414,6 @@ def _get_checkpoint_shard_files( index_filename, cache_dir=None, proxies=None, - resume_download=False, local_files_only=False, token=None, user_agent=None,
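Net effect for callers across the series: stop passing `resume_download` to `from_pretrained`, `from_single_file`, and the other loading entry points touched here; downloads already resume by default whenever possible, and the remaining download-related kwargs are unchanged. A minimal sketch of the migration (the model ID below is illustrative and not part of this series):

    from diffusers import DiffusionPipeline

    # Before: resume_download=True was passed explicitly (now deprecated and ignored).
    # pipe = DiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5", resume_download=True)

    # After: simply omit the keyword; other download kwargs keep working as before.
    pipe = DiffusionPipeline.from_pretrained(
        "runwayml/stable-diffusion-v1-5",  # illustrative model ID
        force_download=False,
        local_files_only=False,
    )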