From 2ee90dabf423c2d3d28a7603f21e73b5e66179ca Mon Sep 17 00:00:00 2001
From: zhuoqun-chen
Date: Sun, 7 Jul 2024 22:39:31 -0700
Subject: [PATCH 1/2] Fix static typing and doc typos

---
 src/diffusers/schedulers/scheduling_ddim.py | 2 +-
 src/diffusers/schedulers/scheduling_ddpm.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/schedulers/scheduling_ddim.py b/src/diffusers/schedulers/scheduling_ddim.py
index 104a4a3f3b48..14356eafdaea 100644
--- a/src/diffusers/schedulers/scheduling_ddim.py
+++ b/src/diffusers/schedulers/scheduling_ddim.py
@@ -377,7 +377,7 @@ def step(
                 Whether or not to return a [`~schedulers.scheduling_ddim.DDIMSchedulerOutput`] or `tuple`.
 
         Returns:
-            [`~schedulers.scheduling_utils.DDIMSchedulerOutput`] or `tuple`:
+            [`~schedulers.scheduling_ddim.DDIMSchedulerOutput`] or `tuple`:
                 If return_dict is `True`, [`~schedulers.scheduling_ddim.DDIMSchedulerOutput`] is returned, otherwise a
                 tuple is returned where the first element is the sample tensor.
 
diff --git a/src/diffusers/schedulers/scheduling_ddpm.py b/src/diffusers/schedulers/scheduling_ddpm.py
index 9af53a61d0a8..81a770edf635 100644
--- a/src/diffusers/schedulers/scheduling_ddpm.py
+++ b/src/diffusers/schedulers/scheduling_ddpm.py
@@ -194,7 +194,7 @@ def __init__(
         sample_max_value: float = 1.0,
         timestep_spacing: str = "leading",
         steps_offset: int = 0,
-        rescale_betas_zero_snr: int = False,
+        rescale_betas_zero_snr: bool = False,
     ):
         if trained_betas is not None:
             self.betas = torch.tensor(trained_betas, dtype=torch.float32)

From 9b9e9c9afa18c75f43873982de6c0f8221f505d8 Mon Sep 17 00:00:00 2001
From: zhuoqun-chen
Date: Mon, 8 Jul 2024 00:37:20 -0700
Subject: [PATCH 2/2] Fix more same type hint typos with make fix-copies

---
 src/diffusers/schedulers/scheduling_ddpm_parallel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/schedulers/scheduling_ddpm_parallel.py b/src/diffusers/schedulers/scheduling_ddpm_parallel.py
index 94f64425816c..5dfcf3c17a2f 100644
--- a/src/diffusers/schedulers/scheduling_ddpm_parallel.py
+++ b/src/diffusers/schedulers/scheduling_ddpm_parallel.py
@@ -202,7 +202,7 @@ def __init__(
         sample_max_value: float = 1.0,
         timestep_spacing: str = "leading",
         steps_offset: int = 0,
-        rescale_betas_zero_snr: int = False,
+        rescale_betas_zero_snr: bool = False,
     ):
         if trained_betas is not None:
             self.betas = torch.tensor(trained_betas, dtype=torch.float32)
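
For context only, and not part of the patch series above: a minimal usage sketch showing that `rescale_betas_zero_snr` is consumed as a boolean flag, which is why the annotation is corrected from `int` to `bool`. It assumes a diffusers release that already exposes this parameter on DDPMScheduler, as in the files patched above; the patches change annotations only, not runtime behavior.

# Illustrative sketch, not part of the patches. Assumes a diffusers version with
# rescale_betas_zero_snr available on DDPMScheduler (as in the patched files).
from diffusers import DDPMScheduler

# The flag is a boolean switch: when True, the beta schedule is rescaled so the
# terminal timestep has zero SNR. The patches only fix its type annotation.
scheduler = DDPMScheduler(num_train_timesteps=1000, rescale_betas_zero_snr=True)
print(scheduler.config.rescale_betas_zero_snr)  # -> True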