From db1f7021eb80226693293241f3f07706e7337b09 Mon Sep 17 00:00:00 2001
From: sayakpaul
Date: Mon, 4 Mar 2024 08:55:12 +0530
Subject: [PATCH 1/4] throw error when patch inputs and layernorm are provided
 for transformers2d.

---
 src/diffusers/models/transformers/transformer_2d.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/diffusers/models/transformers/transformer_2d.py b/src/diffusers/models/transformers/transformer_2d.py
index bd632660f46c..6636d5592e08 100644
--- a/src/diffusers/models/transformers/transformer_2d.py
+++ b/src/diffusers/models/transformers/transformer_2d.py
@@ -100,6 +100,11 @@ def __init__(
         interpolation_scale: float = None,
     ):
         super().__init__()
+        if patch_size is not None and norm_type == "layer_norm":
+            raise NotImplementedError(
+                "Forward pass is not implemented when `patch_size` is not None and `norm_type` is 'layer_norm'."
+            )
+
         self.use_linear_projection = use_linear_projection
         self.num_attention_heads = num_attention_heads
         self.attention_head_dim = attention_head_dim

From 16627e9ac9a04c39a0cac2339f87361b1a14857b Mon Sep 17 00:00:00 2001
From: sayakpaul
Date: Tue, 5 Mar 2024 12:09:00 +0530
Subject: [PATCH 2/4] add comment on supported norm_types in transformers2d

---
 src/diffusers/models/transformers/transformer_2d.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/models/transformers/transformer_2d.py b/src/diffusers/models/transformers/transformer_2d.py
index 6636d5592e08..1a0de198d482 100644
--- a/src/diffusers/models/transformers/transformer_2d.py
+++ b/src/diffusers/models/transformers/transformer_2d.py
@@ -92,7 +92,7 @@ def __init__(
         only_cross_attention: bool = False,
         double_self_attention: bool = False,
         upcast_attention: bool = False,
-        norm_type: str = "layer_norm",
+        norm_type: str = "layer_norm",  # 'layer_norm', 'ada_norm', 'ada_norm_zero', 'ada_norm_single', 'layer_norm_i2vgen'
         norm_elementwise_affine: bool = True,
         norm_eps: float = 1e-5,
         attention_type: str = "default",

From 5ddcb5b8fb1a2c8b8360ca04362cac4b046ea7af Mon Sep 17 00:00:00 2001
From: sayakpaul
Date: Tue, 5 Mar 2024 13:10:19 +0530
Subject: [PATCH 3/4] more check

---
 src/diffusers/models/attention.py                 |  2 +-
 .../models/transformers/transformer_2d.py         | 15 ++++++++++-----
 2 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py
index a4b3ee58a865..8b3f2da66191 100644
--- a/src/diffusers/models/attention.py
+++ b/src/diffusers/models/attention.py
@@ -143,7 +143,7 @@ def __init__(
         double_self_attention: bool = False,
         upcast_attention: bool = False,
         norm_elementwise_affine: bool = True,
-        norm_type: str = "layer_norm",  # 'layer_norm', 'ada_norm', 'ada_norm_zero', 'ada_norm_single', 'layer_norm_i2vgen'
+        norm_type: str = "layer_norm",  # 'layer_norm', 'ada_norm', 'ada_norm_zero', 'ada_norm_single', 'ada_norm_continuous', 'layer_norm_i2vgen'
         norm_eps: float = 1e-5,
         final_dropout: bool = False,
         attention_type: str = "default",
diff --git a/src/diffusers/models/transformers/transformer_2d.py b/src/diffusers/models/transformers/transformer_2d.py
index 1a0de198d482..35a0595bf030 100644
--- a/src/diffusers/models/transformers/transformer_2d.py
+++ b/src/diffusers/models/transformers/transformer_2d.py
@@ -92,7 +92,7 @@ def __init__(
         only_cross_attention: bool = False,
         double_self_attention: bool = False,
         upcast_attention: bool = False,
-        norm_type: str = "layer_norm",  # 'layer_norm', 'ada_norm', 'ada_norm_zero', 'ada_norm_single', 'layer_norm_i2vgen'
+        norm_type: str = "layer_norm",  # 'layer_norm', 'ada_norm', 'ada_norm_zero', 'ada_norm_single', 'ada_norm_continuous', 'layer_norm_i2vgen'
         norm_elementwise_affine: bool = True,
         norm_eps: float = 1e-5,
         attention_type: str = "default",
@@ -100,10 +100,15 @@ def __init__(
         interpolation_scale: float = None,
     ):
         super().__init__()
-        if patch_size is not None and norm_type == "layer_norm":
-            raise NotImplementedError(
-                "Forward pass is not implemented when `patch_size` is not None and `norm_type` is 'layer_norm'."
-            )
+        if patch_size is not None:
+            if norm_type not in ["ada_norm", "ada_norm_zero"]:
+                raise NotImplementedError(
+                    f"Forward pass is not implemented when `patch_size` is not None and `norm_type` is '{norm_type}'."
+                )
+            elif norm_type in ["ada_norm", "ada_norm_zero"] and num_embeds_ada_norm is None:
+                raise ValueError(
+                    f"When using a `patch_size` and this `norm_type` ({norm_type}), `num_embeds_ada_norm` cannot be None."
+                )
 
         self.use_linear_projection = use_linear_projection
         self.num_attention_heads = num_attention_heads

From b7b58686dc36f3047b64935a2da283590354668d Mon Sep 17 00:00:00 2001
From: sayakpaul
Date: Tue, 5 Mar 2024 18:23:24 +0530
Subject: [PATCH 4/4] fix: norm _type handling

---
 src/diffusers/models/transformers/transformer_2d.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/models/transformers/transformer_2d.py b/src/diffusers/models/transformers/transformer_2d.py
index 35a0595bf030..8b391eeebfd9 100644
--- a/src/diffusers/models/transformers/transformer_2d.py
+++ b/src/diffusers/models/transformers/transformer_2d.py
@@ -101,7 +101,7 @@ def __init__(
     ):
         super().__init__()
         if patch_size is not None:
-            if norm_type not in ["ada_norm", "ada_norm_zero"]:
+            if norm_type not in ["ada_norm", "ada_norm_zero", "ada_norm_single"]:
                 raise NotImplementedError(
                     f"Forward pass is not implemented when `patch_size` is not None and `norm_type` is '{norm_type}'."
                 )
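
For reference, a minimal usage sketch of the validation as it stands after [PATCH 4/4]. This is an illustration only: it assumes a diffusers source tree with these patches applied, and that the remaining `Transformer2DModel` constructor arguments keep their defaults; only `patch_size`, `norm_type`, and `num_embeds_ada_norm` (all taken from the diff) are passed explicitly.

    # Sketch of the constructor-time checks added by this series; assumes a
    # diffusers checkout that includes these patches.
    from diffusers.models.transformers.transformer_2d import Transformer2DModel

    # With `patch_size` set, a norm_type outside {"ada_norm", "ada_norm_zero",
    # "ada_norm_single"} is rejected before any layers are built.
    try:
        Transformer2DModel(patch_size=2, norm_type="layer_norm")
    except NotImplementedError as err:
        print(err)

    # "ada_norm" / "ada_norm_zero" combined with `patch_size` additionally
    # require `num_embeds_ada_norm` to be set.
    try:
        Transformer2DModel(patch_size=2, norm_type="ada_norm", num_embeds_ada_norm=None)
    except ValueError as err:
        print(err)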