@@ -104,6 +104,7 @@ class PeftLoraLoaderMixinTests:
     vae_kwargs = None

     text_encoder_target_modules = ["q_proj", "k_proj", "v_proj", "out_proj"]
+    denoiser_target_modules = ["to_q", "to_k", "to_v", "to_out.0"]

     def get_dummy_components(self, scheduler_cls=None, use_dora=False):
         if self.unet_kwargs and self.transformer_kwargs:
@@ -157,7 +158,7 @@ def get_dummy_components(self, scheduler_cls=None, use_dora=False):
         denoiser_lora_config = LoraConfig(
             r=rank,
             lora_alpha=rank,
-            target_modules=["to_q", "to_k", "to_v", "to_out.0"],
+            target_modules=self.denoiser_target_modules,
             init_lora_weights=False,
             use_dora=use_dora,
         )
@@ -2040,7 +2041,7 @@ def test_lora_B_bias(self):
         bias_values = {}
         denoiser = pipe.unet if self.unet_kwargs is not None else pipe.transformer
         for name, module in denoiser.named_modules():
-            if any(k in name for k in ["to_q", "to_k", "to_v", "to_out.0"]):
+            if any(k in name for k in self.denoiser_target_modules):
                 if module.bias is not None:
                     bias_values[name] = module.bias.data.clone()
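
# Example (a hypothetical sketch, not part of this diff): with the target
# list hoisted into the class attribute `denoiser_target_modules`, a
# pipeline-specific test class can override it instead of patching the
# previously hard-coded module names. The subclass name and module list
# below are illustrative assumptions only.
class HypotheticalTransformerLoraTests(PeftLoraLoaderMixinTests):
    # A denoiser whose attention projections use different names can
    # redirect the LoRA targets here; all tests on the mixin pick it up.
    denoiser_target_modules = ["attn.to_q", "attn.to_k", "attn.to_v"]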