Skip to content

Commit 1177d37

Browse files
committed
update
1 parent 3669690 commit 1177d37

File tree

2 files changed: 19 additions, 4 deletions

tests/lora/test_lora_layers_old_backend.py

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -343,6 +343,21 @@ def test_stable_diffusion_attn_processors(self):
343343
image = sd_pipe(**inputs).images
344344
assert image.shape == (1, 64, 64, 3)
345345

346+
@unittest.skipIf(not torch.cuda.is_available() or not is_xformers_available(), reason="xformers requires cuda")
347+
def test_stable_diffusion_set_xformers_attn_processors(self):
348+
# disable_full_determinism()
349+
device = "cuda" # ensure determinism for the device-dependent torch.Generator
350+
components, _ = self.get_dummy_components()
351+
sd_pipe = StableDiffusionPipeline(**components)
352+
sd_pipe = sd_pipe.to(device)
353+
sd_pipe.set_progress_bar_config(disable=None)
354+
355+
_, _, inputs = self.get_dummy_inputs()
356+
357+
# run normal sd pipe
358+
image = sd_pipe(**inputs).images
359+
assert image.shape == (1, 64, 64, 3)
360+
346361
# run lora xformers attention
347362
attn_processors, _ = create_unet_lora_layers(sd_pipe.unet)
348363
attn_processors = {
@@ -607,7 +622,7 @@ def test_unload_lora_sd(self):
607622
orig_image_slice, orig_image_slice_two, atol=1e-3
608623
), "Unloading LoRA parameters should lead to results similar to what was obtained with the pipeline without any LoRA parameters."
609624

610-
@unittest.skipIf(torch_device != "cuda", "This test is supposed to run on GPU")
625+
@unittest.skipIf(torch_device != "cuda" or not is_xformers_available(), "This test is supposed to run on GPU")
611626
def test_lora_unet_attn_processors_with_xformers(self):
612627
with tempfile.TemporaryDirectory() as tmpdirname:
613628
self.create_lora_weight_file(tmpdirname)
@@ -644,7 +659,7 @@ def test_lora_unet_attn_processors_with_xformers(self):
644659
if isinstance(module, Attention):
645660
self.assertIsInstance(module.processor, XFormersAttnProcessor)
646661

647-
@unittest.skipIf(torch_device != "cuda", "This test is supposed to run on GPU")
662+
@unittest.skipIf(torch_device != "cuda" or not is_xformers_available(), "This test is supposed to run on GPU")
648663
def test_lora_save_load_with_xformers(self):
649664
pipeline_components, lora_components = self.get_dummy_components()
650665
sd_pipe = StableDiffusionPipeline(**pipeline_components)

tests/lora/test_lora_layers_peft.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1864,14 +1864,14 @@ def test_sdxl_1_0_lora_unfusion(self):
18641864
pipe.enable_model_cpu_offload()
18651865

18661866
images = pipe(
1867-
"masterpiece, best quality, mountain", output_type="pil", generator=generator, num_inference_steps=3
1867+
"masterpiece, best quality, mountain", output_type="np", generator=generator, num_inference_steps=3
18681868
).images
18691869
images_with_fusion = images.flatten()
18701870

18711871
pipe.unfuse_lora()
18721872
generator = torch.Generator("cpu").manual_seed(0)
18731873
images = pipe(
1874-
"masterpiece, best quality, mountain", output_type="pil", generator=generator, num_inference_steps=3
1874+
"masterpiece, best quality, mountain", output_type="np", generator=generator, num_inference_steps=3
18751875
).images
18761876
images_without_fusion = images.flatten()
18771877

0 commit comments

Comments (0)