
Commit 835ee20: fix
lshqqytiger committed Feb 12, 2024
1 parent c7fc860
Showing 2 changed files with 0 additions and 2 deletions.
First changed file:

@@ -29,7 +29,6 @@ def __init__(
    ):
        optimum.onnxruntime.ORTStableDiffusionXLImg2ImgPipeline.__init__(self, vae_decoder, text_encoder, unet, config, tokenizer, scheduler, feature_extractor, vae_encoder, text_encoder_2, tokenizer_2, use_io_binding, model_save_dir, add_watermarker)
        super().__init__()
-       del self.image_processor  # This image processor requires an np array; delete it so the XL pipeline shares the same workflow as the non-XL pipelines.

    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, generator=None):
        batch_size = batch_size * num_images_per_prompt
Second changed file:

@@ -27,7 +27,6 @@ def __init__(
    ):
        optimum.onnxruntime.ORTStableDiffusionXLPipeline.__init__(self, vae_decoder, text_encoder, unet, config, tokenizer, scheduler, feature_extractor, vae_encoder, text_encoder_2, tokenizer_2, use_io_binding, model_save_dir, add_watermarker)
        super().__init__()
-       del self.image_processor  # This image processor requires an np array; delete it so the XL pipeline shares the same workflow as the non-XL pipelines.

    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, generator, latents=None):
        return prepare_latents(self.scheduler.init_noise_sigma, batch_size, height, width, dtype, generator, latents, num_channels_latents, self.vae_scale_factor)
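
For context, here is a minimal sketch of the wrapper pattern both hunks touch. Only the optimum.onnxruntime base class and the constructor argument list come from the diff; the wrapper class name and the CommonPipelineMixin second base are hypothetical stand-ins, used here to show why the code calls the Optimum __init__ explicitly and then super().__init__():

import optimum.onnxruntime

class CommonPipelineMixin:
    # Hypothetical second base class standing in for whatever shared-workflow
    # base the real wrapper uses; it is what the super().__init__() call in
    # the diff is meant to reach.
    def __init__(self):
        self.shared_state_ready = True

class OnnxStableDiffusionXLPipeline(  # hypothetical name
    CommonPipelineMixin,
    optimum.onnxruntime.ORTStableDiffusionXLPipeline,
):
    def __init__(self, vae_decoder, text_encoder, unet, config, tokenizer,
                 scheduler, feature_extractor=None, vae_encoder=None,
                 text_encoder_2=None, tokenizer_2=None, use_io_binding=None,
                 model_save_dir=None, add_watermarker=None):
        # Explicit base-class call, as in the diff: set up the ONNX Runtime
        # pipeline with all of its sessions and tokenizers first.
        optimum.onnxruntime.ORTStableDiffusionXLPipeline.__init__(
            self, vae_decoder, text_encoder, unet, config, tokenizer,
            scheduler, feature_extractor, vae_encoder, text_encoder_2,
            tokenizer_2, use_io_binding, model_save_dir, add_watermarker,
        )
        # Cooperative call: with the mixin listed first in the MRO, this
        # resolves to CommonPipelineMixin.__init__.
        super().__init__()
        # This commit removes the line that used to follow here:
        #     del self.image_processor
        # so the pipeline now keeps the image processor the Optimum base
        # class attaches.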

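The module-level prepare_latents helper that both overrides delegate to is not shown in the hunks. A plausible numpy sketch follows; the argument order comes from the call site above, but the body is an assumption based on the standard diffusers latent-preparation pattern, and the real helper in the repository may differ:

import numpy as np

def prepare_latents(init_noise_sigma, batch_size, height, width, dtype,
                    generator, latents=None, num_channels_latents=4,
                    vae_scale_factor=8):
    # Latents live in a VAE-downscaled space: one (channels, h/8, w/8)
    # tensor per image in the batch.
    shape = (batch_size, num_channels_latents,
             height // vae_scale_factor, width // vae_scale_factor)
    if latents is None:
        # generator is assumed to be a numpy random Generator here.
        latents = generator.standard_normal(shape).astype(dtype)
    elif latents.shape != shape:
        raise ValueError(f"Unexpected latents shape: got {latents.shape}, expected {shape}")
    # Scale the initial noise by the scheduler's starting sigma.
    return latents * init_noise_sigma
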
1 comment on commit 835ee20

@f1am3d commented on 835ee20 on Feb 16, 2024

Could you please not commit such small fixes directly to the master branch without proper testing?

#390
