fix(api): make version-safe imports compatible with tests
ssube committed Dec 30, 2023
1 parent 05ab396 commit 6a00481
Showing 2 changed files with 12 additions and 11 deletions.
api/onnx_web/diffusers/patches/vae.py (1 addition, 2 deletions)

@@ -4,11 +4,10 @@
 import numpy as np
 import torch
 from diffusers import OnnxRuntimeModel
-from diffusers.models.autoencoder_kl import AutoencoderKLOutput
-from diffusers.models.vae import DecoderOutput
 from diffusers.pipelines.onnx_utils import ORT_TO_NP_TYPE
 
 from ...server import ServerContext
+from ..version_safe_diffusers import AutoencoderKLOutput, DecoderOutput
 
 logger = getLogger(__name__)
 
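In practice, the VAE patch now takes both output classes from the project's version-safe shim rather than from diffusers' internal module paths, which moved between releases. A minimal sketch of the consumer side under that assumption; the wrapper function below is hypothetical and only illustrates how one of the imported classes is typically used:

# Inside the patch module: pull both output classes through the shim so the
# import works regardless of which diffusers layout is installed.
from ..version_safe_diffusers import AutoencoderKLOutput, DecoderOutput


def wrap_decoded_sample(sample):
    # Hypothetical helper for illustration: wrap a decoded latent sample in
    # the DecoderOutput container that downstream diffusers code expects.
    return DecoderOutput(sample=sample)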
api/onnx_web/diffusers/version_safe_diffusers.py (11 additions, 9 deletions)

@@ -1,11 +1,4 @@
 import diffusers
 from diffusers import *  # NOQA
-from packaging import version
-
-is_diffusers_0_15 = version.parse(
-    version.parse(diffusers.__version__).base_version
-) >= version.parse("0.15")
-
-
 try:
     from diffusers import DEISMultistepScheduler
@@ -27,8 +20,17 @@
 except ImportError:
     from ..diffusers.stub_scheduler import StubScheduler as UniPCMultistepScheduler
 
+try:
+    from diffusers.models.modeling_outputs import AutoencoderKLOutput
+except ImportError:
+    from diffusers.models.autoencoder_kl import AutoencoderKLOutput
 
-if is_diffusers_0_15:
+try:
+    from diffusers.models.autoencoders.vae import DecoderOutput
+except ImportError:
+    from diffusers.models.vae import DecoderOutput
+
+try:
     from diffusers.models.attention_processor import AttnProcessor
-else:
+except ImportError:
     from diffusers.models.cross_attention import CrossAttnProcessor as AttnProcessor
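Both new blocks follow the same version-safe pattern already applied to the schedulers earlier in this module: try the module path used by newer diffusers releases first, then fall back to the older location, so callers can import the names unconditionally. A sketch of that technique written as a generic helper, assuming only the standard library; import_first is illustrative and not part of the project, which writes the try/except blocks out by hand:

from importlib import import_module


def import_first(name, *module_paths):
    """Return attribute `name` from the first module path that provides it."""
    for path in module_paths:
        try:
            return getattr(import_module(path), name)
        except (ImportError, AttributeError):
            continue
    raise ImportError(f"{name} not found in any of: {', '.join(module_paths)}")


# Usage mirroring the diff: newer diffusers location first, older one second.
AutoencoderKLOutput = import_first(
    "AutoencoderKLOutput",
    "diffusers.models.modeling_outputs",
    "diffusers.models.autoencoder_kl",
)
DecoderOutput = import_first(
    "DecoderOutput",
    "diffusers.models.autoencoders.vae",
    "diffusers.models.vae",
)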
