Initial ZLUDA support.
lshqqytiger committed Feb 14, 2024
1 parent a956be0 commit cc44386
Showing 2 changed files with 11 additions and 0 deletions.
3 changes: 3 additions & 0 deletions modules/shared.py
@@ -17,6 +17,7 @@
 from modules.paths import models_path, script_path, data_path, sd_configs_path, sd_default_config, sd_model_file, default_sd_model_file, extensions_dir, extensions_builtin_dir # pylint: disable=W0611
 from modules.dml import memory_providers, default_memory_provider, directml_do_hijack
 from modules.onnx_impl import initialize_onnx, execution_providers
+from modules.zluda import initialize_zluda
 import modules.interrogate
 import modules.memmon
 import modules.styles
@@ -922,6 +923,8 @@ def cast_value(self, key, value):
 max_workers = 4
 if devices.backend == "directml":
     directml_do_hijack()
+elif devices.backend == "cuda":
+    initialize_zluda()
 initialize_onnx()
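
The dispatch above runs once at startup: DirectML installs its torch hijack, CUDA devices are probed for ZLUDA, and ONNX initialization follows in every case. A hypothetical smoke test (not part of the commit) to confirm which branch a given install takes:

# Hypothetical smoke test, not part of the commit: report which
# backend branch the startup dispatch above would take.
from modules import devices

print(f"backend: {devices.backend}")
if devices.backend == "cuda":
    import torch
    print(f"device: {torch.cuda.get_device_name(devices.get_optimal_device())}")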


8 changes: 8 additions & 0 deletions modules/zluda.py
@@ -0,0 +1,8 @@
+import torch
+from modules import shared, devices
+
+
+def initialize_zluda():
+    if devices.cuda_ok and torch.cuda.get_device_name(devices.get_optimal_device()).endswith("[ZLUDA]"):
+        torch.backends.cudnn.enabled = False
+        shared.opts.cross_attention_optimization = "Batch matrix-matrix"
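
ZLUDA is a translation layer that runs CUDA binaries on non-NVIDIA GPUs; PyTorch sees such a GPU as an ordinary CUDA device whose reported name carries a "[ZLUDA]" suffix, which is what the check above keys on. A minimal standalone sketch of the same detection outside the webui codebase (the is_zluda helper is hypothetical):

# Standalone sketch of the detection used in modules/zluda.py above.
# Assumption: a PyTorch build routed through ZLUDA appends "[ZLUDA]"
# to the CUDA device name it reports.
import torch

def is_zluda(device_index: int = 0) -> bool:
    if not torch.cuda.is_available():
        return False
    return torch.cuda.get_device_name(device_index).endswith("[ZLUDA]")

if is_zluda():
    # Mirror the commit's workaround: disable cuDNN under ZLUDA and
    # rely on the plain batch matrix-matrix attention path instead.
    torch.backends.cudnn.enabled = False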
