fix: automatically replace unsupported torch device #2514

Merged · 1 commit · Jan 11, 2024
26 changes: 21 additions & 5 deletions server/utils.py
@@ -497,23 +497,39 @@ def _get_proxies():

 # Automatically detect the torch devices that are available. In a distributed
 # deployment, machines that do not run the LLM do not need torch installed.

+def is_mps_available():
+    import torch
+    return hasattr(torch.backends, "mps") and torch.backends.mps.is_available()
+
+def is_cuda_available():
+    import torch
+    return torch.cuda.is_available()
+
 def detect_device() -> Literal["cuda", "mps", "cpu"]:
     try:
         import torch
-        if torch.cuda.is_available():
+        if is_cuda_available():
             return "cuda"
-        if torch.backends.mps.is_available():
+        if is_mps_available():
             return "mps"
     except:
         pass
     return "cpu"


 def llm_device(device: str = None) -> Literal["cuda", "mps", "cpu"]:
     device = device or LLM_DEVICE

+    # fall back to an available device if the specified device is not available
+    if device == 'cuda' and not is_cuda_available() and is_mps_available():
+        logging.warning("cuda is not available, fallback to mps")
+        return "mps"
+    if device == 'mps' and not is_mps_available() and is_cuda_available():
+        logging.warning("mps is not available, fallback to cuda")
+        return "cuda"
+
+    # auto detect device if not specified or unrecognized
     if device not in ["cuda", "mps", "cpu"]:
-        device = detect_device()
+        return detect_device()

     return device
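The effect of the change is that a misconfigured device no longer propagates to model loading: requesting "cuda" on a CUDA-less machine now logs a warning and swaps in an available backend instead of failing later. A minimal usage sketch of the merged helpers follows; it assumes the functions are importable from server.utils as shown in the diff, and that LLM_DEVICE is defined by this repo's configuration, so the exact import paths may differ.

# Sketch: exercising the fallback paths added in this PR.
# Assumes server.utils exposes llm_device() and detect_device() as above.
import logging

from server.utils import detect_device, llm_device

logging.basicConfig(level=logging.WARNING)

print(detect_device())      # "cuda", "mps", or "cpu", depending on the host
print(llm_device("cuda"))   # on an MPS-only machine: warns, returns "mps"
print(llm_device("tpu"))    # unrecognized value -> auto-detected device
print(llm_device(None))     # defaults to LLM_DEVICE, then the same checks apply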