Install CUDA-enabled PyTorch for Windows and Linux
PyTorch's Windows wheel files on pypi.org are CPU-only builds, so spaCy
throws `AssertionError: Torch not compiled with CUDA enabled` when it
loads a trf model. Reinstall the CUDA-enabled torch build with the
command from PyTorch's documentation: https://pytorch.org/get-started/locally

The Linux wheel files on pypi.org are CUDA-enabled, but PyTorch's
documentation also uses the `--extra-index-url` option for CUDA 11.6,
so it is better to reinstall torch that way as well.
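For context, the assertion mentioned above comes from torch's CUDA initialization, and the installed build can be checked before pointing pip at PyTorch's index. A minimal sketch, not part of the commit, assuming torch and spaCy are importable in the same environment:

```python
# Quick sanity check of which torch build is installed (illustrative only).
import torch
import spacy

print(torch.__version__)          # CUDA builds usually carry a "+cuXXX" suffix,
                                  # e.g. "1.13.0+cu117"; CPU-only builds do not
print(torch.cuda.is_available())  # False on a CPU-only build

spacy.require_gpu()               # raises if spaCy cannot find a usable GPU backend
```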
xxyzz committed Dec 3, 2022
1 parent 0b7ba94 commit e7ed60d
Showing 5 changed files with 41 additions and 5 deletions.
17 changes: 13 additions & 4 deletions config.py
@@ -67,6 +67,7 @@
prefs.defaults["choose_format_manually"] = True
prefs.defaults["wiktionary_gloss_lang"] = "en"
prefs.defaults["use_gpu"] = False
prefs.defaults["cuda"] = "cu117"
for data in load_json_or_pickle(get_plugin_path(), "data/languages.json").values():
prefs.defaults[f"{data['wiki']}_wiktionary_difficulty_limit"] = 5

@@ -106,6 +107,11 @@ def __init__(self):
self.search_people_box.setChecked(prefs["search_people"])
vl.addWidget(self.search_people_box)

form_layout = QFormLayout()
form_layout.setFieldGrowthPolicy(
QFormLayout.FieldGrowthPolicy.ExpandingFieldsGrow
)

if not ismacos:
self.use_gpu_box = QCheckBox(_("Run spaCy with GPU"))
self.use_gpu_box.setToolTip(
@@ -116,10 +122,12 @@
self.use_gpu_box.setChecked(prefs["use_gpu"])
vl.addWidget(self.use_gpu_box)

form_layout = QFormLayout()
form_layout.setFieldGrowthPolicy(
QFormLayout.FieldGrowthPolicy.ExpandingFieldsGrow
)
cuda_versions = {"cu117": "CUDA 11.7", "cu116": "CUDA 11.6"}
self.cuda_version_box = QComboBox()
for cuda_version, text in cuda_versions.items():
self.cuda_version_box.addItem(text, cuda_version)
self.cuda_version_box.setCurrentText(cuda_versions[prefs["cuda"]])
form_layout.addRow(_("CUDA version"), self.cuda_version_box)

model_size_label = QLabel(
_('<a href="https://spacy.io/models/en">spaCy model</a> size')
@@ -195,6 +203,7 @@ def save_settings(self) -> None:
prefs["minimal_x_ray_count"] = self.minimal_x_ray_count.value()
if not ismacos:
prefs["use_gpu"] = self.use_gpu_box.isChecked()
prefs["cuda"] = self.cuda_version_box.currentData()

def open_kindle_lemmas_dialog(self) -> None:
klld_path = get_klld_path(self.plugin_path)
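The new combo box stores the wheel tag ("cu117"/"cu116") as per-item user data, so save_settings can read it back with currentData() rather than parsing the visible label. A standalone sketch of that Qt pattern, using plain PyQt6 as an assumption (the plugin gets the same classes through calibre's Qt bindings):

```python
# Standalone illustration of the QComboBox user-data pattern used above
# (plain PyQt6 here; the plugin itself imports these classes via calibre).
import sys
from PyQt6.QtWidgets import QApplication, QComboBox

app = QApplication(sys.argv)

cuda_versions = {"cu117": "CUDA 11.7", "cu116": "CUDA 11.6"}
box = QComboBox()
for tag, label in cuda_versions.items():
    box.addItem(label, tag)                 # visible label plus hidden user data

box.setCurrentText(cuda_versions["cu116"])  # restore a saved preference
print(box.currentText())                    # "CUDA 11.6", what the user sees
print(box.currentData())                    # "cu116", what gets stored in prefs
```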
20 changes: 19 additions & 1 deletion deps.py
@@ -51,7 +51,17 @@ def install_deps(model: str, book_fmt: str | None, notif: Any) -> None:
url = f"https://github.com/explosion/spacy-models/releases/download/{model}-{spacy_model_version}/{model}-{spacy_model_version}-py3-none-any.whl"
pip_install(model, spacy_model_version, url=url, notif=notif)
if model.endswith("_trf"):
from .config import prefs

pip_install("cupy-wheel", dep_versions["cupy"], notif=notif)
# PyTorch's Windows package on pypi.org is the CPU build, reinstall the CUDA build version
if iswindows or prefs["cuda"] == "cu116":
pip_install(
"torch",
"",
extra_index=f"https://download.pytorch.org/whl/{prefs['cuda']}",
notif=notif,
)

if ismacos and platform.machine() == "arm64":
pip_install("thinc-apple-ops", dep_versions["thinc-apple-ops"], notif=notif)
@@ -108,7 +118,11 @@ def get_runnable_pip(py_path: str) -> str:


def pip_install(
pkg: str, pkg_version: str, url: str | None = None, notif: Any = None
pkg: str,
pkg_version: str,
url: str | None = None,
extra_index: str | None = None,
notif: Any = None,
) -> None:
pattern = f"{pkg.replace('-', '_')}-{pkg_version}*"
if not any(LIBS_PATH.glob(pattern)):
@@ -136,6 +150,10 @@ def pip_install(
args.append(f"{pkg}=={pkg_version}")
else:
args.append(pkg)

if extra_index:
args.extend(["--extra-index-url", extra_index])

run_subprocess(args)


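With the new extra_index parameter, the torch branch above ends up handing pip a plain "torch" requirement (pkg_version is empty, so no == pin) plus PyTorch's CUDA-specific index. A rough reconstruction of the resulting invocation, leaving out the calibre-specific install-target options that get_runnable_pip and the rest of pip_install add:

```python
# Rough shape of the pip call produced for the torch reinstall (illustrative
# only; the real code also targets the plugin's library path and other options).
import subprocess
import sys

cuda_tag = "cu117"  # prefs["cuda"]
args = [sys.executable, "-m", "pip", "install", "torch"]
args.extend(["--extra-index-url", f"https://download.pytorch.org/whl/{cuda_tag}"])
subprocess.run(args, check=True)
```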
4 changes: 4 additions & 0 deletions translations/zh.po
@@ -48,6 +48,10 @@ msgstr ""
"需要 CUDA。如果 spaCy 具有帶有 ner 組件的書籍語言的 transformer 模型,則在創"
"建 X-Ray 文件時使用 GPU。"

#: config.py:134
msgid "CUDA version"
msgstr "CUDA 版本"

#: config.py:136
msgid "<a href=\"https://spacy.io/models/en\">spaCy model</a> size"
msgstr "<a href=“https://spacy.io/models/en”>spaCy 模型</a>大小"
4 changes: 4 additions & 0 deletions translations/zh_CN.po
@@ -48,6 +48,10 @@ msgstr ""
"需要 CUDA。如果 spaCy 具有带有 ner 组件的书籍语言的 transformer 模型,则在创"
"建 X-Ray 文件时使用 GPU。"

#: config.py:134
msgid "CUDA version"
msgstr "CUDA 版本"

#: config.py:136
msgid "<a href=\"https://spacy.io/models/en\">spaCy model</a> size"
msgstr "<a href=“https://spacy.io/models/en”>spaCy 模型</a>大小"
1 change: 1 addition & 0 deletions utils.py
@@ -175,3 +175,4 @@ class Prefs(TypedDict):
choose_format_manually: bool
wiktionary_gloss_lang: str
use_cpu: bool
cuda: str
