remove mandatory build of cuda extensions when using a GPU enabled machine
OlivierDehaene committed Jan 30, 2023
1 parent 36a507c commit a834e64
Showing 2 changed files with 1 addition and 4 deletions.
3 changes: 1 addition & 2 deletions setup.py
@@ -73,12 +73,11 @@
 from distutils.core import Command
 from pathlib import Path
 
-import torch.cuda
 from setuptools import find_packages, setup
 
 from torch.utils.cpp_extension import BuildExtension, CUDAExtension
 
-BUILD_EXTENSIONS = (os.environ.get("BUILD_EXTENSIONS", "False") == "True") | torch.cuda.is_available()
+BUILD_EXTENSIONS = os.environ.get("BUILD_EXTENSIONS", "False") == "True"
 
 # Remove stale transformers.egg-info directory to avoid https://github.com/pypa/pip/issues/5466
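
With `torch.cuda.is_available()` dropped from the gate, building the CUDA extensions is now purely opt-in through the `BUILD_EXTENSIONS` environment variable. Below is a minimal sketch of that opt-in pattern, assuming a placeholder extension name and source file rather than the ones actually used in this repository:

import os

from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension

# Build extensions only when the caller explicitly exports BUILD_EXTENSIONS=True;
# a GPU merely being present no longer forces the build.
BUILD_EXTENSIONS = os.environ.get("BUILD_EXTENSIONS", "False") == "True"

ext_modules = []
cmdclass = {}
if BUILD_EXTENSIONS:
    # Placeholder extension name and source path, for illustration only.
    ext_modules.append(
        CUDAExtension(name="custom_kernels", sources=["custom_kernels/attention.cu"])
    )
    cmdclass["build_ext"] = BuildExtension

setup(
    name="example_package",
    ext_modules=ext_modules,
    cmdclass=cmdclass,
)

On a GPU machine the build therefore becomes a deliberate step, presumably by exporting BUILD_EXTENSIONS=True before running `python setup.py build_ext --inplace` (the command quoted in the runtime error removed below).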
2 changes: 0 additions & 2 deletions src/transformers/models/bloom/modeling_bloom.py
@@ -338,8 +338,6 @@ def forward(
                 use_cache
             )
         else:
-            if torch.cuda.is_available():
-                raise ValueError("You must build the cuda kernel with: `python setup.py build_ext --inplace`")
             context_layer, present, attention_probs = self.compute_attention(
                 fused_qkv=fused_qkv,
                 layer_past=layer_past,
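
Dropping the hard failure means a CUDA machine that never built the kernel now falls through to the existing `self.compute_attention` call instead of raising. The sketch below illustrates that optional-kernel dispatch pattern in isolation; the module name `fused_attention_cuda` and the free-standing functions are hypothetical stand-ins, not this repository's actual API:

import torch

try:
    # Hypothetical compiled extension name, for illustration only.
    import fused_attention_cuda
    HAS_FUSED_KERNEL = True
except ImportError:
    HAS_FUSED_KERNEL = False


def compute_attention(fused_qkv, layer_past=None):
    # Stand-in for the non-fused path (self.compute_attention in modeling_bloom.py).
    ...


def attention_forward(fused_qkv, layer_past=None):
    if HAS_FUSED_KERNEL and torch.cuda.is_available():
        # Fast path: use the compiled CUDA kernel when it was built and a GPU is present.
        return fused_attention_cuda.forward(fused_qkv, layer_past)
    # Fallback: before this commit, reaching here on a CUDA machine raised a ValueError;
    # now the slower path simply runs instead.
    return compute_attention(fused_qkv=fused_qkv, layer_past=layer_past)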
