delay torch import for inference compatibility check (microsoft#2167)
jeffra authored Aug 2, 2022
1 parent 1a71e77 commit 776e369
Showing 2 changed files with 7 additions and 3 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/formatting.yml
@@ -26,8 +26,6 @@ jobs:
         run: |
           which python
           python --version
-          pip install torch==1.9.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
-          python -c "import torch; print('torch:', torch.__version__, torch)"
       - name: Install deepspeed
         run: |
8 changes: 7 additions & 1 deletion op_builder/transformer_inference.py
@@ -1,4 +1,3 @@
-import torch
 from .builder import CUDAOpBuilder, installed_cuda_version


@@ -14,6 +13,13 @@ def absolute_name(self):
         return f'deepspeed.ops.transformer.inference.{self.NAME}_op'

     def is_compatible(self, verbose=True):
+        try:
+            import torch
+        except ImportError:
+            self.warning(
+                "Please install torch if trying to pre-compile inference kernels")
+            return False
+
         cuda_okay = True
         if not self.is_rocm_pytorch() and torch.cuda.is_available():
             sys_cuda_major, _ = installed_cuda_version()
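For context, the change defers the torch import until is_compatible() actually runs, so merely importing op_builder.transformer_inference no longer requires torch to be installed (which is why the formatting workflow above no longer pre-installs it). Below is a minimal, self-contained sketch of that lazy-import pattern; the SimpleInferenceBuilder class and its warning helper are simplified stand-ins for illustration, not DeepSpeed's actual CUDAOpBuilder hierarchy.

    # Minimal sketch (illustrative only): defer the torch import until the
    # compatibility check runs, so importing this module never requires torch.
    class SimpleInferenceBuilder:
        NAME = "transformer_inference"

        def warning(self, msg):
            # Simplified stand-in for DeepSpeed's OpBuilder.warning helper.
            print(f"[WARNING] {msg}")

        def is_compatible(self, verbose=True):
            try:
                import torch  # imported lazily, only when the check is invoked
            except ImportError:
                self.warning("Please install torch if trying to pre-compile inference kernels")
                return False

            # With torch available, further checks (CUDA/ROCm versions, GPU
            # architecture, etc.) would follow here; this sketch just reports
            # whether a CUDA device is visible.
            return torch.cuda.is_available()


    if __name__ == "__main__":
        print(SimpleInferenceBuilder().is_compatible())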
