1 parent 6f1df80 commit a88bb9b
vllm/config.py
@@ -353,7 +353,8 @@ def _verify_cache_dtype(self) -> None:
         elif self.cache_dtype == "fp8":
             if not is_hip():
                 nvcc_cuda_version = get_nvcc_cuda_version()
-                if nvcc_cuda_version < Version("11.8"):
+                if nvcc_cuda_version is not None \
+                        and nvcc_cuda_version < Version("11.8"):
                     raise ValueError(
                         "FP8 is not supported when cuda version is "
                         "lower than 11.8.")