
Commit 4aa169a

mgoin and m-misiura authored and committed
[CI] Add SM120 to the Dockerfile (vllm-project#19794)
Signed-off-by: mgoin <mgoin64@gmail.com>
1 parent cc01461 commit 4aa169a

1 file changed (+3, −3 lines)


docker/Dockerfile

Lines changed: 3 additions & 3 deletions
@@ -77,7 +77,7 @@ RUN --mount=type=cache,target=/root/.cache/uv \
 # can be useful for both `dev` and `test`
 # explicitly set the list to avoid issues with torch 2.2
 # see https://github.com/pytorch/pytorch/pull/123243
-ARG torch_cuda_arch_list='7.0 7.5 8.0 8.9 9.0 10.0+PTX'
+ARG torch_cuda_arch_list='7.0 7.5 8.0 8.9 9.0 10.0 12.0'
 ENV TORCH_CUDA_ARCH_LIST=${torch_cuda_arch_list}
 # Override the arch list for flash-attn to reduce the binary size
 ARG vllm_fa_cmake_gpu_arches='80-real;90-real'
@@ -244,7 +244,7 @@ RUN --mount=type=bind,from=build,src=/workspace/dist,target=/vllm-workspace/dist
 
 # If we need to build FlashInfer wheel before its release:
 # $ # Note we remove 7.0 from the arch list compared to the list below, since FlashInfer only supports sm75+
-# $ export TORCH_CUDA_ARCH_LIST='7.5 8.0 8.9 9.0a 10.0a'
+# $ export TORCH_CUDA_ARCH_LIST='7.5 8.0 8.9 9.0a 10.0a 12.0'
 # $ git clone https://github.com/flashinfer-ai/flashinfer.git --recursive
 # $ cd flashinfer
 # $ git checkout v0.2.6.post1
@@ -261,7 +261,7 @@ if [ "$TARGETPLATFORM" != "linux/arm64" ]; then \
 if [[ "$CUDA_VERSION" == 12.8* ]]; then \
     uv pip install --system https://download.pytorch.org/whl/cu128/flashinfer/flashinfer_python-0.2.6.post1%2Bcu128torch2.7-cp39-abi3-linux_x86_64.whl; \
 else \
-    export TORCH_CUDA_ARCH_LIST='7.5 8.0 8.9 9.0a 10.0a' && \
+    export TORCH_CUDA_ARCH_LIST='7.5 8.0 8.9 9.0a 10.0a 12.0' && \
 git clone https://github.com/flashinfer-ai/flashinfer.git --single-branch --branch v0.2.6.post1 --recursive && \
 # Needed to build AOT kernels
 (cd flashinfer && \
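
For context on what the change does: adding 12.0 to TORCH_CUDA_ARCH_LIST makes the image compile native kernels for SM120 (Blackwell consumer GPUs) rather than relying on the PTX JIT path that the old 10.0+PTX entry provided for newer architectures. A minimal sketch, not part of this commit, of how one could verify inside the built image that the installed PyTorch covers the GPU it is running on (assumes a visible CUDA device):

import torch

# Minimal sketch (not from this commit): check that the local GPU's compute
# capability is covered by the CUDA arch list this PyTorch build was compiled for.
if torch.cuda.is_available():
    major, minor = torch.cuda.get_device_capability(0)  # e.g. (12, 0) on an SM120 card
    built = torch.cuda.get_arch_list()                  # e.g. ['sm_70', 'sm_75', ..., 'sm_120']
    wanted = f"sm_{major}{minor}"
    print(f"device capability: {wanted}")
    print(f"arch list in this build: {built}")
    if wanted not in built:
        print("warning: no native kernels for this GPU in the current PyTorch build")
else:
    print("no CUDA device visible")

The same idea applies to the extensions built in this Dockerfile stage, since TORCH_CUDA_ARCH_LIST is what PyTorch's build system uses to derive the nvcc gencode flags.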
