Commit 5255d99

[ROCm] Dockerfile fix for flash-attention build (#2885)
1 parent 4f2ad11 commit 5255d99

File tree

1 file changed: +3 -3 lines changed

Dockerfile.rocm

Lines changed: 3 additions & 3 deletions
@@ -56,10 +56,10 @@ ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/rocm/lib/:/libtorch/lib:
 ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/libtorch/include:/libtorch/include/torch/csrc/api/include/:/opt/rocm/include/:
 
 # Install ROCm flash-attention
-RUN if [ "$BUILD_FA" == "1" ]; then \
+RUN if [ "$BUILD_FA" = "1" ]; then \
     mkdir libs \
     && cd libs \
-    && git clone https://github.com/ROCmSoftwarePlatform/flash-attention.git \
+    && git clone https://github.com/ROCm/flash-attention.git \
     && cd flash-attention \
     && git checkout ${FA_BRANCH} \
     && git submodule update --init \
@@ -83,7 +83,7 @@ RUN if [ "$BASE_IMAGE" = "rocm/pytorch:rocm6.0_ubuntu20.04_py3.9_pytorch_2.1.1"
 RUN cd /app \
     && cd vllm \
     && pip install -U -r requirements-rocm.txt \
-    && if [ "$BUILD_FA" == "1" ]; then \
+    && if [ "$BUILD_FA" = "1" ]; then \
        bash patch_xformers.rocm.sh; fi \
     && patch /opt/rocm/include/hip/amd_detail/amd_hip_bf16.h /app/vllm/rocm_patch/rocm_bf16.patch \
     && python3 setup.py install \
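The two substantive changes are the comparison operator and the clone URL. The `==` form inside `[ ... ]` is a bash extension; Docker runs RUN commands through /bin/sh, which on Ubuntu-based images is often dash rather than bash, so the portable single `=` is safer. The clone URL is updated to the ROCm GitHub organization, where the flash-attention fork now lives. The following stand-alone sh sketch (not part of the commit; the BUILD_FA default shown here is illustrative) mirrors the fixed test:

    #!/bin/sh
    # Under dash, `[ "$BUILD_FA" == "1" ]` reports an "unexpected operator"
    # error and the branch is silently skipped; the POSIX `=` below behaves
    # the same under dash, ash, and bash.
    BUILD_FA="${BUILD_FA:-1}"
    if [ "$BUILD_FA" = "1" ]; then
        echo "building ROCm flash-attention from https://github.com/ROCm/flash-attention"
    fi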
