diff --git a/Dockerfile.rocm b/Dockerfile.rocm
index e0ef4a0f4131a..54ae06be6e101 100644
--- a/Dockerfile.rocm
+++ b/Dockerfile.rocm
@@ -56,10 +56,10 @@ ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/rocm/lib/:/libtorch/lib:
 ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/libtorch/include:/libtorch/include/torch/csrc/api/include/:/opt/rocm/include/:
 
 # Install ROCm flash-attention
-RUN if [ "$BUILD_FA" == "1" ]; then \
+RUN if [ "$BUILD_FA" = "1" ]; then \
     mkdir libs \
     && cd libs \
-    && git clone https://github.com/ROCmSoftwarePlatform/flash-attention.git \
+    && git clone https://github.com/ROCm/flash-attention.git \
     && cd flash-attention \
     && git checkout ${FA_BRANCH} \
     && git submodule update --init \
@@ -83,7 +83,7 @@ RUN if [ "$BASE_IMAGE" = "rocm/pytorch:rocm6.0_ubuntu20.04_py3.9_pytorch_2.1.1"
 RUN cd /app \
     && cd vllm \
     && pip install -U -r requirements-rocm.txt \
-    && if [ "$BUILD_FA" == "1" ]; then \
+    && if [ "$BUILD_FA" = "1" ]; then \
        bash patch_xformers.rocm.sh; fi \
     && patch /opt/rocm/include/hip/amd_detail/amd_hip_bf16.h /app/vllm/rocm_patch/rocm_bf16.patch \
     && python3 setup.py install \
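
For context on the two `==` → `=` changes: Docker runs `RUN` steps through `/bin/sh -c`, and `==` inside a single-bracket `[ ... ]` test is a bash extension rather than POSIX, so `=` is the portable spelling. A minimal sketch of the portable form (the `BUILD_FA` assignment below is illustrative only, not taken from the Dockerfile):

```sh
#!/bin/sh
# POSIX test(1) only specifies `=` for string equality; `==` happens to work
# under bash but may be rejected by dash or other /bin/sh implementations.
BUILD_FA="1"   # illustrative value only

if [ "$BUILD_FA" = "1" ]; then
    echo "building ROCm flash-attention"
else
    echo "skipping flash-attention build"
fi
```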