From e44fe8a717253501beef8a88fec4165009c60419 Mon Sep 17 00:00:00 2001
From: James Xu
Date: Thu, 5 Dec 2024 19:20:57 -0500
Subject: [PATCH] Use flashinfer nightly

---
 scripts/ci_install_dependency.sh | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/scripts/ci_install_dependency.sh b/scripts/ci_install_dependency.sh
index 787cc8b952..95127048d9 100755
--- a/scripts/ci_install_dependency.sh
+++ b/scripts/ci_install_dependency.sh
@@ -1,16 +1,17 @@
 # Install the dependency in CI.
 # Use repo from environment variable, passed from GitHub Actions
-FLASHINFER_REPO="${FLASHINFER_REPO:-https://flashinfer.ai/whl/cu121/torch2.4}"
+# FLASHINFER_REPO="${FLASHINFER_REPO:-https://flashinfer.ai/whl/cu121/torch2.4}"

 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 bash "${SCRIPT_DIR}/killall_sglang.sh"

 pip install --upgrade pip
-pip install -e "python[all]" --find-links https://flashinfer.ai/whl/cu121/torch2.4/flashinfer/
+# pip install -e "python[all]" --find-links https://flashinfer.ai/whl/cu121/torch2.4/flashinfer/

 # Force reinstall flashinfer
-pip install flashinfer -i ${FLASHINFER_REPO} --force-reinstall
+# pip install flashinfer -i ${FLASHINFER_REPO} --force-reinstall
+pip install https://github.com/flashinfer-ai/flashinfer-nightly/releases/download/0.1.6%2B6819a0f/flashinfer-0.1.6+6819a0f.cu121torch2.4-cp310-cp310-linux_x86_64.whl --force-reinstall

 pip install transformers==4.45.2 sentence_transformers accelerate peft
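
Note (not part of the patch): a minimal sketch of how the hard-coded nightly wheel could remain overridable from the GitHub Actions workflow, mirroring the old FLASHINFER_REPO pattern. The FLASHINFER_WHEEL variable name is an assumption for illustration, not something this patch introduces.

# Sketch only: FLASHINFER_WHEEL is a hypothetical variable, not part of this patch.
FLASHINFER_WHEEL="${FLASHINFER_WHEEL:-https://github.com/flashinfer-ai/flashinfer-nightly/releases/download/0.1.6%2B6819a0f/flashinfer-0.1.6+6819a0f.cu121torch2.4-cp310-cp310-linux_x86_64.whl}"
pip install "${FLASHINFER_WHEEL}" --force-reinstall
# Confirm which flashinfer build actually landed in the CI environment.
pip show flashinfer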