Created
April 26, 2024 05:25
-
-
Save j40903272/77d27a83a7f41a01b39f132ca8b4d41b to your computer and use it in GitHub Desktop.
# Install vLLM with CUDA 11.8.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
# Install vLLM 0.4.0 built against CUDA 11.8 into a dedicated conda env,
# then re-pin torch and xformers to matching +cu118 builds so all three
# agree on the CUDA runtime.
set -euo pipefail

# vLLM 0.4.0 ships cu118 wheels only on its GitHub release page (PyPI wheels
# are built for CUDA 12), hence the direct-URL install below.
export VLLM_VERSION=0.4.0
export PYTHON_VERSION=310   # CPython tag for the cp310 wheel — must match the env's python=3.10

conda create -n vllm python=3.10 -y

# 'conda activate' only works after the conda shell hook is loaded; in a
# non-interactive script that hook is not present by default.
# NOTE(review): assumes 'conda' is on PATH — confirm, or source conda.sh first.
eval "$(conda shell.bash hook)"
conda activate vllm

# Quote the URL: it contains '+' and parameter expansions.
pip install "https://github.com/vllm-project/vllm/releases/download/v${VLLM_VERSION}/vllm-${VLLM_VERSION}+cu118-cp${PYTHON_VERSION}-cp${PYTHON_VERSION}-manylinux1_x86_64.whl"

# Re-install PyTorch with CUDA 11.8 (the vLLM wheel's install may have pulled
# in a cu12 torch; force the cu118 build from the PyTorch index).
pip uninstall torch -y
pip install "torch==2.1.2+cu118" --index-url https://download.pytorch.org/whl/cu118

# Re-install xFormers with CUDA 11.8 for the same reason.
pip uninstall xformers -y
pip install "xformers==0.0.23.post1+cu118" --index-url https://download.pytorch.org/whl/cu118
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment