Mirror of https://github.com/RVC-Boss/GPT-SoVITS.git, synced 2025-04-05 12:38:35 +08:00
Update support for AMD GPUs (#2076)
* Added the instruction for AMD GPU in English
* Added the instruction for AMD GPU in Chinese
* Updated install.sh so that it now checks whether the user is running CUDA or ROCm
* Restored the original English README, since install.sh has been updated
* Restored the Chinese README
* Changed the NVIDIA GPU check from nvcc to nvidia-smi (see the sketch before the diff below)
parent c0ce55a132 · commit b65ea9181e
install.sh (59 changed lines)
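The core behavioral change is how an NVIDIA GPU is detected before a PyTorch build is chosen. Per the commit message, the check moved from nvcc to nvidia-smi; a minimal before/after sketch, assuming the earlier script probed for the CUDA compiler with "command -v nvcc" (the old form itself is not shown in this diff):

# Old approach (assumed): look for the CUDA compiler. nvcc only exists when the
# full CUDA toolkit is installed, so a machine with only the NVIDIA driver
# would be misclassified as non-CUDA.
if command -v nvcc &> /dev/null; then
    USE_CUDA=true
else
    USE_CUDA=false
fi

# New approach (from this commit): look for nvidia-smi, which ships with the
# NVIDIA driver itself, so driver-only machines are detected correctly.
if command -v nvidia-smi &> /dev/null; then
    USE_CUDA=true
    echo "CUDA found."
else
    echo "CUDA not found."
    USE_CUDA=false
fi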
@@ -2,8 +2,13 @@
 
 # 安装构建工具
 # Install build tools
+echo "Installing GCC..."
 conda install -c conda-forge gcc=14
+
+echo "Installing G++..."
 conda install -c conda-forge gxx
+
+echo "Installing ffmpeg and cmake..."
 conda install ffmpeg cmake
 
 # 设置编译环境
@@ -12,10 +17,60 @@ export CMAKE_MAKE_PROGRAM="$CONDA_PREFIX/bin/cmake"
 export CC="$CONDA_PREFIX/bin/gcc"
 export CXX="$CONDA_PREFIX/bin/g++"
 
-conda install pytorch==2.1.1 torchvision==0.16.1 torchaudio==2.1.1 pytorch-cuda=11.8 -c pytorch -c nvidia
+echo "Checking for CUDA installation..."
+if command -v nvidia-smi &> /dev/null; then
+    USE_CUDA=true
+    echo "CUDA found."
+else
+    echo "CUDA not found."
+    USE_CUDA=false
+fi
+
+
+if [ "$USE_CUDA" = false ]; then
+    echo "Checking for ROCm installation..."
+    if [ -d "/opt/rocm" ]; then
+        USE_ROCM=true
+        echo "ROCm found."
+        if grep -qi "microsoft" /proc/version; then
+            echo "You are running WSL."
+            IS_WSL=true
+        else
+            echo "You are NOT running WSL."
+            IS_WSL=false
+        fi
+    else
+        echo "ROCm not found."
+        USE_ROCM=false
+    fi
+fi
+
+if [ "$USE_CUDA" = true ]; then
+    echo "Installing PyTorch with CUDA support..."
+    conda install pytorch==2.1.1 torchvision==0.16.1 torchaudio==2.1.1 pytorch-cuda=11.8 -c pytorch -c nvidia
+elif [ "$USE_ROCM" = true ] ; then
+    echo "Installing PyTorch with ROCm support..."
+    pip install torch==2.5.1 torchvision==0.20.1 torchaudio==2.5.1 --index-url https://download.pytorch.org/whl/rocm6.2
+else
+    echo "Installing PyTorch for CPU..."
+    conda install pytorch==2.1.1 torchvision==0.16.1 torchaudio==2.1.1 cpuonly -c pytorch
+fi
+
+
+echo "Installing Python dependencies from requirements.txt..."
 
 # 刷新环境
 # Refresh environment
 hash -r
+
+pip install -r requirements.txt
+
+if [ "$USE_ROCM" = true ] && [ "$IS_WSL" = true ] ; then
+    echo "Update to WSL compatible runtime lib..."
+    location=`pip show torch | grep Location | awk -F ": " '{print $2}'`
+    cd ${location}/torch/lib/
+    rm libhsa-runtime64.so*
+    cp /opt/rocm/lib/libhsa-runtime64.so.1.2 libhsa-runtime64.so
+fi
+
+echo "Installation completed successfully!"
-
-pip install -r requirements.txt
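For completeness, a minimal usage sketch of how the updated installer would typically be run; the conda environment name and Python version below are illustrative assumptions, not part of this commit:

# Hypothetical setup: create and activate a conda environment first
# (name and Python version are placeholders), then run the script
# from the repository root.
conda create -n GPTSoVits python=3.9 -y
conda activate GPTSoVits
bash install.sh

With this change, a machine where nvidia-smi is on PATH takes the CUDA branch; a machine with /opt/rocm present falls through to the ROCm 6.2 wheel (plus the libhsa-runtime64.so swap when /proc/version mentions Microsoft, i.e. WSL); anything else gets the CPU-only PyTorch build.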