GPT-SoVITS (mirror of https://github.com/RVC-Boss/GPT-SoVITS.git)

Commit 905c602edc: "make image smaller"
Parent: fd2cacdc1f
.github/workflows/docker-publish.yaml (vendored): 20 lines changed
@@ -26,19 +26,19 @@ jobs:
        include:
          - cuda_version: 124
            lite: true
            cuda_base: runtime
            torch_base: lite
            tag_prefix: cu124-lite
          - cuda_version: 124
            lite: false
            cuda_base: devel
            torch_base: full
            tag_prefix: cu124
          - cuda_version: 128
            lite: true
            cuda_base: runtime
            torch_base: lite
            tag_prefix: cu128-lite
          - cuda_version: 128
            lite: false
            cuda_base: devel
            torch_base: full
            tag_prefix: cu128

    steps:

@@ -98,7 +98,7 @@ jobs:
          platforms: linux/amd64
          build-args: |
            LITE=${{ matrix.lite }}
            CUDA_BASE=${{ matrix.cuda_base }}
            TORCH_BASE=${{ matrix.torch_base }}
            CUDA_VERSION=${{ matrix.cuda_version }}
            WORKFLOW=true
          tags: |

@@ -113,19 +113,19 @@ jobs:
        include:
          - cuda_version: 124
            lite: true
            cuda_base: runtime
            torch_base: lite
            tag_prefix: cu124-lite
          - cuda_version: 124
            lite: false
            cuda_base: devel
            torch_base: full
            tag_prefix: cu124
          - cuda_version: 128
            lite: true
            cuda_base: runtime
            torch_base: lite
            tag_prefix: cu128-lite
          - cuda_version: 128
            lite: false
            cuda_base: devel
            torch_base: full
            tag_prefix: cu128

    steps:

@@ -185,7 +185,7 @@ jobs:
          platforms: linux/arm64
          build-args: |
            LITE=${{ matrix.lite }}
            CUDA_BASE=${{ matrix.cuda_base }}
            TORCH_BASE=${{ matrix.torch_base }}
            CUDA_VERSION=${{ matrix.cuda_version }}
            WORKFLOW=true
          tags: |
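For reference, a single matrix entry can be reproduced outside the workflow with a plain docker build. This is a sketch, not part of the commit: the tag gpt-sovits:cu124-lite is a placeholder (the workflow's real tags: block is truncated in these hunks), and the build-arg values are copied from the cu124-lite matrix entry above.

    # Sketch: local equivalent of the cu124-lite linux/amd64 matrix entry.
    # "gpt-sovits:cu124-lite" is a placeholder tag, not the workflow's real tag.
    docker buildx build \
      --platform linux/amd64 \
      --build-arg LITE=true \
      --build-arg CUDA_BASE=runtime \
      --build-arg TORCH_BASE=lite \
      --build-arg CUDA_VERSION=124 \
      --build-arg WORKFLOW=true \
      -t gpt-sovits:cu124-lite \
      .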
@@ -53,6 +53,14 @@ source "$HOME/anaconda3/etc/profile.d/conda.sh"

"$HOME/anaconda3/bin/conda" install gcc=14 gxx ffmpeg cmake make unzip -q -y

if [ "$CUDA_VERSION" = "12.8" ]; then
    "$HOME/anaconda3/bin/pip" install torch torchaudio --no-cache-dir --index-url https://download.pytorch.org/whl/cu128
elif [ "$CUDA_VERSION" = "12.4" ]; then
    "$HOME/anaconda3/bin/pip" install torch==2.5.1 torchaudio==2.5.1 --no-cache-dir --index-url https://download.pytorch.org/whl/cu124
fi

"$HOME/anaconda3/bin/pip" cache purge

rm $LOG_PATH

rm -rf "$HOME/anaconda3/pkgs"
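A quick way to confirm the installed wheel matches the intended CUDA build (a minimal check under the same assumptions as the script above, not part of the commit):

    # Uses the Anaconda interpreter installed by the script above.
    "$HOME/anaconda3/bin/python" -c "import torch; print(torch.__version__, torch.version.cuda)"
    # Expected output resembles "2.5.1+cu124 12.4" for the CUDA 12.4 branch.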
Docker/install_wrapper.sh (new file, 33 lines)
@@ -0,0 +1,33 @@
#!/bin/bash

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

cd "$SCRIPT_DIR" || exit 1

cd .. || exit 1

set -e

source "$HOME/anaconda3/etc/profile.d/conda.sh"

mkdir GPT_SoVITS
mkdir GPT_SoVITS/text

ln -s /workspace/models/pretrained_models /workspace/GPT-SoVITS/GPT_SoVITS/pretrained_models
ln -s /workspace/models/G2PWModel /workspace/GPT-SoVITS/GPT_SoVITS/text/G2PWModel

bash install.sh --device "CU${CUDA_VERSION}" --source HF

pip cache purge

pip show torch

rm -rf /tmp/* /var/tmp/*

rm -rf "$HOME/anaconda3/pkgs"
mkdir "$HOME/anaconda3/pkgs"

rm -rf /root/.conda /root/.cache
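The symlinks above assume the torch-base image already ships the model folders under /workspace/models. A minimal sanity check after the wrapper runs (an assumption-laden sketch, not part of the commit):

    # Dereference the symlinks; ls prints an error and exits nonzero if a target is missing.
    ls -lL /workspace/GPT-SoVITS/GPT_SoVITS/pretrained_models
    ls -lL /workspace/GPT-SoVITS/GPT_SoVITS/text/G2PWModel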
@@ -1,80 +0,0 @@
#!/bin/bash

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

cd "$SCRIPT_DIR" || exit 1

cd .. || exit 1

set -e

WORKFLOW=${WORKFLOW:-"false"}
LITE=${LITE:-"false"}

if [ "$WORKFLOW" = "true" ]; then
    WGET_CMD="wget -nv --tries=25 --wait=5 --read-timeout=40 --retry-on-http-error=404"
else
    WGET_CMD="wget --tries=25 --wait=5 --read-timeout=40 --retry-on-http-error=404"
fi

USE_FUNASR=false
USE_FASTERWHISPER=false

if [ "$LITE" = "true" ]; then
    USE_FUNASR=true
    USE_FASTERWHISPER=false
else
    USE_FUNASR=true
    USE_FASTERWHISPER=true
fi

if [ "$USE_FUNASR" = "true" ]; then
    echo "Downloading funasr..." &&
        $WGET_CMD "https://huggingface.co/XXXXRT/GPT-SoVITS-Pretrained/resolve/main/funasr.zip" &&
        unzip -q funasr.zip -d tools/asr/models/ &&
        rm -rf funasr.zip
else
    echo "Skipping funasr download"
fi

if [ "$USE_FASTERWHISPER" = "true" ]; then
    echo "Downloading faster-whisper..." &&
        $WGET_CMD "https://huggingface.co/XXXXRT/GPT-SoVITS-Pretrained/resolve/main/faster-whisper.zip" &&
        unzip -q faster-whisper.zip -d tools/asr/models/ &&
        rm -rf faster-whisper.zip
else
    echo "Skipping faster-whisper download"
fi

source "$HOME/anaconda3/etc/profile.d/conda.sh"

if [ "$CUDA_VERSION" = 128 ]; then
    pip install torch torchaudio --no-cache-dir --index-url https://download.pytorch.org/whl/cu128
elif [ "$CUDA_VERSION" = 124 ]; then
    pip install torch==2.5.1 torchaudio==2.5.1 --no-cache-dir --index-url https://download.pytorch.org/whl/cu124
fi

if [ "$LITE" = "true" ]; then
    bash install.sh --device "CU${CUDA_VERSION}" --source HF
elif [ "$LITE" = "false" ]; then
    bash install.sh --device "CU${CUDA_VERSION}" --source HF --download-uvr5
else
    exit 1
fi

mkdir -p /workspace/model
mv /workspace/GPT-SoVITS/GPT_SoVITS/pretrained_models /workspace/model/
mv /workspace/GPT-SoVITS/tools/asr/models /workspace/model/
mv /workspace/GPT-SoVITS/tools/uvr5/uvr5_weights /workspace/model/

pip cache purge

pip show torch

rm -rf /tmp/* /var/tmp/*

rm -rf "$HOME/anaconda3/pkgs"
mkdir "$HOME/anaconda3/pkgs"

rm -rf /root/.conda /root/.cache
Dockerfile: 32 lines changed
@@ -1,19 +1,21 @@
ARG CUDA_VERSION=124
ARG CUDA_BASE=runtime
ARG TORCH_BASE=full

FROM xxxxrt666/cuda-base:cu${CUDA_VERSION}-${CUDA_BASE}
FROM xxxxrt666/torch-base:cu${CUDA_VERSION}-${TORCH_BASE}

LABEL maintainer="XXXXRT"
LABEL version="V4"
LABEL description="Docker image for GPT-SoVITS"

ARG CUDA_VERSION=12.4
ARG CUDA_VERSION=124

ENV CUDA_VERSION=${CUDA_VERSION}

SHELL ["/bin/bash", "-c"]

WORKDIR /workspace/GPT-SoVITS

COPY . /workspace/GPT-SoVITS
COPY Docker /workspace/GPT-SoVITS/

ARG LITE=false
ENV LITE=${LITE}

@@ -24,19 +26,15 @@ ENV WORKFLOW=${WORKFLOW}
ARG TARGETPLATFORM
ENV TARGETPLATFORM=${TARGETPLATFORM}

ENV HOME="/root"

RUN bash Docker/anaconda_install.sh

ENV PATH="$HOME/anaconda3/bin:$PATH"
COPY extra-req.txt /workspace/GPT-SoVITS/

SHELL ["/bin/bash", "-c"]
COPY requirements.txt /workspace/GPT-SoVITS/

ENV PATH="/usr/local/cuda/bin:$PATH"
ENV CUDA_HOME="/usr/local/cuda"
ENV MAKEFLAGS="-j$(nproc)"
COPY install.sh /workspace/GPT-SoVITS/

RUN bash Docker/setup.sh
RUN bash Docker/install_wrapper.sh

EXPOSE 9871 9872 9873 9874 9880

@@ -44,11 +42,15 @@ ENV PYTHONPATH="/workspace/GPT-SoVITS"

RUN conda init bash && echo "conda activate base" >> ~/.bashrc

COPY . /workspace/GPT-SoVITS

CMD ["/bin/bash", "-c", "\
    rm -rf /workspace/GPT-SoVITS/GPT_SoVITS/pretrained_models && \
    rm -rf /workspace/GPT-SoVITS/GPT_SoVITS/text/G2PWModel && \
    rm -rf /workspace/GPT-SoVITS/tools/asr/models && \
    rm -rf /workspace/GPT-SoVITS/tools/uvr5/uvr5_weights && \
    ln -s /workspace/model/pretrained_models /workspace/GPT-SoVITS/GPT_SoVITS/pretrained_models && \
    ln -s /workspace/model/models /workspace/GPT-SoVITS/tools/asr/models && \
    ln -s /workspace/model/uvr5_weights /workspace/GPT-SoVITS/tools/uvr5/uvr5_weights && \
    ln -s /workspace/models/pretrained_models /workspace/GPT-SoVITS/GPT_SoVITS/pretrained_models && \
    ln -s /workspace/models/G2PWModel /workspace/GPT-SoVITS/GPT_SoVITS/text/G2PWModel && \
    ln -s /workspace/models/asr_models /workspace/GPT-SoVITS/tools/asr/models && \
    ln -s /workspace/models/uvr5_weights /workspace/GPT-SoVITS/tools/uvr5/uvr5_weights && \
    exec bash"]
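Once built, the image can be started on the ports listed in EXPOSE. A minimal sketch, not part of the commit: gpt-sovits:cu124-lite is the same placeholder tag used earlier, and --gpus all assumes the NVIDIA Container Toolkit is installed on the host.

    docker run --rm -it \
      --gpus all \
      -p 9871-9874:9871-9874 \
      -p 9880:9880 \
      gpt-sovits:cu124-lite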
@@ -13,6 +13,7 @@ services:
      volumes:
        - .:/workspace/GPT-SoVITS
        - /dev/null:/workspace/GPT-SoVITS/pretrained_models
        - /dev/null:/workspace/GPT-SoVITS/text/G2PWModel
        - /dev/null:/workspace/tools/asr/models
        - /dev/null:/workspace/tools/uvr5/uvr5_weights
      environment:

@@ -34,6 +35,7 @@ services:
      volumes:
        - .:/workspace/GPT-SoVITS
        - /dev/null:/workspace/GPT-SoVITS/pretrained_models
        - /dev/null:/workspace/GPT-SoVITS/text/G2PWModel
        - /dev/null:/workspace/tools/asr/models
        - /dev/null:/workspace/tools/uvr5/uvr5_weights
      environment:

@@ -55,6 +57,7 @@ services:
      volumes:
        - .:/workspace/GPT-SoVITS
        - /dev/null:/workspace/GPT-SoVITS/pretrained_models
        - /dev/null:/workspace/GPT-SoVITS/text/G2PWModel
        - /dev/null:/workspace/tools/asr/models
        - /dev/null:/workspace/tools/uvr5/uvr5_weights
      environment:

@@ -76,6 +79,7 @@ services:
      volumes:
        - .:/workspace/GPT-SoVITS
        - /dev/null:/workspace/GPT-SoVITS/pretrained_models
        - /dev/null:/workspace/GPT-SoVITS/text/G2PWModel
        - /dev/null:/workspace/tools/asr/models
        - /dev/null:/workspace/tools/uvr5/uvr5_weights
      environment:
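The /dev/null bind mounts keep the (empty) model directories of the source checkout from shadowing the symlinks created by the image's CMD above. A minimal sketch of bringing up one compose service, not part of the commit: the service name gpt-sovits is hypothetical, since the real names fall outside the hunks shown here.

    docker compose up -d gpt-sovits
    docker compose logs -f gpt-sovits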