From 31802947108cb12d708404fb621f287fd5d13716 Mon Sep 17 00:00:00 2001 From: XXXXRT666 <157766680+XXXXRT666@users.noreply.github.com> Date: Tue, 20 Feb 2024 15:57:58 +0000 Subject: [PATCH 1/4] Update config.py Change the inference device for Mac to accelerate inference and reduce memory leaks --- config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.py b/config.py index 3e9e951..caaadd4 100644 --- a/config.py +++ b/config.py @@ -20,7 +20,7 @@ python_exec = sys.executable or "python" if torch.cuda.is_available(): infer_device = "cuda" elif torch.backends.mps.is_available(): - infer_device = "mps" + infer_device = "cpu" else: infer_device = "cpu" From 861658050b6eab32ce6a34cfee37fc63a53a4ae7 Mon Sep 17 00:00:00 2001 From: XXXXRT666 <157766680+XXXXRT666@users.noreply.github.com> Date: Tue, 20 Feb 2024 16:03:08 +0000 Subject: [PATCH 2/4] Update inference_webui.py Change inference device to accelerate inference on Mac and reduce memory leaks --- GPT_SoVITS/inference_webui.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPT_SoVITS/inference_webui.py b/GPT_SoVITS/inference_webui.py index c427b25..a046776 100644 --- a/GPT_SoVITS/inference_webui.py +++ b/GPT_SoVITS/inference_webui.py @@ -73,7 +73,7 @@ os.environ['PYTORCH_ENABLE_MPS_FALLBACK'] = '1' # 确保直接启动推理UI时 if torch.cuda.is_available(): device = "cuda" elif torch.backends.mps.is_available(): - device = "mps" + device = "cpu" else: device = "cpu" From 220367f90c85f6dc20751c4a586320c463b28406 Mon Sep 17 00:00:00 2001 From: XXXXRT666 <157766680+XXXXRT666@users.noreply.github.com> Date: Wed, 21 Feb 2024 01:15:11 +0000 Subject: [PATCH 3/4] Update inference_webui.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 精简代码 --- GPT_SoVITS/inference_webui.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/GPT_SoVITS/inference_webui.py b/GPT_SoVITS/inference_webui.py index a046776..3a4bfb3 100644 --- a/GPT_SoVITS/inference_webui.py +++ 
b/GPT_SoVITS/inference_webui.py @@ -72,8 +72,6 @@ os.environ['PYTORCH_ENABLE_MPS_FALLBACK'] = '1' # 确保直接启动推理UI时 if torch.cuda.is_available(): device = "cuda" -elif torch.backends.mps.is_available(): - device = "cpu" else: device = "cpu" From db40317d9ceaf782b5ccb383e044281a0489f29a Mon Sep 17 00:00:00 2001 From: XXXXRT666 <157766680+XXXXRT666@users.noreply.github.com> Date: Wed, 21 Feb 2024 01:15:31 +0000 Subject: [PATCH 4/4] Update config.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 精简代码 --- config.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/config.py b/config.py index caaadd4..1f74128 100644 --- a/config.py +++ b/config.py @@ -19,8 +19,6 @@ exp_root = "logs" python_exec = sys.executable or "python" if torch.cuda.is_available(): infer_device = "cuda" -elif torch.backends.mps.is_available(): - infer_device = "cpu" else: infer_device = "cpu"