diff --git a/GPT_SoVITS/TTS_infer_pack/TTS.py b/GPT_SoVITS/TTS_infer_pack/TTS.py
index 566e9988..3322f036 100644
--- a/GPT_SoVITS/TTS_infer_pack/TTS.py
+++ b/GPT_SoVITS/TTS_infer_pack/TTS.py
@@ -7,6 +7,7 @@ import traceback
 from tqdm import tqdm
 now_dir = os.getcwd()
 sys.path.append(now_dir)
+sys.path.append(os.path.join(now_dir, "GPT_SoVITS"))
 import ffmpeg
 import os
 from typing import Generator, List, Union
diff --git a/Inference b/Inference
index dccd4205..b8e3155f 160000
--- a/Inference
+++ b/Inference
@@ -1 +1 @@
-Subproject commit dccd4205f7ef5805f2bc51332643e95c19f3cc95
+Subproject commit b8e3155fe17fb2abb746bae6398767091519e92a
diff --git a/app.py b/app.py
index 972250a2..331167c0 100644
--- a/app.py
+++ b/app.py
@@ -1,4 +1,4 @@
-frontend_version = "2.3.1 240320"
+frontend_version = "2.3.2 240322"
 
 from datetime import datetime
 import gradio as gr
@@ -12,31 +12,35 @@ import wave
 import os, sys
 now_dir = os.getcwd()
 sys.path.append(now_dir)
-sys.path.append(os.path.join(now_dir, "Inference/src"))
+# Try to clear any sys.path entries containing GPT_SoVITS
+for path in sys.path:
+    if path.find(r"GPT_SoVITS") != -1:
+        sys.path.remove(path)
+
 # Get the model folder path
-config_path = "Inference/config.json"
+from Inference.src.config_manager import Inference_Config
+inference_config = Inference_Config()
 
-# Read config.json
-if os.path.exists(config_path):
-    with open(config_path, "r", encoding="utf-8") as f:
-        _config = json.load(f)
-        locale_language = str(_config.get("locale", "auto"))
-        locale_language = None if locale_language.lower() == "auto" else locale_language
-        tts_port = _config.get("tts_port", 5000)
-        max_text_length = _config.get("max_text_length", -1)
-        default_batch_size = _config.get("batch_size", 10)
-        default_word_count = _config.get("max_word_count", 80)
-        is_share = _config.get("is_share", "false").lower() == "true"
-        is_classic = False
-        enable_auth = _config.get("enable_auth", "false").lower() == "true"
-        users = _config.get("user", {})
-        try:
-            default_username = list(users.keys())[0]
-            default_password = users[default_username]
-        except:
-            default_username = "admin"
-            default_password = "admin123"
+config_path = inference_config.config_path
+locale_language = inference_config.locale_language
+tts_port = inference_config.tts_port
+default_batch_size = inference_config.default_batch_size
+default_word_count = inference_config.default_word_count
+enable_auth = inference_config.enable_auth
+is_classic = inference_config.is_classic
+models_path = inference_config.models_path
+max_text_length = inference_config.max_text_length
+is_share = inference_config.is_share
+default_username, default_password = "admin", "admin123"
+if enable_auth:
+    users = inference_config.users
+    try:
+        default_username = list(users.keys())[0]
+        default_password = users[default_username]
+    except:
+        default_username = "admin"
+        default_password = "admin123"
 
 from tools.i18n.i18n import I18nAuto
 i18n = I18nAuto(locale_language , "Inference/i18n/locale")
@@ -358,7 +362,7 @@ with gr.Blocks() as app:
         with gr.Group():
-            top_k = gr.Slider(minimum=1, maximum=30, value=6, label=i18n("Top K"), step=1)
+            top_k = gr.Slider(minimum=1, maximum=30, value=3, label=i18n("Top K"), step=1)
             top_p = gr.Slider(minimum=0, maximum=1, value=0.8, label=i18n("Top P"))
             temperature = gr.Slider(
                 minimum=0, maximum=1, value=0.8, label=i18n("Temperature")
diff --git a/requirements.txt b/requirements.txt
index 415ad3e5..679ae01f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -30,7 +30,6 @@ jieba_fast
 jieba
 LangSegment>=0.2.6
 Faster_Whisper
-pyaudio
 flask_httpauth
 flask_cors
 wordsegment