Merge branch 'main' of github.com:bwnotfound/GPT-SoVITS

This commit is contained in:
bwnotfound 2024-02-12 21:47:53 +08:00
commit 7303e108e1
4 changed files with 9 additions and 8 deletions

View File

@@ -415,7 +415,7 @@ class Text2SemanticDecoder(nn.Module):
 if(idx==0):###第一次跑不能EOS否则没有了
 logits = logits[:, :-1] ###刨除1024终止符号的概率
 samples = sample(
-logits[0], y, top_k=top_k, top_p=top_p, repetition_penalty=1.05, temperature=temperature
+logits[0], y, top_k=top_k, top_p=top_p, repetition_penalty=1.35, temperature=temperature
 )[0].unsqueeze(0)
 if early_stop_num != -1 and (y.shape[1] - prefix_len) > early_stop_num:
 print("use early stop num:", early_stop_num)

View File

@@ -624,15 +624,15 @@ with gr.Blocks(title="GPT-SoVITS WebUI") as app:
 interactive=True,
 )
 with gr.Row():
-top_k = gr.Slider(minimum=1,maximum=100,step=1,label=i18n("top_k"),value=20,interactive=True)
-top_p = gr.Slider(minimum=0,maximum=1,step=0.05,label=i18n("top_p"),value=0.6,interactive=True)
-temperature = gr.Slider(minimum=0,maximum=1,step=0.05,label=i18n("temperature"),value=0.6,interactive=True)
+top_k = gr.Slider(minimum=1,maximum=100,step=1,label=i18n("top_k"),value=5,interactive=True)
+top_p = gr.Slider(minimum=0,maximum=1,step=0.05,label=i18n("top_p"),value=1,interactive=True)
+temperature = gr.Slider(minimum=0,maximum=1,step=0.05,label=i18n("temperature"),value=1,interactive=True)
 inference_button = gr.Button(i18n("合成语音"), variant="primary")
 output = gr.Audio(label=i18n("输出的语音"))
 inference_button.click(
 get_tts_wav,
-[inp_ref, prompt_text, prompt_language, text, text_language, how_to_cut],
+[inp_ref, prompt_text, prompt_language, text, text_language, how_to_cut,top_k,top_p,temperature],
 [output],
 )

View File

@@ -1,5 +1,6 @@
 import argparse
 import os
+os.environ["HF_ENDPOINT"]="https://hf-mirror.com"
 import traceback
 import requests
 from glob import glob
@@ -103,4 +104,4 @@ if __name__ == '__main__':
 model_size = cmd.model_size,
 language = cmd.language,
 precision = cmd.precision,
-)
\ No newline at end of file
+)

View File

@@ -43,7 +43,7 @@ def execute_asr(input_folder, output_folder, model_size, language):
 text = model.generate(input="%s/%s"%(input_folder, name))[0]["text"]
 output.append(f"{input_folder}/{name}|{output_file_name}|{language.upper()}|{text}")
 except:
-return print(traceback.format_exc())
+print(traceback.format_exc())
 output_folder = output_folder or "output/asr_opt"
 os.makedirs(output_folder, exist_ok=True)
@@ -73,4 +73,4 @@ if __name__ == '__main__':
 output_folder = cmd.output_folder,
 model_size = cmd.model_size,
 language = cmd.language,
-)
\ No newline at end of file
+)