Update inference_webui.py

XXXXRT666 2024-02-21 13:26:30 +00:00 committed by GitHub
parent 97e3479b07
commit 83c9e8ff02

@@ -16,6 +16,7 @@ logging.getLogger("asyncio").setLevel(logging.ERROR)
 logging.getLogger("charset_normalizer").setLevel(logging.ERROR)
 logging.getLogger("torchaudio._extension").setLevel(logging.ERROR)
 import pdb
+import torch

 if os.path.exists("./gweight.txt"):
     with open("./gweight.txt", 'r', encoding="utf-8") as file:
@@ -48,11 +49,11 @@ is_share = os.environ.get("is_share", "False")
 is_share = eval(is_share)
 if "_CUDA_VISIBLE_DEVICES" in os.environ:
     os.environ["CUDA_VISIBLE_DEVICES"] = os.environ["_CUDA_VISIBLE_DEVICES"]
-is_half = eval(os.environ.get("is_half", "True"))
+is_half = eval(os.environ.get("is_half", "True")) and not torch.backends.mps.is_available()
 import gradio as gr
 from transformers import AutoModelForMaskedLM, AutoTokenizer
 import numpy as np
-import librosa, torch
+import librosa
 from feature_extractor import cnhubert

 cnhubert.cnhubert_base_path = cnhubert_base_path
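
The functional change is the is_half line: half-precision inference is now disabled whenever PyTorch's MPS (Apple Silicon) backend is available, which is also why import torch moves above this block instead of arriving with librosa. A minimal sketch of the resulting behaviour is below; the dtype/device selection at the end uses assumed names for illustration and is not taken from inference_webui.py.

import os
import torch

# Mirror of the changed line: half precision is requested via the is_half env var,
# but forced off when the Apple MPS backend is available, where fp16 inference is
# not reliably supported. eval() is kept only to match the original code.
is_half = eval(os.environ.get("is_half", "True")) and not torch.backends.mps.is_available()

# Illustrative follow-up (assumed names, not part of the patch): pick a dtype and device.
dtype = torch.float16 if is_half else torch.float32
device = "cuda" if torch.cuda.is_available() else ("mps" if torch.backends.mps.is_available() else "cpu")
print(f"device={device}, dtype={dtype}")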