Resolve Python Logger warnings (#2379)

Signed-off-by: Emmanuel Ferdman <emmanuelferdman@gmail.com>
This commit is contained in:
Emmanuel Ferdman 2025-06-10 13:03:23 +03:00 committed by GitHub
parent d39836b8fa
commit 0d2f273402
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 7 additions and 7 deletions

View File

@@ -354,7 +354,7 @@ class ScaledAdam(BatchedOptimizer):
if ans < 1.0: if ans < 1.0:
first_state["num_clipped"] += 1 first_state["num_clipped"] += 1
if ans < 0.1: if ans < 0.1:
logging.warn(f"Scaling gradients by {ans}, model_norm_threshold={model_norm_threshold}") logging.warning(f"Scaling gradients by {ans}, model_norm_threshold={model_norm_threshold}")
if self.show_dominant_parameters: if self.show_dominant_parameters:
assert p.shape[0] == len(param_names) assert p.shape[0] == len(param_names)
self._show_gradient_dominating_parameter(tuples, tot_sumsq) self._show_gradient_dominating_parameter(tuples, tot_sumsq)
@@ -362,7 +362,7 @@ class ScaledAdam(BatchedOptimizer):
def _show_gradient_dominating_parameter(self, tuples: List[Tuple[Tensor, dict, List[str]]], tot_sumsq: Tensor): def _show_gradient_dominating_parameter(self, tuples: List[Tuple[Tensor, dict, List[str]]], tot_sumsq: Tensor):
""" """
Show information of parameter wihch dominanting tot_sumsq. Show information of parameter which dominating tot_sumsq.
Args: Args:
tuples: a list of tuples of (param, state, param_names) tuples: a list of tuples of (param, state, param_names)
@@ -415,7 +415,7 @@ class ScaledAdam(BatchedOptimizer):
dominant_grad, dominant_grad,
) = sorted_by_proportion[dominant_param_name] ) = sorted_by_proportion[dominant_param_name]
logging.info( logging.info(
f"Parameter Dominanting tot_sumsq {dominant_param_name}" f"Parameter Dominating tot_sumsq {dominant_param_name}"
f" with proportion {dominant_proportion:.2f}," f" with proportion {dominant_proportion:.2f},"
f" where dominant_sumsq=(grad_sumsq*orig_rms_sq)" f" where dominant_sumsq=(grad_sumsq*orig_rms_sq)"
f"={dominant_sumsq:.3e}," f"={dominant_sumsq:.3e},"

View File

@@ -283,7 +283,7 @@ def get_hparams_from_file(config_path):
def check_git_hash(model_dir): def check_git_hash(model_dir):
source_dir = os.path.dirname(os.path.realpath(__file__)) source_dir = os.path.dirname(os.path.realpath(__file__))
if not os.path.exists(os.path.join(source_dir, ".git")): if not os.path.exists(os.path.join(source_dir, ".git")):
logger.warn( logger.warning(
"{} is not a git repository, therefore hash value comparison will be ignored.".format( "{} is not a git repository, therefore hash value comparison will be ignored.".format(
source_dir, source_dir,
) )
@@ -296,7 +296,7 @@ def check_git_hash(model_dir):
if os.path.exists(path): if os.path.exists(path):
saved_hash = open(path).read() saved_hash = open(path).read()
if saved_hash != cur_hash: if saved_hash != cur_hash:
logger.warn( logger.warning(
"git hash values are different. {}(saved) != {}(current)".format( "git hash values are different. {}(saved) != {}(current)".format(
saved_hash[:8], saved_hash[:8],
cur_hash[:8], cur_hash[:8],

4
api.py
View File

@@ -1071,10 +1071,10 @@ default_refer = DefaultRefer(args.default_refer_path, args.default_refer_text, a
# 模型路径检查 # 模型路径检查
if sovits_path == "": if sovits_path == "":
sovits_path = g_config.pretrained_sovits_path sovits_path = g_config.pretrained_sovits_path
logger.warn(f"未指定SoVITS模型路径, fallback后当前值: {sovits_path}") logger.warning(f"未指定SoVITS模型路径, fallback后当前值: {sovits_path}")
if gpt_path == "": if gpt_path == "":
gpt_path = g_config.pretrained_gpt_path gpt_path = g_config.pretrained_gpt_path
logger.warn(f"未指定GPT模型路径, fallback后当前值: {gpt_path}") logger.warning(f"未指定GPT模型路径, fallback后当前值: {gpt_path}")
# 指定默认参考音频, 调用方 未提供/未给全 参考音频参数时使用 # 指定默认参考音频, 调用方 未提供/未给全 参考音频参数时使用
if default_refer.path == "" or default_refer.text == "" or default_refer.language == "": if default_refer.path == "" or default_refer.text == "" or default_refer.language == "":