From f85ef1aa08df90b9ab4b22b444398f0c16e36d3c Mon Sep 17 00:00:00 2001
From: huangxu1991 <40886464+huangxu1991@users.noreply.github.com>
Date: Wed, 13 Mar 2024 17:48:27 +0800
Subject: [PATCH] Add use_distributed_sampler=False in Trainer
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

If you have defined your own sampler, you must set use_distributed_sampler to False.
---
 GPT_SoVITS/s1_train.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/GPT_SoVITS/s1_train.py b/GPT_SoVITS/s1_train.py
index fb273542..c8ec5f3b 100644
--- a/GPT_SoVITS/s1_train.py
+++ b/GPT_SoVITS/s1_train.py
@@ -132,6 +132,7 @@ def main(args):
         logger=logger,
         num_sanity_val_steps=0,
         callbacks=[ckpt_callback],
+        use_distributed_sampler=False,  # A very simple change, but it fixes the mismatch in training step counts when using the custom bucket_sampler.
     )
     model: Text2SemanticLightningModule = Text2SemanticLightningModule(
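
The change is a single keyword argument, but the reasoning generalizes: under DDP, Lightning's default use_distributed_sampler=True silently replaces a DataLoader's sampler with a DistributedSampler, which changes the number of steps per epoch when the DataLoader already carries a custom sampler. Below is a minimal sketch of the same pattern, not taken from this repository: ToyDataset and the RandomSampler are illustrative stand-ins for the repo's dataset and bucket_sampler, and it assumes pytorch_lightning 2.x, where use_distributed_sampler is a Trainer argument.

# Minimal sketch (not part of the patch): keep a custom sampler under DDP.
from torch.utils.data import DataLoader, Dataset, RandomSampler
from pytorch_lightning import Trainer


class ToyDataset(Dataset):
    """Placeholder dataset standing in for the repo's semantic dataset."""

    def __len__(self):
        return 1000

    def __getitem__(self, idx):
        return idx


dataset = ToyDataset()

# Hand-rolled sampler (stand-in for the custom bucket_sampler): it fixes how many
# samples are drawn per epoch, so the step count depends on keeping it intact.
custom_sampler = RandomSampler(dataset, replacement=True, num_samples=256)
loader = DataLoader(dataset, batch_size=8, sampler=custom_sampler)

# With the default use_distributed_sampler=True, Lightning would swap the sampler
# for a DistributedSampler under DDP and the steps per epoch would no longer match
# what the custom sampler defines. Setting it to False leaves the sampler untouched.
trainer = Trainer(
    accelerator="cpu",          # CPU DDP so the sketch runs anywhere (assumption, not the repo's config)
    devices=2,
    strategy="ddp",
    max_epochs=1,
    use_distributed_sampler=False,
)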