From 7c3492c2406f51e9048a57985f590b78e888abde Mon Sep 17 00:00:00 2001
From: Ναρουσέ·μ·γιουμεμί·Χινακάννα <40709280+NaruseMioShirakana@users.noreply.github.com>
Date: Sat, 6 Apr 2024 15:48:36 +0800
Subject: [PATCH] Add files via upload

---
 GPT_SoVITS/AR/modules/embedding_onnx.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/GPT_SoVITS/AR/modules/embedding_onnx.py b/GPT_SoVITS/AR/modules/embedding_onnx.py
index b93405b..2f8dcf1 100644
--- a/GPT_SoVITS/AR/modules/embedding_onnx.py
+++ b/GPT_SoVITS/AR/modules/embedding_onnx.py
@@ -48,16 +48,17 @@ class SinePositionalEmbedding(nn.Module):
         self.dropout = torch.nn.Dropout(p=dropout)
         self.reverse = False
         self.div_term = torch.exp(torch.arange(0, self.embedding_dim, 2) * -(math.log(10000.0) / self.embedding_dim))
+        self.pe = self.extend_pe(2000)
 
     def extend_pe(self, x):
-        position = torch.cumsum(torch.ones_like(x[:,:,0]), dim=1).transpose(0, 1)
+        position = torch.cumsum(torch.ones((x,1)), dim=0)
         scpe = (position * self.div_term).unsqueeze(0)
         pe = torch.cat([torch.sin(scpe), torch.cos(scpe)]).permute(1, 2, 0)
         pe = pe.contiguous().view(1, -1, self.embedding_dim)
         return pe
 
     def forward(self, x: torch.Tensor) -> torch.Tensor:
-        pe = self.extend_pe(x)
+        pe = self.pe[:,:x.size(1),:]
         output = x.unsqueeze(-1) if x.ndim == 2 else x
         output = output * self.x_scale + self.alpha * pe
         return self.dropout(output)
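
For context: the original extend_pe built the sinusoidal table from the runtime input tensor, which leaves data-dependent shape ops in the exported ONNX graph, while the patch precomputes the table once for up to 2000 positions in __init__ and simply slices it in forward. Below is a minimal self-contained sketch of the same precompute-and-slice pattern, assuming illustrative defaults for x_scale, alpha, dropout, and the max_len cap; the class name and those values are not taken from the repository.

# Sketch of the precompute-and-slice sinusoidal embedding the patch introduces.
# x_scale, alpha, dropout, and max_len are assumed defaults for illustration.
import math
import torch
import torch.nn as nn


class PrecomputedSinePositionalEmbedding(nn.Module):
    def __init__(self, embedding_dim: int, max_len: int = 2000, dropout: float = 0.0):
        super().__init__()
        self.embedding_dim = embedding_dim
        self.x_scale = 1.0  # assumed default; the real module may scale the input differently
        self.alpha = 1.0    # assumed fixed weight on the positional term
        self.dropout = nn.Dropout(p=dropout)
        div_term = torch.exp(
            torch.arange(0, embedding_dim, 2) * -(math.log(10000.0) / embedding_dim)
        )
        # Positions 1..max_len, matching the cumsum-of-ones used in the patch.
        position = torch.cumsum(torch.ones((max_len, 1)), dim=0)
        scpe = (position * div_term).unsqueeze(0)           # (1, max_len, dim/2)
        pe = torch.cat([torch.sin(scpe), torch.cos(scpe)])  # (2, max_len, dim/2)
        pe = pe.permute(1, 2, 0).contiguous().view(1, -1, embedding_dim)
        # Plain attribute as in the patch; register_buffer would also work and
        # the table exports to ONNX as a constant either way.
        self.pe = pe

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Slice the precomputed table to the current sequence length.
        pe = self.pe[:, : x.size(1), :]
        output = x.unsqueeze(-1) if x.ndim == 2 else x
        return self.dropout(output * self.x_scale + self.alpha * pe)


if __name__ == "__main__":
    emb = PrecomputedSinePositionalEmbedding(embedding_dim=512)
    y = emb(torch.zeros(1, 50, 512))
    print(y.shape)  # torch.Size([1, 50, 512])

The trade-off of this pattern is a fixed upper bound on sequence length (2000 positions in the patch) in exchange for an ONNX graph whose positional table is a constant rather than recomputed per call.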