Add files via upload
This commit is contained in:
parent a17399d75b
commit 7c3492c240
@@ -48,16 +48,17 @@ class SinePositionalEmbedding(nn.Module):
         self.dropout = torch.nn.Dropout(p=dropout)
         self.reverse = False
         self.div_term = torch.exp(torch.arange(0, self.embedding_dim, 2) * -(math.log(10000.0) / self.embedding_dim))
+        self.pe = self.extend_pe(2000)
 
     def extend_pe(self, x):
-        position = torch.cumsum(torch.ones_like(x[:,:,0]), dim=1).transpose(0, 1)
+        position = torch.cumsum(torch.ones((x,1)), dim=0)
         scpe = (position * self.div_term).unsqueeze(0)
         pe = torch.cat([torch.sin(scpe), torch.cos(scpe)]).permute(1, 2, 0)
         pe = pe.contiguous().view(1, -1, self.embedding_dim)
         return pe
 
     def forward(self, x: torch.Tensor) -> torch.Tensor:
-        pe = self.extend_pe(x)
+        pe = self.pe[:,:x.size(1),:]
         output = x.unsqueeze(-1) if x.ndim == 2 else x
         output = output * self.x_scale + self.alpha * pe
         return self.dropout(output)
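The hunk above swaps per-call construction of the positional table (the old extend_pe(x) call inside forward) for a table precomputed once in __init__ for up to 2000 positions and sliced to the input length on each forward pass. Below is a minimal, self-contained sketch of that precompute-and-slice pattern; the class name, constructor signature, and the x_scale/alpha handling are illustrative assumptions, since the hunk only shows part of the class.

import math
import torch
import torch.nn as nn

class PrecomputedSinePositionalEmbedding(nn.Module):
    # Hypothetical constructor: only embedding_dim, dropout, x_scale and alpha
    # are implied by the hunk; the defaults here are illustrative assumptions.
    def __init__(self, embedding_dim: int, dropout: float = 0.0, max_len: int = 2000):
        super().__init__()
        self.embedding_dim = embedding_dim
        self.x_scale = 1.0                         # assumed plain scale factor
        self.alpha = nn.Parameter(torch.ones(1))   # assumed learnable mix weight
        self.dropout = nn.Dropout(p=dropout)
        # Frequencies for the sin/cos pairs, as in the original class.
        self.div_term = torch.exp(
            torch.arange(0, embedding_dim, 2) * -(math.log(10000.0) / embedding_dim)
        )
        # Build the table once; forward() only slices it (mirrors the new code path).
        self.pe = self.extend_pe(max_len)

    def extend_pe(self, length: int) -> torch.Tensor:
        # Positions 1..length as a (length, 1) column, as in the committed version.
        position = torch.cumsum(torch.ones((length, 1)), dim=0)
        scpe = (position * self.div_term).unsqueeze(0)           # (1, length, dim/2)
        pe = torch.cat([torch.sin(scpe), torch.cos(scpe)]).permute(1, 2, 0)
        return pe.contiguous().view(1, -1, self.embedding_dim)   # (1, length, dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        pe = self.pe[:, : x.size(1), :]            # slice the cached table
        output = x.unsqueeze(-1) if x.ndim == 2 else x
        output = output * self.x_scale + self.alpha * pe
        return self.dropout(output)

# Example usage (hypothetical shapes):
# emb = PrecomputedSinePositionalEmbedding(embedding_dim=512, dropout=0.1)
# y = emb(torch.zeros(2, 100, 512))   # adds positions 1..100 of the cached table

Slicing a cached table avoids rebuilding the sin/cos tensors on every call, at the cost of a fixed maximum length (2000 positions here). Note that, as written in the hunk, self.pe is a plain tensor attribute rather than a registered buffer, so it will not automatically move with module.to(device).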