mirror of https://github.com/coqui-ai/TTS.git
revert some of Hifigan generator updates
parent 02bc776c35
commit 13dca6e6b6
@@ -85,12 +85,12 @@ class ResBlock1(torch.nn.Module):
             x: [B, C, T]
         """
         for c1, c2 in zip(self.convs1, self.convs2):
-            o = F.leaky_relu(x, LRELU_SLOPE)
-            o = c1(o)
-            o = F.leaky_relu(o, LRELU_SLOPE)
-            o = c2(o)
-            o = o + x
-        return o
+            xt = F.leaky_relu(x, LRELU_SLOPE)
+            xt = c1(xt)
+            xt = F.leaky_relu(xt, LRELU_SLOPE)
+            xt = c2(xt)
+            x = xt + x
+        return x
 
     def remove_weight_norm(self):
         for l in self.convs1:
@@ -134,10 +134,10 @@ class ResBlock2(torch.nn.Module):
 
     def forward(self, x):
         for c in self.convs:
-            o = F.leaky_relu(x, LRELU_SLOPE)
-            o = c(o)
-            o = o + x
-        return o
+            xt = F.leaky_relu(x, LRELU_SLOPE)
+            xt = c(xt)
+            x = xt + x
+        return x
 
     def remove_weight_norm(self):
         for l in self.convs:
@@ -223,7 +223,6 @@ class HifiganGenerator(torch.nn.Module):
         o = F.leaky_relu(o)
         o = self.conv_post(o)
         o = torch.tanh(o)
-        breakpoint()
         return o
 
     @torch.no_grad()
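The behavioural effect of the revert shows in the forward passes above: the restored code folds each block's output back into x on every loop iteration (x = xt + x), whereas the replaced version kept reading the unchanged input and overwrote o each time, so only the last convolution's result reached the return statement. The toy module below is a minimal sketch of that difference, not the repository's actual ResBlock or HifiganGenerator code; the class name, channel count, layer count and LRELU_SLOPE value are illustrative assumptions.

import torch
import torch.nn.functional as F
from torch import nn

LRELU_SLOPE = 0.1  # slope value assumed for this sketch

class TinyResStack(nn.Module):
    """Toy stand-in for a residual conv stack like ResBlock1/ResBlock2."""

    def __init__(self, channels=4, kernel_size=3, num_layers=3):
        super().__init__()
        self.convs = nn.ModuleList(
            [nn.Conv1d(channels, channels, kernel_size, padding=kernel_size // 2)
             for _ in range(num_layers)]
        )

    def forward_reverted(self, x):
        # Restored pattern: the residual is accumulated into x on every
        # iteration, so each layer builds on the previous layer's output.
        for c in self.convs:
            xt = F.leaky_relu(x, LRELU_SLOPE)
            xt = c(xt)
            x = xt + x
        return x

    def forward_replaced(self, x):
        # Pattern being reverted: x never changes inside the loop, so each
        # iteration overwrites o and only the last layer affects the output.
        for c in self.convs:
            o = F.leaky_relu(x, LRELU_SLOPE)
            o = c(o)
            o = o + x
        return o

if __name__ == "__main__":
    torch.manual_seed(0)
    m = TinyResStack()
    x = torch.randn(1, 4, 16)  # [B, C, T]
    # The two variants diverge whenever the stack has more than one layer.
    print(torch.allclose(m.forward_reverted(x), m.forward_replaced(x)))

The third hunk is independent of this: it simply drops a stray breakpoint() call that had been left in the HifiganGenerator output path.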