From 13dca6e6b616788f05c331885ef1687e9b9b7797 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Eren=20G=C3=B6lge?=
Date: Wed, 7 Apr 2021 19:15:31 +0200
Subject: [PATCH] revert some of Hifigan generator updates

---
 TTS/vocoder/models/hifigan_generator.py | 21 ++++++++++-----------
 1 file changed, 10 insertions(+), 11 deletions(-)

diff --git a/TTS/vocoder/models/hifigan_generator.py b/TTS/vocoder/models/hifigan_generator.py
index b8f6d99f..8545e828 100644
--- a/TTS/vocoder/models/hifigan_generator.py
+++ b/TTS/vocoder/models/hifigan_generator.py
@@ -85,12 +85,12 @@ class ResBlock1(torch.nn.Module):
             x: [B, C, T]
         """
         for c1, c2 in zip(self.convs1, self.convs2):
-            o = F.leaky_relu(x, LRELU_SLOPE)
-            o = c1(o)
-            o = F.leaky_relu(o, LRELU_SLOPE)
-            o = c2(o)
-            o = o + x
-        return o
+            xt = F.leaky_relu(x, LRELU_SLOPE)
+            xt = c1(xt)
+            xt = F.leaky_relu(xt, LRELU_SLOPE)
+            xt = c2(xt)
+            x = xt + x
+        return x
 
     def remove_weight_norm(self):
         for l in self.convs1:
@@ -134,10 +134,10 @@ class ResBlock2(torch.nn.Module):
 
     def forward(self, x):
         for c in self.convs:
-            o = F.leaky_relu(x, LRELU_SLOPE)
-            o = c(o)
-            o = o + x
-        return o
+            xt = F.leaky_relu(x, LRELU_SLOPE)
+            xt = c(xt)
+            x = xt + x
+        return x
 
     def remove_weight_norm(self):
         for l in self.convs:
@@ -223,7 +223,6 @@ class HifiganGenerator(torch.nn.Module):
         o = F.leaky_relu(o)
         o = self.conv_post(o)
         o = torch.tanh(o)
-        breakpoint()
         return o
 
     @torch.no_grad()
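
A minimal sketch, outside the patch itself, of the behavioral difference the revert restores: in the pre-revert loop, `o` was recomputed from the original `x` on every iteration, so only the last conv's output was returned; the restored form feeds the residual sum `x = xt + x` into the next iteration, so every conv contributes. The `blocks` list below is a hypothetical stand-in for the block's convolutions.

```python
import torch
import torch.nn.functional as F

LRELU_SLOPE = 0.1

# Hypothetical stand-in for the residual block's convolutions.
blocks = [torch.nn.Conv1d(4, 4, 3, padding=1) for _ in range(3)]
x = torch.randn(1, 4, 8)

# Pre-revert form: `o` restarts from the original `x` each pass,
# so nothing accumulates and only the last conv's result is kept.
for c in blocks:
    o = F.leaky_relu(x, LRELU_SLOPE)
    o = c(o)
    o = o + x
last_only = o

# Restored form: the residual sum is carried into the next iteration,
# so all convs contribute to the final output.
xr = x
for c in blocks:
    xt = F.leaky_relu(xr, LRELU_SLOPE)
    xt = c(xt)
    xr = xt + xr
accumulated = xr

print(torch.allclose(last_only, accumulated))  # False in general
```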