mirror of https://github.com/coqui-ai/TTS.git
Log spectrogram reconstruction
parent f623b4e586
commit 088a105a43
Binary file not shown.
@@ -74,7 +74,6 @@ class AttentionWrapper(nn.Module):
         # Normalize attention weight
         alignment = F.softmax(alignment, dim=-1)  ## TODO: might be buggy
-        print(alignment.size())
 
         # Attention context vector
         # (batch, 1, dim)
 
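
For reference, a minimal runnable sketch of the attention step shown in the hunk above: the raw alignment energies are normalized with a softmax over the encoder time axis and then used to weight the encoder memory into a (batch, 1, dim) context vector. The tensor names and shapes below are assumptions for illustration, not the repository's actual AttentionWrapper code.

import torch
import torch.nn.functional as F

# Illustrative shapes only (assumed for this sketch).
batch, max_time, dim = 2, 7, 16
alignment = torch.rand(batch, max_time)     # raw attention energies
memory = torch.rand(batch, max_time, dim)   # encoder outputs

# Normalize attention weights over the encoder time axis.
alignment = F.softmax(alignment, dim=-1)

# Attention context vector:
# (batch, 1, max_time) x (batch, max_time, dim) -> (batch, 1, dim)
context = torch.bmm(alignment.unsqueeze(1), memory)
print(context.size())  # torch.Size([2, 1, 16])
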
File diff suppressed because one or more lines are too long
train.py (8 additions)
@@ -182,6 +182,7 @@ def main(args):
                           current_step)
+            tb.add_scalar('Time/StepTime', step_time, current_step)
 
 
             if current_step % c.save_step == 0:
                 checkpoint_path = 'checkpoint_{}.pth.tar'.format(current_step)
                 checkpoint_path = os.path.join(OUT_PATH, checkpoint_path)
@@ -194,6 +195,13 @@
                                  'date': datetime.date.today().strftime("%B %d, %Y")},
                                 checkpoint_path)
                 print("\n | > Checkpoint is saved : {}".format(checkpoint_path))
+
+                # Log spectrogram reconstruction
+                const_spec = linear_output[0].data.cpu()[None, :]
+                gt_spec = linear_spec_var[0].data.cpu()[None, :]
+                tb.add_image('Spec/Reconstruction', const_spec, current_step)
+                tb.add_image('Spec/GroundTruth', gt_spec, current_step)
+
         lr_scheduler.step(loss.data[0])
         tb.add_scalar('Time/EpochTime', epoch_time, epoch)
         epoch_time = 0
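
For context, a minimal sketch of the TensorBoard logging pattern these hunks add: per-step scalars via add_scalar and the predicted versus ground-truth linear spectrograms via add_image, with a leading channel axis so each spectrogram is logged as a single-channel image. The writer class, tensor shapes, and values below are assumptions for illustration; in train.py the tb writer and the linear_output / linear_spec_var tensors come from the training loop itself.

import torch
from torch.utils.tensorboard import SummaryWriter

tb = SummaryWriter(log_dir='logs/demo')   # stand-in for the tb writer used in train.py
current_step = 100
step_time = 0.42                          # assumed value; normally measured per training step

# Stand-ins for the predicted and ground-truth linear spectrograms,
# shaped (batch, frames, frequency_bins) with values in [0, 1].
linear_output = torch.rand(32, 200, 1025)
linear_spec_var = torch.rand(32, 200, 1025)

# Scalar logging, as in the first hunk.
tb.add_scalar('Time/StepTime', step_time, current_step)

# Image logging, as in the second hunk: take the first item of the batch and
# add a channel axis so add_image receives a (1, H, W) grayscale image.
const_spec = linear_output[0].cpu()[None, :]
gt_spec = linear_spec_var[0].cpu()[None, :]
tb.add_image('Spec/Reconstruction', const_spec, current_step)
tb.add_image('Spec/GroundTruth', gt_spec, current_step)
tb.close()
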