fix memory leak due to diagonal alignment score

pull/10/head
Eren Golge 2019-10-02 00:30:25 +02:00
parent fc9af0ab3c
commit 8dec2a9e95
2 changed files with 3 additions and 3 deletions


@@ -204,7 +204,7 @@ def train(model, criterion, criterion_st, optimizer, optimizer_st, scheduler,
"GradNormST:{:.5f} AvgTextLen:{:.1f} AvgSpecLen:{:.1f} StepTime:{:.2f} "
"LoaderTime:{:.2f} LR:{:.6f}".format(
num_iter, batch_n_iter, global_step,
-postnet_loss.item(), decoder_loss.item(), stop_loss.item(), align_score.item(),
+postnet_loss.item(), decoder_loss.item(), stop_loss.item(), align_score,
grad_norm, grad_norm_st, avg_text_length, avg_spec_length, step_time,
loader_time, current_lr),
flush=True)
@@ -404,7 +404,7 @@ def evaluate(model, criterion, criterion_st, ap, global_step, epoch):
postnet_loss.item(), keep_avg['avg_postnet_loss'],
decoder_loss.item(), keep_avg['avg_decoder_loss'],
stop_loss.item(), keep_avg['avg_stop_loss'],
-align_score.item(), keep_avg['avg_align_score']),
+align_score, keep_avg['avg_align_score']),
flush=True)
if args.rank == 0:


@@ -8,4 +8,4 @@ def alignment_diagonal_score(alignments):
Shape:
alignments : batch x decoder_steps x encoder_steps
"""
-return alignments.max(dim=1)[0].mean(dim=1).mean(dim=0)
+return alignments.max(dim=1)[0].mean(dim=1).mean(dim=0).item()
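
The patch moves the .item() call into alignment_diagonal_score itself, so the score is already a plain Python float when the train and evaluate loops log it or feed it into keep_avg. Below is a minimal sketch of why this fixes the leak; the alignment_diagonal_score body mirrors the patched function from the diff, while the random tensor and the leaky_history / safe_history lists are hypothetical driver code, not part of the repo. Keeping the score as a tensor retains the whole autograd graph of the attention weights every time it is stored, whereas a float keeps only the number.

import torch

def alignment_diagonal_score(alignments):
    """alignments: batch x decoder_steps x encoder_steps"""
    # .item() detaches the scalar from the autograd graph and returns a float
    return alignments.max(dim=1)[0].mean(dim=1).mean(dim=0).item()

# fake attention weights that carry gradient history, as they would in training
alignments = torch.softmax(torch.randn(8, 120, 60, requires_grad=True), dim=2)

leaky_history = []   # hypothetical: accumulating tensors retains every graph
safe_history = []    # hypothetical: accumulating floats keeps only numbers

score_tensor = alignments.max(dim=1)[0].mean(dim=1).mean(dim=0)
leaky_history.append(score_tensor)                         # graph stays alive, memory grows per step
safe_history.append(alignment_diagonal_score(alignments))  # plain float, graph can be freed

Since the score is only used for logging and running averages, converting it to a float at the source costs nothing and removes the need for .item() at every call site.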