remove print

pull/10/head
Eren Golge 2019-06-04 00:40:03 +02:00
parent 31fe02412c
commit 7d45e5d011
1 changed file with 0 additions and 1 deletion

View File

@@ -207,7 +207,6 @@ class Attention(nn.Module):
if not self.training and self.forward_attn_mask:
_, n = prev_alpha.max(1)
val, n2 = alpha.max(1)
print(True)
for b in range(alignment.shape[0]):
alpha[b, n[b] + 2:] = 0
alpha[b, :(n[b] - 1)] = 0 # ignore all previous states to prevent repetition.