update forward attention

pull/10/head
Eren Golge 2019-06-24 16:57:29 +02:00
parent 118fe61028
commit c72470bcfc
1 changed file with 1 addition and 1 deletion

@@ -208,7 +208,7 @@ class Attention(nn.Module):
         _, n = prev_alpha.max(1)
         val, n2 = alpha.max(1)
         for b in range(alignment.shape[0]):
-            alpha[b, n[b] + 2:] = 0
+            alpha[b, n[b] + 3:] = 0
             alpha[b, :(n[b] - 1)] = 0 # ignore all previous states to prevent repetition.
             alpha[b, (n[b] - 2)] = 0.01 * val[b] # smoothing factor for the prev step
         # compute attention weights
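For reference, a minimal runnable sketch of the forward-attention windowing this one-character change touches: attention weights more than three positions past the previous step's peak are zeroed (previously two), earlier positions are masked to prevent repetition, and a small smoothing residue is kept at the step before the peak. The function name clip_attention_window and the index clamping for early decoder steps are illustrative assumptions, not code from the repository.

import torch

def clip_attention_window(alpha, prev_alpha):
    # alpha, prev_alpha: (batch, max_time) attention weights.
    _, n = prev_alpha.max(1)   # index of the previous step's attention peak
    val, _ = alpha.max(1)      # value of the current step's attention peak
    for b in range(alpha.shape[0]):
        nb = int(n[b])
        # mask positions too far ahead of the previous peak (the "+3" from this commit)
        alpha[b, nb + 3:] = 0
        # mask already-attended positions; the max() clamp is an added guard
        # against negative-index wraparound at the first decoder steps
        alpha[b, :max(nb - 1, 0)] = 0
        if nb >= 2:
            # smoothing residue at the step before the peak
            alpha[b, nb - 2] = 0.01 * val[b]
    return alpha

A quick usage example with random attention weights:

alpha = torch.softmax(torch.randn(2, 10), dim=1)
prev_alpha = torch.softmax(torch.randn(2, 10), dim=1)
alpha = clip_attention_window(alpha, prev_alpha)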