From 1c99be2ffddb0ed368bae82616d76fa2e287b401 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Mon, 18 Feb 2019 13:06:26 +0100
Subject: [PATCH] Change window size for attention

---
 layers/attention.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/layers/attention.py b/layers/attention.py
index d64f6894..ee18386e 100644
--- a/layers/attention.py
+++ b/layers/attention.py
@@ -118,8 +118,8 @@ class AttentionRNNCell(nn.Module):
         self.rnn_cell = nn.GRUCell(annot_dim + memory_dim, rnn_dim)
         self.windowing = windowing
         if self.windowing:
-            self.win_back = 1
-            self.win_front = 3
+            self.win_back = 3
+            self.win_front = 6
             self.win_idx = None
         # pick bahdanau or location sensitive attention
         if align_model == 'b':
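
Note: the patch itself only widens the attention window (win_back 1 -> 3, win_front 3 -> 6). For context, the sketch below shows one common way such a window is applied during decoding: alignment scores outside [win_idx - win_back, win_idx + win_front) are masked to -inf before the softmax, so the attention can only move within a band around the last attended encoder step. The function name apply_attention_window, its signature, and the masking details here are illustrative assumptions, not code taken from layers/attention.py.

    # Illustrative sketch, not the repo's actual implementation.
    import torch

    def apply_attention_window(alignment, win_idx, win_back=3, win_front=6):
        """Mask alignment scores outside [win_idx - win_back, win_idx + win_front).

        alignment: (batch, seq_len) raw attention scores (pre-softmax).
        win_idx:   index of the currently attended encoder step.
        Scores outside the window get -inf, so softmax assigns them ~0 weight.
        """
        seq_len = alignment.shape[1]
        back = max(win_idx - win_back, 0)
        front = min(win_idx + win_front, seq_len)
        mask = torch.full_like(alignment, float("-inf"))
        mask[:, back:front] = 0.0
        return alignment + mask

    # Usage: window centred on encoder step 10 of a 50-step sequence.
    scores = torch.randn(2, 50)
    windowed = apply_attention_window(scores, win_idx=10)
    attn = torch.softmax(windowed, dim=1)  # probability mass stays inside the window

Under this reading, the change simply lets the attention look 3 steps back and 6 steps ahead instead of 1 back and 3 ahead, loosening the monotonicity constraint while still preventing large jumps.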