diff --git a/layers/losses.py b/layers/losses.py index ab472519..3b60c1f4 100644 --- a/layers/losses.py +++ b/layers/losses.py @@ -58,7 +58,7 @@ class MSELossMasked(nn.Module): class AttentionEntropyLoss(nn.Module): - def forward(self, align): + def forward(self, align):  # pylint: disable=no-self-use """ Forces attention to be more decisive by penalizing soft attention weights