
Commit 270adde

fix exact window size masking
lucidrains committed Jan 29, 2021
1 parent aa7e1b2 commit 270adde
Showing 2 changed files with 2 additions and 2 deletions.
local_attention/local_attention.py (1 addition, 1 deletion)

@@ -147,7 +147,7 @@ def forward(self, q, k, v, input_mask = None):

     if self.exact_windowsize:
         max_causal_window_size = (self.window_size * self.look_backward)
-        mask = mask & (bq_t[:, :, :, None] > (bq_k[:, :, None, :] + max_causal_window_size))
+        mask = mask | (bq_t[:, :, :, None] > (bq_k[:, :, None, :] + max_causal_window_size))

     dots.masked_fill_(mask, mask_value)
     del mask
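
Why `|` rather than `&`: `mask` is consumed by `dots.masked_fill_(mask, mask_value)`, so a True entry marks a position the query must NOT attend to. The exact-window restriction (the key sits more than `window_size * look_backward` positions in the past) therefore has to be unioned into the existing causal mask. Intersecting it, as the old line did, keeps only positions that are simultaneously in the future and too far in the past, an empty set, so the restriction was silently lost. Below is a minimal sketch of the corrected logic; it is an illustration, not the library's code, using 3-D tensors and plain position indices in place of the bucketed `bq_t`/`bq_k`:

    import torch

    window_size, look_backward = 4, 1
    max_causal_window_size = window_size * look_backward

    seq_len = 8
    t = torch.arange(seq_len)
    bq_t = t[None, :, None]   # query positions, broadcast over keys
    bq_k = t[None, None, :]   # key positions, broadcast over queries

    causal  = bq_t < bq_k                                # key is in the future
    too_far = bq_t > (bq_k + max_causal_window_size)     # key is beyond the exact window

    mask = causal | too_far   # the fix: union of the two forbidden regions
    # causal & too_far is all False here (a key cannot be both in the future
    # and too far in the past), which is why the old `&` disabled the mask

    dots = torch.zeros(1, seq_len, seq_len)
    dots.masked_fill_(mask, float('-inf'))
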
setup.py (1 addition, 1 deletion)

@@ -3,7 +3,7 @@
 setup(
   name = 'local-attention',
   packages = find_packages(),
-  version = '1.2.1',
+  version = '1.2.2',
   license='MIT',
   description = 'Local windowed attention, for language modeling',
   author = 'Phil Wang',
