
Commit

fix alibi with flash attention
lucidrains committed May 15, 2023
1 parent bfaab9a commit 4c3ec34
Showing 2 changed files with 2 additions and 1 deletion.
1 change: 1 addition & 0 deletions MEGABYTE_pytorch/attend.py
@@ -101,6 +101,7 @@ def flash_attn(self, q, k, v, mask = None, attn_bias = None):
             if exists(mask):
                 attn_bias = attn_bias.masked_fill(~mask, mask_value)

+            mask = attn_bias
             causal = False

         # pytorch 2.0 flash attn: q, k, v, mask, dropout, causal, softmax_scale
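The substance of the fix is the single added line: the ALiBi bias (with the padding mask already folded in) was being built but never handed to the flash attention call. The sketch below is a simplified illustration rather than the repository's code; the function name, tensor shapes, and the assumption that any causal masking is already folded into the bias are mine. It shows how the merged float bias ends up as the attn_mask argument of PyTorch 2.0's F.scaled_dot_product_attention:

    import torch
    import torch.nn.functional as F

    def flash_attn_with_alibi(q, k, v, mask=None, attn_bias=None):
        # q, k, v: (batch, heads, seq_len, dim_head)
        # mask: optional (batch, 1, 1, seq_len) bool key-padding mask, True = keep
        # attn_bias: optional (batch, heads, seq_len, seq_len) float ALiBi bias;
        #            this sketch assumes causal masking, if any, is already baked into it
        mask_value = -torch.finfo(q.dtype).max

        if attn_bias is not None and mask is not None:
            # fold the boolean key-padding mask into the additive float bias
            attn_bias = attn_bias.masked_fill(~mask, mask_value)

        if attn_bias is not None:
            # the fix: pass the merged float bias to the kernel as its attention mask
            mask = attn_bias

        # a float attn_mask is added to the attention logits before the softmax,
        # which is exactly what ALiBi needs; a bool attn_mask keeps True positions
        return F.scaled_dot_product_attention(q, k, v, attn_mask=mask, dropout_p=0.0, is_causal=False)

Because scaled_dot_product_attention adds a floating-point attn_mask to the attention logits, a single merged tensor can carry both the ALiBi slopes and the padding mask, and the kernel's built-in causal flag stays off.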
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'MEGABYTE-pytorch',
   packages = find_packages(),
-  version = '0.0.4',
+  version = '0.0.5',
   license='MIT',
   description = 'MEGABYTE - Pytorch',
   long_description_content_type = 'text/markdown',
