fix output of self-attention, release 0.1.0
lucidrains committed Oct 27, 2020
1 parent 00cebfe commit 0b1aa96
Showing 2 changed files with 3 additions and 2 deletions.
3 changes: 2 additions & 1 deletion isab_pytorch/isab_pytorch.py
@@ -18,6 +18,7 @@ def __init__(self, dim, heads = 8):
         self.scale = (dim // heads) ** -0.5
         self.to_q = nn.Linear(dim, dim, bias = False)
         self.to_kv = nn.Linear(dim, dim * 2, bias = False)
+        self.to_out = nn.Linear(dim, dim)

     def forward(self, x, context, mask = None):
         h, scale = self.heads, self.scale
@@ -36,7 +37,7 @@ def forward(self, x, context, mask = None):
         out = einsum('b h i j, b h j d -> b h i d', attn, v)

         out = rearrange(out, 'b h n d -> b n (h d)', h = h)
-        return out
+        return self.to_out(out)

 class ISAB(nn.Module):
     def __init__(
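
For context, below is a minimal sketch of the attention block as it stands after this commit. Only the new `to_out = nn.Linear(dim, dim)` layer and the final `return self.to_out(out)` are confirmed by the diff above; the query/key/value projections, head split, and masking are assumptions filled in to match the surrounding einops-based code. The point of the fix: after the heads are concatenated by `rearrange`, the output now passes through a learned linear layer so information can mix across heads, rather than returning the raw concatenation.

import torch
from torch import nn, einsum
from einops import rearrange

class Attention(nn.Module):
    def __init__(self, dim, heads = 8):
        super().__init__()
        self.heads = heads
        self.scale = (dim // heads) ** -0.5
        self.to_q = nn.Linear(dim, dim, bias = False)
        self.to_kv = nn.Linear(dim, dim * 2, bias = False)
        self.to_out = nn.Linear(dim, dim)   # added in this commit

    def forward(self, x, context, mask = None):
        h, scale = self.heads, self.scale

        # queries come from x, keys/values from the context set (assumed)
        q = self.to_q(x)
        k, v = self.to_kv(context).chunk(2, dim = -1)

        # split heads (assumed reshape, mirroring the merge shown in the diff)
        q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = h), (q, k, v))

        # scaled dot-product attention
        dots = einsum('b h i d, b h j d -> b h i j', q, k) * scale

        if mask is not None:
            # mask: (batch, context_len) boolean tensor (assumed)
            mask = rearrange(mask, 'b j -> b 1 1 j')
            dots = dots.masked_fill(~mask, torch.finfo(dots.dtype).min)

        attn = dots.softmax(dim = -1)
        out = einsum('b h i j, b h j d -> b h i d', attn, v)

        # merge heads, then mix them with the new output projection
        out = rearrange(out, 'b h n d -> b n (h d)', h = h)
        return self.to_out(out)              # previously just `return out`

# usage sketch: 16 queries attend to a separate context set of 128 elements
# attn = Attention(dim = 512, heads = 8)
# x, context = torch.randn(1, 16, 512), torch.randn(1, 128, 512)
# out = attn(x, context)   # shape (1, 16, 512)
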
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'isab-pytorch',
   packages = find_packages(),
-  version = '0.0.3',
+  version = '0.1.0',
   license='MIT',
   description = 'Induced Set Attention Block - Pytorch',
   author = 'Phil Wang',
