Skip to content

Commit

Permalink
Make sure rotary positional embeddings are the same dtype as the queries
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Nov 6, 2021
1 parent 48a1535 commit 3f6c461
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 1 deletion.
1 change: 1 addition & 0 deletions routing_transformer/routing_transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,7 @@ def rotate_every_two(x):
return rearrange(x, '... d j -> ... (d j)')

def apply_rotary_pos_emb(q, k, v, sinu_pos):
sinu_pos = sinu_pos.type(q.dtype)
sinu_pos = rearrange(sinu_pos, '() n (j d) -> n j d', j = 2)
sin, cos = sinu_pos.unbind(dim = -2)
sin, cos = map(lambda t: repeat(t, 'b n -> b (n j)', j = 2), (sin, cos))
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'routing_transformer',
packages = find_packages(exclude=['examples']),
version = '1.6.0',
version = '1.6.1',
license='MIT',
description = 'Routing Transformer (Pytorch)',
author = 'Phil Wang, Aran Komatsuzaki',
Expand Down

0 comments on commit 3f6c461

Please sign in to comment.