Commit
add beartype decorator to RingRotaryEmbedding
lucidrains committed Apr 11, 2024
1 parent aa7be57 commit 5fd1e48
Showing 2 changed files with 4 additions and 1 deletion.
ring_attention_pytorch/ring_attention.py: 3 additions & 0 deletions
@@ -40,6 +40,7 @@ def cast_tuple(t, length = 1):
 def divisible_by(num, den):
     return (num % den) == 0
 
+@beartype
 def default_attention(
     q: Tensor,
     k: Tensor,
@@ -80,6 +81,7 @@ def default_attention(
 # rotary embeddings with modifications to support striped attention
 
 class RingRotaryEmbedding(Module):
+    @beartype
     def __init__(
         self,
         dim,
@@ -105,6 +107,7 @@ def is_cuda(self):
         return self.inv_freq.is_cuda
 
     @autocast(enabled = False)
+    @beartype
     def forward(
         self,
         seq_len: int
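
For context, the beartype decorator adds runtime type checking: it wraps the function and validates each annotated argument (and any annotated return value) against its type hints on every call, raising a type-violation error on mismatch, so forward above now fails fast if seq_len is not an int. A minimal sketch of the effect, using a hypothetical scale function rather than code from this repository:

from beartype import beartype
from torch import Tensor
import torch

# hypothetical example, not part of ring-attention-pytorch
@beartype
def scale(x: Tensor, factor: float) -> Tensor:
    # beartype validates x, factor, and the return value against the hints
    return x * factor

scale(torch.ones(3), 2.0)    # passes: arguments match the annotations
# scale([1.0, 2.0], 2.0)     # raises a beartype call-hint violation error
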
setup.py: 1 addition & 1 deletion

@@ -3,7 +3,7 @@
 setup(
   name = 'ring-attention-pytorch',
   packages = find_packages(exclude=[]),
-  version = '0.3.20',
+  version = '0.3.21',
   license='MIT',
   description = 'Ring Attention - Pytorch',
   author = 'Phil Wang',
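
Since setup.py carries the package metadata, the version bump ships this change as a new release; assuming the package is published to PyPI under the name above, a consumer could pin it with pip install ring-attention-pytorch==0.3.21.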
