
Commit 96256aa

Merge pull request #2519 from amorehead/patch-1
Fix `head_dim` reference in `AttentionRope` class of `attention.py`
2 parents: d342565 + 2ae814f

1 file changed: +2 additions, −2 deletions

timm/layers/attention.py

Lines changed: 2 additions & 2 deletions
@@ -154,8 +154,8 @@ def __init__(
         self.k_proj = nn.Linear(dim, attn_dim, bias=qkv_bias)
         self.v_proj = nn.Linear(dim, attn_dim, bias=qkv_bias)
 
-        self.q_norm = norm_layer(self.head_dim) if qk_norm else nn.Identity()
-        self.k_norm = norm_layer(self.head_dim) if qk_norm else nn.Identity()
+        self.q_norm = norm_layer(head_dim) if qk_norm else nn.Identity()
+        self.k_norm = norm_layer(head_dim) if qk_norm else nn.Identity()
         self.attn_drop = nn.Dropout(attn_drop)
         self.norm = norm_layer(attn_dim) if scale_norm else nn.Identity()
         self.proj = nn.Linear(attn_dim, dim)
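The change swaps `self.head_dim` for the local `head_dim` when building the per-head QK norm layers. Presumably `head_dim` is computed as a local variable earlier in `__init__` and `self.head_dim` is not assigned at that point, so constructing `AttentionRope` with `qk_norm=True` would have failed with an `AttributeError`. Below is a minimal, hypothetical sketch of the pattern; the class name `AttentionRopeSketch` and the simplified constructor signature are illustrative assumptions, not the actual timm source.

```python
import torch.nn as nn


class AttentionRopeSketch(nn.Module):
    # Simplified stand-in for the relevant part of AttentionRope.__init__ (assumed shape).
    def __init__(self, dim=768, num_heads=12, attn_dim=None, qkv_bias=True,
                 qk_norm=True, scale_norm=False, attn_drop=0., norm_layer=nn.LayerNorm):
        super().__init__()
        attn_dim = attn_dim or dim
        head_dim = attn_dim // num_heads  # local variable, defined before the norm layers
        self.num_heads = num_heads
        self.q_proj = nn.Linear(dim, attn_dim, bias=qkv_bias)
        self.k_proj = nn.Linear(dim, attn_dim, bias=qkv_bias)
        self.v_proj = nn.Linear(dim, attn_dim, bias=qkv_bias)
        # Before the fix these read norm_layer(self.head_dim); in this sketch self.head_dim
        # is never set, so qk_norm=True would raise AttributeError. The local name works.
        self.q_norm = norm_layer(head_dim) if qk_norm else nn.Identity()
        self.k_norm = norm_layer(head_dim) if qk_norm else nn.Identity()
        self.attn_drop = nn.Dropout(attn_drop)
        self.norm = norm_layer(attn_dim) if scale_norm else nn.Identity()
        self.proj = nn.Linear(attn_dim, dim)


if __name__ == "__main__":
    # Instantiating with qk_norm=True succeeds because the norm layers use the local head_dim.
    m = AttentionRopeSketch(dim=768, num_heads=12, qk_norm=True)
    print(m.q_norm)  # LayerNorm over head_dim = 768 // 12 = 64 features
```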
