integrated edits as recommended in the PR #15804
AUTOMATIC1111 committed Jun 8, 2024
1 parent de7f5cd commit 0769aa3
Showing 1 changed file with 1 addition and 13 deletions: modules/sd_hijack_optimizations.py
@@ -486,18 +486,7 @@ def xformers_attention_forward(self, x, context=None, mask=None, **kwargs):
     k_in = self.to_k(context_k)
     v_in = self.to_v(context_v)
 
-    def _reshape(t):
-        """rearrange(t, 'b n (h d) -> b n h d', h=h).
-        Using torch native operations to avoid overhead as this function is
-        called frequently. (70 times/it for SDXL)
-        """
-        b, n, _ = t.shape  # Get the batch size (b) and sequence length (n)
-        d = t.shape[2] // h  # Determine the depth per head
-        return t.reshape(b, n, h, d)
-
-    q = _reshape(q_in)
-    k = _reshape(k_in)
-    v = _reshape(v_in)
+    q, k, v = (t.reshape(t.shape[0], t.shape[1], h, -1) for t in (q_in, k_in, v_in))
 
     del q_in, k_in, v_in
 
@@ -509,7 +498,6 @@ def _reshape(t):

     out = out.to(dtype)
 
-    # out = rearrange(out, 'b n h d -> b n (h d)', h=h)
     b, n, h, d = out.shape
     out = out.reshape(b, n, h * d)
     return self.to_out(out)
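The added one-liner is behavior-preserving: t.reshape(t.shape[0], t.shape[1], h, -1) lets PyTorch infer the per-head depth that the removed _reshape helper computed explicitly as t.shape[2] // h. Below is a minimal standalone sketch of that equivalence; the tensor sizes are illustrative assumptions for the example, not values taken from this file.

import torch

b, n, h, d = 2, 77, 8, 40        # assumed batch, sequence length, heads, per-head depth
t = torch.randn(b, n, h * d)     # shape (b, n, h*d), as produced by to_q/to_k/to_v

# Removed helper's approach: compute the per-head depth explicitly.
old = t.reshape(b, n, h, t.shape[2] // h)

# New one-liner's approach: let reshape infer the per-head depth with -1.
new = t.reshape(t.shape[0], t.shape[1], h, -1)

assert old.shape == new.shape == (b, n, h, d)
assert torch.equal(old, new)     # identical contents; both are views of t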
