Skip to content

Commit

Permalink
Fix potential issue: use an explicit dimension index (3) instead of -1 in the movedim calls in attention.py.
Browse files Browse the repository at this point in the history
  • Loading branch information
comfyanonymous committed May 20, 2024
1 parent 276f8fc commit 1900e51
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions comfy/ldm/modules/attention.py
Original file line number Diff line number Diff line change
Expand Up @@ -625,15 +625,15 @@ def forward(self, x, context=None, transformer_options={}):
x = self.norm(x)
if not self.use_linear:
x = self.proj_in(x)
-                    x = x.movedim(1, -1).flatten(1, 2).contiguous()
+                    x = x.movedim(1, 3).flatten(1, 2).contiguous()
if self.use_linear:
x = self.proj_in(x)
for i, block in enumerate(self.transformer_blocks):
transformer_options["block_index"] = i
x = block(x, context=context[i], transformer_options=transformer_options)
if self.use_linear:
x = self.proj_out(x)
-            x = x.reshape(x.shape[0], h, w, x.shape[-1]).movedim(-1, 1).contiguous()
+            x = x.reshape(x.shape[0], h, w, x.shape[-1]).movedim(3, 1).contiguous()
if not self.use_linear:
x = self.proj_out(x)
return x + x_in
Expand Down

0 comments on commit 1900e51

Please sign in to comment.