Skip to content

Commit

Permalink
Reduce floating-point rounding errors in LoRAs.
Browse files Browse the repository at this point in the history
  • Loading branch information
comfyanonymous committed Jul 15, 2023
1 parent 91ed281 commit 6fb084f
Showing 1 changed file with 3 additions and 1 deletion.
4 changes: 3 additions & 1 deletion comfy/sd.py
Original file line number Diff line number Diff line change
Expand Up @@ -342,7 +342,9 @@ def patch_model(self):
if key not in self.backup:
self.backup[key] = weight.clone()

weight[:] = self.calculate_weight(self.patches[key], weight.clone(), key)
temp_weight = weight.to(torch.float32, copy=True)
weight[:] = self.calculate_weight(self.patches[key], temp_weight, key).to(weight.dtype)
del temp_weight
return self.model

def calculate_weight(self, patches, weight, key):
Expand Down

0 comments on commit 6fb084f

Please sign in to comment.