Skip to content

Commit

Permalink
Fix ControlNet not upcasting attention on models that have attention upcasting enabled.
Browse files Browse the repository at this point in the history
  • Loading branch information
comfyanonymous committed May 19, 2024
1 parent 4ae1515 commit 11a2ad5
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions comfy/cldm/cldm.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ def __init__(
adm_in_channels=None,
transformer_depth_middle=None,
transformer_depth_output=None,
attn_precision=None,
device=None,
operations=comfy.ops.disable_weight_init,
**kwargs,
Expand Down Expand Up @@ -202,7 +203,7 @@ def __init__(
SpatialTransformer(
ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim,
disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer,
use_checkpoint=use_checkpoint, dtype=self.dtype, device=device, operations=operations
use_checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=self.dtype, device=device, operations=operations
)
)
self.input_blocks.append(TimestepEmbedSequential(*layers))
Expand Down Expand Up @@ -262,7 +263,7 @@ def __init__(
mid_block += [SpatialTransformer( # always uses a self-attn
ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim,
disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer,
use_checkpoint=use_checkpoint, dtype=self.dtype, device=device, operations=operations
use_checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=self.dtype, device=device, operations=operations
),
ResBlock(
ch,
Expand Down

0 comments on commit 11a2ad5

Please sign in to comment.