make SD2 compatible with --medvram setting
AUTOMATIC1111 committed Nov 26, 2022
1 parent 64c7b79 commit b5050ad
Showing 1 changed file with 8 additions and 0 deletions.
8 changes: 8 additions & 0 deletions modules/lowvram.py
@@ -51,6 +51,10 @@ def first_stage_model_decode_wrap(z):
        send_me_to_gpu(first_stage_model, None)
        return first_stage_model_decode(z)

    # for SD1, cond_stage_model is CLIP and its NN is in the transformer field, but for SD2, it's OpenCLIP and it's in the model field
    if hasattr(sd_model.cond_stage_model, 'model'):
        sd_model.cond_stage_model.transformer = sd_model.cond_stage_model.model

    # remove the three big modules, cond, first_stage, and unet, from the model and then
    # send the model to the GPU. Then put the modules back. The modules will stay on the CPU.
    stored = sd_model.cond_stage_model.transformer, sd_model.first_stage_model, sd_model.model
@@ -65,6 +69,10 @@ def first_stage_model_decode_wrap(z):
    sd_model.first_stage_model.decode = first_stage_model_decode_wrap
    parents[sd_model.cond_stage_model.transformer] = sd_model.cond_stage_model

    if hasattr(sd_model.cond_stage_model, 'model'):
        sd_model.cond_stage_model.model = sd_model.cond_stage_model.transformer
        del sd_model.cond_stage_model.transformer

    if use_medvram:
        sd_model.model.register_forward_pre_hook(send_me_to_gpu)
    else:
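
For context, here is a minimal, self-contained sketch of the two ideas this diff combines: a register_forward_pre_hook callback that moves a module onto the GPU only when it is about to run, and the hasattr(..., 'model') check that aliases SD2's OpenCLIP network onto the .transformer attribute the existing SD1 lowvram code already hooks. This is not code from the repository; FakeSD2CondStage and this simplified send_me_to_gpu are illustrative stand-ins for the real lowvram machinery.

import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
cpu = torch.device("cpu")

module_in_gpu = None  # the one module currently allowed to sit on the GPU


def send_me_to_gpu(module, _inputs):
    """Forward pre-hook: evict whatever module currently occupies the GPU, then move `module` there."""
    global module_in_gpu
    if module is module_in_gpu:
        return
    if module_in_gpu is not None:
        module_in_gpu.to(cpu)
    module.to(device)
    module_in_gpu = module


class FakeSD2CondStage(nn.Module):
    """Illustrative stand-in for SD2's cond_stage_model: its network lives in `.model`, not `.transformer`."""
    def __init__(self):
        super().__init__()
        self.model = nn.Linear(8, 8)


cond_stage = FakeSD2CondStage()

# The commit's aliasing trick: if the SD2-style `.model` attribute exists, expose the same
# object under `.transformer`, so code written against the SD1 layout can hook it unchanged.
if hasattr(cond_stage, 'model'):
    cond_stage.transformer = cond_stage.model

cond_stage.transformer.register_forward_pre_hook(send_me_to_gpu)

# Running the module fires the pre-hook, which moves it to `device` just before forward() executes.
out = cond_stage.transformer(torch.randn(1, 8, device=device))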
