From 6ed537fb3406203ec968cc4cdfb4395070980488 Mon Sep 17 00:00:00 2001
From: William Berman
Date: Mon, 6 Mar 2023 01:39:50 -0800
Subject: [PATCH] ema step, don't empty cuda cache

---
 src/diffusers/training_utils.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/diffusers/training_utils.py b/src/diffusers/training_utils.py
index 67a8e48d381f..340b96e29ac5 100644
--- a/src/diffusers/training_utils.py
+++ b/src/diffusers/training_utils.py
@@ -203,8 +203,6 @@ def step(self, parameters: Iterable[torch.nn.Parameter]):
             else:
                 s_param.copy_(param)
 
-            torch.cuda.empty_cache()
-
     def copy_to(self, parameters: Iterable[torch.nn.Parameter]) -> None:
         """
         Copy current averaged parameters into given collection of parameters.
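
For context, the patch only drops the per-step torch.cuda.empty_cache() call from EMAModel.step; the EMA update loop itself is unchanged. Below is a minimal, hypothetical sketch of that update loop without the cache flush. Names such as shadow_params and the fixed decay value are assumptions for illustration, not the exact diffusers implementation.

    from typing import Iterable

    import torch


    class EMAModel:
        # Reduced illustration: the real class also handles decay warmup,
        # optimization-step tracking, config (de)serialization, etc.
        def __init__(self, parameters: Iterable[torch.nn.Parameter], decay: float = 0.9999):
            self.decay = decay
            # Detached copies of the parameters act as the averaged "shadow" weights.
            self.shadow_params = [p.clone().detach() for p in parameters]

        @torch.no_grad()
        def step(self, parameters: Iterable[torch.nn.Parameter]):
            one_minus_decay = 1 - self.decay
            for s_param, param in zip(self.shadow_params, parameters):
                if param.requires_grad:
                    # In-place EMA update: s <- s - (1 - decay) * (s - p)
                    s_param.sub_(one_minus_decay * (s_param - param))
                else:
                    s_param.copy_(param)
            # Note: no torch.cuda.empty_cache() here; the patch removes that
            # per-step call so the caching allocator's memory is reused
            # rather than flushed on every EMA update.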