From f7278638e48d4a2e6c619d08078a3a622e4db438 Mon Sep 17 00:00:00 2001
From: Will Berman
Date: Mon, 6 Mar 2023 01:54:56 -0800
Subject: [PATCH] ema step, don't empty cuda cache (#2563)

---
 src/diffusers/training_utils.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/diffusers/training_utils.py b/src/diffusers/training_utils.py
index 67a8e48d38..340b96e29a 100644
--- a/src/diffusers/training_utils.py
+++ b/src/diffusers/training_utils.py
@@ -203,8 +203,6 @@ class EMAModel:
             else:
                 s_param.copy_(param)
 
-        torch.cuda.empty_cache()
-
     def copy_to(self, parameters: Iterable[torch.nn.Parameter]) -> None:
         """
         Copy current averaged parameters into given collection of parameters.
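
For context, below is a minimal sketch (not the actual diffusers implementation) of the exponential-moving-average update that `EMAModel.step` performs over model parameters. The function and variable names (`ema_step`, `shadow_params`) are illustrative only. The point of the patch is visible in the comment at the end of the loop: `torch.cuda.empty_cache()` is an expensive, synchronizing call, so it should not run on every EMA step.

```python
# Minimal sketch of an EMA parameter update, assuming shadow parameters are
# kept as a list of tensors mirroring the model's parameters.
import torch


@torch.no_grad()
def ema_step(shadow_params, params, decay: float = 0.9999) -> None:
    """Update `shadow_params` in place toward `params` with EMA decay."""
    one_minus_decay = 1.0 - decay
    for s_param, param in zip(shadow_params, params):
        if param.requires_grad:
            # s_param <- s_param - (1 - decay) * (s_param - param)
            s_param.sub_(one_minus_decay * (s_param - param))
        else:
            s_param.copy_(param)
    # Note: no torch.cuda.empty_cache() here. Releasing the CUDA caching
    # allocator on every step adds per-step overhead without lowering the
    # model's peak memory requirement, which is why the patch removes it.


# Usage sketch with a toy model (names are illustrative):
model = torch.nn.Linear(4, 4)
shadow = [p.detach().clone() for p in model.parameters()]
ema_step(shadow, list(model.parameters()), decay=0.999)
```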