diff --git a/src/diffusers/training_utils.py b/src/diffusers/training_utils.py
index 67a8e48d38..340b96e29a 100644
--- a/src/diffusers/training_utils.py
+++ b/src/diffusers/training_utils.py
@@ -203,8 +203,6 @@ class EMAModel:
             else:
                 s_param.copy_(param)
 
-        torch.cuda.empty_cache()
-
     def copy_to(self, parameters: Iterable[torch.nn.Parameter]) -> None:
        """
        Copy current averaged parameters into given collection of parameters.
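
For context, a minimal sketch of the EMA update that the hunk above touches, assuming the usual diffusers-style shadow-parameter step; the function name and signature here are illustrative, not the library's exact code. The point of the change is that the update itself is untouched, only the per-step `torch.cuda.empty_cache()` call (which forces an allocator flush every training step) goes away.

```python
import torch


@torch.no_grad()
def ema_step(shadow_params, parameters, decay=0.9999):
    """Illustrative EMA step: blend shadow params toward the live params."""
    one_minus_decay = 1.0 - decay
    for s_param, param in zip(shadow_params, parameters):
        if param.requires_grad:
            # Exponential moving average: s <- s - (1 - decay) * (s - p)
            s_param.sub_(one_minus_decay * (s_param - param))
        else:
            s_param.copy_(param)
    # The removed line would have been here: torch.cuda.empty_cache().
    # Dropping it avoids a per-step CUDA allocator flush without changing
    # the averaged values.


# Usage sketch
model = torch.nn.Linear(4, 4)
shadow = [p.detach().clone() for p in model.parameters()]
ema_step(shadow, model.parameters())
```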