From 289c385d73a44d59e628c76793e294e383672b8b Mon Sep 17 00:00:00 2001
From: DN6
Date: Thu, 19 Jun 2025 21:57:21 +0530
Subject: [PATCH] update

---
 src/diffusers/hooks/group_offloading.py | 22 ----------------------
 1 file changed, 22 deletions(-)

diff --git a/src/diffusers/hooks/group_offloading.py b/src/diffusers/hooks/group_offloading.py
index 577578edd2..0015c50c7c 100644
--- a/src/diffusers/hooks/group_offloading.py
+++ b/src/diffusers/hooks/group_offloading.py
@@ -240,28 +240,6 @@ class ModuleGroup:
                 os.makedirs(os.path.dirname(self.safetensors_file_path), exist_ok=True)
                 tensors_to_save = {key: tensor.data.to(self.offload_device) for tensor, key in self.tensor_to_key.items()}
                 safetensors.torch.save_file(tensors_to_save, self.safetensors_file_path)
-    def offload_(self):
-        r"""Offloads the group of modules to the offload_device."""
-        if self.offload_to_disk_path:
-            # TODO: we can potentially optimize this code path by checking if the _all_ the desired
-            # safetensor files exist on the disk and if so, skip this step entirely, reducing IO
-            # overhead. Currently, we just check if the given `safetensors_file_path` exists and if not
-            # we perform a write.
-            # Check if the file has been saved in this session or if it already exists on disk.
-            if not self._is_offloaded_to_disk and not os.path.exists(self.safetensors_file_path):
-                os.makedirs(os.path.dirname(self.safetensors_file_path), exist_ok=True)
-                tensors_to_save = {
-                    key: tensor.data.to(self.offload_device) for tensor, key in self.tensor_to_key.items()
-                }
-                safetensors.torch.save_file(tensors_to_save, self.safetensors_file_path)
-
-            # The group is now considered offloaded to disk for the rest of the session.
-            self._is_offloaded_to_disk = True
-
-            # We do this to free up the RAM which is still holding the up tensor data.
-            for tensor_obj in self.tensor_to_key.keys():
-                tensor_obj.data = torch.empty_like(tensor_obj.data, device=self.offload_device)
-            return
 
             # The group is now considered offloaded to disk for the rest of the session.
             self._is_offloaded_to_disk = True
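
For context, here is a minimal sketch of the disk-offload flow that survives this deletion, based only on the code visible in the hunk. The class name DiskOffloadGroup and its constructor are hypothetical stand-ins for the real ModuleGroup; the attribute names (tensor_to_key, offload_device, safetensors_file_path, _is_offloaded_to_disk) come from the patch itself.

import os

import safetensors.torch
import torch


class DiskOffloadGroup:
    """Hypothetical stand-in for ModuleGroup's disk-offload state."""

    def __init__(self, tensors, offload_dir):
        # Map each parameter tensor to the key it is saved under on disk.
        self.tensor_to_key = {t: f"tensor_{i}" for i, t in enumerate(tensors)}
        self.offload_device = torch.device("cpu")
        self.safetensors_file_path = os.path.join(offload_dir, "group.safetensors")
        self._is_offloaded_to_disk = False

    def offload_(self):
        """Offloads the group's tensors to disk, then frees the RAM copies."""
        # Skip the write if the file was saved earlier in this session or
        # already exists on disk (the same check the kept code path performs).
        if not self._is_offloaded_to_disk and not os.path.exists(self.safetensors_file_path):
            os.makedirs(os.path.dirname(self.safetensors_file_path), exist_ok=True)
            tensors_to_save = {key: t.data.to(self.offload_device) for t, key in self.tensor_to_key.items()}
            safetensors.torch.save_file(tensors_to_save, self.safetensors_file_path)

        # The group is now considered offloaded to disk for the rest of the session.
        self._is_offloaded_to_disk = True

        # Point each tensor at an uninitialized placeholder so the RAM that was
        # holding the real data can be reclaimed.
        for tensor_obj in self.tensor_to_key.keys():
            tensor_obj.data = torch.empty_like(tensor_obj.data, device=self.offload_device)

In use, something like DiskOffloadGroup(list(model.parameters()), "/tmp/offload").offload_() would write the tensors once and replace their in-memory storage with empty placeholders; a matching load step would read them back with safetensors.torch.load_file.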