
Reduce Memory Cost in Flux Training (#9829)

* Improve NPU performance

* [bugfix] Fix NPU memory freeing

* Reduce memory cost in the Flux training process

---------

Co-authored-by: 蒋硕 <jiangshuo9@h-partners.com>
Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
Author: Leo Jiang
Date: 2024-11-01 00:49:32 -06:00
Committed by: GitHub
Parent: 3deed729e6
Commit: a98a839de7
2 changed files with 12 additions and 0 deletions
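
The change applies the usual free-after-validation pattern: drop every Python reference to the validation pipeline and its generated images, then run a garbage-collection pass and empty the device cache so the training loop can reuse that memory. A minimal sketch of the pattern, assuming a CUDA or Ascend NPU device and a hypothetical helper name:

import gc

import torch


def free_validation_memory() -> None:
    """Run a GC pass and return cached device memory to the allocator.

    Call this only after dropping every reference to the validation
    pipeline and its outputs (images = None; del pipeline), as the hunks
    below do; otherwise the weights stay resident on the device.
    """
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
    # On Ascend NPUs the scripts call torch_npu.npu.empty_cache() instead,
    # guarded by an import of torch_npu.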


@@ -1740,6 +1740,9 @@ def main(args):
torch_npu.npu.empty_cache()
gc.collect()
images = None
del pipeline
# Save the lora layers
accelerator.wait_for_everyone()
if accelerator.is_main_process:
@@ -1798,6 +1801,9 @@ def main(args):
ignore_patterns=["step_*", "epoch_*"],
)
images = None
del pipeline
accelerator.end_training()


@@ -1844,6 +1844,9 @@ def main(args):
del text_encoder_one, text_encoder_two
free_memory()
images = None
del pipeline
# Save the lora layers
accelerator.wait_for_everyone()
if accelerator.is_main_process:
@@ -1908,6 +1911,9 @@ def main(args):
ignore_patterns=["step_*", "epoch_*"],
)
images = None
del pipeline
accelerator.end_training()
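
The second file's context already calls free_memory() just above the added lines; in the diffusers training scripts this helper typically comes from diffusers.training_utils and wraps the same garbage-collection and cache-clearing calls for whichever accelerator is available. A hedged usage sketch, with pipeline and images standing in for the objects built while logging validation results:

from diffusers.training_utils import free_memory

pipeline = ...  # validation pipeline assembled earlier in main()
images = ...    # images generated while logging validation results

# Drop the references first, then let the helper collect garbage and
# empty the accelerator cache.
images = None
del pipeline
free_memory()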