From 9918d13eba295d878aa68f5b1ae10aca1a2fc2f6 Mon Sep 17 00:00:00 2001
From: MQY <3463526515@qq.com>
Date: Mon, 18 Aug 2025 16:26:17 +0800
Subject: [PATCH] fix(training_utils): wrap device in list for DiffusionPipeline
 (#12178)

- Modify offload_models function to handle DiffusionPipeline correctly
- Ensure compatibility with both single and multiple module inputs

Co-authored-by: Sayak Paul
---
 src/diffusers/training_utils.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/diffusers/training_utils.py b/src/diffusers/training_utils.py
index d33b80dba0..7a98fa3da1 100644
--- a/src/diffusers/training_utils.py
+++ b/src/diffusers/training_utils.py
@@ -339,7 +339,8 @@ def offload_models(
             original_devices = [next(m.parameters()).device for m in modules]
         else:
             assert len(modules) == 1
-            original_devices = modules[0].device
+            # For DiffusionPipeline, wrap the device in a list to make it iterable
+            original_devices = [modules[0].device]
     # move to target device
     for m in modules:
         m.to(device)
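
For context, a minimal usage sketch of the code path this patch fixes (not part of the patch itself). It assumes offload_models accepts one or more modules or a single DiffusionPipeline plus a target device keyword, as suggested by the hunk; the checkpoint id below is a placeholder and the CUDA check is an assumption about the runtime environment.

# Illustrative sketch only; parameter usage and checkpoint id are assumptions.
import torch

from diffusers import DiffusionPipeline
from diffusers.training_utils import offload_models

device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = DiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")  # placeholder checkpoint

# Single DiffusionPipeline argument: with this patch, the pipeline's original
# device is wrapped in a list, so restoring devices on exit can iterate over it.
with offload_models(pipe, device=device):
    ...  # run steps on `device`; the pipeline is moved back afterwards

# Multiple nn.Module arguments keep working as before: each module's original
# device is recorded individually.
with offload_models(pipe.unet, pipe.text_encoder, device=device):
    ...  # modules live on `device` inside the context, then return to their original devices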