diff --git a/src/diffusers/loaders/lora.py b/src/diffusers/loaders/lora.py
index 5d89658830..8703cdee40 100644
--- a/src/diffusers/loaders/lora.py
+++ b/src/diffusers/loaders/lora.py
@@ -1268,9 +1268,10 @@ class LoraLoaderMixin:
                     unet_module.lora_A[adapter_name].to(device)
                     unet_module.lora_B[adapter_name].to(device)
                     # this is a param, not a module, so device placement is not in-place -> re-assign
-                    unet_module.lora_magnitude_vector[adapter_name] = unet_module.lora_magnitude_vector[
-                        adapter_name
-                    ].to(device)
+                    if hasattr(unet_module, "lora_magnitude_vector") and unet_module.lora_magnitude_vector is not None:
+                        unet_module.lora_magnitude_vector[adapter_name] = unet_module.lora_magnitude_vector[
+                            adapter_name
+                        ].to(device)
 
         # Handle the text encoder
         modules_to_process = []
@@ -1288,9 +1289,13 @@ class LoraLoaderMixin:
                     text_encoder_module.lora_A[adapter_name].to(device)
                     text_encoder_module.lora_B[adapter_name].to(device)
                     # this is a param, not a module, so device placement is not in-place -> re-assign
-                    text_encoder_module.lora_magnitude_vector[
-                        adapter_name
-                    ] = text_encoder_module.lora_magnitude_vector[adapter_name].to(device)
+                    if (
+                        hasattr(text_encoder_module, "lora_magnitude_vector")
+                        and text_encoder_module.lora_magnitude_vector is not None
+                    ):
+                        text_encoder_module.lora_magnitude_vector[
+                            adapter_name
+                        ] = text_encoder_module.lora_magnitude_vector[adapter_name].to(device)
 
 
 class StableDiffusionXLLoraLoaderMixin(LoraLoaderMixin):