From 5901c2508bce6dd70ebdf8380de1cfde13a3d38a Mon Sep 17 00:00:00 2001
From: linoytsaban
Date: Wed, 21 May 2025 18:50:31 +0300
Subject: [PATCH] vace padding

---
 src/diffusers/loaders/lora_pipeline.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/diffusers/loaders/lora_pipeline.py b/src/diffusers/loaders/lora_pipeline.py
index 61d5df3fd9..c6c95006f3 100644
--- a/src/diffusers/loaders/lora_pipeline.py
+++ b/src/diffusers/loaders/lora_pipeline.py
@@ -4943,6 +4943,7 @@ class WanLoraLoaderMixin(LoraBaseMixin):
         # First, ensure that the checkpoint is a compatible one and can be successfully loaded.
         state_dict = self.lora_state_dict(pretrained_model_name_or_path_or_dict, **kwargs)
         # convert T2V LoRA to I2V LoRA (when loaded to Wan I2V) by adding zeros for the additional (missing) _img layers
+        print("_maybe_expand_t2v_lora_for_i2v?????????????????")
         state_dict = self._maybe_expand_t2v_lora_for_i2v(
             transformer=getattr(self, self.transformer_name) if not hasattr(self, "transformer") else self.transformer,
             state_dict=state_dict,
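
The comment above the patched call describes the trick at work here: a text-to-video (T2V) Wan LoRA has no weights for the image-attention projections that the I2V transformer adds, so _maybe_expand_t2v_lora_for_i2v pads the state dict with zero tensors for those missing "_img" layers before loading. Zero-valued lora_A/lora_B pairs contribute a zero delta, so the image path is left untouched while the checkpoint loads without missing-key errors. The following is a minimal sketch of that idea; the key names (attn2.add_k_proj, attn2.add_v_proj), the tensor shapes, and the helper name expand_t2v_lora_for_i2v are illustrative assumptions, not the library's exact implementation.

    # Sketch only: pad a T2V LoRA state dict with zero weights for the
    # I2V-only image-attention layers. Key names and shapes are assumed
    # for illustration and may differ from diffusers' internal layout.
    import torch


    def expand_t2v_lora_for_i2v(state_dict: dict, num_blocks: int) -> dict:
        """Zero-pad a T2V LoRA state dict so it fits an I2V transformer."""
        for i in range(num_blocks):
            # Reuse the shapes of the existing cross-attention key projection.
            ref_a = state_dict.get(f"transformer.blocks.{i}.attn2.to_k.lora_A.weight")
            ref_b = state_dict.get(f"transformer.blocks.{i}.attn2.to_k.lora_B.weight")
            if ref_a is None or ref_b is None:
                continue  # this block is not covered by the LoRA
            for proj in ("add_k_proj", "add_v_proj"):  # the extra I2V-only layers
                prefix = f"transformer.blocks.{i}.attn2.{proj}"
                # Zero B weights guarantee the added layers change nothing.
                state_dict.setdefault(f"{prefix}.lora_A.weight", torch.zeros_like(ref_a))
                state_dict.setdefault(f"{prefix}.lora_B.weight", torch.zeros_like(ref_b))
        return state_dict


    # Smoke test with fake rank-4 LoRA weights for a single block.
    sd = {
        "transformer.blocks.0.attn2.to_k.lora_A.weight": torch.randn(4, 64),
        "transformer.blocks.0.attn2.to_k.lora_B.weight": torch.randn(64, 4),
    }
    sd = expand_t2v_lora_for_i2v(sd, num_blocks=1)
    assert sd["transformer.blocks.0.attn2.add_k_proj.lora_B.weight"].abs().sum() == 0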