From 46755bdf1a2d58dab2a93d0840348803e0cdc931 Mon Sep 17 00:00:00 2001
From: linoytsaban
Date: Wed, 21 May 2025 18:24:05 +0300
Subject: [PATCH] vace padding

---
 src/diffusers/loaders/lora_pipeline.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/loaders/lora_pipeline.py b/src/diffusers/loaders/lora_pipeline.py
index 8be620b0d6..61d5df3fd9 100644
--- a/src/diffusers/loaders/lora_pipeline.py
+++ b/src/diffusers/loaders/lora_pipeline.py
@@ -4814,7 +4814,8 @@ class WanLoraLoaderMixin(LoraBaseMixin):
             return state_dict
 
         target_device = transformer.device
-
+        print("expanding t2v to i2v")
+        print("wtf 0",hasattr(transformer, 'vace_blocks'))
         if any(k.startswith("transformer.blocks.") for k in state_dict):
             num_blocks = len({k.split("blocks.")[1].split(".")[0] for k in state_dict if "blocks." in k})
             is_i2v_lora = any("add_k_proj" in k for k in state_dict) and any("add_v_proj" in k for k in state_dict)
@@ -4850,7 +4851,6 @@ class WanLoraLoaderMixin(LoraBaseMixin):
                         device=target_device,
                     )
 
-                    print(hasattr(transformer, 'vace_blocks'))
                     if hasattr(transformer, 'vace_blocks'):
                         print(f"{i}, WTF 0")