From 6682956333b0105433fa31ec319aada7ffff2924 Mon Sep 17 00:00:00 2001
From: Ju Hoon Park
Date: Wed, 3 Sep 2025 18:35:41 +0900
Subject: [PATCH] Add AttentionMixin to WanVACETransformer3DModel (#12268)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Add AttentionMixin to WanVACETransformer3DModel to enable methods like `set_attn_processor()`.

* Import AttentionMixin in transformer_wan_vace.py

Special thanks to @tolgacangoz 🙇‍♂️
---
 src/diffusers/models/transformers/transformer_wan_vace.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/models/transformers/transformer_wan_vace.py b/src/diffusers/models/transformers/transformer_wan_vace.py
index e039d36219..e5a9c7e0a6 100644
--- a/src/diffusers/models/transformers/transformer_wan_vace.py
+++ b/src/diffusers/models/transformers/transformer_wan_vace.py
@@ -21,7 +21,7 @@ import torch.nn as nn
 from ...configuration_utils import ConfigMixin, register_to_config
 from ...loaders import FromOriginalModelMixin, PeftAdapterMixin
 from ...utils import USE_PEFT_BACKEND, logging, scale_lora_layers, unscale_lora_layers
-from ..attention import FeedForward
+from ..attention import AttentionMixin, FeedForward
 from ..cache_utils import CacheMixin
 from ..modeling_outputs import Transformer2DModelOutput
 from ..modeling_utils import ModelMixin
@@ -134,7 +134,9 @@ class WanVACETransformerBlock(nn.Module):
         return conditioning_states, control_hidden_states


-class WanVACETransformer3DModel(ModelMixin, ConfigMixin, PeftAdapterMixin, FromOriginalModelMixin, CacheMixin):
+class WanVACETransformer3DModel(
+    ModelMixin, ConfigMixin, PeftAdapterMixin, FromOriginalModelMixin, CacheMixin, AttentionMixin
+):
     r"""
     A Transformer model for video-like data used in the Wan model.
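
For context, a minimal usage sketch (not part of the patch) of what this change enables: with `AttentionMixin` in the class's bases, `WanVACETransformer3DModel` gains the `attn_processors` property and `set_attn_processor()`. The checkpoint id below is hypothetical; substitute a real Wan VACE transformer checkpoint.

```python
# Minimal sketch, assuming the patch above is applied and a suitable checkpoint exists.
import torch

from diffusers.models.attention_processor import AttnProcessor2_0
from diffusers.models.transformers.transformer_wan_vace import WanVACETransformer3DModel

transformer = WanVACETransformer3DModel.from_pretrained(
    "some-org/wan-vace-transformer",  # hypothetical repo id, for illustration only
    torch_dtype=torch.bfloat16,
)

# Inspect the attention processors currently attached to each attention layer.
print(transformer.attn_processors)

# Swap every attention layer over to the SDPA-based processor in one call,
# which is the kind of method AttentionMixin makes available on this model.
transformer.set_attn_processor(AttnProcessor2_0())
```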