Mirror of https://github.com/huggingface/diffusers.git (synced 2026-01-27 17:22:53 +03:00)
feat: when using PT 2.0 use LoRAAttnProcessor2_0 for text enc LoRA. (#3691)
@@ -1168,7 +1168,10 @@ class LoraLoaderMixin:
             cross_attention_dim = value_dict["to_k_lora.down.weight"].shape[1]
             hidden_size = value_dict["to_k_lora.up.weight"].shape[0]
 
-            attn_processors[key] = LoRAAttnProcessor(
+            attn_processor_class = (
+                LoRAAttnProcessor2_0 if hasattr(F, "scaled_dot_product_attention") else LoRAAttnProcessor
+            )
+            attn_processors[key] = attn_processor_class(
                 hidden_size=hidden_size,
                 cross_attention_dim=cross_attention_dim,
                 rank=rank,
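The dispatch rests on a runtime feature probe rather than a version-string comparison: torch.nn.functional.scaled_dot_product_attention first appeared in PyTorch 2.0, so its presence implies the fused attention kernel is available. A minimal standalone sketch of the same check (the two processor classes here are stand-ins for illustration, not the diffusers classes):

import torch.nn.functional as F


class VanillaProcessor:  # stand-in for LoRAAttnProcessor
    pass


class FusedProcessor:  # stand-in for LoRAAttnProcessor2_0
    pass


# hasattr doubles as a "PyTorch >= 2.0" probe: scaled_dot_product_attention
# was added to torch.nn.functional in the 2.0 release.
processor_class = (
    FusedProcessor if hasattr(F, "scaled_dot_product_attention") else VanillaProcessor
)
print(f"selected: {processor_class.__name__}")

Probing the attribute instead of parsing torch.__version__ keeps the check robust on pre-release or custom builds that ship the kernel under a nonstandard version string.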