From 4dcd6729071306251ea9f49639938c0ea9be0672 Mon Sep 17 00:00:00 2001
From: Aryan
Date: Tue, 15 Jul 2025 12:29:28 +0200
Subject: [PATCH] add back set_attention_backend

---
 src/diffusers/models/attention.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py
index b174cb093d..2d5eaaa691 100644
--- a/src/diffusers/models/attention.py
+++ b/src/diffusers/models/attention.py
@@ -160,6 +160,16 @@ class AttentionModuleMixin:
         """
         if not return_deprecated_lora:
             return self.processor
+
+    def set_attention_backend(self, backend: str):
+        from .attention_dispatch import AttentionBackendName
+
+        # Lowercase before validating so the check matches the case-insensitive conversion below.
+        backend = backend.lower()
+        available_backends = {x.value for x in AttentionBackendName.__members__.values()}
+        if backend not in available_backends:
+            raise ValueError(f"`{backend=}` must be one of the following: " + ", ".join(available_backends))
+        self.processor._attention_backend = AttentionBackendName(backend)

     def set_use_npu_flash_attention(self, use_npu_flash_attention: bool) -> None:
         """
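
Usage sketch (not part of this patch): since set_attention_backend is defined
on AttentionModuleMixin, it is called on individual attention modules rather
than on the top-level model. The model class, checkpoint path, and the backend
name "flash" below are illustrative assumptions; valid names are the values of
AttentionBackendName (e.g. "native").

    import torch
    from diffusers import FluxTransformer2DModel

    model = FluxTransformer2DModel.from_pretrained(
        "black-forest-labs/FLUX.1-dev", subfolder="transformer", torch_dtype=torch.bfloat16
    )

    # Switch every attention submodule to the requested backend; hasattr guards
    # against submodules that do not carry the mixin.
    for module in model.modules():
        if hasattr(module, "set_attention_backend"):
            module.set_attention_backend("flash")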