From 2bb37965699fc81c339a81a68ec8566c99695ea6 Mon Sep 17 00:00:00 2001 From: sayakpaul Date: Tue, 26 Aug 2025 12:07:46 +0200 Subject: [PATCH] up --- src/diffusers/models/attention_dispatch.py | 4 ++++ src/diffusers/utils/kernels_utils.py | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/diffusers/models/attention_dispatch.py b/src/diffusers/models/attention_dispatch.py index d0a8127507..10dbc4e0ee 100644 --- a/src/diffusers/models/attention_dispatch.py +++ b/src/diffusers/models/attention_dispatch.py @@ -374,6 +374,10 @@ def _check_attention_backend_requirements(backend: AttentionBackendName) -> None raise RuntimeError( f"Flash Attention 3 Hub backend '{backend.value}' is not usable because the `kernels` package isn't available. Please install it with `pip install kernels`." ) + if flash_attn_3_hub_func is None: + raise RuntimeError( + "`flash_attn_3_hub_func` wasn't available. Please double check if `kernels` was able to successfully pull the FA3 kernel from kernels-community/vllm-flash-attn3." + ) elif backend in [AttentionBackendName._FLASH_VARLEN_3_HUB]: raise NotImplementedError diff --git a/src/diffusers/utils/kernels_utils.py b/src/diffusers/utils/kernels_utils.py index ba1c5efcbe..346fc40c60 100644 --- a/src/diffusers/utils/kernels_utils.py +++ b/src/diffusers/utils/kernels_utils.py @@ -13,5 +13,5 @@ def _get_fa3_from_hub(): try: flash_attn_3_hub = get_kernel(_DEFAULT_HUB_ID_FA3) return flash_attn_3_hub - except Exception as e: - raise e + except Exception: + return None