mirror of https://github.com/huggingface/diffusers.git
[bugfix] NPU Adaption for Sana (#10724)
* NPU Adaption for Sana (squashed series of iterative commits)
* [bugfix] NPU Adaption for Sana

---------

Co-authored-by: J石页 <jiangshuo9@h-partners.com>
Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
@@ -995,7 +995,8 @@ def main(args):
     if args.enable_npu_flash_attention:
         if is_torch_npu_available():
             logger.info("npu flash attention enabled.")
-            transformer.enable_npu_flash_attention()
+            for block in transformer.transformer_blocks:
+                block.attn2.set_use_npu_flash_attention(True)
         else:
             raise ValueError("npu flash attention requires torch_npu extensions and is supported only on npu device ")
 
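For readers applying this change outside the training script, below is a minimal sketch of the same per-block toggle. The checkpoint id and the standalone loading step are illustrative assumptions, not taken from the commit; only the is_torch_npu_available guard and the attn2 loop mirror the diff above.

# A minimal usage sketch of the pattern this commit introduces: enable the
# NPU flash-attention path only on the cross-attention (attn2) modules of a
# Sana transformer, rather than model-wide. Assumes torch_npu is installed;
# the checkpoint id below is a placeholder, not part of the commit.
from diffusers import SanaTransformer2DModel
from diffusers.utils import is_torch_npu_available

transformer = SanaTransformer2DModel.from_pretrained(
    "Efficient-Large-Model/Sana_1600M_1024px_diffusers",  # placeholder checkpoint
    subfolder="transformer",
)

if is_torch_npu_available():
    # Per-block toggle, matching the bugfix: flip the flag only on each
    # block's cross-attention module instead of calling the model-wide
    # transformer.enable_npu_flash_attention() that the diff removes.
    for block in transformer.transformer_blocks:
        block.attn2.set_use_npu_flash_attention(True)
else:
    raise ValueError("npu flash attention requires torch_npu extensions and an NPU device.")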