Mirror of https://github.com/huggingface/diffusers.git — synced 2026-01-27 17:22:53 +03:00

Add MLU Support. (#12629)

* Add MLU Support.

* fix comment.

* rename is_mlu_available to is_torch_mlu_available

* Apply style fixes

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
This commit is contained in:
a120092009
2025-11-12 13:15:26 +08:00
committed by GitHub
parent 0c7589293b
commit aecf0c53bf
3 changed files with 9 additions and 1 deletions

View File

@@ -108,6 +108,7 @@ from .import_utils import (
is_tensorboard_available,
is_timm_available,
is_torch_available,
is_torch_mlu_available,
is_torch_npu_available,
is_torch_version,
is_torch_xla_available,

View File

@@ -192,6 +192,7 @@ except importlib_metadata.PackageNotFoundError:
# Probe optional backend/integration packages once at import time.
# Each call returns an (is_available, version) pair; `torch_mlu` is the
# Cambricon MLU backend added alongside the existing `torch_npu` probe.
_torch_xla_available, _torch_xla_version = _is_package_available("torch_xla")
_torch_npu_available, _torch_npu_version = _is_package_available("torch_npu")
_torch_mlu_available, _torch_mlu_version = _is_package_available("torch_mlu")
_transformers_available, _transformers_version = _is_package_available("transformers")
_hf_hub_available, _hf_hub_version = _is_package_available("huggingface_hub")
_kernels_available, _kernels_version = _is_package_available("kernels")
@@ -243,6 +244,10 @@ def is_torch_npu_available():
return _torch_npu_available
def is_torch_mlu_available():
    """Return whether the `torch_mlu` package (Cambricon MLU backend) is installed."""
    return _torch_mlu_available
def is_flax_available():
    """Return whether Flax is installed (cached module-level availability flag)."""
    return _flax_available

View File

@@ -20,7 +20,7 @@ import os
from typing import Callable, Dict, List, Optional, Tuple, Union
from . import logging
from .import_utils import is_torch_available, is_torch_npu_available, is_torch_version
from .import_utils import is_torch_available, is_torch_mlu_available, is_torch_npu_available, is_torch_version
if is_torch_available():
@@ -286,6 +286,8 @@ def get_device():
return "xpu"
elif torch.backends.mps.is_available():
return "mps"
elif is_torch_mlu_available():
return "mlu"
else:
return "cpu"