Mirror of https://github.com/huggingface/diffusers.git

[Core] Remove TF import checks (#4968)

[TF] Remove tf
Patrick von Platen, 2023-09-11 11:22:40 +02:00 (committed by GitHub)
parent b6e0b016ce
commit 2c60f7d14e
3 changed files with 1 addition and 49 deletions
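Note: after this change, is_tf_available is no longer exported from diffusers.utils, so optional-backend gating goes through the remaining checks. A minimal illustrative sketch (not part of this commit) of guarding optional imports with the checks that survive:

    from diffusers.utils import is_flax_available, is_torch_available

    # Only import a backend that diffusers detected at import time.
    if is_torch_available():
        import torch

    if is_flax_available():
        import jax.numpy as jnp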


@@ -69,7 +69,6 @@ from .import_utils import (
     is_onnx_available,
     is_scipy_available,
     is_tensorboard_available,
-    is_tf_available,
     is_torch_available,
     is_torch_version,
     is_torchsde_available,


@@ -62,43 +62,6 @@ else:
     logger.info("Disabling PyTorch because USE_TORCH is set")
     _torch_available = False
-_tf_version = "N/A"
-if USE_TF in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TORCH not in ENV_VARS_TRUE_VALUES:
-    _tf_available = importlib.util.find_spec("tensorflow") is not None
-    if _tf_available:
-        candidates = (
-            "tensorflow",
-            "tensorflow-cpu",
-            "tensorflow-gpu",
-            "tf-nightly",
-            "tf-nightly-cpu",
-            "tf-nightly-gpu",
-            "intel-tensorflow",
-            "intel-tensorflow-avx512",
-            "tensorflow-rocm",
-            "tensorflow-macos",
-            "tensorflow-aarch64",
-        )
-        _tf_version = None
-        # For the metadata, we have to look for both tensorflow and tensorflow-cpu
-        for pkg in candidates:
-            try:
-                _tf_version = importlib_metadata.version(pkg)
-                break
-            except importlib_metadata.PackageNotFoundError:
-                pass
-        _tf_available = _tf_version is not None
-    if _tf_available:
-        if version.parse(_tf_version) < version.parse("2"):
-            logger.info(f"TensorFlow found but with version {_tf_version}. Diffusers requires version 2 minimum.")
-            _tf_available = False
-        else:
-            logger.info(f"TensorFlow version {_tf_version} available.")
-else:
-    logger.info("Disabling Tensorflow because USE_TORCH is set")
-    _tf_available = False
 _jax_version = "N/A"
 _flax_version = "N/A"
 if USE_JAX in ENV_VARS_TRUE_AND_AUTO_VALUES:
@@ -308,10 +271,6 @@ def is_torch_available():
     return _torch_available
-def is_tf_available():
-    return _tf_available
 def is_flax_available():
     return _flax_available
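For reference, the block deleted above probes for TensorFlow by checking that the module can be found and then reading the installed distribution's version from package metadata. A self-contained sketch of that same pattern (illustrative only; probe_backend is a hypothetical helper name and scipy is just an example target):

    import importlib.metadata
    import importlib.util


    def probe_backend(module_name, dist_names):
        # Available only if the module is importable and one of the
        # candidate distributions reports an installed version.
        if importlib.util.find_spec(module_name) is None:
            return False, "N/A"
        for dist in dist_names:
            try:
                return True, importlib.metadata.version(dist)
            except importlib.metadata.PackageNotFoundError:
                continue
        return False, "N/A"


    _scipy_available, _scipy_version = probe_backend("scipy", ("scipy",))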


@@ -23,7 +23,7 @@ from difflib import get_close_matches
 from pathlib import Path
 from diffusers.models.auto import get_values
-from diffusers.utils import ENV_VARS_TRUE_VALUES, is_flax_available, is_tf_available, is_torch_available
+from diffusers.utils import ENV_VARS_TRUE_VALUES, is_flax_available, is_torch_available
 # All paths are set with the intent you should run this script from the root of the repo with the command
@@ -421,10 +421,6 @@ def get_all_auto_configured_models():
     for attr_name in dir(diffusers.models.auto.modeling_auto):
         if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING_NAMES"):
             result = result | set(get_values(getattr(diffusers.models.auto.modeling_auto, attr_name)))
-    if is_tf_available():
-        for attr_name in dir(diffusers.models.auto.modeling_tf_auto):
-            if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
-                result = result | set(get_values(getattr(diffusers.models.auto.modeling_tf_auto, attr_name)))
     if is_flax_available():
         for attr_name in dir(diffusers.models.auto.modeling_flax_auto):
             if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
@@ -462,8 +458,6 @@ def check_all_models_are_auto_configured():
     missing_backends = []
     if not is_torch_available():
         missing_backends.append("PyTorch")
-    if not is_tf_available():
-        missing_backends.append("TensorFlow")
     if not is_flax_available():
         missing_backends.append("Flax")
     if len(missing_backends) > 0: