mirror of https://github.com/huggingface/diffusers.git synced 2026-01-27 17:22:53 +03:00

Make sure Diffusers works even if Hub is down (#3447)

* Make sure Diffusers works even if Hub is down

* Make sure the Hub-down path is well tested
Patrick von Platen, committed by GitHub on 2023-05-23 15:22:43 +02:00
parent d4197bf4d7
commit 9e2734a710
2 changed files with 13 additions and 6 deletions
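In practical terms, a pipeline that has already been downloaded keeps loading when the Hub cannot be reached. A rough sketch of the intended behaviour, reusing the tiny test checkpoint from the test change below (illustration only, not code from the commit; the second call is assumed to run while the Hub is unreachable):

```python
from diffusers import StableDiffusionPipeline

repo_id = "hf-internal-testing/tiny-stable-diffusion-torch"

# With the network up, this downloads the pipeline and fills the local cache.
pipe = StableDiffusionPipeline.from_pretrained(repo_id, safety_checker=None)

# Later, with the Hub unreachable, the HTTPError raised while querying the repo
# is caught, a warning is logged, and loading falls back to the local cache as
# if local_files_only=True had been passed.
pipe = StableDiffusionPipeline.from_pretrained(repo_id, safety_checker=None)
```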

File 1 of 2:

@@ -30,6 +30,7 @@ import PIL
 import torch
 from huggingface_hub import hf_hub_download, model_info, snapshot_download
 from packaging import version
+from requests.exceptions import HTTPError
 from tqdm.auto import tqdm
 
 import diffusers
@@ -1228,6 +1229,17 @@ class DiffusionPipeline(ConfigMixin):
         allow_patterns = None
         ignore_patterns = None
 
         if not local_files_only:
+            try:
+                info = model_info(
+                    pretrained_model_name,
+                    use_auth_token=use_auth_token,
+                    revision=revision,
+                )
+            except HTTPError as e:
+                logger.warn(f"Couldn't connect to the Hub: {e}.\nWill try to load from local cache.")
+                local_files_only = True
+
+        if not local_files_only:
             config_file = hf_hub_download(
                 pretrained_model_name,
@@ -1239,11 +1251,6 @@ class DiffusionPipeline(ConfigMixin):
                 resume_download=resume_download,
                 use_auth_token=use_auth_token,
             )
-            info = model_info(
-                pretrained_model_name,
-                use_auth_token=use_auth_token,
-                revision=revision,
-            )
 
             config_dict = cls._dict_from_json_file(config_file)
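The guard added above can be read as a small standalone helper; a minimal sketch of the pattern (the function name `effective_local_files_only` is hypothetical, only `model_info` and `HTTPError` come from the diff):

```python
from huggingface_hub import model_info
from requests.exceptions import HTTPError


def effective_local_files_only(repo_id: str, revision=None, local_files_only=False) -> bool:
    """Return the local_files_only flag to actually use, forcing True when the Hub is unreachable."""
    if local_files_only:
        return True
    try:
        # The same reachability probe the pipeline performs before downloading.
        model_info(repo_id, revision=revision)
        return False
    except HTTPError:
        # Hub down or request rejected: fall back to whatever is in the local cache.
        return True
```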

File 2 of 2:

@@ -353,7 +353,7 @@ class DownloadTests(unittest.TestCase):
 
         with mock.patch("requests.request", return_value=response_mock):
             # Download this model to make sure it's in the cache.
             pipe = StableDiffusionPipeline.from_pretrained(
-                "hf-internal-testing/tiny-stable-diffusion-torch", safety_checker=None, local_files_only=True
+                "hf-internal-testing/tiny-stable-diffusion-torch", safety_checker=None
             )
             comps = {k: v for k, v in pipe.components.items() if hasattr(v, "parameters")}
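`response_mock` is built above the lines shown in this hunk; a sketch of how such a failing response can be faked (the attribute values are assumptions, not copied from the test file):

```python
from unittest import mock

from diffusers import StableDiffusionPipeline
from requests.exceptions import HTTPError

# A fake response that makes every Hub request look like a server error.
response_mock = mock.Mock()
response_mock.status_code = 500
response_mock.headers = {}
response_mock.raise_for_status.side_effect = HTTPError
response_mock.json.return_value = {}

# Patching requests.request means from_pretrained cannot reach the Hub and must
# use the fallback added in this commit. Assumes the checkpoint is already in
# the local cache from an earlier run.
with mock.patch("requests.request", return_value=response_mock):
    pipe = StableDiffusionPipeline.from_pretrained(
        "hf-internal-testing/tiny-stable-diffusion-torch", safety_checker=None
    )
```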