From d304f3dd51033aa78f67149f0aaa549e8146c551 Mon Sep 17 00:00:00 2001
From: Lucain
Date: Thu, 4 Jan 2024 17:05:55 +0100
Subject: [PATCH] Respect offline mode when loading pipeline (#6456)

* Respect offline mode when loading model

* default to local entry if connectionerror
---
 src/diffusers/pipelines/pipeline_utils.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/diffusers/pipelines/pipeline_utils.py b/src/diffusers/pipelines/pipeline_utils.py
index e7a795365a..3054c491fd 100644
--- a/src/diffusers/pipelines/pipeline_utils.py
+++ b/src/diffusers/pipelines/pipeline_utils.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import fnmatch
 import importlib
 import inspect
@@ -27,6 +26,7 @@ from typing import Any, Callable, Dict, List, Optional, Union
 
 import numpy as np
 import PIL.Image
+import requests
 import torch
 from huggingface_hub import (
     ModelCard,
@@ -35,7 +35,7 @@ from huggingface_hub import (
     model_info,
     snapshot_download,
 )
-from huggingface_hub.utils import validate_hf_hub_args
+from huggingface_hub.utils import OfflineModeIsEnabled, validate_hf_hub_args
 from packaging import version
 from requests.exceptions import HTTPError
 from tqdm.auto import tqdm
@@ -1654,7 +1654,7 @@ class DiffusionPipeline(ConfigMixin, PushToHubMixin):
         if not local_files_only:
             try:
                 info = model_info(pretrained_model_name, token=token, revision=revision)
-            except HTTPError as e:
+            except (HTTPError, OfflineModeIsEnabled, requests.ConnectionError) as e:
                 logger.warn(f"Couldn't connect to the Hub: {e}.\nWill try to load from local cache.")
                 local_files_only = True
                 model_info_call_error = e  # save error to reraise it if model is not cached locally
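
For context (not part of the patch): a minimal usage sketch of the behaviour this change enables. It assumes the pipeline has already been downloaded to the local Hugging Face cache; "runwayml/stable-diffusion-v1-5" is only an example model id.

import os

# Enable offline mode before importing diffusers so that huggingface_hub
# raises OfflineModeIsEnabled instead of issuing HTTP requests.
os.environ["HF_HUB_OFFLINE"] = "1"

from diffusers import DiffusionPipeline

# Before this patch, the model_info() lookup raised an uncaught error here;
# with the patch, OfflineModeIsEnabled (or requests.ConnectionError when the
# network is down) is caught and loading falls back to the local cache.
pipe = DiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")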