From e0c504c13da2723b4fcf3f9e0de2ab37ed01a59f Mon Sep 17 00:00:00 2001
From: Wauplin
Date: Thu, 25 Sep 2025 16:51:53 +0200
Subject: [PATCH 1/2] Support huggingface_hub 0.x and 1.x

---
 setup.py                                          | 3 ++-
 src/diffusers/configuration_utils.py              | 4 ++--
 src/diffusers/dependency_versions_table.py        | 3 ++-
 src/diffusers/models/modeling_flax_utils.py       | 4 ++--
 src/diffusers/pipelines/pipeline_loading_utils.py | 6 +++---
 src/diffusers/pipelines/pipeline_utils.py         | 6 +++---
 src/diffusers/utils/hub_utils.py                  | 6 +++---
 tests/models/test_modeling_common.py              | 7 +++----
 tests/pipelines/test_pipelines.py                 | 6 +++---
 9 files changed, 23 insertions(+), 22 deletions(-)

diff --git a/setup.py b/setup.py
index ba3ad8e2b307..15a3414f62f9 100644
--- a/setup.py
+++ b/setup.py
@@ -102,7 +102,8 @@
     "filelock",
     "flax>=0.4.1",
     "hf-doc-builder>=0.3.0",
-    "huggingface-hub>=0.34.0",
+    "httpx<1.0.0",
+    "huggingface-hub>=0.34.0,<2.0",
     "requests-mock==1.10.0",
     "importlib_metadata",
     "invisible-watermark>=0.2.0",
diff --git a/src/diffusers/configuration_utils.py b/src/diffusers/configuration_utils.py
index 540aab03071d..1c4ee33acbfd 100644
--- a/src/diffusers/configuration_utils.py
+++ b/src/diffusers/configuration_utils.py
@@ -30,11 +30,11 @@
 from huggingface_hub import DDUFEntry, create_repo, hf_hub_download
 from huggingface_hub.utils import (
     EntryNotFoundError,
+    HfHubHTTPError,
     RepositoryNotFoundError,
     RevisionNotFoundError,
     validate_hf_hub_args,
 )
-from requests import HTTPError
 from typing_extensions import Self
 
 from . import __version__
@@ -419,7 +419,7 @@ def load_config(
             raise EnvironmentError(
                 f"{pretrained_model_name_or_path} does not appear to have a file named {cls.config_name}."
             )
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             raise EnvironmentError(
                 "There was a specific connection error when trying to load"
                 f" {pretrained_model_name_or_path}:\n{err}"
diff --git a/src/diffusers/dependency_versions_table.py b/src/diffusers/dependency_versions_table.py
index 79dc4c50a050..bfc4e9818ba3 100644
--- a/src/diffusers/dependency_versions_table.py
+++ b/src/diffusers/dependency_versions_table.py
@@ -9,7 +9,8 @@
     "filelock": "filelock",
     "flax": "flax>=0.4.1",
     "hf-doc-builder": "hf-doc-builder>=0.3.0",
-    "huggingface-hub": "huggingface-hub>=0.34.0",
+    "httpx": "httpx<1.0.0",
+    "huggingface-hub": "huggingface-hub>=0.34.0,<2.0",
     "requests-mock": "requests-mock==1.10.0",
     "importlib_metadata": "importlib_metadata",
     "invisible-watermark": "invisible-watermark>=0.2.0",
diff --git a/src/diffusers/models/modeling_flax_utils.py b/src/diffusers/models/modeling_flax_utils.py
index 573828dc4b03..8050afff2767 100644
--- a/src/diffusers/models/modeling_flax_utils.py
+++ b/src/diffusers/models/modeling_flax_utils.py
@@ -26,11 +26,11 @@
 from huggingface_hub import create_repo, hf_hub_download
 from huggingface_hub.utils import (
     EntryNotFoundError,
+    HfHubHTTPError,
     RepositoryNotFoundError,
     RevisionNotFoundError,
     validate_hf_hub_args,
 )
-from requests import HTTPError
 
 from .. import __version__, is_torch_available
 from ..utils import (
@@ -385,7 +385,7 @@ def from_pretrained(
                 raise EnvironmentError(
                     f"{pretrained_model_name_or_path} does not appear to have a file named {FLAX_WEIGHTS_NAME}."
                 )
-            except HTTPError as err:
+            except HfHubHTTPError as err:
                 raise EnvironmentError(
                     f"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\n"
                     f"{err}"
diff --git a/src/diffusers/pipelines/pipeline_loading_utils.py b/src/diffusers/pipelines/pipeline_loading_utils.py
index 388128df0ebd..b7a3e08105ff 100644
--- a/src/diffusers/pipelines/pipeline_loading_utils.py
+++ b/src/diffusers/pipelines/pipeline_loading_utils.py
@@ -19,12 +19,12 @@
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Union
 
+import httpx
 import requests
 import torch
 from huggingface_hub import DDUFEntry, ModelCard, model_info, snapshot_download
-from huggingface_hub.utils import OfflineModeIsEnabled, validate_hf_hub_args
+from huggingface_hub.utils import HfHubHTTPError, OfflineModeIsEnabled, validate_hf_hub_args
 from packaging import version
-from requests.exceptions import HTTPError
 
 from .. import __version__
 from ..utils import (
@@ -1110,7 +1110,7 @@ def _download_dduf_file(
     if not local_files_only:
         try:
             info = model_info(pretrained_model_name, token=token, revision=revision)
-        except (HTTPError, OfflineModeIsEnabled, requests.ConnectionError) as e:
+        except (HfHubHTTPError, OfflineModeIsEnabled, requests.ConnectionError, httpx.NetworkError) as e:
             logger.warning(f"Couldn't connect to the Hub: {e}.\nWill try to load from local cache.")
             local_files_only = True
             model_info_call_error = e  # save error to reraise it if model is not cached locally
diff --git a/src/diffusers/pipelines/pipeline_utils.py b/src/diffusers/pipelines/pipeline_utils.py
index 01b3c56777c8..3f6e53099b38 100644
--- a/src/diffusers/pipelines/pipeline_utils.py
+++ b/src/diffusers/pipelines/pipeline_utils.py
@@ -23,6 +23,7 @@
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Union, get_args, get_origin
 
+import httpx
 import numpy as np
 import PIL.Image
 import requests
@@ -36,9 +37,8 @@
     read_dduf_file,
     snapshot_download,
 )
-from huggingface_hub.utils import OfflineModeIsEnabled, validate_hf_hub_args
+from huggingface_hub.utils import HfHubHTTPError, OfflineModeIsEnabled, validate_hf_hub_args
 from packaging import version
-from requests.exceptions import HTTPError
 from tqdm.auto import tqdm
 from typing_extensions import Self
 
@@ -1616,7 +1616,7 @@ def download(cls, pretrained_model_name, **kwargs) -> Union[str, os.PathLike]:
         if not local_files_only:
             try:
                 info = model_info(pretrained_model_name, token=token, revision=revision)
-            except (HTTPError, OfflineModeIsEnabled, requests.ConnectionError) as e:
+            except (HfHubHTTPError, OfflineModeIsEnabled, requests.ConnectionError, httpx.NetworkError) as e:
                 logger.warning(f"Couldn't connect to the Hub: {e}.\nWill try to load from local cache.")
                 local_files_only = True
                 model_info_call_error = e  # save error to reraise it if model is not cached locally
diff --git a/src/diffusers/utils/hub_utils.py b/src/diffusers/utils/hub_utils.py
index fcdf49156a8f..b6e99452aa88 100644
--- a/src/diffusers/utils/hub_utils.py
+++ b/src/diffusers/utils/hub_utils.py
@@ -38,13 +38,13 @@
 from huggingface_hub.file_download import REGEX_COMMIT_HASH
 from huggingface_hub.utils import (
     EntryNotFoundError,
+    HfHubHTTPError,
     RepositoryNotFoundError,
     RevisionNotFoundError,
     is_jinja_available,
     validate_hf_hub_args,
 )
 from packaging import version
-from requests import HTTPError
 
 from .. import __version__
 from .constants import (
@@ -316,7 +316,7 @@ def _get_model_file(
             raise EnvironmentError(
                 f"{pretrained_model_name_or_path} does not appear to have a file named {weights_name}."
             ) from e
-        except HTTPError as e:
+        except HfHubHTTPError as e:
             raise EnvironmentError(
                 f"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\n{e}"
             ) from e
@@ -432,7 +432,7 @@ def _get_checkpoint_shard_files(
 
     # We have already dealt with RepositoryNotFoundError and RevisionNotFoundError when getting the index, so
    # we don't have to catch them here. We have also dealt with EntryNotFoundError.
-    except HTTPError as e:
+    except HfHubHTTPError as e:
         raise EnvironmentError(
             f"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load {pretrained_model_name_or_path}. You should try"
             " again after checking your internet connection."
diff --git a/tests/models/test_modeling_common.py b/tests/models/test_modeling_common.py
index 5e7be62342c3..3a008edfe1c2 100644
--- a/tests/models/test_modeling_common.py
+++ b/tests/models/test_modeling_common.py
@@ -37,9 +37,8 @@
 import torch.nn as nn
 from accelerate.utils.modeling import _get_proper_dtype, compute_module_sizes, dtype_byte_size
 from huggingface_hub import ModelCard, delete_repo, snapshot_download, try_to_load_from_cache
-from huggingface_hub.utils import is_jinja_available
+from huggingface_hub.utils import HfHubHTTPError, is_jinja_available
 from parameterized import parameterized
-from requests.exceptions import HTTPError
 
 from diffusers.models import FluxTransformer2DModel, SD3Transformer2DModel, UNet2DConditionModel
 from diffusers.models.attention_processor import (
@@ -272,7 +271,7 @@ def test_cached_files_are_used_when_no_internet(self):
         response_mock = mock.Mock()
         response_mock.status_code = 500
         response_mock.headers = {}
-        response_mock.raise_for_status.side_effect = HTTPError
+        response_mock.raise_for_status.side_effect = HfHubHTTPError("Server down", response=mock.Mock())
         response_mock.json.return_value = {}
 
         # Download this model to make sure it's in the cache.
@@ -296,7 +295,7 @@ def test_local_files_only_with_sharded_checkpoint(self):
         error_response = mock.Mock(
             status_code=500,
             headers={},
-            raise_for_status=mock.Mock(side_effect=HTTPError),
+            raise_for_status=mock.Mock(side_effect=HfHubHTTPError("Server down", response=mock.Mock())),
             json=mock.Mock(return_value={}),
         )
 
diff --git a/tests/pipelines/test_pipelines.py b/tests/pipelines/test_pipelines.py
index 09df140f1af8..3a6981361268 100644
--- a/tests/pipelines/test_pipelines.py
+++ b/tests/pipelines/test_pipelines.py
@@ -33,9 +33,9 @@
 import torch
 import torch.nn as nn
 from huggingface_hub import snapshot_download
+from huggingface_hub.utils import HfHubHTTPError
 from parameterized import parameterized
 from PIL import Image
-from requests.exceptions import HTTPError
 from transformers import CLIPImageProcessor, CLIPModel, CLIPTextConfig, CLIPTextModel, CLIPTokenizer
 
 from diffusers import (
@@ -430,7 +430,7 @@ def test_cached_files_are_used_when_no_internet(self):
         response_mock = mock.Mock()
         response_mock.status_code = 500
         response_mock.headers = {}
-        response_mock.raise_for_status.side_effect = HTTPError
+        response_mock.raise_for_status.side_effect = HfHubHTTPError("Server down", response=mock.Mock())
         response_mock.json.return_value = {}
 
         # Download this model to make sure it's in the cache.
@@ -457,7 +457,7 @@ def test_local_files_only_are_used_when_no_internet(self):
         response_mock = mock.Mock()
         response_mock.status_code = 500
         response_mock.headers = {}
-        response_mock.raise_for_status.side_effect = HTTPError
+        response_mock.raise_for_status.side_effect = HfHubHTTPError("Server down", response=mock.Mock())
         response_mock.json.return_value = {}
 
         # first check that with local files only the pipeline can only be used if cached

From bfe038fb700586932e2ce62e6994457259a18e8c Mon Sep 17 00:00:00 2001
From: Wauplin
Date: Thu, 25 Sep 2025 17:05:23 +0200
Subject: [PATCH 2/2] httpx

---
 setup.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup.py b/setup.py
index 15a3414f62f9..372a5685957e 100644
--- a/setup.py
+++ b/setup.py
@@ -260,6 +260,7 @@ def run(self):
 install_requires = [
     deps["importlib_metadata"],
     deps["filelock"],
+    deps["httpx"],
     deps["huggingface-hub"],
     deps["numpy"],
     deps["regex"],
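
A minimal sketch of the error-handling pattern this series standardizes on, assuming only that
huggingface_hub.utils.HfHubHTTPError is importable in both huggingface_hub 0.x (requests-based)
and 1.x (httpx-based); the helper name load_config_file and the exact EnvironmentError wording
are illustrative, not part of the patch:

    from huggingface_hub import hf_hub_download
    from huggingface_hub.utils import HfHubHTTPError

    def load_config_file(repo_id: str, filename: str = "config.json") -> str:
        # Hypothetical helper: download a single file from the Hub and translate
        # HTTP-level failures into the EnvironmentError style used by diffusers loaders.
        try:
            return hf_hub_download(repo_id, filename)
        except HfHubHTTPError as err:
            # Raised by huggingface_hub for HTTP errors regardless of the backend
            # (requests in hub 0.x, httpx in hub 1.x).
            raise EnvironmentError(
                f"There was a specific connection error when trying to load {repo_id}:\n{err}"
            ) from err

Catching the hub's own HfHubHTTPError (plus httpx.NetworkError alongside requests.ConnectionError
for connectivity failures) keeps a single code path working across both major versions, since the
errors raised by the httpx-based 1.x client are no longer requests exceptions.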