mirror of https://github.com/huggingface/diffusers.git synced 2026-01-27 17:22:53 +03:00
Dhruv Nair
2025-10-23 18:56:14 +02:00
parent 0a7bde9200
commit 8733fef39d
8 changed files with 48 additions and 35 deletions

View File

@@ -159,7 +159,7 @@ Change the [`~ComponentSpec.default_creation_method`] to `from_pretrained` and u
```py
guider_spec = t2i_pipeline.get_component_spec("guider")
guider_spec.default_creation_method="from_pretrained"
-guider_spec.repo="YiYiXu/modular-loader-t2i-guider"
+guider_spec.pretrained_model_name_or_path="YiYiXu/modular-loader-t2i-guider"
guider_spec.subfolder="pag_guider"
pag_guider = guider_spec.load()
t2i_pipeline.update_components(guider=pag_guider)

View File

@@ -313,14 +313,14 @@ unet_spec
ComponentSpec(
name='unet',
type_hint=<class 'diffusers.models.unets.unet_2d_condition.UNet2DConditionModel'>,
-repo='RunDiffusion/Juggernaut-XL-v9',
+pretrained_model_name_or_path='RunDiffusion/Juggernaut-XL-v9',
subfolder='unet',
variant='fp16',
default_creation_method='from_pretrained'
)
# modify to load from a different repository
-unet_spec.repo = "stabilityai/stable-diffusion-xl-base-1.0"
+unet_spec.pretrained_model_name_or_path = "stabilityai/stable-diffusion-xl-base-1.0"
# load component with modified spec
unet = unet_spec.load(torch_dtype=torch.float16)
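Alongside mutating the spec as above, a loading field can also be overridden just for a single call; a minimal sketch continuing the snippet above (the `variant=None` override is an illustrative assumption based on the kwarg merging in `ComponentSpec.load()` shown later in this commit, not part of the diff):

```py
import torch

# Illustrative sketch: loading-field kwargs passed to load() take precedence
# over the values stored on the spec.
unet = unet_spec.load(torch_dtype=torch.float16, variant=None)  # ignore the spec's variant="fp16" for this call
```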

View File

@@ -157,7 +157,7 @@ guider.push_to_hub("YiYiXu/modular-loader-t2i-guider", subfolder="pag_guider")
```py
guider_spec = t2i_pipeline.get_component_spec("guider")
guider_spec.default_creation_method="from_pretrained"
-guider_spec.repo="YiYiXu/modular-loader-t2i-guider"
+guider_spec.pretrained_model_name_or_path="YiYiXu/modular-loader-t2i-guider"
guider_spec.subfolder="pag_guider"
pag_guider = guider_spec.load()
t2i_pipeline.update_components(guider=pag_guider)

View File

@@ -313,14 +313,14 @@ unet_spec
ComponentSpec(
name='unet',
type_hint=<class 'diffusers.models.unets.unet_2d_condition.UNet2DConditionModel'>,
-repo='RunDiffusion/Juggernaut-XL-v9',
+pretrained_model_name_or_path='RunDiffusion/Juggernaut-XL-v9',
subfolder='unet',
variant='fp16',
default_creation_method='from_pretrained'
)
# modify to load from a different repository
-unet_spec.repo = "stabilityai/stable-diffusion-xl-base-1.0"
+unet_spec.pretrained_model_name_or_path = "stabilityai/stable-diffusion-xl-base-1.0"
# load component with modified spec
unet = unet_spec.load(torch_dtype=torch.float16)

View File

@@ -387,9 +387,9 @@ def is_valid_url(url):
return False
-def _validate_single_file_path(pretrained_model_name_or_path):
-if os.path.isfile(pretrained_model_name_or_path):
-return True
+def _is_single_file_path_or_url(pretrained_model_name_or_path):
+if not os.path.isfile(pretrained_model_name_or_path) or not is_valid_url(pretrained_model_name_or_path):
+return False
repo_id, weight_name = _extract_repo_id_and_weights_name(pretrained_model_name_or_path)
return bool(repo_id and weight_name)
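For orientation, a hedged sketch of the kinds of inputs the renamed helper is meant to distinguish; the exact return values depend on the implementation above and on `_extract_repo_id_and_weights_name`, so treat the comments as assumptions rather than a specification:

```py
from diffusers.loaders.single_file_utils import _is_single_file_path_or_url

# Assumed behaviour: a checkpoint file on disk or a direct file URL should count
# as a "single file" source, while a plain Hub repo id is neither a local file nor a URL.
print(_is_single_file_path_or_url("./sd_xl_base_1.0.safetensors"))
print(_is_single_file_path_or_url("https://huggingface.co/org/repo/blob/main/model.safetensors"))
print(_is_single_file_path_or_url("stabilityai/stable-diffusion-xl-base-1.0"))
```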

View File

@@ -1639,8 +1639,8 @@ class ModularPipeline(ConfigMixin, PushToHubMixin):
pretrained_model_name_or_path (`str` or `os.PathLike`, optional):
Path to a pretrained pipeline configuration. It will first try to load config from
`modular_model_index.json`, then fallback to `model_index.json` for compatibility with standard
-non-modular repositories. If the pretrained_model_name_or_path does not contain any pipeline config, it will be set to None
-during initialization.
+non-modular repositories. If the pretrained_model_name_or_path does not contain any pipeline config, it
+will be set to None during initialization.
trust_remote_code (`bool`, optional):
Whether to trust remote code when loading the pipeline, need to be set to True if you want to create
pipeline blocks based on the custom code in `pretrained_model_name_or_path`
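A brief usage sketch of the fallback described above, assuming this docstring belongs to `ModularPipeline.from_pretrained`; the repo id is a placeholder and the top-level import path is an assumption:

```py
from diffusers import ModularPipeline

# Reads modular_model_index.json if present, otherwise falls back to
# model_index.json; trust_remote_code is only needed when the repo ships
# custom pipeline-block code.
pipeline = ModularPipeline.from_pretrained("YiYiXu/modular-demo", trust_remote_code=True)
```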
@@ -2113,8 +2113,8 @@ class ModularPipeline(ConfigMixin, PushToHubMixin):
**kwargs: additional kwargs to be passed to `from_pretrained()`.Can be:
- a single value to be applied to all components to be loaded, e.g. torch_dtype=torch.bfloat16
- a dict, e.g. torch_dtype={"unet": torch.bfloat16, "default": torch.float32}
-  - if potentially override ComponentSpec if passed a different loading field in kwargs, e.g. `pretrained_model_name_or_path`,
-    `variant`, `revision`, etc.
+  - if potentially override ComponentSpec if passed a different loading field in kwargs, e.g.
+    `pretrained_model_name_or_path`, `variant`, `revision`, etc.
"""
if names is None:
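To make the `torch_dtype` forms above concrete, a small sketch reusing the `t2i_pipeline` from the docs hunks earlier in this commit:

```py
import torch

# One value applied to every component being loaded:
t2i_pipeline.load_components(torch_dtype=torch.bfloat16)

# Per-component dict with a "default" fallback, as the docstring describes:
t2i_pipeline.load_components(torch_dtype={"unet": torch.bfloat16, "default": torch.float32})
```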
@@ -2397,12 +2397,12 @@ class ModularPipeline(ConfigMixin, PushToHubMixin):
Example:
>>> from diffusers.pipelines.modular_pipeline_utils import ComponentSpec >>> from diffusers import
UNet2DConditionModel >>> spec = ComponentSpec(
... name="unet", ... type_hint=UNet2DConditionModel, ... config=None, ... pretrained_model_name_or_path="path/to/pretrained_model_name_or_path", ...
subfolder="subfolder", ... variant=None, ... revision=None, ...
default_creation_method="from_pretrained",
... name="unet", ... type_hint=UNet2DConditionModel, ... config=None, ...
pretrained_model_name_or_path="path/to/pretrained_model_name_or_path", ... subfolder="subfolder", ...
variant=None, ... revision=None, ... default_creation_method="from_pretrained",
... ) >>> ModularPipeline._component_spec_to_dict(spec) {
"type_hint": ("diffusers", "UNet2DConditionModel"), "pretrained_model_name_or_path": "path/to/repo", "subfolder": "subfolder",
"variant": None, "revision": None,
"type_hint": ("diffusers", "UNet2DConditionModel"), "pretrained_model_name_or_path": "path/to/repo",
"subfolder": "subfolder", "variant": None, "revision": None,
}
"""
if component_spec.default_creation_method != "from_pretrained":
@@ -2451,11 +2451,13 @@ class ModularPipeline(ConfigMixin, PushToHubMixin):
ComponentSpec: A reconstructed ComponentSpec object.
Example:
->>> spec_dict = { ... "type_hint": ("diffusers", "UNet2DConditionModel"), ... "pretrained_model_name_or_path":
-"stabilityai/stable-diffusion-xl", ... "subfolder": "unet", ... "variant": None, ... "revision": None, ...
-} >>> ModularPipeline._dict_to_component_spec("unet", spec_dict) ComponentSpec(
-name="unet", type_hint=UNet2DConditionModel, config=None, pretrained_model_name_or_path="stabilityai/stable-diffusion-xl",
-subfolder="unet", variant=None, revision=None, default_creation_method="from_pretrained"
+>>> spec_dict = { ... "type_hint": ("diffusers", "UNet2DConditionModel"), ...
+"pretrained_model_name_or_path": "stabilityai/stable-diffusion-xl", ... "subfolder": "unet", ... "variant":
+None, ... "revision": None, ... } >>> ModularPipeline._dict_to_component_spec("unet", spec_dict)
+ComponentSpec(
+name="unet", type_hint=UNet2DConditionModel, config=None,
+pretrained_model_name_or_path="stabilityai/stable-diffusion-xl", subfolder="unet", variant=None,
+revision=None, default_creation_method="from_pretrained"
)
"""
# make a shallow copy so we can pop() safely
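A compact sketch of the round trip the two docstring examples above describe (spec → dict → spec). Both helpers are private and the `ModularPipeline` import path is an assumption, so treat this as illustrative only:

```py
from diffusers import ModularPipeline, UNet2DConditionModel
from diffusers.pipelines.modular_pipeline_utils import ComponentSpec

spec = ComponentSpec(
    name="unet",
    type_hint=UNet2DConditionModel,
    pretrained_model_name_or_path="stabilityai/stable-diffusion-xl-base-1.0",
    subfolder="unet",
)
spec_dict = ModularPipeline._component_spec_to_dict(spec)
# roughly: {"type_hint": ("diffusers", "UNet2DConditionModel"),
#           "pretrained_model_name_or_path": "stabilityai/stable-diffusion-xl-base-1.0",
#           "subfolder": "unet", "variant": None, "revision": None}
restored = ModularPipeline._dict_to_component_spec("unet", spec_dict)
```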

View File

@@ -21,7 +21,7 @@ from typing import Any, Dict, List, Literal, Optional, Type, Union
import torch
from ..configuration_utils import ConfigMixin, FrozenDict
-from ..loaders.single_file_utils import _validate_single_file_path
+from ..loaders.single_file_utils import _is_single_file_path_or_url
from ..utils import is_torch_available, logging
@@ -92,13 +92,20 @@ class ComponentSpec:
type_hint: Optional[Type] = None
description: Optional[str] = None
config: Optional[FrozenDict] = None
-# YiYi Notes: should we change it to pretrained_model_name_or_path for consistency? a bit long for a field name
pretrained_model_name_or_path: Optional[Union[str, List[str]]] = field(default=None, metadata={"loading": True})
subfolder: Optional[str] = field(default="", metadata={"loading": True})
variant: Optional[str] = field(default=None, metadata={"loading": True})
revision: Optional[str] = field(default=None, metadata={"loading": True})
default_creation_method: Literal["from_config", "from_pretrained"] = "from_pretrained"
+# Deprecated
+repo: Optional[Union[str, List[str]]] = field(default=None, metadata={"loading": False})
+def __post_init__(self):
+repo_value = self.repo
+if repo_value is not None and self.pretrained_model_name_or_path is None:
+object.__setattr__(self, "pretrained_model_name_or_path", repo_value)
def __hash__(self):
"""Make ComponentSpec hashable, using load_id as the hash value."""
return hash((self.name, self.load_id, self.default_creation_method))
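The `__post_init__` shim above keeps old construction-time `repo=` arguments working; a minimal sketch (the repo id is taken from the docs hunk earlier in this commit, the `ComponentSpec` import path from its docstring example):

```py
from diffusers.pipelines.modular_pipeline_utils import ComponentSpec

# The deprecated `repo` value is copied into `pretrained_model_name_or_path`
# when the latter is not set explicitly.
spec = ComponentSpec(name="guider", repo="YiYiXu/modular-loader-t2i-guider", subfolder="pag_guider")
assert spec.pretrained_model_name_or_path == "YiYiXu/modular-loader-t2i-guider"
```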
@@ -183,8 +190,8 @@ class ComponentSpec:
@property
def load_id(self) -> str:
"""
-Unique identifier for this spec's pretrained load, composed of pretrained_model_name_or_path|subfolder|variant|revision (no empty
-segments).
+Unique identifier for this spec's pretrained load, composed of
+pretrained_model_name_or_path|subfolder|variant|revision (no empty segments).
"""
if self.default_creation_method == "from_config":
return "null"
@@ -203,7 +210,8 @@ class ComponentSpec:
Returns:
Dict mapping loading field names to their values. e.g. {
"pretrained_model_name_or_path": "path/to/repo", "subfolder": "subfolder", "variant": "variant", "revision": "revision"
"pretrained_model_name_or_path": "path/to/repo", "subfolder": "subfolder", "variant": "variant",
"revision": "revision"
} If a segment value is "null", it's replaced with None. Returns None if load_id is "null" (indicating
component not created with `load` method).
"""
@@ -260,20 +268,21 @@ class ComponentSpec:
# YiYi TODO: add guard for type of model, if it is supported by from_pretrained
def load(self, **kwargs) -> Any:
"""Load component using from_pretrained."""
# select loading fields from kwargs passed from user: e.g. pretrained_model_name_or_path, subfolder, variant, revision, note the list could change
passed_loading_kwargs = {key: kwargs.pop(key) for key in self.loading_fields() if key in kwargs}
# merge loading field value in the spec with user passed values to create load_kwargs
load_kwargs = {key: passed_loading_kwargs.get(key, getattr(self, key)) for key in self.loading_fields()}
# pretrained_model_name_or_path is a required argument for from_pretrained, a.k.a. pretrained_model_name_or_path
pretrained_model_name_or_path = load_kwargs.pop("pretrained_model_name_or_path", None)
if pretrained_model_name_or_path is None:
raise ValueError(
"`pretrained_model_name_or_path` info is required when using `load` method (you can directly set it in `pretrained_model_name_or_path` field of the ComponentSpec or pass it as an argument)"
)
-is_single_file = _validate_single_file_path(pretrained_model_name_or_path)
+is_single_file = _is_single_file_path_or_url(pretrained_model_name_or_path)
if is_single_file and self.type_hint is None:
raise ValueError("type_hint is required when loading a single file model")
raise ValueError(
f"`type_hint` is required when loading a single file model but is missing for component: {self.name}"
)
if self.type_hint is None:
try:
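A sketch of the error path introduced above: loading a single-file checkpoint without a `type_hint` should now raise a message that names the component. The local path is a placeholder for an existing checkpoint file, and the import path is taken from the docstring example earlier in this diff:

```py
from diffusers.pipelines.modular_pipeline_utils import ComponentSpec

spec = ComponentSpec(name="unet", pretrained_model_name_or_path="./sd_xl_base_1.0.safetensors")
try:
    spec.load()
except ValueError as err:
    # e.g. "`type_hint` is required when loading a single file model but is missing for component: unet"
    print(err)
```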

View File

@@ -51,7 +51,7 @@ class SDXLModularTests:
pipeline_class = StableDiffusionXLModularPipeline
pipeline_blocks_class = StableDiffusionXLAutoBlocks
repo = "hf-internal-testing/tiny-sdxl-modular"
pretrained_model_name_or_path = "hf-internal-testing/tiny-sdxl-modular"
params = frozenset(
[
"prompt",
@@ -66,7 +66,9 @@ class SDXLModularTests:
batch_params = frozenset(["prompt", "negative_prompt", "image", "mask_image"])
def get_pipeline(self, components_manager=None, torch_dtype=torch.float32):
-pipeline = self.pipeline_blocks_class().init_pipeline(self.repo, components_manager=components_manager)
+pipeline = self.pipeline_blocks_class().init_pipeline(
+self.pretrained_model_name_or_path, components_manager=components_manager
+)
pipeline.load_components(torch_dtype=torch_dtype)
return pipeline
@@ -157,7 +159,7 @@ class SDXLModularIPAdapterTests:
blocks = self.pipeline_blocks_class()
_ = blocks.sub_blocks.pop("ip_adapter")
-pipe = blocks.init_pipeline(self.repo)
+pipe = blocks.init_pipeline(self.pretrained_model_name_or_path)
pipe.load_components(torch_dtype=torch.float32)
pipe = pipe.to(torch_device)
pipe.set_progress_bar_config(disable=None)