Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions src/diffusers/models/auto_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from huggingface_hub.utils import validate_hf_hub_args

from ..configuration_utils import ConfigMixin
from ..utils import logging
from ..utils import DIFFUSERS_LOAD_ID_FIELDS, logging
from ..utils.dynamic_modules_utils import get_class_from_dynamic_module, resolve_trust_remote_code


Expand Down Expand Up @@ -220,4 +220,11 @@ def from_pretrained(cls, pretrained_model_or_path: Optional[Union[str, os.PathLi
raise ValueError(f"AutoModel can't find a model linked to {orig_class_name}.")

kwargs = {**load_config_kwargs, **kwargs}
return model_cls.from_pretrained(pretrained_model_or_path, **kwargs)
model = model_cls.from_pretrained(pretrained_model_or_path, **kwargs)

load_id_kwargs = {"pretrained_model_name_or_path": pretrained_model_or_path, **kwargs}
parts = [load_id_kwargs.get(field, "null") for field in DIFFUSERS_LOAD_ID_FIELDS]
load_id = "|".join("null" if p is None else p for p in parts)
model._diffusers_load_id = load_id

return model
2 changes: 2 additions & 0 deletions src/diffusers/modular_pipelines/modular_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -2142,6 +2142,8 @@ def load_components(self, names: Optional[Union[List[str], str]] = None, **kwarg
name
for name in self._component_specs.keys()
if self._component_specs[name].default_creation_method == "from_pretrained"
and self._component_specs[name].pretrained_model_name_or_path is not None
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

with this change, now, by default with pipeline.load_components():

  1. we don't load components that are already loaded in the pipeline
  2. we don't load components that do not have a valid pretrained_model_name_or_path

for (1), currently, on main, if we run the script below, we will load the text_encoder, add it to the pipeline with update_components, and then reload it again when you run load_components() - which is a bit unintuitive I think

in this PR, it will skip text_encoder instead of loading all components

pipe = ModularPipeline.from_pretrained()
text_encoder = AutoModel.from_pretrained(...)
pipe.update_components(text_encoder=text_encoder )
pipe.load_components(torch_dtype=torch.bfloat16)

for (2), in main, if you run this

from diffusers import ModularPipeline
pipe = ModularPipeline.from_pretrained("Qwen/Qwen-Image")
pipe.load_components(torch_dtype=torch.bfloat16)

you would get a confusing message like this


Failed to create component controlnet:
- Component spec: ComponentSpec(name='controlnet', type_hint=<class 'diffusers.models.controlnets.controlnet_qwenimage.QwenImageControlNetModel'>, description=None, config=None, pretrained_model_name_or_path=None, subfolder='', variant=None, revision=None, default_creation_method='from_pretrained', repo=None)
- load() called with kwargs: {'torch_dtype': torch.bfloat16}
If this component is not required for your workflow you can safely ignore this message.

Traceback:
Traceback (most recent call last):
  File "/fsx/yiyi/diffusers/src/diffusers/modular_pipelines/modular_pipeline.py", line 2173, in load_components
    components_to_register[name] = spec.load(**component_load_kwargs)
  File "/fsx/yiyi/diffusers/src/diffusers/modular_pipelines/modular_pipeline_utils.py", line 279, in load
    raise ValueError(
ValueError: `pretrained_model_name_or_path` info is required when using `load` method (you can directly set it in `pretrained_model_name_or_path` field of the ComponentSpec or pass it as an argument)

this is because controlnet is a component in the Qwen auto pipeline, but it is not included in Qwen/Qwen-Image; in this PR, we just skip it by default without emitting any message - I think this is more of an expected behavior

Copy link
Member

@sayakpaul sayakpaul Feb 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am guessing pretrained_model_name_or_path will always be set for a component that's expected?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It is up to the user.

We work with modular_model_index.json like this: https://huggingface.co/sayakpaul/new-template/blob/main/modular_model_index.json

Unlike model_index.json, you can update it manually to make the pipeline load from different repos.

This one was automatically converted from a regular Qwen Pipeline:

ModularPipeline.from_pretrained("Qwen/Qwen-Image").save_pretrained()

So it has all the pretrained_model_name_or_path info for all the components in "Qwen/Qwen-Image" but not the ControlNet.

Custom ones can be all empty to start with:

MyCustomBlock.init_pipeline().save_pretrained()

Another example: https://huggingface.co/diffusers/flux2-bnb-4bit-modular/blob/main/modular_model_index.json#L49

Here we updated the pretrained_model_name_or_path for text_encoder/transformer to load from quantized checkpoints instead.

So it's very flexible - and I think it's expected behavior that if pretrained_model_name_or_path is not set, we just skip loading that component (vs throwing an error).

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That makes sense, thanks for explaining. I was missing the point on the latter.

and getattr(self, name, None) is None
]
elif isinstance(names, str):
names = [names]
Expand Down
8 changes: 4 additions & 4 deletions src/diffusers/modular_pipelines/modular_pipeline_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,14 +15,14 @@
import inspect
import re
from collections import OrderedDict
from dataclasses import dataclass, field, fields
from dataclasses import dataclass, field
from typing import Any, Dict, List, Literal, Optional, Type, Union

import torch

from ..configuration_utils import ConfigMixin, FrozenDict
from ..loaders.single_file_utils import _is_single_file_path_or_url
from ..utils import is_torch_available, logging
from ..utils import DIFFUSERS_LOAD_ID_FIELDS, is_torch_available, logging


if is_torch_available():
Expand Down Expand Up @@ -185,7 +185,7 @@ def loading_fields(cls) -> List[str]:
"""
Return the names of all loading‐related fields (i.e. those whose field.metadata["loading"] is True).
"""
return [f.name for f in fields(cls) if f.metadata.get("loading", False)]
return DIFFUSERS_LOAD_ID_FIELDS.copy()

@property
def load_id(self) -> str:
Expand All @@ -197,7 +197,7 @@ def load_id(self) -> str:
return "null"
parts = [getattr(self, k) for k in self.loading_fields()]
parts = ["null" if p is None else p for p in parts]
return "|".join(p for p in parts if p)
return "|".join(parts)

@classmethod
def decode_load_id(cls, load_id: str) -> Dict[str, Optional[str]]:
Expand Down
1 change: 1 addition & 0 deletions src/diffusers/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
DEFAULT_HF_PARALLEL_LOADING_WORKERS,
DEPRECATED_REVISION_ARGS,
DIFFUSERS_DYNAMIC_MODULE_NAME,
DIFFUSERS_LOAD_ID_FIELDS,
FLAX_WEIGHTS_NAME,
GGUF_FILE_EXTENSION,
HF_ENABLE_PARALLEL_LOADING,
Expand Down
8 changes: 8 additions & 0 deletions src/diffusers/utils/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,3 +73,11 @@
ENCODE_ENDPOINT_SD_V1 = "https://qc6479g0aac6qwy9.us-east-1.aws.endpoints.huggingface.cloud/"
ENCODE_ENDPOINT_SD_XL = "https://xjqqhmyn62rog84g.us-east-1.aws.endpoints.huggingface.cloud/"
ENCODE_ENDPOINT_FLUX = "https://ptccx55jz97f9zgo.us-east-1.aws.endpoints.huggingface.cloud/"


# Ordered list of ComponentSpec loading fields used to build a model's
# "load id": the field values are joined with "|" (None rendered as "null").
# The order is significant — each position in the joined string identifies
# a field — so do not reorder these entries.
DIFFUSERS_LOAD_ID_FIELDS = [
    "pretrained_model_name_or_path",
    "subfolder",
    "variant",
    "revision",
]
Loading