Skip to content

Commit 7a215ec

Browse files
yiyi@huggingface.co and claude committed
[LTX-2.3] Add modular pipeline modules and registration
Adds the LTX-2.3 modular pipeline structure:
- modular_pipelines/ltx2/: encoders, modular_blocks, modular_pipeline
- Registration in __init__.py, auto_pipeline.py, modular_pipeline mapping
- Checkpoint utilities for parity testing
- Supports T2V with CFG guidance (pixel-identical to reference)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent c3c9555 commit 7a215ec

File tree

10 files changed

+1556
-0
lines changed

10 files changed

+1556
-0
lines changed

src/diffusers/__init__.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -434,6 +434,9 @@
434434
"FluxKontextAutoBlocks",
435435
"FluxKontextModularPipeline",
436436
"FluxModularPipeline",
437+
"LTX2AutoBlocks",
438+
"LTX2Blocks",
439+
"LTX2ModularPipeline",
437440
"HeliosAutoBlocks",
438441
"HeliosModularPipeline",
439442
"HeliosPyramidAutoBlocks",
@@ -1195,6 +1198,9 @@
11951198
FluxKontextAutoBlocks,
11961199
FluxKontextModularPipeline,
11971200
FluxModularPipeline,
1201+
LTX2AutoBlocks,
1202+
LTX2Blocks,
1203+
LTX2ModularPipeline,
11981204
HeliosAutoBlocks,
11991205
HeliosModularPipeline,
12001206
HeliosPyramidAutoBlocks,

src/diffusers/modular_pipelines/__init__.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,11 @@
7070
"FluxKontextAutoBlocks",
7171
"FluxKontextModularPipeline",
7272
]
73+
_import_structure["ltx2"] = [
74+
"LTX2AutoBlocks",
75+
"LTX2Blocks",
76+
"LTX2ModularPipeline",
77+
]
7378
_import_structure["flux2"] = [
7479
"Flux2AutoBlocks",
7580
"Flux2KleinAutoBlocks",
@@ -103,6 +108,7 @@
103108
else:
104109
from .components_manager import ComponentsManager
105110
from .flux import FluxAutoBlocks, FluxKontextAutoBlocks, FluxKontextModularPipeline, FluxModularPipeline
111+
from .ltx2 import LTX2AutoBlocks, LTX2Blocks, LTX2ModularPipeline
106112
from .flux2 import (
107113
Flux2AutoBlocks,
108114
Flux2KleinAutoBlocks,
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
"""Lazy-import shim for the LTX-2 modular pipeline package.

Exposes the LTX-2 block collections and modular pipeline classes only when
both ``torch`` and ``transformers`` are installed; otherwise the matching
dummy placeholder objects are registered so that imports still resolve and
raise a helpful error on use.
"""

from typing import TYPE_CHECKING

from ...utils import (
    DIFFUSERS_SLOW_IMPORT,
    OptionalDependencyNotAvailable,
    _LazyModule,
    get_objects_from_module,
    is_torch_available,
    is_transformers_available,
)


_dummy_objects = {}
_import_structure = {}

try:
    if not (is_transformers_available() and is_torch_available()):
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    from ...utils import dummy_torch_and_transformers_objects  # noqa F403

    _dummy_objects.update(get_objects_from_module(dummy_torch_and_transformers_objects))
else:
    # Submodule -> public names; consumed by _LazyModule below.
    _import_structure["modular_blocks_ltx2"] = [
        "LTX2Blocks",
        "LTX2AutoBlocks",
        "LTX2Stage1Blocks",
        "LTX2Stage2Blocks",
        "LTX2FullPipelineBlocks",
    ]
    _import_structure["modular_blocks_ltx2_upsample"] = [
        "LTX2UpsampleBlocks",
        "LTX2UpsampleCoreBlocks",
    ]
    _import_structure["modular_pipeline"] = [
        "LTX2ModularPipeline",
        "LTX2UpsampleModularPipeline",
    ]

if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
    # Eager path: used by static type checkers and when DIFFUSERS_SLOW_IMPORT is set.
    try:
        if not (is_transformers_available() and is_torch_available()):
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        from ...utils.dummy_torch_and_transformers_objects import *  # noqa F403
    else:
        from .modular_blocks_ltx2 import (
            LTX2AutoBlocks,
            LTX2Blocks,
            LTX2FullPipelineBlocks,
            LTX2Stage1Blocks,
            LTX2Stage2Blocks,
        )
        from .modular_blocks_ltx2_upsample import LTX2UpsampleBlocks, LTX2UpsampleCoreBlocks
        from .modular_pipeline import LTX2ModularPipeline, LTX2UpsampleModularPipeline
else:
    import sys

    # Lazy path: replace this module with a _LazyModule proxy that imports
    # submodules on first attribute access.
    sys.modules[__name__] = _LazyModule(
        __name__,
        globals()["__file__"],
        _import_structure,
        module_spec=__spec__,
    )

    # Register dummy placeholders when the optional dependencies are missing.
    for name, value in _dummy_objects.items():
        setattr(sys.modules[__name__], name, value)
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
"""Checkpoint utilities for parity debugging. No effect when _checkpoints is None."""
2+
from dataclasses import dataclass, field
3+
4+
import torch
5+
6+
7+
@dataclass
class Checkpoint:
    """Per-name control record for parity-debugging checkpoints.

    A mapping of checkpoint name -> ``Checkpoint`` is consumed by
    ``_maybe_checkpoint``; with no mapping supplied the machinery is inert.
    """

    save: bool = False  # capture the values passed at this checkpoint into `data`
    stop: bool = False  # abort the run at this checkpoint (StopIteration is raised)
    load: bool = False  # NOTE(review): not read by _maybe_checkpoint in this file — presumably consumed elsewhere
    data: dict = field(default_factory=dict)  # captured values, keyed by the names in the `data` argument
13+
14+
15+
def _maybe_checkpoint(checkpoints, name, data):
16+
if not checkpoints:
17+
return
18+
ckpt = checkpoints.get(name)
19+
if ckpt is None:
20+
return
21+
if ckpt.save:
22+
ckpt.data.update({
23+
k: v.cpu().clone() if isinstance(v, torch.Tensor) else v
24+
for k, v in data.items()
25+
})
26+
if ckpt.stop:
27+
raise StopIteration(name)

0 commit comments

Comments
 (0)