diff --git a/src/diffusers/pipelines/flux/pipeline_flux_fill.py b/src/diffusers/pipelines/flux/pipeline_flux_fill.py
index 3c3e92c7d2a7..7d05ae2b383c 100644
--- a/src/diffusers/pipelines/flux/pipeline_flux_fill.py
+++ b/src/diffusers/pipelines/flux/pipeline_flux_fill.py
@@ -220,6 +220,7 @@ def __init__(
             tokenizer_2=tokenizer_2,
             transformer=transformer,
             scheduler=scheduler,
+            ip_adapter=ip_adapter,
         )
         self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1) if getattr(self, "vae", None) else 8
         # Flux latents are turned into 2x2 patches and packed. This means the latent width and height has to be divisible
@@ -240,6 +241,15 @@ def __init__(
         )
         self.default_sample_size = 128
 
+    def load_ip_adapter(self, ip_adapter_path):
+        """Load an IP-Adapter state from *ip_adapter_path* and store it on the pipeline."""
+        try:
+            # NOTE(review): torch.load unpickles arbitrary objects — prefer weights_only=True for untrusted files.
+            self.ip_adapter = torch.load(ip_adapter_path)
+            print(f"[FluxFillPipeline] IP Adapter loaded from: {ip_adapter_path}")
+        except Exception as e:
+            print(f"[FluxFillPipeline] Failed to load IP Adapter: {str(e)}")
+            raise
+
     # Copied from diffusers.pipelines.flux.pipeline_flux.FluxPipeline._get_t5_prompt_embeds
     def _get_t5_prompt_embeds(
         self,