Skip to content

Commit 9f855a3

Browse files
authored
fix for crash with inpainting model introduced by #1866 (#1922)
* fix for crash using inpainting model
* prevent crash due to invalid attention_maps_saver
1 parent 62b80a8 commit 9f855a3

File tree

1 file changed

+7
-4
lines changed

1 file changed

+7
-4
lines changed

ldm/models/diffusion/ksampler.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -208,9 +208,12 @@ def route_callback(k_callback_values):
208208
model_wrap_cfg = CFGDenoiser(self.model, threshold=threshold, warmup=max(0.8*S,S-10))
209209
model_wrap_cfg.prepare_to_sample(S, extra_conditioning_info=extra_conditioning_info)
210210

211-
attention_map_token_ids = range(1, extra_conditioning_info.tokens_count_including_eos_bos - 1)
212-
attention_maps_saver = None if attention_maps_callback is None else AttentionMapSaver(token_ids = attention_map_token_ids, latents_shape=x.shape[-2:])
213-
if attention_maps_callback is not None:
211+
# setup attention maps saving. checks for None are because there are multiple code paths to get here.
212+
attention_maps_saver = None
213+
if attention_maps_callback is not None and extra_conditioning_info is not None:
214+
eos_token_index = extra_conditioning_info.tokens_count_including_eos_bos - 1
215+
attention_map_token_ids = range(1, eos_token_index)
216+
attention_maps_saver = AttentionMapSaver(token_ids = attention_map_token_ids, latents_shape=x.shape[-2:])
214217
model_wrap_cfg.invokeai_diffuser.setup_attention_map_saving(attention_maps_saver)
215218

216219
extra_args = {
@@ -226,7 +229,7 @@ def route_callback(k_callback_values):
226229
),
227230
None,
228231
)
229-
if attention_maps_callback is not None:
232+
if attention_maps_saver is not None:
230233
attention_maps_callback(attention_maps_saver)
231234
return sampling_result
232235

0 commit comments

Comments (0)