Skip to content

Commit 280285f

Browse files
committed
combine attention processors
1 parent 72a9489 commit 280285f

File tree

3 files changed: +206 additions, −126 deletions

src/diffusers/models/attention.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
from torch import nn
1919

2020
from ..utils import deprecate
21-
from .attention_processor import Attention, SpatialAttnProcessor
21+
from .attention_processor import Attention
2222
from .embeddings import CombinedTimestepLabelEmbeddings
2323

2424

@@ -104,9 +104,9 @@ def _as_attention_processor_attention(self):
104104
bias=True,
105105
upcast_softmax=True,
106106
norm_num_groups=self.group_norm.num_groups,
107-
processor=SpatialAttnProcessor(),
108107
eps=self.group_norm.eps,
109108
rescale_output_factor=self.rescale_output_factor,
109+
residual_connection=True,
110110
)
111111

112112
param = next(self.parameters())

0 commit comments

Comments (0)