Mirror of https://github.com/huggingface/diffusers.git, synced 2026-01-27 17:22:53 +03:00
Fix Basic Transformer Block (#5683)
* fix

* Update src/diffusers/models/attention.py

Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>
src/diffusers/models/attention.py
@@ -287,7 +287,7 @@ class BasicTransformerBlock(nn.Module):
             else:
                 raise ValueError("Incorrect norm")
 
-            if self.pos_embed is not None and self.use_ada_layer_norm_single is None:
+            if self.pos_embed is not None and self.use_ada_layer_norm_single is False:
                 norm_hidden_states = self.pos_embed(norm_hidden_states)
 
             attn_output = self.attn2(
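The fix is a one-word change in the guard around the positional embedding applied before cross-attention. A minimal, self-contained sketch of why it matters follows (not the diffusers source; the helper names `apply_pos_embed_old` and `apply_pos_embed_fixed` and the stand-in values are hypothetical, assuming `use_ada_layer_norm_single` is a plain boolean flag as in `BasicTransformerBlock`): a boolean is never `None`, so the old check made the branch dead code and `pos_embed` was silently skipped before `attn2`.

# Minimal sketch, not the diffusers implementation: shows why gating on
# `is None` never applies the positional embedding when the flag is a bool.

def apply_pos_embed_old(norm_hidden_states, pos_embed, use_ada_layer_norm_single):
    # Old guard: a boolean is never `None`, so this branch can never run.
    if pos_embed is not None and use_ada_layer_norm_single is None:
        norm_hidden_states = pos_embed(norm_hidden_states)
    return norm_hidden_states

def apply_pos_embed_fixed(norm_hidden_states, pos_embed, use_ada_layer_norm_single):
    # Fixed guard: apply pos_embed exactly when the ada-layer-norm-single
    # path is not in use.
    if pos_embed is not None and use_ada_layer_norm_single is False:
        norm_hidden_states = pos_embed(norm_hidden_states)
    return norm_hidden_states

# Stand-ins for the real tensors/modules, purely for illustration.
pos_embed = lambda x: x + 1
states = 0

print(apply_pos_embed_old(states, pos_embed, False))    # 0 -- embedding skipped
print(apply_pos_embed_fixed(states, pos_embed, False))  # 1 -- embedding applied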