diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py
index 92d84acbbe..0aacddf34d 100644
--- a/src/diffusers/models/attention.py
+++ b/src/diffusers/models/attention.py
@@ -201,13 +201,11 @@ class Transformer2DModel(ModelMixin, ConfigMixin):
             residual = hidden_states
 
             hidden_states = self.norm(hidden_states)
-
             if not self.use_linear_projection:
                 hidden_states = self.proj_in(hidden_states)
                 inner_dim = hidden_states.shape[1]
                 hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * weight, inner_dim)
             else:
-                hidden_states = self.norm(hidden_states)
                 inner_dim = hidden_states.shape[1]
                 hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * weight, inner_dim)
                 hidden_states = self.proj_in(hidden_states)
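
This hunk removes the second `self.norm(hidden_states)` call: the norm is already applied once before the `if not self.use_linear_projection:` branch, so the linear-projection path was normalizing the activations twice. A minimal sketch of why the double application matters; the GroupNorm size and the filled-in affine parameters below are illustrative placeholders, not the real Transformer2DModel configuration:

import torch
from torch import nn

# Illustrative sizes only; Transformer2DModel builds self.norm from its own
# channel/group configuration.
norm = nn.GroupNorm(num_groups=8, num_channels=32, eps=1e-6, affine=True)
with torch.no_grad():
    # Pretend-trained, non-uniform per-channel affine parameters.
    norm.weight.copy_(torch.rand(32) + 0.5)
    norm.bias.copy_(torch.rand(32) - 0.5)

hidden_states = torch.randn(2, 32, 16, 16)

once = norm(hidden_states)         # conv-projection path (and both paths after the fix)
twice = norm(norm(hidden_states))  # what the linear-projection path computed before the fix

# The second pass re-standardizes each group and re-applies the per-channel
# affine parameters, so the two projection paths saw different activations.
print(torch.allclose(once, twice), (once - twice).abs().max().item())

With the extra call gone, the two code paths differ only in the order of the projection and the reshape, as intended.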