Making hidden_states contiguous before applying multiple linear layers
@@ -277,7 +277,7 @@ class AttentionBlock(nn.Module):
         # norm
         hidden_states = self.group_norm(hidden_states)
 
-        hidden_states = hidden_states.view(batch, channel, height * width).transpose(1, 2)
+        hidden_states = hidden_states.view(batch, channel, height * width).transpose(1, 2).contiguous()
 
         # proj to q, k, v
         query_proj = self.query(hidden_states)
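Why the change helps: transpose() in PyTorch returns a view with swapped strides rather than moving data, so the resulting tensor is non-contiguous in memory. The linear projections that follow (query, key, value) each consume this tensor, and a single up-front .contiguous() copy gives all of them a dense input instead of strided memory access. Below is a minimal sketch of the effect, with plain nn.Linear layers standing in for the block's self.query/self.key/self.value modules:

import torch
import torch.nn as nn

batch, channel, height, width = 2, 32, 8, 8

hidden_states = torch.randn(batch, channel, height, width)
hidden_states = hidden_states.view(batch, channel, height * width).transpose(1, 2)
print(hidden_states.is_contiguous())  # False: transpose only swaps strides

# One explicit copy into a dense layout before the projections
hidden_states = hidden_states.contiguous()
print(hidden_states.is_contiguous())  # True

# Stand-ins for self.query / self.key / self.value (an assumption for this
# sketch, not the actual diffusers modules)
query, key, value = (nn.Linear(channel, channel) for _ in range(3))

query_proj = query(hidden_states)  # shape (batch, height * width, channel)
key_proj = key(hidden_states)
value_proj = value(hidden_states)
print(query_proj.shape)  # torch.Size([2, 64, 32])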