mirror of https://github.com/huggingface/diffusers.git

Making hidden_state contiguous before applying multiple linear layers

commit 1cd09cccf3
parent fa4d738cbb
Author: thomasw21
Date: 2022-11-22 11:55:03 +01:00


@@ -277,7 +277,7 @@ class AttentionBlock(nn.Module):
         # norm
         hidden_states = self.group_norm(hidden_states)
-        hidden_states = hidden_states.view(batch, channel, height * width).transpose(1, 2)
+        hidden_states = hidden_states.view(batch, channel, height * width).transpose(1, 2).contiguous()
         # proj to q, k, v
         query_proj = self.query(hidden_states)
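
For context, a minimal standalone PyTorch sketch of the pattern this patch targets (toy sizes; the Linear layers are hypothetical stand-ins for the block's query/key/value projections, not the actual diffusers module). transpose returns a strided view rather than moving data, so without the patch each of the three projections receives a non-contiguous input, which, depending on the backend, can each trigger an internal copy; calling .contiguous() once up front materializes the transposed layout a single time and lets all three matmuls share it.

import torch
import torch.nn as nn

# Toy dimensions standing in for (batch, channel, height, width).
batch, channel, height, width = 2, 32, 8, 8
hidden_states = torch.randn(batch, channel, height, width)

# Flattening the spatial dims and transposing yields a non-contiguous view:
# the data still sits in (batch, channel, h*w) order in memory.
hidden_states = hidden_states.view(batch, channel, height * width).transpose(1, 2)
print(hidden_states.is_contiguous())  # False

# Materialize the transposed layout once, so the three projections below
# all read from a contiguous buffer instead of each handling the strided
# view on its own.
hidden_states = hidden_states.contiguous()

# Hypothetical stand-ins for self.query / self.key / self.value.
query = nn.Linear(channel, channel)
key = nn.Linear(channel, channel)
value = nn.Linear(channel, channel)

query_proj = query(hidden_states)
key_proj = key(hidden_states)
value_proj = value(hidden_states)
print(query_proj.shape)  # torch.Size([2, 64, 32])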